微信统计数据的获取及存储

来源:互联网 发布:ubuntu samba 客户端 编辑:程序博客网 时间:2024/06/01 22:51

目录:

1、需求:

2、开发前期准备:

3、代码结构:

4、Mysql数据表设计:

5、代码展示:

6、结果展示:

———————————————————————————————-

1、需求:

熟悉获取微信统计数据的接口,并设计数据获取方案,微信数据接口文档地址:https://mp.weixin.qq.com/wiki/15/88726a421bfc54654a3095821c3ca3bb.html
这里写图片描述

2、开发前期准备:

1、ACCESS_TOKEN:获取微信唯一标识ACCESS_TOKEN,同时也是公众号的全局唯一票据
2、需要公司公众号的AppID和AppSecret从而得到ACCESS_TOKEN
3、需要Mysql的建表权限
4、学习步骤:weiXinEntry.py–>DataAnalysis.py–>MysqlUtils.py

3、代码结构:

这里写图片描述

4、Mysql数据表设计:

WeiXinSQL

-- Schema and sample rows for the WeChat analytics store.
-- Fix: the original ran all statements together with no separators
-- (")...INSERT ... (0,0,0,0)CREATE TABLE ..."); every statement now
-- ends with ';' so the script is actually executable.

-- Daily user growth (getusersummary / getusercumulate responses).
CREATE TABLE getUser (
    id            INT NOT NULL AUTO_INCREMENT,
    datasource    VARCHAR(45) NULL,
    ref_date      VARCHAR(45) NULL,
    user_source   INT NULL,
    new_user      INT NULL,
    cancel_user   INT NULL,
    cumulate_user INT NULL,
    PRIMARY KEY (id, datasource)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;

INSERT INTO `getUser` (`datasource`, `ref_date`, `user_source`, `new_user`, `cancel_user`, `cumulate_user`)
VALUES ('getusersummary', '2016-12-02', 0, 0, 0, 0);

-- Article (graphic message) statistics, shared by all article endpoints.
CREATE TABLE getArticle (
    id                  INT NOT NULL AUTO_INCREMENT,
    datasource          VARCHAR(45) NULL,
    ref_date            VARCHAR(45) NULL,
    ref_hour            INT NULL,
    stat_date           VARCHAR(45) NULL,
    msgid               VARCHAR(45) NULL,
    title               VARCHAR(45) NULL,
    int_page_read_user  INT NULL,
    int_page_read_count INT NULL,
    ori_page_read_user  INT NULL,
    ori_page_read_count INT NULL,
    share_scene         INT NULL,
    share_user          INT NULL,
    share_count         INT NULL,
    add_to_fav_user     INT NULL,
    add_to_fav_count    INT NULL,
    target_user         INT NULL,
    user_source         INT NULL,
    PRIMARY KEY (id, datasource)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;

INSERT INTO `getArticle` (`datasource`, `title`, `ref_date`, `ref_hour`, `stat_date`, `msgid`,
                          `int_page_read_user`, `int_page_read_count`, `ori_page_read_user`, `ori_page_read_count`,
                          `share_scene`, `share_user`, `share_count`, `add_to_fav_user`, `add_to_fav_count`,
                          `target_user`, `user_source`)
VALUES ('getuserreadhour', '', '2017-01-03', 1500, '0', '0', 1, 2, 0, 0, 0, 0, 0, 0, 0, 0, 5);

-- Interface (API call) statistics.
CREATE TABLE getInterface (
    id              INT NOT NULL AUTO_INCREMENT,
    datasource      VARCHAR(45) NULL,
    ref_date        VARCHAR(45) NULL,
    ref_hour        INT NULL,
    callback_count  INT NULL,
    fail_count      INT NULL,
    total_time_cost INT NULL,
    max_time_cost   INT NULL,
    PRIMARY KEY (id, datasource)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;

INSERT INTO `getInterface` (`datasource`, `ref_date`, `ref_hour`, `callback_count`, `fail_count`, `total_time_cost`, `max_time_cost`)
VALUES ('getinterfacesummary', '2017-01-03', 0, 3, 0, 950, 340);

-- Upstream (user -> account) message statistics.
CREATE TABLE getupStreammsg (
    id                  INT NOT NULL AUTO_INCREMENT,
    datasource          VARCHAR(45) NULL,
    ref_date            VARCHAR(45) NULL,
    ref_hour            INT NULL,
    msg_type            INT NULL,
    msg_user            INT NULL,
    msg_count           INT NULL,
    count_interval      INT NULL,
    int_page_read_count INT NULL,
    ori_page_read_user  INT NULL,
    PRIMARY KEY (id, datasource)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;

INSERT INTO `getupStreammsg` (`datasource`, `ref_date`, `ref_hour`, `msg_type`, `msg_user`, `msg_count`, `count_interval`, `int_page_read_count`, `ori_page_read_user`)
VALUES ('getupstreammsg', '2016-12-01', 0, 1, 1, 101, 0, 0, 0);

5、代码展示:

weiXinEntry.py

#!/usr/bin/python
# -*- coding: UTF-8 -*-
"""Entry point: obtain an ACCESS_TOKEN, then pull WeChat analytics one day
at a time over a fixed date range.

Fixes: `get_content` closed its handle only on the success path (leak if
`read()` raised) -- now `try/finally`; the manual `while`/counter loop is
a plain `for` over `range`.
"""
import datetime
import json
import urllib2

import DataAnalysis


def get_content(url):
    """Return the raw response body of *url*, closing the handle even on error."""
    handle = urllib2.urlopen(url)
    try:
        return handle.read()
    finally:
        handle.close()


def getAccess_token(AppID, AppSecret):
    """Fetch ACCESS_TOKEN -- the account-wide credential every data-cube call needs."""
    url = ("https://api.weixin.qq.com/cgi-bin/token?grant_type=client_credential"
           "&appid=" + AppID + "&secret=" + AppSecret)
    info = get_content(url)
    return json.loads(info)["access_token"]


if __name__ == "__main__":
    # Credentials of the company's official account.
    AppID = "公司的AppID"
    AppSecret = "公司的AppSecret"
    ACCESS_TOKEN = getAccess_token(AppID, AppSecret)
    print(ACCESS_TOKEN)

    startDay = "2016-12-25"
    endDay = "2017-01-16"
    StartDay = datetime.datetime.strptime(startDay, "%Y-%m-%d").date()
    EndDay = datetime.datetime.strptime(endDay, "%Y-%m-%d").date()
    countdays = (EndDay - StartDay).days

    dataAnalysis = DataAnalysis.getDataAnalysis()
    # The data-cube endpoints limit the span per request, so query each
    # day individually with begin_date == end_date.
    for offset in range(countdays):
        FirstDay = StartDay + datetime.timedelta(days=offset)
        print("FirstDay  :  " + str(FirstDay))
        dataAnalysis.getUser(ACCESS_TOKEN, FirstDay, FirstDay)
        dataAnalysis.getArticle(ACCESS_TOKEN, FirstDay, FirstDay)
        dataAnalysis.getupstreammsg(ACCESS_TOKEN, FirstDay, FirstDay)
        dataAnalysis.getInterface(ACCESS_TOKEN, FirstDay, FirstDay)

DataAnalysis.py

#!/usr/bin/python
# -*- coding: UTF-8 -*-
"""WeChat data-cube client: fetch analytics from api.weixin.qq.com and
store them through MysqlUtils.

Bug fixed: getArticleInsert's field tuple was missing a comma between
"int_page_read_user" and "int_page_read_count", so Python silently
concatenated them into one bogus field name and neither field ever got a
default -- the resulting KeyError was then swallowed by a bare `except`.
Bare excepts are now `except Exception` and print the error.

NOTE(review): the INSERT statements are built with %-interpolation; an
article title containing a quote would break the SQL (injection risk).
Kept as-is because MysqlUtils.dbexecute only accepts a finished SQL
string -- both sides should move to parameterized queries.
"""
import json

import requests

import MysqlUtils


class getDataAnalysis:
    """One fetch method per analytics area plus the matching insert helpers."""

    @staticmethod
    def _fill_defaults(record, fields, default=0):
        # The API omits zero-valued fields; backfill them so the INSERT
        # interpolation below never raises KeyError.
        for field in fields:
            if field not in record:
                record[field] = default

    @staticmethod
    def _fetch(endpoint, access_token, r_date):
        """POST the date-range payload to a data-cube *endpoint*; return decoded JSON."""
        url = "https://api.weixin.qq.com/datacube/" + endpoint + "?access_token=" + access_token
        response = requests.post(url, data=json.dumps(r_date))
        return json.loads(response.content)

    def getUserInsert(self, name, datasource):
        """Insert user-analysis rows from one API response, tagged *name*."""
        fields = ("user_source", "new_user", "cancel_user", "cumulate_user")
        try:
            for row in datasource['list']:
                self._fill_defaults(row, fields)
                sql = "INSERT INTO `getUser` (`datasource`, `ref_date`, `user_source`, `new_user` , `cancel_user` , `cumulate_user`) " \
                      "VALUES  ('%s','%s',%d,%d,%d,%d)" % (name, row['ref_date'], row['user_source'],
                                                           row['new_user'], row['cancel_user'], row['cumulate_user'])
                print(sql)
                MysqlUtils.dbexecute(sql)
        except Exception as exc:
            print("ERROR ----  ", name, datasource, exc)

    def getUser(self, access_token, begin_date, end_date):
        """User analysis endpoints: daily growth and cumulative totals."""
        r_date = {'begin_date': str(begin_date), 'end_date': str(end_date)}
        self.getUserInsert("getusersummary", self._fetch("getusersummary", access_token, r_date))
        self.getUserInsert("getusercumulate", self._fetch("getusercumulate", access_token, r_date))

    def getArticledetailsInsert(self, name, datasource):
        """Insert article responses that nest per-day stats under 'details'."""
        top_fields = ("msgid", "title")
        detail_fields = ("stat_date", "target_user", "int_page_read_user", "int_page_read_count",
                         "ori_page_read_user", "ori_page_read_count", "share_user",
                         "share_count", "add_to_fav_user", "add_to_fav_count")
        try:
            for row in datasource['list']:
                self._fill_defaults(row, top_fields)
                for detail in row['details']:
                    self._fill_defaults(detail, detail_fields)
                    sql = "INSERT INTO `getArticle` (`datasource`,`title`,`ref_date`,`stat_date`,`msgid`,`int_page_read_user`,`int_page_read_count`,`ori_page_read_user`," \
                          "`ori_page_read_count`,`share_user`,`share_count`,`add_to_fav_user`,`add_to_fav_count`,`target_user`) " \
                          "VALUES  ('%s','%s','%s','%s','%s',%d,%d,%d,%d,%d,%d,%d,%d,%d)" % (
                              name, row['title'], row['ref_date'], detail['stat_date'], row['msgid'],
                              detail['int_page_read_user'], detail['int_page_read_count'],
                              detail['ori_page_read_user'], detail['ori_page_read_count'],
                              detail['share_user'], detail['share_count'],
                              detail['add_to_fav_user'], detail['add_to_fav_count'], detail['target_user'])
                    print(sql)
                    MysqlUtils.dbexecute(sql)
        except Exception as exc:
            print("ERROR ----  ", name, datasource, exc)

    def getArticleInsert(self, name, datasource):
        """Insert flat article responses (one row per list entry)."""
        # Fixed: comma restored between int_page_read_user / int_page_read_count.
        fields = ("ref_hour", "stat_date", "msgid", "int_page_read_user", "int_page_read_count",
                  "ori_page_read_user", "ori_page_read_count", "share_scene", "share_user",
                  "share_count", "add_to_fav_user", "add_to_fav_count", "target_user", "user_source")
        try:
            for row in datasource['list']:
                self._fill_defaults(row, fields)
                if "title" not in row:
                    row["title"] = ""
                sql = "INSERT INTO `getArticle` (`datasource`,`title`,`ref_date`,`ref_hour`,`stat_date`,`msgid`,`int_page_read_user`,`int_page_read_count`,`ori_page_read_user`," \
                      "`ori_page_read_count`,`share_scene`,`share_user`,`share_count`,`add_to_fav_user`,`add_to_fav_count`,`target_user`,`user_source`) " \
                      "VALUES  ('%s','%s','%s',%d,'%s','%s',%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d)" % (
                          name, row['title'], row['ref_date'], row['ref_hour'], row['stat_date'], row['msgid'],
                          row['int_page_read_user'], row['int_page_read_count'], row['ori_page_read_user'],
                          row['ori_page_read_count'], row['share_scene'], row['share_user'], row['share_count'],
                          row['add_to_fav_user'], row['add_to_fav_count'], row['target_user'], row['user_source'])
                print(sql)
                MysqlUtils.dbexecute(sql)
        except Exception as exc:
            print("ERROR ----  ", name, datasource, exc)

    def getArticle(self, access_token, begin_date, end_date):
        """Article (graphic message) analysis endpoints."""
        r_date = {'begin_date': str(begin_date), 'end_date': str(end_date)}
        self.getArticleInsert("getarticlesummary", self._fetch("getarticlesummary", access_token, r_date))
        # getarticletotal nests daily stats in 'details', hence the other insert.
        self.getArticledetailsInsert("getarticletotal", self._fetch("getarticletotal", access_token, r_date))
        self.getArticleInsert("getuserread", self._fetch("getuserread", access_token, r_date))
        self.getArticleInsert("getuserreadhour", self._fetch("getuserreadhour", access_token, r_date))
        self.getArticleInsert("getusershare", self._fetch("getusershare", access_token, r_date))
        self.getArticleInsert("getusersharehour", self._fetch("getusersharehour", access_token, r_date))

    def getupstreammsgInsert(self, name, datasource):
        """Insert upstream-message analysis rows."""
        fields = ("ref_hour", "msg_type", "msg_user", "msg_count", "count_interval",
                  "int_page_read_count", "ori_page_read_user")
        try:
            for row in datasource['list']:
                self._fill_defaults(row, fields)
                sql = "INSERT INTO `getupStreammsg` (`datasource`,`ref_date`,`ref_hour`,`msg_type`,`msg_user`,`msg_count`,`count_interval`,`int_page_read_count`,`ori_page_read_user`) " \
                      "VALUES  ('%s','%s',%d,%d,%d,%d,%d,%d,%d)" % (
                          name, row['ref_date'], row['ref_hour'], row['msg_type'], row['msg_user'],
                          row['msg_count'], row['count_interval'], row['int_page_read_count'], row['ori_page_read_user'])
                print(sql)
                MysqlUtils.dbexecute(sql)
        except Exception as exc:
            print("ERROR ----  ", name, datasource, exc)

    def getupstreammsg(self, access_token, begin_date, end_date):
        """Message analysis endpoints: daily/hourly/weekly/monthly and distributions."""
        r_date = {'begin_date': str(begin_date), 'end_date': str(end_date)}
        for endpoint in ("getupstreammsg", "getupstreammsghour", "getupstreammsgweek",
                         "getupstreammsgmonth", "getupstreammsgdist",
                         "getupstreammsgdistweek", "getupstreammsgdistmonth"):
            self.getupstreammsgInsert(endpoint, self._fetch(endpoint, access_token, r_date))

    def getInterfaceInsert(self, name, datasource):
        """Insert interface (API call) analysis rows."""
        fields = ("ref_hour", "callback_count", "fail_count", "total_time_cost", "max_time_cost")
        try:
            for row in datasource['list']:
                self._fill_defaults(row, fields)
                sql = "INSERT INTO `getInterface` (`datasource`,`ref_date`,`ref_hour`,`callback_count`,`fail_count`,`total_time_cost`,`max_time_cost`) " \
                      "VALUES  ('%s','%s',%d,%d,%d,%d,%d)" % (
                          name, row['ref_date'], row['ref_hour'], row['callback_count'],
                          row['fail_count'], row['total_time_cost'], row['max_time_cost'])
                print(sql)
                MysqlUtils.dbexecute(sql)
        except Exception as exc:
            print("ERROR ----  ", name, datasource, exc)

    def getInterface(self, access_token, begin_date, end_date):
        """Interface analysis endpoints: summary and hourly breakdown."""
        r_date = {'begin_date': str(begin_date), 'end_date': str(end_date)}
        self.getInterfaceInsert("getinterfacesummary", self._fetch("getinterfacesummary", access_token, r_date))
        self.getInterfaceInsert("getinterfacesummaryhour", self._fetch("getinterfacesummaryhour", access_token, r_date))

MysqlUtils.py

#!/usr/bin/python
# -*- coding: UTF-8 -*-
"""Thin MySQL helper: one shared connection/cursor used by the WeChat loaders.

Bug fixed: dbexecute closed the shared connection inside its error branch,
so every later insert failed once a single statement errored.  Now it only
rolls back; closing is dbClose()'s job (which also closes the connection,
not just the cursor).
"""
import mysql.connector

# Module-level connection shared by every helper below.
db = mysql.connector.connect(host='ip',
                             user='root',
                             passwd='123456',
                             db='bi_data')
cursor = db.cursor()


def dbexecute(sql):
    """Execute an INSERT/DELETE statement and commit; roll back on failure."""
    try:
        cursor.execute(sql)
        db.commit()
    except Exception as exc:
        # Roll back the failed statement but keep the connection usable.
        db.rollback()
        print("Error: statement failed, rolled back: %s" % exc)


def dbquery(sql):
    """Execute a SELECT and return all rows; None on failure."""
    try:
        cursor.execute(sql)
        return cursor.fetchall()
    except Exception:
        print("Error: unable to fetch data")


def dbClose():
    """Close the cursor and the shared connection."""
    cursor.close()
    db.close()


if __name__ == "__main__":
    # Offline check of the default-backfill logic used by DataAnalysis.
    sample = {"list": [{"ref_date": "2017-01-02", "user_source": 0, "cumulate_user": 5}]}
    fields = ("user_source", "new_user", "cancel_user", "cumulate_user")
    for row in sample['list']:
        for field in fields:
            if field not in row:
                row[field] = 0
        print(row)

6、结果展示:

这里写图片描述

这里写图片描述

1 0
原创粉丝点击