import pymysql
import requests
import json
from jsonpath import jsonpath

# MySQL connection to the local "fp" database; `cursor` is shared by every
# insert below.  NOTE(review): credentials are hard-coded — move to config
# or environment variables before sharing this script.
conn = pymysql.connect(host="localhost", user="root", password="admin", database="fp", charset="utf8")
cursor = conn.cursor()

# Request headers for data.wxb.com.  The Cookie carries a logged-in PHP
# session id, so requests will start failing once that session expires —
# TODO: refresh the cookie (or log in programmatically) when that happens.
headers={
'Cookie':'Hm_lvt_5859c7e2fd49a1739a0b0f5a28532d91=1578404934,1578443087,1578443556,1578532873; aliyungf_tc=AQAAAM71PkpcXwAAAwQacyeIuAA2zhzv; Hm_lpvt_5859c7e2fd49a1739a0b0f5a28532d91=1578532873; PHPSESSID=2ec06c9d9cc2c56e77d450a81c12b93d; visit-wxb-id=76f0f2d374860d103562d63ac4c81d39',
'Referer':'https://data.wxb.com/rank',
'User-Agent':'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:71.0) Gecko/20100101 Firefox/71.0'
}

# Daily ranking endpoint: 2020-01-08, overall list (%E6%80%BB%E6%A6%9C =
# "总榜"), page 1, 50 entries.  NOTE(review): date and page are hard-coded —
# see the commented-out find_pageA/collect_30_pagesA prototype below.
url='https://data.wxb.com/rank/day/2020-01-08/%E6%80%BB%E6%A6%9C?sort=&page=1&page_size=50&is_new=1'
response = requests.get(url, headers=headers)

# The endpoint returns a bare JSON fragment; wrap it in an object so that
# json.loads accepts it and jsonpath can search it from a single root.
json_str = '{"content":'+response.text+'}'
unicodestr = json.loads(json_str)

# One parallel list per column, aligned by position.  NOTE(review):
# jsonpath() returns False (not []) when a path matches nothing — the
# insert loop below assumes every variable is a list; verify on bad pages.
name = jsonpath(unicodestr,'$..name')
push_total = jsonpath(unicodestr,'$..push_total')  # number of publishes
read_num_max = jsonpath(unicodestr,'$..read_num_max')  # total read count
top_read_num_avg = jsonpath(unicodestr,'$..top_read_num_avg')  # headline-post read count
avg_read_num = jsonpath(unicodestr,'$..avg_read_num')  # average read count
avg_like_num = jsonpath(unicodestr,'$..avg_like_num')  # average "Wow/looking" count
fans_num_estimate = jsonpath(unicodestr,'$..fans_num_estimate')  # estimated follower count
cate_id = jsonpath(unicodestr,'$..cate_id')  # category id
index_scores = jsonpath(unicodestr,'$..index_scores')  # index score
print(name)
# hname = []
# releasetimes = []
# totalreadings = []
# headreading = []
# averagereading = []
# averagelooking = []
# numberoffans = []
# htype = []
# zhishu = []

# Insert one row per ranked account.  The SQL template is loop-invariant,
# so build it once; %s placeholders are pymysql parameter markers, NOT
# Python string formatting — pymysql escapes each value itself, which
# fixes the SQL-injection/breakage risk of the old %-interpolated query
# (scraped account names can contain quotes).
sql = ("insert into gzh(hname, releasetimes, totalreadings, headreading,"
       " averagereading, averagelooking, numberoffans,htype,zhishu)"
       " VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s)")
# zip walks all nine parallel column lists in lockstep, replacing the old
# manual `i` counter.
for row in zip(name, push_total, read_num_max, top_read_num_avg,
               avg_read_num, avg_like_num, fans_num_estimate,
               cate_id, index_scores):
    print(row[0])  # account name, for progress visibility
    try:
        cursor.execute(sql, row)
        # Commit each row so a later failure doesn't lose earlier inserts.
        conn.commit()
    except Exception as e:
        print(e)
        # Roll back the failed row and continue with the next one.
        conn.rollback()

# print(name, push_total, read_num_max , top_read_num_avg , avg_read_num , avg_like_num , fans_num_estimate ,cate_id, index_scores )

# def find_pageA(c):  # 根据传递参数c（提取的页数）来选择目标url地址
#     return 'https://data.wxb.com/rank/day/2020-01-08/%E6%80%BB%E6%A6%9C?sort=&page='+str(c)+'&page_size=50&is_new='+str(c)+''
# # https://data.wxb.com/rank/day/2020-01-08/%E6%80%BB%E6%A6%9C?sort=&page=2&page_size=50&is_new=2
#
# def collect_30_pagesA():index_scores
#     c= 1
#     while(c<5):
#         time.sleep(2)
#         print('第', c, '页:')
#         response = requests.get(find_pageA(c), headers=headers)
#         json_str = '{"content":'+response.text+'}'
#         unicodestr = json.loads(json_str)
#         name = jsonpath(unicodestr, '$..name')
#         cate_id = jsonpath(unicodestr, '$..cate_id')
#         print(name, cate_id)
#         c+=1






