#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import requests
import json
import time
import datetime
import threading
import pymongo
import constant
from apscheduler.schedulers.blocking import BlockingScheduler


# Paid-proxy configuration (abuyun dynamic HTTP tunnel)
def get_proxy():
    """Return a requests-style proxies dict routing HTTP and HTTPS traffic
    through the abuyun dynamic proxy tunnel."""
    host = "http-dyn.abuyun.com"
    port = "9020"

    # Tunnel authentication credentials
    user = "HH9M77VC14FM5Y2D"
    password = "14E09EC1E063DBF3"

    proxy_url = "http://{}:{}@{}:{}".format(user, password, host, port)
    return {
        "http": proxy_url,
        "https": proxy_url,
    }

def zbsj_request_proxies(rang, ct, PROXIES):
    """POST one chart-data request to sj.zhiboclub.com, retrying up to 3 times.

    Args:
        rang: filter value (the anchor name) placed in the dashboard filter range.
        ct: chart id (``ct_...``) selecting which chart's data to fetch.
        PROXIES: requests-style proxies dict ({} to connect directly).

    Returns:
        The decoded JSON response as a dict, or None when all 3 attempts fail.
    """
    # Loop-invariant; build once instead of on every retry.
    headers = {
        'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/73.0.3683.75 Safari/537.36'
    }
    for _ in range(3):
        try:
            # Cache-busting millisecond timestamp; the literal '%20' in the
            # parameter name mirrors what the site expects.
            url = 'https://sj.zhiboclub.com/api/chart/share_data?_t%20=' + str(int(time.time() * 1000))
            datalist = [{"df_id":"df_061d3c1ba48d3e1c436641791cf0a572","range":[rang],"data_type":"string","granularity":"","range_type":"1"}]
            post_data ={
              'ct_id': (None,ct),
              'linked_chart_type': (None,0),
              'chart_jump_info': (None,json.dumps({})),
              'sdo_id': (None,'sdo_a746c8739cd240c8fe9745344ca4f9fa'),
              'dsh_filter': (None,json.dumps(datalist)),
              '_t': (None,str(datetime.datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%S.%fZ")))
            }
            # files= forces a multipart/form-data body, which this endpoint expects.
            response = requests.post(url, headers=headers, files=post_data, proxies=PROXIES)
            return json.loads(response.text)
        except (requests.RequestException, ValueError):
            # Network failure or non-JSON body (json.JSONDecodeError is a
            # ValueError subclass): retry on the next iteration.
            continue
    return None

def get_list_data_proxies(range, PROXIES):
    """Fetch the five anchor-summary charts for one anchor name.

    Args:
        range: the anchor name used as the dashboard filter value.
        PROXIES: requests-style proxies dict forwarded to each request.

    Returns:
        A list with one decoded response per chart id, in order
        (entries may be None when a request fails all its retries).
    """
    ct_id_list = [
        'ct_4b0ef31a2dc2e6a82bbbe62d32d5a664',
        'ct_e947ef072f7531738976ae3960ce9c8d',
        'ct_b87a77f6ca7f8771415b3ee7093615c9',
        'ct_e947ef072f7531738976ae3960ce9c8d',
        'ct_ddf0c3258ad9397ddb166d0e207ef761',
    ]
    return [zbsj_request_proxies(range, ct, PROXIES) for ct in ct_id_list]

def zbsj_detail_request_proxies(rang, ct, PROXIES):
    """POST one detail chart-data request to sj.zhiboclub.com, retrying up to 3 times.

    Same wire protocol as ``zbsj_request_proxies``; kept separate so the
    detail pipeline can evolve independently.

    Args:
        rang: filter value (the anchor name) placed in the dashboard filter range.
        ct: chart id (``ct_...``) selecting which chart's data to fetch.
        PROXIES: requests-style proxies dict ({} to connect directly).

    Returns:
        The decoded JSON response as a dict, or None when all 3 attempts fail.
    """
    # Loop-invariant; build once instead of on every retry.
    headers = {
        'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/73.0.3683.75 Safari/537.36'
    }
    for _ in range(3):
        try:
            # Cache-busting millisecond timestamp; the literal '%20' in the
            # parameter name mirrors what the site expects.
            url = 'https://sj.zhiboclub.com/api/chart/share_data?_t%20=' + str(int(time.time() * 1000))
            datalist = [{"df_id":"df_061d3c1ba48d3e1c436641791cf0a572","range":[rang],"data_type":"string","granularity":"","range_type":"1"}]
            post_data ={
              'ct_id': (None,ct),
              'linked_chart_type': (None,0),
              'chart_jump_info': (None,json.dumps({})),
              'sdo_id': (None,'sdo_a746c8739cd240c8fe9745344ca4f9fa'),
              'dsh_filter': (None,json.dumps(datalist)),
              '_t': (None,str(datetime.datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%S.%fZ")))
            }
            # files= forces a multipart/form-data body, which this endpoint expects.
            response = requests.post(url, headers=headers, files=post_data, proxies=PROXIES)
            return json.loads(response.text)
        except (requests.RequestException, ValueError):
            # Network failure or non-JSON body (json.JSONDecodeError is a
            # ValueError subclass): retry on the next iteration.
            continue
    return None

def get_detail_data_proxies(range, PROXIES):
    """Fetch the single anchor-detail chart for one anchor name.

    Args:
        range: the anchor name used as the dashboard filter value.
        PROXIES: requests-style proxies dict forwarded to each request.

    Returns:
        A one-element list holding the decoded response (or None on failure).
    """
    ct_id_list = [
        'ct_e2a0a16e287bbb5de18095c8abaa8050',
    ]
    return [zbsj_detail_request_proxies(range, ct, PROXIES) for ct in ct_id_list]


def _int_series(values):
    """Convert a list of numeric strings such as '123.0' to ints (truncating any fraction)."""
    return [int(v.split('.')[0]) for v in values]


def _first_value(responses, res_idx, axis, series_idx):
    """Best-effort lookup of responses[res_idx]['result']['data'][axis][series_idx]['data'][0].

    Returns '' when any level of the structure is missing or malformed
    (a failed request leaves a None entry in `responses`).
    """
    try:
        return responses[res_idx]['result']['data'][axis][series_idx]['data'][0]
    except Exception:
        return ''


def get_anchor_list(id_begin, limit):
    """Crawl summary + detail statistics for each anchor and insert one
    document per anchor into the WeChat.zbsj_detail Mongo collection.

    Args:
        id_begin: lower bound on drjId for the (currently disabled)
            Mongo-driven anchor selection below; unused while the anchor
            list is hard-coded.
        limit: batch size for the disabled Mongo query; also unused.
    """
    # Counts processed anchors; drives the (disabled) every-5 proxy rotation.
    proxy_index = 0

    # Mongo connection
    myclient = pymongo.MongoClient('mongodb://47.98.146.210:27017')
    mydb = myclient["WeChat"]
    zbsj_detail_col = mydb["zbsj_detail"]

    # NOTE(review): the production path pulled `limit` anchors with
    # drjId > id_begin from Mongo; it is disabled in favour of the single
    # hard-coded anchor below.
    # count = mycol.count_documents({'drjId': {'$gt': id_begin}})
    # anchor_list_cursor = mycol.find({'drjId': {'$gt': id_begin}}).sort([("drjId", 1)]).limit(limit).skip(0)

    anchor_list = [{
        "anchorName": "雪欧尼Tiffany",
        "drjId": 7985
    }]

    # Detail chart y-axis series names, in the index order the chart returns.
    y_fields = ('mxsgkrs', 'mxsgks', 'gkrs', 'gks', 'sc', 'hds', 'mrzfs')

    for item in anchor_list:
        # Proxy rotation (fresh proxy every 5 anchors) is currently disabled;
        # requests go out directly with an empty proxies dict.
        # if proxy_index % 5 == 0:
        #     PROXIES = get_proxy()
        PROXIES = {}
        proxy_index += 1

        new_data = {
            "anchorName": item["anchorName"],
            "drjId": item["drjId"]
        }

        # Fetch anchor summary (zbsj_list) and detail (zbsj_detail) chart data.
        udaren_info_res = get_list_data_proxies(item["anchorName"], PROXIES)
        udaren_res = get_detail_data_proxies(item["anchorName"], PROXIES)

        # --- summary fields (best effort; '' when the data is missing) ---
        fans_raw = _first_value(udaren_info_res, 0, 'y', 0)
        try:
            new_data['fansNumber'] = int(fans_raw)
        except (TypeError, ValueError):
            new_data['fansNumber'] = ''
        new_data['institution'] = _first_value(udaren_info_res, 2, 'x', 0)
        new_data['level'] = _first_value(udaren_info_res, 3, 'x', 4)
        new_data['isshop'] = _first_value(udaren_info_res, 4, 'x', 0)

        # --- detail series (best effort; [] when the data is missing) ---
        # x-axis series are stored raw; y-axis series are converted to ints.
        for key, idx in (('kbsj', 0), ('kbsj_h', 1)):
            try:
                new_data[key] = udaren_res[0]['result']['data']['x'][idx]['data']
            except Exception:
                new_data[key] = []
        for idx, key in enumerate(y_fields):
            try:
                new_data[key] = _int_series(udaren_res[0]['result']['data']['y'][idx]['data'])
            except Exception:
                new_data[key] = []

        # Mark the record unfinished when every detail series came back empty.
        detail_keys = ('kbsj', 'kbsj_h') + y_fields
        new_data['isFinished'] = 0 if all(new_data[k] == [] for k in detail_keys) else 1

        new_data['updateTime'] = str(datetime.date.today())

        zbsj_detail_col.insert_one(new_data)

        print("drjId={} - done".format(new_data["drjId"]))

    print('Mission Complete')


if __name__ == '__main__':
    print('------ Udaren Spider Begin ------')
    # IMPORTANT: anchors with 522 < id <= 1105 were never crawled
    # NOTE(review): id_begin/limit are currently ignored while get_anchor_list
    # uses its hard-coded anchor list; '76022' presumably a past id_begin — confirm.
    get_anchor_list(159697, 1000) # 76022
