import hashlib
import json
import time
import pandas as pd
import requests
from urllib3.exceptions import InsecureRequestWarning

from data.getdata import get_history_date


def get_dapan(date):
    """Fetch broad-market ("dapan") transaction data from cls.cn.

    :type date: str, date string such as '2022-03-10'
    :returns: parsed JSON dict on success, or None on network/parse error
        (propagated from _get_data).
    """
    base = 'https://www.cls.cn/v3/transaction/anchor?'
    query = ('app=CailianpressWeb&cdate=' + date +
             '&os=web&sv=7.7.5&token=KR4ESp4qqvjWFfGaLdR4OEf3Cn6NJr7V609640&uid=609640')
    # The API requires a signature computed over the raw query string.
    signature = Decode().decode(query)
    return _get_data(base + query + '&sign=' + signature)


def _get_data(url):
    """GET *url* and return the JSON-decoded response body.

    Best-effort: on any network or JSON error the error is printed and
    None is returned instead of raising, so callers must check for None.
    """
    headers = {'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:92.0) Gecko/20100101 Firefox/92.0'}
    session = requests.Session()
    # Suppress the warning that verify=False below would otherwise emit.
    requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
    try:
        # timeout added: without it a dead host blocks this call forever.
        html = session.get(url=url, headers=headers, verify=False, timeout=15).text
        data = json.loads(html)
    except Exception as spider_error:
        print("获取网络数据 html抓取过程报错，错误信息为：%s" % spider_error)
        return None
    return data


def get_data_cls(timestamp):
    """Fetch the Cailianpress (cls.cn) rolling telegraph feed.

    :type timestamp: str, unix timestamp used as the paging cursor
    :returns: tuple (cursor, rows) — *cursor* is the newest entry's ctime
        as a string (or the input timestamp when nothing was fetched);
        *rows* is a list of [formatted local time, content, stock_list].
    """
    url = 'https://www.cls.cn/nodeapi/updateTelegraphList?'
    decode_in = 'app=CailianpressWeb&category=&hasFirstVipArticle=1&lastTime=' + timestamp + '&os=web&rn=20&subscribedColumnIds=&sv=7.5.5'
    decode_out = Decode().decode(decode_in)
    data = _get_data(url + decode_in + '&sign=' + decode_out)
    __data = timestamp
    lists = []
    if data is not None:
        try:
            # BUG FIX: _get_data already returns a parsed dict, so the old
            # json.loads(data) here raised TypeError on every successful
            # response and the broad except silently discarded the data.
            _data = data['data']['roll_data']
            if len(_data) > 0:
                print(url + decode_in + '&sign=' + decode_out)
                __data = str(_data[0]['ctime'])  # newest entry = next paging cursor
                for m in _data:
                    lists.append([str(time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(m['ctime']))), m['content'],
                                  m['stock_list']])
        except Exception as spider_error:
            print(spider_error)
    return __data, lists


class Decode:
    """Builds the cls.cn request signature: md5(sha1(query_string))."""

    def use_sha(self, text):
        """Return the SHA-1 hex digest of *text* (accepts str or bytes)."""
        payload = text if isinstance(text, bytes) else text.encode('utf-8')
        return hashlib.sha1(payload).hexdigest()

    def md5value(self, s):
        """Return the MD5 hex digest of the string *s*."""
        return hashlib.md5(s.encode()).hexdigest()

    def decode(self, s):
        """Sign *s*: MD5 of its SHA-1 hex digest."""
        return self.md5value(self.use_sha(s))


if __name__ == '__main__':
    # Retry the trading-date lookup up to 6 times (1s pause between tries).
    dates = None
    for _ in range(6):
        dates = get_history_date(dates='10')
        if dates is not None:
            break
        time.sleep(1)
    if dates is None:
        # Previously this fell through and crashed with a TypeError on
        # `for date in None`; fail with a clear message instead.
        raise SystemExit('get_history_date failed after 6 attempts')
    frames = []
    for date in dates:
        dapan = get_dapan(date)
        if dapan is not None:  # get_dapan returns None on network errors
            frames.append(pd.DataFrame(dapan['data']))
    # DataFrame.append was removed in pandas 2.0; pd.concat is the
    # supported way to stack the per-date frames.
    data = pd.concat(frames, ignore_index=True) if frames else pd.DataFrame()
    print(data.symbol_name.value_counts())
