#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import pymysql
import datetime, time
import random
import requests
import json
from settings import my_settings

# Resolve MySQL connection parameters from the shared project settings
# module; hostname='jyb_mysql' selects that server's profile.
settings = my_settings.mysqlSettings(hostname='jyb_mysql').mysql_conf
MYSQL_HOST = settings['MYSQL_HOST']
MYSQL_PORT = settings['MYSQL_PORT']
MYSQL_DB = settings['MYSQL_DB']
MYSQL_USER = settings['MYSQL_USER']
MYSQL_PASSWD = settings['MYSQL_PASSWD']
# NOTE(review): 'CHARTSET' spelling mirrors the key in the settings dict —
# do not "correct" it locally or the lookup breaks.
MYSQL_CHARTSET = settings['MYSQL_CHARTSET']

def get_nasdaq_cookie():
    """Load per-fund Nasdaq cookies from ``nasdaq_cookie.txt``.

    File layout (as written by ``update_nasdaq_cookie``): line 1 is a date
    stamp, every following line is ``<ark_type>:<cookie header value>``.

    Returns:
        dict: mapping of ark_type (e.g. ``'arkf'``) to cookie string.
    """
    nasdaq_cookie_dict = {}
    with open('nasdaq_cookie.txt', 'r', encoding='utf-8') as f:
        # Skip the first line — it is the date stamp, not a cookie entry.
        for line in f.readlines()[1:]:
            line = line.replace('\ufeff', '').strip()  # drop stray BOMs
            if not line:
                continue  # tolerate blank lines
            # Split on the FIRST ':' only — cookie values themselves contain
            # colons.  This also generalizes the old line[:4]/line[5:] slicing,
            # which silently assumed every key is exactly 4 characters.
            cookie_key, _, cookie_value = line.partition(':')
            nasdaq_cookie_dict[cookie_key] = cookie_value
    return nasdaq_cookie_dict

def update_nasdaq_cookie(nasdaq_cookie_dict, today_date):
    """Persist the cookie dict to ``nasdaq_cookie.txt``.

    Writes the date stamp on line 1, then one ``key:value`` line per entry
    (the format ``get_nasdaq_cookie`` reads back).

    Args:
        nasdaq_cookie_dict (dict): ark_type -> cookie header value.
        today_date (str): ISO date string stamped on the first line.
    """
    with open('nasdaq_cookie.txt', 'w', encoding='utf-8') as f:
        f.write(today_date + '\n')
        # Bug fix: iterating the dict directly yields only keys, so the
        # original ``for k, v in nasdaq_cookie_dict`` raised ValueError on
        # unpacking.  Iterate .items() to get (key, value) pairs.
        for k, v in nasdaq_cookie_dict.items():
            f.write(k + ':' + v + '\n')

def _build_nasdaq_url(ark_type, todate):
    """Build the historical-quote API URL for one fund.

    ARKX is the only fund Nasdaq classes as 'stocks'; the rest are 'etf'.
    fromdate/limit are fixed to match the original captured request.
    """
    assetclass = 'stocks' if ark_type == 'arkx' else 'etf'
    return ('https://api.nasdaq.com/api/quote/' + ark_type.upper()
            + '/historical?assetclass=' + assetclass
            + '&fromdate=2021-05-03&limit=18&todate=' + todate)


def _build_nasdaq_headers(cookie_value, user_agent):
    """Browser-like request headers for the Nasdaq API.

    NOTE(review): the hard-coded ``path`` value was captured from a real
    browser session and does not track the actual request URL — presumably
    the API does not validate it; confirm before relying on it.
    """
    return {
        "authority": "api.nasdaq.com",
        "method": "GET",
        "path": "/api/quote/ARKF/historical?assetclass=etf&fromdate=2021-05-03&limit=18&todate=2021-08-02",
        "scheme": "https",
        "accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9",
        "accept-encoding": "gzip, deflate, br",
        "accept-language": "zh-CN,zh;q=0.9",
        "cookie": cookie_value,
        "sec-ch-ua": "'Chromium';v='92', ' Not A;Brand';v='99', 'Google Chrome';v='92'",
        "sec-ch-ua-mobile": "?0",
        "sec-fetch-dest": "document",
        "sec-fetch-mode": "navigate",
        "sec-fetch-site": "none",
        "sec-fetch-user": "?1",
        "upgrade-insecure-requests": "1",
        "user-agent": user_agent
    }


def _nasdaq_attempt(self, url, temp_headers, file_html_path,
                    nasdaq_cookie_dict, ark_type, attempt, ok_date, fail_date):
    """One request attempt: fetch, refresh the stored cookie, persist payload.

    Returns True when the API returned a non-null 'data' field and the raw
    JSON was written to file_html_path; False on any failure (logged).
    """
    try:
        # Session is closed deterministically instead of being leaked.
        with requests.session() as conn:
            response = conn.get(url, headers=temp_headers, timeout=10)
        datas = response.content.decode('utf-8')

        # Bug fix: the original indexed response.headers['Set-Cookie'],
        # which raises KeyError when the server sends no cookie and thereby
        # discarded an otherwise successful fetch.  Refresh only when present.
        new_cookie = response.headers.get('Set-Cookie')
        if new_cookie and nasdaq_cookie_dict[ark_type] != new_cookie:
            nasdaq_cookie_dict[ark_type] = new_cookie

        json_data = json.loads(datas)
        if json_data['data'] is not None:
            with open(file_html_path, 'w', encoding='utf-8') as f:
                f.write(datas)
            self.logger.info('=====第%s次 nasdaq %s抓取%s更新数据成功====',
                             attempt, ark_type, ok_date)
            return True
    except Exception as e:
        self.logger.warning(e)
        self.logger.warning('===第%s次 %s %s nasdaq数据抓取失败',
                            attempt, ark_type, fail_date)
    return False


def crawl_nasdaq(self, crawl_nasdaq_ark_type_list):
    """Download historical fund-flow JSON for each ARK fund from Nasdaq.

    For each ark_type: try the API with today's date up to 3 times; if all
    fail, retry up to 5 times with yesterday's date (data for today may not
    be published yet).  On total failure, clear self.run_flag and log an
    error.  Cookies are round-tripped through nasdaq_cookie.txt.

    https://www.nasdaq.com/market-activity/funds-and-etfs/arkk/historical
    https://api.nasdaq.com/api/quote/watchlist?symbol=arkx%7cstocks&symbol=arkf%7cetf&symbol=arkw%7cetf&symbol=izrl%7cetf&symbol=prnt%7cetf&symbol=arkq%7cetf&symbol=arkg%7cetf&symbol=arkk%7cetf&type=Rv

    Args:
        crawl_nasdaq_ark_type_list (list[str]): lowercase fund symbols,
            e.g. ['arkf', 'arkx', ...].
    """
    nasdaq_cookie_dict = get_nasdaq_cookie()
    today_date = str(datetime.datetime.today().date())
    for ark_type in crawl_nasdaq_ark_type_list:
        url = _build_nasdaq_url(ark_type, today_date)
        file_html_path = (self.ark_fundflow_html_path + 'nasdaq' + "_"
                          + ark_type + '_' + self.start_date_str + '.txt')

        # Intentionally shuffles self.my_user_agent_list IN PLACE, matching
        # the original behavior (the shared list is reordered across calls).
        User_Agent_list = self.my_user_agent_list
        random.shuffle(User_Agent_list)

        data_flag = False
        for i in range(3):
            temp_headers = _build_nasdaq_headers(nasdaq_cookie_dict[ark_type],
                                                 User_Agent_list[i])
            if _nasdaq_attempt(self, url, temp_headers, file_html_path,
                               nasdaq_cookie_dict, ark_type, i,
                               self.start_date_str, today_date):
                data_flag = True
                break
            time.sleep(5)

        if data_flag != True:
            random.shuffle(User_Agent_list)
            for i in range(5):
                # Today's URL failed — fall back to yesterday's date
                # (recomputed each attempt in case we cross midnight).
                yesterday = datetime.datetime.today() - datetime.timedelta(days=1)
                yesterday_str = str(yesterday.date())
                yesterday_url = _build_nasdaq_url(ark_type, yesterday_str)
                temp_headers = _build_nasdaq_headers(nasdaq_cookie_dict[ark_type],
                                                     User_Agent_list[i])
                if _nasdaq_attempt(self, yesterday_url, temp_headers,
                                   file_html_path, nasdaq_cookie_dict,
                                   ark_type, i, yesterday_str, yesterday_str):
                    data_flag = True
                    break
                time.sleep(5)

        if data_flag != True:
            self.run_flag = False
            self.logger.error('===%s %s nasdaq数据抓取失败', ark_type, today_date)

    # Write back any cookies refreshed via Set-Cookie responses.
    update_nasdaq_cookie(nasdaq_cookie_dict, today_date)

if __name__ == '__main__':
    # Tickers of the ARK-family funds this crawler covers; the manual
    # invocation below is kept for ad-hoc debugging.
    ark_list = 'arkf arkg arkk arkq arkw arkx prnt izrl'.split()
    # crawl_nasdaq(ark_list)
    # print(str(datetime.datetime.now()))