#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import re
import time
import datetime
import requests
import pymysql
import logging
from pathlib import Path
from requests.adapters import HTTPAdapter
# Shared HTTP session with automatic retries (up to 6 attempts per request)
# for both plain and TLS connections.
s = requests.Session()
s.mount('http://', HTTPAdapter(max_retries=6))
s.mount('https://', HTTPAdapter(max_retries=6))

import sys, os
# Make the sibling ark_project package importable (relative to the cwd).
path = os.path.abspath('../..')+'/ark_project'
sys.path.append(path)  # appended to the end of sys.path
from settings import my_settings


class ark_webholding_spider():
    """Crawl the daily holdings CSVs of the ARK ETFs, cache them locally as
    ``Ark_holding_YYYY-MM-DD.txt`` files and import them into the MySQL
    table ``ark_sql_holdings``.

    Parameters
    ----------
    settings : object exposing ``mysql_conf``, a dict with MYSQL_HOST /
        MYSQL_PORT / MYSQL_DB / MYSQL_USER / MYSQL_PASSWD / MYSQL_CHARTSET.
    logger : ``logging.Logger`` used for progress and error reporting.
    """

    def __init__(self, settings, logger):
        mysql_settings = settings.mysql_conf
        MYSQL_HOST = mysql_settings['MYSQL_HOST']
        MYSQL_PORT = mysql_settings['MYSQL_PORT']
        MYSQL_DB = mysql_settings['MYSQL_DB']
        MYSQL_USER = mysql_settings['MYSQL_USER']
        MYSQL_PASSWD = mysql_settings['MYSQL_PASSWD']
        MYSQL_CHARTSET = mysql_settings['MYSQL_CHARTSET']

        self.my_user_agent = my_settings.MyUserAgent().my_user_agent
        self.curr_path = os.getcwd() + '/' + 'ark_webholding/'

        self.start_date_str = str(time.strftime('%Y-%m-%d', time.localtime()))
        self.logger = logger
        # Local cache directory for the downloaded holdings files.
        self.arkholding_path = self.curr_path + "arkholding"
        if not os.path.exists(self.arkholding_path):
            os.makedirs(self.arkholding_path)
        self.ark_holdings_data_address = self.arkholding_path + '/'

        # CSV download URL for each ARK ETF on the official site.
        arkf_url = "https://ark-funds.com/wp-content/fundsiteliterature/csv/ARK_FINTECH_INNOVATION_ETF_ARKF_HOLDINGS.csv"
        arkg_url = "https://ark-funds.com/wp-content/fundsiteliterature/csv/ARK_GENOMIC_REVOLUTION_MULTISECTOR_ETF_ARKG_HOLDINGS.csv"
        arkk_url = "https://ark-funds.com/wp-content/fundsiteliterature/csv/ARK_INNOVATION_ETF_ARKK_HOLDINGS.csv"
        arkq_url = "https://ark-funds.com/wp-content/fundsiteliterature/csv/ARK_AUTONOMOUS_TECHNOLOGY_&_ROBOTICS_ETF_ARKQ_HOLDINGS.csv"
        arkw_url = "https://ark-funds.com/wp-content/fundsiteliterature/csv/ARK_NEXT_GENERATION_INTERNET_ETF_ARKW_HOLDINGS.csv"
        arkx_url = "https://ark-funds.com/wp-content/fundsiteliterature/csv/ARK_SPACE_EXPLORATION_&_INNOVATION_ETF_ARKX_HOLDINGS.csv"
        print3d_url = "https://ark-funds.com/wp-content/fundsiteliterature/csv/THE_3D_PRINTING_ETF_PRNT_HOLDINGS.csv"
        israel_url = "https://ark-funds.com/wp-content/fundsiteliterature/csv/ARK_ISRAEL_INNOVATIVE_TECHNOLOGY_ETF_IZRL_HOLDINGS.csv"

        # Fund ticker -> download URL.
        self.ark_etf = {'ARKF': arkf_url,
                        'ARKG': arkg_url,
                        'ARKK': arkk_url,
                        'ARKQ': arkq_url,
                        'ARKW': arkw_url,
                        'ARKX': arkx_url,
                        'PRNT': print3d_url,
                        'IZRL': israel_url}

        # NOTE(review): mapping of raw tickers to normalised symbols.
        # Currently unused anywhere in this class; kept for reference.
        ticker_dict = {'TREE UW': 'TREE', 'ARCT UQ': 'ARCT', 'TCS LI': 'TCS.IL', 'TAK UN': 'TAK',
                       '6618': '6618.HK', '8473': '8473.T', '3690': '3690.HK', '4689': '4689.T',
                       '6060': '6060.HK', '4477': '4477.T', '9923': '9923.HK', 'ADYEN': 'ADYEY',
                       'KSPI': 'KSPI.IL'}

        self.headers = {
            "User-Agent": self.my_user_agent,
            'Connection': 'close',
            'cookie':'_ga=GA1.2.178807709.1622800356; hubspotutk=6994e7642d5af35b65a75173d480e8cd; __hs_opt_out=no; _gcl_au=1.1.1943146375.1623910274; messagesUtk=bf36c25bf6794097b1020c62e2d1baed; PHPSESSID=rjflsnn3960tausi64c6gm5qad; _gid=GA1.2.865385494.1627970820; _gat=1; _gat_UA-54486011-2=1; __hstc=6077420.6994e7642d5af35b65a75173d480e8cd.1622800360388.1625800255167.1627970820945.5; __hssrc=1; __hssc=6077420.1.1627970820945'}

        self.db = pymysql.connect(
            host=MYSQL_HOST,
            port=MYSQL_PORT,
            database=MYSQL_DB,
            user=MYSQL_USER,
            password=MYSQL_PASSWD,
            charset=MYSQL_CHARTSET)
        self.cursor = self.db.cursor()
        # Flipped to False by run()/getHolding() when anything goes wrong.
        self.run_flag = True

    @staticmethod
    def get_list(date):
        """Return the POSIX timestamp (local time) of a 'YYYY-MM-DD' string;
        used as a sort key for cached file dates."""
        return datetime.datetime.strptime(date, "%Y-%m-%d").timestamp()

    def run(self):
        """Crawl any missing ETF data, import every cached txt file into
        MySQL, and report completeness.

        Returns
        -------
        tuple[bool, list[str]]
            ``(complete_flag, incomplete_dates)`` -- the flag is True only
            when every stored date has all 8 funds in the database.
        """
        holding_date_complete_flag = True   # only run downstream SQL when all holdings are complete
        uncomplete_data_date = []

        ark_type = ['ARKF', 'ARKG', 'ARKK', 'ARKQ', 'ARKW', 'ARKX', 'PRNT', 'IZRL']

        arkholding_files = os.listdir(self.arkholding_path)
        if not arkholding_files:
            # First run: empty cache directory (the original indexed [-1] on an
            # empty list and crashed) -- fetch all 8 funds.
            self.logger.info("===本次抓取基金：%s", str(ark_type))
            self.getHolding(ark_type)
        else:
            # File names look like 'Ark_holding_2021-05-14.txt'; chars 12:22 hold the date.
            file_dates = [f[12:22] for f in arkholding_files]
            newest_date = sorted(file_dates, key=self.get_list)[-1]
            newest_file = 'Ark_holding_' + newest_date + '.txt'

            with open(self.ark_holdings_data_address + newest_file, 'r', encoding='utf-8') as fh:
                rows = [line.replace('\ufeff', '').strip() for line in fh.readlines()]
            # The fund code occupies chars 11:15 of each 'YYYY-MM-DD,FUND,...' row.
            funds_present = {row[11:15].upper() for row in rows}
            missing_funds = list(set(ark_type).difference(funds_present))

            yesterday = str((datetime.datetime.today() - datetime.timedelta(days=1)).date())

            if yesterday != newest_date:
                # Newest cached data is stale (yesterday's crawl failed) -- fetch all 8 funds.
                self.logger.info("===本次抓取基金：%s", str(ark_type))
                self.getHolding(ark_type)
            elif len(funds_present) < 8:
                # Yesterday's file exists but some funds were missed -- fetch only those.
                self.logger.warning("===本次抓取遗漏抓取的基金：%s", str(missing_funds))
                self.getHolding(missing_funds)

        sql_select_arkholding = "select DISTINCT date from ark_sql_holdings;"
        self.cursor.execute(sql_select_arkholding)
        dates_in_db = [row[0] for row in self.cursor.fetchall()]

        # Parameterised query (the original interpolated the date into the SQL string).
        sql_funds_for_date = "select DISTINCT fund from ark_sql_holdings where date=%s"

        for arkholding_file in os.listdir(self.arkholding_path):
            data_date = arkholding_file.replace('Ark_holding_', '').replace('.txt', '')

            # The site is crawled in several passes, so a date may be only
            # partially present in the database.
            self.cursor.execute(sql_funds_for_date, (data_date,))
            funds_in_db = [row[0] for row in self.cursor.fetchall()]
            if data_date not in dates_in_db or len(funds_in_db) != 8:
                self.holding_to_mysql(data_date)
                self.logger.info('===%s holding data 入库成功===', data_date)

            # After the import, re-check whether the date now has all 8 funds.
            self.cursor.execute(sql_funds_for_date, (data_date,))
            funds_in_db = [row[0] for row in self.cursor.fetchall()]
            if len(funds_in_db) != 8:
                holding_date_complete_flag = False
                uncomplete_data_date.append(data_date)

        self.cursor.close()
        self.db.close()
        if not holding_date_complete_flag:
            self.run_flag = False
        return holding_date_complete_flag, uncomplete_data_date

    def getHolding(self, crawl_ark_type):
        """Download the holdings CSV of each requested ETF from the ARK site
        and append any rows not already cached to that day's txt file.

        Parameters
        ----------
        crawl_ark_type : iterable[str]
            Fund tickers (keys of ``self.ark_etf``) to fetch.
        """
        for etf in crawl_ark_type:
            try:
                # text = s.get(url=self.ark_etf[etf], headers=self.headers, timeout=10).content.decode('utf-8')
                text = requests.get(url=self.ark_etf[etf], headers=self.headers, timeout=10).content.decode('utf-8')

                holding_list = []
                data_date_str = None
                for line in text.split('\n'):
                    # Keep only the data rows, which mention the fund ticker.
                    if etf in line or etf.lower() in line:
                        fields = [f.replace('\"', '').replace('\'', '') for f in line.split(',')]
                        # Normalise the leading date from M/D/YYYY to YYYY-MM-DD.
                        fields[0] = self.transdatefomt(fields[0])
                        data_date_str = fields[0]
                        holding_list.append(','.join(fields))

                if data_date_str is None:
                    # No rows parsed: the original relied on a *global*
                    # data_date_str here, which either raised NameError or
                    # silently reused a previous ETF's date.
                    self.logger.warning('===%s holding: no rows parsed, skipping====', etf)
                    continue

                today_data_txt_path = self.ark_holdings_data_address + 'Ark_holding_' + data_date_str + '.txt'
                # Read the existing cache BEFORE opening for append -- the
                # original opened in 'a' mode first, which always created the
                # file and made its os.path.exists() check meaningless; it
                # also never closed the read handle.
                existing_rows = []
                if os.path.exists(today_data_txt_path):
                    with open(today_data_txt_path, 'r', encoding='utf-8') as fin:
                        existing_rows = [line.replace('\ufeff', '').strip('\n') for line in fin.readlines()]

                with open(today_data_txt_path, 'a', encoding='utf-8') as fout:
                    for one_data in holding_list:
                        if one_data not in existing_rows:
                            fout.write(one_data + '\n')

                self.logger.info('===%s holding数据抓取存储成功===', etf)
            except Exception as e:
                self.run_flag = False
                self.logger.error(e)
                self.logger.error('===%s holding数据存储失败...====', etf)

    def holding_to_mysql(self, process_date):
        """Parse the cached txt file for *process_date* and insert each row
        into ``ark_sql_holdings`` (one commit per row; a failed insert is
        rolled back and logged, then processing continues)."""
        file_path = self.ark_holdings_data_address + 'Ark_holding_' + process_date + '.txt'
        with open(file_path, 'r', encoding='UTF-8') as file:
            lines = file.readlines()
        if not lines:
            return

        sql = "insert into ark_sql_holdings(date,fund,company,ticker,cusip,shares,value_usd,wgt_pct) values(%s,%s,%s,%s,%s,%s,%s,%s)"
        for line in lines:
            # Row layout: date,fund,company,ticker,cusip,shares,value_usd,wgt_pct
            fields = line.strip('\n').replace('\ufeff', '').split(',')
            date, fund, company, ticker, cusip, shares, value_usd, wgt_pct = fields[:8]
            param = (date, fund, company, ticker, cusip, shares, value_usd, wgt_pct)
            try:
                self.cursor.execute(sql, param)
                self.db.commit()
                self.logger.info('---add a holding data----')
            except Exception as e:
                # Roll back the failed row and keep going.
                self.db.rollback()
                self.logger.warning(e)
                self.logger.warning('===插入holding数据失败====')
        self.logger.info('===%s 数据存入数据库成功===', file_path)

    @staticmethod
    def transdatefomt(date):
        """Convert a 'M/D/YYYY' date string (e.g. 5/14/2021) to 'YYYY-MM-DD'."""
        return datetime.datetime.strptime(date, "%m/%d/%Y").strftime('%Y-%m-%d')

