#!/usr/bin/env python3
# -*- coding: utf-8 -*-

import time
import datetime
import requests
import re
import pymysql
from bs4 import BeautifulSoup
import logging
from pathlib import Path
import pandas as pd
import json
from selenium import webdriver
from selenium.webdriver.chrome.options import Options
import platform
from requests.adapters import HTTPAdapter
# Shared HTTP session with automatic retries (up to 3) on connection failures.
s = requests.Session()
s.mount('http://', HTTPAdapter(max_retries=3))
s.mount('https://', HTTPAdapter(max_retries=3))

import sys, os
# Make the sibling ``ark_project`` package importable so the shared MySQL
# settings module can be loaded from outside this directory.
path = os.path.abspath('../..')+'/ark_project'
sys.path.append(path)  # appended to the very end of sys.path
from settings import mysql_settings
# Project-wide MySQL connection parameters, flattened into module constants.
settings=mysql_settings.mysqlSettings()
settings=settings.mysql_conf
MYSQL_HOST=settings['MYSQL_HOST']
MYSQL_PORT=settings['MYSQL_PORT']
MYSQL_DB=settings['MYSQL_DB']
MYSQL_USER=settings['MYSQL_USER']
MYSQL_PASSWD=settings['MYSQL_PASSWD']
MYSQL_CHARTSET=settings['MYSQL_CHARTSET']

class ark_fundflow_spider():
    """Maintain ARK ETF fund-flow statistics in MySQL.

    Daily workflow (see :meth:`run`):
      1. scrape per-fund flow figures from etfdb.com and closing prices from
         the Nasdaq API with a headless Chrome driver, keeping a raw backup
         of every response under ``fundflow_html/``;
      2. load those backups into the ``etfdb`` / ``nasdaq`` staging tables;
      3. fill the aggregated ``ark_fundvaluecashintout`` table (per-fund and
         combined cashinout / fundvalue / close) for the dates derived from
         ``ark_holdings``.

    Column names interpolated into SQL below always come from the fixed
    ``self.ark_list``; all *values* (dates, numbers) are passed as bound
    parameters to avoid SQL injection and malformed statements.
    """

    def __init__(self, is_crawl=True, log_rank=logging.WARNING):
        """Prepare working directories, logging, MySQL and headless Chrome.

        :param is_crawl: stored for callers; not consulted inside this class.
        :param log_rank: logging level applied to the per-day log file
                         (previously ignored — level was hard-coded DEBUG).
        """
        self.my_user_agent = mysql_settings.MyUserAgent().my_user_agent
        self.curr_path = os.getcwd() + '/' + 'ark_fundflow/'
        self.is_crawl = is_crawl
        self.log_rank = log_rank
        self.start_date_str = str(time.strftime('%Y-%m-%d', time.localtime()))  # date the program runs (today)
        self.headers = {
            "User-Agent": self.my_user_agent,
            'Connection': 'close'}
        self.logger = self.log_process()

        # The eight ARK funds tracked by this spider (also the staging-table
        # column names).
        self.ark_list = ['arkf', 'arkg', 'arkk', 'arkq', 'arkw', 'arkx', 'prnt', 'izrl']

        # Directory holding the raw html/json backups of scraped responses.
        if not os.path.exists(self.curr_path + 'fundflow_html'): os.makedirs(self.curr_path + 'fundflow_html')
        self.ark_fundflow_html_path = self.curr_path + 'fundflow_html' + '/'

        # Directory for csv exports.
        if not os.path.exists(self.curr_path + 'fundflow_csv'): os.makedirs(self.curr_path + 'fundflow_csv')
        self.ark_fundflow_csv_path = self.curr_path + 'fundflow_csv' + '/'

        self.connect = pymysql.connect(
            host=MYSQL_HOST,
            port=MYSQL_PORT,
            database=MYSQL_DB,
            user=MYSQL_USER,
            password=MYSQL_PASSWD,
            charset=MYSQL_CHARTSET
        )
        # Cursor shared by all query helpers below.
        self.cursor = self.connect.cursor()

        # Headless Chrome configuration.
        self.chrome_options = Options()
        self.chrome_options.add_argument('start-maximized')  # fixed browser resolution
        self.chrome_options.add_argument('--disable-gpu')  # workaround recommended by the Chrome docs
        self.chrome_options.add_argument('--hide-scrollbars')  # hide scrollbars for odd pages
        self.chrome_options.add_argument('blink-settings=imagesEnabled=false')  # skip images for speed
        self.chrome_options.add_argument('-headless')  # no visible window
        self.chrome_options.add_experimental_option('excludeSwitches', ['enable-automation'])
        self.chrome_options.add_experimental_option('useAutomationExtension', False)
        self.chrome_options.add_argument('--ignore-certificate-errors')
        self.chrome_options.add_argument(
            "user-agent=Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/73.0.3683.75 Safari/537.36")

        if platform.system().lower() == 'windows':
            self.driver = webdriver.Chrome(chrome_options=self.chrome_options,
                                           executable_path=r'C:\Program Files (x86)\Google\Chrome\Application\chromedriver.exe')
        elif platform.system().lower() == 'linux':
            self.driver = webdriver.Chrome(chrome_options=self.chrome_options,
                                           executable_path='/usr/local/bin/chromedriver')
        # NOTE(review): on any other OS (e.g. macOS) self.driver is never
        # created, so the crawl methods would raise AttributeError.

    def log_process(self):
        """Create and return a file logger named after this script and today."""
        if not os.path.exists(self.curr_path + 'logs'): os.makedirs(self.curr_path + 'logs')

        log_path = self.curr_path + 'logs/' + Path(__file__).name.replace('.py', '') + '_%s.log' % self.start_date_str
        logger = logging.getLogger()
        # Honour the level requested by the caller (was hard-coded DEBUG,
        # which made the log_rank constructor argument dead).
        logger.setLevel(self.log_rank)
        handler = logging.FileHandler(log_path, 'w', 'utf-8')
        handler.setFormatter(logging.Formatter('%(asctime)s - %(pathname)s[line:%(lineno)d] - %(levelname)s: %(message)s'))
        logger.addHandler(handler)

        return logger

    @staticmethod
    def get_list(date):
        """Return the POSIX timestamp of a ``YYYY-MM-DD`` string.

        Used purely as a chronological sort key for date strings (the name
        is kept for backward compatibility).
        """
        return datetime.datetime.strptime(date, "%Y-%m-%d").timestamp()

    def update_date(self):
        """Take the newest dates from ark_holdings and insert any that are
        missing into ark_fundvaluecashintout."""
        # Newest 7 distinct dates in ark_holdings; keep the 4th-7th newest.
        sql_get_holedate = "SELECT DISTINCT date from ark_holdings ORDER BY date Desc LIMIT 7;"
        self.cursor.execute(sql_get_holedate)
        self.ark_holdings_date = self.cursor.fetchall()
        self.ark_holdings_date = [t[0] for t in self.ark_holdings_date]
        self.ark_holdings_date = self.ark_holdings_date[3:]
        self.logger.info('==self.ark_holdings_date=%s', str(self.ark_holdings_date))

        # Newest 7 distinct dates already present in ark_fundvaluecashintout.
        sql_get_fundvalue_date = "SELECT DISTINCT date from ark_fundvaluecashintout ORDER BY date Desc LIMIT 7;"
        self.cursor.execute(sql_get_fundvalue_date)
        self.ark_fundvalue_date = self.cursor.fetchall()
        self.ark_fundvalue_date = [t[0] for t in self.ark_fundvalue_date]

        self.logger.info('==self.ark_fundvalue_date:%s', str(self.ark_fundvalue_date))

        ark_holdings_date_set = set(self.ark_holdings_date)
        ark_fundvalue_date_set = set(self.ark_fundvalue_date)

        # Dates seen in ark_holdings but not yet in ark_fundvaluecashintout,
        # sorted chronologically.
        self.ark_holdings_date_alone = ark_holdings_date_set - ark_fundvalue_date_set
        self.ark_holdings_date_alone = list(self.ark_holdings_date_alone)
        self.ark_holdings_date_alone = sorted(self.ark_holdings_date_alone, key=lambda date: self.get_list(date))
        self.logger.info('==self.ark_holdings_date_alone=%s', self.ark_holdings_date_alone)
        for new_date in self.ark_holdings_date_alone:
            try:
                sql = "insert into ark_fundvaluecashintout(date) values(%s)"
                # One-element tuple: the original passed a bare value.
                self.cursor.execute(sql, (new_date,))
                self.connect.commit()
            except Exception as e:
                self.logger.error(e)
                self.logger.warning('===ark_fundvaluecashintout新增日期数据%s失败===', new_date)

        # Every date touched by this run, sorted chronologically.
        self.altogether_date = list(set(self.ark_holdings_date + self.ark_fundvalue_date))
        self.altogether_date = sorted(self.altogether_date, key=lambda date: self.get_list(date))
        self.logger.info('===self.altogether_date=%s===', self.altogether_date)


    def run(self):
        """Execute one full daily update cycle (crawl, stage, aggregate)."""
        try:
            self.update_date()

            # Figure out which funds already have a backup file for today so
            # they are not re-crawled. File names look like
            # 'etfdb_arkf_YYYY-MM-DD.txt' / 'nasdaq_arkf_YYYY-MM-DD.txt', so
            # the fund ticker sits at [6:10] / [7:11] respectively.
            ark_fundflow_html_files = os.listdir(self.ark_fundflow_html_path)
            etfdb_html_files_today_ark_type = []
            nasdaq_html_files_today_ark_type = []
            for file in ark_fundflow_html_files:
                if re.findall(self.start_date_str, file):
                    if re.findall('etfdb', file):
                        etfdb_html_files_today_ark_type.append(file[6:10])
                    elif re.findall('nasdaq', file):
                        nasdaq_html_files_today_ark_type.append(file[7:11])

            crawl_etfdb_ark_type_list = list(set(self.ark_list) - set(etfdb_html_files_today_ark_type))
            if crawl_etfdb_ark_type_list:
                self.crawl_etfdb(crawl_etfdb_ark_type_list)  # crawl and back up

            self.update_etfdb()  # load today's etfdb backups into the staging table

            crawl_nasdaq_ark_type_list = list(set(self.ark_list) - set(nasdaq_html_files_today_ark_type))
            if crawl_nasdaq_ark_type_list:
                self.crawl_nasdaq(crawl_nasdaq_ark_type_list)

            self.update_nasdaq()

            # Aggregate into ark_fundvaluecashintout for every involved date.
            for process_date in self.altogether_date:
                for ark_type in self.ark_list:
                    self.insert_cashinout(process_date, ark_type)
                    self.caculate_fundvalue(process_date, ark_type)
                    self.insert_close(process_date, ark_type)
        finally:
            # quit() (rather than close()) also terminates the chromedriver
            # process so it is not leaked; guarded because the driver is only
            # created on Windows/Linux.
            driver = getattr(self, 'driver', None)
            if driver is not None:
                driver.quit()

    def crawl_etfdb(self, crawl_etfdb_ark_type_list):
        """Fetch https://etfdb.com/etf/<fund> for each fund and save the
        rendered page source to a dated backup file."""
        for ark_type in crawl_etfdb_ark_type_list:
            url = 'https://etfdb.com/etf/' + ark_type
            file_html_path = self.ark_fundflow_html_path + 'etfdb' + "_" + ark_type + '_' + self.start_date_str + '.txt'

            self.driver.implicitly_wait(10)  # implicit wait, up to 10 s
            self.driver.get(url)
            html_text = self.driver.page_source
            with open(file_html_path, 'w', encoding='utf-8') as f:
                f.write(html_text)
                self.logger.info('=====etfdb %s抓取当日更新数据成功====', ark_type)

    def update_etfdb(self):
        """Parse today's etfdb backup files and upsert the per-date fund-flow
        values into the ``etfdb`` staging table (NULL cells only)."""
        for ark_type in self.ark_list:
            file_html_path = self.ark_fundflow_html_path + 'etfdb' + "_" + ark_type + '_' + self.start_date_str + '.txt'

            if os.path.exists(file_html_path):
                with open(file_html_path, 'r', encoding='utf-8') as f:
                    html_text = f.read()
                    self.logger.info('===从文件中读取etfdb的json数据 %s %s成功===', ark_type, self.start_date_str)
                    # Each data point is embedded in the page as
                    # '[<13-digit ms timestamp>, <value>]'.
                    pattern = re.compile(r'\[\d{13}.*?\]', re.S)
                    fundflow_str = pattern.findall(html_text)

                    date_fundflow_dict = {}  # str date -> float fundflow
                    for oneday_fundflow_str in fundflow_str:
                        # Millisecond epoch (chars 1-13) -> 'YYYY-MM-DD'.
                        date = time.strftime("%Y-%m-%d", time.localtime(int(oneday_fundflow_str[1:14]) / 1000))
                        # NOTE(review): fixed offsets [16:-1] assume the exact
                        # '[ts, value]' layout, and the original comment says
                        # the source unit is *millions* of USD while the
                        # factor used is 1e9 — confirm before relying on it.
                        fundflow = float(oneday_fundflow_str[16:-1]) * 1000000000
                        date_fundflow_dict[date] = fundflow

                    sql_select = 'select date from etfdb;'
                    self.cursor.execute(sql_select)
                    etfdb_date_list = self.cursor.fetchall()
                    etfdb_date_list = [t[0] for t in etfdb_date_list]

                    for date, fundflow in date_fundflow_dict.items():
                        if date not in etfdb_date_list:
                            # Create the date row first (values bound, not
                            # string-formatted), and persist it.
                            self.cursor.execute("insert into etfdb(date) VALUES(%s);", (date,))
                            self.connect.commit()

                        sql_select_fundflow = "select {} from etfdb where date=%s;".format(ark_type)
                        self.cursor.execute(sql_select_fundflow, (date,))
                        select_fundflow = self.cursor.fetchone()[0]
                        if select_fundflow is None:
                            # Only fill cells that are still NULL.
                            sql_update = "UPDATE etfdb set {}=%s where date=%s;".format(ark_type)
                            self.cursor.execute(sql_update, (fundflow, date))
                            self.connect.commit()
            else:
                self.logger.warning('===%s %s 数据没有抓取到，没有存储的HTML文件===', ark_type, self.start_date_str)

    def insert_cashinout(self, process_date, ark_type):
        """Copy the fund-flow (cashinout) for one fund/date from ``etfdb``
        into ``ark_fundvaluecashintout``, then refresh combined_cashinout.

        Only NULL cells are filled; if ``etfdb`` has no value for the date,
        nothing is written (the original crashed on the missing row).
        """
        sql_check = "select {} from ark_fundvaluecashintout where date=%s".format(ark_type + "_cashinout")
        self.cursor.execute(sql_check, (process_date,))
        row = self.cursor.fetchone()
        date_fundflow = row[0] if row else None
        if date_fundflow is None:
            sql_select_etfdb = "select {} from etfdb where date=%s;".format(ark_type)
            self.cursor.execute(sql_select_etfdb, (process_date,))
            etfdb_row = self.cursor.fetchone()
            new_date_fundflow = etfdb_row[0] if etfdb_row else None

            if new_date_fundflow is not None:
                sql = "UPDATE ark_fundvaluecashintout set {}=%s where date=%s;".format(ark_type + '_cashinout')
                try:
                    self.cursor.execute(sql, (new_date_fundflow, process_date))
                    self.connect.commit()
                    self.logger.info('%s_cashinout数据更新成功！！！', ark_type)
                except Exception as e:
                    self.logger.error('%s_cashinout数据更新失败...', ark_type)
                    self.logger.error(e)
                    # Roll back the failed statement.
                    self.connect.rollback()

            # Refresh the combined_cashinout column (NULL cells count as 0).
            sql = "UPDATE ark_fundvaluecashintout set combined_cashinout=if(arkf_cashinout is null,0,arkf_cashinout)" \
                  "+if(arkg_cashinout is null,0,arkg_cashinout)" \
                  "+if(arkk_cashinout is null,0,arkk_cashinout)" \
                  "+if(arkq_cashinout is null,0,arkq_cashinout)" \
                  "+if(arkw_cashinout is null,0,arkw_cashinout)" \
                  "+if(arkx_cashinout is null,0,arkx_cashinout)" \
                  "+if(prnt_cashinout is null,0,prnt_cashinout)" \
                  "+if(izrl_cashinout is null,0,izrl_cashinout)" \
                  "where date=%s"
            try:
                self.cursor.execute(sql, (process_date,))
                self.connect.commit()
                self.logger.info('cashinout_combined数据更新成功！！！')
            except Exception as e:
                self.logger.error('cashinout_combined数据更新失败...')
                self.logger.error(e)
                self.connect.rollback()

    def caculate_fundvalue(self, process_date, ark_type):
        """Compute one fund's total value from ark_holdings and store it in
        ark_fundvaluecashintout, then refresh combined_fundvalue.

        Only NULL cells are filled; if ark_holdings has no rows for the
        date/fund (SUM returns NULL) the cell is left untouched (the original
        emitted an invalid ``set col=None`` statement in that case).
        """
        sql_check = "select {} from ark_fundvaluecashintout where date=%s".format(ark_type + "_fundvalue")
        self.cursor.execute(sql_check, (process_date,))
        row = self.cursor.fetchone()
        fundvalue = row[0] if row else None
        if fundvalue is None:
            sql = "select sum(value_usd) from (select * from ark_holdings where date=%s and fund=%s)t"
            try:
                self.cursor.execute(sql, (process_date, ark_type.upper()))
                sumcddatefundvaluenum = self.cursor.fetchone()[0]

                if sumcddatefundvaluenum is not None:
                    sql = "UPDATE ark_fundvaluecashintout set {}=%s where date=%s".format(ark_type + "_fundvalue")
                    try:
                        self.cursor.execute(sql, (sumcddatefundvaluenum, process_date))
                        self.connect.commit()
                        self.logger.info('%s_fundvalue更新成功', ark_type)
                    except Exception as e:
                        self.connect.rollback()
                        self.logger.error('%s_fundvalue更新失败', ark_type)
                        self.logger.error(e)
                        self.logger.error(sql)
            except Exception as e:
                self.logger.error("Error: unable to fetch data")
                self.logger.error(e)

            # Refresh the combined_fundvalue column (NULL cells count as 0).
            sql = "UPDATE ark_fundvaluecashintout set combined_fundvalue=if(arkf_fundvalue is null,0,arkf_fundvalue)" \
                  "+if(arkg_fundvalue is null,0,arkg_fundvalue)" \
                  "+if(arkk_fundvalue is null,0,arkk_fundvalue)" \
                  "+if(arkq_fundvalue is null,0,arkq_fundvalue)" \
                  "+if(arkw_fundvalue is null,0,arkw_fundvalue)" \
                  "+if(arkx_fundvalue is null,0,arkx_fundvalue)" \
                  "+if(prnt_fundvalue is null,0,prnt_fundvalue)" \
                  "+if(izrl_fundvalue is null,0,izrl_fundvalue)" \
                  "where date=%s"
            try:
                self.cursor.execute(sql, (process_date,))
                self.connect.commit()
                self.logger.info('combined_fundvalue更新成功！！！')
            except Exception as e:
                self.connect.rollback()
                self.logger.error('combined_fundvalue更新失败...')
                self.logger.error(e)

    def crawl_nasdaq(self, crawl_nasdaq_ark_type_list):
        """Fetch the Nasdaq historical-quotes JSON API for each fund and save
        the raw JSON to a dated backup file.

        ARKX is listed under assetclass=stocks on the API; the other funds
        use assetclass=etf. If the request dated *today* returns no data,
        retry once with yesterday's date in the URL.
        """
        today_date = str(datetime.datetime.today().date())
        for ark_type in crawl_nasdaq_ark_type_list:
            if ark_type == 'arkx':
                url = 'https://api.nasdaq.com/api/quote/ARKX/historical?assetclass=stocks&fromdate=2021-05-03&limit=18&todate=' + today_date
            else:
                url = 'https://api.nasdaq.com/api/quote/' + ark_type.upper() + '/historical?assetclass=etf&fromdate=2021-05-03&limit=18&todate=' + today_date

            file_html_path = self.ark_fundflow_html_path + 'nasdaq' + "_" + ark_type + '_' + self.start_date_str + '.txt'

            try:
                self.driver.implicitly_wait(10)  # implicit wait, up to 10 s
                self.driver.get(url)
                # The JSON body is rendered inside a <pre> element.
                datas = self.driver.find_element_by_tag_name('pre').text
                json_data = json.loads(datas)
                data_data = json_data['data']
                if data_data is not None:
                    with open(file_html_path, 'w', encoding='utf-8') as f:
                        f.write(datas)
                    self.logger.info('=====%s抓取%s更新数据成功====', ark_type, self.start_date_str)
                else:
                    # Today's URL returned no data — retry with yesterday's
                    # date in the todate parameter.
                    today = datetime.datetime.today()
                    yesterday = today - datetime.timedelta(days=1)
                    yesterday_str = str(yesterday.date())
                    if ark_type == 'arkx':
                        yesterday_url = 'https://api.nasdaq.com/api/quote/ARKX/historical?assetclass=stocks&fromdate=2021-05-03&limit=18&todate=' + yesterday_str
                    else:
                        yesterday_url = 'https://api.nasdaq.com/api/quote/' + ark_type.upper() + '/historical?assetclass=etf&fromdate=2021-05-03&limit=18&todate=' + yesterday_str

                    try:
                        self.driver.implicitly_wait(10)  # implicit wait, up to 10 s
                        self.driver.get(yesterday_url)
                        datas = self.driver.find_element_by_tag_name('pre').text
                        json_data = json.loads(datas)
                        data_data = json_data['data']
                        if data_data is not None:
                            with open(file_html_path, 'w', encoding='utf-8') as f:
                                f.write(datas)
                            self.logger.info('=====%s抓取%s更新数据成功====', ark_type, yesterday_str)
                    except Exception as e:
                        self.logger.error(e)
                        self.logger.error('===%s %s nasdaq数据抓取失败', ark_type, yesterday_str)
            except Exception as e:
                self.logger.error(e)
                self.logger.error('===%s %s nasdaq数据抓取失败', ark_type, today_date)

    def update_nasdaq(self):
        """Parse today's nasdaq backup files and upsert per-date close prices
        into the ``nasdaq`` staging table (NULL cells only)."""
        for ark_type in self.ark_list:
            file_html_path = self.ark_fundflow_html_path + 'nasdaq' + "_" + ark_type + '_' + self.start_date_str + '.txt'

            if os.path.exists(file_html_path):
                with open(file_html_path, 'r', encoding='utf-8') as f:
                    fin_data = f.read()
                    self.logger.info('===从文件中读取nasdaq的json数据 %s %s成功===', ark_type, self.start_date_str)

                    json_data = json.loads(fin_data)
                    row_data = json_data['data']['tradesTable']['rows']
                    date_closevalue_dict = {}  # 'YYYY-MM-DD' -> close (str)
                    for each_data in row_data:
                        date = each_data['date']
                        # API dates are MM/DD/YYYY; normalise to ISO.
                        new_date = str(datetime.datetime.strptime(date, '%m/%d/%Y').date())
                        close_value = each_data['close'].replace('$', '')
                        date_closevalue_dict[new_date] = close_value

                    select_nasdaq = "select date from nasdaq;"
                    self.cursor.execute(select_nasdaq)
                    nasdaq_date_list = self.cursor.fetchall()
                    nasdaq_date_list = [t[0] for t in nasdaq_date_list]

                    for date, close_value in date_closevalue_dict.items():
                        if date not in nasdaq_date_list:
                            # Create the date row first (value bound, not
                            # string-formatted), and persist it.
                            self.cursor.execute("insert into nasdaq(date) VALUES(%s);", (date,))
                            self.connect.commit()

                        sql_select_close = "select {} from nasdaq where date=%s;".format(ark_type)
                        self.cursor.execute(sql_select_close, (date,))
                        select_close = self.cursor.fetchone()[0]
                        if select_close is None:
                            # Only fill cells that are still NULL.
                            sql_update = "UPDATE nasdaq set {}=%s where date=%s;".format(ark_type)
                            self.cursor.execute(sql_update, (close_value, date))
                            self.connect.commit()

            else:
                self.logger.error('===nasdaq的json数据备份文件中不存在 %s %s 的备份===', ark_type, self.start_date_str)

    def insert_close(self, process_date, ark_type):
        """Copy one fund's close price from ``nasdaq`` into
        ark_fundvaluecashintout, then refresh the fund-value-weighted
        combined_close.

        Only NULL cells are filled; if ``nasdaq`` has no row for the date,
        nothing is written (the original crashed on the missing row).
        """
        sql_check = "select {} from ark_fundvaluecashintout where date=%s".format(ark_type + '_close')
        self.cursor.execute(sql_check, (process_date,))
        row = self.cursor.fetchone()
        select_close = row[0] if row else None
        if select_close is None:
            sql_select_nasdaq = "select {} from nasdaq where date=%s".format(ark_type)
            self.cursor.execute(sql_select_nasdaq, (process_date,))
            nasdaq_row = self.cursor.fetchone()
            new_select_close = nasdaq_row[0] if nasdaq_row else None

            if new_select_close is not None:
                sql_update = "UPDATE ark_fundvaluecashintout set {}=%s where date=%s;".format(ark_type + '_close')
                try:
                    self.cursor.execute(sql_update, (new_select_close, process_date))
                    self.connect.commit()
                    self.logger.info('==%s:%s_close->%s数据更新成功', process_date, ark_type, new_select_close)
                except Exception as e:
                    self.logger.error(e)
                    self.connect.rollback()

            # Refresh combined_close as the fundvalue-weighted average of the
            # per-fund closes (NULL cells count as 0).
            # NOTE(review): if every fundvalue is NULL the divisor is 0;
            # MySQL then yields NULL (default sql_mode) — confirm acceptable.
            sql_close_combined = """update ark_fundvaluecashintout set combined_close=(if(arkf_close is null,0,arkf_close)*if(arkf_fundvalue is null,0,arkf_fundvalue)
                                                        +if(arkg_close is null,0,arkg_close)*if(arkg_fundvalue is null,0,arkg_fundvalue)
                                                        +if(arkk_close is null,0,arkk_close)*if(arkk_fundvalue is null,0,arkk_fundvalue)
                                                        +if(arkq_close is null,0,arkq_close)*if(arkq_fundvalue is null,0,arkq_fundvalue)
                                                        +if(arkw_close is null,0,arkw_close)*if(arkw_fundvalue is null,0,arkw_fundvalue)
                                                        +if(arkx_close is null,0,arkx_close)*if(arkx_fundvalue is null,0,arkx_fundvalue)
                                                        +if(prnt_close is null,0,prnt_close)*if(prnt_fundvalue is null,0,prnt_fundvalue)
                                                        +if(izrl_close is null,0,izrl_close)*if(izrl_fundvalue is null,0,izrl_fundvalue))
                                                        /(
                                                            if(arkf_fundvalue is null,0,arkf_fundvalue)
                                                            +if(arkg_fundvalue is null,0,arkg_fundvalue)
                                                            +if(arkk_fundvalue is null,0,arkk_fundvalue)
                                                            +if(arkq_fundvalue is null,0,arkq_fundvalue)
                                                            +if(arkw_fundvalue is null,0,arkw_fundvalue)
                                                            +if(arkx_fundvalue is null,0,arkx_fundvalue)
                                                            +if(prnt_fundvalue is null,0,prnt_fundvalue)
                                                            +if(izrl_fundvalue is null,0,izrl_fundvalue)) where date=%s;
                                    """
            try:
                self.cursor.execute(sql_close_combined, (process_date,))
                self.connect.commit()
                self.logger.info('close_combined数据更新成功！！！')

            except Exception as e:
                self.logger.error('close_combined数据更新失败...')
                self.logger.error(e)
                self.connect.rollback()

    def __del__(self):
        # Best-effort cleanup: the attributes may not exist if __init__
        # failed partway, and __del__ must never raise.
        try:
            self.cursor.close()
            self.connect.close()
        except Exception:
            pass

if __name__ == '__main__':
    # Run one complete daily update cycle of the ARK fund-flow tables.
    spider = ark_fundflow_spider(is_crawl=False, log_rank=logging.INFO)
    spider.run()




