from pathlib import Path
import json
from simple_util import SUtil
from src.AkshareSpider import AkshareSpider
from src.ExchangeFileSpider import ExchangeFileSpider
from src.exts import TestLogger
from src.mxtf_api import MxtfApi
from datetime import datetime, timedelta,timezone
import pytz

# Shanghai timezone used for all timestamp truncation/localization in this module.
east8 = pytz.timezone('Asia/Shanghai')
# Field-name translation tables consumed by Spider.translateFields():
# each mapping's KEYS are the camelCase field names expected by the MxtfApi
# endpoints, and the VALUES are the column names as they appear in the scraped
# source data (mostly Chinese column headers returned by akshare).
fieldMapping={
    'shenwan_industry':{
        "industryCode": "行业代码",
        "industryName": "行业名称",
        "compCount": "成份个数",
        "staticPeRatio": "静态市盈率",
        "ttmPeRatio": "TTM(滚动)市盈率",
        "pbRatio": "市净率",
        "staticDividendYield": "静态股息率",
        "industryLayer": "sw_industry_layer"
    },
    'shenwan_industry_history':{
        "swIndIndexId": "代码",
        "date": "日期",
        "close": "收盘",
        "open": "开盘",
        "high": "最高",
        "low": "最低",
        "volume": "成交量",
        "amount": "成交额"
    },
    'stocks_custom_history':{
        "date": '日期',
        "stockId": '股票代码',
        "open": '开盘',
        "close": '收盘',
        "high":'最高',
        "low":'最低',
        "quantity": '成交量',
        "amount": '成交额',
        "amplitude": '振幅',
        "changePct": '涨跌幅',
        "changeAmmount": '涨跌额',
        "turnover": '换手率'
    },
    'stocks_custom_history_factor':{
        "stockId": 'stock_id',
        'factorType':'factor_type',
        "date": 'date',
        'factorValue':'factor_value'
        },
    'measure_history':{
        "measureEntity": 'measure_entity',
        "measureDate": 'measure_date',
        "stockCode": 'stock_code',
        "corpName": 'corp_name',
        "measureSubtype": 'measure_subtype',
        "measureFileName": 'measure_file_name',
        # "measureFileLocalName": 'measure_file_local_name',
        # "descpAi": '',  # not a scraped field; filled in after the AI reads the measure letter
        # "judgeResultAi": '',  # not a scraped field; filled in after the AI reads the measure letter
        # "xujiachenshuDateAi": '',  # not a scraped field; filled in after the AI reads the measure letter
        # "gengzhengDateAi": '',  # not a scraped field; filled in after the AI reads the measure letter
        # "judgeResult": '',  # not a scraped field; filled in manually after reading the letter
        # "xujiachenshuDate": '',  # not a scraped field; filled in manually after reading the letter
        # "gengzhengDate": '',  # not a scraped field; filled in manually after reading the letter
        "measureFileUrl": 'measure_file_url',  # not in the /measure_history/batch_add API; needs to be added there
        "measureTitle": 'measure_title',  # not in the /measure_history/batch_add API; needs to be added there
        "measureTarget": 'measure_target',  # not in the /measure_history/batch_add API; needs to be added there
        "combinedTitle": 'combined_title'
    },
    'stocks':{
        'stockNo':'代码',
        'stockName':'名称',
    },
    'shenwan_history':{
        'symbol':'symbol',
        'startDate':'start_date',
        'industryCode':'industry_code',
        'updateTime':'update_time'
    }
}


class Spider():
    def __init__(self, logger=None):
        """Create a spider; *logger* defaults to a TestLogger when falsy."""
        # `or` mirrors the original `if not logger` truthiness fallback.
        self.logger = logger or TestLogger()
        self.api = AkshareSpider()
        self.mxtfapi = MxtfApi()
    def slice_data(self, data, size=1000):
        """Yield consecutive chunks of *data*, each at most *size* items long."""
        start = 0
        total = len(data)
        while start < total:
            yield data[start:start + size]
            start += size
    def fetch_sw_index_daily(self):
        """Fetch Shenwan industry index daily history and upload it in chunks."""
        # Refresh the index list held on the akshare wrapper first.
        self.api.fetch_sw_ind_comp()
        # Process one index at a time (batch size 1).
        for index_batch in self.slice_data(self.api.sw_ind_index_list, 1):
            raw = self.api.fetch_sw_ind_index_hist_mini(index_batch)
            records = json.loads(raw)
            self.logger.debug(f"申万行业指数历史行情 开始处理数据:{index_batch}")
            for chunk in self.slice_data(records):
                translated = self.translateFields('shenwan_industry_history', chunk)
                self.mxtfapi.save_shenwan_index_history(translated)
            self.logger.debug(f"申万行业指数历史行情 数据:{index_batch} 处理完成")
    @staticmethod
    def convert_timestamp_to_date_only(timestamp):
        """Truncate a unix timestamp (seconds) to midnight Asia/Shanghai.

        Returns the truncated moment as an int unix timestamp.
        """
        moment = datetime.fromtimestamp(timestamp, tz=east8)
        midnight = moment.replace(hour=0, minute=0, second=0, microsecond=0)
        return int(midnight.timestamp())
    
    def translateFields(self, fieldMapper, datas):
        """Rename the fields of each record in *datas* using fieldMapping[fieldMapper].

        Mapping keys become the output field names; mapping values name the
        source columns. Timestamp-like fields are normalised:
        - 'date'/'updateTime'/'startDate' arrive as epoch milliseconds; they are
          truncated to midnight (Asia/Shanghai) epoch seconds. Records whose
          value is missing or negative are dropped (the old code raised
          TypeError on ``None < 0``; dropping matches the negative-value path).
        - 'measureDate' arrives as a 'YYYY-MM-DD' string (or None, mapped to 0)
          and becomes an epoch-second timestamp at Shanghai local midnight.

        Returns the list of translated record dicts.
        """
        results = []
        shanghai_tz = pytz.timezone('Asia/Shanghai')
        for data in datas:
            newData = {}
            for key, value in fieldMapping[fieldMapper].items():
                newData[key] = data.get(value)
                if key in ['date', 'updateTime', 'startDate']:
                    # Fix: also guard against a missing value, not just negative.
                    if newData[key] is None or newData[key] < 0:
                        newData = None
                        break
                    # milliseconds -> seconds, then truncate to local midnight
                    newData[key] = Spider.convert_timestamp_to_date_only(newData[key] / 1000)
                elif key == 'measureDate':
                    if newData[key] is None:  # was `== None` (PEP 8 E711)
                        newData[key] = 0
                    else:
                        date_object = datetime.strptime(newData[key], '%Y-%m-%d')
                        date_object = shanghai_tz.localize(date_object)
                        newData[key] = int(date_object.timestamp())
            if newData:
                results.append(newData)
        return results
    def fetch_hist_sw_ind_comp(self):
        """Fetch the Shenwan industry classification table and upload it."""
        records = json.loads(self.api.fetch_sw_ind_comp())
        for chunk in self.slice_data(records):
            translated = self.translateFields('shenwan_industry', chunk)
            self.mxtfapi.save_shenwan_industry(translated)
    def fetch_sw_ind(self):
        """Fetch the Shenwan index mapping (change-history) table and upload it."""
        records = json.loads(self.api.fetch_sw_ind_comp_change_hist())
        for chunk in self.slice_data(records):
            translated = self.translateFields('shenwan_history', chunk)
            self.mxtfapi.save_shenwan_history(translated)
    def fetch_overall_market_trend(self):
        """Fetch daily history for the three broad indices (SSE Composite,
        CSI 300, SZSE Component) and upload it, resuming from the latest
        stored record per symbol."""
        latest = self.mxtfapi.overall_market_trend_Lastest_Record()
        last_seen = {}
        if latest.get('data'):
            last_seen = {
                rec['symbol']: datetime.fromtimestamp(rec['date'], tz=east8)
                for rec in latest.get('data')
            }
        default_start = datetime.strptime('2010-01-01', '%Y-%m-%d')
        for symbol in ['sh000001', 'sh000300', 'sz399001']:
            # Resume from the last stored date, else from the 2010 default.
            begin = last_seen.get(symbol) or default_start
            rows = json.loads(self.api.fetch_mkt_index_hist(symbol, begin, datetime.now()))
            for chunk in self.slice_data(rows):
                for row in chunk:
                    # NOTE(review): naive local-time conversion (no tz), unlike
                    # the east8-aware conversions elsewhere — kept as-is.
                    row['date'] = int(datetime.strptime(row['date'], '%Y-%m-%d').timestamp())
                self.mxtfapi.save_overall_market_trend(chunk)
        self.logger.info("获取大盘指数历史行情数据完成")
    def fetch_stock_daily(self, beginDate, endDate, stocks=None):
        """Fetch daily candles between *beginDate* and *endDate* for *stocks*
        (all stock ids when None) and upload the translated records."""
        stock_ids = stocks or self.api.fetch_stock_ids()
        for stock in stock_ids:
            raw = self.api.fetch_stock_hist(beginDate, endDate, stock)
            if not raw:
                continue
            rows = json.loads(raw)
            for chunk in self.slice_data(rows):
                translated = self.translateFields('stocks_custom_history', chunk)
                self.mxtfapi.save_stocks_custom_history(translated)
    def is_same_day(self,date1, date2):
        return date1.year == date2.year and date1.month == date2.month and date1.day == date2.day
    
    def fetch_fuquan_daliy(self):
        """Fetch price-adjustment (fuquan) factors for all stocks and upload
        them in batches of 200; failures are logged, not raised."""
        try:
            self.logger.info(f'获取复权因子')
            factors = json.loads(self.api.fetch_stock_price_adj_factors())
            if factors:
                translated = self.translateFields('stocks_custom_history_factor', factors)
                for batch in self.slice_data(translated, 200):
                    self.mxtfapi.stocks_custom_history_factor(batch)
        except Exception as e:
            self.logger.error(f'复权因子获取失败： error:{e}')
        
    
    def fetch_fuquan_all(self, stocks=None):
        """Fetch adjustment factors stock by stock (all stock ids when None)
        and upload them; per-stock failures are logged and skipped."""
        stock_ids = stocks or self.api.fetch_stock_ids()
        for stock in stock_ids:
            try:
                self.logger.info(f'stock:{stock} 获取复权因子')
                frame = self.api.fetch_single_stock_price_adj_factors(stock)
                factors = json.loads(frame.to_json(orient='records'))
                if factors:
                    translated = self.translateFields('stocks_custom_history_factor', factors)
                    self.mxtfapi.stocks_custom_history_factor(translated)
            except Exception as e:
                self.logger.error(f'stock:{stock} 复权因子获取失败： error:{e}')
        
    def fetch_stock_daily_all(self,start_date_str,stocks=None):
        """Incrementally fetch daily candles for every stock (or the given list).

        For each stock the scan resumes from its stored lastFetchDate (falling
        back to *start_date_str*, format 'YYYY-MM-DD', and never earlier than
        it), walks forward in 30-day windows up to now, uploads each window's
        translated records, then stamps today's timestamp as the new
        lastFetchDate. Per-stock failures are logged and do not stop the loop.
        """
        if not stocks:
            stocks = self.api.fetch_stock_ids()
        now = datetime.now()
        now = east8.localize(now)
        todayStamp = int(now.timestamp())
        default_start = datetime.strptime(start_date_str, '%Y-%m-%d')
        default_start = east8.localize(default_start)
        for stock in stocks:
            try:
                # First read the stock's stored fetch date from the stocks table.
                lastFetch = self.mxtfapi.getSocktLastFetchDate(stock)
                start_date = None
                if lastFetch.get('data') and lastFetch.get('data').get('lastFetchDate'):
                    start_date = lastFetch.get('data').get('lastFetchDate')
                if not start_date:
                    # No stored record: first import, use the provided default date.
                    self.logger.info('stock:'+stock+' begin:'+start_date_str)
                    start_date = default_start
                else:
                    start_date = datetime.fromtimestamp(start_date,tz=east8)
                    # If the stored date predates the default, clamp to the default.
                    if start_date<default_start:
                        start_date = default_start
                    self.logger.info(f'stock:{stock} lastRecord:{start_date}')
                # Truncate to local midnight so windows align on day boundaries.
                start_date = start_date.replace(hour=0, minute=0, second=0, microsecond=0)
                while start_date <= now:
                    # Fixed 30-day windows; the final window may extend past now.
                    end_date = start_date + timedelta(days=30)
                    data_str = self.api.get_stock_day_record(stock,start_date,end_date)
                    datas = json.loads(data_str)
                    if datas:
                        self.logger.debug(f'stock:{stock} start:{start_date} end:{end_date} {len(data_str)}')
                        newdatas = self.translateFields('stocks_custom_history',datas)
                        self.mxtfapi.save_stocks_custom_history(newdatas)
                    
                    start_date = end_date
                self.mxtfapi.updateSocktLastFetchDate(stock,todayStamp)
                self.logger.info(f'stock:{stock} 扫描完成:{now}')
            except Exception as e:
                self.logger.error(f'stock:{stock} error:{e}')
                
    def generate_day(self, beginYear, endYear):
        """Yield (start, end) 'YYYYMMDD' string pairs, one per calendar month,
        for every year from *beginYear* through *endYear* inclusive."""
        for year in range(beginYear, endYear + 1):
            for month in range(1, 13):
                month_start = datetime(year, month, 1)
                # Last day of the month = the day before the first of next month.
                if month == 12:
                    month_end = datetime(year + 1, 1, 1) - timedelta(days=1)
                else:
                    month_end = datetime(year, month + 1, 1) - timedelta(days=1)
                yield month_start.strftime('%Y%m%d'), month_end.strftime('%Y%m%d')
    def fetch_measure_history(self,stockCode=None,exchange='sz'):
        """Crawl supervision/measure records from one exchange ('sz' or 'sh').

        Only records newer than the latest stored measureDate survive; when
        *stockCode* is given, other stocks are skipped. Each surviving record
        is post-processed by ExchangeFileSpider and uploaded one at a time.
        Per-record failures are logged and do not stop the loop.
        """
        self.logger.info(f'爬监管数据:{exchange}-{stockCode}')
        exchange_file_spider = ExchangeFileSpider(model='linux', updatedb=False)
        datas = exchange_file_spider.get_exchange_file(exchange)
        if not datas:
            return
        datas = json.loads(datas)
        # measure_indexs = json.loads(datas['measure_index'])
        # Sample scraped record:
        # {'stock_code': '600095', 'corp_name': '湘财股份', 'measure_subtype': '监管工作函', 'measure_title': '关于湘财股份有限公司媒体报道有关事项的监管工作函', 'measure_target': '上市公司,董事,监事,高级管理人员', 'measure_date': '2024-12-06', 'measure_file_url': 'None', 'measure_file_name': 'None', 'measure_entity': 'sh', 'measure_type': 'jgcs', 'measure_file_local_name': 'None__20241209153811803400.None'}
        lastest_measure = self.mxtfapi.get_latest_measure(exchange)
        lastest_date = 0
        if lastest_measure.get('data') and lastest_measure.get('data').get('measureDate'):
            lastest_date = lastest_measure.get('data').get('measureDate')
        for data in self.slice_data(datas,100):
            newdatas = self.translateFields('measure_history',data)
            # postDatas=[]
            for d in newdatas:
                try:
                    # Strip the float artifact and left-pad the code to 6 digits.
                    # NOTE(review): str.replace removes EVERY '.0' occurrence —
                    # assumes codes are purely numeric; verify for edge cases.
                    d['stockCode'] = str(d['stockCode']).replace('.0','').zfill(6)
                    if stockCode:
                        if d['stockCode'] !=stockCode:
                            continue
                    # Skip records already ingested on a previous run.
                    if d['measureDate']<=lastest_date:
                        continue
                    # Post-process the exchange penalty record (presumably
                    # fetches/attaches the measure file) before upload.
                    new_d = exchange_file_spider.process_exchange_file_data(d)
                    if new_d:
                        res = self.mxtfapi.addMeasureHistorySingle(new_d)
                        self.logger.debug(f'save_single_measure_history to mxtf:{res}')
                    # postDatas.append(new_d)
                except Exception as e:
                    self.logger.error(f"fetch_measure_history {d} error :{e}")
            # if postDatas:
            #     res = self.mxtfapi.save_measure_history(postDatas)
            #     self.logger.debug(f'save_measure_history to mxtf:{res}')
    def fetch_all_measure_history(self):
        """Crawl supervision-measure records for both exchanges (SZ then SH)."""
        for exchange in ('sz', 'sh'):
            self.fetch_measure_history(exchange=exchange)
    def fetch_stocks(self):
        """Fetch the full stock list and upload code/name pairs in chunks of 100."""
        # Source rows carry many more columns (price, turnover, PE, market cap,
        # e.g. '序号', '最新价', '涨跌幅', '成交量', '市盈率-动态', '总市值', ...);
        # only 代码/名称 survive the 'stocks' field mapping.
        records = json.loads(self.api.fetch_stocks())
        for chunk in self.slice_data(records, 100):
            translated = self.translateFields('stocks', chunk)
            self.mxtfapi.save_stocks(translated)
    def init_regional_data(self):
        """Load the province/city/area code tree from doc/pca-code (1).json and
        upload it as flattened regional records (levels 1-3, batches of 200)."""
        mxtfapi = MxtfApi()
        root_directory = Path(__file__).resolve().parent.parent
        # Build the path with pathlib instead of an f-string.
        file_path = root_directory / 'doc' / 'pca-code (1).json'
        # Fix: the original leaked the file handle via json.load(open(...)).
        # Binary mode lets json detect the file's unicode encoding itself.
        with file_path.open('rb') as fh:
            datas = json.load(fh)

        def _as_record(node, parent_code, uni_code, level):
            # One flattened regional row; uniCode chains ancestor codes with '-'.
            return {
                "code": node.get('code'),
                "name": node.get('name'),
                "parentCode": parent_code,
                "level": level,
                "uniCode": uni_code
            }

        indatas = []
        for province in datas:
            p_code = province.get('code')
            indatas.append(_as_record(province, "", p_code, 1))
            for city in province.get('children') or []:
                city_uni = p_code + '-' + city.get('code')
                indatas.append(_as_record(city, p_code, city_uni, 2))
                for area in city.get('children') or []:
                    area_uni = city_uni + '-' + area.get('code')
                    indatas.append(_as_record(area, city.get('code'), area_uni, 3))

        for pdata in SUtil.slice_data(indatas, 200):
            result = mxtfapi.batch_add_regional(pdata)
            self.logger.info(f"save_regional_data to mxtf:{result}")

        
if __name__ == '__main__':
    # Manual entry point: build a spider and uncomment the job(s) to run.
    spider = Spider()
    # spider.fetch_sw_index_daily()
    # spider.fetch_hist_sw_ind_comp()
    # spider.fetch_stock_daily()
    # spider.fetch_stock_daily_all()
    # spider.fetch_a_index_daily()  # NOTE(review): no such method on Spider
    # spider.fetch_all_measure_history()
    # spider.fetch_stocks()