# _*_ coding:utf-8 _*_
# @Time  : 2022.12.23
# @Author: zizlee

# 广州期货交易所数据
import datetime
import json
import pathlib
import time

import pandas as pd
import requests
from zizlee_position import get_dominant_price_position, get_contract_price_position
from urllib3 import disable_warnings

disable_warnings()

pd.set_option('display.max_columns', None)
pd.set_option('display.unicode.ambiguous_as_wide', True)
pd.set_option('display.unicode.east_asian_width', True)
# 不换行显示
pd.set_option('display.width', 1000)

USER_AGENT = "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) " \
             "Chrome/78.0.3904.108 Safari/537.36"


class DailyQuoteSpider(object):
    """Downloads raw GFEX daily-quote, member-rank and warehouse-receipt JSON.

    Raw responses are written under gfex/daily/, gfex/rank/<date>/ and
    gfex/receipt/ for later parsing by DailyQuoteParser.

    NOTE: get_rank_file() reads the module-level globals PROXY_LIST and
    PASS_VARIETY, which this file only defines inside the __main__ block —
    define them first if calling from an import context.
    """

    # Varieties listed on GFEX: si = industrial silicon, lc = lithium carbonate.
    VARIETIES = ['si', 'lc']

    def __init__(self, date: datetime.datetime):
        # Normalize the incoming datetime to midnight of its calendar date.
        self.quote_date = datetime.datetime.strptime(date.strftime('%Y-%m-%d'), '%Y-%m-%d')
        self.quote_date_string = self.quote_date.strftime('%Y%m%d')
        self.headers = {'User-Agent': USER_AGENT,
                        'Host': 'www.gfex.com.cn',
                        'Origin': 'http://www.gfex.com.cn'}
        self.quote_url = "http://www.gfex.com.cn/u/interfacesWebTiDayQuotes/loadList"
        self.quote_file = pathlib.Path("gfex/daily/{}.json".format(self.quote_date_string))

        self.rank_url = "http://www.gfex.com.cn/u/interfacesWebTiMemberDealPosiQuotes/loadList"
        self.rank_folder = pathlib.Path("gfex/rank/{}/".format(self.quote_date_string))

        self.receipt_url = "http://www.gfex.com.cn/u/interfacesWebTdWbillWeeklyQuotes/loadList"
        self.receipt_file = pathlib.Path("gfex/receipt/{}.json".format(self.quote_date_string))
        # Ensure ALL output folders exist up front. The daily folder was
        # previously never created, which made get_quote_file() fail on a
        # fresh checkout; exist_ok avoids the check-then-create race.
        pathlib.Path('gfex/daily/').mkdir(parents=True, exist_ok=True)
        self.rank_folder.mkdir(parents=True, exist_ok=True)
        pathlib.Path('gfex/receipt/').mkdir(parents=True, exist_ok=True)

    def get_quote_file(self):
        """Fetch the day's full quote table and dump the raw JSON to disk."""
        form_params = {
            'trade_date': self.quote_date_string,
            'trade_type': 0
        }
        r = requests.post(self.quote_url, headers=self.headers, data=form_params)
        with open(self.quote_file, 'w', encoding='utf8') as fp:
            json.dump(r.json(), fp, indent=2, ensure_ascii=False)
        print('获取{}.GFE日行情数据完成.'.format(self.quote_date_string))

    def get_variety_contract(self, variety_en):
        """Return the contract list for *variety_en* from the internal data server."""
        url = f"https://210.13.218.130:9000/v1/ruizy/variety/contract/?v={variety_en}"
        # verify=False: the internal server uses a self-signed certificate.
        r = requests.get(url, verify=False)
        return r.json()['data']

    def get_rank_file(self):
        """Download member rankings for every contract of every variety.

        For each contract three data types are requested
        (1 = trade volume, 2 = long positions, 3 = short positions) and each
        raw response is saved as <variety>.<contract>_<type>.json.
        On a failed direct request, retries once through a rotating proxy.
        """
        for vn in self.VARIETIES:
            if vn in PASS_VARIETY:
                # Skip before querying the contract API to avoid a wasted request.
                continue
            contracts = self.get_variety_contract(vn.upper())
            for cnt in contracts:
                proxy_index = -1
                for dt in [1, 2, 3]:  # 1=trade volume 2=long positions 3=short positions
                    form_params = {
                        'trade_date': self.quote_date_string,
                        'trade_type': 0,
                        'variety': f'{vn}',
                        'contract_id': f'{vn}{cnt["scontract"]}',
                        'data_type': dt
                    }
                    try:
                        r = requests.post(self.rank_url, headers=self.headers, data=form_params)
                        if r.status_code != 200:
                            raise ValueError('响应出错：{}'.format(r.status_code))
                    except Exception:
                        # Retry once through the next proxy; the modulo wrap
                        # prevents the IndexError the old code hit once there
                        # were more failures than proxies.
                        proxy_index += 1
                        proxy = PROXY_LIST[proxy_index % len(PROXY_LIST)]
                        print('更换代理:', proxy)
                        r = requests.post(self.rank_url, headers=self.headers, data=form_params, proxies=proxy)
                        print(r.status_code)
                    else:
                        data = r.json()
                        if not data['data']:
                            print(f'{self.quote_date_string}.{vn}.{cnt["scontract"]}_{dt} 没有持仓数据')
                            break
                        # Save raw data as <variety>.<contract>_<1/2/3>.json.
                        # NOTE(review): the request uses cnt['scontract'] but the
                        # file name uses cnt['lcontract'] — presumably short vs.
                        # full contract codes; confirm both keys exist in the
                        # contract API payload.
                        json_file = self.rank_folder.joinpath(f'{vn}.{cnt["lcontract"]}_{dt}.json')
                        with open(json_file, 'w', encoding='utf8') as cf:
                            json.dump(data, cf, indent=2, ensure_ascii=False)
                        print(f'保存{self.quote_date_string}.{vn}.{cnt["lcontract"]}_{dt}的持仓数据成功.')
                time.sleep(1)  # throttle between contracts

    def get_receipt_file(self):
        """Fetch the day's warehouse-receipt table and dump the raw JSON to disk."""
        r = requests.post(self.receipt_url, headers=self.headers, data={'gen_date': self.quote_date_string})
        with open(self.receipt_file, 'w', encoding='utf8') as fp:
            json.dump(r.json(), fp=fp, ensure_ascii=False, indent=2)
        print('获取{}.GFE日仓单数据完成!'.format(self.quote_date_string))


class DailyQuoteParser(object):
    """Parses the raw GFEX JSON files saved by DailyQuoteSpider into flat
    record lists under resolution/<date>/ for later upload."""

    def __init__(self, date: datetime.datetime):
        # Normalize to midnight of the given trading date.
        self.quote_date = datetime.datetime.strptime(date.strftime('%Y-%m-%d'), '%Y-%m-%d')
        self.quote_date_string = self.quote_date.strftime('%Y%m%d')
        self.quote_file = pathlib.Path("gfex/daily/{}.json".format(self.quote_date_string))
        self.receipt_file = pathlib.Path("gfex/receipt/{}.json".format(self.quote_date_string))

        self.resolution_quote_file = pathlib.Path("resolution/{}/GFE_Quote.json".format(self.quote_date_string))
        self.resolution_rank_file = pathlib.Path("resolution/{}/GFE_Rank.json".format(self.quote_date_string))
        self.resolution_receipt_file1 = pathlib.Path("resolution/{}/GFE_Receipt.json".format(self.quote_date_string))  # per-variety receipts
        self.resolution_receipt_file2 = pathlib.Path("resolution/{}/GFE_Receipt2.json".format(self.quote_date_string))  # per-warehouse receipts

        self.rank_folder = pathlib.Path("gfex/rank/{}/".format(self.quote_date_string))

        # exist_ok avoids the race between an exists() check and mkdir().
        pathlib.Path("resolution/{}/".format(self.quote_date_string)).mkdir(parents=True, exist_ok=True)

    def parse_quote_file(self):
        """Parse the raw daily-quote JSON into per-contract records and save
        them to resolution/<date>/GFE_Quote.json."""
        if not self.quote_file.exists():
            print('没有发现{}.GFE的行情文件,请先抓取数据.'.format(self.quote_date_string))
            return
        t_timestamp = int(self.quote_date.timestamp())
        with open(self.quote_file, 'r', encoding='utf8') as reader:
            source_quote = json.load(reader)

        df = pd.DataFrame(source_quote['data'])
        # Map the exchange's field names onto the project's column names
        # ('volumn' is the exchange's own typo, not ours).
        df.rename(columns={
            'varietyOrder': 'variety_en',
            'lastClear': 'pre_settlement',
            'open': 'open_price',
            'high': 'highest',
            'low': 'lowest',
            'close': 'close_price',
            'clearPrice': 'settlement',
            'diff': 'zd_1',
            'diff1': 'zd_2',
            'volumn': 'trade_volume',
            'turnover': 'trade_price',
            'openInterest': 'empty_volume',
            'diffI': 'increase_volume'
        }, inplace=True)
        # Drop the subtotal ("小计") and grand-total ("总计") rows.
        df = df[~df['variety'].str.contains('小计|总计')]
        df['date'] = t_timestamp
        df['variety_en'] = df['variety_en'].apply(lambda x: x.upper())
        df['contract'] = df['variety_en'] + df['delivMonth']
        df = df[['date', 'variety_en', 'contract', 'pre_settlement', 'open_price', 'highest',
                 'lowest', 'close_price', 'settlement', 'zd_1', 'zd_2', 'trade_volume',
                 'trade_price', 'empty_volume', 'increase_volume']]
        quote_data = df.to_dict(orient='records')
        print('----- 解析{}.GFE行情文件成功,数量:{} -----'.format(self.quote_date_string, len(quote_data)))
        if len(quote_data) > 1:
            # NOTE(review): only saved when MORE than one record exists —
            # presumably to skip near-empty responses; confirm intent.
            with open(self.resolution_quote_file, 'w', encoding='utf8') as fp:
                json.dump(quote_data, fp, indent=2, ensure_ascii=False)

    def parse_rank_file(self):
        """Merge each contract's three rank files (1=volume, 2=long, 3=short)
        into single records and save them to resolution/<date>/GFE_Rank.json."""
        # Unique contract stems present in the folder: '<variety>.<lcontract>'.
        contract_list = list(set([f.name.split('_')[0] for f in self.rank_folder.glob('*.json')]))
        rank_data = []
        for cnt in contract_list:
            cnt_df = None
            for dt in [1, 2, 3]:  # 1=trade volume 2=long positions 3=short positions
                # NOTE(review): assumes all three files exist per contract —
                # a missing one raises FileNotFoundError here.
                filename = self.rank_folder.joinpath(f'{cnt}_{dt}.json')
                with open(filename, 'r', encoding='utf8') as cf:
                    data = json.load(cf)
                df = pd.DataFrame(data['data'])
                del df['cpFlag']
                del df['memberId']
                if dt == 1:
                    df.rename(columns={'abbr': 'trade_company', 'todayQty': 'trade', 'qtySub': 'trade_increase'},
                              inplace=True)
                    cnt_df = df
                if dt == 2:
                    df.rename(columns={'abbr': 'long_position_company', 'todayQty': 'long_position', 'qtySub': "long_position_increase"},
                              inplace=True)
                    # 'contractId' carries the rank number, so this joins the
                    # three tables row-by-rank.
                    cnt_df = pd.merge(cnt_df, df, on='contractId')
                if dt == 3:
                    df.rename(columns={'abbr': 'short_position_company', 'todayQty': 'short_position', 'qtySub': "short_position_increase"},
                              inplace=True)
                    cnt_df = pd.merge(cnt_df, df, on='contractId')
            # Drop the grand-total ("总计") row.
            cnt_df = cnt_df[~cnt_df['contractId'].str.contains('总计')]
            cnt_df.rename(columns={'contractId': 'rank'}, inplace=True)
            # Attach the trading-date timestamp.
            cnt_df['date'] = int(self.quote_date.timestamp())
            # Variety / contract derived from the '<variety>.<lcontract>' stem.
            cnt_df['variety_en'] = cnt.split('.')[0].upper()
            cnt_df['contract'] = cnt.upper().replace('.', '')
            cnt_df = cnt_df[['date', 'variety_en', 'contract', 'rank', 'trade_company', 'trade', 'trade_increase',
                             'long_position_company', 'long_position', 'long_position_increase',
                             'short_position_company', 'short_position', 'short_position_increase']]

            rank_data.extend(cnt_df.to_dict(orient='records'))
        print('----- 解析{}.GFE排名文件成功,数量:{} -----'.format(self.quote_date_string, len(rank_data)))
        if len(rank_data) > 1:
            with open(self.resolution_rank_file, 'w', encoding='utf8') as fp:
                json.dump(rank_data, fp, indent=2, ensure_ascii=False)

    def _pre_parse_receipt(self):
        """Group the raw receipt rows by variety code.

        Returns a dict mapping variety -> list of warehouse-level rows
        (whType == '2'). Returns an EMPTY dict when the raw file is missing;
        the previous implicit None return crashed parse_receipt_file with
        AttributeError on .items().
        """
        if not self.receipt_file.exists():
            print("没有发现{}.GFE的仓单文件,请先抓取数据!".format(self.quote_date_string))
            return {}
        with open(self.receipt_file, "r", encoding="utf-8") as reader:
            source_content = json.load(reader)

        receipt = {}
        for row in source_content['data']:
            variety = row['varietyOrder'].strip()
            if variety and row['whType'] == '2':  # keep warehouse-level rows only
                receipt.setdefault(variety, []).append(row)
        return receipt

    def parse_receipt_file(self):
        """Build per-variety (file1) and per-warehouse (file2) receipt records
        and save whichever result is non-empty."""
        receipt_data = self._pre_parse_receipt()
        receipt1 = []
        receipt2 = []
        cur_date_ts = int(self.quote_date.timestamp())
        for ven, data_list in receipt_data.items():
            # Variety-level aggregate record.
            receipt1.append({
                'date': cur_date_ts,
                'variety_en': ven.upper(),
                'receipt': sum(int(r['wbillQty']) for r in data_list),
                'increase': sum(int(r['diff']) for r in data_list),
            })
            # One record per warehouse row.
            for w in data_list:
                receipt2.append({
                    'warehouse': w['whAbbr'],
                    'variety_en': ven.upper(),
                    'receipt': w['wbillQty'],
                    'increase': w['diff'],
                    'ex_total': 0,  # rows with ex_total=0 are counted into subtotals downstream
                    'receipt_date': self.quote_date.strftime('%Y-%m-%d')
                })
        print('----- 解析{}.GFE仓单文件1成功,数量:{} -----'.format(self.quote_date_string, len(receipt1)))
        if len(receipt1) > 0:
            with open(self.resolution_receipt_file1, 'w', encoding='utf8') as f1:
                json.dump(receipt1, f1, ensure_ascii=False, indent=2)

        print('----- 解析{}.GFE仓单文件2成功,数量:{} -----'.format(self.quote_date_string, len(receipt2)))
        if len(receipt2) > 0:
            with open(self.resolution_receipt_file2, 'w', encoding='utf8') as f2:
                json.dump(receipt2, f2, ensure_ascii=False, indent=2)


class DailyQuoteSaver(object):
    """Uploads the parsed GFEX data files (quotes, rankings, receipts) to the
    data server — both the sharded per-exchange tables and the monolithic ones."""

    SERVER_API = "https://210.13.218.130:9000/api/"

    def __init__(self, date: datetime.datetime):
        # Normalize the incoming datetime to midnight of its calendar date.
        self.quote_date = datetime.datetime.strptime(date.strftime('%Y-%m-%d'), '%Y-%m-%d')
        self.quote_date_string = self.quote_date.strftime('%Y%m%d')
        self.resolution_quote_file = pathlib.Path("resolution/{}/GFE_Quote.json".format(self.quote_date_string))
        self.resolution_rank_file = pathlib.Path("resolution/{}/GFE_Rank.json".format(self.quote_date_string))

        self.resolution_receipt_file1 = pathlib.Path("resolution/{}/GFE_Receipt.json".format(self.quote_date_string))  # per-variety receipts
        self.resolution_receipt_file2 = pathlib.Path("resolution/{}/GFE_Receipt2.json".format(self.quote_date_string))  # per-warehouse receipts

        hyphen_date = self.quote_date.strftime("%Y-%m-%d")
        self.quote_save_url = self.SERVER_API + "exchange/gfe/daily/?date={}".format(hyphen_date)
        self.rank_save_url = self.SERVER_API + "exchange/gfe/rank/?date={}".format(hyphen_date)
        self.receipt_save_url = self.SERVER_API + "exchange/gfe/receipt/?date={}".format(hyphen_date)

    def read_daily_quote(self):
        """Load the parsed daily-quote records; raise ValueError when absent."""
        if not self.resolution_quote_file.exists():
            raise ValueError('{}.GFE行情数据文件不存在,请先解析保存！'.format(self.quote_date_string))
        return json.loads(self.resolution_quote_file.read_text(encoding='utf8'))

    def read_daily_rank(self):
        """Load the parsed daily-ranking records; raise ValueError when absent."""
        if not self.resolution_rank_file.exists():
            raise ValueError('{}.GFE排名数据文件不存在,请先解析保存！'.format(self.quote_date_string))
        return json.loads(self.resolution_rank_file.read_text(encoding='utf8'))

    def save_daily_quote_single(self):
        """POST daily quotes to the sharded per-exchange table."""
        records = self.read_daily_quote()
        try:
            response = requests.post(self.quote_save_url, json=records, verify=False)
            print(response.json())
        except Exception as e:
            print('分库分表保存{}.GFE行情数据失败了:{}'.format(self.quote_date_string, e))
        time.sleep(1)

    def save_daily_quote_monolithic(self):
        """POST daily quotes to the monolithic daily-quotes table."""
        frame = pd.DataFrame(self.read_daily_quote())
        # The monolithic table wants a text date plus 'position_volume'.
        frame['quotes_date'] = frame['date'].apply(
            lambda ts: datetime.datetime.fromtimestamp(ts).strftime('%Y-%m-%d'))
        frame.rename(columns={'empty_volume': 'position_volume'}, inplace=True)

        save_url = self.SERVER_API + 'dat/quotes/daily-quotes/'
        try:
            response = requests.post(save_url, json=frame.to_dict(orient='records'), verify=False)
            payload = response.json()
        except Exception as e:
            print('整体表保存{}.GFE行情数据失败了:{}'.format(self.quote_date_string, e))
        else:
            print('整体表保存{}.GFE行情数据成功:{},message:{}'.format(self.quote_date_string, payload['count'], payload['message']))
        time.sleep(1)

    def save_daily_rank_signal(self):
        """POST daily rankings to the sharded per-exchange table."""
        records = self.read_daily_rank()
        try:
            response = requests.post(self.rank_save_url, json=records, verify=False)
            print(response.json())
        except Exception as e:
            print('保存{}.GFE排名数据失败了:{}'.format(self.quote_date_string, e))
        time.sleep(1)

    def save_daily_rank_monolithic(self):
        """POST daily rankings to the monolithic daily-rank table."""
        frame = pd.DataFrame(self.read_daily_rank())
        frame['rank_date'] = frame['date'].apply(
            lambda ts: datetime.datetime.fromtimestamp(ts).strftime('%Y-%m-%d'))
        save_url = self.SERVER_API + 'dat/rank/daily-rank/'
        try:
            response = requests.post(save_url, json=frame.to_dict(orient='records'), verify=False)
            payload = response.json()
        except Exception as e:
            print('整体表保存{}.GFE排名数据失败了:{}'.format(self.quote_date_string, e))
        else:
            print('整体表保存{}.GFE排名数据成功:{},message:{}'.format(self.quote_date_string, payload['count'], payload['message']))
        time.sleep(1)

    def save_price_position(self):
        """Join daily quotes with rankings and POST price-position records."""
        # Daily quotes.
        quote_df = pd.DataFrame(self.read_daily_quote())
        quote_df.rename(columns={'date': 'quotes_ts', 'empty_volume': 'position_volume'}, inplace=True)
        # Daily rankings.
        rank_df = pd.DataFrame(self.read_daily_rank())
        rank_df.rename(columns={'date': 'rank_ts'}, inplace=True)
        if quote_df.empty or rank_df.empty:
            print('{}.GFE行情或持仓数据为空!'.format(self.quote_date_string))
            return
        quotes_df = quote_df[
            ['quotes_ts', 'variety_en', 'contract', 'close_price', 'trade_volume', 'position_volume']]
        rank_df = rank_df[['rank_ts', 'variety_en', 'contract', 'long_position', 'short_position']]
        # Dominant-contract series plus per-contract series from the helpers.
        dominant_df = get_dominant_price_position(quotes_df.copy(), rank_df.copy())
        contract_df = get_contract_price_position(quotes_df.copy(), rank_df.copy())
        final_df = pd.concat([dominant_df, contract_df])

        def tidy(value):
            # Whole numbers become int; everything else is rounded to 4 places.
            return int(value) if int(value) == float(value) else round(value, 4)

        for col in ['close_price', 'position_price', 'position_volume', 'long_position', 'short_position']:
            final_df[col] = final_df[col].apply(tidy)
        final_df.sort_values(by='contract', inplace=True)
        # Upload to the server.
        save_url = self.SERVER_API + 'dsas/price-position/'
        try:
            response = requests.post(save_url, json=final_df.to_dict(orient='records'), verify=False)
            payload = response.json()
        except Exception as e:
            print('保存{}.GFE持仓价格数据失败了:{}'.format(self.quote_date_string, e))
        else:
            print('保存{}.GFE持仓价格数据成功:{}'.format(self.quote_date_string, payload['message']))

    def save_daily_receipt(self):
        """POST per-variety receipts to the sharded per-exchange table."""
        if not self.resolution_receipt_file1.exists():
            raise ValueError('{}.GFE仓单数据文件不存在,请先解析保存！'.format(self.quote_date_string))
        records = json.loads(self.resolution_receipt_file1.read_text(encoding='utf8'))
        if len(records) < 1:
            print('没有发现{}.GFE可以保存的仓单数据!'.format(self.quote_date_string))
            return
        try:
            response = requests.post(self.receipt_save_url, json=records, verify=False)
            print(response.json())
        except Exception as e:
            print('保存{}.GFE仓单数据失败了:{}'.format(self.quote_date_string, e))
        time.sleep(1)

    def save_daily_receipt_monolithic(self):
        """POST per-warehouse receipts to the monolithic daily-receipt table."""
        if not self.resolution_receipt_file2.exists():
            raise ValueError('{}.GFE仓单数据文件不存在,请先解析保存！'.format(self.quote_date_string))
        records = json.loads(self.resolution_receipt_file2.read_text(encoding='utf8'))
        save_url = self.SERVER_API + 'dat/receipt/daily-receipt/'
        try:
            response = requests.post(save_url, json=records, verify=False)
            payload = response.json()
        except Exception as e:
            print('新版保存{}.GFE仓单数据失败了:{}'.format(self.quote_date_string, e))
        else:
            print(
                '新版保存{}.GFE仓单数据成功:{},message:{}'.format(self.quote_date_string, payload['count'], payload['message']))
        time.sleep(1)


if __name__ == '__main__':
    # Pipeline stage switches (1 = run, 0 = skip).
    SPIDER = 1
    PARSER = 1
    SAVER = 1
    # Fallback HTTP proxies used by the rank spider when a direct request fails.
    PROXY_LIST = [
        {'http': '122.136.212.132:53281'},
        {'http': '58.220.95.86:9401'}
    ]
    # Varieties to skip while downloading rankings.
    PASS_VARIETY = []
    # Day offset from today for the trading date being processed.
    delta_days = 0
    # NOTE: the exchange shut down the ranking API; rank data only exists up to
    # 2023-10-17 (inclusive), and 2023-10-18 ~ 2023-11-09 was never published.
    handle_date = datetime.datetime.today() + datetime.timedelta(days=delta_days)
    if SPIDER:
        quote_spider = DailyQuoteSpider(date=handle_date)
        quote_spider.get_quote_file()
        quote_spider.get_rank_file()
        quote_spider.get_receipt_file()
    if PARSER:
        quote_parser = DailyQuoteParser(date=handle_date)
        quote_parser.parse_quote_file()
        quote_parser.parse_rank_file()
        quote_parser.parse_receipt_file()
    if SAVER:
        quote_saver = DailyQuoteSaver(date=handle_date)
        quote_saver.save_daily_quote_single()        # sharded table - daily quotes
        quote_saver.save_daily_rank_signal()         # sharded table - daily rankings
        quote_saver.save_daily_quote_monolithic()    # monolithic table - daily quotes
        quote_saver.save_daily_rank_monolithic()     # monolithic table - daily rankings
        quote_saver.save_price_position()            # price-position table (quotes joined with rankings)
        quote_saver.save_daily_receipt()             # sharded table - daily receipts
        quote_saver.save_daily_receipt_monolithic()  # monolithic table - daily receipts

