import logging
import os
import pandas
import sys
import json
import math
from datafeed import TqsdkDatafeed
from indicator import CustomIndicator
from vutils.filepath import check_or_create_dir

# Route all log records to stdout at INFO level with a compact
# "timestamp-level-message" layout (module runs as a standalone script).
logging.basicConfig(
    stream=sys.stdout, level=logging.INFO, format='%(asctime)s-%(levelname)s-%(message)s'
)


class Worker(object):
    """Download futures k-lines, compute indicators and persist CSV snapshots.

    Pipeline: fetch the instrument detail table (cached daily on disk),
    build 60-minute / daily / weekly / monthly k-line frames per contract,
    run ``CustomIndicator`` on each, append lagged history columns, store
    per-interval CSVs, and finally write one cross-sectional snapshot CSV.
    """

    def __init__(self, depth, directory, dt):
        """
        :param depth: number of lagged copies appended per column in
            ``extend_df`` (lags 1 .. depth-1).
        :param directory: root output directory for detail/indicator/snapshot
            CSV files.
        :param dt: upper datetime bound; k-line rows after this moment are
            dropped in ``get_tqsdk_kline``.
        """
        self.datafeed = TqsdkDatafeed()
        self.directory = directory
        self.depth = depth
        self.dt = dt

    @staticmethod
    def resample(dataframe, interval):
        """Resample an OHLCV(+amount) frame to a coarser period.

        ``"H"`` maps to hourly (``"h"``) and ``"M"`` to month-end (``"ME"``,
        the modern pandas alias); any other value (e.g. ``"W"``) is passed to
        ``DataFrame.resample`` unchanged.  Empty bins are dropped.

        NOTE: mutates the caller's ``datetime`` column in place (converted via
        ``to_datetime``) — existing callers rely on nothing else, but be aware.
        """
        if interval == "H":
            period = "h"
        elif interval == "M":
            period = "ME"
        else:
            period = interval
        dataframe['datetime'] = pandas.to_datetime(dataframe['datetime'])
        dataframe = dataframe.set_index(dataframe['datetime'])
        dataframe = dataframe.resample(period).agg(
            dict(
                datetime='last', open='first', high='max', low='min',
                close='last', volume='sum', amount='sum'
            )
        ).dropna()
        return dataframe.reset_index(drop=True)

    def get_or_create_detail(self):
        """Return the instrument detail table, refreshing the on-disk cache daily.

        The cache (``<directory>/detail.csv``) is rebuilt when missing or when
        its mtime date is older than today.  Within each product, contracts are
        ranked by previous-day volume (descending) into a ``sequence`` column
        (0 = most liquid).

        NOTE(review): ``os.path.getmtime`` is a UTC epoch while
        ``pandas.Timestamp.now()`` is naive local time — near midnight the
        refresh decision may be off by one day; confirm this is acceptable.
        """
        now = pandas.Timestamp.now()
        filepath = check_or_create_dir(f"{self.directory}/detail.csv")
        if not os.path.exists(filepath) or pandas.to_datetime(os.path.getmtime(filepath), unit="s").date() < now.date():
            result = list()
            detail = self.datafeed.get_detail()
            for _, df in detail.groupby('product'):
                df = df.sort_values('pre_volume', ascending=False).reset_index(drop=True)
                df['sequence'] = df.index
                result.append(df)
            result = pandas.concat(result).reset_index(drop=True)
            result.to_csv(filepath, index=False)
        return pandas.read_csv(filepath)

    def get_tqsdk_kline(self, detail, interval):
        """Fetch one contract's k-lines and add a turnover (``amount``) column.

        ``amount`` approximates turnover in 1e8 currency units as
        midprice * volume * contract multiplier.  Rows after ``self.dt`` are
        discarded.
        """
        temp_df = self.datafeed.get_history(detail.kq_instrument_id, interval)
        temp_df['amount'] = (temp_df["high"] + temp_df["low"]) / 2 * temp_df["volume"] * detail.volume_multiple / 1e8
        temp_df = temp_df.loc[temp_df["datetime"].le(self.dt)]
        return temp_df

    @staticmethod
    def _aggregate_session(rows):
        """Collapse a session's 15-minute rows into 60-minute bars.

        Groups positionally in chunks of four (index // 4) and aggregates
        OHLCV/amount; a trailing chunk of fewer than four rows still forms a
        (partial) bar.
        """
        frame = pandas.DataFrame(rows)
        return frame.groupby(lambda x: math.floor(x / 4)).agg(
            dict(datetime='last', open='first', high='max', low='min',
                 close='last', volume='sum', amount='sum'))

    def get_60min_kline(self, detail):
        """Build 60-minute bars from 15-minute bars, aligned to trading sessions.

        15-minute bar timestamps are shifted to bar *end* time, rows up to the
        first full session boundary are dropped, and rows are aggregated four
        at a time within each session (session end = last day-session close
        time from the contract's ``trading_time`` JSON).
        """
        temp_df = self.get_tqsdk_kline(detail, '15min')
        # Re-stamp each 15-min bar with its end time so session-close matching works.
        temp_df['datetime'] = temp_df['datetime'] + pandas.Timedelta(minutes=15)
        trading_time = json.loads(detail.trading_time.replace("'", "\""))
        # Drop the (possibly partial) leading day so grouping starts at a session boundary.
        temp_df = temp_df.loc[temp_df.datetime.gt(pandas.to_datetime('%s %s' % (temp_df.datetime.min().strftime('%Y-%m-%d'), trading_time['day'][-1][1])))]
        dfs, temp = list(), list()
        for _, row in temp_df.iterrows():
            temp.append(row.to_dict())
            if row.datetime.strftime('%H:%M:%S') == trading_time['day'][-1][1]:
                # Session close reached: flush the accumulated 15-min rows.
                if temp:
                    dfs.append(self._aggregate_session(temp))
                temp = list()
        if temp:
            # Flush the final, still-open session.
            dfs.append(self._aggregate_session(temp))
        temp_df = pandas.concat(dfs)
        temp_df = temp_df.reset_index(drop=True)
        temp_df = temp_df.loc[temp_df.datetime.lt(pandas.Timestamp.now())]
        # Drop a trailing partial bar (< 1 hour of data and not at session close).
        # Guard on length: iloc[-2] would raise IndexError on a 0/1-row frame.
        if temp_df.shape[0] >= 2 and (temp_df.iloc[-1].datetime - temp_df.iloc[-2].datetime).total_seconds() < 3600 \
                and temp_df.iloc[-1].datetime.strftime('%H:%M:%S') != trading_time['day'][-1][1]:
            temp_df = temp_df.iloc[:-1]
        return temp_df

    def get_daily_kline(self, detail):
        """Return the contract's daily k-lines (with the ``amount`` column)."""
        return self.get_tqsdk_kline(detail, 'D')

    def get_kline_maps(self, detail):
        """Return {interval: DataFrame} for H (60min), D, W and M intervals.

        Weekly and monthly frames are resampled from the daily frame.
        """
        results = dict()
        results['H'] = self.get_60min_kline(detail)
        results['D'] = self.get_daily_kline(detail)
        results['W'] = self.resample(results['D'], 'W')
        results['M'] = self.resample(results['D'], 'M')
        return results

    def storage_csv(self, path, df):
        """Write ``df`` under ``<directory><path>``, creating parents as needed."""
        filepath = self.directory + path
        filepath = check_or_create_dir(filepath)
        df.to_csv(filepath, index=False)

    def extend_df(self, df):
        """Append lagged copies of every non-datetime column.

        For each feature column ``c``, adds ``c1`` .. ``c<depth-1>`` holding
        the value shifted down by that many rows (leading rows become NaN).
        """
        pds = dict()
        for column in df.columns:
            if column == "datetime":
                continue
            for i in range(1, self.depth):
                pds[column + str(i)] = df[column].shift(i)
        if not pds:
            # depth <= 1 (or datetime-only frame): nothing to append; the old
            # code raised "No objects to concatenate" here.
            return df
        # Dict keys become column labels directly — no need for
        # ignore_index=True plus a manual columns reassignment.
        fragment = pandas.concat(pds, axis=1)
        return pandas.concat([df, fragment], axis=1)

    @staticmethod
    def generate_snapshot(maps):
        """Flatten {interval: row-dict} into one dict.

        Feature keys are prefixed with the lower-cased interval
        (``h_close``, ``d_close``, ...); ``datetime`` is kept un-prefixed,
        taken from the first interval that provides it.
        """
        result = dict()
        for interval, item in maps.items():
            for key in item:
                if key != "datetime":
                    result[f"{interval.lower()}_{key}"] = item[key]
                else:
                    if "datetime" not in result:
                        result[key] = item[key]
        return result

    def start(self):
        """Run the full pipeline and write the cross-sectional snapshot CSV."""
        result = list()
        detail = self.get_or_create_detail()
        # Keep only the two most liquid contracts per product (sequence 0 and 1).
        detail = detail.loc[detail["sequence"].lt(2)]
        # Excluded products — presumably treasury-bond futures; confirm with data owner.
        detail = detail.loc[~detail["product"].isin(["TS", "TF", "T", "TL"])]
        detail = detail.reset_index(drop=True)
        for index, row in detail.iterrows():
            logging.info(f"{index + 1}/{detail.shape[0]}: {row.symbol}")
            try:
                data_maps = self.get_kline_maps(row)
                temp_maps = dict()
                for interval in data_maps:
                    temp_df = data_maps[interval].copy()
                    temp_df = CustomIndicator(temp_df).result()
                    temp_df = self.extend_df(temp_df)
                    self.storage_csv(f"/indicator/{interval}/{row.symbol}.csv", temp_df)
                    # Snapshot uses only the most recent (last) row per interval.
                    temp_maps[interval] = temp_df.iloc[-1].to_dict()
                snapshot = self.generate_snapshot(temp_maps)
                snapshot.update(row.to_dict())
                snapshot.pop("trading_time")
            except Exception as exp:
                # Best-effort per contract: log and continue with the rest.
                logging.error(exp)
            else:
                result.append(snapshot)
        result = pandas.DataFrame(result)
        result = result.fillna(0)
        # Keep only contracts whose data reaches the most recent timestamp.
        latest = result["datetime"].max()
        result = result.loc[result["datetime"].eq(latest)]
        # Single quotes inside the f-string: reusing double quotes was a
        # SyntaxError on every Python before 3.12 (PEP 701).
        self.storage_csv(f"/snapshot/{latest.strftime('%Y%m%d%H%M')}.csv", result)
