import pandas as pd
import datetime
import random
import os
import json
import decimal
import numpy as np
import Core.Gadget as Gadget
import Core.IO as IO
import Core.Quote as Quote
import Core.Gadget as Gadget
from Core.Config import Config
import Core.Realtimeview as RealTimeView
from Core.Exception import *


def GenearaCacheDocument(bar):
    """Build a flat cache document (dict) from one daily-bar record.

    Always includes symbol plus stringified datetime/date; copies any of
    the known bar/stock fields that are present on the record.
    """
    dailybar_fields = ["open", "high", "low", "close", "volume", "money"]
    stock_fields = ["adjfactor", "trade_status", "bopen", "bhigh", "blow",
                    "bclose", "total_shares", "free_float_shares"]

    document = {
        "symbol": bar["symbol"],
        "datetime": Gadget.ToDateTimeString(bar["datetime"]),
        "date": Gadget.ToDateTimeString(bar["date"]),
    }
    # Optional fields: copy only those actually carried by this record.
    document.update({name: bar[name]
                     for name in dailybar_fields + stock_fields
                     if name in bar})
    return document


def CacheDailyBar(database, realtime, datetime1=None, datetime2=None, asHash=False, startBatch=0, instrumentType="Stock"):
    """Bulk-cache daily bars from the database into the realtime store.

    Parameters:
        database: project database handle (Count / Find).
        realtime: realtime view handle (SetManyDocuments / SetManyHashDocuments).
        datetime1, datetime2: optional inclusive DateTime range bounds.
        asHash: if True, store each batch inside the single hash "DailyBar";
            otherwise store one top-level document per key.
        startBatch: skip batches below this index (resume support).
        instrumentType: "Stock", "Index", ...
    """
    # Optional inclusive DateTime range filter; dropped entirely when no bound given.
    query = {"DateTime": {}}
    if datetime1 is not None:
        query["DateTime"][">="] = datetime1
    if datetime2 is not None:
        query["DateTime"]["<="] = datetime2
    if not query["DateTime"]:
        query = {}

    # NOTE(review): Count and Find address the collection differently
    # ("financial_data" / <type>_DailyBar vs "DailyBar" / <type>) — confirm
    # both resolve to the same data set.
    count = database.Count("financial_data", instrumentType + "_DailyBar", query)
    batchSize = 100000
    # Ceiling division: the old int(count / batchSize) + 1 issued one extra,
    # empty batch whenever count was an exact multiple of batchSize.
    batchCount = -(-count // batchSize)
    query["limit"] = batchSize

    for i in range(batchCount):
        if i < startBatch:
            continue
        dataDic = {}
        query["skip"] = i * batchSize
        bars = database.Find("DailyBar", instrumentType, query)
        for bar in bars:
            # Document body plus its "<symbol>_<date>" key.
            document = GenearaCacheDocument(bar)
            key = bar["symbol"] + "_" + Gadget.ToDateString(bar["datetime"])
            dataDic[key] = document

        if asHash:
            # Two-level storage: all entries as fields of the "DailyBar" hash.
            realtime.SetManyHashDocuments("DailyBar", dataDic)
        else:
            # Flat storage: one SET per batch.
            realtime.SetManyDocuments(dataDic)

        # round AFTER scaling to 100 so progress keeps two real decimals.
        print("Cached BatchCount", i, "of", batchCount, "Progress",
              round(i / batchCount * 100, 2), "%", datetime.datetime.now())


def Automatic_CacheDailyBar(database, realtime, datetime2, start_index=0, instrumentType="Stock"):
    """Incrementally cache missing daily bars per instrument up to datetime2.

    For each stock still listed after 2010-01-01, probes the realtime cache
    backwards along the trading calendar to estimate where data starts
    missing, then re-caches everything after that date into the "DailyBar"
    hash.

    Parameters:
        database, realtime: project handles.
        datetime2: cache up to this date (snapped to the last trading day).
        start_index: skip instruments with a smaller 1-based position (resume).
        instrumentType: collection used when fetching the bars.
    """
    datetime1 = datetime.datetime(2010, 1, 1)
    df_calender = Gadget.GetTradingDays_DataFrame(database, datetime1, datetime2)
    df_calender.sort_values(by="Date", ascending=False, inplace=True)
    datetime2 = df_calender.iloc[0]["Date"]  # last actual trading day <= requested end
    tradingDays_count = len(df_calender)

    # Skip instruments delisted before the window starts.
    # NOTE(review): instruments are always fetched from the "Stock" collection
    # even when instrumentType differs — confirm intended.
    instruments = database.Find("Instruments", "Stock")
    df_instruments = Gadget.DocumentsToDataFrame(instruments)
    df_instruments.rename(columns={"datetime1": "DateTime1",
                                   "datetime2": "DateTime2",
                                   "symbol": "Symbol",
                                   }, inplace=True)
    df_instruments = df_instruments[df_instruments["DateTime2"] > datetime1]

    instrument_count = 0
    for index, row in df_instruments.iterrows():
        instrument_count += 1
        if instrument_count < start_index:
            continue

        missing_from_date = datetime2
        symbol = row["Symbol"]
        ipo_date = row["DateTime1"].date()

        if Quote.GetQuoteRealTimeView(realtime, symbol, datetime2, True):
            # Latest bar already cached -> nothing to do for this symbol.
            print("CacheDailyBar, Data All Ready", symbol, datetime2)
            continue

        # Walk backwards through the calendar until a cached quote is found.
        backforward_count = 0
        while True:
            if backforward_count == 0:
                backforward_count += 5   # first step ~a week: the common gap size
            else:
                backforward_count += 20  # then roughly a month per step

            if backforward_count >= tradingDays_count:
                # Walked past the calendar: re-cache from the IPO date.
                missing_from_date = ipo_date
                break

            # Actual trading day at this calendar offset.
            missing_from_date = df_calender.iloc[backforward_count]["Date"]
            if Quote.GetQuoteRealTimeView(realtime, symbol, missing_from_date, True):
                break

        documents_bars = database.Find("DailyBar", instrumentType,
                                       filter=[("Symbol", symbol), ("Date", ">", missing_from_date)])
        dataDic = {}
        for bar in documents_bars:
            document = GenearaCacheDocument(bar)
            # BUG FIX: cache documents carry lowercase "symbol"; the old code
            # read document["Symbol"] and raised KeyError on every bar.
            key = document["symbol"] + "_" + Gadget.ToDateString(bar["datetime"])
            dataDic[key] = document

        realtime.SetManyHashDocuments("DailyBar", dataDic)
        print("Cached ", symbol, instrument_count, "from", missing_from_date, "to", datetime2,
              "#", len(dataDic), "@", datetime.datetime.now())


def CacheMonthlyReturn(instruments,datetime1,datetime2):
    """Fetch monthly returns per instrument over [datetime1, datetime2].

    NOTE(review): `database` is not a parameter and is only bound at module
    level under ``__main__`` — calling this from an importing module raises
    NameError. It also uses lowercase ``database.find`` while the rest of
    this file calls ``database.Find``; confirm which API is intended.
    The fetched ``quotes`` are currently unused (work in progress).
    """
    count = 0
    for instrument in instruments:
        count += 1
        #
        symbol = instrument["Symbol"]
        databaseName = "Factor"
        collectionName = "MonthlyReturn"
        filter = {"Symbol": symbol}
        quotes = database.find(databaseName, collectionName, beginDateTime=datetime1, endDateTime=datetime2, query=filter)


def CacheFactor(instruments, datetime1, datetime2):
    """Placeholder: factor caching is not implemented yet."""
    pass


def Cache_Dailybar_by_Monthly_Key(database, realtime, symbol, datetime1=None, datetime2=None, instrument_type="Stock"):
    """Cache one symbol's daily bars grouped into per-month hash keys.

    Each month becomes one hash named "<symbol>_<YYYYMM>" whose fields are
    the per-day keys "<symbol>_<date>".
    """
    run_time1 = datetime.datetime.now()
    df_bars = database.Get_Daily_Bar_DataFrame(symbol, instrument_type, datetime1=datetime1, datetime2=datetime2)
    run_time2 = datetime.datetime.now()
    running_time = (run_time2 - run_time1).total_seconds()
    print(running_time)

    # Month bounds for the optional range filter (YYYYMM encoded as an int).
    if datetime1:
        datetime1_year_month = datetime1.year * 100 + datetime1.month
    if datetime2:
        datetime2_year_month = datetime2.year * 100 + datetime2.month

    df_bars['YearMonth'] = df_bars['date'].map(lambda x: 100 * x.year + x.month)
    for current_year_month, df_bars_by_month in df_bars.groupby("YearMonth"):
        # BUG FIX: the original comparisons were inverted and skipped every
        # month INSIDE the requested range instead of outside it.
        if datetime1 and current_year_month < datetime1_year_month:
            continue
        if datetime2 and current_year_month > datetime2_year_month:
            continue

        monthly_key = symbol + "_" + str(current_year_month)
        data_dict = {}
        for index, row in df_bars_by_month.iterrows():
            # Document body plus its "<symbol>_<date>" key.
            document = GenearaCacheDocument(row)
            key = row["symbol"] + "_" + Gadget.ToDateString(row["date"])
            data_dict[key] = document

        realtime.SetManyHashDocuments(monthly_key, data_dict)


def Get_Dailybar_by_Monthly_Key(realtime, symbol, datetime1=None, datetime2=None, instrument_type="Stock"):
    """Load a symbol's daily bars back out of the monthly hash keys.

    Returns a DataFrame sorted ascending by "date". Returns an empty
    DataFrame when no month in the range is cached (the old code passed an
    empty list to pd.concat, which raises ValueError).
    """
    if datetime1 is None:
        datetime1 = datetime.datetime(2000, 1, 1)
    if datetime2 is None:
        datetime2 = datetime.datetime.now()

    dfs = []
    datetimes = Gadget.GenerateBeginDateofMonth(datetime1, datetime2, as_date=False)
    for dt in datetimes:
        year_month = dt.year * 100 + dt.month
        monthly_key = symbol + "_" + str(year_month)
        monthly_document_data = realtime.GetManyHashDocuments(monthly_key)
        if not monthly_document_data:
            continue
        values = list(monthly_document_data.values())
        dfs.append(Gadget.DocumentsToDataFrame(values))

    # Robustness: nothing cached for this symbol/range.
    if not dfs:
        return pd.DataFrame()

    df = pd.concat(dfs, axis=0)
    df.sort_values(by="date", ascending=True, inplace=True)
    return df


# First-level key is "<symbol>_<date>"; second-level keys are the bar fields.
def Cache_Dailybar_by_Daily_Key(database, realtime, symbol, datetime1=None, datetime2=None, instrument_type="Stock"):
    """Cache each of a symbol's daily bars as its own per-day hash."""
    df_bars = database.Get_Daily_Bar_DataFrame(symbol, instrument_type, datetime1=datetime1, datetime2=datetime2)
    # Robustness: the old code crashed on iloc[0] for symbols with no bars.
    if df_bars is None or len(df_bars) == 0:
        print("No daily bars to cache for", symbol)
        return

    i = 0
    for index, row in df_bars.iterrows():
        i += 1
        daily_key = symbol + "_" + Gadget.ToDateString(row["date"])
        document = GenearaCacheDocument(row)
        realtime.SetManyHashDocuments(daily_key, document)
        if i % 100 == 0:
            print("Cached", daily_key, "Count", i)

    print("Cached as Daily Key", symbol, "From", df_bars.iloc[0]["date"], "To", df_bars.iloc[-1]["date"], datetime.datetime.now())


# One key per symbol: each access parses the security's whole history.
# First-level key is the symbol; second-level keys are "<symbol>_<date>".
def Cache_Dailybar_by_Symbol_Key(database, realtime, symbol, datetime1=None, datetime2=None, instrument_type="Stock"):
    """Cache a symbol's full daily-bar history into one hash keyed by symbol."""
    df_bars = database.Get_Daily_Bar_DataFrame(symbol, instrument_type, datetime1=datetime1, datetime2=datetime2)
    # Robustness: the old code crashed on iloc[0] for symbols with no bars.
    if df_bars is None or len(df_bars) == 0:
        print("No daily bars to cache for", symbol)
        return

    data_dict = {}
    for index, row in df_bars.iterrows():
        daily_key = symbol + "_" + Gadget.ToDateString(row["date"])
        data_dict[daily_key] = GenearaCacheDocument(row)

    realtime.SetManyHashDocuments(symbol, data_dict)
    print("Cached as Symbol Key", symbol, "From", df_bars.iloc[0]["date"], "To", df_bars.iloc[-1]["date"])


def Cache_Total_Daily_Return(database, realtime, symbol, datetime1=None, datetime2=None, instrument_type="Stock"):
    """Cache a symbol's entire bar history as one dict and one list document.

    Writes "<symbol>_total_dict" (keyed by "<symbol>_<date>") and
    "<symbol>_total" (chronological list of cache documents).
    """
    df_bars = database.Get_Daily_Bar_DataFrame(symbol, instrument_type, datetime1=datetime1, datetime2=datetime2)
    # Robustness: the old code crashed on iloc[0] for symbols with no bars.
    if df_bars is None or len(df_bars) == 0:
        print("No daily bars to cache for", symbol)
        return

    data_dict = {}
    data_list = []
    for index, row in df_bars.iterrows():
        daily_key = symbol + "_" + Gadget.ToDateString(row["date"])
        document = GenearaCacheDocument(row)
        data_dict[daily_key] = document
        data_list.append(document)

    realtime.SetDocument(symbol + "_total_dict", data_dict)
    realtime.SetDocument(symbol + "_total", data_list)
    print("Cached Total", symbol, "From", df_bars.iloc[0]["date"], "To", df_bars.iloc[-1]["date"], datetime.datetime.now())


def Load_Multi_DailyBar_DataFrame_from_Redis(realtime, symbol_list, datetime1, datetime2):
    """Placeholder: multi-symbol DataFrame loading is not implemented yet."""
    pass


def TestPerformance_Daily_Key(database, realtime, include_hash_key=False):
    """Benchmark 10k random lookups against per-day keys ("<symbol>_<date>").

    With include_hash_key=True, fetch two individual hash fields per key;
    otherwise fetch the whole document. Prints the mean latency and a rough
    3-year / 100-security cost estimate.
    """
    instruments = database.Get_Instruments_DataFrame(instrument_type="Stock")
    symbols = instruments[:5]["symbol"].tolist()

    df_bars = database.Get_Daily_Bar_DataFrame("000001.SH", instrument_type="index")
    dates = df_bars["date"].tolist()

    timings = []
    for _ in range(10000):
        # Pick a random (symbol, date) pair; keep the randint call order.
        symbol = symbols[random.randint(0, len(symbols) - 1)]
        dt = dates[random.randint(0, len(dates) - 1)]
        daily_key = symbol + "_" + Gadget.ToDateString(dt)

        t0 = datetime.datetime.now()
        if include_hash_key:
            realtime.GetHashDocument(daily_key, "bclose")
            realtime.GetHashDocument(daily_key, "adjfactor")
        else:
            realtime.GetDocument(daily_key)
        t1 = datetime.datetime.now()
        timings.append((t1 - t0).total_seconds())

    mean_time = np.mean(timings)
    print("Average time ", np.mean(timings))
    print("Estimate 3Yr 100 Securities consume", mean_time * 3 * 245 * 100)


def TestPerformance_Symbol_Key(database, realtime, deserialize):
    """Benchmark 1k random per-day field reads from per-symbol hashes.

    Prints the mean/total latency and a rough 3-year / 100-security cost
    estimate.
    """
    instruments = database.Get_Instruments_DataFrame(instrument_type="Stock")
    symbols = instruments[:10]["symbol"].tolist()

    df_bars = database.Get_Daily_Bar_DataFrame("000001.SH", instrument_type="index")
    dates = df_bars["date"].tolist()

    timings = []
    for _ in range(1000):
        # Pick a random (symbol, date) pair; keep the randint call order.
        symbol = symbols[random.randint(0, len(symbols) - 1)]
        dt = dates[random.randint(0, len(dates) - 1)]
        daily_key = symbol + "_" + Gadget.ToDateString(dt)

        t0 = datetime.datetime.now()
        realtime.GetHashDocument(symbol, daily_key, deserialize)
        t1 = datetime.datetime.now()
        timings.append((t1 - t0).total_seconds())

    mean_time = np.mean(timings)
    print("Average time ", mean_time, " Total time", np.sum(timings))
    print("Estimate 3Yr 100 Securities consume", mean_time * 3 * 245 * 100)


def TestPerformance_Monthly_Key():
    """Placeholder: monthly-key benchmark is not implemented yet."""
    pass


def TestPerformance_Total_Bars(database, realtime, key_suffix="total"):
    #
    instruments = database.Get_Instruments_DataFrame(instrument_type="Stock")
    instruments = instruments[:10]
    symbol_lit = instruments["symbol"].tolist()

    total_time = []
    for i in range(100):
        #
        rad1 = random.randint(0, len(symbol_lit) - 1)
        symbol = symbol_lit[rad1]
        #
        symbol_key = symbol + "_" + key_suffix
        datetime1 = datetime.datetime.now()
        doc = realtime.GetDocument(symbol_key)
        # if doc:
        #     df = Gadget.DocumentsToDataFrame(doc)
        datetime2 = datetime.datetime.now()
        # print(datetime1)
        # print(datetime2)
        consume_time = (datetime2 - datetime1).total_seconds()
        total_time.append(consume_time)
    #
    mean_time = np.mean(total_time)
    print("Average time ", mean_time, " Total time", np.sum(total_time))


def Test_Cache_as_Monthly_Key():
    """Smoke-test the monthly-key cache round trip for one symbol."""
    # Portable path construction: the old "\..\Config\..." literal was
    # Windows-only and relied on unrecognized string escapes.
    pathfilename = os.path.join(os.getcwd(), "..", "Config", "config_develop.json")
    config = Config(pathfilename)
    database = config.DataBase("JDMySQL")
    realtime = config.RealTime(db=0)

    symbol = "000001.SZ"
    Cache_Dailybar_by_Monthly_Key(database, realtime, symbol, datetime1=None, datetime2=None, instrument_type="Stock")

    # Time the read-back of everything just cached.
    run_time1 = datetime.datetime.now()
    df = Get_Dailybar_by_Monthly_Key(realtime, symbol)
    run_time2 = datetime.datetime.now()
    running_time = (run_time2 - run_time1).total_seconds()
    print(running_time)


def Test_Cache_as_Symbol_Key(database, realtime):
    """Cache stocks at positions 2..9 using the symbol-key layout."""
    instruments = database.Get_Instruments_DataFrame(instrument_type="Stock")
    symbols = instruments[2:10]["symbol"].tolist()

    for count, symbol in enumerate(symbols, start=1):
        print(symbol, count)
        Cache_Dailybar_by_Symbol_Key(database, realtime, symbol, datetime1=None, datetime2=None, instrument_type="Stock")


def Test_Cache_as_Daily_Key(database, realtime):
    """Cache the first 10 stocks using the per-day-key layout."""
    instruments = database.Get_Instruments_DataFrame(instrument_type="Stock")
    symbols = instruments[0:10]["symbol"].tolist()

    for count, symbol in enumerate(symbols, start=1):
        print(symbol, count)
        Cache_Dailybar_by_Daily_Key(database, realtime, symbol, datetime1=None, datetime2=None, instrument_type="Stock")


def Test_Cache_Total(database, realtime):
    """Cache full-history documents for the first 100 index instruments."""
    instrument_type = "Index"
    instruments = database.Get_Instruments_DataFrame(instrument_type=instrument_type)
    symbols = instruments[0:100]["symbol"].tolist()

    for count, symbol in enumerate(symbols, start=1):
        print(symbol, count, datetime.datetime.now())
        Cache_Total_Daily_Return(database, realtime, symbol, datetime1=None, datetime2=None, instrument_type=instrument_type)


def Test_Length_vs_Speed(realtime):
    """Measure GetDocument latency versus document size (10/100/10000 fields).

    Writes 1000 dummy documents per size, then times 1000 random reads and
    prints mean/total latency for each size.
    """

    def _generate_document(length):
        # Dummy payload with `length` fields.
        return {i: "test" for i in range(length)}

    def _cache_data(realtime, document_length):
        # Fill keys test_key_0 .. test_key_999 with same-sized documents.
        for i in range(1000):
            realtime.SetDocument("test_key_" + str(i), _generate_document(document_length))

    def _test_performance(realtime):
        total_time = []
        for _ in range(1000):
            # BUG FIX: keys run 0..999; randint(0, 1000) is inclusive and
            # could probe the nonexistent key "test_key_1000".
            symbol_key = "test_key_" + str(random.randint(0, 999))
            t0 = datetime.datetime.now()
            realtime.GetDocument(symbol_key)
            t1 = datetime.datetime.now()
            total_time.append((t1 - t0).total_seconds())

        mean_time = np.mean(total_time)
        print("Average time ", mean_time, " Total time", np.sum(total_time))

    for length in (10, 100, 10000):
        _cache_data(realtime, length)
        _test_performance(realtime)


if __name__ == '__main__':

    # Portable config path: the old "\..\Config\..." literal was Windows-only
    # and relied on unrecognized string escapes.
    pathfilename = os.path.join(os.getcwd(), "..", "Config", "config_develop.json")
    config = Config(pathfilename)
    database = config.DataBase("JDMySQL")
    realtime = config.RealTime(db=0)

    # Test_Length_vs_Speed(realtime)

    # Test_Cache_as_Symbol_Key(database, realtime)
    # TestPerformance_Symbol_Key(database, realtime, deserialize=True)
    # TestPerformance_Symbol_Key(database, realtime, deserialize=False)

    # Test_Cache_as_Daily_Key(database, realtime)
    # TestPerformance_Daily_Key(database, realtime, include_hash_key=True)
    # TestPerformance_Daily_Key(database, realtime, include_hash_key=False)

    # Test_Cache_Total(database, realtime)
    TestPerformance_Total_Bars(database, realtime, key_suffix="total")
    # TestPerformance_Total_Bars(database, realtime, key_suffix="total_dict")

    # datetime2 = datetime.datetime.now()
    # Automatic_CacheDailyBar(database, realtime, datetime2, start_index=0)

    # --- Cold start: cache everything ---
    # realtime.Clear()
    # CacheDailyBar(database, realtime, asHash=True, startBatch=68, instrumentType="Stock")
    # CacheDailyBar(database, realtime, asHash=True, startBatch=0, instrumentType="Index")

    # --- Cache a partial range ---
    datetime1 = datetime.datetime(2020, 2, 14)
    datetime2 = datetime.datetime(2020, 3, 1)
    # datetime1 = None
    # datetime2 = None
    # CacheDailyBar(database, realtime, datetime1=datetime1, datetime2=datetime2, asHash=True, startBatch=0, instrumentType="Stock") #53
    # CacheDailyBar(database, realtime, datetime1=datetime1, datetime2=datetime2, asHash=True, startBatch=0, instrumentType="Index")

    # TestPerformance(instruments, stdDateTime1, stdDateTime2)

    # ---Test---

    # --- Caching option bars ---
    # datetime1 = datetime.datetime(2015,1,1)
    # stdDateTime1 = Gadget.ToUTCDateTime(datetime1)
    # datetime2 = datetime.datetime(2019,1,2)
    # stdDateTime2 = Gadget.ToUTCDateTime(datetime2)
    #
    # instruments = []
    # instruments = database.Find("Instruments", "Option", filter={"Code":"510050.SH"})
    # underly = {"Symbol":"510050.SH", "InstrumentType":"Stock"}
    # instruments.append(underly)
    #
    # CacheDailyBar(instruments, stdDateTime1, stdDateTime2, asHash=True)