from utils import df_into_db, read_sql
import json
import pandas as pd
import datetime

def cal_gap_ratio(price_df):
    """Mean absolute ratio between each bar's close and the next bar's open.

    price_df must contain "open" and "close" columns and be ordered by time.
    The last row has no successor (shift(-1) yields NaN there), and the
    NaN-skipping mean ignores it.
    """
    shifted_open = price_df["open"].shift(-1)
    return (shifted_open / price_df["close"] - 1).abs().mean()


# --- Load hourly BTC spot candles and the list of missing timestamps. ---
df = read_sql(
    "select * from k_line where symbol='BTC' and frequency='1h' and `type` = 'spot' ",
    db_name="all_history_ohlcvm_coinmarketcap",
)
df["date"] = df["datetime"].dt.date

# Split by datasource: binance is analyzed for gaps, okx is the fill candidate.
binance_df = df[df["datasource"] == "binance"]
okx_df = df[df["datasource"] == "okx"]

# Missing hourly timestamps, pre-computed into miss_time.json.
with open("miss_time.json", "r") as fh:
    payload = json.load(fh)
miss_df = pd.DataFrame(payload["miss_time"], columns=["time"])
miss_df["datetime"] = pd.to_datetime(miss_df["time"])
miss_df["date"] = miss_df["datetime"].dt.date

# Baseline close->next-open gap over binance bars, in basis points (万分之).
normal_gap_ratio = cal_gap_ratio(binance_df.copy()) * 10000

# Assign consecutive missing hours to the same group: a new group starts
# whenever the step from the previous missing timestamp is not exactly 1h.
# The first row's diff is NaT (!= 1h), so ids start at 1 — same numbering as
# the original row-by-row loop, but vectorized and, unlike the loop, it does
# not crash when miss_df is empty.
hour_step = pd.Timedelta(hours=1)
miss_df["grp"] = (miss_df["datetime"].diff() != hour_step).cumsum()
old_gap_list = []      # |gap| across each missing span using binance data only
new_gap_list = []      # mean |gap| at the two seams after filling with okx data
concat_df_list = []    # per-span stitched frames for manual inspection
for grp_id, group in miss_df.groupby("grp"):
    # Sort a copy; in-place sorting of a groupby slice triggers
    # SettingWithCopyWarning without changing the result.
    group = group.sort_values(by="datetime", ascending=True)
    first_miss_time = group.iloc[0]["datetime"]
    last_miss_time = group.iloc[-1]["datetime"]
    # Last binance bar at/before the span and first two bars at/after it.
    before_miss_df = binance_df.loc[binance_df.datetime <= first_miss_time][-1:]
    after_miss_df = binance_df.loc[binance_df.datetime >= last_miss_time][:2]
    # Candidate okx bars covering the span plus one trailing hour.
    last_miss_time_fix = last_miss_time + datetime.timedelta(hours=1)
    okx_tmp_df = okx_df[(okx_df["datetime"] >= first_miss_time) & (okx_df["datetime"] <= last_miss_time_fix)]
    okx_tmp_df = okx_tmp_df.sort_values(by="datetime", ascending=True)
    if before_miss_df.empty or after_miss_df.empty or okx_tmp_df.empty:
        # Without this guard the iloc[0]/iloc[-1] lookups below raise an
        # opaque IndexError when either feed has no data around the span.
        print(f"skip grp {grp_id}: no surrounding data for {first_miss_time} ~ {last_miss_time}")
        continue
    prev_close = before_miss_df.iloc[-1]["close"]
    next_open = after_miss_df.iloc[0]["open"]
    print(f"prev_close:{prev_close}, next_open:{next_open}")
    # Gap left by the missing span when only binance data is used.
    old_gap = next_open / prev_close - 1
    old_gap_list.append(abs(old_gap))
    concat_df = pd.concat([before_miss_df, okx_tmp_df, after_miss_df], ignore_index=True)
    concat_df = concat_df.sort_values(by="datetime", ascending=True)
    concat_df = concat_df[["datetime", "open", "close", "datasource"]]
    concat_df_list.append(concat_df)
    # Seam gaps after filling: binance close -> first okx open, and last okx
    # close -> binance open. NOTE(review): the okx window extends one hour
    # past the span, so the second seam compares bars at the same timestamp —
    # confirm this overlap is intended.
    new_gap1 = okx_tmp_df.iloc[0]["open"] / prev_close - 1
    new_gap2 = next_open / okx_tmp_df.iloc[-1]["close"] - 1
    new_gap_list.append((abs(new_gap1) + abs(new_gap2)) / 2)
def _mean_bp(ratios):
    # Mean of |gap| ratios scaled to basis points (万分之); NaN when empty.
    return pd.Series(ratios).mean() * 10000


old_gap_ratio = _mean_bp(old_gap_list)
new_gap_ratio = _mean_bp(new_gap_list)
print("连续的gap_ratio:(万分之)", normal_gap_ratio)
print("填充前的gap_ratio:(万分之)", old_gap_ratio)
print("填充后的gap_ratio:(万分之)", new_gap_ratio)

# Dump the stitched frames around every gap for manual inspection.
concat_df_all = pd.concat(concat_df_list)
concat_df_all.to_csv("拼接处的数据3.csv", index=False)