import xgboost as xgb
import pandas as pd

import os

# Windows paths as raw strings: in the original plain strings, sequences such
# as "\d" are invalid escape sequences (a SyntaxWarning on modern Python), and
# a single missed double-backslash would silently inject a control character
# (e.g. "\f" is a form feed). Raw strings keep every backslash literal while
# producing exactly the same path values.
raw_path = r"D:\daily work\ml\raw"

# Factor dataset; downstream code expects target columns "y1" and "y2"
# alongside the feature columns.
data = pd.read_csv(r"D:\daily work\ml\factors\factor_data.csv")

# Hyper-parameters for the gradient-boosted tree regressor.
params = {
    "booster": "gbtree",
    "n_estimators": 300,
    "max_depth": 3,
    "learning_rate": 0.3,
    "objective": "reg:squarederror",
    "eval_metric": "rmse",
}

model = xgb.XGBRegressor(**params)

# Per-fold prediction frames, concatenated after the walk-forward loop.
results = []

# Walk-forward bookkeeping: positions are expressed in units of `step` rows.
start_n = 0
step = 10 ** 5


for end_n in range(7, 12):
    # Rolling window: train on rows [start_n*step, end_n*step) — a fixed-size
    # window of 7 chunks that slides forward one chunk per fold.
    train_data = data[start_n * step: end_n * step]

    # Test on the chunk immediately after the training window; the final fold
    # takes every remaining row so no tail data is dropped.
    if end_n == 11:
        test_data = data[end_n * step:]
    else:
        test_data = data[end_n * step: (end_n + 1) * step]

    # "y1" is the prediction target; "y2" is an alternative target that must
    # also be excluded from the feature matrix.
    train_y = train_data["y1"]
    train_x = train_data.drop(["y1", "y2"], axis=1)
    test_x = test_data.drop(["y1", "y2"], axis=1)

    # fit() retrains from scratch on every call, so reusing `model` across
    # folds does not leak state between windows.
    model.fit(train_x, train_y)
    pred_y = model.predict(test_x)

    # Explicit .copy() fixes pandas' SettingWithCopyWarning: the original
    # assigned a new column to a frame derived from a slice of `data`.
    result_df = test_data.loc[:, ["y1"]].copy()
    result_df["pred_y"] = pred_y
    results.append(result_df)

    # Per-fold out-of-sample correlation between target and prediction.
    corr = result_df["y1"].corr(result_df["pred_y"])
    print(start_n, end_n, corr)
    start_n += 1

# Pool every out-of-sample fold and report the overall correlation between
# the realised target and the model's predictions.
total_df = pd.concat(results)
overall = total_df["y1"].corr(total_df["pred_y"])
print("final", overall)