import os
import pandas as pd
import numpy as np
import akshare as ak
from datetime import datetime

import qlib
from qlib.data.dataset import DatasetH
from qlib.contrib.data.handler import Alpha360
from qlib.contrib.model.pytorch_gru import GRU
from qlib.workflow import R

# Local qlib provider directory holding pre-dumped akshare daily bars.
# NOTE(review): r"..." combined with doubled backslashes produces literal
# "\\" separators in the path; Windows tolerates repeated separators, but
# either r"h:\2025..." or a non-raw "h:\\2025..." was probably intended —
# confirm against how these paths were created.
PROVIDER_URI = r"h:\\2025github_project\\qlib-main\\akshare_cn_data"
INSTRUMENT = "SH600104"  # SAIC Motor on the Shanghai exchange
START_TIME = "2020-01-01"  # start of the historical data window
END_TIME = datetime.today().strftime("%Y-%m-%d")  # today (local time), YYYY-MM-DD
# Output CSV for the 10-day price projection (see main()).
REPORT_PATH = r"h:\\2025github_project\\qlib-main\\examples\\reports\\saic_next10_gru.csv"


def init_qlib():
    """Initialize qlib against the local provider data, China region."""
    qlib.init(provider_uri=PROVIDER_URI, region="cn")


def build_dataset():
    """Build a qlib ``DatasetH`` of Alpha360 features for ``INSTRUMENT``.

    Segments, all relative to ``END_TIME``:
      * train: ``START_TIME`` .. END_TIME - 365 days
      * valid: END_TIME - 365 days .. END_TIME - 30 days
      * test : END_TIME - 30 days .. END_TIME

    Returns:
        DatasetH: dataset wired to an Alpha360 handler config dict.
    """
    end_ts = pd.Timestamp(END_TIME)
    train_end = (end_ts - pd.Timedelta(days=365)).strftime("%Y-%m-%d")
    valid_end = (end_ts - pd.Timedelta(days=30)).strftime("%Y-%m-%d")

    handler_kwargs = {
        "start_time": START_TIME,
        "end_time": END_TIME,
        "fit_start_time": START_TIME,
        # Fit normalization/processor statistics strictly on the training
        # window.  The previous value (END_TIME - 300 days) extended past the
        # train/valid boundary (END_TIME - 365 days), leaking validation-period
        # data into the processor fit.
        "fit_end_time": train_end,
        "instruments": [INSTRUMENT],
        "infer_processors": [
            {"class": "RobustZScoreNorm", "kwargs": {"fields_group": "feature", "clip_outlier": True}},
            {"class": "Fillna", "kwargs": {"fields_group": "feature"}},
        ],
        "learn_processors": [
            {"class": "DropnaLabel"},
        ],
        # Label: next-day simple return.
        "label": ["Ref($close, -1) / $close - 1"],
    }
    handler = {
        "class": "Alpha360",
        "module_path": "qlib.contrib.data.handler",
        "kwargs": handler_kwargs,
    }

    segments = {
        "train": [START_TIME, train_end],
        "valid": [train_end, valid_end],
        "test": [valid_end, END_TIME],
    }

    return DatasetH(handler=handler, segments=segments)


def train_gru(dataset):
    """Fit a GRU forecaster on the dataset's learn segments.

    Training is recorded under the "saic_gru_forecast" qlib experiment.

    Args:
        dataset: the ``DatasetH`` to fit on.

    Returns:
        The trained GRU model instance.
    """
    hyperparams = {
        "d_feat": 6,
        "hidden_size": 64,
        "num_layers": 2,
        "dropout": 0.1,
        "n_epochs": 100,
        "lr": 1e-3,
        "batch_size": 128,
        "metric": "loss",
        "loss": "mse",
        "GPU": 0,  # device index; qlib falls back per its own GPU handling
    }
    gru_model = GRU(**hyperparams)
    with R.start(experiment_name="saic_gru_forecast"):
        gru_model.fit(dataset)
    return gru_model


def get_last_pred(model, dataset):
    """Return the most recent test-segment prediction and its index label.

    Args:
        model: fitted model exposing ``predict(dataset, segment=...)``.
        dataset: dataset passed through to ``model.predict``.

    Returns:
        tuple: ``(value, index_label)`` for the last test prediction,
        with the value coerced to a plain ``float``.
    """
    test_preds = model.predict(dataset, segment="test")
    latest_label = test_preds.index[-1]
    latest_value = float(test_preds.iloc[-1])
    return latest_value, latest_label


def next_business_days(last_date, n=10):
    """Return the next `n` weekdays strictly after `last_date`.

    Weekends are skipped; exchange holidays are NOT.

    Args:
        last_date: anything ``pd.Timestamp`` accepts (string, date, ...).
        n: number of business days to generate.

    Returns:
        list[str]: dates formatted as ``YYYY-MM-DD``.
    """
    first_candidate = pd.Timestamp(last_date) + pd.Timedelta(days=1)
    return [d.strftime("%Y-%m-%d") for d in pd.bdate_range(start=first_candidate, periods=n)]


def main():
    """Train the GRU and project closes for the next 10 business days.

    Pipeline: init qlib -> build dataset -> train -> take the model's last
    test-segment prediction as an assumed constant daily return, clip it by a
    volatility-derived cap, damp it by half, then compound it forward from the
    latest known close.  The projection is written to ``REPORT_PATH``.
    """
    init_qlib()
    dataset = build_dataset()
    model = train_gru(dataset)

    r1, last_feat_date = get_last_pred(model, dataset)

    # Dynamic clipping based on recent realized volatility of daily returns.
    # Fetch the same instrument as the qlib dataset; previously the akshare
    # symbol was hard-coded as "sh600104", which would silently diverge if
    # INSTRUMENT were ever changed.  akshare expects a lowercase symbol.
    df = ak.stock_zh_a_daily(symbol=INSTRUMENT.lower(), adjust="qfq").sort_values("date")
    df["ret"] = df["close"].pct_change()
    # 60-day trailing stdev when enough history exists, else full-sample.
    vol = float(df["ret"].tail(60).std()) if len(df) >= 60 else float(df["ret"].std())
    base_cap = 0.03  # hard ceiling on the assumed daily move (3%)
    dyn_cap = min(base_cap, 1.5 * vol) if not np.isnan(vol) else base_cap
    r1 = np.nan_to_num(r1, nan=0.0)
    # Clip to the cap, then damp by half: a single-step model output is noisy
    # and compounding it for 10 days amplifies any error.
    r1 = max(min(r1, dyn_cap), -dyn_cap) * 0.5

    last_row = df.iloc[-1]
    last_close = float(last_row["close"])  # last known close

    future_dates = next_business_days(df.iloc[-1]["date"], n=10)
    prices = []
    cur_close = last_close
    for _ in range(10):
        cur_close = cur_close * (1.0 + r1)
        prices.append(cur_close)

    out = pd.DataFrame({"date": future_dates, "pred_close": prices, "assumed_daily_return": [r1] * 10, "vol_cap": [dyn_cap] * 10})
    os.makedirs(os.path.dirname(REPORT_PATH), exist_ok=True)
    out.to_csv(REPORT_PATH, index=False)
    print(out.tail(10).to_string(index=False))
    print(f"Saved to {REPORT_PATH}")


# Script entry point: run the pipeline only when executed directly.
if __name__ == "__main__":
    main()