import os
import sys
import pandas as pd
import numpy as np
import akshare as ak
from datetime import datetime

import qlib
from qlib.data.dataset import DatasetH
from qlib.contrib.data.handler import Alpha360
from qlib.data.dataset.handler import DataHandlerLP
from qlib.contrib.model.pytorch_lstm import LSTM

# Local qlib data directory (converted from akshare daily bars).
# NOTE(review): raw string + doubled backslashes yields literal "\\" separators;
# Windows tolerates repeated separators, but r"h:\..." with single backslashes
# (or pathlib) is the conventional form — confirm the path resolves as intended.
PROVIDER_URI = r"h:\\2025github_project\\qlib-main\\akshare_cn_data"
# Qlib instrument code for Shanghai listing 600104 (SAIC, per the report filename).
INSTRUMENT = "SH600104"
# Data window: fixed start through the script's run date.
START_TIME = "2020-01-01"
END_TIME = datetime.today().strftime("%Y-%m-%d")
# Destination CSV for the 10-day price projection.
REPORT_PATH = r"h:\\2025github_project\\qlib-main\\examples\\reports\\saic_next10_lstm.csv"


def init_qlib():
    """Initialize qlib against the local provider data with CN-region conventions."""
    qlib.init(provider_uri=PROVIDER_URI, region="cn")


def build_dataset():
    """Build an Alpha360 ``DatasetH`` for the single configured instrument.

    Segments (all relative to END_TIME):
      train = [START_TIME, end - 365d], valid = [end - 365d, end - 30d],
      test = [end - 30d, END_TIME].

    Returns:
        DatasetH: dataset wrapping an Alpha360 handler with the segments above.

    Fix: ``fit_end_time`` now equals the train-segment end.  The original used
    ``end - 300d`` while training ended at ``end - 365d``, so RobustZScoreNorm
    fitted its normalization statistics on ~65 days of validation data —
    look-ahead leakage into the inference processors.
    """
    end_ts = pd.Timestamp(END_TIME)
    train_end = (end_ts - pd.Timedelta(days=365)).strftime("%Y-%m-%d")
    valid_end = (end_ts - pd.Timedelta(days=30)).strftime("%Y-%m-%d")

    handler_kwargs = {
        "start_time": START_TIME,
        "end_time": END_TIME,
        # Processor statistics (z-score center/scale) are fitted on this window
        # only; keep it inside the train segment to avoid leakage.
        "fit_start_time": START_TIME,
        "fit_end_time": train_end,
        "instruments": [INSTRUMENT],
        "infer_processors": [
            {"class": "RobustZScoreNorm", "kwargs": {"fields_group": "feature", "clip_outlier": True}},
            {"class": "Fillna", "kwargs": {"fields_group": "feature"}},
        ],
        "learn_processors": [
            {"class": "DropnaLabel"},
        ],
        # Next-day simple return as the regression label.
        "label": ["Ref($close, -1) / $close - 1"],
    }
    handler = {
        "class": "Alpha360",
        "module_path": "qlib.contrib.data.handler",
        "kwargs": handler_kwargs,
    }

    segments = {
        "train": [START_TIME, train_end],
        "valid": [train_end, valid_end],
        "test": [valid_end, END_TIME],
    }

    return DatasetH(handler=handler, segments=segments)


def train_lstm(dataset):
    """Construct a qlib LSTM regressor with fixed hyper-parameters and fit it.

    Args:
        dataset: a ``DatasetH`` providing train/valid segments for ``fit``.

    Returns:
        The fitted LSTM model.
    """
    hyper_params = dict(
        d_feat=6,
        hidden_size=64,
        num_layers=2,
        dropout=0.0,
        n_epochs=50,
        lr=1e-3,
        batch_size=512,
        metric="loss",
        loss="mse",
        GPU=0,
    )
    lstm = LSTM(**hyper_params)
    lstm.fit(dataset)
    return lstm


def get_last_pred(model, dataset):
    """Predict on the test segment and return the latest prediction.

    Args:
        model: fitted model exposing ``predict(dataset, segment=...)``.
        dataset: dataset passed through to ``predict``.

    Returns:
        Tuple of (last predicted value as float, its index entry — the most
        recent feature date in the test segment).
    """
    predictions = model.predict(dataset, segment="test")
    latest_value = float(predictions.iloc[-1])
    latest_index = predictions.index[-1]
    return latest_value, latest_index


def next_business_days(last_date, n=10):
    """Return the next *n* weekdays strictly after ``last_date``.

    Weekdays (Mon-Fri) approximate trading days; exchange holidays are
    not excluded.

    Args:
        last_date: anything ``pd.Timestamp`` accepts (str, date, Timestamp).
        n: how many future weekdays to produce.

    Returns:
        List of ``YYYY-MM-DD`` strings.
    """
    first_candidate = pd.Timestamp(last_date) + pd.Timedelta(days=1)
    # bdate_range starts at the first business day on/after `first_candidate`,
    # matching the original skip-weekends loop exactly.
    return [day.strftime("%Y-%m-%d") for day in pd.bdate_range(start=first_candidate, periods=n)]


def main():
    """Script entry point: train the LSTM, estimate the next-day return, and
    project 10 weekdays of prices from the latest close, saving a CSV report."""
    init_qlib()
    dataset = build_dataset()
    model = train_lstm(dataset)

    # Most recent predicted next-day return from the test segment.
    predicted_return, _ = get_last_pred(model, dataset)

    # Risk-aware adjustment: replace NaN with 0, clip to [-3%, 3%], damp by 50%
    # to avoid unrealistic compounded paths.
    predicted_return = np.nan_to_num(predicted_return, nan=0.0)
    predicted_return = max(min(predicted_return, 0.03), -0.03) * 0.5

    # Latest forward-adjusted close from akshare.
    history = ak.stock_zh_a_daily(symbol="sh600104", adjust="qfq").sort_values("date")
    latest = history.iloc[-1]
    latest_close = float(latest["close"])  # last known close

    # Compound the adjusted daily return forward over the next 10 weekdays.
    future_dates = next_business_days(latest["date"], n=10)
    projected = []
    running = latest_close
    for _ in range(10):
        running = running * (1.0 + predicted_return)
        projected.append(running)

    out = pd.DataFrame({"date": future_dates, "pred_close": projected, "assumed_daily_return": [predicted_return] * 10})
    os.makedirs(os.path.dirname(REPORT_PATH), exist_ok=True)
    out.to_csv(REPORT_PATH, index=False)
    print(out.tail(10).to_string(index=False))
    print(f"Saved to {REPORT_PATH}")


if __name__ == "__main__":
    main()