# Copyright (c) 2025 User
# Use AkShare to fetch CN A-share daily data and dump to Qlib binary format

import os
import sys
import time
from pathlib import Path
from typing import List, Optional

import pandas as pd

# Ensure we can import dump_bin from the sibling scripts directory when running from repo root
_THIS_DIR = Path(__file__).resolve().parent
_REPO_ROOT = _THIS_DIR.parent
_SCRIPTS_DIR = _REPO_ROOT.joinpath("scripts")
if str(_SCRIPTS_DIR) not in sys.path:
    sys.path.insert(0, str(_SCRIPTS_DIR))

# Lazy import to allow running without AkShare installed until needed;
# downstream code checks `ak is None` before using it.
try:
    import akshare as ak
except Exception:
    ak = None

# Qlib's CSV-to-binary dumper (scripts/dump_bin.py), reachable via the sys.path tweak above
from dump_bin import DumpDataAll

# Fallback universe (ak-style symbols: exchange prefix + 6-digit code),
# used when AkShare is unavailable or the index-constituent lookup fails.
DEFAULT_SYMBOLS = [
    "sz000001",  # Ping An Bank
    "sh600519",  # Kweichow Moutai
    "sh601318",  # Ping An Insurance
    "sz000858",  # Wuliangye
    "sh600036",  # China Merchants Bank
]


def normalize_symbol_to_ak(symbol: str) -> str:
    """Normalize a stock symbol to AkShare style, e.g. ``sz000001``.

    Accepted input styles:
      * ``000001.SZ`` (code.exchange)        -> ``sz000001``
      * ``SZ000001`` / ``sz000001`` (prefix) -> ``sz000001``
      * anything else is assumed to already be ak-style and is lower-cased
    """
    symbol = symbol.strip()
    if "." in symbol:  # e.g. 000001.SZ -> sz000001
        # maxsplit=1 keeps this from raising ValueError on unexpected extra dots
        code, exch = symbol.split(".", 1)
        return (exch + code).lower()
    if symbol[:2].upper() in {"SZ", "SH"}:
        return symbol[:2].lower() + symbol[2:]
    # assume already ak style like sz000001
    return symbol.lower()


def normalize_symbol_to_qlib(symbol: str) -> str:
    """Normalize a stock symbol to Qlib style, e.g. ``SZ000001``.

    Handles ``000001.SZ``-style and ``sz000001``-style inputs; other
    inputs are simply upper-cased.
    """
    cleaned = symbol.strip().lower()
    if "." in cleaned:  # 000001.SZ -> SZ000001
        code, exch = cleaned.split(".")
        return exch.upper() + code
    prefix, rest = cleaned[:2], cleaned[2:]
    if prefix in ("sz", "sh"):
        return prefix.upper() + rest
    # default
    return cleaned.upper()


def get_csi300_symbols(limit: Optional[int] = None) -> List[str]:
    """Fetch CSI300 constituents via AkShare; return ak-style symbols like sz000001.

    Falls back to ``DEFAULT_SYMBOLS`` when AkShare is missing or its APIs
    fail.  ``limit`` truncates the result; ``None`` means no truncation
    (note ``lst[:None]`` is the full list, and ``limit=0`` correctly
    yields an empty list).  Always returns a fresh list, never an alias
    of the module-level fallback.
    """
    if ak is None:
        # AkShare not installed -> best-effort fallback (slice copies)
        return DEFAULT_SYMBOLS[:limit]
    try:
        # Primary API: CSI300 components looked up by Chinese index name
        df = ak.stock_zh_index_component_df(symbol="沪深300")
        # Fallback API in case the one above changes across AkShare versions
        if df is None or df.empty:
            df = ak.index_stock_cons(symbol="000300")
        codes: List[str] = []
        if df is not None and not df.empty:
            # Handle possible column names across AkShare versions
            possible_code_cols = [
                "代码", "con_code", "品种代码", "股票代码", "component_code",
            ]
            code_col = next((c for c in possible_code_cols if c in df.columns), None)
            if code_col is None:
                raise RuntimeError("Unable to find code column in AkShare return")
            for v in df[code_col].astype(str).tolist():
                v = v.strip()
                if "." in v:  # 000001.SZ style
                    codes.append(normalize_symbol_to_ak(v))
                elif v.startswith("6"):
                    # Pure code: 6xxxxx listings trade on Shanghai
                    codes.append("sh" + v)
                else:
                    codes.append("sz" + v)
        if not codes:
            # copy so callers mutating the result cannot corrupt DEFAULT_SYMBOLS
            codes = list(DEFAULT_SYMBOLS)
        return codes[:limit]
    except Exception:
        # Network/API failure -> best-effort fallback
        return DEFAULT_SYMBOLS[:limit]


def fetch_one_daily(ak_symbol: str, start: Optional[str] = None, end: Optional[str] = None) -> pd.DataFrame:
    """Fetch daily OHLCV bars for one symbol via AkShare (qfq-adjusted).

    Parameters
    ----------
    ak_symbol : AkShare-style symbol, e.g. ``sz000001``.
    start, end : optional inclusive date bounds, ``YYYY-MM-DD``.

    Returns
    -------
    DataFrame restricted to {date, open, high, low, close, volume}
    (an empty DataFrame when upstream returns nothing).

    Raises
    ------
    RuntimeError when AkShare is not installed.
    """
    if ak is None:
        raise RuntimeError("AkShare not installed; please install akshare first")
    # prefer qfq (forward-adjusted) prices for series continuity
    df = ak.stock_zh_a_daily(symbol=ak_symbol, adjust="qfq")
    if df is None or df.empty:
        return pd.DataFrame()
    # Some AkShare versions name the volume column 'vol'; only rename when
    # 'volume' is absent to avoid producing duplicate columns
    if "vol" in df.columns and "volume" not in df.columns:
        df = df.rename(columns={"vol": "volume"})
    # Keep only the columns the Qlib dump needs
    keep_cols = [c for c in ["date", "open", "high", "low", "close", "volume"] if c in df.columns]
    df = df.loc[:, keep_cols].copy()
    if "date" in df.columns:
        # Ensure Timestamp dtype before comparing; the date filters live inside
        # this guard so a missing 'date' column cannot raise KeyError
        df["date"] = pd.to_datetime(df["date"])
        if start is not None:
            df = df[df["date"] >= pd.Timestamp(start)]
        if end is not None:
            df = df[df["date"] <= pd.Timestamp(end)]
    return df


def dump_to_qlib(tmp_csv_dir: Path, qlib_dir: Path):
    """Convert the per-symbol CSVs in *tmp_csv_dir* into Qlib's binary
    layout under *qlib_dir*, using the bundled DumpDataAll dumper."""
    DumpDataAll(
        csv_path=str(tmp_csv_dir),
        qlib_dir=str(qlib_dir),
        freq="day",
        date_field_name="date",
        symbol_field_name="symbol",
        file_suffix=".csv",
        include_fields="open,high,low,close,volume",
    ).dump()


def main(target_dir: str, start: str = "2018-01-01", end: Optional[str] = None, limit: int = 50, use_index: bool = True, symbols: Optional[List[str]] = None, include_benchmark: bool = False):
    """Fetch daily bars via AkShare and dump them into a Qlib provider dir.

    Parameters
    ----------
    target_dir : destination Qlib provider directory (created if missing).
    start, end : inclusive date range ``YYYY-MM-DD``; ``end=None`` means "to latest".
    limit : max number of symbols when resolving the index/default universe.
    use_index : when no explicit ``symbols``, use CSI300 constituents.
    symbols : explicit universe; a list or a comma-separated string.
    include_benchmark : also fetch the CSI300 index (sh000300) for backtests.

    Raises
    ------
    RuntimeError when no symbol could be fetched at all.
    """
    t0 = time.time()
    target = Path(target_dir).expanduser().resolve()
    # intermediate CSVs live next to the target so dump_bin can pick them up
    tmp_dir = target.parent.joinpath("_ak_tmp_csv")
    tmp_dir.mkdir(parents=True, exist_ok=True)
    target.mkdir(parents=True, exist_ok=True)

    # determine symbol list
    if symbols:
        # allow comma-separated string passed straight from the CLI
        if isinstance(symbols, str):
            raw_list = [s.strip() for s in symbols.split(",") if s.strip()]
        else:
            raw_list = list(symbols)
        symbols = [normalize_symbol_to_ak(s) for s in raw_list]
    elif use_index:
        # copy defensively: we may append below, and the helper could hand
        # back a shared/module-level fallback list
        symbols = list(get_csi300_symbols(limit=limit))
    else:
        symbols = DEFAULT_SYMBOLS[:limit]  # slice already copies

    if include_benchmark and "sh000300" not in symbols:
        # CSI300 index itself, used as the backtest benchmark
        symbols.append("sh000300")

    print(f"[AkShare] Fetching {len(symbols)} symbols...")

    saved = 0
    for sym in symbols:
        try:
            ak_sym = normalize_symbol_to_ak(sym)  # idempotent for already-normalized input
            qlib_sym = normalize_symbol_to_qlib(sym)
            df = fetch_one_daily(ak_sym, start=start, end=end)
            if df is None or df.empty:
                print(f"[Skip] No data for {sym}")
                continue
            df["symbol"] = qlib_sym
            # dump_bin expects plain YYYY-MM-DD strings in the date column
            df["date"] = pd.to_datetime(df["date"]).dt.strftime("%Y-%m-%d")
            # Write CSV named by ak symbol for predictable mapping
            out_fp = tmp_dir.joinpath(f"{ak_sym}.csv")
            df.to_csv(out_fp, index=False)
            saved += 1
        except Exception as e:
            # best-effort per symbol: log the failure and continue with the rest
            print(f"[Error] {sym}: {e}")
    if saved == 0:
        raise RuntimeError("No CSVs saved; AkShare fetch may have failed.")

    print(f"[Dump] Converting {saved} CSVs into Qlib binary at {target}")
    dump_to_qlib(tmp_csv_dir=tmp_dir, qlib_dir=target)

    print(f"[Done] Elapsed: {time.time()-t0:.1f}s; provider_uri={target}")


if __name__ == "__main__":
    import argparse

    # Command-line entry point: parse options and forward them to main().
    cli = argparse.ArgumentParser(description="Fetch AkShare data and dump to Qlib format")
    cli.add_argument("--target_dir", type=str, required=True, help="Target Qlib provider directory")
    cli.add_argument("--start", type=str, default="2018-01-01", help="Start date YYYY-MM-DD")
    cli.add_argument("--end", type=str, default=None, help="End date YYYY-MM-DD")
    cli.add_argument("--limit", type=int, default=50, help="Limit number of symbols")
    cli.add_argument("--use_index", action="store_true", help="Fetch CSI300 constituents via AkShare")
    cli.add_argument("--symbols", type=str, default=None, help="Comma-separated symbols like SH600104,SZ000001")
    cli.add_argument("--include_benchmark", action="store_true", help="Include CSI300 index (SH000300) for backtest")
    ns = cli.parse_args()

    main(
        target_dir=ns.target_dir,
        start=ns.start,
        end=ns.end,
        limit=ns.limit,
        use_index=ns.use_index,
        symbols=ns.symbols,
        include_benchmark=ns.include_benchmark,
    )