import akshare as ak

from util import *


@lru_cache()
def fetch_fund_name() -> Dict[str, str]:
    """Return a mapping from ETF fund code to fund name.

    Fetched once from akshare (THS spot table) and memoized for the
    lifetime of the process.
    """
    spot = ak.fund.fund_etf_ths.fund_etf_spot_ths()
    return dict(zip(spot["基金代码"], spot["基金名称"]))


class FundDaily(pd.DataFrame):
    column_types: Dict[str, str] = {
        "time": "datetime64[ns, Asia/Shanghai]",
        "timestamp": "int64",  # seconds
        "open": "float64",
        "high": "float64",
        "low": "float64",
        "close": "float64",
        "volume": "int64",
        "turnover": "float64",
        "code": "int64",
        "name": "object",
    }

    @classmethod
    def default(cls) -> pd.DataFrame:
        return pd.DataFrame({col: pd.Series(dtype=dtype) for col, dtype in cls.column_types.items()})

    @classmethod
    def validate(cls, data: pd.DataFrame):
        for col, dtype in cls.column_types.items():
            dtype_ = data[col].dtype
            assert dtype_ == dtype, f"table column {col}: invalid dtype {dtype_}"

    @classmethod
    def save_offline(cls, code: int, data: pd.DataFrame):
        """
        save daily dataframe to the disk
        `data` is all of `code` in `month`
        """
        if not os.path.exists(ASHARE_OFFLINE_DIR):
            os.makedirs(ASHARE_OFFLINE_DIR)
        full_path = os.path.join(ASHARE_OFFLINE_DIR, f"etf_{code}.csv")
        if not os.path.exists(full_path):
            logging.debug(f"saving offline data: {full_path}")
        else:
            logging.debug(f"overwriting offline data: {full_path}")
        data["time"] = data["time"].apply(lambda x: str(x)[:10])
        data.to_csv(full_path, index=False)

    @classmethod
    def load_offline(cls, code: int) -> Optional[pd.DataFrame]:
        """
        load daily dataframe from the disk
        """
        full_path = os.path.join(ASHARE_OFFLINE_DIR, f"etf_{code}.csv")
        if not os.path.exists(full_path):
            return
        logging.debug(f"loading offline data: {full_path}")
        data = pd.read_csv(full_path)
        data["time"] = data["time"].apply(to_pd_time)
        return data

    @classmethod
    def sync_query(cls, code: int, start: FRDate, end: FRDate) -> pd.DataFrame:
        tmp = ak.fund.fund_etf_em.fund_etf_hist_em(
            symbol=str(code),
            period="daily",
            start_date=FRDatetime(start).datetime.strftime("%Y%m%d"),
            end_date=FRDatetime(end).datetime.strftime("%Y%m%d"),
            adjust="",
        )
        logging.debug(f"query for etf({code}), from {start} to {end}")

        tmp["time"] = tmp["日期"].apply(to_pd_time)
        tmp["timestamp"] = tmp["time"].apply(lambda x: int(x.timestamp()))
        tmp["open"] = tmp["开盘"]
        tmp["high"] = tmp["最高"]
        tmp["low"] = tmp["最低"]
        tmp["close"] = tmp["收盘"]
        tmp["volume"] = tmp["成交量"]
        tmp["turnover"] = tmp["成交额"]
        tmp["code"] = code
        tmp["name"] = fetch_fund_name()[str(code)]
        return tmp[list(cls.column_types)].reset_index(drop=True)

    @classmethod
    def load(
        cls,
        code: int,
        *,
        start: FRDate,
        end: Optional[FRDate] = None,
        use_offline_cache: bool = USE_OFFLINE_CACHE,
    ) -> "cls":
        if end is not None and FRDatetime(start) >= FRDatetime(end):
            return cls.default()
        if use_offline_cache:
            result = cls.load_offline(code)
            if result is None:
                result = cls.sync_query(code, DEFAULT_START_DATE, DEFAULT_END_DATE)
                cls.save_offline(code, result.copy())

            result = result[result["timestamp"] >= FRDatetime(start).timestamp]
            if end is not None:
                result = result[result["timestamp"] < FRDatetime(end).timestamp]
            result.reset_index(drop=True, inplace=True)
        else:
            result = cls.sync_query(code, start, end or FRDatetime.now().offset(hours=-168))
        cls.validate(result)
        return result

    @classmethod
    def cache_all(cls):
        tmp = {}
        for code in DEFAULT_CODE_SET:
            time.sleep(1)
            try:
                tmp[code] = len(cls.load(code, start=DEFAULT_START_DATE))
            except Exception as e:
                logging.error(f"load etf({code}) failed, {type(e)}-{e}")
        logging.info(f"cache all successfully, data length {tmp}")


if __name__ == "__main__":
    # Script entry point: pre-fetch and cache daily data for every ETF code
    # in DEFAULT_CODE_SET (failures per code are logged, not fatal).
    FundDaily.cache_all()
