import akshare as ak

from util import *


class IndexDaily(pd.DataFrame):
    """Daily OHLCV bars for a CSI index, backed by an on-disk CSV cache.

    Data flow: ``load`` -> (``load_offline`` cache hit | ``sync_query`` via
    akshare, then ``save_offline``) -> ``validate`` -> frame indexed by "time".
    """

    # Canonical column -> pandas dtype mapping. Key order is the column order
    # used by `default` and `sync_query`; `validate` enforces the dtypes.
    column_types: Dict[str, str] = {
        "time": "datetime64[ns, Asia/Shanghai]",
        "timestamp": "int64",  # seconds
        "open": "float64",
        "high": "float64",
        "low": "float64",
        "close": "float64",
        "volume": "float64",
        "turnover": "float64",
        "code": "object",
        "name": "object",
    }

    @classmethod
    def default(cls) -> pd.DataFrame:
        """Return an empty frame with the canonical columns and dtypes."""
        return pd.DataFrame({col: pd.Series(dtype=dtype) for col, dtype in cls.column_types.items()})

    @classmethod
    def validate(cls, data: pd.DataFrame) -> None:
        """Check that every canonical column exists with the expected dtype.

        Raises AssertionError on mismatch. Uses an explicit ``raise`` instead
        of ``assert`` so the check still runs under ``python -O``; the
        exception type is unchanged for existing callers.
        """
        for col, dtype in cls.column_types.items():
            dtype_ = data[col].dtype
            if dtype_ != dtype:
                raise AssertionError(f"table column {col}: invalid dtype {dtype_}")

    @classmethod
    def save_offline(cls, code: str, data: pd.DataFrame) -> None:
        """Persist the full daily dataframe for `code` to ASHARE_OFFLINE_DIR.

        The "time" column is serialized as a plain ``YYYY-MM-DD`` string
        (CSV cannot carry the tz-aware datetime dtype). The caller's frame
        is not modified: the conversion happens on an internal copy.
        """
        # exist_ok avoids the check-then-create race of the old exists()+makedirs().
        os.makedirs(ASHARE_OFFLINE_DIR, exist_ok=True)
        full_path = os.path.join(ASHARE_OFFLINE_DIR, f"index_{code}.csv")
        if not os.path.exists(full_path):
            logging.debug(f"saving offline data: {full_path}")
        else:
            logging.debug(f"overwriting offline data: {full_path}")
        out = data.copy()  # defensive copy: do not mutate the caller's frame
        out["time"] = out["time"].apply(lambda x: str(x)[:10])
        out.to_csv(full_path, index=False)

    @classmethod
    def load_offline(cls, code: str) -> Optional[pd.DataFrame]:
        """Load the cached daily dataframe for `code`, or None if no cache file exists."""
        full_path = os.path.join(ASHARE_OFFLINE_DIR, f"index_{code}.csv")
        if not os.path.exists(full_path):
            return None
        logging.debug(f"loading offline data: {full_path}")
        data = pd.read_csv(
            full_path,
            # "time" is read as a string and re-parsed below; the CSV stores
            # only the date part (see save_offline).
            dtype={
                "time": "str",
                "timestamp": "int64",  # seconds
                "open": "float64",
                "high": "float64",
                "low": "float64",
                "close": "float64",
                "volume": "float64",
                "turnover": "float64",
                "code": "str",
                "name": "str",
            },
        )
        # to_pd_time presumably restores the tz-aware dtype that validate()
        # expects — defined in util, not visible here.
        data["time"] = data["time"].apply(to_pd_time)
        return data

    @classmethod
    def sync_query(cls, code: str, start: FRDate, end: FRDate) -> pd.DataFrame:
        """Fetch daily bars for `code` in [start, end] from the CSIndex endpoint.

        Returns a frame with the canonical columns (see ``column_types``),
        mapped from the Chinese column names of the akshare response.
        """
        # NOTE(review): this deep module path into akshare is unusual — the
        # function is normally exposed as ak.stock_zh_index_hist_csindex;
        # confirm against the pinned akshare version before changing.
        tmp = ak.index.index_stock_zh_csindex.stock_zh_index_hist_csindex(
            symbol=code,
            start_date=FRDatetime(start).datetime.strftime("%Y%m%d"),
            end_date=FRDatetime(end).datetime.strftime("%Y%m%d"),
        )
        logging.debug(f"query for index({code}), from {start} to {end}")

        tmp["time"] = tmp["日期"].apply(lambda x: to_pd_time(str(x)[:10]))
        tmp["timestamp"] = tmp["time"].apply(lambda x: int(x.timestamp()))
        tmp["open"] = tmp["开盘"]
        tmp["high"] = tmp["最高"]
        tmp["low"] = tmp["最低"]
        tmp["close"] = tmp["收盘"]
        tmp["volume"] = tmp["成交量"]
        tmp["turnover"] = tmp["成交金额"]
        tmp["code"] = tmp["指数代码"]
        tmp["name"] = tmp["指数中文全称"]
        return tmp[list(cls.column_types)].reset_index(drop=True)

    @classmethod
    def load(
        cls,
        code: str,
        *,
        start: FRDate,
        end: Optional[FRDate] = None,
        use_offline_cache: bool = USE_OFFLINE_CACHE,
    ) -> pd.DataFrame:
        """Load daily bars for `code` in [start, end), indexed by "time".

        With the offline cache enabled, the full default date range is
        fetched once, persisted, and then sliced by timestamp (`end`
        exclusive). Without it, only the requested window is queried.
        Returns an empty default frame when start >= end.
        """
        if end is not None and FRDatetime(start) >= FRDatetime(end):
            return cls.default()
        if use_offline_cache:
            result = cls.load_offline(code)
            if result is None:
                # Cache miss: fetch the whole default range so future loads
                # for any sub-window are served from disk.
                result = cls.sync_query(code, DEFAULT_START_DATE, DEFAULT_END_DATE)
                cls.save_offline(code, result.copy())

            result = result[result["timestamp"] >= FRDatetime(start).timestamp]
            if end is not None:
                result = result[result["timestamp"] < FRDatetime(end).timestamp]
        else:
            # NOTE(review): the fallback end is "now minus 168 hours" —
            # presumably to avoid querying days the index has not published
            # yet; confirm the intent before changing.
            result = cls.sync_query(code, start, end or FRDatetime.now().offset(hours=-168))
        cls.validate(result)
        return result.set_index("time")

    @classmethod
    def cache_all(cls):
        """Warm the offline cache for every index in DEFAULT_INDEX_CODE_SET.

        Failures are logged per code and do not abort the run.
        """
        tmp = {}
        for code in DEFAULT_INDEX_CODE_SET:
            time.sleep(1)  # throttle: be polite to the remote endpoint
            try:
                data = cls.load(code, start=DEFAULT_START_DATE)
                tmp[code] = (data["name"].iloc[0], len(data))
            except Exception as e:
                logging.error(f"load index({code}) failed, {type(e)}-{e}")
        logging.info(f"cache all successfully, {tmp}")


if __name__ == "__main__":
    # Script entry point: warm the on-disk cache for all default index codes.
    IndexDaily.cache_all()
