from __future__ import annotations
import copy
from functools import reduce
from abc import abstractmethod
import warnings
import datetime

import numpy as np
from dateutil import parser

from investment.lixinger_api import CompanyFundamentalRangePost, find_url_result_series, CompanyFinanceRangePost
from investment.lixinger_api import IndexFundamentalRangePost, IndexFinanceRangePost, IndexSamplePost
from investment.constants import INDEX_METRICS_FUNDAMENTAL, INDEX_METRICS_FINANCE, STOCK_METRICS
from investment.date_utilities import divide_over_10_years

__all__ = [
    "IndexTimeSeries", "StockTimeSeries", "TimeSeries", "IndexBasketTimeSeries",
    "concat_time", "filter_indexes_connection", "find_in_series"
    ]


class TimeSeries:
    """Base container for dated fundamental/finance metric series of one security.

    Subclasses implement :meth:`fill_metrics` to pull data from the lixinger API.
    Arithmetic (``+``, ``+=``, scalar ``*``) combines the metric arrays
    element-wise, which is how weighted baskets of series are built.
    """

    def __init__(self, stock_code, time_start, time_end, fill_flag):
        """
        :param stock_code: lixinger code of the stock or index
        :param time_start: start date string, e.g. "2012-01-01"
        :param time_end: end date string
        :param fill_flag: when True, immediately download metrics via fill_metrics()
        """
        self.stock_code = stock_code
        self.time_interval = parser.parse(time_end) - parser.parse(time_start)
        # lixinger rejects ranges above ~10 years; callers split via divide_over_10_years
        if self.time_interval > datetime.timedelta(3650):
            warnings.warn("lixinger don't support time range larger than 10 years")
        self.time_start = time_start
        self.time_end = time_end
        self.dates_series = []  # dates from previous to current
        self.fundamental_metrics = {}  # metric key -> np.ndarray aligned with dates_series
        self.finance_times = []  # report dates for finance metrics
        self.finance_metrics = {}  # metric key -> np.ndarray aligned with finance_times
        if fill_flag:
            self.fill_metrics()

    @abstractmethod
    def fill_metrics(self):
        """Download and store the metric series; implemented by subclasses."""
        pass

    def _validate_operand(self, other):
        """Shared checks for __add__/__iadd__: type, non-empty, same time range."""
        assert isinstance(other, TimeSeries)
        if len(other.dates_series) == 0:
            raise ValueError("Empty metrics of time series")
        # BUG FIX: the original tested `self.time_end != other.time_start` (typo)
        # joined with `and`, and used `assert TypeError(...)` which is always
        # truthy — so mismatched ranges were never rejected.  Now really raise.
        if self.time_start != other.time_start or self.time_end != other.time_end:
            raise TypeError("not same stock time range")

    def deep_copy(self) -> "TimeSeries":
        """Return a new instance of the same class with copied metric containers.

        :raises ValueError: if this series has not been filled yet.
        """
        if len(self.dates_series) == 0:
            raise ValueError("Empty metrics of time series")
        new_time_series = self.__class__(self.stock_code, self.time_start, self.time_end, False)
        new_time_series.dates_series = self.dates_series[:]
        new_time_series.fundamental_metrics = copy.deepcopy(self.fundamental_metrics)
        new_time_series.finance_metrics = copy.deepcopy(self.finance_metrics)
        new_time_series.finance_times = self.finance_times[:]
        return new_time_series

    def __add__(self, other) -> "TimeSeries":
        """Add metric arrays of two series over the same time range.

        note: returns a new object; operands are not mutated.
        """
        self._validate_operand(other)
        new_time_series = self.deep_copy()
        for ky in new_time_series.fundamental_metrics.keys():
            new_time_series.fundamental_metrics[ky] = new_time_series.fundamental_metrics[ky] + \
                                                      other.fundamental_metrics[ky]
        for ky in new_time_series.finance_metrics.keys():
            new_time_series.finance_metrics[ky] = new_time_series.finance_metrics[ky] + \
                                                  other.finance_metrics[ky]
        return new_time_series

    def __iadd__(self, other):
        """In-place metric addition; mutates and returns self."""
        self._validate_operand(other)
        for ky in self.fundamental_metrics.keys():
            self.fundamental_metrics[ky] = self.fundamental_metrics[ky] + other.fundamental_metrics[ky]
        for ky in self.finance_metrics.keys():
            self.finance_metrics[ky] = self.finance_metrics[ky] + other.finance_metrics[ky]
        return self

    def __rmul__(self, scalar):
        """Scale every metric array by `scalar`; returns a new object."""
        new_time_series = self.deep_copy()
        for ky in new_time_series.fundamental_metrics.keys():
            new_time_series.fundamental_metrics[ky] = scalar * new_time_series.fundamental_metrics[ky]
        for ky in new_time_series.finance_metrics.keys():
            new_time_series.finance_metrics[ky] = scalar * new_time_series.finance_metrics[ky]
        return new_time_series

    def __eq__(self, other):
        assert isinstance(other, TimeSeries)
        res = self.stock_code == other.stock_code and self.time_start == other.time_start \
            and self.time_end == other.time_end and self.dates_series == other.dates_series \
            and self.finance_times == other.finance_times
        # BUG FIX: np.any reported equality when a single element matched;
        # np.all requires every element to match.  finance_metrics are now
        # compared too (they were silently ignored before).
        for ky in self.fundamental_metrics.keys():
            res = res and np.all(self.fundamental_metrics[ky] == other.fundamental_metrics[ky])
        for ky in self.finance_metrics.keys():
            res = res and np.all(self.finance_metrics[ky] == other.finance_metrics[ky])
        return bool(res)


class StockTimeSeries(TimeSeries):
    """TimeSeries of a single listed company, pulled via the company endpoints."""

    def __init__(self, stock_code, time_start, time_end, company_type='non_financial', fill_flag=False):
        """
        :param company_type: lixinger company category, default 'non_financial'
        """
        self.company_type = company_type
        super().__init__(stock_code, time_start, time_end, fill_flag)

    def fill_metrics(self):
        """Download all metrics, splitting the range into chunks the API accepts."""
        divide_time = divide_over_10_years(self.time_start, self.time_end)
        start, end = divide_time[0]
        self.dates_series, self.fundamental_metrics, self.finance_times, self.finance_metrics = self.post(start, end)
        # subsequent chunks are appended onto the first chunk's containers
        for start, end in divide_time[1:]:
            dates_series, fundamental_metrics, finance_times, finance_metrics = self.post(start, end)
            self.dates_series += dates_series
            for ky in fundamental_metrics.keys():
                self.fundamental_metrics[ky] = np.append(self.fundamental_metrics[ky], fundamental_metrics[ky])
            self.finance_times += finance_times
            for ky in finance_metrics.keys():
                self.finance_metrics[ky] = np.append(self.finance_metrics[ky], finance_metrics[ky])

    def post(self, start, end):
        """Fetch one chunk of fundamental and finance series from the API.

        :return: (dates_series, fundamental_metrics, finance_times, finance_metrics)
        """
        fundamental_url = CompanyFundamentalRangePost(start, end, self.stock_code, self.company_type)
        dates_series, fundamental_metrics = find_url_result_series(fundamental_url)
        finance_url = CompanyFinanceRangePost(start, end, self.stock_code, self.company_type)
        finance_times, finance_metrics = find_url_result_series(finance_url)
        return dates_series, fundamental_metrics, finance_times, finance_metrics

    def deep_copy(self):
        """Copy the series, preserving company_type on the clone."""
        new_time_series = super().deep_copy()
        new_time_series.company_type = self.company_type
        return new_time_series

    def _check_same_company_type(self, other):
        # BUG FIX: original used `assert TypeError(...)`, which is always truthy
        # and never failed; mismatching company types now genuinely raise.
        if self.company_type != other.company_type:
            raise TypeError(f"not same stock company_type {self.company_type}, {other.company_type}")

    def __add__(self, other):
        self._check_same_company_type(other)
        return super().__add__(other)

    def __iadd__(self, other):
        self._check_same_company_type(other)
        return super().__iadd__(other)

    def get_fundamental_metrics(self, metrics_name) -> np.ndarray:
        """
        :param metrics_name: PE, PE_POS, PB, PB_POS, DYR, Stoke_Price, Turnover_Volume, Frontal_Right, Market_CAP
        :return: time series of metrics
        """
        assert hasattr(STOCK_METRICS, metrics_name)
        if metrics_name.endswith("PE_POS") and self.time_interval > datetime.timedelta(3650):
            warnings.warn("can't use position of Lixinger, because merge 20 years position in 10y-interval.")
        # IDIOM FIX: getattr replaces eval() on an interpolated string
        return np.array(self.fundamental_metrics.get(getattr(STOCK_METRICS, metrics_name).value))

    def get_finance_metrics(self, metrics_name) -> np.ndarray:
        """
        :param metrics_name: FCF, ROE, Net_Profit
        :return: time series of metrics
        """
        assert hasattr(STOCK_METRICS, metrics_name)
        # IDIOM FIX: getattr replaces eval() on an interpolated string
        return np.array(self.finance_metrics.get(getattr(STOCK_METRICS, metrics_name).value))


class IndexTimeSeries(TimeSeries):
    """TimeSeries of an index, plus its constituent stock sample list."""

    def __init__(self, stock_code, time_start, time_end, fill_flag=False):
        self.samples = []  # constituent stock codes of the index
        # optional local JSON cache paths passed through to find_url_result_series
        self.fundamental_json_path = None
        self.finance_json_path = None
        self.sample_json_path = None
        super().__init__(stock_code, time_start, time_end, fill_flag)

    def set_json_path(self, *args):
        """Set optional cache paths: (fundamental_json_path, finance_json_path, sample_json_path)."""
        self.fundamental_json_path = args[0] if len(args) > 0 else None
        self.finance_json_path = args[1] if len(args) > 1 else None
        self.sample_json_path = args[2] if len(args) > 2 else None

    def fill_metrics(self):
        """Download the sample list and all metrics, splitting the range into API-sized chunks."""
        sample_url = IndexSamplePost([self.stock_code])
        _, samples = find_url_result_series(sample_url, self.sample_json_path)
        self.samples = samples[self.stock_code]

        divide_time = divide_over_10_years(self.time_start, self.time_end)
        start, end = divide_time[0]
        self.dates_series, self.fundamental_metrics, self.finance_times, self.finance_metrics = self.post(start, end)
        for start, end in divide_time[1:]:
            dates_series, fundamental_metrics, finance_times, finance_metrics = self.post(start, end)
            self.dates_series += dates_series
            for ky in fundamental_metrics.keys():
                self.fundamental_metrics[ky] = np.append(self.fundamental_metrics[ky], fundamental_metrics[ky])
            self.finance_times += finance_times
            for ky in finance_metrics.keys():
                self.finance_metrics[ky] = np.append(self.finance_metrics[ky], finance_metrics[ky])

    def post(self, start, end):
        """Fetch one chunk of fundamental and finance series from the index endpoints.

        :return: (dates_series, fundamental_metrics, finance_times, finance_metrics)
        """
        fundamental_url = IndexFundamentalRangePost(start, end, self.stock_code)
        finance_url = IndexFinanceRangePost(start, end, self.stock_code)
        dates_series, fundamental_metrics = find_url_result_series(fundamental_url, self.fundamental_json_path)
        finance_times, finance_metrics = find_url_result_series(finance_url, self.finance_json_path)
        return dates_series, fundamental_metrics, finance_times, finance_metrics

    def get_stock_samples(self):
        """Build and fill a StockTimeSeries for every constituent stock; network-heavy."""
        sample_time_series = []
        for stock in self.samples:
            sample_time = StockTimeSeries(stock, self.time_start, self.time_end)
            sample_time.fill_metrics()
            sample_time_series.append(sample_time)
        return sample_time_series

    def get_fundamental_metrics(self, metrics_name) -> np.ndarray:
        """
        :param metrics_name: fundamental_PE_PB_MC_CP_DYR PE_POS, PB_POS
        :return: time series of metrics
        """
        assert hasattr(INDEX_METRICS_FUNDAMENTAL, metrics_name)
        if metrics_name.endswith("PE_POS") and self.time_interval > datetime.timedelta(3650):
            warnings.warn("can't use position of Lixinger, because merge 20 years position in 10y-interval.")
        # IDIOM FIX: getattr replaces eval() on an interpolated string
        return np.array(self.fundamental_metrics.get(getattr(INDEX_METRICS_FUNDAMENTAL, metrics_name).value))

    def get_finance_metrics(self, metrics_name) -> np.ndarray:
        """
        :param metrics_name: finance_ROE_FCF_NP_S_R_TOTAL_ASSETS_TOTAL_LIABILITIES
        :return: time series of metrics
        """
        assert hasattr(INDEX_METRICS_FINANCE, metrics_name)
        # IDIOM FIX: getattr replaces eval() on an interpolated string
        return np.array(self.finance_metrics.get(getattr(INDEX_METRICS_FINANCE, metrics_name).value))

    def get_net_asset(self) -> np.ndarray:
        """
        :return 净资产 (net asset): market cap mc / pb
        """
        return self.get_fundamental_metrics('MC') / self.get_fundamental_metrics('PB')

    def get_roe_increasing(self) -> np.ndarray:
        # TODO: not implemented yet — currently returns None
        pass

    def get_dyr_increasing(self) -> np.ndarray:
        return self.get_fundamental_metrics('MC') * self.get_fundamental_metrics('DYR')

    def get_net_value(self) -> np.ndarray:
        """
        :return 净值 (net value): cp / cp[0]
        """
        cp = self.get_fundamental_metrics('CP')
        return cp / cp[0]

    def get_thermometer(self) -> np.ndarray:
        """Average of PE and PB valuation positions as a market 'thermometer'."""
        # BUG FIX: original averaged PE_POS with itself — almost certainly a
        # typo for PB_POS; confirm against the intended thermometer definition.
        thermometer = (self.get_fundamental_metrics('PE_POS') + self.get_fundamental_metrics('PB_POS')) / 2
        return thermometer


def concat_time(lh: object, rh: object) -> object:
    """Concatenate two TimeSeries of the same security along the time axis.

    The right series must start exactly one day after the left one ends.
    Returns a new object; neither operand is mutated.
    """
    assert isinstance(lh, TimeSeries) and isinstance(rh, TimeSeries)
    assert lh.stock_code == rh.stock_code, "can't concat TimeSeries which have different stock code."
    assert parser.parse(lh.time_end) - parser.parse(rh.time_start) == datetime.timedelta(-1),\
        "ending time of left TimeSeries should equals to right's starting time."
    merged = lh.deep_copy()
    merged.time_interval += rh.time_interval
    merged.time_end = rh.time_end
    merged.dates_series = merged.dates_series + rh.dates_series
    for key in merged.fundamental_metrics:
        merged.fundamental_metrics[key] = np.append(merged.fundamental_metrics[key], rh.fundamental_metrics[key])
    merged.finance_times = merged.finance_times + rh.finance_times
    for key in merged.finance_metrics:
        merged.finance_metrics[key] = np.append(merged.finance_metrics[key], rh.finance_metrics[key])
    return merged


# a basket of indexes for investment
class IndexBasketTimeSeries:
    """A weighted basket of IndexTimeSeries objects over one common time range.

    Weights are normalized to sum to 1; the combined metric series is stored
    in ``basket_time_series``.
    """

    def __init__(self, time_start, time_end,
                 index_stock_list: "list of index stock", weight_list: "weight of index" = None):
        """
        :param index_stock_list: index codes to include in the basket
        :param weight_list: one weight per index; defaults to equal weighting
        """
        if weight_list is None:
            weight_list = [1 for _ in index_stock_list]
        else:
            assert len(weight_list) == len(index_stock_list), RuntimeError("indexes length not equals to weights")
        self.time_start = time_start
        self.time_end = time_end
        self.weights = np.array(weight_list)
        # normalize so the weights sum to 1
        if self.weights.sum() != 1:
            self.weights = self.weights[:] / self.weights.sum()
        self.index_time_series_list = []
        first_index = IndexTimeSeries(index_stock_list[0], self.time_start, self.time_end, fill_flag=True)
        self.basket_time_series = self.weights[0] * first_index.deep_copy()
        self.index_time_series_list.append(first_index)
        # BUG FIX: enumerate over index_stock_list[1:] previously started at 0,
        # so every series after the first was scaled by the *previous* weight.
        for i, stock in enumerate(index_stock_list[1:], start=1):
            index_series = IndexTimeSeries(stock, self.time_start, self.time_end, fill_flag=True)
            self.index_time_series_list.append(index_series)
            self.basket_time_series += self.weights[i] * index_series
        self.dates_series = self.index_time_series_list[0].dates_series

    def get_merged_attribute(self, attribute_func_name) -> np.ndarray:
        """
        :param: attribute_func_name like get_net_capital, get_roe_increasing, get_dyr_increasing ...
        :return: np.array which shape of gets_attribute
        """
        assert hasattr(self.index_time_series_list[0], attribute_func_name)
        # BUG FIX: the previous reduce() fed the accumulated array back in as a
        # list index, which crashed for baskets of 3+ indexes; compute the
        # weighted sum directly instead.
        return sum(
            self.weights[i] * getattr(series, attribute_func_name)()
            for i, series in enumerate(self.index_time_series_list)
        )


# some index of series utilities

def filter_indexes_connection(arr, pad=5) -> np.ndarray:
    """Collapse each run of near-adjacent indexes into a single representative.

    Consecutive entries whose gap to the previous entry is <= pad are treated
    as one connected area; each area is replaced by the floor of its mean.

    :param arr: 1-D index array, assumed increasing
    :param pad: maximum gap for two entries to belong to the same area
    :return: np.array with one representative index per connected area
    """
    assert len(arr.shape) == 1
    if arr.shape[0] <= 1:
        return arr
    # sentinel guarantees the final area is flushed inside the loop
    extended = np.append(arr, [arr[-1] + pad + 1])
    representatives = []
    area_start = 0
    running_total = 0
    previous = extended[0]
    for pos, value in enumerate(extended):
        if abs(previous - value) <= pad:
            running_total += value
        else:
            width = pos - area_start
            representatives.append(running_total // width if width > 0 else running_total)
            area_start = pos
            running_total = value
        previous = value
    return np.array(representatives)


def filter_indexes_by_spacer(indexes, spacer=100) -> np.ndarray:
    """Thin an increasing index sequence so kept entries are more than `spacer` apart.

    The first entry is always kept; each later entry is kept only if it exceeds
    the last kept entry by more than `spacer`.
    """
    assert len(indexes) > 0
    last_kept = indexes[0]
    kept = [last_kept]
    for candidate in indexes:
        if candidate > last_kept + spacer:
            kept.append(candidate)
            last_kept = candidate
    return np.array(kept)


def find_in_series(series, value, filter_pad: float | None = 100, margin=1e-2) -> np.ndarray:
    indexes = np.where(np.abs(series - value) < margin)[0]
    assert len(indexes) > 0, RuntimeError("can't find this value in series")
    return filter_indexes_by_spacer(indexes, filter_pad) if filter_pad is not None else indexes
