# -*- coding: utf-8 -*-
from __future__ import division
from __future__ import print_function

import os
import abc
import six
import json
import bisect
import importlib
import numpy as np
import pandas as pd

from .config import C
from .utils import requests_with_retry, lower_bound, upper_bound


@six.add_metaclass(abc.ABCMeta)
class Provider(object):
    """Provider Abstract Class

    All provider should provide 3D data, which are:

    - calendar
    - instruments
    - features
    """
    def __init__(self):
        # freq -> (calendar ndarray of pd.Timestamp, {timestamp: position}) cache
        self._calendar_cache = dict()
        # market -> {instrument: [(begin, end), ...]} cache
        self._instruments_cache = dict()

    def calendar(self, start_time=None, end_time=None, freq='day'):
        """Get calendar of certain market in given time range.

        Parameters
        ----------
        start_time : str
            start of the time range
        end_time : str
            end of the time range
        freq : str
            time frequency, available: year/quarter/month/week/day

        Returns
        ----------
        list
            calendar list
        """
        # load (and memoize) the full calendar for this freq
        if freq in self._calendar_cache:
            _calendar, _calendar_index = self._calendar_cache[freq]
        else:
            _calendar = np.array(self._load_calendar(freq))
            _calendar_index = {x: i for i, x in enumerate(_calendar)}  # for fast search
            self._calendar_cache[freq] = _calendar, _calendar_index
        # strip to [start_time, end_time] (both endpoints inclusive)
        si = 0
        if start_time:
            start_time = pd.Timestamp(start_time)
            si = bisect.bisect_left(_calendar, start_time)  # lower_bound
        ei = len(_calendar)
        if end_time:
            end_time = pd.Timestamp(end_time)
            ei = bisect.bisect_right(_calendar, end_time)  # upper_bound
        return _calendar[si:ei]

    @abc.abstractmethod
    def _load_calendar(self, freq):
        """Load the complete calendar (list of pd.Timestamp) for `freq`."""
        pass

    def instruments(self, market='all', start_time=None, end_time=None, as_list=False):
        """Get instruments of certain market in given time range.

        .. note:: you may hindsight instruments with large time range

        Parameters
        ----------
        market : str
            market/industry/index shortname, eg all/sh/idx500, default all
        start_time : str
            start of the time range
        end_time : str
            end of the time range
        as_list : bool
            return instruments as list or dict

        Returns
        ----------
        dict or list
            instruments list or dictionary with time spans
        """
        # load (and memoize) the raw instrument spans for this market
        if market in self._instruments_cache:
            _instruments = self._instruments_cache[market]
        else:
            _instruments = self._load_instruments(market)
            self._instruments_cache[market] = _instruments
        # clip every span to [start_time, end_time]; wide defaults act as "no bound"
        start_time = pd.Timestamp(start_time or '1900-01-01')
        end_time = pd.Timestamp(end_time or '2099-12-31')
        _instruments_filtered = {
            inst: list(filter(
                lambda x: x[0] <= x[1],  # drop spans that clipped to nothing
                [(max(start_time, x[0]), min(end_time, x[1])) for x in spans]
            )) for inst, spans in _instruments.items()
        }
        # drop instruments left with no spans at all
        _instruments_filtered = {
            key: value
            for key, value in _instruments_filtered.items()
            if value
        }
        # as list
        if as_list:
            return list(_instruments_filtered)
        return _instruments_filtered

    @abc.abstractmethod
    def _load_instruments(self, market):
        """Load the raw {instrument: [(begin, end), ...]} mapping for `market`."""
        pass

    def features(self, instruments, fields=None, start_time=None, end_time=None, freq='day'):
        """Get batch features of instruments and fields in given time range.

        Parameters
        ----------
        instruments : list or dict
            list of instruments
        fields : list
            list of feature instances
        start_time : str
            start of the time range
        end_time : str
            end of the time range
        freq : str
            time frequency

        Returns
        ----------
        pd.DataFrame
            a pandas dataframe with <instrument, datetime> index
        """
        # NOTE: default changed from a shared mutable `[]` to None (same observable
        # behavior: the empty default still raises below)
        fields = [] if fields is None else fields
        # validate
        if len(instruments) == 0:
            raise ValueError('instruments cannot be empty')
        if len(fields) == 0:
            raise ValueError('fields cannot be empty')
        # calendar
        calendar = self.calendar(start_time, end_time, freq)
        if len(calendar) == 0:
            return pd.DataFrame(columns=[str(x) for x in fields])
        _calendar, _ = self._calendar_cache[freq]
        start_time = calendar[0]
        end_time = calendar[-1]
        # load one series per (instrument, field); series are indexed by calendar position
        data = dict()
        for inst in instruments:
            obj = dict()
            for field in fields:
                obj[str(field)] = field.load(inst, start_time, end_time, freq)
            data[inst] = pd.DataFrame(obj, columns=obj.keys())
        data = pd.concat(data)
        data = data.replace([np.inf, -np.inf], np.nan)
        # map the positional level-1 index to real timestamps
        # NOTE: assignment form instead of `inplace=True` (deprecated/removed in
        # modern pandas); equivalent on all pandas versions
        data.index = data.index.set_levels(_calendar[data.index.get_level_values(1)], level=1)
        data.index.names = ['instrument', 'datetime']
        # filter rows to each instrument's listed time spans (dict form only)
        if isinstance(instruments, dict):
            new_data = dict()
            for inst, spans in instruments.items():
                df = data.loc[inst]
                mask = np.zeros(len(df), dtype=bool)  # `np.bool` alias removed in numpy>=1.24
                for begin, end in spans:
                    mask |= (df.index >= begin) & (df.index <= end)
                new_data[inst] = df[mask]
            data = pd.concat(new_data)
        return data

    @abc.abstractmethod
    def _load_feature(self, instrument, field, start_time, end_time, freq):
        """Get single feature of instrument in given time range"""
        pass

    
class SimProvider(Provider):
    """Simulated Provider

    Used for API development and testing.
    """
    def _load_calendar(self, freq):
        # fixed daily calendar; NOTE(review): `freq` is ignored here — the
        # simulation always returns daily dates regardless of requested freq
        return pd.date_range('1990-01-01', '2018-12-31', freq='D').tolist()

    def _load_instruments(self, market):
        # two hard-coded instruments; `market` is ignored in simulation
        _instruments = {
            'SZ000001': [
                (pd.Timestamp('1990-01-01'), pd.Timestamp('2099-12-31')),
            ],
            'SH600000': [
                (pd.Timestamp('2000-01-01'), pd.Timestamp('2099-12-31')),
            ]
        }
        return _instruments

    def _load_feature(self, instrument, field, start_time, end_time, freq):
        """Return a deterministic random series indexed by calendar position."""
        index = self.calendar(start_time, end_time, freq)
        _, _calendar_index = self._calendar_cache[freq]
        # use a private seeded RandomState instead of np.random.seed(0) so the
        # global numpy RNG state is not clobbered as a side effect; the values
        # produced are identical to seed(0) followed by np.random.normal
        series = pd.Series(np.random.RandomState(0).normal(size=(len(index),)))
        # shift the positional index so it aligns with the full calendar
        series.index += _calendar_index[start_time]
        return series

    
class LocalProvider(Provider):
    """Local Provider

    Serving data from local file system.
    """
    # use class attribute
    _uri_cal = os.path.join(C.provider_uri, 'calendar', '{}.txt')
    _uri_inst = os.path.join(C.provider_uri, 'instruments', '{}.txt')
    _uri_data = os.path.join(C.provider_uri, 'features', '{}', '{}', 'data.{}.bin') #instrument/field/freq
    _uri_meta = os.path.join(C.provider_uri, 'features', '{}', '{}', 'data.{}.meta') #instrument/field/freq

    def _load_calendar(self, freq):
        """Load all trading dates for `freq` from the local calendar file."""
        fname = self._uri_cal.format(freq)
        if not os.path.exists(fname):
            raise ValueError('calendar not exists for freq ' + freq)
        with open(fname) as f:
            return [pd.Timestamp(x.strip()) for x in f]
            # NOTE: `pd.Timestamp` is much more efficient than `datetime.strptime` 
            # or `pd.to_datetime`, experiments:
            # %timeit pd.Timestamp('20100101') -> 1.14us
            # %timeit pd.Timestamp('20100101\n') -> 70.6us
            # %timeit pd.to_datetime('20100101') -> 36.8us
            # %timeit pd.to_datetime('20100101\n') -> 135us
            # %timeit datetime.strptime('20100101\n'.strip(),'%Y%m%d') -> 7.01us

    def _load_instruments(self, market):
        """Load {instrument: [(begin, end), ...]} from the local instruments file.

        Each line is expected to be: ``<inst> <begin> <end>`` (whitespace-separated).
        """
        fname = self._uri_inst.format(market)
        if not os.path.exists(fname):
            raise ValueError('instruments not exists for market ' + market)
        _instruments = dict()
        with open(fname) as f:
            for line in f:
                inst, begin, end = line.strip().split()
                _instruments.setdefault(inst, []).append((pd.Timestamp(begin), pd.Timestamp(end)))
        return _instruments

    # @profile # for kernprof
    def _load_feature(self, instrument, field, start_time, end_time, freq):
        """Read one feature series from the binary file, indexed by calendar position.

        Returns an empty Series when the requested range does not intersect the
        stored range (or the meta file is missing).
        """
        # validate
        uri_meta = self._uri_meta.format(instrument, field, freq)
        if not os.path.exists(uri_meta):
            print('[WARN] uri_meta not found: ' + uri_meta)
            return pd.Series()
        uri_data = self._uri_data.format(instrument, field, freq)
        if not os.path.exists(uri_data):
            raise ValueError('uri_data not found: ' + uri_data)
        # calendar index
        if freq not in self._calendar_cache:
            self.calendar(freq=freq) # update cache
        _calendar, _calendar_index = self._calendar_cache[freq]
        if start_time not in _calendar_index:
            # NOTE: if this method is called from 'self.features', start_time will always be valid
            # and this search branch will be skipped
            pos = bisect.bisect_left(_calendar, start_time)
            if pos == len(_calendar):  # start_time is after the whole calendar
                return pd.Series()
            start_time = _calendar[pos]
        start_index = _calendar_index[start_time]
        if end_time not in _calendar_index:
            pos = bisect.bisect_right(_calendar, end_time)
            if pos == 0:  # end_time is before the whole calendar
                return pd.Series()
            end_time = _calendar[pos-1]
        end_index = _calendar_index[end_time]
        # load meta: stored data covers [ref_start_index, ref_end_index], inclusive
        with open(uri_meta) as f:
            meta = json.load(f)
        ref_start_index = meta['start_index']
        ref_end_index = meta['end_index']
        dtype = np.dtype(meta['data_type'])
        si = max(start_index, ref_start_index)
        ei = min(end_index, ref_end_index)
        # indices are inclusive (count = ei - si + 1), so si == ei is a valid
        # single-point range; the previous `ei <= si` check wrongly dropped it
        if ei < si:
            return pd.Series()
        offset = si - ref_start_index
        count = ei - si + 1
        with open(uri_data, 'rb') as f:
            f.seek(offset*dtype.itemsize, os.SEEK_SET)
            series = pd.Series(
                np.fromfile(f, dtype=dtype, count=count),
            )
            # align positional index with the full calendar
            series.index += si
        return series

    
class RESTProvider(Provider):
    """RESTful API Provider
    
    Serving data from RESTful API service.
    """
    _domain = 'http://spectatorview.fareast.corp.microsoft.com'
    _uri_data = _domain + '/uidata/{}/{}/data.{}.bin'
    _uri_meta = _domain + '/uidata/{}/{}/meta.{}.bin.json'
    _uri_cal = 'http://file.tushare.org/tsdata/calAll.csv' # TODO
    _uri_inst = 'http://file.tushare.org/tsdata/all.csv' # TODO
    _field_map = {
        'close': 'market/close_price',
        'volume': 'market/trade_volume',
    }
    _dtype_map = {
        'float32': '>f',
        'int': '>i',
        'double': '>d',
        'uint': '>I'
    } # hard-coded, big-endian

    def _load_calendar(self, freq):
        """Fetch the trading calendar over HTTP (only 'day' freq is served)."""
        if freq not in {'day'}:
            raise NotImplementedError
        ret = requests_with_retry(self._uri_cal)
        # CSV rows look like '<date>,<isOpen>'; keep only open days (trailing '1')
        return [pd.Timestamp(x[:10]) for x in ret.content.decode().strip().split('\r\n') if x[-1]=='1']

    def _load_instruments(self, market):
        """Fetch the full instrument list over HTTP (only 'all' market is served)."""
        if market not in {'all'}:
            raise NotImplementedError
        # skip the fixed-size header bytes; payload is GBK-encoded CSV
        ret = requests_with_retry(self._uri_inst)
        data = [x.split(',') for x in ret.content[175:].decode('gbk').strip().split('\r\n')]
        _instruments = dict()
        for row in data:
            # column 15 is the listing date; '0' means not listed
            if row[15] == '0': continue
            # codes starting with '6' are Shanghai, the rest Shenzhen
            inst = 'SH'+row[0] if row[0][0]=='6' else 'SZ'+row[0]
            _instruments.setdefault(inst, []).append((pd.Timestamp(row[15]), pd.Timestamp('2099-12-31')))
        return _instruments

    # @profile # for kernprof
    def _load_feature(self, instrument, field, start_time, end_time, freq):
        """Fetch one feature series over HTTP, indexed by calendar position.

        Returns an empty Series when the requested range does not intersect the
        served range.
        """
        # validate
        if field not in self._field_map:
            raise ValueError('field %s is not supported in this api yet' % field)
        uri_data = self._uri_data.format(instrument[2:], self._field_map[field], freq)
        uri_meta = self._uri_meta.format(instrument[2:], self._field_map[field], freq)
        # calendar
        if freq not in self._calendar_cache:
            self.calendar(freq=freq)
        _calendar, _calendar_index = self._calendar_cache[freq]
        if start_time not in _calendar_index:
            # NOTE: if this method is called from 'self.features', start_time will always be valid
            # and this search branch will be skipped
            pos = bisect.bisect_left(_calendar, start_time)
            if pos == len(_calendar):  # start_time is after the whole calendar
                return pd.Series()
            start_time = _calendar[pos]
        start_index = _calendar_index[start_time]
        if end_time not in _calendar_index:
            pos = bisect.bisect_right(_calendar, end_time)
            if pos == 0:  # end_time is before the whole calendar
                return pd.Series()
            end_time = _calendar[pos-1]
        end_index = _calendar_index[end_time]
        # load
        # TODO: upgrade meta format
        meta = requests_with_retry(uri_meta).json()
        ref_start_index = _calendar_index[pd.Timestamp(meta['start'])] # TODO: check key
        ref_end_index = _calendar_index[pd.Timestamp(meta['end'])] # TODO: check key
        si = max(start_index, ref_start_index)
        ei = min(end_index, ref_end_index)
        # indices are inclusive (count = ei - si + 1), so si == ei is a valid
        # single-point range; the previous `ei <= si` check wrongly dropped it
        if ei < si:
            return pd.Series()
        offset = si - ref_start_index
        count = ei - si + 1
        dtype = np.dtype(self._dtype_map[meta['data_type']]) # TODO: check key
        nbytes = dtype.itemsize
        # TODO: Range header is not supported well
        # headers = {'Range': 'bytes={}-{}'.format(offset*nbytes, (offset+count)*nbytes - 1)}
        # data = requests_with_retry(uri_data, headers=headers).content
        # series = pd.Series(np.frombuffer(data, dtype=dtype))
        ret = requests_with_retry(uri_data)
        series = pd.Series(np.frombuffer(
            ret.content[offset*nbytes:], dtype=dtype, count=count).byteswap().newbyteorder()
        ) # always use little-endian
        # align positional index with the full calendar
        series.index += si
        return series

# global provider as singleton
# NOTE(review): resolved at import time — C.provider holds the name of the
# Provider subclass to use (looked up on the 'qlib.data' module and
# instantiated immediately), so importing this module has side effects
D = getattr(importlib.import_module('.data', package='qlib'), C.provider)()
