# -*- coding: UTF-8 -*-
import datetime

import pandas as pd
import xgboost as xgb
import numpy as np
from datetime import datetime, timezone

# Configure pandas console output size so wide DataFrames print without truncation
from pymongo import MongoClient

# Widen pandas console printing: 320-char rows, up to 20 columns shown.
desired_width = 320
pd.options.display.width = desired_width
pd.options.display.max_columns = 20


class XGBoostModel:
    """Thin wrapper around an xgboost regressor for sliding-window time-series data.

    Each training sample is a window of ``sequence_length`` consecutive rows:
    the first ``sequence_length - 1`` rows (flattened) are the features and
    column 0 of the last row is the label.  Windows can optionally be
    normalised so every column is expressed relative to its first non-zero
    value ("base value of zero").
    """

    def __init__(self):
        pass

    def train_and_save_model(self, data_train, sequence_length, window, model_name,
                             normalise=True, save_dir='../saved_models/test_1201',
                             tag='20181201'):
        """Train an XGBRegressor on sliding windows of ``data_train`` and persist it.

        Args:
            data_train: 2-D numpy array, rows ordered in time; column 0 holds the target.
            sequence_length: rows per training window (``sequence_length - 1``
                rows become features, the last row supplies the label).
            window: sampling window size — only used in the saved file name.
            model_name: prefix for the saved file name.
            normalise: if True, rebase each window via ``_normalise_windows``.
            save_dir: output directory (previously hard-coded; default preserves it).
            tag: date tag in the file name (previously hard-coded; default preserves it).

        Returns:
            Path of the saved model file.
        """
        train_features, train_labels = self._get_train_data(data_train, sequence_length, normalise)
        # Flatten each (sequence_length - 1, n_cols) window into one feature row.
        train_features = train_features.reshape(train_features.shape[0],
                                                train_features[0].shape[0] * train_features[0].shape[1])
        # NOTE(review): "reg:linear" and the nthread/seed/silent aliases are
        # deprecated in recent xgboost releases (use "reg:squarederror",
        # n_jobs, random_state, verbosity); kept as-is for compatibility with
        # models already trained under these settings — confirm before upgrading.
        xg_reg = xgb.XGBRegressor(learning_rate=0.1, n_estimators=100,
                                  max_depth=5, min_child_weight=1,
                                  gamma=0, subsample=0.8,
                                  colsample_bytree=0.8, objective="reg:linear",
                                  nthread=-1, scale_pos_weight=1, seed=27, silent=False)
        xg_reg.fit(train_features, train_labels)
        saved_model_path = '%s/%s-%d-%d-%s.xgb' % (save_dir, model_name, window, sequence_length, tag)
        print('Finished fit model. Model saved to : %s' % saved_model_path)
        xg_reg.save_model(saved_model_path)
        return saved_model_path

    def load_model_and_predict(self, model_path, test_features, normalise=True):
        """Load a saved model and predict on one feature window.

        Args:
            model_path: path produced by ``train_and_save_model``.
            test_features: 2-D window of ``sequence_length - 1`` rows.
            normalise: apply the same per-window normalisation used in training.

        Returns:
            Array with a single prediction (normalised scale when ``normalise``).
        """
        xg_reg = xgb.XGBRegressor()
        booster = xgb.Booster()
        booster.load_model(model_path)
        # HACK: inject the raw booster into the sklearn wrapper so that
        # `predict` works without refitting.
        xg_reg._Booster = booster
        test_features = self._normalise_windows(test_features, single_window=True)[0] if normalise else test_features
        test_features = np.array(test_features)
        # Flatten the window exactly like the training features.
        test_features = test_features.reshape(1, test_features.shape[0] * test_features.shape[1])
        return xg_reg.predict(test_features)

    def _get_train_data(self, data_train, sequence_length, normalise):
        """Slice ``data_train`` into overlapping windows of features and labels.

        Returns:
            (features, labels): features has shape
            (n_windows, sequence_length - 1, n_cols); labels has shape
            (n_windows, 1) and is column 0 of each window's last row.
        """
        data_x = []
        data_y = []
        for i in range(len(data_train) - sequence_length):
            window = data_train[i:i + sequence_length]
            window = self._normalise_windows(window, single_window=True)[0] if normalise else window
            data_x.append(window[:-1])
            data_y.append(window[-1, [0]])
        return np.array(data_x), np.array(data_y)

    @staticmethod
    def _normalise_windows(window_data, single_window=False):
        """Normalise window(s) with a base value of zero.

        Each column is rebased to ``value / base - 1`` where ``base`` is the
        column's first non-zero entry.  An all-zero column is left as zeros
        (bug fix: the original scan walked past the end of such a column and
        raised IndexError).
        """
        normalised_data = []
        window_data = [window_data] if single_window else window_data
        for window in window_data:
            normalised_window = []
            for col_i in range(window.shape[1]):
                col = window[:, col_i]
                # The first non-zero value is the normalisation base.
                base = next((float(v) for v in col if float(v) != 0), None)
                if base is None:
                    # All-zero column: leave it at zero instead of crashing.
                    normalised_col = [0.0 for _ in col]
                else:
                    normalised_col = [((float(p) / base) - 1) for p in col]
                normalised_window.append(normalised_col)
            # Transpose back into the original (rows, cols) layout.
            normalised_window = np.array(normalised_window).T
            normalised_data.append(normalised_window)
        return np.array(normalised_data)


def train_and_predict_demo():
    """End-to-end demo: build a return signal from CSV prices, merge block
    features, train a model, and predict the first window."""
    data = pd.read_csv('../data/okex/OKEX_BTC_20180601_20181206.csv')
    signal = 'return'
    sampling_frequency = 60
    window = 60
    price_column = 'close'
    if signal == 'return':
        # Forward return over `window` ticks, measured from the next tick.
        data['return'] = (data[price_column].shift(-window) - data[price_column].shift(-1)) \
                         / data[price_column].shift(-1)
    elif signal == 'volatility':
        data['volatility'] = data[price_column].rolling(window).std().shift(-window)
    data = data[::sampling_frequency]  # down-sample: one row per sampling period
    data = data[:-1]  # drop the last row, whose shifted target is NaN
    data = data.get(['time', signal])
    feature = pd.read_csv('../data/features/block_0601_1207-feature-60.csv')
    data = data.merge(feature, on='time', how='left')
    del data['time']

    xgboost_model = XGBoostModel()
    model_path = xgboost_model.train_and_save_model(sequence_length=72, window=window, data_train=data.values,
                                                    model_name=signal)
    # A 72-row training window uses rows 0..70 as features and row 71 as its
    # label, so the prediction made from values[0:71] corresponds to row 71.
    predict_res = xgboost_model.load_model_and_predict(model_path=model_path, test_features=data.values[0:71])
    print('predict result: %s' % predict_res[0])
    # Bug fix: the real label of the first window is row 71, not row 72.
    print('real result: %s' % data.values[71][0])
    # NOTE(review): this un-normalisation assumes values[0][0] is the base of
    # column 0, which only holds when that entry is non-zero — confirm against
    # XGBoostModel._normalise_windows.
    print('un-normalise result: %s' % ((predict_res[0] + 1) * data.values[0][0]))


def get_price_features(price_db, predict_type, window, beg_timestamp, end_timestamp,
                       price_feature_column='price_feature'):
    """Load OKEX BTC/USDT prices from MongoDB and derive the price feature column.

    Args:
        price_db: MongoDB database holding the 'okex_btc_usdt' collection,
            keyed by millisecond timestamps in '_id'.
        predict_type: 'return' (feature = close price) or 'volatility'
            (feature = rolling std of close, shifted back by `window`).
        window: rolling/sampling window size in rows.
        beg_timestamp: inclusive lower bound, in SECONDS.
        end_timestamp: exclusive upper bound, in SECONDS.
        price_feature_column: name of the derived feature column.

    Returns:
        DataFrame sampled every `window` rows with 'time', 'timestamp_sec'
        and the derived feature column.

    Raises:
        ValueError: if `predict_type` is not 'return' or 'volatility'.
    """
    # Validate before touching the database.  Bug fix: the original printed a
    # message and called exit() — killing the whole process from library code —
    # and only after the query had already run.
    if predict_type not in ('return', 'volatility'):
        raise ValueError('predict type error, get price feature failed: %r' % (predict_type,))

    # '_id' is stored in milliseconds; convert the second-based bounds.
    param = {'_id': {'$lt': end_timestamp * 1000, '$gte': beg_timestamp * 1000}}
    print('price find param: ', param)

    price_iter = price_db['okex_btc_usdt'].find(param).sort('_id', 1)
    price_feature_list = list(price_iter)
    df = pd.DataFrame(price_feature_list)
    df['time'] = pd.to_datetime(df['_id'], unit='ms').astype(str)
    df['timestamp_sec'] = (df['_id'] / 1000).astype(int)
    if predict_type == 'return':
        df[price_feature_column] = df['close']
    else:  # 'volatility' — guaranteed by the validation above
        df[price_feature_column] = df['close'].rolling(window).std().shift(-window)
    df = df[::window]
    return df


def train_from_db(ip, predict_type, window, sequence_length, begin_utctime, end_utctime, debug=False):
    """Pull price and block features from MongoDB, join them, and train a model.

    Args:
        ip: MongoDB host (block features on :27019, prices on :27018).
        predict_type: 'return' or 'volatility' — also used as the model name.
        window: sampling/rolling window size in rows.
        sequence_length: rows per training window for XGBoostModel.
        begin_utctime, end_utctime: UTC bounds as '%Y-%m-%d %H:%M:%S' strings.
        debug: when True, also select and print 'time' / '_id_block' columns.
    """
    begin_timestamp = int(
        datetime.strptime(begin_utctime, '%Y-%m-%d %H:%M:%S').replace(tzinfo=timezone.utc).timestamp())
    end_timestamp = int(
        datetime.strptime(end_utctime, '%Y-%m-%d %H:%M:%S').replace(tzinfo=timezone.utc).timestamp())

    # Block features are keyed by second-based timestamps in '_id'.
    features_db = MongoClient('mongodb://%s:27019/' % ip)['features']
    param = {'_id': {'$lt': end_timestamp, '$gte': begin_timestamp}}
    feature_cursor = features_db['block_%s' % window].find(param)
    block_df = pd.DataFrame(list(feature_cursor))
    print(block_df)

    price_db = MongoClient('mongodb://%s:27018/' % ip)['exchange']
    price_df = get_price_features(price_db, predict_type, window, begin_timestamp, end_timestamp)

    data = price_df.merge(block_df, on='time', how='left', suffixes=('_price', '_block'))

    price_feature_column = 'price_feature'
    column_names = [price_feature_column, 'block_total_tx_count', 'block_total_value', 'block-hist-1',
                    'block-hist-2', 'block-hist-3', 'block-hist-4', 'block-hist-5', 'block-hist-6', 'block-hist-7',
                    'block-hist-8', 'block-hist-9', 'block-hist-10', 'block-hist-11']
    if debug:
        column_names.insert(0, 'time')
        column_names.insert(1, '_id_block')
    feature_df = data.get(column_names)
    print(feature_df)
    # Bug fix: 'time' and '_id_block' are only selected in debug mode; the
    # original deleted them unconditionally and raised KeyError when
    # debug=False.
    if debug:
        del feature_df['time']
        del feature_df['_id_block']

    xgboost_model = XGBoostModel()
    model_path = xgboost_model.train_and_save_model(sequence_length=sequence_length, window=window,
                                                    data_train=feature_df.values, model_name=predict_type)
    print(model_path)


if __name__ == '__main__':
    begin_utctime = '2018-06-01 00:00:00'
    end_utctime = '2018-12-01 00:00:00'
    server_ip = '39.104.227.148'

    # (predict_type, window, sequence_length) combinations to train, in order:
    # return v1 at windows 15/30/60, return v2 (longer sequences) at 15/30,
    # then volatility v1 at 15/30.
    configurations = (
        ('return', 15, 72),
        ('return', 30, 72),
        ('return', 60, 72),
        ('return', 15, 132),
        ('return', 30, 132),
        ('volatility', 15, 72),
        ('volatility', 30, 72),
    )

    for predict_type, window, sequence_length in configurations:
        train_from_db(server_ip, predict_type, window, sequence_length,
                      begin_utctime, end_utctime, debug=True)
