# -*- coding: UTF-8 -*-
import sys

from pymongo import MongoClient
from datetime import datetime
import pandas as pd
import time
import traceback
import numpy as np

from core.utils import MailSender
from core.xgboost_model import XGBoostModel
import requests
import json
import logging

# Widen pandas console/log output so DataFrames logged below are not truncated.
desired_width = 320
pd.set_option('display.width', desired_width)
pd.set_option('display.max_columns', 20)


def post_signal_retry(signal_data):
    """POST *signal_data* to the realtime endpoint, retrying up to 3 times.

    Returns as soon as the server answers HTTP 200 with resultCode == 0.
    Gives up silently (logged only) after the retries are exhausted.

    :param signal_data: JSON-serializable payload for the /qt/realtime endpoint.
    """
    post_retry_count = 3
    post_retry_interval_sec = 1
    urlstr = 'http://39.104.227.148:8999/qt/realtime'
    logging.info('post_signal_retry param : %s', str(signal_data))
    for attempt in range(post_retry_count):
        try:
            r = requests.post(urlstr, json=signal_data, timeout=1)
            if r.status_code == 200:
                res_json = r.json()
                logging.info('post_signal_retry status_code: %d, result: %s', r.status_code, res_json)
                if res_json['resultCode'] == 0:
                    logging.info('post_signal_retry success')
                    return
        except (requests.RequestException, ValueError) as e:
            # BUG FIX: the original let timeouts/connection errors propagate,
            # so the function never actually retried on the network failures
            # it exists to absorb.  ValueError covers a non-JSON 200 body.
            logging.error('post_signal_retry request error: %s', e)
        logging.info('post_signal_retry failed, retry post!')
        time.sleep(post_retry_interval_sec)


class PredictorControl:
    """Produces trading signals on a fixed schedule.

    Pulls block features and OKEx BTC/USDT prices from MongoDB, merges them
    into a feature matrix, runs a saved XGBoost model, stores the resulting
    signal in MongoDB and POSTs it to the realtime endpoint.
    """

    def __init__(self, ip, xgboost_model_path, window, predict_type,
                 sequence_length=72,
                 retry_count=20, retry_interval_sec=3,
                 product_feature_delay_second=60):
        """
        :param ip: MongoDB host; prices on :27018, features/signals on :27019.
        :param xgboost_model_path: path of the saved XGBoost model file.
        :param window: prediction window in minutes.
        :param predict_type: 'return' or 'volatility'.
        :param sequence_length: number of feature rows fed to the model.
        :param retry_count: max retries for the feature/price queries.
        :param retry_interval_sec: sleep between block-feature retries.
        :param product_feature_delay_second: offset used in failure logging.
        """
        self.price_db = MongoClient('mongodb://%s:27018/' % ip)['exchange']
        self.features_db = MongoClient('mongodb://%s:27019/' % ip)['features']
        self.predict_signal_db = MongoClient('mongodb://%s:27019/' % ip)['predict_signal']
        self.block_feature_collection = 'block_%s' % window
        self.okex_price_collection = 'okex_btc_usdt'
        self.return_volatility_coll = '%s_%s_%s' % (predict_type, window, sequence_length)
        self.xgboost_model_path = xgboost_model_path
        self.window = window
        self.predict_type = predict_type
        self.price_feature_column = 'price_feature'
        # one row is consumed by the leading/trailing trim below, hence the -1
        self.sequence_length = sequence_length - 1
        self.retry_count = retry_count
        self.retry_interval_sec = retry_interval_sec
        self.product_feature_delay_second = product_feature_delay_second

    def get_block_features_retry(self, beg_timestamp, end_timestamp):
        """Fetch block features whose _id (seconds) lies in [beg, end].

        Retries up to self.retry_count times until the record for
        end_timestamp has arrived.  Returns an empty DataFrame when the
        retries are exhausted.
        """
        i = 0
        if self.predict_type == 'volatility':
            # volatility needs one extra leading window of history
            beg_timestamp -= self.window * 60
        param = {'_id': {'$gte': beg_timestamp, '$lte': end_timestamp}}
        while i < self.retry_count:
            logging.info(
                'block: begin utc time: %s, end utc time: %s, now local time: %s, get_block_features find param: %s',
                datetime.utcfromtimestamp(beg_timestamp), datetime.utcfromtimestamp(end_timestamp),
                str(datetime.now()), str(param))
            block_features = self.features_db[self.block_feature_collection].find(param).sort('_id', 1)
            df = pd.DataFrame(list(block_features))

            # BUG FIX: guard against an empty result — the original indexed
            # df[-1:] unconditionally, so an empty query raised IndexError and
            # aborted the retry loop instead of retrying.
            if not df.empty and df[-1:]['_id'].values[0] == end_timestamp:
                logging.info('block: feature df shape: %s ', df.shape)
                logging.info('block: feature  [0]:\n %s', df[0:1])
                logging.info('block: feature [-1]:\n %s', df[-3:])
                logging.info('block: time list: %s', df['time'].tolist())
                logging.info('block: time list size: %s', len(df['time']))

                if self.predict_type == 'volatility':
                    # drop the extra leading row and the trailing row
                    return df[1:-1]
                else:
                    return df[1:]
            logging.info('block: get block feature failed, retry i: %d, timestamp: %d', i, end_timestamp)
            i += 1
            time.sleep(self.retry_interval_sec)
        return pd.DataFrame()

    def get_price_features_retry(self, beg_timestamp, end_timestamp):
        """Fetch per-minute OKEx prices for [beg, end] (seconds; _ids are ms).

        Missing minutes are forward-filled; more than 3 missing minutes
        triggers an alert e-mail and raises.  Returns the price DataFrame
        down-sampled to one row per window, with the model target column
        self.price_feature_column added.
        """
        i = 0
        param = {'_id': {'$lte': end_timestamp * 1000, '$gte': beg_timestamp * 1000}}
        logging.info(
            'price: begin utc time: %s, end utc time: %s, now local time: %s, get_predict_features_retry param: %s',
            datetime.utcfromtimestamp(int(beg_timestamp)),
            datetime.utcfromtimestamp(int(end_timestamp)),
            str(datetime.now()), str(param))

        df = None
        while i < self.retry_count:
            price_iter = self.price_db[self.okex_price_collection].find(param).sort('_id', 1)
            df = pd.DataFrame(list(price_iter))

            # BUG FIX: an empty result made df[-1:]['_id'].values[0] raise
            # IndexError instead of retrying.
            if df.empty or df[-1:]['_id'].values[0] != end_timestamp * 1000:
                logging.info('price: last item: %s ', df[-1:])
                logging.info('price: end timestamp: %d', end_timestamp)
                logging.info('price: get signal feature failed, retry i: %d, timestamp: %d', i, end_timestamp)
                time.sleep(5)
                i += 1
            else:
                break

        # expected minute-grid timestamps, in milliseconds
        timestamp_list = [ts * 1000 for ts in range(int(beg_timestamp), int(end_timestamp) + 1, 60)]
        time_df = pd.DataFrame(timestamp_list, columns=['_id'])

        diff_time_items = sorted(set(timestamp_list).difference(set(df['_id'].tolist())))

        if len(diff_time_items) != 0:
            logging.info('price loss count: %d, loss detail: %s', len(diff_time_items), diff_time_items)
            df = pd.merge(time_df, df, left_on='_id', right_on='_id', how='left')
            # BUG FIX: fillna returns a new DataFrame; the original discarded
            # both results, leaving NaNs in the merged frame.
            df = df.ffill().fillna(0)
            local_time_list = [str(datetime.fromtimestamp(e / 1000)) for e in diff_time_items]
            email_content = '缺失时间戳: ' + str(diff_time_items) + '\n' + '北京时间: ' + str(local_time_list) + '\n'
            if len(diff_time_items) <= 3:
                # within tolerance: log only (the warning e-mail is intentionally disabled)
                logging.info('loss email_content: %s', email_content)
            else:
                MailSender().send_email(mail_content=email_content,
                                        mail_title='ERROR:【%s-%d-%d-价格数据缺失-容错范围外】' % (
                                            self.predict_type, self.window, self.sequence_length))
                raise Exception(
                    'ERROR:【%s-%d-%d-价格数据缺失-容错范围外】' % (self.predict_type, self.window, self.sequence_length))
        df['time'] = pd.to_datetime(df['_id'], unit='ms').astype(str)
        df['timestamp_sec'] = (df['_id'] / 1000).astype(int)

        if self.predict_type == 'volatility':
            # target: rolling std of close over the window, shifted forward
            df[self.price_feature_column] = df['close'].rolling(self.window).std().shift(-self.window)
        elif self.predict_type == 'return':
            df[self.price_feature_column] = df['close']

        # keep one row per window
        df = df[::self.window]
        logging.info('price: feature df shape: %s', df.shape)
        logging.info('price: feature  [0]:\n %s', df[0:3])
        logging.info('price: feature [-1]:\n %s', df[-3:])
        logging.info('price: time list: %s', df['time'].tolist())
        logging.info('price: time list size: %s', len(df['time']))
        if self.predict_type == 'volatility':
            df = df[:-1]
        elif self.predict_type == 'return':
            df = df[1:]
        return df

    def product_signal(self, end_timestamp, test=False, store_db=True, post_signal=True):
        """Build features ending at *end_timestamp*, predict, store and post.

        :param end_timestamp: aligned end-of-window timestamp (seconds).
        :param test: also insert the signal into a 'test_'-prefixed collection.
        :param store_db: insert the signal into the signal collection if new.
        :param post_signal: POST the signal to the realtime endpoint.
        """
        logging.info('time arrive, run task, timestamp: %d, utc: %s, local: %s',
                     end_timestamp, datetime.utcfromtimestamp(end_timestamp), datetime.fromtimestamp(end_timestamp))
        begin_timestamp = end_timestamp - self.sequence_length * (self.window * 60)
        feature_df = self.get_block_features_retry(begin_timestamp, end_timestamp)
        price_df = self.get_price_features_retry(begin_timestamp, end_timestamp)
        logging.info('block: time list: %s', feature_df['time'].tolist())
        logging.info('price: time list: %s', price_df['time'].astype(str).tolist())
        logging.info('block: time list size: %s', len(feature_df['time'].tolist()))
        logging.info('price: time list size: %s', len(price_df['time'].astype(str).tolist()))
        feature_df.reset_index(inplace=True, drop=True)
        price_df.reset_index(inplace=True, drop=True)
        # row-wise join: both frames are assumed aligned one row per window
        df = pd.merge(feature_df, price_df, left_index=True, right_index=True, suffixes=('_block', '_price'))
        column_names = [self.price_feature_column, 'block_total_tx_count', 'block_total_value', 'block-hist-1',
                        'block-hist-2', 'block-hist-3', 'block-hist-4', 'block-hist-5', 'block-hist-6', 'block-hist-7',
                        'block-hist-8', 'block-hist-9', 'block-hist-10', 'block-hist-11']
        df = df.get(column_names)
        model = XGBoostModel()
        logging.info('used feature df[0]: \n %s', df[0: 3])
        logging.info('used feature df[-1]: \n %s', df[-3:])
        # hoist the feature matrix; the original rebuilt np.array(df) five times
        features = np.array(df)
        res = model.load_model_and_predict(model_path=self.xgboost_model_path, test_features=features)

        predict_res = float(res)
        next_price = 0
        predict_signal = 0
        if self.predict_type == 'return':
            # model output is a relative move against the first row's value
            next_price = (predict_res + 1) * features[0][0]
            predict_signal = (next_price - features[-1][0]) / features[-1][0]
        elif self.predict_type == 'volatility':
            predict_signal = (predict_res + 1) * features[0][0]
        logging.info('y0: %s', features[0][0])
        logging.info('predict_result: %s', predict_res)
        logging.info('predict %s: %s', self.predict_type, predict_signal)

        signal_store_db = {'_id': end_timestamp,
                           'predict_time': str(datetime.fromtimestamp(int(time.time()))),
                           'predict_local_time': str(datetime.fromtimestamp(end_timestamp)),
                           'predict_result': predict_res,
                           'next_price': next_price,
                           'predict_%s' % self.predict_type: predict_signal,
                           }
        logging.info('product signal: %s', str(signal_store_db))
        if test:
            self.predict_signal_db['test_' + self.return_volatility_coll].insert_one(signal_store_db)

        if store_db:
            # insert only when this timestamp has not been stored yet
            curr_signal = self.predict_signal_db[self.return_volatility_coll].find_one({'_id': end_timestamp})
            if curr_signal is None:
                self.predict_signal_db[self.return_volatility_coll].insert_one(signal_store_db)

        # NOTE(review): sequence lengths 131/132 are tagged as model version
        # v2 — confirm against the consumer of the realtime endpoint.
        ver = 'v1'
        logging.info('self.sequence_length : %d', self.sequence_length)
        if self.sequence_length in (131, 132):
            ver = 'v2'

        signal_data = {"topic": "okex", "data": [{"predict_signal": predict_signal, "timestamp": end_timestamp,
                                                  "window": self.window, "type": self.predict_type, "version": ver}]}
        if post_signal:
            post_signal_retry(signal_data)
        logging.info('\n\n')

    def run_push_signal_task(self):
        """Loop forever: every 120 s produce a signal for the last aligned window.

        NOTE(review): the trigger is hard-coded to 120 s (the window-based
        check is commented out below) — confirm this matches the configured
        window before changing it.
        """
        while True:
            now_timestamp = int(time.time())
            try:
                # if now_timestamp % self.window != self.product_feature_delay_second:
                if now_timestamp % 120 != 0:
                    if now_timestamp % 10 == 0:
                        logging.info(
                            'timestamp: %d, utc: %s, local: %s, time not arrive to run task, sleep waiting ... ',
                            now_timestamp, datetime.utcfromtimestamp(now_timestamp),
                            datetime.fromtimestamp(now_timestamp))
                else:
                    # align down to the start of the current window
                    self.product_signal(now_timestamp - (now_timestamp % (self.window * 60)))
                sys.stdout.flush()
            except Exception as e:
                logging.error('product signal error: %s', e)
                logging.info(
                    'product signal failed, timestamp: %s', (now_timestamp - self.product_feature_delay_second))
                logging.info('product signal failed, utc   time: %s', datetime.utcfromtimestamp(
                    now_timestamp - self.product_feature_delay_second))
                logging.info('product signal failed, local time: %s', datetime.fromtimestamp(
                    now_timestamp - self.product_feature_delay_second))
                traceback.print_exc()
            time.sleep(1)


def main():
    """CLI entry point: parse args, configure logging, start the signal loop.

    Usage: predict_control.py model_file_path predict_type window sequence_length
    """
    try:
        # BUG FIX: the script reads sys.argv[1..4], so 5 entries (including
        # the program name) are required; the original checked `< 4` and
        # crashed with IndexError when exactly 3 arguments were supplied.
        if len(sys.argv) < 5:
            # BUG FIX: the original usage line had an unfilled '%s' placeholder.
            print('[%s] model_file_path predict_type window sequence_length' % sys.argv[0])
            print(
                'example: ./predict_control.py ./saved_models/return-60-20180601-20181201.xgboost return 30 72')
            sys.exit()

        xgboost_model_path = sys.argv[1]
        predict_type = sys.argv[2]
        window = int(sys.argv[3])
        sequence_length = int(sys.argv[4])

        LOG_FORMAT = '%(asctime)s - %(levelname)s - %(message)s'
        DATE_FORMAT = '%Y-%m-%d %H:%M:%S'
        log_file_path = 'logs/' + predict_type + '_' + str(window) + '_' + str(sequence_length) + '.log'
        logging.basicConfig(filename=log_file_path, level=logging.INFO, format=LOG_FORMAT, datefmt=DATE_FORMAT)

        print('Argv: model path     : %s' % xgboost_model_path)
        print('Argv: predict type   : %s' % predict_type)
        print('Argv: window         : %d' % window)
        print('Argv: sequence length: %d' % sequence_length)
        print('Argv: log_file_path: %s' % log_file_path)

        p = PredictorControl(ip='39.104.227.148', xgboost_model_path=xgboost_model_path, window=window,
                             predict_type=predict_type, sequence_length=sequence_length)
        p.run_push_signal_task()
    except Exception as e:
        # BUG FIX: the original passed `e` with no matching placeholder in
        # the format string, which logging reports as a formatting error.
        logging.error('error: predict task failed, please check!!! %s', e)
        traceback.print_exc()


# Script entry point.
if __name__ == '__main__':
    main()
