# -*- coding: UTF-8 -*-

import multiprocessing

import pandas as pd
import numpy as np
from exchangeFeatureGenerator import get_exchange_features
from blockFeatureGenerator import get_block_feature

multi_processing_count = 5  # 多进程个数


def get_exchange_set(exchange_name, start_time, end_time, out_path, step_minute, value_threshold,
                     maker_threshold, histogram_threshold, bins):
    """
    Compute on-chain features for one exchange over [start_time, end_time),
    fanning the time range out across ``multi_processing_count`` worker processes.

    :param exchange_name: exchange name; transactions are read from ../data/<name>.csv
    :param start_time: start time, "%Y-%m-%d %H:%M:%S" (trades may only appear some time after it)
    :param end_time: end time, same format
    :param out_path: directory the feature CSV is written into
    :param step_minute: feature window granularity, in minutes
    :param value_threshold: threshold for "special value" features
    :param maker_threshold: market-maker threshold
    :param histogram_threshold: histogram clipping threshold
    :param bins: number of histogram bins
    :return: None; writes <out_path><exchange_name>-feature-<step_minute>.csv
    """
    start_timestamp = int(pd.to_datetime(start_time, format="%Y-%m-%d %H:%M:%S").value / 1e9)
    end_timestamp = int(pd.to_datetime(end_time, format="%Y-%m-%d %H:%M:%S").value / 1e9)
    exchange_tx_data_path = '../data/' + exchange_name + '.csv'
    # header=None: the CSV has no header row. (The old header=-1 spelling was
    # removed from pandas and now raises.)
    exchange_tx_data = pd.read_csv(exchange_tx_data_path, header=None,
                                   names=['time', 'value', 'tag', 'address'])
    exchange_tx_data['timestamp'] = pd.to_datetime(exchange_tx_data['time'], format="%Y%m%d%H%M%S").values.astype(
        np.int64) // 10 ** 9

    jobs = []
    manager = multiprocessing.Manager()
    return_dict = manager.dict()

    # Integer division keeps the per-worker boundaries integral epoch seconds;
    # the last worker absorbs the remainder by running through end_timestamp.
    window_size = (end_timestamp - start_timestamp) // multi_processing_count

    for i in range(multi_processing_count):
        win_start = start_timestamp + i * window_size
        win_end = win_start + window_size if i < (multi_processing_count - 1) else end_timestamp
        sub_process = multiprocessing.Process(target=get_exchange_features,
                                              args=[exchange_tx_data, win_start, win_end,
                                                    step_minute, return_dict, i, value_threshold,
                                                    maker_threshold, histogram_threshold, bins])
        jobs.append(sub_process)
        sub_process.start()

    for feature_job in jobs:
        feature_job.join()

    # Index the shared dict by worker id: .values() on a manager DictProxy has
    # arbitrary ordering, which would silently shuffle the time windows.
    result_pd = pd.concat([return_dict[i] for i in range(multi_processing_count)])

    result_pd.round(3).to_csv(out_path + exchange_name + '-feature-' + str(step_minute) + '.csv', index=False)


def get_all_network_set(data_name, start_time, end_time, out_path, step_minute, histogram_threshold, bins):
    """
    Compute network-wide block-info features over [start_time, end_time),
    fanning the time range out across ``multi_processing_count`` worker processes.

    :param data_name: block-info dataset name; read from ../data/<data_name>.csv
    :param start_time: start time, "%Y-%m-%d %H:%M:%S" (trades may only appear some time after it)
    :param end_time: end time, same format
    :param out_path: directory the feature CSV is written into
    :param step_minute: feature window granularity, in minutes
    :param histogram_threshold: histogram clipping threshold
    :param bins: number of histogram bins
    :return: None; writes <out_path><data_name>-feature-<step_minute>.csv
    """
    start_timestamp = int(pd.to_datetime(start_time, format="%Y-%m-%d %H:%M:%S").value / 1e9)
    end_timestamp = int(pd.to_datetime(end_time, format="%Y-%m-%d %H:%M:%S").value / 1e9)
    block_info = pd.read_csv('../data/' + data_name + '.csv')
    block_info['timestamp'] = pd.to_datetime(block_info['time'], format="%Y%m%d%H%M%S").values.astype(
        np.int64) // 10 ** 9

    jobs = []
    manager = multiprocessing.Manager()
    return_dict = manager.dict()

    # Integer division keeps the per-worker boundaries integral epoch seconds;
    # the last worker absorbs the remainder by running through end_timestamp.
    window_size = (end_timestamp - start_timestamp) // multi_processing_count

    for i in range(multi_processing_count):
        win_start = start_timestamp + i * window_size
        win_end = win_start + window_size if i < (multi_processing_count - 1) else end_timestamp
        sub_process = multiprocessing.Process(target=get_block_feature,
                                              args=[block_info, win_start, win_end, step_minute,
                                                    return_dict, i, histogram_threshold, bins])

        jobs.append(sub_process)
        sub_process.start()

    for feature_job in jobs:
        feature_job.join()

    # Index the shared dict by worker id: .values() on a manager DictProxy has
    # arbitrary ordering, which would silently shuffle the time windows.
    result_pd = pd.concat([return_dict[i] for i in range(multi_processing_count)])

    result_pd.round(3).to_csv(out_path + data_name + '-feature-' + str(step_minute) + '.csv', index=False)


def get_out_of_chain_set(path, step_minute, out_path, price_header):
    """
    Derive off-chain features (return, volatility, rolling volume/price stats)
    from an exchange price/volume CSV and write them to out_path.

    :param path: input CSV path (must contain 'time', 'volume' and the price column)
    :param step_minute: rolling window size
    :param out_path: output CSV path
    :param price_header: name of the price column to use
    :return: None; writes the feature CSV to out_path
    """
    data = pd.read_csv(path)
    price = data[price_header]
    volume = data['volume']

    # Return over the window: base price is step_minute - 1 rows back, i.e. the
    # first row covered by a rolling(step_minute) window ending here.
    base_price = price.shift(step_minute - 1)
    data['return'] = (price - base_price) / base_price

    price_window = price.rolling(step_minute)
    volume_window = volume.rolling(step_minute)
    data['volatility'] = price_window.std()
    data['volume-sum'] = volume_window.sum()
    data['volume-std'] = volume_window.std()
    data['volume-skew'] = volume_window.skew()
    data['price-std'] = price_window.std()
    data['price-skew'] = price_window.skew()
    data['price-mean'] = price_window.mean()

    # Leading rows have no full window yet; zero them out like the rest of the file.
    data.fillna(0, inplace=True)
    feature_columns = ['time', 'return', 'volatility', 'volume-sum', 'volume-std', 'volume-skew',
                       'price-std', 'price-skew', 'price-mean']
    data.to_csv(out_path, columns=feature_columns, index=False)
    print('finished process out of chain information')


if __name__ == '__main__':
    output_dir = '../data/features/'
    begin = '2018-01-01 00:00:00'
    finish = '2018-12-01 00:00:00'
    window = 60 * 24  # one day, expressed in minutes
    # Example: per-exchange on-chain feature generation
    # for name in ['okex_11']:
    #     get_exchange_set(exchange_name=name, out_path=output_dir, start_time=begin, end_time=finish,
    #                      step_minute=window, value_threshold=1000, maker_threshold=10, histogram_threshold=10, bins=10)
    # Network-wide feature sets: (dataset name, histogram threshold)
    for dataset, threshold in [('omni', 100000), ('block', 100)]:
        get_all_network_set(data_name=dataset, out_path=output_dir, step_minute=window,
                            start_time=begin, end_time=finish,
                            histogram_threshold=threshold, bins=10)
    # get_out_of_chain_set('../data/OkEX_BTC_USD_quarter_20180601_20181101.csv', 10,
    #                      output_dir + 'okex-price-features.csv', 'close')
