import configparser
import multiprocessing
import random
import threading
import time

import numpy as np
import tensorflow as tf
from tensorflow.python.data.experimental.ops.testing import sleep
from tqdm import tqdm

from stock_a.common.db import stock_info
from stock_a.common.utils import os_util
from stock_a.feature import feature_pb2
from stock_a.feature import feature_resigteror

class StockSampleGenerator:
    """Builds TFRecord training samples by joining per-stock feature and label
    rows from MySQL, fanned out across one worker thread per CPU.

    Each worker writes its own file, ``{idx}_stock_sample.tfrecords``, under
    the directory configured at ``[sample] tf_record_dir_path`` (resolved
    relative to the project root).
    """

    def __init__(self):
        # NOTE(review): the config path is relative to the current working
        # directory — this assumes the process is launched from a sibling of
        # ../config; confirm against the entry point.
        self.quant_config = configparser.ConfigParser()
        self.quant_config.read('../config/quant_config.ini')
        # Instantiate every registered feature generator once; they are shared
        # read-only by all worker threads.
        self.feature_generators = [
            generator_cls(None, None)
            for generator_cls in feature_resigteror.get_feature_generator_class_list()
        ]
        # One worker per CPU. Each worker owns exactly one slot of the
        # progress list, so the counters need no lock.
        self.task_num = multiprocessing.cpu_count()
        self.__task_progress__list = [0] * self.task_num

    def generate_stock_sample(self, start_date, end_date):
        """Collect every (code, date) pair with features in the date range and
        write the joined feature/label samples out as TFRecords in parallel.

        Args:
            start_date: inclusive lower bound for the feature ``date`` column.
            end_date: inclusive upper bound.
        """
        db = stock_info.new_db_conn()
        cursor_feature = db.cursor()
        try:
            code_date_list = []
            # Feature rows are sharded across 20 tables: stock_feature_day_0..19.
            for shard in tqdm(range(20), '样本数据收集'):
                feature_table_name = 'stock_feature_day_{}'.format(shard)
                # The table name comes from a trusted loop index; the date
                # bounds are parameterized.
                cursor_feature.execute(
                    'SELECT `code`, `date` FROM {} WHERE (`date` BETWEEN %s AND %s)'.format(feature_table_name),
                    [start_date, end_date])
                code_date_list += cursor_feature.fetchall()
        finally:
            # Fix: the original leaked the cursor/connection if a query raised.
            cursor_feature.close()
            db.close()

        if len(code_date_list) == 0:
            return
        # Shuffle so every worker gets a representative mix of stocks/dates.
        random.shuffle(code_date_list)
        sub_code_date_list_list = np.array_split(code_date_list, self.task_num)
        all_task_list = []

        for idx in range(len(sub_code_date_list_list)):
            task = threading.Thread(
                target=StockSampleGenerator.__execute_generate_stock_sample,
                args=(self, idx, sub_code_date_list_list[idx]))
            task.start()
            all_task_list.append(task)
        # One extra thread monitors overall sample-generation progress.
        monitor_task = threading.Thread(
            target=StockSampleGenerator.__monitor_progress,
            args=(self, len(code_date_list)))
        monitor_task.start()
        for task in all_task_list:
            task.join()
        monitor_task.join()

    def __execute_generate_stock_sample(self, idx, code_date_list):
        """Worker body: join features with labels for one slice of (code, date)
        pairs and append the resulting examples to this worker's TFRecord file.

        Args:
            idx: worker index; selects the output file and the progress slot.
            code_date_list: sequence of (code, date) pairs to process.
        """
        tf_record_dir_path = self.quant_config['sample']['tf_record_dir_path']
        project_root = os_util.get_project_root()
        tf_record_dir_path = project_root + tf_record_dir_path
        tfrecord_writer = tf.io.TFRecordWriter(
            "{}/{}_stock_sample.tfrecords".format(tf_record_dir_path, idx))
        db = stock_info.new_db_conn()
        cursor = db.cursor()
        try:
            for code, date in code_date_list:
                feature_table_name = stock_info.get_stock_feature_day_table_name(code)
                label_table_name = stock_info.get_stock_label_day_table_name(code)
                cursor.execute(
                    'SELECT `feature` FROM {} WHERE `code`=%s AND `date`=%s LIMIT 1'.format(feature_table_name),
                    [code, date])
                feature_row = cursor.fetchone()
                cursor.execute(
                    'SELECT `max_increase_20d`, `max_decrease_20d`, `max_increase_50d`, `max_decrease_50d`, `max_increase_120d`, `max_decrease_120d` FROM {} WHERE `code`=%s AND `date`=%s LIMIT 1'.format(label_table_name),
                    [code, date])
                label_row = cursor.fetchone()
                if feature_row is not None and label_row is not None:
                    example = self.__generate_tf_records_example(feature_row, label_row)
                    if example is not None:
                        tfrecord_writer.write(example.SerializeToString())
                # Count every processed pair (even skipped ones) so the monitor
                # can reach `total`. Slot is worker-private: no lock needed.
                self.__task_progress__list[idx] += 1
        finally:
            # Fix: the original never closed the DB connection/cursor and
            # leaked the writer on exception.
            cursor.close()
            db.close()
            tfrecord_writer.close()

    def __generate_tf_records_example(self, feature_row, label_row):
        """Build a tf.train.Example from one feature blob and one label row.

        Args:
            feature_row: 1-tuple holding the serialized feature_pb2.feature blob.
            label_row: 6-tuple of float labels in the SELECT column order.

        Returns:
            A tf.train.Example, or None when any label column is NULL.
        """
        feature_pb = feature_pb2.feature()
        feature_pb.ParseFromString(feature_row[0])
        features = {}
        for feature_generator in self.feature_generators:
            name, value = feature_generator.generate_tf_record(feature_pb)
            features[name] = value
        # Drop the sample when any label is missing (NULL in the DB).
        if any(label is None for label in label_row):
            return None
        # Column order here must match the SELECT in the worker above.
        label_names = (
            'max_increase_20d', 'max_decrease_20d',
            'max_increase_50d', 'max_decrease_50d',
            'max_increase_120d', 'max_decrease_120d',
        )
        for name, label in zip(label_names, label_row):
            features[name] = tf.train.Feature(float_list=tf.train.FloatList(value=[label]))
        return tf.train.Example(features=tf.train.Features(feature=features))

    def __monitor_progress(self, total):
        """Poll the per-worker progress counters once per second and render an
        aggregate tqdm bar until all `total` pairs are processed."""
        monitor = tqdm(total=total, desc='样本处理/生成')
        while True:
            done = sum(self.__task_progress__list)
            monitor.n = done
            monitor.refresh()
            if total <= done:
                break
            # BUG FIX: the original called the tf.data *testing* `sleep`,
            # which returns a dataset transformation and does not pause at
            # all, so this loop busy-spun. Poll once per second instead.
            time.sleep(1)
        monitor.close()

    def clear_all_data(self):
        """Delete every previously generated TFRecord file under the
        configured sample output directory."""
        tf_record_dir_path = self.quant_config['sample']['tf_record_dir_path']
        project_root = os_util.get_project_root()
        tf_record_dir_path = project_root + tf_record_dir_path
        os_util.delete_files(tf_record_dir_path)
