import datetime
import json
import os
import pickle
import time

import numpy as np
import pandas as pd
import requests
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import OneHotEncoder, StandardScaler

from src.mobile.anomaly.anomaly_detect import MobileAnomalyDetector
from src.mobile.anomaly.anomaly_load import load_anomaly_document_item
from src.mobile.model import mobile_model_config
from src.mobile.preprocessor.settle_data_loader import preprocess_settle_data
from src.utils.config import config, logger
from src.utils.db_processor import mysql_prediction_processor, postgre_processor,mysql_price_model
from src.utils.dingding_message import dingding_messager
from src.utils.redis_pool import redis_conn
from src.utils.util import check_conflict_file, get_today, format_date_string

from src.mobile.job.sku_price_complement import MOBILE_DATA_SQL,PRODUCT_SQL,COMPLETE_DATA_SQL,LEVEL_SQL,\
    LEVEL_TEMPLATE_MAPPING_SQL,FEATURES
from src.mobile.predict.mobile_price_config import MODEL_DIR, MAX_EVN_PRICE, \
    HISTORY_AVG_PRICE_WEIGHT, MODEL_PRICE_WEIGHT

import tensorflow as tf
from tensorflow import keras
from tensorflow.python.keras.backend import set_session
# TensorFlow 1.x shared session/graph setup. The session and default graph
# are captured at import time so that MobileModel.load_models() can bind the
# loaded Keras model to this graph (required when serving from worker
# threads under TF1). NOTE(review): tf.ConfigProto / tf.Session are
# TF1-only APIs; under TF2 these would need tf.compat.v1 equivalents.
session_config = tf.ConfigProto(allow_soft_placement=True)
sess = tf.Session(config=session_config)
# `global graph` was removed here: at module scope the `global` statement
# is a no-op, so plain assignment already creates the module-level name.
graph = tf.get_default_graph()

def load_history_avg_price(key):
    """
    Load the cached historical average price from redis.
    :param key: redis key to look up
    :return: the price as an int, or None when the key is absent
    """
    cached = redis_conn.get(key)
    if cached is None:
        return None
    # redis returns a string/bytes value such as "123.0"; go through float
    # first so decimal representations still parse, then truncate to int.
    return int(float(cached))

class MobileModel:
    """
    Daily mobile-phone price model pipeline.

    Loads settle data, removes anomalies, trains a Keras regression model
    on one-hot encoded features with a standardized price target, predicts
    prices for the complement (sku x level) cartesian data, blends them
    with historical average prices, and persists model artifacts/results.
    """

    def __init__(self, model_date=None):
        """
        :param model_date: modeling date (a date/datetime); defaults to today
        """
        if model_date is None:
            self.model_date = get_today()
        else:
            self.model_date = model_date

        self.model_data = None
        self.model = None
        self.train_history = None
        # Initialize attributes produced by later pipeline stages so that
        # the `is None` guards (e.g. in predict_complement_data) log and
        # return instead of raising AttributeError when a stage is skipped
        # or methods are called out of order.
        self.predict_data = None
        self.history_avg_price = None
        self.mysql_price_num = 0
        # One-hot encoder for the model feature columns
        self.ohe = OneHotEncoder(handle_unknown='ignore')
        # Separate one-hot encoder on product_name only, used at predict
        # time to flag products unseen during training ("new products")
        self.product_ohe = OneHotEncoder(handle_unknown='ignore')
        # target price standardizer
        self.scaler = StandardScaler()
        # URL used to clear the downstream price API cache
        self.ab_price_api_url = config.get_config(
            'ab_price_api', 'cache_clear_url')

    def load_model_data(self):
        """
        Load the modeling data.
        :return:
        """
        self.model_data = preprocess_settle_data()
        logger.info('model_data shape@{}'.format(self.model_data.shape))

    def eliminate_anomaly(self):
        """
        Drop rows whose document_item_id is flagged as anomalous.
        :return:
        """
        logger.info('eliminating anomaly...')
        anomaly_document_item = load_anomaly_document_item()
        if not anomaly_document_item.empty:
            logger.info('eliminating anomaly products size@{}'.format(
                anomaly_document_item.shape))
            self.model_data = self.model_data[~self.model_data['document_item_id'].isin(
                anomaly_document_item['document_item_id'])]

    def detect_anomaly(self):
        """
        Detect anomalous data.
        - Predict each item with yesterday's model and compare the
          prediction against the latest settle price.
        :return:
        """
        history_date = self.model_date - datetime.timedelta(days=1)
        anomaly_detector = MobileAnomalyDetector(self.model_data, history_date)
        anomaly_detector.launch_anomaly_detection()

    def build_model(self, input_shape):
        """
        Build the regression network (3 hidden ReLU layers, linear output).
        :param input_shape: number of input features
        :return:
        """
        inputs = keras.Input(shape=(input_shape,))

        dense = keras.layers.Dense(128, activation='relu')(inputs)
        dense = keras.layers.Dense(64, activation='relu')(dense)
        dense = keras.layers.Dense(32, activation='relu')(dense)
        outputs = keras.layers.Dense(1, activation='linear')(dense)

        self.model = keras.Model(inputs=inputs, outputs=outputs)

        opt = keras.optimizers.Adam()
        self.model.compile(optimizer=opt, loss='mse')

    def train_model(self):
        """
        Train the model, then run extra epochs on the recent-days subset
        so recent prices get more weight.
        :return:
        """
        if self.model_data is None:
            logger.critical('model_data is None!')
            return

        # recent-data window
        recent_date = self.model_date - \
            datetime.timedelta(days=mobile_model_config.RECENT_DAYS)
        recent_data = self.model_data[self.model_data['settle_list_create_date'] >= recent_date]

        x_train, x_valid, y_train, y_valid = train_test_split(self.model_data.drop(columns='item_quotation_price_num'),
                                                              self.model_data['item_quotation_price_num'],
                                                              test_size=0.2)
        train_x_inputs = self.ohe.fit_transform(
            x_train[mobile_model_config.MOBILE_FEATURES])
        valid_x_inputs = self.ohe.transform(
            x_valid[mobile_model_config.MOBILE_FEATURES])
        recent_x_inputs = self.ohe.transform(
            recent_data[mobile_model_config.MOBILE_FEATURES])

        train_y_scaled = self.scaler.fit_transform(
            y_train.values.reshape((-1, 1))).flatten()
        valid_y_scaled = self.scaler.transform(
            y_valid.values.reshape((-1, 1))).flatten()
        recent_y_scaled = self.scaler.transform(
            recent_data['item_quotation_price_num'].values.reshape((-1, 1))).flatten()

        logger.info('training model...')
        self.build_model(train_x_inputs.shape[1])
        self.train_history = self.model.fit(train_x_inputs, train_y_scaled,
                                            validation_data=(
                                                valid_x_inputs, valid_y_scaled),
                                            epochs=100, verbose=0, batch_size=512,
                                            callbacks=mobile_model_config.model_callbacks)
        # extra training epochs on the recent data to give it more weight
        self.model.fit(recent_x_inputs, recent_y_scaled,
                       epochs=5, verbose=0, batch_size=512)

        self.product_ohe.fit(x_train[['product_name']])

    def evaluate_model(self):
        """
        Evaluate the model and persist the evaluation metrics.
        :return:
        """
        if self.train_history is None:
            logger.critical('train_history is None!')
            return

        epoch = self.train_history.epoch[-1]
        # multiply by the target variance to recover loss in price units
        # (loss was computed on standardized targets)
        train_loss = np.round(
            np.sqrt(self.train_history.history['loss'][-1] * self.scaler.var_[0]), 4)
        val_loss = np.round(
            np.sqrt(self.train_history.history['val_loss'][-1] * self.scaler.var_[0]), 4)
        # NOTE(review): History.params['samples'] exists in TF1-era Keras
        # only; this key is absent in TF2 — confirm before upgrading TF.
        data_size = self.train_history.params['samples']

        logger.info('saving model evaluation...')
        evaluation_sql = """
        REPLACE INTO mobile_price_model_evaluation(model_date, epoch, train_loss, val_loss, data_size)
        VALUES(%s, %s, %s, %s, %s)
        """
        evaluation_data = [(format_date_string(self.model_date), epoch, float(
            train_loss), float(val_loss), data_size)]
        mysql_prediction_processor.execute_insert_sql(
            evaluation_sql, evaluation_data)

    def load_models(self):
        """
        Load the model and preprocessors from disk.

        NOTE(review): this method reads from MODEL_DIR (imported from
        mobile_price_config) while save_model writes to
        mobile_model_config.MODEL_DIR — presumably these point to the same
        directory; verify.
        :return:
        """
        logger.info('loading models...')

        model_file = os.path.join(MODEL_DIR, mobile_model_config.MODEL_FILE_NAME)
        # bind the loaded model to the module-level TF1 graph/session so it
        # can be used for prediction later
        with graph.as_default():
            set_session(sess)
            self.model = keras.models.load_model(model_file)

        ohe_file = os.path.join(MODEL_DIR, mobile_model_config.MOBILE_OHE_NAME)
        with open(ohe_file, 'rb') as f:
            self.ohe = pickle.load(f)

        scaler_file = os.path.join(MODEL_DIR, mobile_model_config.MOBILE_SCALER_NAME)
        with open(scaler_file, 'rb') as f:
            self.scaler = pickle.load(f)

        product_ohe_file = os.path.join(MODEL_DIR, mobile_model_config.MOBILE_PRODUCT_OHE_NAME)
        with open(product_ohe_file, 'rb') as f:
            self.product_ohe = pickle.load(f)

        logger.info('loading models done')

    def save_model(self):
        """
        Save the model and preprocessors to disk.
        :return:
        """
        if self.model is None:
            logger.critical('model is None!')
            return

        logger.info('saving model...')

        check_conflict_file(mobile_model_config.MODEL_DIR,
                            mobile_model_config.MODEL_FILE_NAME)
        model_file = os.path.join(
            mobile_model_config.MODEL_DIR, mobile_model_config.MODEL_FILE_NAME)
        self.model.save(model_file)

        check_conflict_file(mobile_model_config.MODEL_DIR,
                            mobile_model_config.MOBILE_OHE_NAME)
        with open(os.path.join(mobile_model_config.MODEL_DIR, mobile_model_config.MOBILE_OHE_NAME), 'wb') as f:
            pickle.dump(self.ohe, f)

        check_conflict_file(mobile_model_config.MODEL_DIR,
                            mobile_model_config.MOBILE_SCALER_NAME)
        with open(os.path.join(mobile_model_config.MODEL_DIR, mobile_model_config.MOBILE_SCALER_NAME), 'wb') as f:
            pickle.dump(self.scaler, f)

        check_conflict_file(mobile_model_config.MODEL_DIR,
                            mobile_model_config.MOBILE_PRODUCT_OHE_NAME)
        with open(os.path.join(mobile_model_config.MODEL_DIR, mobile_model_config.MOBILE_PRODUCT_OHE_NAME), 'wb') as f:
            pickle.dump(self.product_ohe, f)

    def push_model(self):
        """
        Push the model files to the remote prediction servers via scp.

        NOTE(review): the 'scp {}{}' format string assumes MODEL_DIR ends
        with a path separator — confirm. Commands go through os.system, so
        server names/paths must be trusted config values.
        :return:
        """
        logger.info(
            'pushing models to remote server model_date@{}'.format(self.model_date))
        exist_server = mobile_model_config.MODEL_PUSH_SERVERS[0]
        if exist_server == 'no_server':
            logger.info('No server need to push, SKIP!')
            return

        for ip in mobile_model_config.MODEL_PUSH_SERVERS:
            logger.info('push server to server@{}'.format(ip))
            model_push_cmd = 'scp {}{} root@{}:/data/thy/price_model/models/mobile/'. \
                format(mobile_model_config.MODEL_DIR,
                       mobile_model_config.MODEL_FILE_NAME, ip)
            os.system(model_push_cmd)

            preprocessor_push_cmd = 'scp {}{} root@{}:/data/thy/price_model/models/mobile/'. \
                format(mobile_model_config.MODEL_DIR,
                       mobile_model_config.MOBILE_OHE_NAME, ip)
            os.system(preprocessor_push_cmd)

            scaler_push_cmd = 'scp {}{} root@{}:/data/thy/price_model/models/mobile/'. \
                format(mobile_model_config.MODEL_DIR,
                       mobile_model_config.MOBILE_SCALER_NAME, ip)
            os.system(scaler_push_cmd)

            product_push_cmd = 'scp {}{} root@{}:/data/thy/price_model/models/mobile/'. \
                format(mobile_model_config.MODEL_DIR,
                       mobile_model_config.MOBILE_PRODUCT_OHE_NAME, ip)
            os.system(product_push_cmd)

            time.sleep(1)

    def push_reload_model_message(self):
        """
        Ask each prediction server to reload its model.

        For each server: take it out of the health check, retry the reload
        request up to 3 times per port, then restore the health-check file.
        :return:
        """
        logger.info(
            'pushing reload model message model_date@{}'.format(self.model_date))
        exist_server = mobile_model_config.MODEL_PUSH_SERVERS[0]
        if exist_server == 'no_server':
            logger.info('No server need to push, SKIP!')
            return

        params = {"category": 1, "reload": 1, "data": [[]]}
        base_url = 'http://{}:{}/price/predict'
        for ip in mobile_model_config.MODEL_PUSH_SERVERS:
            # remove the health-check file so the load balancer drains this
            # server before it reloads
            rm_cmd = 'ssh root@{} rm -f /data/thy/price_model/api_health/health_check'.format(
                ip)
            os.system(rm_cmd)
            time.sleep(60)
            for port in mobile_model_config.MODEL_SERVER_PORTS:
                request_url = base_url.format(ip, port)
                logger.info('push reload message to {}'.format(request_url))
                results = {'code': -1}
                # up to 3 attempts per port
                for i in range(1, 4):
                    response = requests.post(request_url, json=params)
                    results = json.loads(response.content)
                    time.sleep(5)
                    if results['code'] == 0:
                        logger.info('push reload message success @time {} '.format(i))
                        break
                    else:
                        logger.info('push reload message fail {} times '.format(i))
                if results['code'] != 0:
                    # if any reload push failed, stop updating this server
                    logger.critical(
                        'push reloading message error! {}:{}'.format(ip, port))
                    break
                time.sleep(5)
            time.sleep(30)
            # restore the health-check file so the server rejoins rotation
            touch_cmd = 'ssh root@{} touch /data/thy/price_model/api_health/health_check'.format(
                ip)
            os.system(touch_cmd)
            time.sleep(10)

    def clear_api_cache(self):
        """
        Clear the downstream price API cache (best effort: failures are
        logged and reported via dingding, never raised).
        :return:
        """
        if self.ab_price_api_url is None:
            logger.info('ab_price_api_url is None, SKIP!')
        else:
            try:
                response = requests.get(self.ab_price_api_url)
                if response.status_code != 200:
                    logger.critical('手机清除abprice缓存失败!')
                    dingding_messager.send_message('手机清除abprice缓存失败!')
            except Exception as e:
                logger.critical('手机调用清除abprice缓存接口异常：{}'.format(e))
                dingding_messager.send_message(
                    '手机调用清除abprice缓存接口异常：{}'.format(e))

    def clear_models(self):
        """
        Remove model files (.pkl / .h5) older than KEEP_MODEL_DAYS.
        :return:
        """
        logger.info('clearing model files {}'.format(self.model_date))
        clear_timestamp = (get_today(
        ) - datetime.timedelta(days=mobile_model_config.KEEP_MODEL_DAYS)).timestamp()
        model_files = os.listdir(mobile_model_config.MODEL_DIR)
        for file in model_files:
            if file.endswith('pkl') or file.endswith('h5'):
                file_name = os.path.join(mobile_model_config.MODEL_DIR, file)
                create_time = os.path.getctime(file_name)
                if create_time < clear_timestamp:
                    logger.info('removing file@{}'.format(file))
                    os.remove(file_name)

    def cal_history_avg_price(self, to_redis=False):
        """
        Compute the historical average price per (sku, level).
        :param to_redis: when True, also cache the averages in redis
        :return:
        """
        logger.info('calculate history avg price...')
        history_price_date = self.model_date - datetime.timedelta(
            days=mobile_model_config.MOBILE_HISTORY_AVG_PRICE_DAYS)
        history_avg_price_sql = """
        SELECT product_sku_key, product_level_key, avg(forecast_reference_price) AS avg_price
        FROM dm.dm_prognostizieren_preisergebnisse 
        WHERE product_category_id = 1 AND predict_date >= '{}' 
        GROUP BY product_sku_key, product_level_key
        """.format(history_price_date.strftime('%Y-%m-%d'))
        history_avg_price = postgre_processor.load_sql(history_avg_price_sql)
        self.history_avg_price = history_avg_price

        if to_redis:
            logger.info('saving mobile history price into redis')
            # redis key: <prefix><sku>_<level>
            history_avg_price.index = mobile_model_config.MOBILE_HISTORY_AVG_PRICE_PREFIX + \
                history_avg_price['product_sku_key'].astype(
                    str) + '_' + history_avg_price['product_level_key'].astype(str)
            redis_dict = history_avg_price['avg_price'].to_dict()
            for key, value in redis_dict.items():
                redis_conn.set(
                    key, value, ex=mobile_model_config.MOBILE_HISTORY_CACHE_TIME)
            logger.info('saving mobile history price into redis done!')

    def process_complement_data(self):
        """
        Build the complement prediction frame: pivot sku properties to
        columns, join product info, normalize memory/storage/color, and
        cross-join with the product levels.
        :return:
        """
        logger.info('processing complement data...')

        data = self.__load_complement_data()

        data['property'] = data['property'].map(lambda x: x.strip())
        data['property_value'] = data['property_value'].map(lambda x: x.strip())
        # pivot property rows into one column per property name
        data = data.pivot_table(index='product_sku_id', columns='property', values='property_value',
                                aggfunc=lambda x: x).reset_index()
        data.fillna('unknown', inplace=True)

        product_id_data = self.__load_product_data()
        data = pd.merge(data, product_id_data, on='product_sku_id')
        data['product_category_id'] = 1
        data['product_category_name'] = '手机'

        # 'memory' may be a combined "memory+storage" value; split it
        memory_storage_split = data['memory'].str.split('+', expand=True)
        if memory_storage_split.shape[1] == 1:
            # only one field split out — pad the missing storage column
            memory_storage_split.columns = ['memory_tmp']
            memory_storage_split['storage_tmp'] = np.nan
        else:
            memory_storage_split.columns = ['memory_tmp', 'storage_tmp']
        data = pd.concat([data, memory_storage_split], axis=1, sort=False)
        data['memory'] = data['memory_tmp']
        # prefer the storage value parsed out of 'memory' when present
        data['storage'] = data['storage'].where(pd.isnull(data['storage_tmp']), data['storage_tmp'])

        # Apple products share a single memory token
        data.loc[data['product_brand_name'] == '苹果', 'memory'] = 'apple_memory'
        # qualify color with the product name so the same color on different
        # products encodes differently
        data['color'] = np.where(data['color'] != 'unknown', data['product_name'] + '_X_' + data['color'], 'unknown')
        data['period'] = '0'

        levels = self.__load_product_levels()

        # cartesian product of skus x levels via a constant join key
        data['_tmp_key'] = 0
        levels['_tmp_key'] = 0

        self.predict_data = pd.merge(data, levels, on='_tmp_key', how='outer').drop(columns='_tmp_key')
        self.predict_data['date'] = self.model_date.strftime('%Y-%m-%d')

    def __load_complement_data(self):
        """
        Load the complement (sku property) data.
        :return:
        """
        return postgre_processor.load_sql(COMPLETE_DATA_SQL)

    def __load_product_data(self):
        """
        Load the product ids.
        :return:
        """
        product_data = postgre_processor.load_sql(PRODUCT_SQL)
        product_data['product_level_template_id'] = product_data['product_level_template_id'].astype(str)
        return product_data

    def __load_product_levels(self):
        """
        Load the product levels.
        :return:
        """
        return postgre_processor.load_sql(LEVEL_SQL)

    def __load_template_level_mapping(self):
        """
        Load the mapping between level templates and levels.
        :return:
        """
        mapping = postgre_processor.load_sql(LEVEL_TEMPLATE_MAPPING_SQL)
        mapping['product_level_template_id'] = mapping['product_level_template_id'].astype(str)
        mapping['product_level_id'] = mapping['product_level_id'].astype(int)

        return mapping

    def predict_complement_data(self):
        """
        Predict prices for the complement data, blend with historical
        averages, flag new products, and apply price caps/floors.

        Requires process_complement_data() and cal_history_avg_price() to
        have run first.
        :return:
        """
        if self.predict_data is None:
            logger.critical('predict_data is None!')
            return
        if self.model is None:
            self.load_models()

        logger.info('predicting data...')

        self.predict_data['period'] = '0'
        test_x_inputs = self.ohe.transform(self.predict_data[FEATURES])
        predict_scaled = self.model.predict(test_x_inputs)
        logger.info('predicting data... done！')

        # invert the target standardization back to price units
        self.predict_data['forecast_reference_price'] = np.round(
             self.scaler.inverse_transform(predict_scaled).flatten()).astype(int)
        self.predict_data['predict_origin'] = self.predict_data['forecast_reference_price']
        # blend model prediction with the historical average price
        self.predict_data['key'] = mobile_model_config.MOBILE_HISTORY_AVG_PRICE_PREFIX + \
                              self.predict_data['product_sku_id'].astype(str) + '_' + self.predict_data['product_level_id'].astype(str)

        self.history_avg_price = self.history_avg_price.rename(columns={'product_sku_key': 'product_sku_id', 'product_level_key': 'product_level_id',
                                                                        'avg_price': 'history_avg_price'})
        self.predict_data = pd.merge(self.predict_data, self.history_avg_price, on=['product_sku_id', 'product_level_id'], how='left')
        # fall back to the model price where no historical average exists
        self.predict_data['history_avg_price'] = self.predict_data['history_avg_price'].where(
            self.predict_data['history_avg_price'].notnull(), self.predict_data['forecast_reference_price'])
        self.predict_data['forecast_reference_price'] = self.predict_data['forecast_reference_price'] * MODEL_PRICE_WEIGHT + \
                                self.predict_data['history_avg_price'] * HISTORY_AVG_PRICE_WEIGHT

        # a product unseen by product_ohe one-hot encodes to all zeros
        # (handle_unknown='ignore'), so row-sum 0 => new product
        self.predict_data['is_new_product'] = self.product_ohe.transform(
            self.predict_data[['product_name']]).sum(axis=1).astype(int)
        self.predict_data['is_new_product'] = self.predict_data['is_new_product'].replace({0: 1, 1: 0})

        # cap prices for environment-protection (recycling-only) templates
        env_protect = self.predict_data['product_level_template_id'] == mobile_model_config.EVN_TEMPLATE_ID
        self.predict_data.loc[env_protect, 'forecast_reference_price'] = self.predict_data.loc[
            env_protect, 'forecast_reference_price'].where(
            self.predict_data.loc[env_protect, 'forecast_reference_price'] <= MAX_EVN_PRICE, MAX_EVN_PRICE)

        # enforce the minimum price floor
        self.predict_data['forecast_reference_price'] = self.predict_data['forecast_reference_price'].where(
            self.predict_data['forecast_reference_price'] >= 5, 5)

        logger.info('predicting data done')

    def save_complement_data(self):
        """
        Persist the predicted complement data to mysql.
        :return:
        """
        if self.predict_data is not None:
            logger.info('saving predict data...')
            template_level_mapping = self.__load_template_level_mapping()
            insert_data = pd.merge(self.predict_data, template_level_mapping,
                                   on=['product_level_template_id', 'product_level_id'])
            # NOTE(review): this stamps today's date (time.strftime), while
            # predict_data['date'] was stamped with model_date — confirm
            # the discrepancy is intentional.
            insert_data['date'] = [time.strftime('%Y-%m-%d')] * len(insert_data)
            insert_sql = """
            INSERT INTO price_prediction(date, product_sku_key, product_sku_name, product_level_key, 
            product_level_name, product_key, product_name, product_category_id, product_category_name, 
            product_brand_name, predict_origin, forecast_reference_price,is_new_product)
            VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
            """
            mysql_price_model.execute_insert_sql(insert_sql,
                                               insert_data[
                                                   ['date', 'product_sku_id', 'product_sku_name', 'product_level_id',
                                                    'product_level_name', 'product_id', 'product_name',
                                                    'product_category_id', 'product_category_name',
                                                    'product_brand_name', 'predict_origin', 'forecast_reference_price', 'is_new_product']
                                               ].to_records(index=False).tolist())
            logger.info('saving predict data to mysql done')
            self.mysql_price_num = len(insert_data)

    def launch_model(self):
        """
        Run the full modeling pipeline.
        :return:
        """
        self.load_model_data()
        self.eliminate_anomaly()
        self.detect_anomaly()
        # after today's anomalies have been detected, eliminate again to
        # drop the newly flagged rows
        self.eliminate_anomaly()
        self.train_model()
        self.evaluate_model()
        self.save_model()
        self.cal_history_avg_price()
        self.load_models()
        self.process_complement_data()
        self.predict_complement_data()
        self.save_complement_data()
        # distribution steps are deliberately disabled here
        #self.push_model()
        #self.push_reload_model_message()
        #self.clear_api_cache()
        #self.clear_models()
