# import datetime
# import os
# import pickle
#
# import numpy as np
# import pandas as pd
# from tensorflow import keras
#
# from src.laptop.model import laptop_model_config
# from src.utils.config import logger
# from src.utils.db_processor import mysql_prediction_processor, postgre_processor
# from src.utils.util import check_date_str, format_date_string, process_model_file_date_mapping
#
#
# INTERVAL_POINT_DAYS = 14
# CHECK_DAYS_NEARBY = 2
#
#
# class LaptopAnomalyTrendDetector:
#     def __init__(self, data, model_date):
#         """
#         Set up the detector: validate dates and load current + checkpoint models.
#
#         :param data: data to run trend detection on
#         :param model_date: model date string (validated via check_date_str)
#         """
#         self.data = data.copy()
#         self.model_date = check_date_str(model_date)
#         self.checkpoint_date = self.model_date - datetime.timedelta(days=INTERVAL_POINT_DAYS)
#         self.anomaly_data = None
#         # whether the history model and all of its preprocessors loaded successfully
#         self.history_ready = False
#         self.__load_models()
#         self.__load_history_model()
#         self.__load_anomaly_trend_error_range()
#
#     def __load_models(self):
#         """
#         Load the latest model and its preprocessors (one-hot encoders, scaler).
#         :return:
#         """
#         logger.info('loading anomaly trend detect models...')
#         model_file = os.path.join(laptop_model_config.MODEL_DIR, laptop_model_config.MODEL_FILE_NAME)
#         self.model = keras.models.load_model(model_file)
#
#         ohe_product_file = 'ohe_product' + laptop_model_config.PREPROCESSOR_SUFFIX_NAME
#         with open(os.path.join(laptop_model_config.MODEL_DIR, ohe_product_file), 'rb') as f:
#             self.ohe_product = pickle.load(f)
#
#         ohe_level_file = 'ohe_level' + laptop_model_config.PREPROCESSOR_SUFFIX_NAME
#         with open(os.path.join(laptop_model_config.MODEL_DIR, ohe_level_file), 'rb') as f:
#             self.ohe_level = pickle.load(f)
#
#         ohe_attr_file = 'ohe_attr' + laptop_model_config.PREPROCESSOR_SUFFIX_NAME
#         with open(os.path.join(laptop_model_config.MODEL_DIR, ohe_attr_file), 'rb') as f:
#             self.ohe_attr = pickle.load(f)
#
#         ohe_period_file = 'ohe_period' + laptop_model_config.PREPROCESSOR_SUFFIX_NAME
#         with open(os.path.join(laptop_model_config.MODEL_DIR, ohe_period_file), 'rb') as f:
#             self.ohe_period = pickle.load(f)
#
#         scaler_file = 'scaler' + laptop_model_config.PREPROCESSOR_SUFFIX_NAME
#         with open(os.path.join(laptop_model_config.MODEL_DIR, scaler_file), 'rb') as f:
#             self.scaler = pickle.load(f)
#
#         logger.info('loading anomaly trend detect models done')
#
#     def __load_history_model(self):
#         """
#         Load the historical (checkpoint-date) model and its preprocessors.
#         :return:
#         """
#         logger.info('loading anomaly trend detect history models...')
#         history_model_file = self.__get_target_model_file(laptop_model_config.MODEL_DIR +
#                                                           laptop_model_config.MODEL_FILE_NAME + '*')
#         if history_model_file is None:
#             logger.warning('history model file not found!')
#             return
#         self.history_model = keras.models.load_model(history_model_file)
#
#         ohe_product_file = self.__get_target_model_file(laptop_model_config.MODEL_DIR + 'ohe_product' +
#                                                         laptop_model_config.PREPROCESSOR_SUFFIX_NAME + '*')
#         if ohe_product_file is None:
#             logger.warning('history product ohe file is not found!')
#             return
#         with open(ohe_product_file, 'rb') as f:
#             self.history_ohe_product = pickle.load(f)
#
#         ohe_level_file = self.__get_target_model_file(laptop_model_config.MODEL_DIR + 'ohe_level' +
#                                                       laptop_model_config.PREPROCESSOR_SUFFIX_NAME + '*')
#         if ohe_level_file is None:
#             logger.warning('history level ohe file is not found!')
#             return
#         with open(ohe_level_file, 'rb') as f:
#             self.history_ohe_level = pickle.load(f)
#
#         ohe_attr_file = self.__get_target_model_file(laptop_model_config.MODEL_DIR + 'ohe_attr' +
#                                                      laptop_model_config.PREPROCESSOR_SUFFIX_NAME + '*')
#         if ohe_attr_file is None:
#             logger.warning('history attr ohe file is not found!')
#             return
#         with open(ohe_attr_file, 'rb') as f:
#             self.history_ohe_attr = pickle.load(f)
#
#         ohe_period_file = self.__get_target_model_file(laptop_model_config.MODEL_DIR + 'ohe_period' +
#                                                        laptop_model_config.PREPROCESSOR_SUFFIX_NAME + '*')
#         if ohe_period_file is None:
#             logger.warning('history period ohe file is not found!')
#             return
#         with open(ohe_period_file, 'rb') as f:
#             self.history_ohe_period = pickle.load(f)
#
#         scaler_file = self.__get_target_model_file(laptop_model_config.MODEL_DIR + 'scaler' +
#                                                    laptop_model_config.PREPROCESSOR_SUFFIX_NAME + '*')
#         if scaler_file is None:
#             logger.warning('history scaler file is not found!')
#             return
#         with open(scaler_file, 'rb') as f:
#             self.history_scaler = pickle.load(f)
#
#         self.history_ready = True
#         logger.info('loading anomaly trend detect history models done')
#
#     def __get_target_model_file(self, file_pattern):
#         """
#         Find a model file within a window around the checkpoint date:
#         - look for a file dated exactly on the checkpoint date
#         - look for a file within CHECK_DAYS_NEARBY days before it
#         - look for a file within CHECK_DAYS_NEARBY days after it
#         :param file_pattern: glob-style file pattern
#         :return:
#         """
#         target_date = check_date_str(self.checkpoint_date)
#         file_time_mapping = process_model_file_date_mapping(file_pattern)
#
#         target_date_str = format_date_string(target_date)
#         # first check whether a file exists for the exact target date
#         if file_time_mapping.get(target_date_str):
#             logger.info('get target model target_date@{}'.format(target_date))
#             return file_time_mapping[target_date_str]
#         # search earlier dates
#         # NOTE(review): range(CHECK_DAYS_NEARBY) starts at i=0, so this re-checks the
#         # target date and only reaches CHECK_DAYS_NEARBY - 1 days back — likely intended
#         # range(1, CHECK_DAYS_NEARBY + 1); confirm before re-enabling this module
#         for i in range(CHECK_DAYS_NEARBY):
#             target_date_nearby = format_date_string(target_date - datetime.timedelta(days=i))
#             if file_time_mapping.get(target_date_nearby):
#                 logger.info('get target model target_date@{}'.format(target_date_nearby))
#                 return file_time_mapping[target_date_nearby]
#         # search later dates
#         # NOTE(review): range(CHECK_DAYS_NEARBY) starts at i=0, so the target date is
#         # re-checked and only CHECK_DAYS_NEARBY - 1 later days are tried — confirm intended
#         for i in range(CHECK_DAYS_NEARBY):
#             target_date_nearby = format_date_string(target_date + datetime.timedelta(days=i))
#             if file_time_mapping.get(target_date_nearby):
#                 logger.info('get target model target_date@{}'.format(target_date_nearby))
#                 return file_time_mapping[target_date_nearby]
#
#         logger.warning('target model file not found target date@{}'.format(target_date))
#         return None
#
#     def __load_anomaly_trend_error_range(self):
#         """
#         Load anomaly cut points and error-rate ranges via mysql_prediction_processor.
#         :return:
#         """
#         logger.info('loading anomaly error range...')
#         sql = "SELECT cut_point, upper_rate, lower_rate FROM laptop_anomaly_trend_error_range  ORDER BY cut_point"
#         error_range = mysql_prediction_processor.load_sql(sql)
#         # flip the lower bound to a negative value
#         error_range['lower_rate'] = -error_range['lower_rate']
#         self.cut_points = [-np.inf] + error_range['cut_point'].tolist()[:-1] + [np.inf]
#         cut_range = pd.cut(error_range['cut_point'], self.cut_points)
#         cut_range = cut_range.astype(str)
#         error_range = error_range.iloc[:-1, :]
#         error_range['cut_range'] = cut_range[1:].tolist()
#         self.error_range = error_range
#
#     def process_price_margin(self):
#         """
#         Compute the margin between current and historical predicted prices.
#         :return:
#         """
#         logger.info("processing trend price margin...")
#         x_product = self.ohe_product.transform(self.data[laptop_model_config.PRODUCT_FEATURES])
#         x_level = self.ohe_level.transform(self.data[laptop_model_config.LEVEL_FEATURES])
#         x_attr = self.ohe_attr.transform(self.data[laptop_model_config.ATTR_FEATURES])
#         x_period = self.ohe_period.transform(self.data[laptop_model_config.PERIOD_FEATURES])
#
#         current_price = self.model.predict([x_product, x_level, x_attr, x_period])
#         self.data['current_price'] = np.round(
#             self.scaler.inverse_transform(current_price.reshape((-1, 1))).flatten()).astype(int)
#
#         x_history_product = self.history_ohe_product.transform(self.data[laptop_model_config.PRODUCT_FEATURES])
#         x_history_level = self.history_ohe_level.transform(self.data[laptop_model_config.LEVEL_FEATURES])
#         x_history_attr = self.history_ohe_attr.transform(self.data[laptop_model_config.ATTR_FEATURES])
#         x_history_period = self.history_ohe_period.transform(self.data[laptop_model_config.PERIOD_FEATURES])
#
#         history_price = self.history_model.predict([x_history_product, x_history_level, x_history_attr, x_history_period])
#         self.data['history_price'] = np.round(
#             self.history_scaler.inverse_transform(history_price.reshape((-1, 1))).flatten()).astype(int)
#         self.data['history_price'] = self.data['history_price'].where(self.data['history_price'] >= 6, 6)
#         self.data['margin'] = self.data['current_price'] - self.data['history_price']
#         self.data['margin_rate'] = np.round(self.data['margin'] / self.data['history_price'], 4)
#
#     def process_anomaly(self):
#         """
#         Flag rows whose margin rate falls outside the allowed error range.
#         :return:
#         """
#         logger.info('processing anomaly trend data...')
#         self.data['cut_range'] = pd.cut(self.data['item_quotation_price_num'], self.cut_points)
#         self.data['cut_range'] = self.data['cut_range'].astype(str)
#         detect_data = pd.merge(self.data, self.error_range, on='cut_range')
#         anomaly_data = detect_data[(detect_data['margin_rate'] > detect_data['upper_rate']) |
#                                    (detect_data['margin_rate'] < detect_data['lower_rate'])].copy()
#         logger.info('detecting anomaly trend data size@{}'.format(anomaly_data.shape))
#
#         self.anomaly_data = anomaly_data
#
#     def save_anomaly(self):
#         """
#         Save detected anomaly rows (enriched with SKU names) via mysql_prediction_processor.
#         :return:
#         """
#         if self.anomaly_data is None:
#             logger.critical('anomaly trend data is None!')
#             return
#
#         if self.anomaly_data.empty:
#             logger.info('anomaly trend data is empty, SKIP...')
#             return
#
#         logger.info('saving anomaly trend data.')
#
#         logger.info('loading sku name...')
#         sku_id_list = self.anomaly_data['product_sku_key'].drop_duplicates().tolist()
#         sku_id_cond = ','.join(str(x) for x in sku_id_list)
#         sku_name_sql = """
#         SELECT product_sku_id AS product_sku_key, product_sku_name
#         FROM dim.dim_product_sku
#         WHERE product_sku_id in ({})""".format(sku_id_cond)
#         sku_names = postgre_processor.load_sql(sku_name_sql)
#         self.anomaly_data = pd.merge(self.anomaly_data, sku_names, on='product_sku_key')
#
#         sql = """
#         INSERT INTO laptop_anomaly_trend_detection(sku_id, product_sku_name, product_level_name,
#         history_price, current_price, error_rate)
#         VALUES(%s, %s, %s, %s, %s, %s)
#         """
#         anomaly_trend_data = self.anomaly_data[['product_sku_key', 'product_sku_name', 'product_level_name',
#                                                 'history_price', 'current_price', 'margin_rate']]
#         mysql_prediction_processor.execute_insert_sql(sql, anomaly_trend_data.to_records(index=False).tolist())
#
#     def launch_anomaly_detect(self):
#         """
#         Run the full pipeline: price margins -> anomaly detection -> save results.
#         Skipped entirely if the history model did not load.
#         :return:
#         """
#         if not self.history_ready:
#             logger.warning('history model is not ready! SKIP')
#             return
#
#         self.process_price_margin()
#         self.process_anomaly()
#         self.save_anomaly()
