import datetime
import os
import pickle

import numpy as np
import pandas as pd
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import OneHotEncoder, StandardScaler
from tensorflow import keras

from src.utils.config import logger
from src.utils.db_processor import  mysql_processor,presto_processor
from src.utils.util import get_today, check_conflict_file

# Presto: one row per distinct channel-1 mobile SKU (product_category_id = 1
# is the mobile-phone category, see process_complement_data), carrying its
# product id, primary level template id (-1 substituted when NULL) and
# secondary level template id.
PRODUCT_SQL = """
SELECT distinct
    dps.mapping_product_sku_id as product_sku_id,
    dp.product_id,
    CASE WHEN otpmp.propertyleveltemplateid IS NULL THEN -1 ELSE otpmp.propertyleveltemplateid END AS product_level_template_id,
    otpm.secondary_level_template_id AS secondary_level_template_id
FROM dim.dim_product dp 
JOIN dim.dim_product_sku_channel_mapping dps ON dp.product_id = dps.product_id and dps.business_channel_id=1 
JOIN ods.ods_trunk_producttemplatemapping otpmp ON (dp.product_id = otpmp.productid AND otpmp.templatetype = 1 and otpmp.business_channel = '0')
JOIN ods.ods_opt_foundation_secondary_product_template_mapping otpm ON dp.product_id = otpm.product_id and otpm.business_channel=1
WHERE dp.product_category_id = 1 and otpm.business_channel=1 and dps.business_channel_id=1
"""


# Presto: long-format (product_sku_id, property, property_value) rows for
# channel-1 mobile SKUs.  Unions the product name, SKU name and brand name
# with selected sub-product properties whose Chinese names are mapped to
# English keys (memory, storage, guarantee, networks, purchase_way, color,
# small_version); anything else falls through to 'unknown_prop'.  The result
# is pivoted to wide format in process_complement_data().
COMPLETE_DATA_SQL = """
SELECT
    dps.product_sku_id,
    'product_name' AS property,
    dp.product_name AS property_value
FROM dim.dim_product dp JOIN dim.dim_product_sku dps ON dp.product_id = dps.product_id
WHERE dp.product_category_id = 1 and dps.product_sku_id in (
select distinct mapping_product_sku_id
FROM  dim.dim_product_sku_channel_mapping dps   where dps.business_channel_id=1 and product_category_parent_id=1
)

UNION

SELECT
    dps.product_sku_id,
    'product_sku_name' AS property,
    dps.product_sku_name AS property_value
FROM dim.dim_product dp JOIN dim.dim_product_sku dps ON dp.product_id = dps.product_id
WHERE dp.product_category_id = 1 and dps.product_sku_id in (
select distinct mapping_product_sku_id
FROM  dim.dim_product_sku_channel_mapping dps   where dps.business_channel_id=1 and product_category_parent_id=1
)

UNION

SELECT
    dps.product_sku_id,
    'product_brand_name' AS property,
    dp.product_brand_name AS property_value
FROM dim.dim_product dp JOIN dim.dim_product_sku dps ON dp.product_id = dps.product_id
WHERE dp.product_category_id = 1 and dps.product_sku_id in (
select distinct mapping_product_sku_id
FROM  dim.dim_product_sku_channel_mapping dps   where dps.business_channel_id=1 and product_category_parent_id=1
)

UNION

SELECT
    dps.product_sku_id,
    (CASE
    WHEN pssp.property_name = '内存' THEN 'memory'
    WHEN pssp.property_name = '存储容量' THEN 'storage'
    WHEN pssp.property_name = '苹果保修期时长' THEN 'guarantee'
    WHEN pssp.property_name = '网络制式' THEN 'networks'
    WHEN pssp.property_name = '购买渠道' THEN 'purchase_way'
    WHEN pssp.property_name = '机身颜色' THEN 'color'
    WHEN pssp.property_name = '小型号' THEN 'small_version'
    ELSE 'unknown_prop' END) AS property,
    pssp.property_value_name AS property_value
FROM dim.dim_product dp JOIN dim.dim_product_sku dps ON dp.product_id = dps.product_id
    JOIN dim.dim_product_sku_sub_product pssp ON pssp.product_sku_id = dps.product_sku_id
    AND pssp.property_name IN ('购买渠道', '存储容量', '网络制式', '内存', '苹果保修期时长', '机身颜色', '小型号')
WHERE dp.product_category_id = 1 and dps.product_sku_id in (
select distinct mapping_product_sku_id
FROM  dim.dim_product_sku_channel_mapping dps   where dps.business_channel_id=1 and product_category_parent_id=1
)
"""


# Base price rows per mobile SKU / level / rank.  A NULL qty is backfilled:
# 0 when all three price tiers are -1 (no price at all), 1 when any tier is
# positive.  NOTE(review): not referenced anywhere in this chunk — presumably
# used elsewhere in the module; confirm before removing.
BASEPRICE_MOBILE_SQL="""
select product_sku_id,product_level_id as product_level_id,rank,price_3,price_2,price_1,saleprice,
case when qty is null and price_1 = -1 and price_2= -1 and price_3 = -1   then 0 
	 when qty is null and (price_1 >0 or price_2>0 OR price_3>0) then 1
else   qty  end qty,
up_date
from imp_mysql_base_pricedata_category_phone
"""


# Active mobile product levels for business channel '0', excluding levels
# that have been remapped by the sku3 -> sku2 mapping table.  Cross-joined
# with every SKU in process_complement_data().
LEVEL_SQL = """
SELECT 
product_level_id,
product_level_name,product_level_order_rank
FROM dim.dim_product_level
WHERE product_category_id = 1 AND is_product_level_active_flag = 1  and business_channel='0'
and product_level_id not in (select new_level_id from algo.algo_c2b_sku3_to_sku2_mapping  )
"""

# Mapping of secondary level template -> product level; used in
# save_complement_data() to keep only (template, level) pairs that exist.
LEVEL_TEMPLATE_MAPPING_SQL = """ 
select secondary_level_template_id as secondary_level_template_id,
product_level_id as product_level_id
from  ods.ods_opt_foundation_secondary_product_level_template_level
"""

# LEVEL_RANK_MAPPING_SQL = """
# SELECT
# level_template_id AS product_level_template_id,
# level_id AS product_level_id
# FROM dim.dim_product_level_template_mapping
# """

# Distinct (brand id, brand name) pairs for active products with status 2.
# NOTE(review): not referenced in this chunk — confirm usage elsewhere.
BRAND_ID_NAME_SQL = """
select distinct dp.product_brand_id,
dp.product_brand_name
from dim.dim_product dp where dp.product_id_status_id = 2 and dp.active = 1 
"""

# SKUs to exclude: SKUs of products created within the last 30 days whose
# settled-document count over the last 42 days is below 10 — i.e. too little
# transaction history to price reliably.
# NOTE(review): not referenced in this chunk — confirm usage elsewhere.
EXCLUDE_SKU_SQL="""
select distinct * from (
with AHS_Sku_cnt as 
(
SELECT 
dps.product_sku_id ,
count(distinct cdp.product_no) as cnt
FROM dw.dw_centre_document_product cdp
JOIN dw.dw_platform_settle_document_product psdp ON psdp.product_no = cdp.product_no AND psdp.settle_document_no = cdp.document_serial_no 
JOIN dw.dw_platform_document_product pdp ON pdp.product_no = psdp.product_no AND pdp.quotation_document_no = psdp.quotation_document_no 
JOIN dim.dim_product_sku_channel_mapping dps ON dps.product_sku_id = cdp.product_combine_sku_id and dps.business_channel_id=1
JOIN dim.dim_product dp ON dp.product_id = cdp.product_id
WHERE dp.product_category_id = 1 AND cdp.document_category_id >= 200 
AND pdp.quotation_document_type_id = 10
AND cdp.create_date >= current_date-interval '42' day
group by 1
  having count(distinct cdp.product_no)<10
)
select sku.product_sku_id
FROM dim.dim_product dp 
JOIN dim.dim_product_sku_channel_mapping dps ON dp.product_id = dps.product_id and dps.business_channel_id=1 
JOIN ods.ods_trunk_producttemplatemapping otpmp ON (dp.product_id = otpmp.productid AND otpmp.templatetype = 1 and otpmp.business_channel = '0')
JOIN ods.ods_opt_foundation_secondary_product_template_mapping otpm ON dp.product_id = otpm.product_id and otpm.business_channel=1
join dim.dim_product_sku  dpssp on dp.product_id =dpssp.product_id
inner join AHS_Sku_cnt sku on dpssp.product_sku_id=sku.product_sku_id
WHERE dp.product_category_id = 1 and otpm.business_channel=1 and dps.business_channel_id=1
and dp.product_create_date>current_date-interval '30' day and dpssp.product_sku_create_date>current_date-interval '30' day
)a
"""

# Directory where the trained model and fitted preprocessors are persisted
COMPLEMENT_MODEL_DIR = 'models/mobile'
# Feature columns fed to the one-hot encoder and the model
FEATURES = ['product_name', 'product_brand_name', 'product_level_name', 'product_level_template_id', 'memory',
            'guarantee', 'storage', 'networks', 'purchase_way', 'color', 'period', 'small_version']
# Early stopping: halt training once val_loss fails to improve for 10 epochs
early_stop = keras.callbacks.EarlyStopping(monitor='val_loss', patience=10)
model_callbacks = [
    early_stop
]


class MobileSKUPriceComplement:
    """Train a neural-network price model on historical mobile quotation data
    and use it to complement (back-fill) a forecast reference price for every
    SKU x product-level combination, persisting results to MySQL and GP.
    """

    def __init__(self, model_date=None):
        """
        :param model_date: date the model run is for; defaults to today
        """
        if model_date is None:
            self.model_date = get_today()
        else:
            self.model_date = model_date

        self.model_data = None    # training frame, set by process_training_data()
        self.model = None         # compiled keras.Model, set by build_model()
        self.predict_data = None  # SKU x level rows, set by process_complement_data()

        # handle_unknown='ignore': categories unseen during fit encode as all zeros
        self.ohe = OneHotEncoder(handle_unknown='ignore')
        self.scaler = StandardScaler()


    def process_training_data(self):
        """
        Preprocess training data: pivot per-document price properties to wide
        columns, rename Chinese property names to English feature names, and
        derive the memory / storage / period / color features.
        :return: None; result stored in self.model_data
        """
        # NOTE(review): __load_training_data is defined outside this chunk
        data = self.__load_training_data()
        if data is None:
            logger.critical('model data is None!')
            return

        logger.info('preprocess data...')
        # Property names may carry stray whitespace from upstream systems
        data['price_property_name_name'] = data['price_property_name_name'].map(lambda x: x.strip())

        # Wide format: one column per price property for each document item
        pivot_data = data.pivot_table(index=['document_item_id'],
                                      columns='price_property_name_name',
                                      values='price_property_value_name', aggfunc=lambda x: x)
        pivot_data = pivot_data.fillna('unknown')
        pivot_data = pivot_data.reset_index()

        # Re-attach the non-property columns: one row per document item again
        data = data.drop(columns=['price_property_name_name', 'price_property_value_name']).drop_duplicates()
        data = pd.merge(data, pivot_data, on='document_item_id')
        # presumably an 'unknown' placeholder property exists in the source
        # query and produces this throwaway column — TODO confirm
        data.drop(columns='unknown', inplace=True)

        # Chinese property names -> English feature names
        data.rename(columns={'内存': 'memory', '苹果保修期时长': 'guarantee', '存储容量': 'storage', '网络制式': 'networks',
                             '购买渠道': 'purchase_way', '机身颜色': 'color'}, inplace=True)

        # Some records encode "memory+storage" in the memory field; split them
        memory_storage_split = data['memory'].str.split('+', expand=True)
        memory_storage_split.columns = ['memory_tmp', 'storage_tmp']
        data = pd.concat([data, memory_storage_split], axis=1, sort=False)
        data['memory'] = data['memory_tmp']
        # Keep the split-out storage only where the split actually produced one
        data['storage'] = data['storage'].where(pd.isnull(data['storage_tmp']), data['storage_tmp'])
        # Apple ('苹果') rows collapse to a single sentinel memory value
        data.loc[data['product_brand_name'] == '苹果', 'memory'] = 'apple_memory'
        # Record age bucketed into 3-day periods, used as a categorical feature
        data['period'] = data['settle_list_create_date'].apply(self.__cal_mobile_period).astype(str)

        # Colors are product-specific: prefix each color with its product name
        data['color'] = np.where(data['color'] != 'unknown', data['product_name'] + '_X_' + data['color'], 'unknown')

        self.model_data = data.drop(columns=['memory_tmp', 'storage_tmp'])

    def build_model(self, input_shape):
        """
        Build and compile the regression network: a 128-64-32 fully-connected
        stack with ReLU activations and a single linear output, optimised with
        Adam on mean squared error.
        :param input_shape: number of input features (one-hot encoded width)
        :return: None; the compiled model is stored in self.model
        """
        inputs = keras.Input(shape=(input_shape,))
        hidden = keras.layers.Dense(128, activation='relu')(inputs)
        hidden = keras.layers.Dense(64, activation='relu')(hidden)
        hidden = keras.layers.Dense(32, activation='relu')(hidden)
        outputs = keras.layers.Dense(1, activation='linear')(hidden)

        self.model = keras.Model(inputs=inputs, outputs=outputs)
        self.model.compile(optimizer=keras.optimizers.Adam(), loss='mse')

    def train_model(self):
        """
        Train the price model: fit on an 80/20 split of all data with early
        stopping, then fine-tune for a few epochs on the most recent records.
        :return: None
        """
        if self.model_data is None:
            logger.critical('model_data is None!')
            return
        # Records from the last 3 days, used for a final fine-tuning pass
        recent_date = self.model_date - datetime.timedelta(days=3)
        recent_data = self.model_data[self.model_data['settle_list_create_date'] >= recent_date]

        x_train, x_valid, y_train, y_valid = train_test_split(self.model_data.drop(columns='item_quotation_price_num'),
                                                              self.model_data['item_quotation_price_num'],
                                                              test_size=0.2)

        # One-hot encode the categorical features; encoder is fit on train only
        train_x_inputs = self.ohe.fit_transform(x_train[FEATURES])
        valid_x_inputs = self.ohe.transform(x_valid[FEATURES])
        recent_x_inputs = self.ohe.transform(recent_data[FEATURES])

        # Standard-scale the price target; scaler is fit on train only
        train_y_scaled = self.scaler.fit_transform(y_train.values.reshape((-1, 1))).flatten()
        valid_y_scaled = self.scaler.transform(y_valid.values.reshape((-1, 1))).flatten()
        recent_y_scaled = self.scaler.transform(
            recent_data['item_quotation_price_num'].values.reshape((-1, 1))).flatten()

        logger.info('training model...')
        self.build_model(train_x_inputs.shape[1])
        self.model.fit(train_x_inputs, train_y_scaled, validation_data=(valid_x_inputs, valid_y_scaled),
                       epochs=100, verbose=0, batch_size=512, callbacks=model_callbacks)
        # Fine-tune on the recent records so the model tracks latest prices
        self.model.fit(recent_x_inputs, recent_y_scaled, epochs=5, verbose=0, batch_size=512)

    def save_model(self):
        """
        Persist the trained model (HDF5) and its fitted preprocessors
        (pickled OneHotEncoder and StandardScaler) under COMPLEMENT_MODEL_DIR.
        :return: None
        """
        if self.model is None:
            logger.critical('model is None!')
            return

        logger.info('saving model...')
        model_file_name = 'mobile_complement_model.h5'
        check_conflict_file(COMPLEMENT_MODEL_DIR, model_file_name)
        self.model.save(os.path.join(COMPLEMENT_MODEL_DIR, model_file_name))

        # Pickle the two fitted preprocessors next to the model file
        for pickle_name, preprocessor in (('mobile_complement_ohe.pkl', self.ohe),
                                          ('mobile_complement_scaler.pkl', self.scaler)):
            check_conflict_file(COMPLEMENT_MODEL_DIR, pickle_name)
            with open(os.path.join(COMPLEMENT_MODEL_DIR, pickle_name), 'wb') as f:
                pickle.dump(preprocessor, f)

    def process_complement_data(self):
        """
        Build the prediction frame: pivot SKU properties to wide columns,
        join product/template ids, derive the same engineered features as
        training, and cross-join with every product level.
        :return: None; result stored in self.predict_data
        """
        logger.info('processing complement data...')

        data = self.__load_complement_data()
        # Strip stray whitespace from property names and values
        data['property'] = data['property'].map(lambda x: x.strip())
        data['property_value'] = data['property_value'].map(lambda x: x.strip())
        # Wide format: one column per property for each SKU
        data = data.pivot_table(index='product_sku_id', columns='property', values='property_value',
                                aggfunc=lambda x: x).reset_index()
        data.fillna('unknown', inplace=True)

        product_id_data = self.__load_product_data()
        data = pd.merge(data, product_id_data, on='product_sku_id')
        # Constant category columns: 1 / '手机' = mobile phone
        data['product_category_id'] = 1
        data['product_category_name'] = '手机'

        # Same memory/storage/color engineering as process_training_data
        memory_storage_split = data['memory'].str.split('+', expand=True)
        memory_storage_split.columns = ['memory_tmp', 'storage_tmp']
        data = pd.concat([data, memory_storage_split], axis=1, sort=False)
        data['memory'] = data['memory_tmp']
        data['storage'] = data['storage'].where(pd.isnull(data['storage_tmp']), data['storage_tmp'])
        data.loc[data['product_brand_name'] == '苹果', 'memory'] = 'apple_memory'
        data['color'] = np.where(data['color'] != 'unknown', data['product_name'] + '_X_' + data['color'], 'unknown')
        # Prediction rows are "current", so the recency period is always 0
        # NOTE(review): memory_tmp/storage_tmp are kept here (training drops
        # them) — harmless since prediction selects FEATURES columns only
        data['period'] = '0'

        levels = self.__load_product_levels()

        # Cartesian product: every SKU priced at every product level
        data['_tmp_key'] = 0
        levels['_tmp_key'] = 0

        self.predict_data = pd.merge(data, levels, on='_tmp_key', how='outer').drop(columns='_tmp_key')
        self.predict_data['date'] = self.model_date.strftime('%Y-%m-%d')

    def predict_complement_data(self):
        """
        Predict complement prices for self.predict_data: encode the features,
        run the model, invert the target scaling, then apply business floors
        and caps.
        :return: None; adds 'forecast_reference_price' to self.predict_data
        """
        if self.predict_data is None:
            logger.critical('predict_data is None!')
            return
        if self.model is None:
            # NOTE(review): load_models is not visible in this chunk —
            # presumably restores model/ohe/scaler saved by save_model; confirm
            self.load_models()

        logger.info('predicting complement data...')

        test_x_inputs = self.ohe.transform(self.predict_data[FEATURES])

        predict_scaled = self.model.predict(test_x_inputs)
        # Undo the target scaling and round to integer prices
        self.predict_data['forecast_reference_price'] = np.round(
            self.scaler.inverse_transform(predict_scaled).flatten()).astype(int)
        # Business floor: no predicted price below 5
        self.predict_data['forecast_reference_price'] = self.predict_data['forecast_reference_price'].where(
            self.predict_data['forecast_reference_price'] >= 5, 5)
        # Environment-protection (recycling) template: cap its quotes at 10
        env_protect = self.predict_data['product_level_template_id'] == '408'
        self.predict_data.loc[env_protect, 'forecast_reference_price'] = self.predict_data.loc[
            env_protect, 'forecast_reference_price'].where(
            self.predict_data.loc[env_protect, 'forecast_reference_price'] <= 10, 10)

    def save_complement_data(self):
        """
        Persist predictions: keep only rows whose (template, level) pair
        exists in the mapping table, then write them to MySQL (REPLACE) and
        to the GP warehouse via Presto (day-partitioned).
        :return: None
        """
        if self.predict_data is not None:
            logger.info('saving complement data...')
            # Inner merge drops SKU x level rows with no valid template mapping
            template_level_mapping = self.__load_template_level_mapping()
            insert_data = pd.merge(self.predict_data, template_level_mapping,
                                   on=['product_level_template_id', 'product_level_id'])
            insert_sql = """
            REPLACE INTO fact_opt_forecast_reference_price(date, product_sku_key, product_sku_name, product_level_key, 
            product_level_name, product_key, product_name, product_category_id, product_category_name, 
            product_brand_name, forecast_reference_price)
            VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
            """
            mysql_processor.execute_insert_sql(insert_sql,
                                               insert_data[
                                                   ['date', 'product_sku_id', 'product_sku_name', 'product_level_id',
                                                    'product_level_name', 'product_id', 'product_name',
                                                    'product_category_id', 'product_category_name',
                                                    'product_brand_name', 'forecast_reference_price']
                                               ].to_records(index=False).tolist())
            logger.info('saving complement data to mysql done')

            insert_gp_sql = """
            INSERT INTO dm.dm_prognostizieren_preisergebnisse(predict_date, product_sku_key, product_sku_name, 
            product_level_key, product_level_name, product_key, product_name, product_category_id, 
            product_category_name, product_brand_name, forecast_reference_price)
            VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
            """

            # Ensure the day partition exists before inserting
            presto_processor.execute_sql(
                "SELECT md.add_partition('dm.dm_prognostizieren_preisergebnisse','{}','day')".format(
                    self.model_date.strftime('%Y%m%d')))
            presto_processor.execute_insert_postgresql(insert_gp_sql,
                                                         insert_data[
                                                             ['date', 'product_sku_id', 'product_sku_name',
                                                              'product_level_id', 'product_level_name', 'product_id',
                                                              'product_name', 'product_category_id',
                                                              'product_category_name', 'product_brand_name',
                                                              'forecast_reference_price']
                                                         ].to_records(index=False).tolist())
            logger.info('saving complement data to GP done')



    def __load_complement_data(self):
        """
        Load the long-format (sku, property, value) rows to be priced.
        :return: DataFrame from COMPLETE_DATA_SQL
        """
        return presto_processor.load_sql(COMPLETE_DATA_SQL)

    def __load_product_data(self):
        """
        Load product and level-template ids per SKU.
        :return: DataFrame from PRODUCT_SQL, with product_level_template_id
            cast to str so it matches its use as a categorical feature
        """
        products = presto_processor.load_sql(PRODUCT_SQL)
        products['product_level_template_id'] = products['product_level_template_id'].astype(str)
        return products

    def __load_product_levels(self):
        """
        Load the active product levels (id, name, order rank).
        :return: DataFrame from LEVEL_SQL
        """
        return presto_processor.load_sql(LEVEL_SQL)

    def __load_template_level_mapping(self):
        """
        Load the level-template -> product-level mapping used to filter
        predictions before persisting.
        :return: DataFrame with product_level_template_id as str and
            product_level_id as int, matching dtypes used elsewhere
        """
        # NOTE(review): LEVEL_TEMPLATE_MAPPING_SQL selects a column named
        # secondary_level_template_id, yet product_level_template_id is read
        # here — verify the loader's column naming/renaming behaviour
        level_map = presto_processor.load_sql(LEVEL_TEMPLATE_MAPPING_SQL)
        level_map['product_level_template_id'] = level_map['product_level_template_id'].astype(str)
        level_map['product_level_id'] = level_map['product_level_id'].astype(int)
        return level_map

    @staticmethod
    def __cal_mobile_period(s):
        """
        Bucket a record's age into 3-day periods.
        :param s: the record's settle date
        :return: integer period index — days elapsed since s, floor-divided by 3
        """
        return (get_today() - s).days // 3
