#!/usr/bin/env python
# encoding: utf-8
import math
import pandas as pd
from utils.time_utisl import *
from utils.common import *
from feature_set.base_feature import BaseFeature, RequstData


class AppUnLoanV2(BaseFeature):
    """Loan-app behaviour features derived from a device's installed-app list.

    Stage 1 computes, per rolling window before the apply time (1/3/7/.../180
    days and an unbounded 'inf' window), counts, ratios, time-density and
    consecutive-day streak statistics of loan-category app installs/updates.
    Stage 2 adds cross-window diff and ratio features between each pair of
    adjacent windows.  All features are floats; missing values become -999.0.
    """

    def __init__(self):
        super().__init__()
        self.apply_time = None        # datetime parsed from the request's apply_time
        self.country_abbr = None      # country abbreviation, used for timezone conversion
        self.applist_data_df = None   # processed app-list DataFrame, set by load_request
        # dispatch table: feature-set name -> generator method
        self.function_map = {
            'gen_apploan_feature': self.gen_apploan_feature
        }

    def load_conf(self):
        """Load config file — this feature set needs no configuration."""
        pass

    def load_request(self, request_data: RequstData):
        """Parse the request and build the enriched app-list DataFrame.

        Args:
            request_data: input detail data; must carry apply_time,
                country_abbr and an 'applist_data' entry in data_sources.

        Side effects:
            Sets self.apply_time, self.country_abbr and self.applist_data_df
            (app list left-joined with Google Play loan-app category info).
        """
        self.apply_time = self.trans_str_to_time(request_data.apply_time)
        self.country_abbr = request_data.country_abbr
        applist_data = request_data.data_sources.get('applist_data', [])
        applist_data_df = self.process_data(applist_data)
        # left join keeps every device app; apps missing from the category
        # table get NaN in the joined columns ('cate', 'mininstalls', ...)
        self.applist_data_df = pd.merge(applist_data_df, self.google_loanapp_cate_info, left_on='app_package', right_on='package_name', how='left')

    def process_app_name(self, app_name):
        """Normalize an app name.

        Lower-cases, applies the project's special-character replacements,
        converts non-breaking spaces to plain spaces and drops possessive "'s".
        """
        app_name = app_name.lower()
        app_name = sp_string_replace(app_name)
        app_name = app_name.replace('\xa0', ' ')
        app_name = app_name.replace('\'s', '')
        return app_name

    def get_issystem_app(self, data):
        """Copy the raw isSystem flag into the canonical is_system_new column."""
        data['is_system_new'] = data["isSystem"]
        return data

    def trans_str_to_time(self, str_time):
        """Parse a 'YYYY-mm-dd HH:MM:SS' string into a naive datetime."""
        return datetime.strptime(str_time, "%Y-%m-%d %H:%M:%S")

    def process_data(self, applist_data):
        """Turn the raw app-list payload into a normalized DataFrame.

        Deduplicates by package, normalizes names, localizes install/update
        timestamps via time_trans, and derives date, is_update and
        days-before-apply interval columns.

        Args:
            applist_data: list of per-app dicts (expects app_package,
                app_name, isSystem, fi_time, lu_time keys).

        Returns:
            DataFrame with the derived columns; an empty schema-only
            DataFrame when the payload is empty.
        """
        if applist_data:
            applist_data_df = pd.DataFrame(applist_data)
            applist_data_df = applist_data_df.drop_duplicates(subset='app_package', keep='first')
            applist_data_df = self.get_issystem_app(applist_data_df)
            applist_data_df['app_name'] = applist_data_df['app_name'].apply(self.process_app_name)
            # fi_time / lu_time are raw first-install / last-update stamps;
            # time_trans localizes them for the request's country
            applist_data_df['install_time'] = applist_data_df['fi_time'].apply(time_trans, args=(self.country_abbr,))
            applist_data_df['install_date'] = applist_data_df['install_time'].apply(lambda x: x.date())
            applist_data_df['update_time'] = applist_data_df['lu_time'].apply(time_trans, args=(self.country_abbr,))
            applist_data_df['update_date'] = applist_data_df['update_time'].apply(lambda x: x.date())
            # an app was updated iff its update time differs from install time
            applist_data_df['is_update'] = (applist_data_df['install_time'] != applist_data_df['update_time']).astype(int)
            applist_data_df['apply_install_interval_time'] = (pd.to_datetime(self.apply_time) - pd.to_datetime(applist_data_df['install_time'], errors='coerce')).dt.days
            applist_data_df['apply_update_interval_time'] = (pd.to_datetime(self.apply_time) - pd.to_datetime(applist_data_df['update_time'], errors='coerce')).dt.days
        else:
            applist_data_df = pd.DataFrame(columns=['app_name', 'app_package', 'is_system_new', 'install_time', 'install_date', 'update_time', 'update_date', 'is_update', 'apply_install_interval_time', 'apply_update_interval_time'])
            self.logger.info('输入数据applist_data结点为空')
        return applist_data_df

    def gen_apploan_feature(self):
        """Generate all loan-app features and sanitize missing values.

        Returns:
            dict mapping feature name -> float, with NaN/None/NaT replaced
            by the sentinel -999.0.
        """
        feature_dict = {}
        self.__gen_apploan_stage1_feature(self.applist_data_df, feature_dict)
        self.__gen_apploan_stage2_feature(feature_dict)

        # pd.isna also handles None/NaT, where np.isnan would raise TypeError
        for key, value in feature_dict.items():
            feature_dict[key] = float(value) if not pd.isna(value) else -999.0
        return feature_dict

    def __gen_apploan_stage1_feature(self, applist_data_df, feature_dict):
        """Fill feature_dict with per-window loan-app statistics.

        For each action (install/update) and each window size (np.inf yields
        an 'infd'-suffixed, unbounded window): loan-app counts and ratios,
        distinct active days, a time-density score, per-day aggregates,
        consecutive-day streak stats, Play-Store metadata aggregates and
        first/last-day distances to the apply time.
        """
        day_list = [1, 3, 7, 15, 30, 60, 90, 180, np.inf]
        action_list = ['install', 'update']
        # system apps are excluded from every feature
        notsys_data = applist_data_df[applist_data_df['is_system_new'] == 0]
        for action in action_list:
            # sort once per action so downstream interval math sees
            # chronologically ordered timestamps
            notsys_data = notsys_data.sort_values(by=f'{action}_time')
            for day in day_list:
                cut_data = notsys_data[(notsys_data[f'apply_{action}_interval_time'] >= 0) & (notsys_data[f'apply_{action}_interval_time'] <= day)]
                cut_loandata = cut_data[cut_data['cate'] == 'loan']
                app_cnt = len(cut_loandata['app_package'])
                app_cnt_ratio = divide(app_cnt,len(cut_data))
                day_cnt = cut_loandata[f'{action}_date'].nunique()

                date_objects = cut_loandata[f'{action}_time'].unique()
                if len(date_objects) > 1:
                    intervals = [(date_objects[i + 1] - date_objects[i]).days for i in range(len(date_objects) - 1)]
                    max_iter = (date_objects[-1] - date_objects[0]).days
                    time_density = divide(math.sqrt(sum([math.pow(i, 2) for i in intervals])), max_iter)  # activity density aggregated by day
                else:
                    time_density = np.nan

                feature_dict[f'{action}_app_cnt_{day}d'] = app_cnt
                feature_dict[f'{action}_app_cnt_ratio_{day}d'] = app_cnt_ratio
                feature_dict[f'{action}_day_cnt_{day}d'] = day_cnt
                feature_dict[f'{action}_time_density_{day}d'] = time_density

                # per-calendar-day loan-app counts and gaps between active days;
                # shift(-1) leaves the last row's interval_day as NaN
                cut_loandata_groupby_date = cut_loandata.groupby([f'{action}_date']).agg(app_cnt=('app_package', 'count')).reset_index()
                cut_loandata_groupby_date['shift_date'] = cut_loandata_groupby_date[f'{action}_date'].shift(-1)
                cut_loandata_groupby_date['interval_day'] = (pd.to_datetime(cut_loandata_groupby_date['shift_date']) - pd.to_datetime(cut_loandata_groupby_date[f'{action}_date'])).dt.days

                for column in ['app_cnt', 'interval_day']:
                    for stat in ['max', 'min', 'mean', 'median', 'var']:
                        stat_val = cut_loandata_groupby_date[column].agg(stat)
                        feature_dict[f'{action}_{stat}_{column}_groupby_date_{day}d'] = stat_val

                # count runs of consecutive active days; the trailing NaN
                # interval guarantees the final run is flushed into the list
                continuous_cnt = 1
                continuous_list = []
                for i in cut_loandata_groupby_date['interval_day'].tolist():
                    if i == 1:
                        continuous_cnt += 1
                    else:
                        continuous_list.append(continuous_cnt)
                        continuous_cnt = 1

                if continuous_list:
                    max_continuous_days = np.max(continuous_list)
                    min_continuous_days = np.min(continuous_list)
                    avg_continuous_days = np.mean(continuous_list)
                    median_continuous_days = np.median(continuous_list)
                    var_continuous_days = np.var(continuous_list)
                else:
                    max_continuous_days = np.nan
                    min_continuous_days = np.nan
                    avg_continuous_days = np.nan
                    median_continuous_days = np.nan
                    var_continuous_days = np.nan

                feature_dict[f'{action}_max_continuous_days_groupby_date_{day}d'] = max_continuous_days
                feature_dict[f'{action}_min_continuous_days_groupby_date_{day}d'] = min_continuous_days
                feature_dict[f'{action}_avg_continuous_days_groupby_date_{day}d'] = avg_continuous_days
                feature_dict[f'{action}_median_continuous_days_groupby_date_{day}d'] = median_continuous_days
                feature_dict[f'{action}_var_continuous_days_groupby_date_{day}d'] = var_continuous_days

                # Play-Store metadata columns come from the category-info join
                for column in ['mininstalls', 'score', 'ratings', 'reviews', 'price']:
                    for stat in ['max', 'min', 'mean', 'median', 'var']:
                        stat_val = cut_loandata[column].agg(stat)
                        feature_dict[f'{action}_{stat}_{column}_{day}d'] = stat_val

                if not cut_loandata_groupby_date.empty:
                    first_last_days_diff = (max(cut_loandata_groupby_date[f'{action}_date']) - min(cut_loandata_groupby_date[f'{action}_date'])).days
                    apply_last_days_diff = (self.apply_time.date() - max(cut_loandata_groupby_date[f'{action}_date'])).days
                    apply_first_days_diff = (self.apply_time.date() - min(cut_loandata_groupby_date[f'{action}_date'])).days
                else:
                    first_last_days_diff = np.nan
                    apply_last_days_diff = np.nan
                    apply_first_days_diff = np.nan

                feature_dict[f'{action}_first_last_days_diff_{day}d'] = first_last_days_diff
                feature_dict[f'{action}_apply_last_days_diff_{day}d'] = apply_last_days_diff
                feature_dict[f'{action}_apply_first_days_diff_{day}d'] = apply_first_days_diff

        # overall loan-app update count and ratio (not window-limited)
        app_update_cnt = np.sum(notsys_data.loc[notsys_data['cate']=='loan','is_update'])
        app_update_cnt_ratio = np.mean(notsys_data.loc[notsys_data['cate']=='loan','is_update'])

        feature_dict['app_update_cnt'] = app_update_cnt
        feature_dict['app_update_cnt_ratio'] = app_update_cnt_ratio

        return feature_dict

    def __gen_apploan_stage2_feature(self, feature_dict):
        """Add cross-window diff and ratio features.

        For every pair of adjacent windows (pre_day, day) and every stage-1
        feature suffixed with '{day}d', emits
        '..._{pre_day}d_{day}d_diff'  = value(pre_day) - value(day) and
        '..._{pre_day}d_{day}d_ratio' = divide(value(pre_day), value(day)).
        Stage 1 emits every feature for every window, so pre_key always exists.
        """
        tmp_feature_dict = {}
        day_list = [1, 3, 7, 15, 30, 60, 90, 180, np.inf]
        for i in range(1, len(day_list)):
            pre_day = day_list[i - 1]
            day = day_list[i]

            for key in feature_dict.keys():
                if key.split('_')[-1] == f'{day}d':
                    pre_key = '_'.join(key.split('_')[:-1]) + f'_{pre_day}d'
                    new_key1 = '_'.join(key.split('_')[:-1]) + f'_{pre_day}d_{day}d_diff'
                    new_key2 = '_'.join(key.split('_')[:-1]) + f'_{pre_day}d_{day}d_ratio'
                    tmp_feature_dict[new_key1] = feature_dict[pre_key] - feature_dict[key]
                    tmp_feature_dict[new_key2] = divide(feature_dict[pre_key], feature_dict[key])

        # merge after the scan so the dict is not mutated while iterating
        feature_dict.update(tmp_feature_dict)
        return feature_dict
