import datetime
import json
import os
import re
import sys
import time
from copy import deepcopy

import numpy as np

sys.path.append('..')
sys.path.append('../..')
from feature_set.base_feature import BaseFeature, RequstData
from feature_set.app.un.app_un_comp0_v1.loan_urgency import LoanUrgency
from feature_set.app.un.app_un_comp0_v1.upgrade import UpgradeFeature
from feature_set.app.un.app_un_comp0_v1.utils import CreateFeature
from functools import lru_cache
import pandas as pd

@lru_cache()
def load_dataframe(path):
    """Load the Excel sheet at *path*, keeping only rows whose ``version`` is "v1".

    Memoized per path via ``lru_cache`` (default maxsize=128), so repeated
    calls with the same path skip the disk read. Note the cached DataFrame is
    shared between callers — treat it as read-only.
    """
    frame = pd.read_excel(path)
    return frame[frame["version"] == "v1"]

class AppUnComp0V1(BaseFeature):
    """App-list feature set, component 0, version v1.

    Parses the raw installed-app list carried by a request, normalizes
    epoch-millisecond timestamps into the configured country time zone, and
    derives count / ratio / upgrade / loan-urgency features through the
    ``CreateFeature``, ``UpgradeFeature`` and ``LoanUrgency`` helpers.

    NOTE(review): ``self.country_info`` and ``self.app_vector_loan`` are read
    in this class but never assigned here — presumably provided by
    ``BaseFeature``; confirm against the base class.
    """

    def __init__(self):
        super().__init__()

        self.root_dir = self.get_root_dir(os.path.abspath("."))
        self.conf_dir = os.path.join(self.root_dir, "feature_conf")
        # feature_list: raw config rows; res_map: feature name -> default kind.
        self.feature_list, self.res_map = self._load_feature_list()

        # (max_age_days, suffix) recency windows used when bucketing features;
        # 10000 days acts as the effectively unbounded "all" window.
        self.day_section = [
            (3, "d3"),
            (7, "d7"),
            (15, "d15"),
            (30, "m1"),
            (60, "m2"),
            (90, "m3"),
            (180, "m6"),
            (10000, "all"),
        ]

        # Field names of the incoming app records. Left as None here;
        # NOTE(review): presumably a subclass or caller assigns these before
        # load_request() runs — confirm, otherwise load_request fails.
        self.app_name = None
        self.package_name = None
        self.inst_time_name = None
        self.updt_time_name = None
        self.pre_inst_name = None

    def _load_feature_list(self):
        """Read the ``featurelist`` config file.

        Each line is ``name | kind | ...`` separated by ``' | '``.

        Returns:
            (feature_list, res_map): the raw split rows, and a mapping of
            feature name -> kind (the first two columns of each row).
        """
        res_map = {}
        feature_list = []
        conf_path = os.path.join(
            self.conf_dir, 'app', 'un', 'app_un_comp0_v1', 'featurelist'
        )
        with open(conf_path, "r") as f:
            for line in f:
                line_list = line.strip().split(' | ')
                feature_list.append(line_list)
                res_map[line_list[0]] = line_list[1]
        return feature_list, res_map

    def get_root_dir(self, path):
        """Return the path prefix of *path* up to and including ``featurelib``.

        Raises:
            ValueError: if ``featurelib`` is not a component of *path*.
        """
        path_list = path.split(os.path.sep)
        index = path_list.index("featurelib")
        return os.path.sep.join(path_list[: index + 1])

    def trans_str_to_time(self, str_time):
        """Parse a ``"YYYY-mm-dd HH:MM:SS"`` string into a naive datetime."""
        return datetime.datetime.strptime(str_time, "%Y-%m-%d %H:%M:%S")

    def load_request(self, request_data):
        """Parse, normalize and filter the app list from *request_data*.

        Sets ``self.apply_date`` and ``self.app_list`` (records installed at
        or before the apply time, with derived date/day-diff fields).

        Raises:
            AssertionError: if the payload cannot be parsed into a non-empty
                list.
        """
        apply_time = self.trans_str_to_time(request_data.apply_time)
        self.apply_date = apply_time.date()

        app_list = []
        try:
            applist_data = request_data.data_sources["applist_data"]
            app_list = (
                json.loads(applist_data)
                if isinstance(applist_data, str)
                else applist_data
            )
        except Exception:
            # Best-effort: a missing or undecodable payload falls through to
            # the validation below, which reports it as unusable.
            pass

        # Check the type BEFORE len(): json.loads may return a scalar on
        # which len() itself would raise TypeError. Raise explicitly instead
        # of via `assert`, which is stripped under `python -O`.
        if not isinstance(app_list, list) or len(app_list) == 0:
            raise AssertionError(
                "传入的app_json无法解析或者解析后异常(不是list，或者为空list)"
            )

        # Canonical field name -> configured request field name.
        name_trans_hash = {
            "app": self.app_name,
            "package": self.package_name,
            "inst_time": self.inst_time_name,
            "updt_time": self.updt_time_name,
            "pre_inst": self.pre_inst_name,
        }

        clean_app_list = []
        for app in app_list:
            trans_app = {}
            trans_app["app"] = app[name_trans_hash["app"]]
            trans_app["package"] = app[name_trans_hash["package"]]
            # Timestamps arrive as epoch milliseconds; shift from UTC into
            # the country time zone (whole hours).
            # NOTE(review): utcfromtimestamp is deprecated since Python 3.12;
            # prefer datetime.fromtimestamp(..., tz=timezone.utc) on upgrade.
            trans_app["inst_time"] = datetime.datetime.utcfromtimestamp(
                int(app[name_trans_hash["inst_time"]]) // 1000
            ) + datetime.timedelta(hours=self.country_info["time_zone"])
            trans_app["inst_date"] = trans_app["inst_time"].date()
            trans_app["updt_time"] = datetime.datetime.utcfromtimestamp(
                int(app[name_trans_hash["updt_time"]]) // 1000
            ) + datetime.timedelta(hours=self.country_info["time_zone"])
            trans_app["updt_date"] = trans_app["updt_time"].date()
            trans_app["up2in_time_diff_days"] = (
                trans_app["updt_date"] - trans_app["inst_date"]
            ).days
            trans_app["pre_inst"] = app[name_trans_hash["pre_inst"]]
            trans_app["inst_tfdays"] = (self.apply_date - trans_app["inst_date"]).days
            trans_app["updt_tfdays"] = (self.apply_date - trans_app["updt_date"]).days
            # Keep only apps installed at or before the apply time. trans_app
            # is freshly built each iteration, so no deepcopy is needed.
            if trans_app["inst_time"] <= apply_time:
                clean_app_list.append(trans_app)
        self.app_list = clean_app_list

    def gen_features(self, data: RequstData):
        """Compute the full feature dict for one request.

        On an unusable request, returns the declared default for every
        configured feature ("N/A" for ``*_name`` features, 0 otherwise).
        Otherwise returns a dict restricted to the configured feature names,
        with np.int64 values downcast to int and NaN values mapped to None.
        """
        feature_dict_res = {}
        feature_dict = {}
        try:
            self.load_request(data)
        except Exception:
            # Unusable request payload: emit defaults for every feature.
            for name in self.res_map:
                feature_dict[name] = "N/A" if "_name" in name else 0
            return feature_dict

        # Feature generators all share the cleaned app list and the loan
        # app-vector table.
        feature_parse = CreateFeature(
            applist=self.app_list, data=self.app_vector_loan
        )
        feature_upgrade = UpgradeFeature(
            applist=self.app_list, data=self.app_vector_loan
        )
        feature_loanurgency = LoanUrgency(
            applist=self.app_list, data=self.app_vector_loan
        )
        fuc_list = [
            "basic_count",
            "basic_discount",
            "basic_toptag",
            "basic_toptag_count",
            "basic_toptag_ratio",
            "basic_tag_ratio",
            "basic_tag_poly",
            "basic_tag_sumratio",
            "basic_tagcnt_cyclical",
            "basic_tagsum_cyclical",
        ]
        fuc_list2 = ["grade_feature", "loanupgrade_feature"]
        fuc_list3 = ["loannum_oneday","longest_continuedays","get_days_from_now","special_name_feature"]
        tags = ["package_name", "app_name", "genre_old", "data_country_id"]
        tags_num = ["sim", "score_old", "installs_old"]
        feature_dict1 = feature_parse(tags=tags, tags_num=tags_num, fuc_list=fuc_list)
        feature_dict2 = feature_upgrade(fuc_list=fuc_list2)
        feature_dict3 = feature_loanurgency(fuc_list=fuc_list3)
        feature_dict = feature_upgrade.cutwin.merge_dfs([feature_dict1, feature_dict2, feature_dict3])

        # Long-form token -> short token for feature names. Insertion order
        # matters: the specific "data_country_idNNN" keys must be tried
        # before the generic "data_country_id" fallback.
        name_map = {
            "package_name": "pck",
            "app_name": "app",
            "data_country_id256": "ctry",
            "data_country_id254": "ctry",
            "data_country_id234": "ctry",
            "genre_old": "grne",
            "score_old": "scr",
            "installs_old": "inst",
            "FINANCE": "fin",
            "BOOKS_AND_REFERENCE": "book",
            "BUSINESS": "busi",
            "EDUCATION": "edu",
            "ENTERTAINMENT": "ente",
            "LIFESTYLE": "life",
            "PERSONALIZATION": "pers",
            "PRODUCTIVITY": "prod",
            "SHOPPING": "shop",
            "TOOLS": "tool",
            "grade": "grd",
            "data_country_id": "ctry",
        }
        # Normalize generated names: strip straight/curly quotes, shorten
        # known tokens, lowercase.
        for raw_name in feature_dict:
            clean = raw_name.replace('"', "").replace('“', "").replace('”', "")
            for key in name_map:
                if key in clean:
                    clean = clean.replace(key, name_map[key])
            clean = clean.lower()
            feature_dict_res[clean] = feature_dict[raw_name]

        # Backfill defaults for configured features the generators did not
        # produce.
        for name in self.res_map:
            if name not in feature_dict_res:
                feature_dict_res[name] = "N/A" if "_name" in name else 0

        # Keep only configured features; make values JSON-friendly.
        filter_feature_dict_res = {}
        for name, value in feature_dict_res.items():
            if name not in self.res_map:
                continue
            if isinstance(value, np.int64):
                # np.int64 is not JSON-serializable; downcast to plain int.
                filter_feature_dict_res[name] = int(value)
            elif pd.isna(value):
                filter_feature_dict_res[name] = None
            else:
                filter_feature_dict_res[name] = value

        return filter_feature_dict_res
