# -*- coding: utf-8 -*-
"""
Created on Fri Feb  2 16:00:04 2018

@author:Devin

"""
#from numba import jit
import functools
import gc
import hashlib
import os
import pickle
import time
from multiprocessing import Pool

import pandas as pd
from dask.delayed import delayed
from tqdm import tqdm

from feature_engineering_class import Construction_data
# =============================================================================
# load save code start
# =============================================================================
# =============================================================================
# =============================================================================
# Directory holding the on-disk pickle cache used by the @cache decorator.
cache_root_dir = 'stagex'
# exist_ok avoids the check-then-create race of `if not exists: makedirs`.
os.makedirs(cache_root_dir, exist_ok=True)
def md5(s):
    """Return the hex MD5 digest of the UTF-8 encoding of *s*."""
    return hashlib.md5(s.encode("utf8")).hexdigest()
def cache_key(f, *args, **kwargs):
    """Build the dump-file path for a call to *f* with the given arguments.

    The key is the function name plus the repr of the arguments, hashed so
    arbitrary argument contents become a safe filename.
    """
    signature = '-'.join([f.__name__, str(args), str(kwargs)])
    return os.path.join(cache_root_dir, md5(signature) + '.dump')
def cache(f):
    """Decorator: memoize *f*'s return value as a pickle on disk.

    The cache file is derived from the function name and arguments via
    cache_key(); on a hit the pickled object is loaded, on a miss *f* runs
    and its result is dumped (protocol 4 for large objects).

    NOTE(review): pickle.load on the cache file is only safe because the
    files are produced locally by this same process — never point
    cache_root_dir at untrusted data.
    """
    @functools.wraps(f)  # preserve f's name/docstring on the wrapper
    def wrap(*args, **kwargs):
        fn = cache_key(f, *args, **kwargs)
        if os.path.exists(fn):
            print('loading cache')
            with open(fn, 'rb') as fr:
                return pickle.load(fr)

        obj = f(*args, **kwargs)
        with open(fn, 'wb') as fw:
            pickle.dump(obj, fw, protocol=4)
        return obj
    return wrap
# =============================================================================
# load save code over
# =============================================================================

@cache
def load_table(data_path, err, cols=None):
    """Read one CSV file (``data_path + err``) restricted to *cols*.

    Bug fix: the original passed a bare global ``all_cols`` that is never
    defined at module level (it only exists as a local inside normal_para),
    so every call raised NameError. *cols* defaults to None, which makes
    pandas read every column; pass the column list explicitly to restrict.
    """
    return pd.read_csv(data_path + err, sep=',', usecols=cols)

# =============================================================================
# save path and columns list etc.
# =============================================================================
@cache
def normal_para(times):
    """Return the fixed run parameters for the pipeline.

    Returns the recorder spreadsheet path, the faulty-fleet (gz) and
    normal-fleet (zc) data directories with their file listings, and the
    list of CSV columns to load. *times* is only printed — it doubles as
    part of the disk-cache key, so a new value forces recomputation.
    """
    print(times)
    path_recoder = '/data/recorder/深度学习模型数据.xlsx'
    path_gz = '/data/all_data/'
    path_zc = '/data/zc_data/'
    file_box_gz = os.listdir(path_gz)
    file_box_zc = os.listdir(path_zc)
    all_cols = [
        'vin', 'starttime', 'vehspeed', 'vehrpm', 'vehsyspwrmod',
        'vehgearpos', 'tboxaccelx', 'tboxaccely', 'tboxaccelz',
        'vehbrakepos', 'vehfuelconsumed', 'vehoutsidetemp', 'vehinsidetemp',
        'vehac', 'vehacauto', 'vehacfanspeed', 'vehacdrvtargettemp',
        'vehacpasstargettemp', 'vehsidelight', 'vehdiplight', 'vehmainlight',
        'vehwiperswitchfront', 'vehfuellev', 'vehbatt', 'vehcoolanttemp',
        'vehodo',
    ]
    return path_recoder, path_gz, path_zc, file_box_gz, file_box_zc, all_cols

# =============================================================================
# load recorder data
# =============================================================================
@cache
def get_recorder(path_recoder, times):
    """Parse the recorder workbook into per-vehicle date records.

    Returns a pair:
      v_err    — [vin, [first repair START_TIME, PURCHASED_DATE]] for each
                 faulty vehicle with exactly one repair;
      vin_nors — [vin, PURCHASED_DATE] for each normal vehicle.
    *times* is only printed (it is also part of the disk-cache key).
    """
    print(times)

    def first_row(frame, vin):
        # First record for this VIN (same pick as .iloc[0] on the filter).
        return frame[frame.VIN == vin].iloc[0]

    def vin_list(v_list, df, pares='e'):
        # 'e' (error fleet): pair each VIN with [repair time, purchase date].
        # 'n' (normal fleet): pair each VIN with its purchase date.
        if pares == 'e':
            df.START_TIME = pd.to_datetime(df.START_TIME)
            df.PURCHASED_DATE = pd.to_datetime(df.PURCHASED_DATE)
            return [[v, [first_row(df, v).START_TIME,
                         first_row(df, v).PURCHASED_DATE]] for v in v_list]
        elif pares == 'n':
            df.PURCHASED_DATE = pd.to_datetime(df.PURCHASED_DATE)
            return [[v, first_row(df, v).PURCHASED_DATE] for v in v_list]

    workbook = pd.ExcelFile(path_recoder)
    df_error = workbook.parse("故障车辆--2018-1-10")
    df_normal = workbook.parse("正常车辆")
    # Only faulty vehicles with exactly one repair are kept.
    vin_err = list(df_error.loc[df_error.维修次数 == 1].VIN)
    vin_nor = list(df_normal.loc[:, 'VIN'])
    v_err = vin_list(vin_err, df_error, 'e')
    vin_nors = vin_list(vin_nor, df_normal, 'n')
    return v_err, vin_nors

# =============================================================================
# load gz data and get vin unique   .compute()
# =============================================================================

@cache
def get_zc_vin(path_zc, file_box_zc, all_cols, vin_nor, times):
    """Collect existing on-disk dump paths for the normal fleet.

    For each normal VIN, look for a previously written pickle at
    ``temp_cache_vin_normal/<md5(vin)>.dump`` and record ``[vin, path]``
    when it exists; VINs without a dump are silently skipped.

    path_zc / file_box_zc / all_cols are currently unused — they belonged
    to the (removed) dump-building pass and are kept so the call sites
    stay valid. vin_nor is a list of [vin, PURCHASED_DATE] pairs; *times*
    only feeds the print/disk-cache key.

    Returns a list of [vin, dump_path] pairs.
    """
    print(times)
    path = []
    i = 0
    for k in tqdm(vin_nor):
        # Dump files are keyed by the MD5 of the VIN string.
        s = '%s' % (str(k[0]))
        paths = os.path.join('temp_cache_vin_normal', '%s.dump' % md5(s))
        if os.path.exists(paths):
            i += 1
            print('\n=============>' + str(i) + ' this normal vin is already exists  now will pass')
            path.append([k[0], paths])
    return path

@cache
def get_gz_vin(path_gz, file_box_gz, all_cols, v_err, times):
    """Collect existing on-disk dump paths for the faulty fleet.

    For each faulty VIN, look for a previously written pickle at
    ``temp_cache_vin/<md5(vin)>.dump`` and record ``[vin, path]`` when it
    exists; VINs without a dump are silently skipped.

    path_gz / file_box_gz / all_cols are currently unused — they belonged
    to the (removed) dump-building pass and are kept so the call sites
    stay valid. v_err is a list of [vin, [repair_time, purchase_date]]
    pairs; *times* only feeds the print/disk-cache key.

    Returns a list of [vin, dump_path] pairs.
    """
    print(times)
    path = []
    v_errs = dict(v_err)
    i = 0
    for k, v in tqdm(v_errs.items()):
        # Dump files are keyed by the MD5 of the VIN string.
        s = '%s' % (str(k))
        paths = os.path.join('temp_cache_vin', '%s.dump' % md5(s))
        if os.path.exists(paths):
            i += 1
            print('\n=============>' + str(i) + ' this error vin is already exists  now will pass')
            path.append([k, paths])
    return path


def vin_combin_nor(k, pathss, vin_nor):
    """Build the labelled daily frame for one normal vehicle.

    Loads the vehicle's dumped raw frame from *pathss*, attaches its
    purchase date (looked up in *vin_nor*), aggregates it to daily
    resolution via Construction_data, and labels every row 0 (normal).
    """
    started = time.time()
    purchase_dates = dict(vin_nor)  # key: vin, value: PURCHASED_DATE
    with open(pathss, 'rb') as fr:
        frame = pickle.load(fr)
    frame = frame.drop_duplicates()
    frame.loc[:, 'starttime'] = pd.to_datetime(frame.loc[:, 'starttime'])
    frame['ptime'] = purchase_dates[k]  # add PURCHASED_DATE
    builder = Construction_data(frame, time='D')  # daily aggregation
    combined = builder.concat_data()
    combined['vin'] = k
    combined['label'] = 0
    print("for one vin cost time ：", int(time.time() - started) / 60)
    return combined

def vin_combin_err(k, v, pathss):
    """Build the labelled daily frames for one faulty vehicle.

    *v* is ``[repair_time, purchase_date]``. After loading and daily-
    aggregating the vehicle's dumped raw frame, two windows are cut from
    the result: the 15 days ending at the repair time (label 1, faulty)
    and days 3..164 starting at the repair time (label 0, repaired).

    Returns ``[full_frame, error_window, normal_window]``.
    """
    days = 15
    with open(pathss, 'rb') as fr:
        temp_vin = pickle.load(fr)
    temp_vin = temp_vin.drop_duplicates()
    temp_vin.loc[:, 'starttime'] = pd.to_datetime(temp_vin.loc[:, 'starttime'])
    temp_vin['ptime'] = v[1]  # add PURCHASED_DATE
    temp_time_e = pd.date_range(end=v[0], periods=days)
    temp_time_n = pd.date_range(start=v[0], periods=days + 150)
    mix = Construction_data(temp_vin, time='D')
    mixx = mix.concat_data()
    mixx['vin'] = k
    # Bug fix: the windows were slices (views) of mixx, so the label
    # assignments below triggered pandas SettingWithCopy and could be
    # silently lost. .copy() makes them independent frames first.
    temp_vin_e = mixx[str(temp_time_e[0]):str(temp_time_e[-1])].copy()
    temp_vin_n = mixx[str(temp_time_n[3]):str(temp_time_n[-1])].copy()
    temp_vin_e['label'] = 1
    temp_vin_n['label'] = 0
    return [mixx, temp_vin_e, temp_vin_n]


@cache
def mix_pool_nor(path, vin_nor, times):
    """Fan the per-VIN normal-data construction out over a process pool.

    *path* is a list of [vin, dump_path] pairs (from get_zc_vin); *vin_nor*
    is a list of [vin, PURCHASED_DATE] pairs; *times* feeds the print/
    disk-cache key. Returns the list of daily-aggregated, label-0 frames,
    one per normal VIN.

    NOTE(review): the original body stopped after building the path dict
    and permanently cached an implicit None, although the caller stores a
    result; this completes the function symmetrically to mix_pool_err.
    """
    print(times)
    path = dict(path)  # key: vin, value: vin's dump file path
    pool = Pool(15)
    pool_list = []
    for k, v in tqdm(path.items()):
        results = pool.apply_async(vin_combin_nor, (k, v, vin_nor))
        pool_list.append(results)
    print(len(pool_list))
    pool.close()
    pool.join()
    nor_list = []
    for result in tqdm(pool_list):
        s = time.time()
        nor_list.append(result.get())
        efor = time.time()
        print("for one result vin cost time ：", int(efor - s) / 60)
    return nor_list

@cache
def mix_pool_err(path, v_err, times):
    """Fan the per-VIN error-window construction out over a process pool.

    *path* is a list of [vin, dump_path] pairs (from get_gz_vin); *v_err*
    maps each faulty VIN to [repair_time, purchase_date]; *times* feeds
    the print/disk-cache key. Returns three parallel lists: the full
    frames, the label-1 error windows and the label-0 post-repair windows.
    """
    print(times)
    dump_paths = dict(path)
    repair_info = dict(v_err)
    pool = Pool(15)
    pending = []
    for k, v in tqdm(dump_paths.items()):
        pending.append(pool.apply_async(vin_combin_err, (k, repair_info[k], v)))
    print(len(pending))
    pool.close()
    pool.join()
    e_list, err_e_list, err_n_list = [], [], []
    for job in tqdm(pending):
        tick = time.time()
        full_frame, err_win, nor_win = job.get()
        e_list.append(full_frame)
        err_e_list.append(err_win)
        err_n_list.append(nor_win)
        print("for one result vin cost time ：", int(time.time() - tick) / 60)
    return e_list, err_e_list, err_n_list


if __name__ == '__main__':
    # Pipeline driver. The trailing integer arguments (2, 1, ...) are only
    # printed by each step, but they also become part of each step's
    # disk-cache key — changing a number forces that step to recompute.
    path_recoder,path_gz,path_zc,file_box_gz,file_box_zc,all_cols = normal_para(2)
    print('............>>>>>> now start read recorder !')
    # v_err: [vin, [repair_time, purchase_date]]; vin_nor: [vin, purchase_date]
    v_err,vin_nor = get_recorder(path_recoder,2)
    print('............>>>>>> now step dictx and combin !')
    # Collect the pre-built per-VIN dump paths for both fleets.
    path_nor = get_zc_vin(path_zc,file_box_zc,all_cols,vin_nor,1)
    path_err = get_gz_vin(path_gz,file_box_gz,all_cols,v_err,1)
    # Build labelled daily frames in parallel (normal fleet, then faulty).
    nor_list = mix_pool_nor(path_nor,vin_nor,1)
    print('\n=============>\n'+'now normal data extract and converge complite!' )
    e_list,err_e_list,err_n_list = mix_pool_err(path_err,v_err,1)
    print('\n=============>\n'+'now error data extract and converge complite!' )


