# -*- coding: utf-8 -*-
"""
Created on Fri Feb  2 16:00:04 2018

@author:Devin

"""
import functools
import gc
import hashlib
import os
import pickle
import time
from collections import Counter
from multiprocessing import Pool

import dask.dataframe as dd
import numpy as np
import pandas as pd
from dask.delayed import delayed
from dask.multiprocessing import get
from numba import jit
from tqdm import tqdm

from mix_class import Construction_data
# =============================================================================
# load save code start
# =============================================================================
# Directory holding the pickled results of @cache-decorated calls.
cache_root_dir = 'cache_vin_copy'
# exist_ok avoids the check-then-create race of os.path.exists + makedirs.
os.makedirs(cache_root_dir, exist_ok=True)
def md5(s):
    """Return the hex MD5 digest of the UTF-8 encoding of *s*."""
    return hashlib.md5(s.encode("utf8")).hexdigest()
def cache_key(f, *args, **kwargs):
    """Build the cache-file path for calling *f* with the given arguments.

    The path is ``cache_root_dir/<md5 of "name-args-kwargs">.dump``.
    """
    signature = '-'.join([f.__name__, str(args), str(kwargs)])
    return os.path.join(cache_root_dir, md5(signature) + '.dump')
def cache(f):
    """Decorator: memoize *f*'s return value as a pickle on disk.

    The cache file name is derived from the function name and the repr of
    its arguments (see ``cache_key``); a cache hit short-circuits the call.
    Note: results are keyed by ``str(args)``, so arguments must have a
    stable, distinguishing ``str`` representation.
    """
    @functools.wraps(f)  # preserve f's __name__/__doc__ for introspection
    def wrap(*args, **kwargs):
        fn = cache_key(f, *args, **kwargs)
        if os.path.exists(fn):
            print('loading cache')
            with open(fn, 'rb') as fr:
                return pickle.load(fr)

        obj = f(*args, **kwargs)
        # protocol=4 supports objects larger than 4 GB (large DataFrames).
        with open(fn, 'wb') as fw:
            pickle.dump(obj, fw, protocol=4)
        return obj
    return wrap
# =============================================================================
# load save code over
# =============================================================================

@cache
def load_table(data_path, err, usecols=None):
    """Read one CSV file (``data_path + err``) restricted to *usecols*.

    *usecols* defaults to the module-global ``all_cols`` to stay backward
    compatible; pass an explicit column list to use this function outside
    the script entry point.
    """
    if usecols is None:
        # NOTE(review): `all_cols` is only bound at module level when the
        # file is run as a script (assigned in __main__) — TODO confirm.
        usecols = all_cols
    load_tb = pd.read_csv(data_path + err, sep=',', usecols=usecols)
    return load_tb

# =============================================================================
# save path and columns list etc.
# =============================================================================
@cache
def normal_para(times):
    """Return the fixed paths, directory listings and the column names
    shared by the rest of the pipeline.

    *times* is only printed (and keeps distinct cache keys per call site).
    """
    print(times)
    path_recoder = '/data/recorder/深度学习模型数据.xlsx'
    path_gz = '/data/all_data/'
    path_zc = '/data/zc_data/'
    file_box_gz = os.listdir(path_gz)
    file_box_zc = os.listdir(path_zc)
    all_cols = [
        'vin', 'starttime', 'vehspeed', 'vehrpm', 'vehsyspwrmod',
        'vehgearpos', 'tboxaccelx', 'tboxaccely', 'tboxaccelz',
        'vehbrakepos', 'vehfuelconsumed', 'vehoutsidetemp',
        'vehinsidetemp', 'vehac', 'vehacauto', 'vehacfanspeed',
        'vehacdrvtargettemp', 'vehacpasstargettemp', 'vehsidelight',
        'vehdiplight', 'vehmainlight', 'vehwiperswitchfront',
        'vehfuellev', 'vehbatt', 'vehcoolanttemp', 'vehodo',
    ]
    return path_recoder, path_gz, path_zc, file_box_gz, file_box_zc, all_cols

# =============================================================================
# load recorder data
# =============================================================================
@cache
def get_recorder(path_recoder, times):
    """Parse the recorder workbook.

    Returns ``(v_err, vin_nor)`` where *v_err* is a list of
    ``[VIN, first START_TIME]`` pairs for vehicles repaired exactly once,
    and *vin_nor* is the list of normal-vehicle VINs.
    """
    print(times)
    workbook = pd.ExcelFile(path_recoder)
    df_error = workbook.parse("故障车辆--2018-1-10")
    df_normal = workbook.parse("正常车辆")
    vin_err = list(df_error.loc[df_error.维修次数 == 1].VIN)
    vin_nor = list(df_normal.loc[:, 'VIN'])
    # Pair each faulty VIN with its first recorded START_TIME.
    v_err = [[vin, df_error[df_error.VIN == vin].START_TIME.iloc[0]]
             for vin in vin_err]
    return v_err, vin_nor

# =============================================================================
# load gz data and get vin unique   .compute()
# =============================================================================
#@jit
#def vin_combin(k,v,file_dictx):
#    days=15
#    temp_vin = pd.concat(file_dictx[k])
#    if temp_vin.shape[0]==0:
#        return None
#    temp_vin = temp_vin.drop_duplicates()
#    temp_vin.loc[:,'starttime'] = pd.to_datetime(temp_vin.loc[:,'starttime'])
#    temp_vin.index = temp_vin.starttime
#    temp_vin = temp_vin.sort_index()
#    temp_time_e = pd.date_range(end = v,periods=days)
#    temp_time_n = pd.date_range(start = v,periods=days+150)
#    mix = Construction_data(temp_vin,time='D')
#    mixx = mix.concat_data();mixx['vin'] = k
#    temp_vin_e = mixx[str(temp_time_e[0]):str(temp_time_e[-1])]
#    temp_vin_n = mixx[str(temp_time_n[3]):str(temp_time_n[-1])]
#    temp_vin_e['label']=1
#    temp_vin_n['label']=0
#    return mixx,temp_vin_e,temp_vin_n    


def _vin_combin(k, frames, v):
    """Worker: build the feature windows for one VIN.

    *frames* is the list of per-file DataFrames for VIN *k*; *v* is the
    failure date.  Concatenates/deduplicates the rows, resamples them via
    ``Construction_data``, then slices an error window (the 15 days ending
    at *v*, label 1) and a normal window (starting 3 days after *v*,
    label 0).  Returns ``(mixx, error_window, normal_window)``, or 0 when
    the VIN has no rows.

    Defined at module level (not as a closure inside get_gz_vin) because
    multiprocessing must pickle the task function — nested functions are
    not picklable and would raise at apply_async time.
    """
    days = 15
    temp_vin = pd.concat(frames)
    if temp_vin.shape[0] == 0:
        return 0
    temp_vin = temp_vin.drop_duplicates()
    temp_vin.loc[:, 'starttime'] = pd.to_datetime(temp_vin.loc[:, 'starttime'])
    temp_vin.index = temp_vin.starttime
    temp_vin = temp_vin.sort_index()
    temp_time_e = pd.date_range(end=v, periods=days)
    temp_time_n = pd.date_range(start=v, periods=days + 150)
    mix = Construction_data(temp_vin, time='D')
    mixx = mix.concat_data()
    mixx['vin'] = k
    temp_vin_e = mixx[str(temp_time_e[0]):str(temp_time_e[-1])]
    temp_vin_n = mixx[str(temp_time_n[3]):str(temp_time_n[-1])]
    temp_vin_e['label'] = 1
    temp_vin_n['label'] = 0
    return mixx, temp_vin_e, temp_vin_n


@cache
def get_gz_vin(path_gz, file_box_gz, all_cols, v_err, times):
    """Collect each faulty VIN's rows from every gz file, then build the
    labelled error/normal windows in a process pool.

    Returns ``(e_list, err_e_list, err_n_list)``: full resampled frames,
    error windows (label 1) and normal windows (label 0), one per VIN
    that had data.
    """
    print(times)
    e_list = []
    err_e_list = []
    err_n_list = []
    v_err = dict(v_err)
    file_dictx = {}
    for file in tqdm(file_box_gz):
        gc.collect()
        load_tb = load_table(path_gz, file)
        load_tb_group = load_tb.groupby(["vin"])
        for k in tqdm(v_err):
            try:
                file_dictx.setdefault(k, []).append(load_tb_group.get_group(k))
            except KeyError:  # this file holds no rows for VIN k
                pass
    print('>>>>>>>> now dictx is ready start concat and combin !')

    pool = Pool(15)
    pool_list = []
    for k, v in tqdm(v_err.items()):
        frames = file_dictx.get(k)
        if not frames:
            continue  # no file contained this VIN; nothing to combine
        # Pass only this VIN's frames so each task pickles a small payload
        # instead of the whole file_dictx.
        results = pool.apply_async(_vin_combin, (k, frames, v))
        pool_list.append(results)
    print(len(pool_list))
    pool.close()  # stop accepting new tasks
    pool.join()   # block until all workers have finished
    for result in tqdm(pool_list):
        re_list = result.get()
        if re_list == 0:
            continue  # VIN had zero rows; skip instead of indexing an int
        e_list.append(re_list[0])
        err_e_list.append(re_list[1])
        err_n_list.append(re_list[2])
    print('>>>>>>>>>>>.....now error data extract and converge complite! satrt nor data !')
    return e_list, err_e_list, err_n_list


#@cache
#def get_result(v_err,paths): 
#    v_err = dict(v_err)
#    e_list = [];err_e_list = [];err_n_list=[];empty=[]
#    print('start load dictx!')
#    with open(paths, 'rb') as fr:
#        file_dictx = pickle.load(fr)
#    print('load over !')
#    for k,v in tqdm(v_err.items()):
#        s = time.time()
#        print(k)
#        result = delayed(vin_combin)(k,v,file_dictx)
#        print(v)
#        result = result.compute()
#        print('get result:!!')
#        print(len(result))
#        if result:
#            e_list.append(result[0]);err_e_list.append(result[1]);err_n_list.append(result[2])
#        else:
#            empty.append(k)
#        efor = time.time()
#        print("dask-for one vin cost time ：", int(efor - s)/3600)
#    return e_list,err_e_list,err_n_list,empty

if __name__ == '__main__':
    path_recoder, path_gz, path_zc, file_box_gz, file_box_zc, all_cols = normal_para(1)
    print('............>>>>>> now start read recorder !')
    v_err, vin_nor = get_recorder(path_recoder, 1)
    print('............>>>>>> now step dictx and combin !')
    # BUG FIX: the original wrapped the *result tuple* in numba.jit (jit
    # expects a callable, not data) and unpacked 4 values from a 3-tuple
    # return of get_gz_vin — both would crash at runtime.
    e_list, err_e_list, err_n_list = get_gz_vin(path_gz, file_box_gz, all_cols, v_err, 3)
#    print('............>>>>>> now start combin !')
#    e_list,err_e_list,err_n_list,empty = get_result(v_err,paths)


