#! /usr/bin/env python
# -*- coding: utf-8 -*-
# vim:fenc=utf-8
#
# Copyright © 2021 wanghch <wanghch@wanghch-pc>
#
# Distributed under terms of the MIT license.

"""

"""
import pandas as pd
# from sklearn.model_selection import train_test_split
import tensorflow as tf
import os
import sys
from tensorflow.keras.layers import *
import tensorflow.keras.backend as K

from feature_utils import *
import argparse
import datetime

# Timestamp captured at startup; not referenced elsewhere in this chunk —
# presumably intended for log/output naming. TODO confirm before removing.
today = datetime.datetime.today()
# Command-line interface: -j/--job selects which branch of the script runs
# ("train", "eval", or "predict"); defaults to training.
parser = argparse.ArgumentParser(description='Process some integers.')
parser.add_argument('-j', '--job', type=str, default="train", help='job')

# Parsed at import time — this module is meant to be run as a script.
ARGS = parser.parse_args()



def get_dataset():
    """Load every CSV under ``out/`` into one DataFrame.

    Each file is read header-less with column names taken from the
    ``COLUMNS`` list (imported from ``feature_utils``). A ``dt`` column
    is added by parsing the ``date`` column into pandas datetimes.

    Returns:
        pd.DataFrame: concatenation of all files, with the extra ``dt`` column.
    """
    frames = [
        pd.read_csv("out/" + fname, names=COLUMNS, header=None)
        for fname in os.listdir('out/')
    ]
    combined = pd.concat(frames)
    combined['dt'] = pd.to_datetime(combined.date)
    return combined



def gen_input(X):
    """Split a feature frame into Keras-ready inputs and labels.

    Args:
        X: DataFrame holding numeric columns (``FNAMES``), categorical
           columns (``CAT_FNAMES``) and a ``target`` column.

    Returns:
        tuple: (dict mapping each categorical name to its column, plus the
        numeric block under key ``"X"``; numpy array of targets).
    """
    labels = X.loc[:, 'target']
    inputs = {name: X.loc[:, name] for name in CAT_FNAMES}
    inputs["X"] = X.loc[:, FNAMES]
    return inputs, labels.values

def my_loss_fn(y_true, y_pred):
    """Mean binary cross-entropy.

    A per-example re-weighting scheme was tried here and disabled; the
    function currently reduces to plain mean BCE, equivalent to Keras's
    built-in 'binary_crossentropy' loss.
    """
    ce = K.binary_crossentropy(y_true, y_pred)
    return K.mean(ce)

def model_fun():
    """Build and compile the two-branch binary classifier.

    Numeric features enter through a single input named "X"; every
    categorical feature gets its own width-1 input feeding a 16-dim
    embedding. The two branches are projected to 128 units each, combined
    (including an elementwise product as an explicit interaction term),
    passed through four 128-unit relu layers, and squashed to a sigmoid.

    Returns:
        tf.keras.Model compiled with BCE loss, adam, and
        accuracy / AUC / mse metrics.
    """
    print(N_NUM_FEATURES)
    numeric_in = tf.keras.Input(N_NUM_FEATURES, name="X")

    cat_inputs = {}
    cat_embeddings = []
    for fname, vocab in zip(CAT_FNAMES, get_cate_dims()):
        inp = tf.keras.Input(1, name=fname)
        cat_inputs[fname] = inp
        emb = Embedding(vocab, 16)(inp)
        # Drop the length-1 sequence axis so the embedding is a flat vector.
        cat_embeddings.append(tf.keras.layers.Reshape((16,))(emb))

    model_inputs = [numeric_in] + list(cat_inputs.values())

    numeric_branch = tf.keras.layers.Dense(128, activation='relu')(numeric_in)
    cat_branch = Dense(128, activation='relu')(concatenate(cat_embeddings, axis=1))
    # Concatenate both branches plus their elementwise product.
    merged = tf.keras.layers.concatenate(
        [numeric_branch, cat_branch, numeric_branch * cat_branch], axis=1)

    hidden = merged
    for width in (128, 128, 128, 128):
        hidden = tf.keras.layers.Dense(width, activation='relu')(hidden)
    prediction = tf.keras.layers.Dense(1, activation='sigmoid')(hidden)

    model = tf.keras.Model(inputs=model_inputs, outputs=prediction)
    model.compile(
        loss='binary_crossentropy',
        optimizer='adam',
        metrics=['accuracy', tf.keras.metrics.AUC(), 'mse'])
    return model

def flag_fun(row):
    """Bucket one prediction row into a confidence/correctness category.

    Args:
        row: mapping with 'target' (0.0/1.0 ground truth) and 'Y'
             (sigmoid score in [0, 1]).

    Returns:
        int: 0 = confident true positive, 1 = confident true negative,
             2 = confidently wrong (either direction), 3 = uncertain
             (score in the [0.3, 0.7] band, or boundary values).
    """
    target = row['target']
    score = row['Y']
    if target == 1.0 and score > 0.7:
        return 0
    if target == 0.0 and score < 0.3:
        return 1
    confident_but_wrong = (
        (target == 0.0 and score > 0.7) or (target == 1.0 and score < 0.3)
    )
    return 2 if confident_but_wrong else 3


df = get_dataset()
# Time-based split: rows dated before 2018 train the model, the rest test it.
X = df[df.dt < '2018-01-01']
X2 = df[df.dt >= '2018-01-01']

N_NUM_FEATURES = len(FNAMES)
N_CAT_FEATURES = len(CAT_FNAMES)

X_train, Y_train = gen_input(X)
X_test, Y_test = gen_input(X2)

model_dir = "model"
if ARGS.job == 'train':
    # Train from scratch, save to model_dir, then report test metrics.
    callbacks = []
    log_dir = "train_logs/"
    tensorboard_callback = tf.keras.callbacks.TensorBoard(log_dir=log_dir, histogram_freq=1)
    callbacks.append(tensorboard_callback)
    model = model_fun()
    model.fit(X_train, Y_train, epochs = 10, callbacks=callbacks)
    model.save(model_dir)
    model.evaluate(X_test, Y_test)
elif ARGS.job == 'eval':
    # Evaluate a previously saved model on the held-out split.
    model = tf.keras.models.load_model(model_dir)
    model.evaluate(X_test, Y_test)
elif ARGS.job == 'predict':
    # Score the held-out split and dump one CSV per confidence bucket.
    model = tf.keras.models.load_model(model_dir)
    Y_res = model.predict(X_test)
    # BUGFIX: X2 is a boolean-mask slice of df, so assigning new columns on it
    # is chained assignment — pandas emits SettingWithCopyWarning and the
    # writes may silently fail to stick. Work on an explicit copy instead.
    X2 = X2.copy()
    X2["target"] = Y_test
    X2["Y"] = Y_res
    X2["FLAG"] = X2.apply(flag_fun, axis=1)
    for i in range(4):
        # NOTE(review): filename "p_%d_csv" looks like a typo for "p_%d.csv",
        # but it is kept as-is since downstream consumers may expect it.
        X2[X2.FLAG == i].to_csv("/tmp/p_%d_csv" % i, index = False)


