#!/usr/bin/env python
# -*- coding: utf-8 -*-

"""
V 0.0.1.201806027
"""

import pandas
import os
# The log directory must exist before logging.basicConfig opens its file
# handler below, so this runs in the middle of the import section on purpose.
if not os.path.isdir("./log"):
    os.mkdir("./log")
import logging
# File-based error log; save_log() (below) handles the separate training log.
logging.basicConfig(filename="./log/error.log" ,format='%(asctime)s %(filename)s[line:%(lineno)d] %(levelname)s %(message)s',  
                    datefmt='%a, %d %b %Y %H:%M:%S', level=logging.INFO)

import json
import sys
import os
import shutil
import requests
import configparser
import gc
import string
import random
import redis
from DataProcessing import get_name_by_program, split_df, get_df_from_cassandra
from model import Data
from datetime import datetime

sys.path.append("./gen-py")

from train_or_test import Train_or_Evaluate
from AI_pred import Prediction
from thrift.transport import TSocket
from thrift.transport import TTransport
from thrift.protocol import TBinaryProtocol
from thrift.server import TProcessPoolServer
from thrift.server import TServer

import tornado
from tornado.web import RequestHandler, Application
from tornado.ioloop import IOLoop
from tornado import gen, httpclient
from db import Redis_Operation

# Timestamp of process start; save_log() stamps every entry with it so log
# lines from a single server run can be grouped together.
timeID = datetime.now()
cf = configparser.ConfigParser()
cf.read("conf/train.conf")

# Shared thrift client for the training worker. The handlers below open and
# close this transport around each RPC call.
# NOTE(review): these module-level clients are shared by all requests and do
# not look thread/coroutine-safe under concurrent use -- confirm.
transport = TSocket.TSocket(cf.get("thrift_address", "host"), cf.getint("thrift_address","port"))
transport = TTransport.TBufferedTransport(transport)
protocol = TBinaryProtocol.TBinaryProtocol(transport)
client = Train_or_Evaluate.Client(protocol)

# Shared thrift client for the prediction service (used by PredictHandler).
pred_transport = TSocket.TSocket(cf.get("pred_address", "host"), cf.getint("pred_address","port"))
pred_transport = TTransport.TBufferedTransport(pred_transport)
pred_protocol = TBinaryProtocol.TBinaryProtocol(pred_transport)
pred_client = Prediction.Client(pred_protocol)

def save_log(data, log_path="./log/train.log"):
    """Append *data* to the training log.

    Each entry is stamped with the process start time (module-level
    ``timeID``) and the current wall-clock time.

    Args:
        data: any object; converted with ``str()`` before writing.
        log_path: file to append to (created if missing).
    """
    text = str(data)
    # Explicit encoding avoids platform-dependent default text encodings.
    with open(log_path, "a", encoding="utf-8") as f:
        f.write("timeID: {}\n".format(timeID))
        f.write("{}: {}\n".format(datetime.now(), text))

def get_random_alphabet(amount=8):
    """Return a random string of *amount* uppercase ASCII letters.

    Used to mint user/model identifiers.
    NOTE(review): ``random`` is not cryptographically secure and collisions
    are possible; fine for ids, do not use for secrets (use ``secrets``).

    Args:
        amount: number of letters to generate (default 8).
    """
    # random.choices draws with replacement in one C-level call, replacing
    # the original manual loop with its unused index variable.
    return ''.join(random.choices(string.ascii_uppercase, k=amount))


class TrainHandler(RequestHandler):
    """Accept a training job over HTTP.

    Validates the request, stores the training data and parameters in
    Redis, and either starts the job right away (when the worker is idle)
    or leaves it queued with status "pending".
    """

    def get(self):
        # Simple liveness probe.
        print("enter get methods")
        self.write("test")

    @tornado.gen.coroutine 
    def post(self):
        save_log("enter train or evaluate function")
        # --- argument parsing -------------------------------------------
        try:
            x = self.get_arguments("x")
            y = self.get_arguments("y")
            cate_col = self.get_arguments("cate_col")
            num_col = self.get_arguments("num_col")
            #class_list = self.get_arguments("class")
            class_list = []
            methods = self.get_argument("methods")
            customize = self.get_argument("customize")
        except Exception:
            logging.error("error arguments in Train_or_evaluate function", exc_info=True)
            self.write(  {"code":1, "error":"argument error"})
            self.finish()
            return 

        # --- training data: uploaded CSV file or a JSON argument ---------
        csv_file = self.request.files.get("file", None)
        if not csv_file:
            try:
                # BUGFIX: this used to call self.get_grguement (a typo that
                # raised AttributeError) and then passed the raw string to
                # pandas.DataFrame, which always fails. Decode the JSON
                # payload first, as the error message below promises.
                train_data = self.get_argument("train_data")
                df = pandas.DataFrame(json.loads(train_data))
            except Exception:
                # Missing argument, invalid JSON, or JSON that pandas cannot
                # shape into a DataFrame all land here (the old
                # `except KeyError` matched none of these failures).
                logging.error("invalid train_data argument", exc_info=True)
                self.write({"code":2, "error":"train data error, upload data by json or csv file, and ensure you json can turn to  pandas DataFrame."})
                self.finish()
                return 
        else:
            # Persist the upload only long enough for pandas to parse it.
            upload_path = os.path.join("./csv_file", csv_file[0]["filename"])
            with open(upload_path, "wb") as f:
                f.write(csv_file[0]["body"])
            df = pandas.read_csv(upload_path)
            os.remove(upload_path)

        # --- validation --------------------------------------------------
        max_df_len = cf.getint("limit", "max_csv_len")
        # The row cap only applies when there are categorical columns other
        # than y (i.e. the data contains character features) -- TODO confirm
        # that list comparison against y is the intended check.
        if len(df) > max_df_len and cate_col != y:
            self.write({"code":3, "error":"sorry ,because of our limit resources,when your training data include character,the length of csv must be less than %s."%max_df_len})
            self.finish()
            return

        # Exactly one target column, and it must be one of the categorical
        # columns.
        if not (set(y) <= set(cate_col)) or len(y) != 1:
            logging.error("error y")
            self.write( {"code":4, "error":"y(predict column) error"})
            self.finish()
            return 

        # One random id serves as both user id and model name.
        user_id = get_random_alphabet()
        model_name = user_id

        logging.info("length of DataFrame: %s", len(df))
        if len(df) == 0:
            # NOTE(review): codes 5/6 are strings while 0-4 are ints; kept
            # as-is so existing clients are not broken.
            self.write({"code":"5", "error":"empty data"})
            self.finish()
            return 
        if methods != "train":
            self.write({"code":"6", "error":"only train methods are allowed."})
            self.finish()
            return 

        # --- store the job and enqueue it in Redis -----------------------
        r = Redis_Operation() 
        r.insert_hash_data("train_data", model_name, df.to_json())
        data = {"x":x, "y":y, "cate_col":cate_col, "num_col":num_col, "class":class_list, "methods":methods,  "customize":customize, "user_id":model_name}
        r.insert_hash_data("train_parm", model_name, json.dumps(data)) 
        if customize == "y":
            # Custom model parameters are only added after the Redis copy was
            # stored, so they reach the worker for this request only.
            data["model_parm"] = self.get_argument("model_parm")
        r.insert_into_train_queue(model_name)

        if not self.get_train_status(): 
            if r.get_data_by_index(0) is None:
                # Queue is empty: run this job immediately.
                yield train(data)
                transport.close()  # redundant (train() closes), kept as a safety net
                r.insert_hash_data("train_status", model_name, "training")
            else:
                # Worker is idle but the queue is not empty: the job at the
                # head was interrupted -- mark it rejected and restart it.
                top_model = r.get_data_by_index(0)
                r.insert_hash_data("train_status", top_model, "rejected")
                train_parm = json.loads(str(r.get_hash_data("train_parm", top_model), encoding="utf-8"))
                self.restart_worker()
                train(train_parm)  # fire-and-forget coroutine
        else:
            # A run is already in progress; the job waits in the queue.
            r.insert_hash_data("train_status", model_name, "pending")

        self.write( {"code":0, "user_id": user_id})
        self.finish()
        print("train end")

    def get_train_status(self):
        """Return True when the thrift worker reports a training run in
        progress (it answers the literal string "yes").

        BUGFIX: the shared transport is now closed even if the RPC raises,
        so a later transport.open() cannot fail on an already-open transport.
        """
        transport.open()
        try:
            res = client.get_train_status()
            print(res)
        finally:
            transport.close()
        return res == "yes"

    def restart_worker(self):
        """Restart the worker (placeholder -- not implemented)."""
        pass

@tornado.gen.coroutine
def train(data):
    """Send one training job to the thrift worker.

    The leading ``yield None`` keeps this a real coroutine so callers may
    either ``yield`` it or fire-and-forget.

    Args:
        data: dict of training parameters; serialized to JSON for the RPC.

    BUGFIX: the shared transport is now closed even when the RPC raises, so
    a later transport.open() does not hit an already-open transport.
    """
    yield None
    transport.open()
    try:
        client.train_or_evaluate(json.dumps(data))
    finally:
        transport.close()

class WorkerHandler(RequestHandler):
    """Callback endpoint the training worker hits when a job finishes."""

    @tornado.gen.coroutine
    def post(self):
        redis_ops = Redis_Operation()
        save_log("enter WorkerHandler")
        finished_model = self.get_argument("model_name")
        final_status = self.get_argument("status")
        # Drop the cached inputs for the finished job and record its outcome.
        redis_ops.del_hash_data("train_data", finished_model)
        redis_ops.del_hash_data("train_parm", finished_model)
        redis_ops.insert_hash_data("train_status", finished_model, final_status)
        redis_ops.pop_train_queue()
        # Kick off the next queued job, if any (fire-and-forget coroutine).
        next_model = redis_ops.get_data_by_index(0)
        if next_model is not None:
            raw_parm = redis_ops.get_hash_data("train_parm", next_model)
            train(json.loads(str(raw_parm, encoding="utf-8")))
        self.write("furfilled")
        self.finish()

    def get(self):
        # Connectivity probe for the worker side.
        print("enter WorkerHandler test") 
        self.write({"code":0, "result": "connect success"})

class DeleteHandler(RequestHandler):
    """Delete a trained model via the thrift worker."""

    def post(self):
        model_name = self.get_argument("model_name")
        # Throughout this service the user id doubles as the model name.
        user_id = model_name
        transport.open()
        try:
            client.del_model(user_id, model_name)
        finally:
            # BUGFIX: always release the shared transport, even when the RPC
            # raises, so later requests can open it again.
            transport.close()

class DownloadHandler(RequestHandler):
    """Stream a trained model file back to the caller."""

    def post(self):
        model_name = self.get_argument("model_name")
        # Only the path-lookup RPC needs the transport; close it right away.
        # BUGFIX: the original left the transport open on the missing-file
        # early return, breaking the next transport.open().
        transport.open()
        try:
            model_abspath = client.get_model_path(model_name)
        finally:
            transport.close()
        if not os.path.isfile(model_abspath):
            self.write({"code": 1, 
                        "error":("did not find the model name, please check out the model name " 
                                 "and query the train status,then ensure the model name correct " 
                                 "and it has finished train with no error ")})
            self.finish()
            return 
        self.set_header("Content-Type", "application/octet-stream")
        # Stream in 1 KiB chunks to avoid loading the whole model in memory.
        with open(model_abspath, "rb") as f:
            while True:
                chunk = f.read(1024)
                if not chunk:
                    break
                self.write(chunk)
        self.finish()
        
class QueryResultHandler(RequestHandler):
    """Report the training status stored in Redis for one model."""

    def post(self):
        queried_model = self.get_argument("model_name")
        status_bytes = Redis_Operation().get_hash_data("train_status", queried_model)
        if not status_bytes:
            # Unknown model name: nothing was ever queued under it.
            self.write({"code":1, "error":"Did not find the model name "})
            self.finish()
            return
        # Redis hands back bytes; decode before embedding in the JSON reply.
        self.write({"code":0, "result":status_bytes.decode("utf-8")})
        self.finish()
        return

class PredictHandler(RequestHandler):
    """Run a real-time prediction against a previously trained model."""

    def post(self):
        # --- argument parsing -------------------------------------------
        try:
            x_data = json.loads(self.get_argument("x_data"))
            print("x_data: ",  x_data)
            x_column_list = json.loads(self.get_argument("x_column_list"))
            print("x_column_list:", x_column_list)
            # Built only to validate that the payload is DataFrame-shaped;
            # the raw JSON, not the frame, is forwarded to the worker.
            df = pandas.DataFrame(x_data, columns=x_column_list)
            print(df)
            cate_col = self.get_arguments("cate_col")
            num_col = self.get_arguments("num_col")
            #class_list = self.get_arguments("class")
            class_list = []
            model_name = self.get_argument("model_name")
        except Exception:
            logging.error("error json in Train_or_evaluate function", exc_info=True)
            self.write(  {"code":1, "error":"argument error"})
            self.finish()
            return

        # (removed a duplicate get_argument("model_name") call that was here)
        # --- the model must exist and have finished training -------------
        r = Redis_Operation()
        result = r.get_hash_data("train_status", model_name)
        if not result:
            self.write({"code":2, "error":"Did not find the model name "})
            self.finish()
            return
        result = str(result, encoding="utf-8")
        # "furfilled" (sic) is the status string the worker reports on success.
        if result != "furfilled":
            self.write({"code":3, "error":"Train status is %s" %result})
            self.finish()
            return

        # --- forward the request to the prediction service over thrift ---
        data = {"x_data":json.dumps(x_data), 
                "x_column_list":json.dumps(x_column_list), 
                "cate_col":cate_col, 
                "num_col":num_col, 
                "class":class_list,
                "user_id":model_name, 
                "model_name": model_name}
        data = json.dumps(data)
        print(data)
        pred_transport.open()
        try:
            predict_list = pred_client.predict_by_realtime_data(data)
        finally:
            # BUGFIX: close the transport even when the RPC raises, so the
            # next request's pred_transport.open() does not fail.
            pred_transport.close()
        self.write({"code":0, "result":predict_list})
        
        
        
        

"""

    def stop_training(self):
        K.clear_session()
        return "successfully"
   
    def get_acc_and_loss(self, user_id,model_name):
        save_log("user_id: {}".format(user_id))
        save_log("enter get_acc_and_loss function")
        result = {}
        root_path = "./model/{}/{}/".format(user_id, model_name)
        if not os.path.isdir(root_path):
            return "Null"
        for acc_and_loss_log in os.listdir(root_path):
            if acc_and_loss_log.rsplit(".",1)[1] == "log":
                key = acc_and_loss_log.rsplit(".",1)[0]
                try:
                    df = pandas.read_csv(root_path+acc_and_loss_log)
                except pandas.errors.EmptyDataError as e:
                    logging.error("log file is empty", exc_info = True)
                    continue
                zero_list = []
                for i in range(len(df)):
                    if df["batch"][i] == "0" or df["batch"][i]==0:
                        zero_list.append(i)
                if len(zero_list)==0:
                    save_log("no log")
                    continue
                if zero_list[(len(zero_list)-1)]!= (len(df)-1):
                    zero_list.append(len(df)-1)
                acc_list = []
                loss_list = []
                for j in range(len(zero_list)-1):
                    acc_list.append([])
                    loss_list.append([])
                    for k in range(zero_list[j],zero_list[j+1]):
                        acc_list[j].append(df["acc"][k])
                        loss_list[j].append(df["loss"][k])
                last_dict = {"acc":acc_list, "loss":loss_list}
                result[key] = last_dict
        return json.dumps(result)    
"""

def make_app():
    """Build the tornado Application wiring every handler to its route."""
    routes = [
        (r"/train", TrainHandler),
        (r"/worker", WorkerHandler),
        (r"/delete", DeleteHandler),
        (r"/download", DownloadHandler),
        (r"/result", QueryResultHandler),
        (r"/predict", PredictHandler),
    ]
    return Application(routes)

if __name__ == "__main__":
    # Bind the HTTP server on the configured port and run forever.
    listen_port = cf.getint("tornado_address", "port")
    application = make_app()
    application.listen(listen_port)
    print("sever start at %s" % listen_port)
    IOLoop.current().start()

