from flask import Flask
from flask import request
from predictor import *
import tensorflow as tf

app = Flask(__name__)


@app.route('/', methods=['POST'])  # HTTP method names are uppercase by convention
def hello_world():
    """Predict a single field for one case description posted as form data.

    Form fields:
        fact: the case text to classify.
        mode: which model in ``predictor.models`` to run
              (e.g. 'imprisonment' or a one-hot classification head).

    Returns:
        The prediction rendered as a plain string; ('unknown mode...', 400)
        if ``mode`` does not name a loaded model.
    """
    fact = [request.form['fact']]
    mode = request.form['mode']

    # Reject unknown modes with a 400 instead of letting the dict lookup
    # below raise KeyError and produce a 500.
    if mode not in predictor.models:
        return 'unknown mode: %s' % mode, 400

    # Preprocess: cut text, map to padded token-id sequences.  Note that
    # text2seq stores its result on transform.fact_pad_seq rather than
    # returning it.
    content_cut = transform.cut_texts(texts=fact, word_len=2)
    transform.text2seq(texts_cut=content_cut, tokenizer_fact=tokenizer_fact,
                       num_words=predictor.num_words, maxlen=predictor.max_len)
    content_fact_pad_seq = np.array(transform.fact_pad_seq)

    # Enter the graph captured at import time — presumably the graph the
    # Keras models were loaded into (required for TF1-style serving from
    # Flask worker threads).
    with graph.as_default():
        predict = predictor.models[mode].predict(content_fact_pad_seq)

    if mode == 'imprisonment':
        # Regression-style output: decode to a sentence length.
        result = str(transform.imprisonment_transform(predict)[0][0])
    else:
        # Classification output: decode one-hot vector to its label.
        result = str(transform.one_hot_to_str(predict, mode)[0][0])
    return result


def web_predict(data):
    """Run all three models on ``data`` (a list of case texts).

    Args:
        data: list of raw case-description strings.

    Returns:
        dict with string-valued keys 'accusation', 'relevant_articles'
        and 'imprisonment'.  Only the first prediction (``[0][0]``) is
        kept for each field, so callers effectively get results for
        ``data[0]`` only.
    """
    # Same preprocessing pipeline as the route handler; text2seq stores
    # its output on transform.fact_pad_seq instead of returning it.
    content_cut = transform.cut_texts(texts=data, word_len=2)
    transform.text2seq(texts_cut=content_cut, tokenizer_fact=tokenizer_fact,
                       num_words=predictor.num_words, maxlen=predictor.max_len)
    content_fact_pad_seq = np.array(transform.fact_pad_seq)

    # Run every model under the graph captured at import time (TF1-style
    # serving requirement for Keras models loaded at module scope).
    with graph.as_default():
        raw = {
            name: predictor.models[name].predict(content_fact_pad_seq)
            for name in ('accusation', 'relevant_articles', 'imprisonment')
        }

    return {
        'accusation': str(transform.one_hot_to_str(raw['accusation'], 'accusation')[0][0]),
        'relevant_articles': str(transform.one_hot_to_str(raw['relevant_articles'], 'relevant_articles')[0][0]),
        'imprisonment': str(transform.imprisonment_transform(raw['imprisonment'])[0][0]),
    }


# Module-level state shared by the request handlers above.  Order matters:
# the default graph must be captured before the models are used so that
# handlers can re-enter it with `graph.as_default()` (TF1-style serving).
graph = tf.get_default_graph()
predictor = Predictor()      # loads/holds the models, num_words, max_len (defined in predictor module)
transform = DataTransform()  # text preprocessing helpers (cut/tokenize/pad/decode)
# Tokenizer fitted at training time.  NOTE(review): pickle.load is only
# acceptable because this file ships with the project — never load
# untrusted pickles.
with open('model/tokenizer_fact_40000.pkl', mode='rb') as f:
    tokenizer_fact = pickle.load(f)

if __name__ == '__main__':
    # Emit non-ASCII (e.g. Chinese) characters verbatim in jsonify output.
    # NOTE(review): this only takes effect when run directly, not under a
    # WSGI server — confirm whether production deployment needs it too.
    app.config['JSON_AS_ASCII'] = False
    app.run()
