import os
from flask import Flask, request, jsonify, send_from_directory
from transformers import BertTokenizer
from model import BertTextModel_last_layer
import torch
import numpy as np

# NOTE(review): module-level `class_` appears unused — generate_response defines
# its own local label list. Kept to avoid breaking any external importer.
class_ = []

# Tokenizer and model weights are loaded once at import time.
tokenizer = BertTokenizer.from_pretrained("./bert-base-chinese")
device = "cuda:0" if torch.cuda.is_available() else "cpu"
model = BertTextModel_last_layer().to(device)
# map_location=device: without it, a checkpoint saved on a CUDA machine fails
# to load on a CPU-only host. (Consider weights_only=True as well — safe for a
# plain state_dict and avoids arbitrary-pickle execution on untrusted files.)
model.load_state_dict(
    torch.load('./Bert-TextCNN-model_chi/bert-textcnn.pth', map_location=device)
)
model.eval()  # disable dropout/batch-norm training behavior for inference



def generate_response(input_text):
    """Run multi-label emotion classification on *input_text*.

    Tokenizes the text with the module-level BERT tokenizer, feeds
    [input_ids, token_type_ids, attention_mask] to the model, and returns
    the list of label strings whose output score is positive (presumably
    raw logits thresholded at 0, i.e. sigmoid > 0.5 — TODO confirm against
    the training loss).

    :param input_text: a single string (or list of strings) to classify.
    :return: list of matching label names; may be empty.
    """
    # Labels indexed to match the model's output dimension order.
    labels = ['伤感', '励志', '开心', '思念', '甜蜜']
    encoded = tokenizer(input_text, padding=True, truncation=True,
                        max_length=512, return_tensors='pt')
    batch = [encoded['input_ids'].to(device),
             encoded['token_type_ids'].to(device),
             encoded['attention_mask'].to(device)]
    # no_grad: inference only — skip autograd graph construction to save
    # memory and time (the original tracked gradients needlessly).
    with torch.no_grad():
        scores = model(batch)[0].tolist()  # scores for the first (only) sample
    return [labels[i] for i, score in enumerate(scores) if score > 0.0]
    # return response


# WSGI application instance; routes are registered on it below.
app = Flask(__name__)

@app.route("/")
def index():
    """Serve the static front-end page from the application's own directory."""
    app_dir = os.path.abspath(os.path.dirname(__file__))
    return send_from_directory(app_dir, "index.html")

@app.route("/chat", methods=["POST", "GET"])
def chat():
    """Classify the supplied text and return the matching emotion labels.

    Accepts the text as JSON key ``input_text`` (POST) or query parameter
    ``input_text`` (GET). Responds with ``{"response": [labels...]}``,
    or 400 if the text is missing — the original raised a KeyError (HTTP
    500) on a missing JSON key and passed None to the model on a bare GET.
    """
    if request.method == 'POST':
        # get_json(silent=True) returns None instead of raising on a
        # missing/invalid JSON body; .get avoids KeyError on a missing key.
        payload = request.get_json(silent=True) or {}
        input_text = payload.get("input_text")
    else:
        input_text = request.args.get("input_text")
    if not input_text:
        return jsonify({"error": "input_text is required"}), 400
    response = generate_response(input_text)
    return jsonify({"response": response})

if __name__ == "__main__":
    # Development server only (Flask default: 127.0.0.1:5000); use a WSGI
    # server such as gunicorn in production.
    app.run()
