from flask import Flask, render_template, request, jsonify
import torch
from encoder import MyEncoderGRU
from attentiondecoder import AttentionDecoderGRU
from get_dict import get_data
from until import SOS_token, device, MAX_LENGTH, EOS_token

app = Flask(__name__)

# Module-level vocabulary globals, loaded once at import time:
# word2index / index2word mappings and vocab sizes for both languages,
# plus the parallel sentence pairs returned by get_data().
english_word2index, english_index2word, english_word_n, \
    french_word2index, french_index2word, french_word_n, my_pairs = get_data()


# Build the encoder/decoder pair and restore their trained weights.
def load_models():
    """Instantiate the GRU encoder and attention decoder, load their
    saved state dicts from ./model, and move them to the target device.

    Returns:
        tuple: (encoder, decoder) ready for inference on ``device``.
    """
    hidden_size = 256  # must match the size used at training time

    # Encoder over the English vocabulary.
    my_encoder_gru = MyEncoderGRU(vocab_size=english_word_n, hidden_size=hidden_size)
    # map_location ensures a checkpoint saved on GPU still loads on a
    # CPU-only host (and vice versa); weights_only avoids arbitrary
    # pickle code execution when loading.
    my_encoder_gru.load_state_dict(
        torch.load("./model/encoder_gru_2.bin", map_location=device, weights_only=True))
    my_encoder_gru = my_encoder_gru.to(device)

    # Attention decoder over the French vocabulary.
    my_decoder_gru = AttentionDecoderGRU(vocab_size=french_word_n, hidden_size=hidden_size,
                                         dropout_p=0.1, max_len=MAX_LENGTH)
    my_decoder_gru.load_state_dict(
        torch.load("./model/decoder_gru_2.bin", map_location=device, weights_only=True))
    my_decoder_gru = my_decoder_gru.to(device)

    return my_encoder_gru, my_decoder_gru


# Load both models once at import time so every request reuses them.
encoder, decoder = load_models()


def seq2seq_evaluate(tensor_x, my_encoder_gru, my_decoder_gru):
    """Greedy-decode a French translation for one numericalized sentence.

    Args:
        tensor_x: LongTensor of shape (1, src_len) holding source token
            ids (assumed to end with EOS — TODO confirm against caller).
        my_encoder_gru: trained encoder model.
        my_decoder_gru: trained attention decoder model.

    Returns:
        str: space-joined predicted French words; includes '<EOS>' when
        the end token is emitted within MAX_LENGTH steps.
    """
    my_encoder_gru.eval()
    my_decoder_gru.eval()
    with torch.no_grad():
        # Encode the whole source sentence in one pass.
        encoder_output, encoder_hidden = my_encoder_gru(
            tensor_x, my_encoder_gru.init_hidden().to(device))

        # Fixed-size attention memory: copy at most MAX_LENGTH encoder
        # states; positions beyond the source length stay zero.
        attn_memory = torch.zeros(MAX_LENGTH, my_encoder_gru.hidden_size, device=device)
        src_steps = min(encoder_output.shape[1], MAX_LENGTH)
        attn_memory[:src_steps] = encoder_output[0, :src_steps]

        # Start decoding from SOS, seeded with the encoder's final state.
        step_input = torch.tensor([[SOS_token]], device=device)
        hidden = encoder_hidden
        predicted_words = []

        for _ in range(MAX_LENGTH):
            logits, hidden, _attn = my_decoder_gru(step_input, hidden, attn_memory)

            _score, best = torch.topk(logits, k=1)
            token_id = best.item()
            if token_id == EOS_token:
                predicted_words.append('<EOS>')
                break
            predicted_words.append(french_index2word[token_id])

            # Feed the prediction back as the next input; detach keeps
            # the loop out of the autograd graph.
            step_input = best.detach()

        return ' '.join(predicted_words)


@app.route('/')
def index():
    # Serve the main translation page.
    return render_template('index.html')


@app.route('/translate', methods=['POST'])
def translate():
    """Translate English text from the JSON request body into French.

    Expects a JSON body like ``{"text": "<english sentence>"}``.

    Returns:
        200 with ``{"translation": "<french>"}`` on success,
        400 with ``{"error": ...}`` when no usable text is supplied,
        500 with ``{"error": ...}`` on any internal failure.
    """
    try:
        # silent=True keeps a malformed/absent JSON body a 400, not a 500.
        data = request.get_json(silent=True) or {}
        english_text = data.get('text', '')

        # Reject empty and whitespace-only input alike.
        if not english_text.strip():
            return jsonify({'error': 'No text provided'}), 400

        # Numericalize: lowercase each token once and map
        # out-of-vocabulary words to the <UNK> id.
        x_list = []
        for word in english_text.split():
            lower_word = word.lower()
            if lower_word in english_word2index:
                x_list.append(english_word2index[lower_word])
            else:
                x_list.append(english_word2index['<UNK>'])

        x_list.append(EOS_token)
        tensor_x = torch.tensor(x_list, dtype=torch.long, device=device).view(1, -1)

        # Translate.
        french_translation = seq2seq_evaluate(tensor_x, encoder, decoder)

        # Drop the <EOS> marker and the whitespace it leaves behind.
        french_translation = french_translation.replace('<EOS>', '').strip()

        return jsonify({
            'translation': french_translation
        })

    except Exception as e:
        # Boundary handler: surface unexpected failures as a 500.
        return jsonify({'error': str(e)}), 500


@app.route('/notes')
def notes():
    # Serve the static notes page.
    return render_template('notes.html')


if __name__ == '__main__':
    # Development server only. NOTE(review): debug=True enables the
    # Werkzeug interactive debugger (arbitrary code execution) and must
    # not be used in production; 0.0.0.0 exposes it on all interfaces.
    app.run(debug=True, host='0.0.0.0', port=5001)
