# rt_rec_validate.py

import pandas as pd
import numpy as np
import torch
import torch.nn as nn
from gensim.models import Word2Vec
from flask import Flask, request, jsonify
from confluent_kafka import Producer

app = Flask(__name__)

# Select the compute device: prefer CUDA GPU when available, else CPU.
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')

def preprocess_data(validate=False):
    """Load product/view-history CSVs and build fixed-length history tensors.

    Trains a Word2Vec model over the comma-separated product descriptions,
    averages the word vectors to obtain one feature vector per product, and
    converts each user's positive ("history") and negative ("dislike")
    product-id lists into (num_users, max_history_length, vec_dim) float32
    tensors.

    Args:
        validate: when True, build tensors from the prediction CSV and return
            (pos_tensor, neg_tensor, view_history_df). When False, nothing is
            returned (implicit None) — kept as-is for interface compatibility
            with existing callers.
    """
    # TODO: replace these CSV reads with data taken from the HTTP request
    # body. Expected payload shape:
    # [{U91836,"N19639,N61837,...","N60905,N39758,..."}, ...]
    train_product = pd.read_csv('../CT_REC/train_product.csv')
    view_history_predict = pd.read_csv('../CT_REC/view_history_predict.csv')

    # Train Word2Vec on the description tokens.
    descriptions = train_product['description'].apply(lambda x: x.split(','))
    w2v_model = Word2Vec(descriptions, vector_size=100, window=5, min_count=1, workers=4)

    # Per-product feature = mean of its description word vectors.
    product_feat = {}
    for _, row in train_product.iterrows():
        words = row['description'].split(',')
        vectors = [w2v_model.wv[w] for w in words if w in w2v_model.wv]
        product_feat[row['product_id']] = np.mean(vectors, axis=0)

    def create_history_and_dislike_tensor(view_history, product_feat, max_history_length=10):
        """Build (users, max_history_length, dim) tensors for pos/neg histories.

        Unknown product ids map to a zero vector. Histories shorter than
        max_history_length are right-padded with the zero vector so that
        np.stack always receives rectangular data (the previous version
        crashed on ragged histories).
        """
        user_ids = []
        pos_rows = []
        neg_rows = []
        default_feat = np.zeros_like(next(iter(product_feat.values())))

        def to_fixed_length(product_ids):
            # Truncate to max_history_length, then pad with default_feat.
            feats = [product_feat.get(pid, default_feat)
                     for pid in product_ids[:max_history_length]]
            feats.extend([default_feat] * (max_history_length - len(feats)))
            return feats

        # TODO: read these rows from the HTTP request payload instead,
        # split into columns: user_id, history, dislike.
        for _, row in view_history.iterrows():
            user_ids.append(row['user_id'])
            pos_rows.append(to_fixed_length(row['history'].split(',')))
            neg_rows.append(to_fixed_length(row['dislike'].split(',')))

        pos_histories_tensor = torch.tensor(np.stack(pos_rows, axis=0), dtype=torch.float32)
        neg_histories_tensor = torch.tensor(np.stack(neg_rows, axis=0), dtype=torch.float32)
        return pos_histories_tensor, neg_histories_tensor

    if validate:
        pos_t, neg_t = create_history_and_dislike_tensor(view_history_predict, product_feat)
        return pos_t, neg_t, view_history_predict

class RNNModel(nn.Module):
    """LSTM encoder: maps a (batch, seq, input_dim) history to one vector.

    Runs the sequence through an LSTM, takes the last time step, applies
    batch normalization and a linear projection to output_dim.
    """

    def __init__(self, input_dim, hidden_dim, num_layers, output_dim):
        super(RNNModel, self).__init__()
        self.rnn = nn.LSTM(input_dim, hidden_dim, num_layers, batch_first=True)
        self.bn = nn.BatchNorm1d(hidden_dim)  # stabilizes the last-step features
        self.fc = nn.Linear(hidden_dim, output_dim)

    def forward(self, x):
        # Omitting (h0, c0) lets the LSTM create zero initial states on x's
        # own device/dtype — the previous explicit zeros were pinned to a
        # module-level global `device`, which broke if the model was moved.
        out, _ = self.rnn(x)
        out = out[:, -1, :]  # keep only the last time step
        out = self.bn(out)
        out = self.fc(out)
        return out


class TransformerModel(nn.Module):
    """Transformer that maps feature vectors to output_dim scores per row."""

    def __init__(self, input_dim, output_dim, nhead, num_layers):
        super(TransformerModel, self).__init__()
        # NOTE(review): positionally, num_layers only sets num_encoder_layers
        # here; nn.Transformer keeps its default decoder depth. Left unchanged
        # so already-saved checkpoints still load — confirm this is intended.
        self.transformer = nn.Transformer(input_dim, nhead, num_layers)
        self.fc = nn.Linear(input_dim, output_dim)

    def forward(self, x):
        # Self-attend: the same tensor serves as both source and target.
        encoded = self.transformer(x, x)
        # For batched (3-D) output, average over dim 1 before projecting.
        if encoded.dim() > 2:
            encoded = encoded.mean(dim=1)
        return self.fc(encoded)


class RecModel(nn.Module):
    """Sequential composition: RNN encoder followed by a transformer scorer."""

    def __init__(self, rnn_model, transformer_model):
        super(RecModel, self).__init__()
        self.rnn = rnn_model
        self.transformer = transformer_model

    def forward(self, x):
        # Feed the RNN's summary vector straight into the transformer stage.
        return self.transformer(self.rnn(x))


@app.route('/predict', methods=['GET'])
# TODO: change this endpoint to POST and accept the history payload as a list
# in the request body, e.g.
# [{U91836,"N19639,N61837,...","N60905,N39758,..."}, ...]
def predict():
    """Score each user's positive viewing history and dump results to CSV.

    Rebuilds the preprocessing pipeline, restores the trained RecModel
    weights, runs the positive-history tensor through the model, writes a
    (user_id, sim_value) CSV and returns a JSON status payload.
    """
    pos_history_validate_tensor, neg_history_validate_tensor, view_history_validate = preprocess_data(validate=True)

    rnn_model = RNNModel(input_dim=100, hidden_dim=64, output_dim=100, num_layers=2).to(device)
    transformer_model = TransformerModel(input_dim=100, output_dim=1, nhead=5, num_layers=2).to(device)
    rec_model = RecModel(rnn_model, transformer_model).to(device)

    # map_location keeps this working when the checkpoint was saved on a GPU
    # but the serving host only has a CPU.
    rec_model.load_state_dict(torch.load('../result/RecModel.pth', map_location=device))
    rec_model.eval()

    with torch.no_grad():
        pos_batch = pos_history_validate_tensor.to(device)
        # RecModel already chains the RNN and transformer stages, so use it
        # instead of calling the two sub-models separately.
        pos_scores = rec_model(pos_batch)
        # squeeze(-1) rather than squeeze(): a single-user batch must stay
        # 1-D or the DataFrame construction below fails on a 0-d array.
        pos_sim_values = torch.tanh(pos_scores).squeeze(-1)

        pos_eval_results = pd.DataFrame({
            'user_id': view_history_validate['user_id'],
            'sim_value': pos_sim_values.cpu().numpy()
        })
        print(f"正样本: \n{pos_eval_results}")

        pos_eval_results.to_csv('predictRecModelEval.csv', index=False)

        # TODO: publish each (user_id, sim_value) row to the Kafka topic
        # 'rec_model_results' via confluent_kafka.Producer, then flush.

    return jsonify({"status": "prediction completed"})


if __name__ == '__main__':
    # Flask development server only; front with a WSGI server in production.
    app.run(port=5001)
