# models/ncf.py

import torch
import torch.nn as nn

class NCF(nn.Module):
    """Neural Collaborative Filtering (He et al., 2017).

    Combines two branches over (user, item) ID pairs:
      * GMF: element-wise product of user/item embeddings.
      * MLP: concatenated user/item embeddings fed through an MLP tower.
    The branch outputs are concatenated and projected to a single
    sigmoid-activated interaction score.

    Args:
        num_users: Number of distinct user IDs (embedding rows).
        num_items: Number of distinct item IDs (embedding rows).
        embedding_dim: Dimension of the GMF embeddings.
        mlp_layers: Layer widths of the MLP tower; ``mlp_layers[0]`` is the
            concatenated input width, so each MLP embedding has size
            ``mlp_layers[0] // 2``. Defaults to ``[16, 8, 4]``.
    """

    def __init__(self, num_users, num_items, embedding_dim=8, mlp_layers=None):
        super(NCF, self).__init__()
        # Default kept out of the signature to avoid a mutable default arg.
        if mlp_layers is None:
            mlp_layers = [16, 8, 4]

        # GMF branch embeddings.
        self.gmf_user_emb = nn.Embedding(num_users, embedding_dim)
        self.gmf_item_emb = nn.Embedding(num_items, embedding_dim)

        # MLP branch embeddings: user + item concat must equal mlp_layers[0].
        self.mlp_user_emb = nn.Embedding(num_users, mlp_layers[0] // 2)
        self.mlp_item_emb = nn.Embedding(num_items, mlp_layers[0] // 2)

        # MLP tower: Linear + ReLU for each consecutive pair of widths.
        mlp = []
        for i in range(len(mlp_layers) - 1):
            mlp.append(nn.Linear(mlp_layers[i], mlp_layers[i + 1]))
            mlp.append(nn.ReLU())
        self.mlp = nn.Sequential(*mlp)

        # Fusion layer: GMF output (embedding_dim) concatenated with the
        # MLP tower output (mlp_layers[-1]) is projected to a single logit.
        combined_size = embedding_dim + mlp_layers[-1]
        self.final = nn.Linear(combined_size, 1)
        self.sigmoid = nn.Sigmoid()

    def forward(self, user_ids, item_ids):
        """Score (user, item) pairs.

        Args:
            user_ids: Long tensor of shape (batch,) with user indices.
            item_ids: Long tensor of shape (batch,) with item indices.

        Returns:
            Tensor of shape (batch,) with predicted interaction
            probabilities in [0, 1].
        """
        # GMF branch: element-wise product of the two embeddings.
        gmf_u = self.gmf_user_emb(user_ids)
        gmf_i = self.gmf_item_emb(item_ids)
        gmf = gmf_u * gmf_i

        # MLP branch: concatenate embeddings, then run the tower.
        mlp_u = self.mlp_user_emb(user_ids)
        mlp_i = self.mlp_item_emb(item_ids)
        mlp_input = torch.cat((mlp_u, mlp_i), dim=-1)
        mlp_output = self.mlp(mlp_input)

        # Fuse branches and project to a single logit per pair.
        combined = torch.cat((gmf, mlp_output), dim=-1)
        output = self.final(combined)
        # squeeze(-1), not squeeze(): a bare squeeze would also collapse
        # the batch dimension when batch size is 1, returning a 0-dim
        # scalar instead of a (1,) tensor.
        return self.sigmoid(output).squeeze(-1)