{
 "cells": [
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "A simple implementation of LHUC (Learning Hidden Unit Contributions) — essentially a per-feature gating mechanism, similar in spirit to SENet"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "import torch\n",
    "import torch.nn as nn\n",
    "import torch.nn.functional as F\n",
    "\n",
    "class LHUC(nn.Module):\n",
    "    def __init__(self, num_features, embedding_dim):\n",
    "        super(LHUC, self).__init__()\n",
    "        self.embedding_dim = embedding_dim\n",
    "        self.weights = nn.Parameter(torch.Tensor(num_features))  # 初始化权重\n",
    "        self.reset_parameters()\n",
    "\n",
    "    def reset_parameters(self):\n",
    "        nn.init.constant_(self.weights, 0)  # 初始化权重为0\n",
    "\n",
    "    def forward(self, embeddings):\n",
    "        # embeddings是一个形状为[num_features, batch_size, embedding_dim]的张量\n",
    "        sigmoid_weights = torch.sigmoid(self.weights)  # 计算sigmoid后的权重\n",
    "        weighted_embeddings = [sigmoid_weights[i] * embeddings[i] for i in range(len(embeddings))]\n",
    "        concatenated_embeddings = torch.cat(weighted_embeddings, dim=-1)  # 拼接加权后的嵌入向量\n",
    "        return concatenated_embeddings\n",
    "\n",
    "# 示例用法\n",
    "num_features = 3  # 假设有3个特征\n",
    "embedding_dim = 10  # 每个特征的嵌入维度\n",
    "batch_size = 2  # 假设batch大小为2\n",
    "\n",
    "# 创建示例的嵌入向量\n",
    "embeddings = [torch.randn(batch_size, embedding_dim) for _ in range(num_features)]\n",
    "\n",
    "# 创建LHUC模块\n",
    "lruc = LHUC(num_features, embedding_dim)\n",
    "\n",
    "# 前向传播\n",
    "weighted_embeddings = lruc(embeddings)\n",
    "\n",
    "print(\"Weighted and Concatenated Embeddings:\", weighted_embeddings.shape)"
   ]
  }
 ],
 "metadata": {
  "language_info": {
   "name": "python"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
