import torch

import torch.nn as nn
from dotmap import DotMap

# LLM-generated movie-description embeddings
class LLMEmbedding(nn.Module):
    """Project precomputed LLM text embeddings into the model's hidden space.

    A single linear layer maps inputs of shape ``B x T x embedding_size``
    to ``B x T x hidden_units``.
    """

    def __init__(self, args):
        super().__init__()
        # One dense projection from the LLM embedding dimension to the
        # model's hidden dimension. Attribute name `fc` is kept so that
        # existing checkpoints / state_dict keys remain compatible.
        self.fc = nn.Linear(args.embedding_size, args.hidden_units)

    def forward(self, d):
        """Return the projected embeddings.

        `d` is a dict carrying the raw embeddings under the key
        'llm_embedding' (B x T x embedding_size); the result is
        B x T x hidden_units.
        """
        return self.fc(d['llm_embedding'])


# Quick smoke test: push a random batch of fake LLM embeddings through
# the projection layer and confirm the output shape.
config = DotMap()
config.hidden_units = 64
config.embedding_size = 1024

# B=32 sequences of T=200 items, each with a 1024-dim LLM embedding.
fake_batch = {'llm_embedding': torch.randn(size=(32, 200, 1024))}
projector = LLMEmbedding(config)
result = projector(fake_batch)
print(result.shape)



#########################

## How do we align item sids to their embeddings?


## Suppose this user's interaction sequence is:

# Example user sequence of item sids, to be mapped to embedding rows
# in the alignment experiment sketched above.
test_seq = [1, 2, 4]


