import torch.nn as nn
import torch
from layout.Position import Layout
from transformer.Encoder import EncoderLayer
class Translayout(nn.Module):
    """Self-attention model over token + layout embeddings.

    Embeds token ids, obtains 1D-position and 2D-bounding-box embeddings from
    ``Layout``, concatenates all three along the sequence dimension, runs the
    result through a stack of Transformer encoder layers, sum-pools over the
    sequence, and classifies with a small MLP head.

    Args:
        config: configuration object providing ``token_size``,
            ``embedding_size``, ``hidden_dims``, ``num_attention_heads``,
            ``k_dims``, ``v_dims``, ``attention_probs_dropout_prob``,
            ``num_hidden_layers`` and ``num_classes``.
        bbox: unused; accepted only for backward compatibility.
        position_ids: unused; accepted only for backward compatibility.
    """

    def __init__(self,
                 config,
                 bbox=None,
                 position_ids=None):
        super().__init__()
        # NOTE(review): `bbox` and `position_ids` are never used here; they are
        # kept in the signature so existing callers do not break.
        self.token_embedding = nn.Embedding(config.token_size, config.embedding_size)

        # Produces the (1D positional, 2D bounding-box) embedding pair.
        self.layout_embedding = Layout(config)

        self.layer_stack = nn.ModuleList([
            EncoderLayer(config.embedding_size, config.hidden_dims,
                         config.num_attention_heads, config.k_dims, config.v_dims,
                         dropout=config.attention_probs_dropout_prob)
            for _ in range(config.num_hidden_layers)
        ])

        # Classification head: dropout -> linear -> ReLU -> dropout -> linear.
        self.fc_out = nn.Sequential(
            nn.Dropout(config.attention_probs_dropout_prob),
            nn.Linear(config.embedding_size, config.hidden_dims),
            nn.ReLU(inplace=True),
            nn.Dropout(config.attention_probs_dropout_prob),
            nn.Linear(config.hidden_dims, config.num_classes),
        )

    def forward(self, token_id, bbox, pos_id):
        """Classify a batch of sequences.

        Args:
            token_id: token-id tensor, assumed (batch, seq_len) — TODO confirm.
            bbox: bounding boxes forwarded to the layout embedding.
            pos_id: 1D position ids forwarded to the layout embedding.

        Returns:
            Logits of shape (batch, num_classes).
        """
        # (batch, seq_len, embedding_size)
        token_embedding = self.token_embedding(token_id)

        pos_1d_embedding, pos_2d_embedding = self.layout_embedding(bbox, pos_id)

        # Concatenate along the sequence dimension (dim=1), tripling seq_len.
        # NOTE(review): many layout models *sum* these embeddings instead of
        # concatenating; confirm concatenation is intentional.
        enc_out = torch.cat((token_embedding, pos_1d_embedding, pos_2d_embedding), dim=1)

        for layer in self.layer_stack:
            enc_out, _ = layer(enc_out)

        # Sum-pool over the sequence dimension -> (batch, embedding_size).
        # Equivalent to the former permute(0, 2, 1) + sum(dim=-1), minus the
        # needless transpose.
        pooled = torch.sum(enc_out, dim=1)
        return self.fc_out(pooled)

if __name__ == "__main__":
    # Smoke test: load pre-processed FUNSD annotations and collect the first
    # bounding box of every entry.
    import json

    # NOTE(review): hard-coded local Windows path; adjust to your environment.
    with open("C:/CODE/text_utils/funsd/data_res/0000990274.json") as f:
        data = json.load(f)
    # The `with` block already closed the file — no explicit close() needed.

    bbox = [inf["bbox"][0] for inf in data]
    print(bbox)
    # bbox = torch.tensor([bbox])
    # position_ids = torch.arange(len(bbox[0]))
    # token_id = position_ids
