# coding: utf-8
import torch
import torch.nn as nn
from utils.mydataloader import *

class BiLSTM_ATT(nn.Module):
    """Bidirectional LSTM with attention for relation classification.

    Args:
        vocab_size: size of the word vocabulary.
        pos_size: size of the relative-position vocabulary (the set of
            possible token-to-entity distances in a sentence).
        tag_size: number of relation labels.
    """

    def __init__(self, vocab_size, pos_size, tag_size):
        super(BiLSTM_ATT, self).__init__()
        self.vocab_size = vocab_size
        self.pos_size = pos_size
        self.tag_size = tag_size

        # Hyperparameters come from the project-wide `config` object
        # (brought into scope by `from utils.mydataloader import *`).
        self.batch_size = config.batch_size
        self.embedding_dim = config.embedding_dim
        self.hidden_dim = config.hidden_dim
        self.position_dim = config.position_dim
        self.dropout = config.dropout

        # BUG FIX: the original code called nn.Embedding() with no arguments,
        # which raises TypeError — nn.Embedding requires
        # (num_embeddings, embedding_dim).
        self.word_embedding = nn.Embedding(self.vocab_size, self.embedding_dim)

        # The remaining layers complete the author's original comment:
        # dropout, position embeddings for entity 1 and entity 2, a
        # bidirectional LSTM, a second dropout, a fully-connected output
        # layer, and a dropout applied to the attention weights.
        self.dropout_emb = nn.Dropout(self.dropout)
        self.pos1_embedding = nn.Embedding(self.pos_size, self.position_dim)
        self.pos2_embedding = nn.Embedding(self.pos_size, self.position_dim)
        # NOTE(review): hidden_dim is assumed to be the total hidden width
        # across both directions, hence hidden_size=hidden_dim // 2 with
        # bidirectional=True; the LSTM input is the word embedding
        # concatenated with both position embeddings — confirm against the
        # forward() implementation once it exists.
        self.lstm = nn.LSTM(
            input_size=self.embedding_dim + 2 * self.position_dim,
            hidden_size=self.hidden_dim // 2,
            num_layers=1,
            bidirectional=True,
        )
        self.dropout_lstm = nn.Dropout(self.dropout)
        self.hidden2tag = nn.Linear(self.hidden_dim, self.tag_size)
        self.dropout_att = nn.Dropout(self.dropout)

