import torch
import numpy as np
import torch.nn as nn
import torch.nn.functional as F
from torch.nn.utils.rnn import pack_padded_sequence, pad_packed_sequence
import torch.optim as optim

import sys
sys.path.append(".")
from utils import EntityDictionary, WordDictionary

"""
    模型：BiLSTM模型和Bilstm+CRF模型，均使用pytorch实现
"""

class ModelArgs:
    """Hyper-parameter container shared by the models in this module."""

    # Per-direction hidden size of the BiLSTM layers.
    hidden_dim = 128
    # Width of the word-embedding vectors fed into the LSTM.
    embedding_dim = 256

class BiLSTM_CRF(nn.Module):
    """BiLSTM + CRF tagger.

    The BiLSTM sub-module produces per-token emission scores; the CRF part is
    a single learned label-to-label transition matrix.  ``forward`` returns the
    combined score tensor and ``test`` decodes the most likely label sequence
    with the Viterbi algorithm.
    """

    def __init__(self, d1:WordDictionary, d2:EntityDictionary):
        super(BiLSTM_CRF, self).__init__()
        self.d1, self.d2 = d1, d2
        self.label_size = len(d2)

        # BiLSTM backbone producing per-token emission scores.
        self.bilstm = BiLSTM(d1, d2)
        # CRF parameter: a (label_size, label_size) transition matrix, initialised
        # uniformly; transition_score[i, j] scores moving from label i to label j.
        self.transition_score = nn.Parameter(torch.ones([self.label_size, self.label_size]) / self.label_size)

    def forward(self, padded, length):
        """Combine emission and transition scores.

        Returns a tensor of shape (batch, seq_len, label_size, label_size)
        where entry [b, t, i, j] = emission[b, t, j] + transition[i, j].
        """
        emission_score = self.bilstm(padded, length)
        # Broadcast emissions over a new "previous label" axis (dim 2) and add
        # the transition matrix (broadcast over batch and time).
        crf_score = emission_score.unsqueeze(2).expand(-1, -1, self.label_size, -1) + \
                    self.transition_score.unsqueeze(0)

        return crf_score

    def test(self, sent_tensor):
        """
            Decode the most likely label sequence with the Viterbi algorithm.

            Assumes sent_tensor is a single (1, seq_len) sentence whose first
            token is labelled [BEG] and whose last token is labelled [END] --
            TODO confirm against the caller.
        """
        BEG = self.d2["[BEG]"]
        END = self.d2["[END]"]
        PAD = self.d2["[PAD]"]  # NOTE(review): unused in this method
        label_size = len(self.d2)
        length = len(sent_tensor[0])
        crf_score = self.forward(sent_tensor, [length])
        device = crf_score.device

        # `viterbi` is the dynamic-programming table (best path score ending in
        # each label at each step); `note` stores the back-pointers (the best
        # previous label for each current label).
        viterbi = torch.zeros(1, length, label_size).to(device)
        note = torch.zeros(1, length, label_size).long().to(device)
        for t in range(length):
            if t == 0:
                # First step: all paths start from the [BEG] label.
                viterbi[:, t, :] = crf_score[:, t, BEG, :]
                note[:, t, :] = BEG
            else:
                # For each current label j, choose the previous label i that
                # maximises viterbi[t-1, i] + crf_score[t, i, j].
                max_scores, prev_labels = torch.max(
                    viterbi[:, t-1, :].unsqueeze(2) + 
                    crf_score[:, t, :, :], 
                    dim=1
                )
                viterbi[:, t, :] = max_scores
                note[:, t, :] = prev_labels

        # Backtrack the best path starting from the last token (assumed [END]).
        tags = []
        tags_t = None
        # Flatten the pointer table so gather can address (position t, label l)
        # as the flat index t * label_size + l.
        note = note.view(1, -1)
        for t in range(length-1, 0, -1):
            if t == length-1:
                index = torch.ones(1, ).long() * (t * label_size)
                index = index.to(device)
                index += END
            else:
                tags_t = tags_t.squeeze(1)
                index = torch.ones(1, ).long() * (t * label_size)
                index = index.to(device)
                index += tags_t.long()
            
            # Best label at position t-1 given the chosen label at position t.
            tags_t = note[:].gather(dim=1, index = index.unsqueeze(1).long())
            tags.append(tags_t.squeeze().item())

        # Backtracking emits labels in reverse order, so flip before returning.
        tags = list(reversed(tags))
        return tags



class BiLSTM(nn.Module):
    """Bidirectional LSTM tagger emitting per-token label scores.

    Embeds word ids, runs them through a 2-layer BiLSTM, and projects the
    concatenated forward/backward hidden states onto the label space.
    """

    def __init__(self, d1:WordDictionary, d2:EntityDictionary):
        super(BiLSTM, self).__init__()

        # Word-id -> dense vector lookup.
        self.embedding_layer = nn.Embedding(len(d1), ModelArgs.embedding_dim)
        self.bilstm = nn.LSTM(
            ModelArgs.embedding_dim,
            ModelArgs.hidden_dim,
            batch_first=True,
            num_layers=2,
            bidirectional=True,
            dropout=0.3,
        )
        # Both directions are concatenated, hence 2 * hidden_dim input features.
        self.score_layer = nn.Linear(2*ModelArgs.hidden_dim, len(d2))

    def forward(self, padded, length):
        """Map a padded id batch to per-token label scores.

        Returns a tensor of shape (batch, seq_len, n_labels).
        """
        embedded = self.embedding_layer(padded)
        # Pack so the LSTM skips padding positions beyond each true length.
        packed_input = pack_padded_sequence(embedded, length, batch_first=True, enforce_sorted=False)
        packed_output, _ = self.bilstm(packed_input)
        lstm_out, _ = pad_packed_sequence(packed_output, batch_first=True)
        return self.score_layer(lstm_out)