import numpy as np

import torch as T
from torch.autograd import Variable

class ReqModel(T.nn.Module):
    """Encoder: feeds request embeddings through a single-layer LSTM.

    Inputs/outputs use batch-first layout, i.e. (batch, seq, feature).
    """

    def __init__(self, embDim, hidDim):
        super(ReqModel, self).__init__()
        # One LSTM layer mapping embDim-sized embeddings to hidDim states.
        self.lstmLayer = T.nn.LSTM(
            input_size=embDim,
            hidden_size=hidDim,
            num_layers=1,
            batch_first=True,
        )

    def forward(self, reqEmb):
        """Return per-timestep encoder outputs and the final (h, c) state."""
        outputs, final_state = self.lstmLayer(reqEmb)
        return outputs, final_state

class ResModel(T.nn.Module):
    """Decoder: LSTM over response embeddings with dot-product attention
    over the encoder outputs, followed by a two-layer classifier head
    producing log-probabilities over the output vocabulary.
    """

    def __init__(self, seqLen, embDim, hidDim, outDim):
        # seqLen is accepted for interface compatibility; it is not used
        # internally (the decoder handles variable-length inputs).
        super(ResModel, self).__init__()

        self.lstmLayer = T.nn.LSTM(
            input_size=embDim,
            hidden_size=hidDim,
            batch_first=True,
            num_layers=1
        )
        # Normalizes attention scores over the encoder-time axis.
        self.ratio = T.nn.Softmax(dim=2)
        # Classifier head over [context ; decoder state] (hence hidDim*2).
        self.dense1 = T.nn.Linear(hidDim*2, 256)
        self.dense2 = T.nn.Linear(256, outDim)

    def forward(self, resEmb, reqOut, lasthc):
        """Run one decoding pass.

        Args:
            resEmb: decoder input embeddings, (batch, resLen, embDim).
            reqOut: encoder outputs, (batch, reqLen, hidDim).
            lasthc: (h, c) LSTM state to start from.

        Returns:
            (logProbs, hc): log-softmax scores (batch, resLen, outDim)
            and the decoder's final LSTM state.
        """
        resOut, hc = self.lstmLayer(resEmb, lasthc)
        # Dot-product attention scores: (batch, resLen, reqLen).
        attention = self.ratio(T.matmul(resOut, reqOut.transpose(1, 2)))
        # Context vectors: attention-weighted sum of encoder states.
        context = T.matmul(attention, reqOut)
        combine = T.cat([context, resOut], dim=2)
        # Functional activations instead of constructing a fresh module
        # per forward call (they are stateless, so behavior is identical).
        d1out = T.tanh(self.dense1(combine))
        d2out = T.nn.functional.log_softmax(self.dense2(d1out), dim=2)
        return d2out, hc

from itertools import chain

class LSTM():
    """Seq2seq wrapper tying the encoder (ReqModel) and decoder (ResModel)
    together with one shared Adam optimizer.

    Token conventions (assumed from the padding code — TODO confirm against
    the Embedding vocabulary): 0 = PAD, 1 = BOS, 2 = EOS.
    """

    def __init__(self, seqLen, embDim, hidDim, outDim, Embedding):
        self.outDim = outDim
        self.seqLen = seqLen

        self.reqModel = ReqModel(embDim, hidDim)
        self.resModel = ResModel(seqLen, embDim, hidDim, outDim)
        # NOTE(review): padding positions (target 0) are included in the
        # loss; if 0 is never a real target, NLLLoss(ignore_index=0)
        # would be more appropriate — left unchanged to preserve behavior.
        self.lossFunc = T.nn.NLLLoss()
        self.Embedding = Embedding

        self.lr = 0.01
        self.optim = self._makeOptim(self.lr)

    def _makeOptim(self, lr):
        # One Adam optimizer covering both sub-models' parameters.
        return T.optim.Adam(
            chain(self.reqModel.parameters(), self.resModel.parameters()),
            lr=lr
        )

    def mulLR(self, rate):
        """Scale the learning rate by `rate` and rebuild the optimizer.

        NOTE(review): rebuilding Adam discards its moment estimates;
        updating `param_group['lr']` in place would preserve them.
        """
        self.lr *= rate
        self.optim = self._makeOptim(self.lr)

    def load(self, path):
        """Restore model weights from a checkpoint; return its epoch.

        The optimizer state is deliberately not restored (see the saved
        'optimizer' entry); a fresh Adam is built with the saved lr.
        """
        state = T.load(path)
        self.reqModel.load_state_dict(state['net']['req'])
        self.resModel.load_state_dict(state['net']['res'])
        #self.optim.load_state_dict(state['optimizer'])
        self.optim = self._makeOptim(state['lr'])
        return state["epoch"]

    def save(self, path, epoch):
        """Checkpoint weights, optimizer state, epoch and lr.

        `path` is a %-format string; the file is written to `path % epoch`.
        """
        state = {
            'net': {
                'req': self.reqModel.state_dict(),
                'res': self.resModel.state_dict(),
            },
            'optimizer': self.optim.state_dict(),
            'epoch': epoch,
            'lr': self.lr
        }
        T.save(state, path % epoch)

    def trainStep(self, req, res):
        """Run one optimization step on a batch of (request, response)
        token-id lists and return the loss tensor.
        """
        # Wrap with BOS(1)/EOS(2) and right-pad with 0 to a fixed length.
        req = [[1] + x + [2] + [0] * max(0, self.seqLen-len(x)) for x in req]
        # Labels are the decoder inputs shifted left by one step: at step t
        # the target is the token that follows res[t].
        label = [y + [2, 0] + [0] * max(0, self.seqLen-len(y)) for y in res]
        res = [[1] + y + [2] + [0] * max(0, self.seqLen-len(y)) for y in res]

        reqEmb = self.Embedding.batchEval(req)
        resEmb = self.Embedding.batchEval(res)
        resLabel = T.tensor(label)

        self.reqModel.train()
        self.resModel.train()
        self.optim.zero_grad()

        # Forward: encode the request, decode the full response with
        # teacher forcing, starting from the encoder's final state.
        reqOut, hc = self.reqModel(reqEmb)
        out, __ = self.resModel(resEmb, reqOut, hc)
        loss = self.lossFunc(
            out.reshape((-1, self.outDim)),
            resLabel.reshape((-1, ))
        )

        # Backward + parameter update.
        loss.backward()
        self.optim.step()

        return loss

    def eval(self, req):
        """Greedy-decode a response for one request (list of token ids).

        Returns the generated token ids (EOS excluded), at most seqLen long.
        """
        req = [[1] + req + [2] + [0] * max(0, self.seqLen-len(req))]
        reqEmb = self.Embedding.batchEval(req)
        resEmb = self.Embedding.batchEval([[1]])  # start decoding from BOS

        self.reqModel.eval()
        self.resModel.eval()

        res = []
        # Inference only: disable autograd tracking to save memory.
        with T.no_grad():
            reqOut, hc = self.reqModel(reqEmb)
            for _ in range(self.seqLen):
                out, hc = self.resModel(resEmb, reqOut, hc)
                # Greedy pick from the last time step — a single token id.
                token = out[0, -1, :].argmax(dim=0).item()
                if token == 2:  # EOS: stop generating
                    break
                res.append(token)
                resEmb = self.Embedding.batchEval([[token]])

        return res
