import json

import torch
from torch.nn import Module, Linear, ReLU, Softmax, Embedding, ModuleList
import numpy as np


class WideAndDeep(Module):
    """Five-layer MLP tower: 793 -> 1024 -> 512 -> 256 -> 128 -> 2.

    ReLU follows every hidden layer; the final layer emits raw logits
    (no softmax). Expects input of shape (batch, 793).
    """

    # Layer widths of the tower, input first, logits last.
    _DIMS = (793, 1024, 512, 256, 128, 2)

    def __init__(self):
        super().__init__()
        # Register layers under the historical attribute names
        # (dense_1..dense_5, relu_1..relu_4) so state_dict keys are
        # identical to the hand-written version.
        pairs = zip(self._DIMS[:-1], self._DIMS[1:])
        for idx, (n_in, n_out) in enumerate(pairs, start=1):
            setattr(self, f"dense_{idx}", Linear(n_in, n_out))
            if idx < len(self._DIMS) - 1:
                setattr(self, f"relu_{idx}", ReLU())

    def forward(self, x):
        """Run the tower; returns logits of shape (batch, 2)."""
        for idx in range(1, 5):
            x = getattr(self, f"dense_{idx}")(x)
            x = getattr(self, f"relu_{idx}")(x)
        return self.dense_5(x)


class WideAndDeep_plus(Module):
    """Wide & Deep variant with learned embeddings for categorical inputs.

    Expects input of shape (batch, 39): the first 13 columns are dense
    features, the remaining 26 are integer-coded categorical features.
    Each categorical value is embedded into 30 dims (26 * 30 = 780),
    concatenated with the 13 dense features (793 total) and fed through
    the MLP tower. Output is raw logits of shape (batch, 2).
    """

    def __init__(self, emb_config=None, config_path='dataset/numbers.json'):
        """
        Args:
            emb_config: optional sequence of 26 vocabulary sizes, one per
                categorical feature. When None (the default, matching the
                original behavior) the sizes are loaded from config_path.
            config_path: path of the JSON file holding vocabulary sizes;
                only read when emb_config is None.
        """
        super(WideAndDeep_plus, self).__init__()
        self.emb_config = emb_config if emb_config is not None else self.get_count(config_path)
        self.embeddings = ModuleList(
            [Embedding(self.emb_config[i], 30) for i in range(26)]
        )
        self.dense_0 = Linear(793, 1200)
        # NOTE(review): no activation between dense_0 and dense_1 — two
        # stacked linear layers collapse to a single affine map. Confirm
        # this is intentional; adding a ReLU here would change behavior.
        self.dense_1 = Linear(1200, 1024)
        self.relu_1 = ReLU()
        self.dense_2 = Linear(1024, 512)
        self.relu_2 = ReLU()
        self.dense_3 = Linear(512, 256)
        self.relu_3 = ReLU()
        self.dense_4 = Linear(256, 128)
        self.relu_4 = ReLU()
        self.dense_5 = Linear(128, 2)

    def get_count(self, config_path='dataset/numbers.json'):
        """Load the per-feature vocabulary sizes from a JSON file."""
        with open(config_path, 'r') as f:
            emb_config = json.load(f)
        return emb_config

    def forward(self, x):
        """Embed the categorical columns, concatenate with the dense
        columns, and run the MLP tower. Returns (batch, 2) logits."""
        x_dense, x_cat = x.split([13, 26], dim=1)
        # Vectorized embedding lookup: one Embedding call per feature over
        # the whole batch, replacing the original per-sample nested Python
        # loops with repeated torch.cat (quadratic concatenation). The
        # resulting (batch, 780) layout — feature 0's 30 dims first, then
        # feature 1's, etc. — matches the original exactly.
        embedded = torch.cat(
            [self.embeddings[j](x_cat[:, j].long()) for j in range(26)],
            dim=1,
        )
        x = torch.cat([x_dense, embedded], dim=1)  # (batch, 793)
        x = self.dense_0(x)
        x = self.dense_1(x)
        x = self.relu_1(x)
        x = self.dense_2(x)
        x = self.relu_2(x)
        x = self.dense_3(x)
        x = self.relu_3(x)
        x = self.dense_4(x)
        x = self.relu_4(x)
        x = self.dense_5(x)
        return x