from typing import Tuple
import torch
import torch.nn as nn
from torch.utils.tensorboard import SummaryWriter


class SchedulePolicyNet(nn.Module):
    """Scheduling policy: scores tasks and returns a softmax distribution.

    Input is a pair ``(task_features, processor_features)``:

    * ``task_features`` — shape ``(n_tasks, d_tasks)``, one row per task.
    * ``processor_features`` — a flat vector of length
      ``d_processors * n_processors`` (all processors concatenated;
      assumed 1-D — TODO confirm against the caller).

    Output is a 1-D tensor of ``n_tasks`` probabilities summing to 1.
    """

    def __init__(self, n_processors: int, d_processors=2, d_tasks=2, d_model=64):
        super().__init__()
        self.d_processors = d_processors
        self.d_tasks = d_tasks
        self.n_processors = n_processors

        # Single-layer, single-head transformer mixes information across tasks.
        self.task_encoder_layer = nn.TransformerEncoderLayer(
            d_model=d_model, dim_feedforward=256, nhead=1, batch_first=True)
        self.task_encoder = nn.TransformerEncoder(
            self.task_encoder_layer, num_layers=1, enable_nested_tensor=False)

        # Project raw task / processor features into the shared model width.
        self.task_prenet = nn.Linear(self.d_tasks, d_model)
        self.processor_prenet = nn.Linear(
            self.d_processors * self.n_processors, d_model)
        # Joint (task ‖ processor) embedding -> one scalar score per task.
        self.out = nn.Linear(d_model * 2, 1)
        self.act = nn.Softmax(dim=-1)

    def forward(self, x: Tuple[torch.Tensor, torch.Tensor]):
        """Score every task against the shared processor state.

        ``x`` is ``(task_features, processor_features)`` as described on the
        class. Returns a probability over tasks, shape ``(n_tasks,)``.
        """
        task_feats, proc_feats = x
        n_tasks = task_feats.shape[0]

        # Per-task embeddings, contextualised by the transformer encoder.
        task_emb = self.task_encoder(self.task_prenet(task_feats))

        # One embedding of the whole processor state, repeated for each task
        # row so it can be concatenated feature-wise (expand does not copy).
        proc_emb = self.processor_prenet(proc_feats)
        proc_emb = proc_emb.expand(n_tasks, proc_emb.shape[0])

        joint = torch.cat((task_emb, proc_emb), dim=1)
        scores = self.out(joint).squeeze(1)
        return self.act(scores)


if __name__ == '__main__':
    # Smoke test: trace the model once and write its graph to TensorBoard.
    x1 = torch.tensor([[1.0, 2.0], [3.0, 4.0]])  # task features (n_tasks, d_tasks)
    x2 = torch.tensor([1.0, 2.0, 3.0, 4.0])      # flat processor features
    model = SchedulePolicyNet(2)
    with SummaryWriter(comment='model') as w:
        # forward() takes a SINGLE tuple argument. add_graph/jit.trace unpack
        # a list of inputs into positional args, so the pair must be wrapped
        # as one element: [x1, x2] would call forward(x1, x2) and fail.
        w.add_graph(model, [(x1, x2)])
