from typing import Any, Optional

import pytorch_lightning as pl
from pytorch_lightning.utilities.types import STEP_OUTPUT
from torch.nn import Sequential, Linear, ReLU, CELU
from torch.optim import Adam
from torchmetrics import MeanSquaredLogError
import torch



class Node2VecPredictor(pl.LightningModule):
    """MLP regression head over node2vec embeddings, trained with MSLE loss.

    Maps an ``in_feats``-dimensional embedding to ``out_feats`` values
    (popularity scores) through a stack of ``num_mlp`` linear layers and
    optimizes :class:`torchmetrics.MeanSquaredLogError` with Adam.

    Args:
        in_feats: Dimensionality of the input embedding.
        out_feats: Output dimensionality (default 1, a scalar popularity).
        hid_feats: Hidden width; defaults to ``in_feats`` when ``None``.
        learning_rate: Adam learning rate.
        weight_decay: Adam weight-decay (L2) coefficient.
        num_mlp: Number of linear layers in the head.
    """

    def __init__(self, in_feats, out_feats=1, hid_feats=None, learning_rate=5e-3, weight_decay=5e-3, num_mlp=2):
        super().__init__()  # idiomatic Python-3 super()
        self.loss = MeanSquaredLogError()
        self.learning_rate = learning_rate
        self.weight_decay = weight_decay
        self.in_feats = in_feats
        self.out_feats = out_feats
        self.num_mlp = num_mlp
        # Hidden width defaults to the input width when not given.
        self.hid_feats = in_feats if hid_feats is None else hid_feats

        self.output_mlps = Sequential(*self.init_output_layer())

    def init_output_layer(self):
        """Build the layer list for the MLP head.

        Every linear layer is followed by a ReLU; hidden (non-final) layers
        additionally get a CELU(0.5).

        NOTE(review): because the CELU is applied *after* the ReLU, its input
        is already non-negative and ``CELU(x) == x`` for ``x >= 0`` — the CELU
        is a no-op in the forward pass. It is kept in place so ``Sequential``
        indices (and thus checkpoint ``state_dict`` keys) stay compatible with
        existing checkpoints; confirm the intended activation order before
        removing it.
        """
        mlps = []
        for i in range(self.num_mlp):
            ipt = self.in_feats if i == 0 else self.hid_feats
            opt = self.out_feats if i == self.num_mlp - 1 else self.hid_feats
            mlps.append(Linear(ipt, opt, bias=True))  # keyword bias for clarity
            mlps.append(ReLU())
            if i != self.num_mlp - 1:
                mlps.append(CELU(0.5))
        return mlps

    def forward(self, x) -> Any:
        """Return predicted popularity for an embedding batch ``x``.

        The trailing ReLU keeps predictions non-negative, which MSLE requires.
        """
        return self.output_mlps(x)

    def configure_optimizers(self):
        """Adam over all parameters with the configured lr and weight decay."""
        return Adam(self.parameters(), lr=self.learning_rate, weight_decay=self.weight_decay)

    def _shared_step(self, batch, stage):
        """Compute and log the MSLE loss for one ``(x, y)`` batch.

        Shared by the train/validation/test steps; ``stage`` selects the
        logged metric name (e.g. ``'train loss'``).
        """
        x, y = batch
        loss = self.loss(self(x), y)
        self.log(f'{stage} loss', loss)
        return loss

    def training_step(self, *args, **kwargs) -> STEP_OUTPUT:
        return self._shared_step(args[0], 'train')

    def validation_step(self, *args, **kwargs) -> Optional[STEP_OUTPUT]:
        return self._shared_step(args[0], 'valid')

    def test_step(self, *args, **kwargs) -> Optional[STEP_OUTPUT]:
        loss = self._shared_step(args[0], 'test')
        # Also logged as 'hp_metric' so TensorBoard's hparams view picks it up.
        self.log('hp_metric', loss)
        return loss
