import pytorch_lightning as pl
import torch
from torch import nn, optim
import torch.nn.functional as F
import torchmetrics

class MyModel(pl.LightningModule):
    """Two-layer MLP regressor: Linear -> tanh -> Dropout -> Linear(1), trained with MSE.

    Args:
        learning_rate: Adam learning rate.
        input_size: number of input features per sample.
        hidden_size: width of the hidden layer.
        dropout_rate: dropout probability applied after the hidden activation.
        weight_decay: L2 penalty handed to Adam (default 0.0).
    """

    def __init__(self, learning_rate, input_size, hidden_size, dropout_rate, weight_decay=0.0):
        super().__init__()
        self.lr = learning_rate
        self.weight_decay = weight_decay
        self.fc1 = nn.Linear(input_size, hidden_size)
        self.fc2 = nn.Linear(hidden_size, 1)
        self.dropout = nn.Dropout(dropout_rate)
        self.loss_function = nn.MSELoss()
        # One metric instance per stage: torchmetrics metrics accumulate running
        # state, so sharing a single R2Score across train/val/test would mix the
        # stages' statistics within an epoch.
        self.r2_score = torchmetrics.R2Score()        # training R2 (attribute name kept for compat)
        self.val_r2_score = torchmetrics.R2Score()
        self.test_r2_score = torchmetrics.R2Score()
        self.mae = torchmetrics.MeanAbsoluteError()   # was constructed but never logged; used in validation
        self.save_hyperparameters()

    def forward(self, x):
        """Map a (batch, input_size) tensor to (batch, 1) raw predictions."""
        x = torch.tanh(self.fc1(x))
        x = self.dropout(x)
        return self.fc2(x)

    def _shared_step(self, batch):
        """Run the model on an (x, y) batch; return (y_pred, y, mse_loss)."""
        x, y = batch
        # squeeze(-1) removes only the trailing size-1 output dim. A bare
        # squeeze() would also collapse the batch dim when batch_size == 1,
        # producing a 0-d prediction that broadcasts against y in MSELoss
        # and breaks R2Score.
        y_pred = self(x).squeeze(-1)
        return y_pred, y, self.loss_function(y_pred, y)

    def training_step(self, batch, batch_idx):
        y_pred, y, loss = self._shared_step(batch)
        self.log('train_loss', loss, on_step=False, on_epoch=True, prog_bar=True)
        self.log('train_r2', self.r2_score(y_pred, y), on_epoch=True)
        return loss

    def validation_step(self, batch, batch_idx):
        y_pred, y, loss = self._shared_step(batch)
        self.log('val_loss', loss, on_epoch=True)
        self.log('val_r2', self.val_r2_score(y_pred, y), on_epoch=True)
        self.log('val_mae', self.mae(y_pred, y), on_epoch=True)
        return loss

    def test_step(self, batch, batch_idx):
        y_pred, y, loss = self._shared_step(batch)
        self.log('test_loss', loss, on_epoch=True)
        self.log('test_r2', self.test_r2_score(y_pred, y), on_epoch=True)
        return loss

    def predict_step(self, batch, batch_idx, dataloader_idx=0):
        """Return raw (batch, 1) predictions; accepts (x, y) batches or a bare feature tensor."""
        x = batch[0] if isinstance(batch, (tuple, list)) else batch
        return self(x)

    def configure_optimizers(self):
        """Adam over all parameters with the configured learning rate and weight decay."""
        return optim.Adam(self.parameters(), lr=self.lr, weight_decay=self.weight_decay)

    