from typing import Any
import torch
import itertools
from torch import nn


class Parameters:
    """Lightweight bundle of named hyper-parameters.

    Values are supplied as keyword arguments and retrieved by calling
    the instance, e.g. ``params('train_epochs')``. Missing keys yield
    ``None`` rather than raising.
    """

    def __init__(self, **kwargs: Any) -> None:
        self._kwargs = kwargs

    def __call__(self, key: str) -> Any:
        """Return the value stored under *key*, or ``None`` if absent."""
        # dict.get already defaults to None; the explicit default was redundant.
        return self._kwargs.get(key)

    def set(self, **kwargs: Any) -> None:
        """Add or overwrite parameters in place."""
        self._kwargs.update(kwargs)
    

def parameters_constructor(
        g_a2b_parameters, g_b2a_parameters,
        d_a_parameters, d_b_parameters,
        lr: float = 0.0001,
) -> Parameters:
    """Build the default training configuration for a CycleGAN-style setup.

    Args:
        g_a2b_parameters: parameters of the A->B generator (iterable of tensors).
        g_b2a_parameters: parameters of the B->A generator.
        d_a_parameters: parameters of discriminator A.
        d_b_parameters: parameters of discriminator B.
        lr: learning rate shared by all three Adam optimizers.

    Returns:
        A ``Parameters`` bundle holding epoch counts, loss weights,
        loss functions, and the optimizers.
    """
    return Parameters(
        train_epochs=200,
        decay_epoch=5,
        lambda_cyc=10.0,  # weight of the cycle-consistency loss
        lambda_id=5.0,    # weight of the identity loss
        loss_fn={
            'criterion_GAN': nn.MSELoss(),
            'criterion_cycle': nn.L1Loss(),
            'criterion_identity': nn.L1Loss(),
        },
        optimizer={
            # Both generators share a single optimizer; their parameter
            # iterables are joined lazily with itertools.chain.
            'g': torch.optim.Adam(
                itertools.chain(g_a2b_parameters, g_b2a_parameters),
                lr=lr,
                betas=(0.5, 0.999),
            ),
            'd_a': torch.optim.Adam(d_a_parameters, lr=lr, betas=(0.5, 0.999)),
            'd_b': torch.optim.Adam(d_b_parameters, lr=lr, betas=(0.5, 0.999)),
        },
    )