from typing import Optional

from lightning.pytorch.callbacks import StochasticWeightAveraging
import wandb

from layer.trainer import Predictor, MaskTrainer,FreqPredictor
from lightning import Trainer
from data_provider.data_loader import *
from lightning.pytorch.callbacks.early_stopping import EarlyStopping
from data_provider.data_factory import data_provider, data_dict
from utils.file_mapping import mapping
from lightning.pytorch.tuner import Tuner
import lightning as pl
from lightning.pytorch.loggers import WandbLogger,TensorBoardLogger
from utils.file_mapping import get_loader
from utils.parsar import read_config
from typing_extensions import Annotated
from typing import List
import typer

# Fixed RNG seed so runs are reproducible; workers=True also seeds
# dataloader worker processes (and keeps DDP ranks consistent).
fix_seed = 1
pl.seed_everything(fix_seed, workers=True)
# Plain tracebacks: disable typer's pretty exception rendering and its
# local-variable dumps (which would print large tensors on failure).
app = typer.Typer(pretty_exceptions_show_locals=False,pretty_exceptions_enable=False)


@app.command(name='fit')
def main(channel: Annotated[Optional[int], typer.Option(help="channel")] = 7,
         data: Annotated[str, typer.Option(help="data")] = 'ETTh1',
         look_back: Annotated[int | None, typer.Option(help="look back window")] = None,
         pre_win: Annotated[int | None, typer.Option(help="predict window")] = None,
         batch_size: Annotated[int, typer.Option(help="batch size")] = 256,
         d_state: Annotated[int, typer.Option(help="d_state")] = 2,
         dim: Annotated[int, typer.Option(help="embedding dim")] = 256,
         config: Annotated[str, typer.Option(help="config yaml")] = '',
         model: Annotated[str | None, typer.Option(help="model")] = None,
         enable_wandb: Annotated[bool, typer.Option(help="log to wandb")] = False,
         devices: Annotated[str | None, typer.Option(help="devices")] = None,
         ):
    """Fit a FreqPredictor on the selected dataset, then run the test split.

    When --config points at a YAML file, its 'fit' section supplies the
    settings; options the user passed explicitly (look_back, pre_win,
    model, devices) take precedence over the config. Test metrics are
    optionally logged to Weights & Biases on the rank-0 process.
    """
    # BUG FIX: the original `max_epochs = 100,` had a trailing comma,
    # binding the tuple (100,) — Trainer(max_epochs=(100,)) fails
    # whenever no config file is supplied.
    max_epochs: int = 100
    if config != '':
        cnf = read_config(config)['fit']
        # Explicit CLI values win over the config file.
        look_back = look_back if look_back is not None else cnf['look_back']
        pre_win = pre_win if pre_win is not None else cnf['pre_win']
        channel = cnf['channel']
        data = cnf['data']
        batch_size = cnf['batch_size']
        dim = cnf['d_model']
        max_epochs = cnf['max_epochs']
        # BUG FIX: `devices` used to default to "1", so the original
        # `devices is None` check was dead and the config value was
        # silently ignored. Defaulting to None makes the override work.
        devices = devices if devices is not None else cnf['devices']
        model = model if model is not None else cnf['model']
        d_state = cnf['d_state']
    if devices is None:
        devices = "1"  # preserve the old default when neither CLI nor config set it

    # Unknown dataset names fall back to the generic 'custom' loader.
    Data = data_dict.get(data, data_dict['custom'])
    test_loader, train_loader, vali_loader = get_loader(Data, batch_size, data, look_back, pre_win)

    predictor = FreqPredictor(dim=dim,
                              pre_win=pre_win,
                              look_back=look_back,
                              channel=channel,
                              use_v_attn=True,
                              metric='mae',
                              use_rev=True, d_state=d_state, model=model)
    trainer = Trainer(max_epochs=max_epochs, devices=devices,
                      callbacks=[EarlyStopping(monitor="val_loss", mode="min", patience=5)],
                      strategy='ddp_find_unused_parameters_true')
    # Only rank 0 talks to wandb so DDP workers don't create duplicate runs.
    if enable_wandb and trainer.is_global_zero:
        wandb.init(
            project="freqformer",
            config={
                "data": data,
                "look_back": look_back,
                "pre_win": pre_win,
                "model": model
            }
        )
    trainer.fit(predictor, train_dataloaders=train_loader, val_dataloaders=vali_loader)
    result = trainer.test(predictor, test_loader)
    if enable_wandb and trainer.is_global_zero:
        wandb.log(result[0])


@app.command(name='vis')
def test(pre_win: Annotated[int, typer.Option(help="predict window")] = 96,
         look_back: Annotated[int, typer.Option(help="look back window")] = 96,
         channel: Annotated[int, typer.Option(help="channel")] = 7,
         data: Annotated[str, typer.Option(help="data")] = 'ETTh1',
         batch_size: Annotated[int, typer.Option(help="channel")] = 256,
         dim: Annotated[int, typer.Option(help="embedding dim")] = 256, ):
    """Restore a trained Predictor from a fixed checkpoint and print it.

    The checkpoint is loaded onto CPU, the model is switched to eval
    mode, and its module structure is dumped to stdout for inspection.
    """
    ckpt_path = '/root/ts_manba/lightning_logs/version_62/checkpoints/epoch=87-step=1056.ckpt'
    restored = Predictor.load_from_checkpoint(
        ckpt_path,
        map_location='cpu',
        dim=dim,
        pre_win=pre_win,
        look_back=look_back,
        channel=channel,
        use_v_attn=True,
        use_rev=True,
        d_state=2,
        model='mam',
    )
    restored.eval()
    print(restored)


if __name__ == '__main__':
    # Dispatch to the 'fit' or 'vis' subcommand via the typer app.
    app()
    # test()
