from pytorch_lightning.callbacks import ModelCheckpoint
import time
from datam import *
from model_restnet18 import *
import pytorch_lightning as pl
from pytorch_lightning.loggers import WandbLogger
from pytorch_lightning import Trainer
from config import  config
import yaml
import wandb
from pytorch_lightning.callbacks import EarlyStopping
def main():
    """Run one wandb-sweep trial: build the data module and model, log two
    sample images, then train and test with PyTorch Lightning.

    Hyperparameters (epochs, batch_size, resize) are read from the active
    sweep run via ``wandb.config``; static settings come from ``config``.
    """
    pl.seed_everything(1)

    # Run name is a timestamp so each sweep trial gets a unique, sortable name.
    wandb_logger = WandbLogger(
        log_model="all",
        config=config.__dict__,
        name=time.strftime("%Y_%m_%d_%H_%M_%S", time.localtime(time.time())),
    )

    # Keep only the single best checkpoint by validation accuracy (mode='max'),
    # plus the last epoch for resuming.
    checkpoint_callback = ModelCheckpoint(
        monitor='val_accuracy',
        dirpath=config.modelPATH,
        filename='sample-mnist原始-{epoch:02d}-{val_loss:.2f}',
        save_top_k=1,
        mode='max',
        save_last=True,
    )

    trainer = Trainer(
        gpus=config.AVAIL_GPUS,
        max_epochs=wandb.config.epochs,
        callbacks=[checkpoint_callback],
        logger=wandb_logger,
    )

    data_alz = DataM(config.data_dir, wandb.config.batch_size, config.AVAIL_GPUS, wandb.config.resize)
    data_alz.setup()

    # Log the first two training samples to a wandb Table as a sanity check.
    # NOTE(review): `T` is presumably torchvision.transforms pulled in via a
    # star import above — confirm.
    images, labels = next(iter(data_alz.train_dataloader()))
    # BUG FIX: the column order must match the per-row value order — each row
    # is [image, label], so the header is ["image", "caption"], not the reverse.
    columns = ["image", "caption"]
    my_data = [
        [wandb.Image(T.ToPILImage()(images[0])), labels[0]],
        [wandb.Image(T.ToPILImage()(images[1])), labels[1]],
    ]
    wandb_logger.log_table(key="my_samples", columns=columns, data=my_data)

    model = Model(net)
    # Train the model, then evaluate on the held-out test set.
    trainer.fit(model, data_alz)
    trainer.test(model, data_alz.test_dataloader())

if __name__ == '__main__':
    # Load the sweep's hyperparameter search space from YAML.
    # safe_load is the recommended API: equivalent to FullLoader for plain
    # config files, without permitting arbitrary Python object construction.
    with open('config.yaml') as file:
        sweep_configuration = yaml.safe_load(file)
    # Register the sweep with wandb and run a single trial of main() under
    # its agent (count=1).
    sweep_id = wandb.sweep(sweep=sweep_configuration, project=config.project_name)
    wandb.agent(sweep_id, function=main, count=1)


