import os

import pytorch_lightning as pl
import torch
from pytorch_lightning import Trainer
from pytorch_lightning.loggers import CometLogger

from datam import *
from model import *


if __name__ == '__main__':
    # Arguments made to CometLogger are passed on to the comet_ml.Experiment class.
    # SECURITY: prefer supplying the key via the COMET_API_KEY environment
    # variable; the hard-coded value remains only as a backward-compatible
    # fallback and should be rotated/revoked once callers have migrated.
    comet_logger = CometLogger(
        api_key=os.environ.get("COMET_API_KEY", "LI7H4KkJOv2oQX4ax6iVfPY9n"),
        project_name="log入门例子-Comet",
    )

    # Use at most one GPU; fall back to CPU (with a smaller batch) when none
    # is available.
    AVAIL_GPUS = min(1, torch.cuda.device_count())
    BATCH_SIZE = 256 if AVAIL_GPUS else 64

    # Fix all RNG seeds for reproducibility before any data/model setup,
    # then log the run configuration.
    pl.seed_everything(1234)
    comet_logger.log_hyperparams({"batch_size": BATCH_SIZE, "AVAIL_GPUS": AVAIL_GPUS})

    # DataM / Model / net are provided by the star imports of datam and model.
    data_mnist = DataM("../../../../data/", BATCH_SIZE, AVAIL_GPUS)
    model = Model(net)
    trainer = Trainer(gpus=AVAIL_GPUS, max_epochs=3, logger=comet_logger)
    # Train the model
    trainer.fit(model, data_mnist)


