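"""
Pretrain a BERT language model from raw text with NeMo and PyTorch Lightning.

The script reads its configuration from conf/bert_pretraining_from_text_config.yaml
via Hydra and accepts command-line overrides. The launch below is a minimal sketch
only: the override keys (model.train_ds.data_file, trainer.devices, trainer.max_epochs)
are assumptions based on a typical config of this name and may differ in your setup.

    python <this_script>.py \
        model.train_ds.data_file=<path to training text> \
        trainer.devices=2 \
        trainer.max_epochs=2
"""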
import pytorch_lightning as pl
from omegaconf import DictConfig, OmegaConf
from pytorch_lightning.strategies import DDPStrategy

from nemo.collections.nlp.models.language_modeling import BERTLMModel
from nemo.core.config import hydra_runner
from nemo.utils import logging
from nemo.utils.exp_manager import exp_manager
|
|
@hydra_runner(config_path="conf", config_name="bert_pretraining_from_text_config")
def main(cfg: DictConfig) -> None:
    logging.info(f'Config:\n {OmegaConf.to_yaml(cfg)}')

    # DDP with find_unused_parameters=True so parameters that receive no gradient
    # in a given step do not stall the gradient all-reduce.
    trainer = pl.Trainer(strategy=DDPStrategy(find_unused_parameters=True), **cfg.trainer)

    # Set up experiment directories, logging, and checkpointing.
    exp_manager(trainer, cfg.get("exp_manager", None))

    # Build the BERT language model from the config and pretrain it.
    bert_model = BERTLMModel(cfg.model, trainer=trainer)
    trainer.fit(bert_model)

    # Optionally export the trained model as a .nemo archive.
    if cfg.model.nemo_path:
        bert_model.save_to(cfg.model.nemo_path)
|
|
if __name__ == '__main__':
    main()
|
|