from transformers import AutoModelForCausalLM, AutoTokenizer,Trainer
from peft import get_peft_model
import wandb
from train_eval_args import train_parser
from data_utils import lora_config, load_dataset_tokenizer, data_collator

def main():
    """Fine-tune a causal LM with LoRA adapters and log the run to Weights & Biases.

    Flow: parse args -> init wandb -> load base model + wrap with LoRA ->
    load tokenizer -> tokenize datasets -> run HF Trainer -> save final weights.
    """
    # parse model / data / training argument groups from the CLI
    model_args, data_args, training_args = train_parser()

    # init wandb
    # NOTE(review): project and run names are hard-coded; consider sourcing
    # them from training_args so separate experiments don't collide.
    wandb.init(project='codellm', name='lxl')

    # load the base model and wrap it with LoRA adapters
    # (only the adapter weights are trained; base weights stay frozen)
    model = AutoModelForCausalLM.from_pretrained(model_args.model_path, trust_remote_code=True)
    model = get_peft_model(model, lora_config)

    # load tokenizer
    tokenizer = AutoTokenizer.from_pretrained(model_args.model_path)
    # BUG FIX: the original unconditionally set pad_token_id = unk_token_id.
    # For tokenizers without an <unk> token (e.g. many Llama-family models)
    # that leaves pad_token_id = None and breaks batch padding; it also
    # clobbered any pad token the tokenizer already defined. Only fill in a
    # pad token when one is missing, preferring <unk> and falling back to EOS.
    if tokenizer.pad_token_id is None:
        tokenizer.pad_token_id = (
            tokenizer.unk_token_id
            if tokenizer.unk_token_id is not None
            else tokenizer.eos_token_id
        )

    # load train/eval datasets, tokenized with the tokenizer configured above
    train_dataset, eval_dataset = load_dataset_tokenizer(data_args, tokenizer)

    # run training
    trainer = Trainer(
        model=model,
        args=training_args,
        train_dataset=train_dataset,
        eval_dataset=eval_dataset,
        tokenizer=tokenizer,
        data_collator=data_collator,
    )
    trainer.train()
    # Persist the final (adapter) weights to training_args.output_dir;
    # without this, only intermediate checkpoints (if enabled) survive the run.
    trainer.save_model()

# Script entry point: run the full fine-tuning pipeline when executed directly.
if __name__ == '__main__':
    main()
