```python
from datasets import load_dataset
from peft import LoraConfig
from trl import SFTTrainer, SFTConfig

# Load the intent-routing dataset from the Hub
dataset = load_dataset("ArchibaldAI/agent-intent-router")

print(f"Train: {len(dataset['train'])} examples")
print(f"Test: {len(dataset['test'])} examples")
print(f"Sample: {dataset['train'][0]}")

# Base model to fine-tune and the Hub repo for the result
model_name = "HuggingFaceTB/SmolLM2-360M-Instruct"
output_name = "ArchibaldAI/agent-intent-router-v1"

# LoRA: train low-rank adapters on the attention projections
peft_config = LoraConfig(
    r=16,
    lora_alpha=32,
    lora_dropout=0.05,
    target_modules=["q_proj", "v_proj", "k_proj", "o_proj"],
    bias="none",
    task_type="CAUSAL_LM",
)

# Training arguments: evaluate and checkpoint every epoch, keep the
# best checkpoint by eval loss, and log metrics to trackio
training_args = SFTConfig(
    output_dir="./intent-router",
    push_to_hub=True,
    hub_model_id=output_name,
    num_train_epochs=5,
    per_device_train_batch_size=8,
    per_device_eval_batch_size=8,
    learning_rate=2e-4,
    warmup_ratio=0.1,
    logging_steps=10,
    eval_strategy="epoch",
    save_strategy="epoch",
    load_best_model_at_end=True,
    metric_for_best_model="eval_loss",
    report_to="trackio",
    run_name="intent-router-v1",
    max_length=256,
    bf16=True,
)

# SFTTrainer accepts a model id string and loads the model itself
trainer = SFTTrainer(
    model=model_name,
    train_dataset=dataset["train"],
    eval_dataset=dataset["test"],
    peft_config=peft_config,
    args=training_args,
)

trainer.train()
trainer.push_to_hub()
print(f"\n✅ Model pushed to {output_name}")
```
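Once the push completes, the adapter can be pulled straight back from the Hub for a quick smoke test. The sketch below is a minimal example, assuming the repo is accessible and that `trainer.push_to_hub()` uploaded the tokenizer alongside the adapter weights; the prompt shown is hypothetical, so swap in a message that matches the dataset's actual chat format.

```python
from peft import AutoPeftModelForCausalLM
from transformers import AutoTokenizer

repo = "ArchibaldAI/agent-intent-router-v1"

# AutoPeftModelForCausalLM reads the base model name from the adapter
# config, loads it, and attaches the LoRA weights on top.
model = AutoPeftModelForCausalLM.from_pretrained(repo)
tokenizer = AutoTokenizer.from_pretrained(repo)

# Hypothetical prompt -- replace with an example in the dataset's format.
messages = [{"role": "user", "content": "Book a table for two tomorrow at 7pm"}]
inputs = tokenizer.apply_chat_template(
    messages, add_generation_prompt=True, return_tensors="pt"
)

# Intent labels are short, so a small generation budget is enough.
outputs = model.generate(inputs, max_new_tokens=32)
print(tokenizer.decode(outputs[0][inputs.shape[-1]:], skip_special_tokens=True))
```

Keeping the adapter separate like this is cheap to load next to the base model; if a single standalone checkpoint is preferred, the PEFT model can be merged into the base weights with `merge_and_unload()` before re-uploading.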