import os, torch, json
from datasets import load_dataset
from unsloth import FastLanguageModel, is_bfloat16_supported
from trl import SFTTrainer
from transformers import TrainingArguments

model_path = "unsloth/DeepSeek-R1-Distill-Qwen-8B"  # the 14B variant works the same way
data_path  = "data/coldstart.jsonl"  # JSONL corpus; each record must carry a "text" field (read by fmt below)
out_dir    = "ckpt/r1-sft1"          # checkpoints during training and the final merged model are written here

# Load the base model quantized to 4-bit via Unsloth's fast loader.
# dtype=None lets Unsloth choose the compute dtype for the current GPU
# (it pairs with the fp16/bf16 switch in TrainingArguments below).
model, tokenizer = FastLanguageModel.from_pretrained(
    model_name=model_path,
    max_seq_length=2048,
    load_in_4bit=True,
    dtype=None,
)
# Attach LoRA adapters (rank 64, alpha 128) to every linear layer;
# "unsloth" gradient checkpointing trades recompute for activation memory.
model = FastLanguageModel.get_peft_model(
    model, r=64, lora_alpha=128, target_modules="all-linear",
    use_gradient_checkpointing="unsloth",
)

# Load the cold-start corpus: one JSONL record per training example.
ds = load_dataset("json", data_files=data_path, split="train")
def fmt(ex, eos=None):
    """Ensure an example's "text" field ends with exactly one EOS token.

    Fix over the original: appending is idempotent — records whose text
    already ends with the EOS token are returned unchanged, so
    pre-terminated data (or re-running the map) cannot produce a doubled
    EOS in the training text.

    Args:
        ex: one dataset row, a mapping with a "text" string.
        eos: EOS token to append; defaults to the loaded tokenizer's
            ``eos_token`` (backward-compatible with the old signature).

    Returns:
        ``{"text": ...}`` with the text terminated by a single EOS token.
    """
    if eos is None:
        eos = tokenizer.eos_token
    text = ex["text"]
    return {"text": text if text.endswith(eos) else text + eos}
# Apply the EOS-appending formatter to every row.
ds = ds.map(fmt)

# Training hyperparameters: effective batch size 1 * 16 = 16 via gradient
# accumulation, one epoch at LR 2e-4 with the 8-bit AdamW optimizer.
# bf16 is preferred on hardware that supports it, otherwise fall back to fp16.
training_args = TrainingArguments(
    output_dir=out_dir,
    per_device_train_batch_size=1,
    gradient_accumulation_steps=16,
    num_train_epochs=1,
    learning_rate=2e-4,
    bf16=is_bfloat16_supported(),
    fp16=not is_bfloat16_supported(),
    optim="adamw_8bit",
    logging_steps=5,
    save_steps=100,
    save_total_limit=2,
)

# Supervised fine-tuning over the "text" column of the formatted dataset.
trainer = SFTTrainer(
    model=model,
    tokenizer=tokenizer,
    train_dataset=ds,
    dataset_text_field="text",
    args=training_args,
)
trainer.train()  # run the single-epoch SFT pass configured above
# Unsloth helper: merges the LoRA adapters back into the base weights and
# saves model + tokenizer to out_dir.
# NOTE(review): the default save method is merged 16-bit — confirm that is
# what downstream loading expects.
model.save_pretrained_merged(out_dir, tokenizer)
