from transformers import TrainingArguments, HfArgumentParser
from dataclasses import field, dataclass
from peft import LoraConfig, TaskType

# LoRA adapter configuration: rank-8 decomposition injected into the
# attention query/value projections, for causal-LM fine-tuning.
lora_config = LoraConfig(
    task_type=TaskType.CAUSAL_LM,
    target_modules=["q_proj", "v_proj"],
    r=8,               # low-rank dimension of the adapter matrices
    lora_alpha=16,     # scaling factor (effective scale = alpha / r = 2.0)
    lora_dropout=0.05,
)

@dataclass
class BaseArguments:
    """Base class that makes every annotated field of a subclass optional.

    ``__init_subclass__`` runs when the subclass *body* is created — i.e.
    before the subclass's own ``@dataclass`` decorator processes it — so any
    annotated field declared without a default gets an implicit
    ``field(default=None)`` that the decorator then picks up like a normal
    default. Subclasses typically pair this with a ``__post_init__`` that
    backfills ``None`` values with real fallbacks.
    """

    def __init_subclass__(cls, **kwargs):
        # Keep cooperative subclassing intact (the original dropped this call).
        super().__init_subclass__(**kwargs)
        # Look only at annotations declared directly on this subclass:
        # plain `cls.__annotations__` can walk the MRO and would re-inject
        # defaults for fields already handled on a parent class.
        for name in cls.__dict__.get("__annotations__", {}):
            if not hasattr(cls, name):
                setattr(cls, name, field(default=None))

@dataclass
class DataArguments(BaseArguments):
    """Dataset-related CLI arguments.

    Every field is declared with a ``None`` default so the CLI parser treats
    it as optional; ``__post_init__`` then backfills anything still ``None``
    from the ``_defaults`` table below.
    """

    train_dataset_path: str = None
    eval_dataset_path: str = None
    eval_size: int = None
    max_length: int = None
    num_data_proc: int = None
    skip_eos_token: bool = None

    # Fallback values used for any field the caller left unset.
    # (No annotation on purpose: this must NOT become a dataclass field.)
    _defaults = {
        'train_dataset_path': "your_test.parquet",
        'eval_dataset_path': "your_eval.parquet",
        'eval_size': 256,
        'max_length': 512,
        'num_data_proc': 16,
        'skip_eos_token': False
    }

    def __post_init__(self):
        """Replace every still-``None`` field with its tabled fallback."""
        for name, fallback in self._defaults.items():
            current = getattr(self, name)
            if current is None:
                setattr(self, name, fallback)

@dataclass
class ModelArguments(BaseArguments):
    """Model-related CLI arguments."""

    # Filesystem path (or hub identifier) of the base model to load.
    model_path: str = "your_model_path"

@dataclass
class MyTrainingArguments(TrainingArguments):
    """TrainingArguments subclass that backfills project defaults after parsing.

    Any field whose parsed value is still ``None`` is overwritten with the
    value from the tables below in ``__post_init__``.
    """

    # Core training hyperparameter fallbacks (applied only when the parsed
    # value is None — see NOTE(review) in __post_init__).
    _training_defaults = {
        'run_name': "codefill",
        'output_dir': "model_param/",
        'per_device_train_batch_size': 4,
        'per_device_eval_batch_size': 4,
        'num_train_epochs': 20,
        'weight_decay': 0,
        'learning_rate': 1e-7,
        'lr_scheduler_type': "cosine",
        'warmup_ratio': 0.1
    }
    
    # Evaluation / logging / checkpointing cadence fallbacks.
    _strategy_defaults = {
        'eval_strategy': "steps",
        'eval_steps': 100,
        'logging_strategy': "steps",
        'logging_steps': 1,
        'save_strategy': "steps",
        'save_steps': 100
    }
    
    def __post_init__(self):
        # Let TrainingArguments run its own default-resolution and validation
        # first; our overrides are applied on top of the finished object.
        super().__post_init__()
        # NOTE(review): these fallbacks fire only for fields whose value is
        # None after parsing. Many TrainingArguments fields have non-None
        # library defaults (e.g. per_device_train_batch_size, num_train_epochs,
        # bf16), in which case the corresponding entries here would never
        # apply — verify against the installed transformers version.
        # NOTE(review): assigning eval/save/logging strategy *strings* after
        # super().__post_init__() bypasses the enum conversion/validation that
        # normally happens there — confirm downstream consumers accept raw
        # strings for these fields.
        for defaults in (self._training_defaults, self._strategy_defaults):
            for k, v in defaults.items():
                if getattr(self, k) is None:
                    setattr(self, k, v)
        
        # Same is-None caveat as above: in current transformers these four
        # fields default to False/None rather than None across the board, so
        # some of these branches may be dead — TODO confirm.
        if self.load_best_model_at_end is None:
            self.load_best_model_at_end = True
        if self.save_total_limit is None:
            self.save_total_limit = 10
        if self.save_only_model is None:
            self.save_only_model = True
        if self.bf16 is None:
            self.bf16 = True

def train_parser():
    """Parse the command line into the three argument dataclasses.

    Returns:
        tuple: ``(ModelArguments, DataArguments, MyTrainingArguments)``
        instances populated from ``sys.argv``.
    """
    parser = HfArgumentParser((ModelArguments, DataArguments, MyTrainingArguments))
    # parse_args_into_dataclasses() already returns exactly one instance per
    # dataclass, in order, so the tuple can be returned as-is.
    return parser.parse_args_into_dataclasses()