# Default training configuration for PoXiao-LLM.
# Grouped by concern: basics, training loop, model shape, plugin selection,
# early stopping, data locations, distributed execution, and logging.
DEFAULT_CONFIG = dict(
    # --- Basics ---
    project_name="PoXiao-LLM",
    output_dir="../out",
    seed=1337,

    # --- Training loop ---
    epochs=1,
    batch_size=32,
    learning_rate=5e-4,
    device="cuda:0",
    dtype="bfloat16",
    accumulation_steps=1,
    grad_clip=1.0,
    warmup_iters=0,
    log_interval=100,
    save_interval=1000,

    # --- Model ---
    hidden_size=512,
    num_hidden_layers=8,
    max_seq_len=512,
    use_moe=False,

    # --- Plugins ---
    model_plugin="poxiao",
    dataset_plugin="pretrain",
    loss_plugin="cross_entropy",
    optimizer_plugin="adamw",
    trainer_plugin="pretrain",
    scheduler_plugin="cosine",
    checkpointer_plugin="early_stopping",

    # --- Early stopping ---
    early_stopping_patience=10,
    early_stopping_min_delta=1e-4,

    # --- Data ---
    data_path="../dataset/pretrain_hq.jsonl",
    tokenizer_path="./model/tokenizer_default",

    # --- Distributed training ---
    use_accelerate=False,
    use_torchrun=False,
    num_workers=1,
    self_schedule=True,

    # --- Logging ---
    use_wandb=False,
    use_tensorboard=False,
    tensorboard_log_dir="../tensorboard_logs",
    wandb_project="PoXiao-LLM",
)