from dataclasses import dataclass, field
from typing import List, Dict
import json


@dataclass
class TrainingArgs:
    """Configuration container for a training run.

    Holds model/optimizer hyperparameters plus PyTorch Lightning trainer
    settings. Fields marked ``init=False`` are excluded from ``__init__``
    and carry no defaults; they are presumably populated by the training
    script before use — accessing them earlier raises ``AttributeError``
    (TODO: confirm against the caller).
    """

    # --- checkpoint / logging / data ---
    load_model: str = ""           # path to a checkpoint to resume from ("" = train from scratch)
    swanlab: str = ""              # SwanLab experiment-tracking identifier ("" = disabled)
    proj_dir: str = "out"          # output directory for checkpoints and logs
    random_seed: int = -1          # -1 presumably means "don't seed" — confirm in caller
    data_file: str = ""            # training data path
    data_type: str = "utf-8"       # data encoding / loader type
    vocab_size: int = 0            # 0 presumably means "infer from data" — confirm in caller

    # --- training schedule ---
    ctx_len: int = 1024            # context (sequence) length
    epoch_steps: int = 1000        # optimizer steps per "epoch" unit
    epoch_count: int = 2
    epoch_begin: int = 0           # starting epoch index (for resumed runs)
    # NOTE: the effective default is -1; the original file declared this field
    # twice (500 here, -1 in the Lightning section) and the later -1 won.
    # In Lightning, max_epochs=-1 means "no epoch limit".
    max_epochs: int = -1
    epoch_save: int = 5            # save a checkpoint every N epochs
    micro_bsz: int = 12            # per-device micro batch size

    # --- model architecture ---
    n_layer: int = 6
    n_embd: int = 512
    dim_att: int = 0               # 0 presumably means "derive from n_embd" — confirm in caller
    dim_ffn: int = 0               # 0 presumably means "derive from n_embd" — confirm in caller

    # --- optimizer / learning rate ---
    lr_init: float = 6e-4
    lr_final: float = 1e-5
    lr_schedule: str = "cos"       # learning-rate decay shape
    warmup_steps: int = -1         # -1 presumably disables/auto-computes warmup — confirm in caller
    adam_eps: float = 1e-8
    grad_cp: bool = False          # gradient checkpointing (trade compute for memory)
    weight_decay: float = 0
    weight_decay_final: float = -1 # -1 presumably means "no final-decay schedule" — confirm in caller
    layerwise_lr: int = 1          # per-layer learning-rate scaling toggle
    head_size_a: int = 64
    head_size_divisor: int = 8
    my_exit: int = 99999999        # step/epoch at which to force-exit training
    data_shuffle: bool = False
    avg_loss: int = 0

    # --- PyTorch Lightning trainer args ---
    accelerator: str = "gpu"
    strategy: str = "auto"
    devices: int = 1
    num_nodes: int = 1
    precision: str = "fp16"
    accumulate_grad_batches: int = 1

    # --- trainer behavior ---
    my_timestamp: str = field(init=False)   # run timestamp, set by the training script
    enable_checkpointing: bool = False      # Lightning's own checkpointing disabled (custom saving used)
    gradient_clip_val: float = 1.0
    num_sanity_val_steps: int = 0
    check_val_every_n_epoch: int = int(1e20)  # effectively never validate
    log_every_n_steps: int = int(1e20)        # effectively never log via Lightning
    betas: tuple = field(init=False)        # Adam (beta1, beta2), set by the training script
    real_bsz: int = field(init=False)       # effective batch size = micro_bsz * devices * accumulation
    run_name: str = field(init=False)       # display name for the run, set by the training script
