{
  "vocab_size": 2048,
  "d_model": 256,
  "n_heads": 8,
  "n_layers": 6,
  "n_shared_layers": 2,
  "d_ff": 512,
  "max_seq_len": 192,
  "dropout": 0.05,
  "lora_rank": 16,
  "use_thinking_tokens": true,
  "n_thinking_steps": 2,
  "rope_base": 10000,
  "weight_tying": true
}