fatihay committed on
Commit
8f698ed
1 Parent(s): f3f03a8

Delete config.json

Files changed (1)
  1. config.json +0 -34
config.json DELETED
@@ -1,34 +0,0 @@
- {
-     "model_type": "llama",
-     "architectures": [
-         "LLaMAForCausalLM"
-     ],
-     "max_seq_length": 2048,
-     "load_in_4bit": true,
-     "peft_model_config": {
-         "r": 16,
-         "target_modules": ["q_proj", "k_proj", "v_proj", "o_proj", "gate_proj", "up_proj", "down_proj"],
-         "lora_alpha": 16,
-         "lora_dropout": 0,
-         "bias": "none",
-         "use_gradient_checkpointing": "unsloth",
-         "random_state": 3407,
-         "use_rslora": false,
-         "loftq_config": null
-     },
-     "tokenizer_config": {
-         "eos_token": "<|endoftext|>",
-         "pad_token": "<|pad|>",
-         "bos_token": "<|startoftext|>",
-         "unk_token": "<|unk|>"
-     },
-     "vocab_size": 50257,
-     "hidden_size": 4096,
-     "num_attention_heads": 16,
-     "num_hidden_layers": 24,
-     "intermediate_size": 16384,
-     "hidden_act": "gelu",
-     "initializer_range": 0.02,
-     "layer_norm_eps": 1e-12,
-     "use_cache": true
- }
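
For reference, the deleted "peft_model_config" block mirrors the keyword arguments of unsloth's FastLanguageModel.get_peft_model, and "max_seq_length" / "load_in_4bit" match its from_pretrained loader. A minimal sketch of how these fields would typically be wired up is shown below; the base checkpoint name is a placeholder assumption, since this commit does not record which weights were actually loaded.

# Minimal sketch, assuming an unsloth-based fine-tuning setup.
from unsloth import FastLanguageModel

# "max_seq_length" and "load_in_4bit" come from the deleted config above.
model, tokenizer = FastLanguageModel.from_pretrained(
    model_name="unsloth/llama-3-8b-bnb-4bit",  # placeholder assumption, not stated in this repo
    max_seq_length=2048,
    load_in_4bit=True,
)

# "peft_model_config" maps directly onto the LoRA arguments of get_peft_model.
model = FastLanguageModel.get_peft_model(
    model,
    r=16,
    target_modules=["q_proj", "k_proj", "v_proj", "o_proj",
                    "gate_proj", "up_proj", "down_proj"],
    lora_alpha=16,
    lora_dropout=0,
    bias="none",
    use_gradient_checkpointing="unsloth",
    random_state=3407,
    use_rslora=False,
    loftq_config=None,
)

The remaining fields (vocab_size, hidden_size, num_attention_heads, num_hidden_layers, intermediate_size, etc.) describe the transformer architecture itself and would normally be supplied by the base checkpoint's own config rather than set by hand.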