error577 committed on
Commit dac8a81 · verified · 1 Parent(s): fef980c

Training in progress, step 250

adapter_config.json CHANGED
@@ -20,13 +20,13 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "k_proj",
-    "o_proj",
-    "up_proj",
     "v_proj",
+    "o_proj",
     "gate_proj",
-    "down_proj",
-    "q_proj"
+    "k_proj",
+    "q_proj",
+    "up_proj",
+    "down_proj"
   ],
   "task_type": "CAUSAL_LM",
   "use_dora": false,
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:cabc45bc5aabf5322e1457c48a8249842187fe818ccb132b79bde3bb088086f7
+oid sha256:2df5ad8f357b1110470257ee2447f8d59de6cd9afce0a6c15e47f153dada020c
 size 97728
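This entry (like training_args.bin below) is a Git LFS pointer file: version, sha256 oid, and size. Only the oid changes here; the payload size stays 97728 bytes. As a hedged sketch, a downloaded binary can be checked against the pointer's oid like this (the file path refers to the actual payload, not the pointer text shown in the diff):

# Sketch: verify a downloaded LFS object against the sha256 oid in its pointer.
import hashlib

def sha256_of(path: str) -> str:
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            h.update(chunk)
    return h.hexdigest()

expected = "2df5ad8f357b1110470257ee2447f8d59de6cd9afce0a6c15e47f153dada020c"
assert sha256_of("adapter_model.safetensors") == expected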
config.json CHANGED
@@ -13,7 +13,7 @@
   "hidden_size": 16,
   "initializer_range": 0.02,
   "intermediate_size": 64,
-  "max_position_embeddings": 4096,
+  "max_position_embeddings": 8192,
   "mlp_bias": false,
   "model_type": "llama",
   "num_attention_heads": 4,
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:4366489b38b3c2792811c00b5d117d6d4e235e0d37a068173ff8d5b1642069b3
+oid sha256:269943fdbb6079fe98aea09b1ba0ad9effab69ac6b899d73428616ee9d738342
 size 6776