taewan2002 committed
Commit e0f8956
1 Parent(s): 414dc52

Upload 3 files

Files changed (3)
  1. adapter_config.json +3 -3
  2. rng_state.pth +3 -0
  3. training_args.bin +3 -0
adapter_config.json CHANGED
@@ -19,13 +19,13 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "q_proj",
-    "lm_head",
     "gate_proj",
     "k_proj",
+    "up_proj",
+    "lm_head",
     "o_proj",
+    "q_proj",
     "down_proj",
-    "up_proj",
     "v_proj"
   ],
   "task_type": "CAUSAL_LM",
rng_state.pth ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:084446e4418929fb40f745000d39077603652db279531f3a8bd3495a51396804
+size 14244
training_args.bin ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5c471039313c0e84d8eb8cab6bbf0db1ec40d2fc3c81921b0088666e46cd84c2
+size 4920
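Both new files are Git LFS pointer files rather than the binaries themselves: the repository records only the pointer spec version, the sha256 oid of the object, and its size in bytes. A minimal sketch (Python standard library only; the local file path is an assumption) of checking a fetched object against its pointer:

# A minimal sketch: verify that a downloaded LFS object matches the
# oid and size recorded in its pointer file. The local path is an
# assumption; oid and size are taken from the pointer above.
import hashlib
import os

def verify_lfs_object(path, expected_oid, expected_size):
    # The pointer's `size` field is the object's length in bytes.
    if os.path.getsize(path) != expected_size:
        return False
    # The `oid sha256:...` field is the SHA-256 digest of the content.
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            h.update(chunk)
    return h.hexdigest() == expected_oid

# Values taken from the training_args.bin pointer in this commit.
print(verify_lfs_object(
    "training_args.bin",
    "5c471039313c0e84d8eb8cab6bbf0db1ec40d2fc3c81921b0088666e46cd84c2",
    4920,
))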