q1e123 committed
Commit 9ac0a6e
Parent: fe3e5ea

Training in progress, step 2

adapter_config.json CHANGED
@@ -20,30 +20,30 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "transformer.h.5.mlp.c_fc",
-    "transformer.h.3.attn.c_proj",
+    "transformer.h.5.attn.c_proj",
     "transformer.h.1.mlp.c_proj",
+    "transformer.h.4.mlp.c_proj",
+    "transformer.h.1.attn.c_proj",
     "transformer.h.1.mlp.c_fc",
+    "transformer.h.4.attn.c_proj",
+    "transformer.h.5.attn.c_attn",
+    "transformer.h.3.attn.c_attn",
+    "transformer.h.4.attn.c_attn",
     "transformer.h.3.mlp.c_proj",
+    "transformer.h.2.attn.c_proj",
+    "transformer.h.2.attn.c_attn",
+    "transformer.h.0.mlp.c_fc",
+    "transformer.h.1.attn.c_attn",
+    "transformer.h.3.attn.c_proj",
     "transformer.h.0.mlp.c_proj",
-    "transformer.h.0.attn.c_proj",
     "transformer.h.2.mlp.c_proj",
-    "transformer.h.5.mlp.c_proj",
-    "transformer.h.4.attn.c_attn",
-    "transformer.h.4.mlp.c_proj",
-    "transformer.h.5.attn.c_proj",
-    "transformer.h.1.attn.c_attn",
+    "transformer.h.0.attn.c_attn",
+    "transformer.h.5.mlp.c_fc",
     "transformer.h.4.mlp.c_fc",
-    "transformer.h.2.attn.c_proj",
-    "transformer.h.3.attn.c_attn",
-    "transformer.h.1.attn.c_proj",
+    "transformer.h.0.attn.c_proj",
     "transformer.h.2.mlp.c_fc",
-    "transformer.h.5.attn.c_attn",
-    "transformer.h.2.attn.c_attn",
-    "transformer.h.0.mlp.c_fc",
-    "transformer.h.4.attn.c_proj",
-    "transformer.h.3.mlp.c_fc",
-    "transformer.h.0.attn.c_attn"
+    "transformer.h.5.mlp.c_proj",
+    "transformer.h.3.mlp.c_fc"
   ],
   "task_type": "CAUSAL_LM",
   "use_dora": false,
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:dab480c28c6f49f08beaad63095a709a6e07e5da50d4d81b846a9a45f479a4b8
+oid sha256:67e522c4a3a39a1bec1aadcfea458fe7bfc03eb134a2b19a8574c723b90dfb45
 size 2365304
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:ae2e43909ea79d9db3c92450b3cc0cd33dcdd0670942758f8e091b8aa257183f
+oid sha256:ea095fcffbc7bde3a287b3e2f56e06c7406ce58e00023096431dbce94ad542ba
 size 5112
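
Both binary files are tracked with Git LFS, so the diffs above touch only the three-line pointer files: the sha256 oid changes with the new checkpoint contents, while the byte sizes (2365304 and 5112) stay constant. A minimal sketch, using hypothetical local paths rather than anything from this commit, of how a pointer's oid and size lines correspond to the actual blob:

import hashlib
from pathlib import Path

def lfs_pointer_fields(pointer_text: str) -> dict:
    """Parse the key/value lines of a Git LFS pointer file."""
    fields = {}
    for line in pointer_text.strip().splitlines():
        key, _, value = line.partition(" ")
        fields[key] = value
    return fields

def matches_pointer(blob_path: str, pointer_text: str) -> bool:
    """True if the blob's sha256 and size match the pointer's oid/size."""
    data = Path(blob_path).read_bytes()
    fields = lfs_pointer_fields(pointer_text)
    oid = fields["oid"].removeprefix("sha256:")
    return hashlib.sha256(data).hexdigest() == oid and len(data) == int(fields["size"])

# e.g. matches_pointer("adapter_model.safetensors", Path("pointer.txt").read_text())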