BobaZooba committed
Commit ae38f33
1 parent: eb19ee8

Training in progress, step 50

adapter_config.json CHANGED
@@ -16,13 +16,13 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
+    "q_proj",
     "o_proj",
-    "k_proj",
     "up_proj",
-    "v_proj",
-    "q_proj",
     "down_proj",
-    "gate_proj"
+    "gate_proj",
+    "v_proj",
+    "k_proj"
   ],
   "task_type": "CAUSAL_LM"
 }
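The reorder of target_modules is cosmetic: the same seven attention and MLP projections are wrapped before and after, and PEFT treats the list as an unordered collection. Below is a minimal, illustrative sketch of a peft.LoraConfig that would produce this target_modules block; the r, lora_alpha, and lora_dropout values are assumptions (they sit outside the hunk shown above), and only target_modules and task_type are taken from adapter_config.json.

```python
# Illustrative sketch only: reconstructs the updated target_modules list as a
# peft.LoraConfig. r / lora_alpha / lora_dropout are assumed values that do
# not appear in the hunk above; target_modules and task_type are from the diff.
from peft import LoraConfig

lora_config = LoraConfig(
    task_type="CAUSAL_LM",
    r=16,               # assumed rank, not shown in this hunk
    lora_alpha=32,      # assumed scaling, not shown in this hunk
    lora_dropout=0.05,  # assumed dropout, not shown in this hunk
    target_modules=[
        "q_proj",
        "o_proj",
        "up_proj",
        "down_proj",
        "gate_proj",
        "v_proj",
        "k_proj",
    ],
)
print(sorted(lora_config.target_modules))  # ordering does not affect which modules get LoRA
```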
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:c1d26ae4d91ee5406ecdb04754fcb794c49fdbf6bbb41861d8558309f1105dad
+oid sha256:02b49cafa17099fb3f799866f293f74c7421276b1b678b94cf3e64d676ebf640
 size 42002136
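adapter_model.safetensors is tracked with Git LFS, so the diff only shows the pointer: the oid changes because the adapter weights were re-saved at step 50, while the payload size stays at 42,002,136 bytes. A minimal sketch, assuming the file has been pulled locally (the path below is a placeholder), for checking a download against the pointer:

```python
# Minimal sketch: verify a locally downloaded adapter_model.safetensors against
# the Git LFS pointer above. The local path is a placeholder.
import hashlib
import os

EXPECTED_OID = "02b49cafa17099fb3f799866f293f74c7421276b1b678b94cf3e64d676ebf640"
EXPECTED_SIZE = 42002136
path = "adapter_model.safetensors"  # hypothetical local path

h = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # stream in 1 MiB chunks
        h.update(chunk)

assert os.path.getsize(path) == EXPECTED_SIZE, "size mismatch with LFS pointer"
assert h.hexdigest() == EXPECTED_OID, "sha256 mismatch with LFS pointer"
print("adapter_model.safetensors matches the LFS pointer")
```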
runs/Dec06_11-57-16_18bca800423a/events.out.tfevents.1701864004.18bca800423a.4241.0 CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:00992b7756e3cc228180c6072d14a88bb7a02bd5c6454c89dd3c9d65eab8504a
-size 13546
+oid sha256:1ee396a763f977f594520837200e0654995e51e23f59dc294cd6045a4a8bf917
+size 21594
runs/Dec06_12-22-47_18bca800423a/events.out.tfevents.1701865472.18bca800423a.10764.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:927e6ad1177ee61fca06cd2d0387229369ad6361aecb69772d1c69867f5801dc
+size 10774
runs/Dec06_12-29-03_18bca800423a/events.out.tfevents.1701865850.18bca800423a.12555.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:bd89576b4f3aac70223910bdfd8c4598dd8629cc7cce2480a263ce181763918c
+size 13546
runs/Dec06_12-39-56_18bca800423a/events.out.tfevents.1701866502.18bca800423a.15275.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e7e5c327254b37d844ee79ba2e6422d7d2c940cf320e5f4e1fa1d28d640e04cb
+size 13546
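Each entry under runs/ is a TensorBoard event file for one training launch on host 18bca800423a; the earlier run's log grew (13,546 → 21,594 bytes) and three new runs were added. A sketch, assuming the runs/ directory has been downloaded locally, of reading the logged scalars with TensorBoard's event reader; the available tag names depend on the trainer, so they are listed before being read:

```python
# Sketch: inspect one of the event files added above with TensorBoard's reader.
# Tag names vary by trainer, so list them instead of assuming e.g. "train/loss".
from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

path = "runs/Dec06_12-39-56_18bca800423a"  # local directory holding the event file
acc = EventAccumulator(path)
acc.Reload()                                # parse the events.out.tfevents.* file

print(acc.Tags()["scalars"])                # scalar tags written during training
for tag in acc.Tags()["scalars"]:
    events = acc.Scalars(tag)
    print(tag, [(e.step, round(e.value, 4)) for e in events[:5]])  # first few points
```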
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:e3b4ec3761ef4757a95a527eb558426cc30befdddfb2e65ccd9b15755214e1aa
+oid sha256:a7cbf95afd03db05e4a6c60c9e638cc6a5d0f42779b04f46a69a319a2385ecef
 size 6328
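training_args.bin is the serialized TrainingArguments object that the Hugging Face Trainer saves alongside its outputs; its hash changed with the new arguments while the serialized size stayed at 6,328 bytes. A sketch of inspecting a local copy, assuming a compatible transformers install (unpickling uses transformers' own classes) and a torch version that accepts weights_only:

```python
# Sketch: inspect a locally downloaded training_args.bin. It is a pickled
# transformers.TrainingArguments, so a compatible transformers version must be
# installed; weights_only=False is needed on recent torch releases.
import torch

args = torch.load("training_args.bin", weights_only=False)  # hypothetical local path
print(type(args).__name__)                                   # typically "TrainingArguments"
print(args.max_steps, args.learning_rate, args.warmup_steps)
```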
training_config.json CHANGED
@@ -71,7 +71,7 @@
   "eval_delay": 0,
   "eval_steps": 1000,
   "warmup_steps": 5,
-  "max_steps": 100,
+  "max_steps": 101,
   "num_train_epochs": 1,
   "learning_rate": 0.0002,
   "max_grad_norm": 1.0,