diagonalge committed (verified)
Commit b8513e4 · 1 Parent(s): c7a6bd6

Upload task output test1334test1234test1234test12334

adapter_config.json CHANGED
@@ -24,13 +24,13 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "gate_proj",
-    "down_proj",
-    "o_proj",
-    "q_proj",
     "v_proj",
     "k_proj",
-    "up_proj"
+    "o_proj",
+    "up_proj",
+    "q_proj",
+    "down_proj",
+    "gate_proj"
   ],
   "task_type": "CAUSAL_LM",
   "trainable_token_indices": null,
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:ace4e6b964d4cade918162a3e383897ed3750e133a82daba99e6a46269a117fe
+oid sha256:e3526409e94892ac02aa6bdb0fbcd7b184a9885e66ecd03ade069e1cbe341173
 size 22573704
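adapter_model.safetensors is tracked through Git LFS, so the file in the repository is a small pointer recording the sha256 oid and byte size of the real weights. Only the oid changed here, so the adapter weights differ while the file size is unchanged. A hedged sketch for checking a locally downloaded copy against the pointer; the local path is an assumption.

# Sketch: verify a local adapter_model.safetensors against the LFS pointer above.
import hashlib, os

path = "adapter_model.safetensors"  # assumed local copy of the actual weights file
expected_oid = "e3526409e94892ac02aa6bdb0fbcd7b184a9885e66ecd03ade069e1cbe341173"
expected_size = 22573704

sha = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
        sha.update(chunk)

assert os.path.getsize(path) == expected_size, "size mismatch with LFS pointer"
assert sha.hexdigest() == expected_oid, "sha256 mismatch with LFS pointer"
print("file matches the LFS pointer")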
checkpoint-1/adapter_config.json CHANGED
@@ -24,13 +24,13 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "gate_proj",
-    "down_proj",
-    "o_proj",
-    "q_proj",
     "v_proj",
     "k_proj",
-    "up_proj"
+    "o_proj",
+    "up_proj",
+    "q_proj",
+    "down_proj",
+    "gate_proj"
   ],
   "task_type": "CAUSAL_LM",
   "trainable_token_indices": null,
checkpoint-1/adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:ace4e6b964d4cade918162a3e383897ed3750e133a82daba99e6a46269a117fe
+oid sha256:e3526409e94892ac02aa6bdb0fbcd7b184a9885e66ecd03ade069e1cbe341173
 size 22573704
checkpoint-1/optimizer.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:711bdee872abc49606b6fde70df2dc508857e0526c4e1d28c216366ec2aa870d
+oid sha256:e446a54d672ed4f40e57d0cda2515174e6ae677ec27056343b8bb1f63e4b3faa
 size 11710970
checkpoint-1/trainer_state.json CHANGED
@@ -11,9 +11,9 @@
   "log_history": [
     {
       "epoch": 0.009216589861751152,
-      "grad_norm": 0.8389255404472351,
+      "grad_norm": 0.4905908405780792,
       "learning_rate": 0.0,
-      "loss": 1.9632,
+      "loss": 1.1641,
       "step": 1
     }
   ],
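trainer_state.json holds the Trainer's log_history; only the grad_norm and loss recorded at step 1 differ between the two uploads, consistent with the same single logged step producing different metrics. A short sketch of reading those metrics back out of the checkpoint; the path assumes the checkpoint-1 directory from this commit is available locally.

# Sketch: print the logged metrics from checkpoint-1/trainer_state.json.
import json

with open("checkpoint-1/trainer_state.json") as f:
    state = json.load(f)

for entry in state["log_history"]:
    # Each entry is one logging event emitted by transformers' Trainer.
    print(entry.get("step"), entry.get("loss"), entry.get("grad_norm"))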
checkpoint-1/training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:e6c80cc305d6c92ff876f5b47578d2bc99ffd827716ce421d9239686d0ccfb16
+oid sha256:92831f9e8336983097ea8ddb8f1e144933ef2354f84e5ec0ccef2a44fac8fb98
 size 7224