diabolic6045 committed
Commit 32e4f7a
1 Parent(s): de67569

Training in progress, epoch 0

adapter_config.json CHANGED
@@ -21,12 +21,12 @@
   "revision": null,
   "target_modules": [
     "down_proj",
+    "k_proj",
     "gate_proj",
-    "v_proj",
     "q_proj",
     "o_proj",
-    "up_proj",
-    "k_proj"
+    "v_proj",
+    "up_proj"
   ],
   "task_type": "CAUSAL_LM",
   "use_dora": false,
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:3c1f0e0b19d0ff50e96dc1a092984ec8dbe2d8cd6483e8f5075e3451cd8af0d4
+oid sha256:37546798c6de5b88257155289078634b88697fcdf3603331ebf28aae4390f071
 size 167832688
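
Both versions of adapter_model.safetensors are Git LFS pointer files: the repository stores only a sha256 oid and the byte size, while the actual weights live in LFS storage. The unchanged size (167832688 bytes) with a new oid is what you would expect from re-saving a same-shaped adapter after further training. A quick local sanity check is to recompute the oid from the downloaded file; a minimal sketch, where the local file path is an assumption:

    import hashlib

    def lfs_oid(path: str, chunk_size: int = 1 << 20) -> str:
        """Compute the sha256 digest that Git LFS records as the pointer oid."""
        h = hashlib.sha256()
        with open(path, "rb") as f:
            while chunk := f.read(chunk_size):
                h.update(chunk)
        return h.hexdigest()

    # Should match the "+" oid in the pointer diff above.
    assert lfs_oid("adapter_model.safetensors") == (
        "37546798c6de5b88257155289078634b88697fcdf3603331ebf28aae4390f071"
    )
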
config.json CHANGED
@@ -38,7 +38,7 @@
   "rope_theta": 500000.0,
   "tie_word_embeddings": false,
   "torch_dtype": "bfloat16",
-  "transformers_version": "4.41.1",
+  "transformers_version": "4.42.3",
   "use_cache": false,
   "vocab_size": 128256
 }
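
The only change to config.json is the transformers_version stamp, which transformers writes into the config when a checkpoint is saved; it records the library version used for the run rather than declaring a requirement. A minimal sketch of reading it back, with an illustrative checkpoint path:

    from transformers import AutoConfig

    config = AutoConfig.from_pretrained("path/to/checkpoint")
    print(config.transformers_version)  # "4.42.3" after this commit
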
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:e2c381254771883cfaa51f927e708607d8adf333b7fb2a263b66e63152dd7941
-size 7288
+oid sha256:137d77cebbd6fdab4a2e8710d8cdcfe77215de72c36740d1ddf02839f0476616
+size 7416
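
training_args.bin is the Trainer's TrainingArguments object serialized with torch.save, so a transformers upgrade (4.41.1 to 4.42.3 per config.json) can change its size even when no argument was edited, which matches the 7288-to-7416-byte change here. A minimal sketch of inspecting it; the path is illustrative, and loading requires a transformers version with compatible class definitions:

    import torch

    # training_args.bin is a pickled TrainingArguments, not plain tensors,
    # so weights_only must be False.
    args = torch.load("training_args.bin", weights_only=False)
    print(type(args).__name__)    # e.g. "TrainingArguments"
    print(args.num_train_epochs)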