paulo037 committed on
Commit
fe53674
1 Parent(s): d79ea4d

Training in progress, step 9000

Browse files
adapter_config.json CHANGED
@@ -22,13 +22,13 @@
22
  "rank_pattern": {},
23
  "revision": null,
24
  "target_modules": [
 
25
  "down_proj",
26
- "q_proj",
 
27
  "k_proj",
28
  "gate_proj",
29
- "o_proj",
30
- "up_proj",
31
- "v_proj"
32
  ],
33
  "task_type": "CAUSAL_LM",
34
  "use_dora": false,
 
22
  "rank_pattern": {},
23
  "revision": null,
24
  "target_modules": [
25
+ "up_proj",
26
  "down_proj",
27
+ "o_proj",
28
+ "v_proj",
29
  "k_proj",
30
  "gate_proj",
31
+ "q_proj"
 
 
32
  ],
33
  "task_type": "CAUSAL_LM",
34
  "use_dora": false,
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:2be9c9c5838cb9c1923614236f9cde396586e2523751d9d7fac5bfa2c290fccf
3
  size 2460946960
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:de6d2e9efbcba2d0070d50a726b5421c74f777257623b333867185d54de251a6
3
  size 2460946960
tokenizer.json CHANGED
@@ -499,7 +499,6 @@
499
  "end_of_word_suffix": null,
500
  "fuse_unk": false,
501
  "byte_fallback": false,
502
- "ignore_merges": false,
503
  "vocab": {
504
  "<|endoftext|>": 0,
505
  "<|padding|>": 1,
 
499
  "end_of_word_suffix": null,
500
  "fuse_unk": false,
501
  "byte_fallback": false,
 
502
  "vocab": {
503
  "<|endoftext|>": 0,
504
  "<|padding|>": 1,
training_args.bin CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:3aaf48c3d05ac82612cb05ed0786d59b75a54fe31f553fe38b8cc54f2347e897
3
- size 5048
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:90541c3584b21931f20b7d44745efd12253bc6c59be244f5e3a02e61ee3dd381
3
+ size 4984