lemonilia committed
Commit: 55b4700
Parent: 2e8cdc5

Upload 4 files

.gitattributes CHANGED
@@ -43,3 +43,4 @@ ShoriRP.v068.q6_k.gguf filter=lfs diff=lfs merge=lfs -text
 ShoriRP.v070.q6_k.gguf filter=lfs diff=lfs merge=lfs -text
 ShoriRP.v072.q6_k.gguf filter=lfs diff=lfs merge=lfs -text
 ShoriRP.v075.q6_k.gguf filter=lfs diff=lfs merge=lfs -text
+ShoriRP.v075d.q6_k.gguf filter=lfs diff=lfs merge=lfs -text
ShoriRP.v075d.q6_k.gguf ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b7b04a69ec145eacdaacaba9e5cf0d322d51cfdaab5be39319c45829b3b267df
+size 5942065440
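
The new GGUF file is checked in as a Git LFS pointer rather than as the binary itself; the three lines above are the entire pointer. A minimal Python sketch of reading such a pointer back, assuming the checkout still holds the pointer text (not the smudged 5.9 GB binary); the helper name is ours:

def parse_lfs_pointer(path: str) -> dict:
    # Split each "key value" line of the pointer shown above:
    #   version https://git-lfs.github.com/spec/v1
    #   oid sha256:<hex digest>
    #   size <bytes>
    fields = {}
    with open(path, encoding="utf-8") as f:
        for line in f:
            key, _, value = line.strip().partition(" ")
            if key:
                fields[key] = value
    return fields

pointer = parse_lfs_pointer("ShoriRP.v075d.q6_k.gguf")
algo, _, digest = pointer["oid"].partition(":")
print(algo, digest, int(pointer["size"]))  # sha256 b7b04a69... 5942065440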
adapter_config.json CHANGED
@@ -19,14 +19,15 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "gate_proj",
     "down_proj",
-    "o_proj",
-    "up_proj",
+    "gate_proj",
     "q_proj",
+    "k_proj",
+    "o_proj",
     "v_proj",
-    "k_proj"
+    "up_proj"
   ],
   "task_type": "CAUSAL_LM",
+  "use_dora": false,
   "use_rslora": false
 }
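
The hunk reorders target_modules and serializes a new use_dora flag alongside use_rslora. These fields map one-to-one onto keyword arguments of peft's LoraConfig; a minimal sketch of the post-change configuration, assuming a peft release recent enough to accept use_dora (the r and lora_alpha values are not part of this hunk and are placeholders):

from peft import LoraConfig

config = LoraConfig(
    r=16,           # placeholder: rank is not shown in this hunk
    lora_alpha=16,  # placeholder: alpha is not shown in this hunk
    task_type="CAUSAL_LM",
    target_modules=[
        "down_proj", "gate_proj", "q_proj",
        "k_proj", "o_proj", "v_proj", "up_proj",
    ],
    use_dora=False,    # newly serialized by this commit
    use_rslora=False,
)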
adapter_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:00a104f50ece8cab948692a7708ecc9bf220908379bb4df6a6971b60d66640ab
+oid sha256:cf1cae9b58bb020649d3026bf90241e56d936b61a45dee99901ae8cfaad6f713
 size 209876621
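
Only the oid changes here (updated weights), while the declared size stays identical at 209876621 bytes. A short sketch of verifying a downloaded adapter_model.bin against the new pointer, using only the digest and size shown above; the local path is an assumption:

import hashlib

EXPECTED_OID = "cf1cae9b58bb020649d3026bf90241e56d936b61a45dee99901ae8cfaad6f713"
EXPECTED_SIZE = 209876621

h = hashlib.sha256()
size = 0
with open("adapter_model.bin", "rb") as f:            # assumed local download
    for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
        h.update(chunk)
        size += len(chunk)

assert size == EXPECTED_SIZE, f"size mismatch: {size}"
assert h.hexdigest() == EXPECTED_OID, "sha256 mismatch"
print("adapter_model.bin matches the new LFS pointer")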
config.json CHANGED
@@ -34,7 +34,7 @@
   "sliding_window": null,
   "tie_word_embeddings": false,
   "torch_dtype": "bfloat16",
-  "transformers_version": "4.38.0.dev0",
+  "transformers_version": "4.38.2",
   "use_cache": false,
   "vocab_size": 32000
 }
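
The only change is the transformers_version stamp, bumped from a dev build to the 4.38.2 release. This field is informational metadata written by save_pretrained, not a load-time requirement; a quick sketch of reading it back, assuming a local checkout of the repo:

from transformers import AutoConfig

cfg = AutoConfig.from_pretrained(".")  # path to a local checkout (assumption)
print(cfg.transformers_version)        # "4.38.2" after this commit
print(cfg.vocab_size)                  # 32000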