Upload LlamaForCausalLM
Files changed:
- adapter_config.json +2 -2
- adapter_model.safetensors +1 -1
- generation_config.json +1 -1
adapter_config.json
CHANGED
@@ -21,8 +21,8 @@
     "revision": null,
     "target_modules": [
         "k_proj",
-        "
-        "
+        "q_proj",
+        "v_proj"
     ],
     "task_type": "CAUSAL_LM",
     "use_dora": false,
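For reference, the updated target_modules list applies LoRA to the key, query, and value attention projections. A minimal sketch of loading the adapter with PEFT follows; the base-model and adapter repo ids are placeholders, since the commit does not name them:

```python
# Hedged sketch: load this LoRA adapter on top of its base model.
# Both model ids below are placeholders (not taken from this commit).
from transformers import AutoModelForCausalLM
from peft import PeftModel

base = AutoModelForCausalLM.from_pretrained("meta-llama/Llama-2-7b-hf")
model = PeftModel.from_pretrained(base, "user/adapter-repo")

# After this change, LoRA weights are injected into the k_proj, q_proj,
# and v_proj projections listed in target_modules.
```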
adapter_model.safetensors
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:51db8ac20b4f3d3a08a3fe3c027eed16eef4c9414cf6beeb6a53563ebd9ef88f
 size 75523312
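The weights file is stored via Git LFS, so the repository holds only the pointer above (the actual file is about 75 MB). A hedged sketch of fetching the resolved weights with huggingface_hub; the repo id is a placeholder:

```python
# Hedged sketch: download the adapter weights behind the LFS pointer.
# The repo id is a placeholder.
from huggingface_hub import hf_hub_download

path = hf_hub_download(repo_id="user/adapter-repo",
                       filename="adapter_model.safetensors")
print(path)  # local cache path of the downloaded safetensors file
```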
generation_config.json
CHANGED
@@ -8,5 +8,5 @@
     "max_length": 4096,
     "temperature": 0.6,
     "top_p": 0.9,
-    "transformers_version": "4.
+    "transformers_version": "4.41.0"
 }
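These values map directly onto transformers generation arguments. A hedged usage sketch (the repo id is a placeholder; generate() picks the values up from generation_config.json automatically, but they are passed explicitly here for clarity):

```python
# Hedged sketch: apply the generation_config.json settings at inference time.
# The repo id is a placeholder.
from transformers import AutoModelForCausalLM, AutoTokenizer

tok = AutoTokenizer.from_pretrained("user/adapter-repo")
model = AutoModelForCausalLM.from_pretrained("user/adapter-repo")

inputs = tok("Hello, world", return_tensors="pt")
out = model.generate(
    **inputs,
    do_sample=True,   # temperature/top_p only take effect when sampling
    max_length=4096,  # from generation_config.json
    temperature=0.6,
    top_p=0.9,
)
print(tok.decode(out[0], skip_special_tokens=True))
```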