Upload model

- layers_0_43/adapter_config.json +2 -2
- layers_0_43/adapter_model.safetensors +1 -1
- layers_44/adapter_config.json +2 -2
- layers_44/adapter_model.safetensors +1 -1
- layers_45/adapter_config.json +2 -2
- layers_45/adapter_model.safetensors +1 -1
- layers_46_47/adapter_config.json +2 -2
- layers_46_47/adapter_model.safetensors +1 -1
layers_0_43/adapter_config.json
CHANGED
@@ -74,8 +74,8 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-  "
-  "
+  "c_attn",
+  "c_proj"
   ],
   "task_type": "CAUSAL_LM",
   "use_dora": false,
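
In each adapter_config.json, this commit sets "target_modules" to "c_attn" and "c_proj", the fused attention and output projection layers used in GPT-2-style transformer blocks, which suggests the adapters were produced with Hugging Face PEFT's LoRA. Below is a minimal sketch of a LoRA config targeting the same modules; the use of PEFT here is an inference from the file layout, and the rank and alpha values are illustrative placeholders, not values taken from this repository.

# Sketch only: a PEFT LoraConfig targeting the same modules as this diff.
# r and lora_alpha are illustrative placeholders, not values from this repo.
from peft import LoraConfig

lora_config = LoraConfig(
    r=16,                                 # placeholder rank
    lora_alpha=32,                        # placeholder scaling factor
    target_modules=["c_attn", "c_proj"],  # matches the updated adapter_config.json
    task_type="CAUSAL_LM",                # matches "task_type" in the diff
    use_dora=False,                       # matches "use_dora": false
)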
layers_0_43/adapter_model.safetensors
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:54ddf6610bfb3e5134bb7758a8f8716e71f19299d17c294ab32e0a6bc0b347ee
 size 198281632
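
Each adapter_model.safetensors in this commit is a Git LFS pointer rather than the weights themselves: "oid sha256:" records the SHA-256 digest of the real file and "size" its byte count, so the diff above swaps in a pointer to a new 198,281,632-byte weight file. The sketch below shows one way to fetch the actual file through the Hub; the repo_id is a placeholder, since the commit page does not name the repository.

# Sketch: resolve the LFS pointer to the actual weight file via the Hub cache.
# "user/repo" is a placeholder repo_id; this commit page does not name the repo.
from huggingface_hub import hf_hub_download

local_path = hf_hub_download(
    repo_id="user/repo",                               # placeholder
    filename="layers_0_43/adapter_model.safetensors",  # path from this commit
)
print(local_path)  # local path of the downloaded 198,281,632-byte file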
layers_44/adapter_config.json
CHANGED
@@ -33,8 +33,8 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-  "
-  "
+  "c_attn",
+  "c_proj"
   ],
   "task_type": "CAUSAL_LM",
   "use_dora": false,
layers_44/adapter_model.safetensors
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:522e95380cfbe2ea79daeba6a332be26d59eac7c27908bdf04aa8d2b8dd2c22e
 size 4507200
layers_45/adapter_config.json
CHANGED
@@ -37,8 +37,8 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-  "
-  "
+  "c_attn",
+  "c_proj"
   ],
   "task_type": "CAUSAL_LM",
   "use_dora": false,
layers_45/adapter_model.safetensors
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:6145e94434e33a4357e5b3e6beb04482ba9b9acab63cbcc5a4132d957ae8e86e
 size 4508744
layers_46_47/adapter_config.json
CHANGED
@@ -40,8 +40,8 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-  "
-  "
+  "c_attn",
+  "c_proj"
   ],
   "task_type": "CAUSAL_LM",
   "use_dora": false,
layers_46_47/adapter_model.safetensors
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:a52b34084d783488b9ea6a057d4fedef377b7531707d2b80b5774ecc17280326
 size 9019024
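
The oid and size recorded in each pointer also make it possible to verify a downloaded file locally. A small sketch for the layers_46_47 adapter follows, assuming the file has already been saved to the placeholder path shown.

# Sketch: check a downloaded file against its Git LFS pointer fields.
# The path is a placeholder for wherever the file was saved locally.
import hashlib
from pathlib import Path

path = Path("layers_46_47/adapter_model.safetensors")  # placeholder local path
digest = hashlib.sha256(path.read_bytes()).hexdigest()

assert path.stat().st_size == 9019024, "size mismatch with LFS pointer"
assert digest == "a52b34084d783488b9ea6a057d4fedef377b7531707d2b80b5774ecc17280326", \
    "sha256 mismatch with LFS pointer oid"
print("file matches the Git LFS pointer")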