MHGanainy committed
Commit 0d732e5 · verified · 1 parent: cfbede0

Upload model

layers_0_43/adapter_config.json CHANGED
@@ -74,8 +74,8 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "c_proj",
-    "c_attn"
+    "c_attn",
+    "c_proj"
   ],
   "task_type": "CAUSAL_LM",
   "use_dora": false,
layers_0_43/adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:a65b4921dab645c0ed96ad8a7be4a3d63f841aa4653e361cf095686568b2eac3
+oid sha256:54ddf6610bfb3e5134bb7758a8f8716e71f19299d17c294ab32e0a6bc0b347ee
 size 198281632
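
The adapter_model.safetensors files are Git LFS pointers, so only the sha256 oid changes here while the byte size stays identical. A downloaded file can be checked against the pointer's oid with nothing but the standard library; a minimal sketch (the local path is illustrative):

import hashlib

def sha256_of(path: str) -> str:
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):  # 1 MiB chunks
            h.update(chunk)
    return h.hexdigest()

expected = "54ddf6610bfb3e5134bb7758a8f8716e71f19299d17c294ab32e0a6bc0b347ee"
assert sha256_of("layers_0_43/adapter_model.safetensors") == expected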
layers_44/adapter_config.json CHANGED
@@ -33,8 +33,8 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "c_proj",
-    "c_attn"
+    "c_attn",
+    "c_proj"
   ],
   "task_type": "CAUSAL_LM",
   "use_dora": false,
layers_44/adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:d7cfa26e2d24e6f7e43c4fecff8253b223746784a2a9f8c4d9e7ba079d658a30
+oid sha256:522e95380cfbe2ea79daeba6a332be26d59eac7c27908bdf04aa8d2b8dd2c22e
 size 4507200
layers_45/adapter_config.json CHANGED
@@ -37,8 +37,8 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "c_proj",
-    "c_attn"
+    "c_attn",
+    "c_proj"
   ],
   "task_type": "CAUSAL_LM",
   "use_dora": false,
layers_45/adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:3ab33a927488423bb9134ab943897d61fbe0b12eeddbb2e88676cd19b0ced1d0
+oid sha256:6145e94434e33a4357e5b3e6beb04482ba9b9acab63cbcc5a4132d957ae8e86e
 size 4508744
layers_46_47/adapter_config.json CHANGED
@@ -40,8 +40,8 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "c_proj",
-    "c_attn"
+    "c_attn",
+    "c_proj"
   ],
   "task_type": "CAUSAL_LM",
   "use_dora": false,
layers_46_47/adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:290affd142724d743a38d304f3bbc0efd7e09d8031017405e1bac8c5476978eb
+oid sha256:a52b34084d783488b9ea6a057d4fedef377b7531707d2b80b5774ecc17280326
 size 9019024
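
Taken together, the commit re-uploads four per-layer-range adapters (layers_0_43, layers_44, layers_45, layers_46_47) with unchanged sizes but new weights. A hypothetical sketch of loading them as named adapters on one base model (the base model id and local paths are assumptions, not part of the commit):

from transformers import AutoModelForCausalLM
from peft import PeftModel

base = AutoModelForCausalLM.from_pretrained("gpt2")  # placeholder base model
model = PeftModel.from_pretrained(base, "layers_0_43", adapter_name="layers_0_43")
for name in ("layers_44", "layers_45", "layers_46_47"):
    # load_adapter attaches each directory as a separately named adapter
    model.load_adapter(name, adapter_name=name)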