jeiku committed
Commit a04a3a6 · verified · 1 Parent(s): 15cbd93

Upload folder using huggingface_hub
README.md CHANGED
@@ -13,12 +13,12 @@ Aura_3B is a merge of the following models using [LazyMergekit](https://colab.re
  
  ```yaml
  models:
- - model: NousResearch/Nous-Capybara-3B-V1.9+jeiku/No_Robots_Alpaca_StableLM
- - model: NousResearch/Nous-Capybara-3B-V1.9+jeiku/LimaRP_StableLM
- - model: NousResearch/Nous-Capybara-3B-V1.9+jeiku/Futa_Erotica_StableLM
- - model: NousResearch/Nous-Capybara-3B-V1.9+jeiku/Gnosis_256_StableLM
+ - model: jeiku/Kielbasa_3B+jeiku/Everything_v3_128_StableLM
+ - model: euclaise/ReMask-3B+jeiku/Futa_Erotica_StableLM
+ - model: jeiku/ToxicNoRobotsRosaHermesBoros_3B+jeiku/Gnosis_256_StableLM
+ - model: euclaise/Memphis-scribe-3B+jeiku/Humiliation_StableLM
  merge_method: model_stock
- base_model: NousResearch/Nous-Capybara-3B-V1.9
+ base_model: jeiku/Rosa_v1_3B
  dtype: float16
  ```
 
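For context, the `model_stock` recipe above can be reproduced with mergekit itself. A minimal sketch, assuming `pip install mergekit` and that the `run_merge` Python entry point matches current mergekit releases; the output directory name is a placeholder:

```python
# Minimal sketch: run the model_stock merge described in the YAML above.
# Assumes mergekit is installed; API names follow mergekit's documented
# Python entry point and may differ between versions.
import yaml
from mergekit.config import MergeConfiguration
from mergekit.merge import MergeOptions, run_merge

with open("mergekit_config.yml", encoding="utf-8") as fp:
    merge_config = MergeConfiguration.model_validate(yaml.safe_load(fp))

run_merge(
    merge_config,
    out_path="./Aura_3B",        # placeholder output directory
    options=MergeOptions(
        cuda=False,              # set True to merge on GPU
        copy_tokenizer=True,     # copy the base model's tokenizer into the output
    ),
)
```

The equivalent CLI invocation would be `mergekit-yaml mergekit_config.yml ./Aura_3B`.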
config.json CHANGED
@@ -1,11 +1,11 @@
  {
- "_name_or_path": "NousResearch/Nous-Capybara-3B-V1.9",
+ "_name_or_path": "jeiku/Rosa_v1_3B",
  "architectures": [
  "StableLMEpochForCausalLM"
  ],
  "auto_map": {
- "AutoConfig": "NousResearch/Nous-Capybara-3B-V1.9--configuration_stablelm_epoch.StableLMEpochConfig",
- "AutoModelForCausalLM": "NousResearch/Nous-Capybara-3B-V1.9--modeling_stablelm_epoch.StableLMEpochForCausalLM"
+ "AutoConfig": "jeiku/Rosa_v1_3B--configuration_stablelm_epoch.StableLMEpochConfig",
+ "AutoModelForCausalLM": "jeiku/Rosa_v1_3B--modeling_stablelm_epoch.StableLMEpochForCausalLM"
  },
  "bos_token_id": 0,
  "eos_token_id": 0,
@@ -25,7 +25,7 @@
  "rotary_scaling_factor": 1.0,
  "tie_word_embeddings": false,
  "torch_dtype": "float16",
- "transformers_version": "4.40.2",
- "use_cache": false,
+ "transformers_version": "4.41.0",
+ "use_cache": true,
  "vocab_size": 50304
  }
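Because `auto_map` routes to custom `StableLMEpoch` classes, loading this checkpoint through `transformers` needs `trust_remote_code=True`. A minimal loading sketch; the repo id `jeiku/Aura_3B` is an assumption based on the model name:

```python
# Minimal sketch: load the merged checkpoint with transformers.
# trust_remote_code=True is required because auto_map in config.json points
# at the custom StableLMEpoch configuration/model classes.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

repo = "jeiku/Aura_3B"  # assumed repo id for this upload

tokenizer = AutoTokenizer.from_pretrained(repo, trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained(
    repo,
    torch_dtype=torch.float16,  # matches "torch_dtype": "float16" in config.json
    trust_remote_code=True,
)

# "use_cache": true in the new config means generate() reuses the KV cache.
inputs = tokenizer("Hello,", return_tensors="pt")
out = model.generate(**inputs, max_new_tokens=32)
print(tokenizer.decode(out[0], skip_special_tokens=True))
```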
mergekit_config.yml CHANGED
@@ -1,9 +1,9 @@
  
  models:
- - model: NousResearch/Nous-Capybara-3B-V1.9+jeiku/No_Robots_Alpaca_StableLM
- - model: NousResearch/Nous-Capybara-3B-V1.9+jeiku/LimaRP_StableLM
- - model: NousResearch/Nous-Capybara-3B-V1.9+jeiku/Futa_Erotica_StableLM
- - model: NousResearch/Nous-Capybara-3B-V1.9+jeiku/Gnosis_256_StableLM
+ - model: jeiku/Kielbasa_3B+jeiku/Everything_v3_128_StableLM
+ - model: euclaise/ReMask-3B+jeiku/Futa_Erotica_StableLM
+ - model: jeiku/ToxicNoRobotsRosaHermesBoros_3B+jeiku/Gnosis_256_StableLM
+ - model: euclaise/Memphis-scribe-3B+jeiku/Humiliation_StableLM
  merge_method: model_stock
- base_model: NousResearch/Nous-Capybara-3B-V1.9
+ base_model: jeiku/Rosa_v1_3B
  dtype: float16
model-00001-of-00002.safetensors CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:2e6100e21c85de0f4521760fe42e375fd2b189af2f8d3f9fef7cd0ef4bed5308
+ oid sha256:ab7aa105efb42150e30e19d17f1c5b8bdd4165f3005a78d5c0802b0a6326cbbf
  size 4991841312
model-00002-of-00002.safetensors CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:84125412a374b94e010f6c4d04484b17c7f960581ec183c4337f4e11e79c505f
+ oid sha256:fa2dbd0a3a2d71ab8a71a4e535fd8e757893613148bdc4f7c4bb62a15d40669d
  size 599080560
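Both shards are Git LFS pointers; the `oid sha256:` values above identify the actual weight files. A small sketch for verifying a downloaded shard against its pointer hash:

```python
# Minimal sketch: check a downloaded shard against the sha256 from its LFS pointer.
import hashlib

# Hash for model-00001-of-00002.safetensors, taken from the pointer above.
EXPECTED = "ab7aa105efb42150e30e19d17f1c5b8bdd4165f3005a78d5c0802b0a6326cbbf"

digest = hashlib.sha256()
with open("model-00001-of-00002.safetensors", "rb") as fp:
    for chunk in iter(lambda: fp.read(1 << 20), b""):  # stream in 1 MiB chunks
        digest.update(chunk)

assert digest.hexdigest() == EXPECTED, "shard does not match its LFS pointer"
print("shard verified:", digest.hexdigest())
```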
special_tokens_map.json CHANGED
@@ -7,14 +7,14 @@
  "single_word": false
  },
  "eos_token": {
- "content": "<|im_end|>",
+ "content": "<|endoftext|>",
  "lstrip": false,
  "normalized": false,
  "rstrip": false,
  "single_word": false
  },
  "pad_token": {
- "content": "[PAD]",
+ "content": "<|endoftext|>",
  "lstrip": false,
  "normalized": false,
  "rstrip": false,
tokenizer.json CHANGED
@@ -227,33 +227,6 @@
  "rstrip": false,
  "normalized": true,
  "special": false
- },
- {
- "id": 50277,
- "content": "[PAD]",
- "single_word": false,
- "lstrip": false,
- "rstrip": false,
- "normalized": false,
- "special": true
- },
- {
- "id": 50278,
- "content": "<|im_end|>",
- "single_word": false,
- "lstrip": false,
- "rstrip": false,
- "normalized": false,
- "special": true
- },
- {
- "id": 50279,
- "content": "<|im_start|>",
- "single_word": false,
- "lstrip": false,
- "rstrip": false,
- "normalized": false,
- "special": false
  }
  ],
  "normalizer": {
tokenizer_config.json CHANGED
@@ -202,37 +202,14 @@
  "rstrip": false,
  "single_word": false,
  "special": false
- },
- "50277": {
- "content": "[PAD]",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false,
- "special": true
- },
- "50278": {
- "content": "<|im_end|>",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false,
- "special": true
- },
- "50279": {
- "content": "<|im_start|>",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false,
- "special": false
  }
  },
  "bos_token": "<|endoftext|>",
+ "chat_template": "{% if not add_generation_prompt is defined %}{% set add_generation_prompt = false %}{% endif %}{% for message in messages %}{{'<|im_start|>' + message['role'] + '\n' + message['content'] + '<|im_end|>' + '\n'}}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant\n' }}{% endif %}",
  "clean_up_tokenization_spaces": true,
- "eos_token": "<|im_end|>",
+ "eos_token": "<|endoftext|>",
  "model_max_length": 1000000000000000019884624838656,
- "pad_token": "[PAD]",
+ "pad_token": "<|endoftext|>",
  "tokenizer_class": "GPTNeoXTokenizer",
  "unk_token": "<|endoftext|>"
  }
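The new `chat_template` renders conversations in ChatML style, while `eos_token` and `pad_token` both become `<|endoftext|>`. A usage sketch, again assuming the `jeiku/Aura_3B` repo id:

```python
# Minimal sketch: render a prompt with the newly added ChatML chat_template.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("jeiku/Aura_3B", trust_remote_code=True)  # assumed repo id
print(tokenizer.eos_token, tokenizer.pad_token)  # both "<|endoftext|>" after this change

messages = [{"role": "user", "content": "Write a haiku about merging models."}]
prompt = tokenizer.apply_chat_template(
    messages,
    tokenize=False,
    add_generation_prompt=True,  # appends "<|im_start|>assistant\n"
)
print(prompt)
# Note: the template emits <|im_start|>/<|im_end|>, which this tokenizer now
# treats as plain text since they were dropped from the added-token list.
```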