Commit ad5cb7c by kanye
1 Parent(s): c392b93

Resolved merge conflicts

AWQ/config.json DELETED
@@ -1,25 +0,0 @@
-{
-  "_name_or_path": "/home/anon/AI-Models/LLM/AshhLimaRP-Mistral-7B/",
-  "architectures": [
-    "MistralForCausalLM"
-  ],
-  "bos_token_id": 1,
-  "eos_token_id": 2,
-  "hidden_act": "silu",
-  "hidden_size": 4096,
-  "initializer_range": 0.02,
-  "intermediate_size": 14336,
-  "max_position_embeddings": 32768,
-  "model_type": "mistral",
-  "num_attention_heads": 32,
-  "num_hidden_layers": 32,
-  "num_key_value_heads": 8,
-  "rms_norm_eps": 1e-05,
-  "rope_theta": 10000.0,
-  "sliding_window": 4096,
-  "tie_word_embeddings": false,
-  "torch_dtype": "float16",
-  "transformers_version": "4.34.1",
-  "use_cache": true,
-  "vocab_size": 32000
-}
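
For reference, the deleted config describes a stock Mistral-7B layout: 32 layers, grouped-query attention with 8 KV heads, and a 32k position limit with a 4096-token sliding window. A minimal sketch of rebuilding the same file with the transformers library (an assumption here, though the config itself records transformers 4.34.1); the MistralConfig keyword names match the JSON keys above:

    from transformers import MistralConfig

    # Mirror the deleted AWQ/config.json; each keyword matches a key in the diff above.
    config = MistralConfig(
        hidden_size=4096,
        intermediate_size=14336,
        num_hidden_layers=32,
        num_attention_heads=32,
        num_key_value_heads=8,          # grouped-query attention: 4 query heads share each KV head
        max_position_embeddings=32768,
        sliding_window=4096,
        rms_norm_eps=1e-05,
        rope_theta=10000.0,
        vocab_size=32000,
        tie_word_embeddings=False,
    )
    config.save_pretrained("AWQ")       # writes AWQ/config.json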
 
AWQ/generation_config.json DELETED
@@ -1,6 +0,0 @@
-{
-  "_from_model_config": true,
-  "bos_token_id": 1,
-  "eos_token_id": 2,
-  "transformers_version": "4.34.1"
-}
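
The generation config only pins the BOS/EOS token ids. A matching one-line sketch, again assuming transformers:

    from transformers import GenerationConfig

    # Recreate the deleted AWQ/generation_config.json.
    GenerationConfig(bos_token_id=1, eos_token_id=2).save_pretrained("AWQ")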
 
AWQ/pytorch_model.bin DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:251bc4a6827d76cf70be0b653370d3c2026908b4d3cbb8cb0d577677570eacc2
-size 4151034841
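
Note that the three lines above are not the weights themselves but a Git LFS pointer: the real ~4.15 GB payload is content-addressed by its SHA-256 and fetched at checkout. A small sketch of parsing such a pointer file (the helper name is illustrative):

    def parse_lfs_pointer(path):
        """Parse a Git LFS pointer file into its version, oid, and size fields."""
        fields = {}
        with open(path) as f:
            for line in f:
                key, _, value = line.strip().partition(" ")
                fields[key] = value
        fields["size"] = int(fields["size"])    # payload size in bytes
        return fields

    # On the pointer above this returns
    # {"version": "https://git-lfs.github.com/spec/v1", "oid": "sha256:251b...", "size": 4151034841}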
 
AWQ/quant_config.json DELETED
@@ -1,6 +0,0 @@
-{
-  "zero_point": true,
-  "q_group_size": 128,
-  "w_bit": 4,
-  "version": "GEMM"
-}
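
quant_config.json records the AWQ settings: 4-bit weights, quantization groups of 128 columns, zero-point (asymmetric) quantization, and the GEMM kernel variant. A hedged sketch of how such a checkpoint is typically loaded with the autoawq package (an assumption; autoawq reads quant_config.json itself, so these parameters need not be repeated):

    from awq import AutoAWQForCausalLM
    from transformers import AutoTokenizer

    # Load the 4-bit AWQ checkpoint from the (now deleted) AWQ/ directory.
    model = AutoAWQForCausalLM.from_quantized("AWQ", fuse_layers=True)
    tokenizer = AutoTokenizer.from_pretrained("AWQ")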
 
AWQ/special_tokens_map.json DELETED
@@ -1,28 +0,0 @@
-{
-  "additional_special_tokens": [
-    "<unk>",
-    "<s>",
-    "</s>"
-  ],
-  "bos_token": {
-    "content": "<s>",
-    "lstrip": false,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false
-  },
-  "eos_token": {
-    "content": "</s>",
-    "lstrip": false,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false
-  },
-  "unk_token": {
-    "content": "<unk>",
-    "lstrip": false,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false
-  }
-}
 
AWQ/tokenizer.json DELETED
The diff for this file is too large to render.
 
AWQ/tokenizer.model DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:dadfd56d766715c61d2ef780a525ab43b8e6da4de6865bda3d95fdef5e134055
-size 493443
 
AWQ/tokenizer_config.json DELETED
@@ -1,44 +0,0 @@
-{
-  "added_tokens_decoder": {
-    "0": {
-      "content": "<unk>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": true
-    },
-    "1": {
-      "content": "<s>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": true
-    },
-    "2": {
-      "content": "</s>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": true
-    }
-  },
-  "additional_special_tokens": [
-    "<unk>",
-    "<s>",
-    "</s>"
-  ],
-  "bos_token": "<s>",
-  "clean_up_tokenization_spaces": false,
-  "eos_token": "</s>",
-  "legacy": true,
-  "model_max_length": 1000000000000000019884624838656,
-  "pad_token": null,
-  "sp_model_kwargs": {},
-  "spaces_between_special_tokens": false,
-  "tokenizer_class": "LlamaTokenizer",
-  "unk_token": "<unk>",
-  "use_default_system_prompt": true
-}
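
Together with special_tokens_map.json and tokenizer.model, this declares a plain LlamaTokenizer whose only special tokens are <unk>, <s>, and </s> (ids 0-2). A minimal sketch of loading it while the files still existed, assuming transformers:

    from transformers import AutoTokenizer

    tokenizer = AutoTokenizer.from_pretrained("AWQ")
    ids = tokenizer("Hello").input_ids
    assert ids[0] == 1                  # bos_token_id: "<s>" is prepended automatically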
 
AshhLimaRP-Mistral-7B.Q4_K_M.gguf DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:b67868935b2399a0a6433f8b4fd417a62274f01188c6495bc4bdcc6b894148fc
-size 4368438912
 
AshhLimaRP-Mistral-7B.Q6_K.gguf DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:0074d49d059b590eb11f624abf956c309091ce66f5295461187cb0116f9f4a8c
-size 5942064768
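
The two GGUF files are llama.cpp conversions of the model at different quantization levels: Q4_K_M (~4.37 GB) trades some quality for size, while Q6_K (~5.94 GB) stays closer to the fp16 weights. A sketch of loading the smaller one with the llama-cpp-python bindings (the package choice and settings are assumptions):

    from llama_cpp import Llama

    llm = Llama(
        model_path="AshhLimaRP-Mistral-7B.Q4_K_M.gguf",
        n_ctx=4096,                     # matches the config's sliding-window size
    )
    out = llm("### Instruction:\nSay hello.\n### Response:\n", max_tokens=64)
    print(out["choices"][0]["text"])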
 
SillyTavern Presets/LimaRP-Alpaca_Context-Template.json DELETED
@@ -1,11 +0,0 @@
-{
-  "story_string": "{{system}}{{char}}'s Persona: {{description}}\n\n{{#if persona}}{{user}}'s Persona: {{persona}}\n\n{{/if}}\n{{#if scenario}}Scenario: {{scenario}}\n\n{{/if}}\n{{#if loreBefore}}Background information: {{loreBefore}}\n\n{{/if}}\nPlay the role of {{char}}. With scene and characters now described, you must engage in a roleplay conversation with {{user}} below this line. Never write for {{user}} in your responses.",
-  "example_separator": "",
-  "chat_start": "",
-  "always_force_name2": false,
-  "trim_sentences": false,
-  "include_newline": false,
-  "custom_stopping_strings": "",
-  "custom_stopping_strings_macro": true,
-  "name": "LimaRP-Alpaca"
-}
 
SillyTavern Presets/LimaRP-Alpaca_Instruct-Mode.json DELETED
@@ -1,18 +0,0 @@
-{
-  "wrap": true,
-  "names": true,
-  "system_prompt": "",
-  "system_sequence": "<<SYSTEM>>",
-  "stop_sequence": "###",
-  "input_sequence": "\n### Input:",
-  "output_sequence": "\n### Response:",
-  "separator_sequence": "",
-  "macro": true,
-  "names_force_groups": false,
-  "last_output_sequence": "\n### Response: (length = short)",
-  "activation_regex": "",
-  "system_sequence_prefix": "\n### Instruction:",
-  "system_sequence_suffix": "",
-  "first_output_sequence": "",
-  "name": "LimaRP-Alpaca"
-}
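
Taken together, the two presets render an Alpaca-style prompt: the context template supplies the ### Instruction: system block, user turns arrive under ### Input:, and the final ### Response: (length = short) cue steers reply length. A small sketch assembling one turn from the sequences above (names and message text are illustrative):

    # Sequences copied from the deleted Instruct-Mode preset.
    SYSTEM_PREFIX = "\n### Instruction:"
    INPUT_SEQUENCE = "\n### Input:"
    LAST_OUTPUT_SEQUENCE = "\n### Response: (length = short)"

    def build_turn(system: str, user: str, message: str) -> str:
        """Assemble one Alpaca-style turn roughly the way SillyTavern would."""
        return (
            f"{SYSTEM_PREFIX}\n{system}\n"
            f"{INPUT_SEQUENCE}\n{user}: {message}\n"
            f"{LAST_OUTPUT_SEQUENCE}\n"
        )

    print(build_turn("Play the role of Ashh.", "Anon", "Hi there!"))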
 
adapter_model.bin DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:8a8b38d58b554cdf248a4822c2a31a10aac47a4815882c3983424bdba1493421
-size 2684516749
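
adapter_model.bin follows the PEFT naming convention for saved LoRA weights (~2.68 GB here). A hedged sketch of how such an adapter is usually applied on top of a base model, assuming the peft package, a matching adapter_config.json alongside the weights, and an illustrative base checkpoint name:

    import torch
    from peft import PeftModel
    from transformers import AutoModelForCausalLM

    # Base checkpoint name is an assumption; the deleted config pointed at a local
    # AshhLimaRP-Mistral-7B directory built on Mistral-7B.
    base = AutoModelForCausalLM.from_pretrained(
        "mistralai/Mistral-7B-v0.1", torch_dtype=torch.float16
    )
    model = PeftModel.from_pretrained(base, ".")    # loads adapter_model.bin from the repo root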