Upload 2 files

- llmdolphin.py +25 -18
- llmenv.py +74 -0
llmdolphin.py
CHANGED
@@ -219,7 +219,7 @@ def get_raw_prompt(msg: str):
 
 
 @torch.inference_mode()
-@spaces.GPU(duration=
+@spaces.GPU(duration=59)
 def dolphin_respond(
     message: str,
     history: list[tuple[str, str]],
@@ -234,13 +234,15 @@ def dolphin_respond(
     progress=gr.Progress(track_tqdm=True),
 ):
     try:
+        model_path = Path(f"{llm_models_dir}/{model}")
+        if not model_path.exists(): raise gr.Error(f"Model file not found: {str(model_path)}")
         progress(0, desc="Processing...")
         override_llm_format = get_state(state, "override_llm_format")
         if override_llm_format: chat_template = override_llm_format
         else: chat_template = llm_models[model][1]
 
         llm = Llama(
-            model_path=str(
+            model_path=str(model_path),
             flash_attn=True,
             n_gpu_layers=81, # 81
             n_batch=1024,
@@ -294,8 +296,8 @@ def dolphin_respond(
         yield [(outputs, None)]
     except Exception as e:
         print(e)
-        gr.
-        yield [("", None)]
+        raise gr.Error(f"Error: {e}")
+        #yield [("", None)]
 
 
 def dolphin_parse(
@@ -320,7 +322,7 @@ def dolphin_parse(
 
 
 @torch.inference_mode()
-@spaces.GPU(duration=
+@spaces.GPU(duration=59)
 def dolphin_respond_auto(
     message: str,
     history: list[tuple[str, str]],
@@ -335,6 +337,7 @@ def dolphin_respond_auto(
     progress=gr.Progress(track_tqdm=True),
 ):
     try:
+        model_path = Path(f"{llm_models_dir}/{model}")
         #if not is_japanese(message): return [(None, None)]
         progress(0, desc="Processing...")
 
@@ -343,7 +346,7 @@ def dolphin_respond_auto(
         else: chat_template = llm_models[model][1]
 
         llm = Llama(
-            model_path=str(
+            model_path=str(model_path),
             flash_attn=True,
             n_gpu_layers=81, # 81
             n_batch=1024,
@@ -429,26 +432,29 @@ cv2.setNumThreads(1)
 
 
 @torch.inference_mode()
-@spaces.GPU()
+@spaces.GPU(duration=59)
 def respond_playground(
-    message,
+    message: str,
     history: list[tuple[str, str]],
-    model,
-    system_message,
-    max_tokens,
-    temperature,
-    top_p,
-    top_k,
-    repeat_penalty,
-    state,
+    model: str = default_llm_model_filename,
+    system_message: str = get_dolphin_sysprompt(),
+    max_tokens: int = 1024,
+    temperature: float = 0.7,
+    top_p: float = 0.95,
+    top_k: int = 40,
+    repeat_penalty: float = 1.1,
+    state: dict = {},
+    progress=gr.Progress(track_tqdm=True),
 ):
     try:
+        model_path = Path(f"{llm_models_dir}/{model}")
+        if not model_path.exists(): raise gr.Error(f"Model file not found: {str(model_path)}")
         override_llm_format = get_state(state, "override_llm_format")
         if override_llm_format: chat_template = override_llm_format
         else: chat_template = llm_models[model][1]
 
         llm = Llama(
-            model_path=str(
+            model_path=str(model_path),
             flash_attn=True,
             n_gpu_layers=81, # 81
             n_batch=1024,
@@ -496,4 +502,5 @@ def respond_playground(
         yield outputs
     except Exception as e:
         print(e)
-
+        raise gr.Error(f"Error: {e}")
+        #yield ""
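Note on the llmdolphin.py changes above: every handler now applies the same three-part pattern. The ZeroGPU allocation is capped at 59 seconds via @spaces.GPU(duration=59), the GGUF file is checked before any GPU time is spent, and failures are raised as gr.Error so they surface in the Gradio UI instead of an empty yielded reply. A minimal sketch of that pattern, assuming llm_models_dir points at the Space's model directory (the value below and the generate name are stand-ins, not part of the commit):

from pathlib import Path

import gradio as gr
import spaces
import torch

llm_models_dir = "models"  # stand-in value; the Space defines the real directory elsewhere

@torch.inference_mode()
@spaces.GPU(duration=59)  # request a ZeroGPU slot capped at 59 seconds
def generate(message: str, model: str, state: dict = {}):
    try:
        # Fail fast, before any GPU work, if the requested GGUF file is missing.
        model_path = Path(f"{llm_models_dir}/{model}")
        if not model_path.exists():
            raise gr.Error(f"Model file not found: {str(model_path)}")
        yield f"loaded {model_path}"  # placeholder for the Llama(...) streaming loop
    except Exception as e:
        print(e)
        # Surface the failure in the UI rather than yielding an empty message.
        raise gr.Error(f"Error: {e}")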
llmenv.py
CHANGED
@@ -67,6 +67,7 @@ llm_models = {
     "MISCHIEVOUS-12B-Mix_0.1v.Q4_K_M.gguf": ["mradermacher/MISCHIEVOUS-12B-Mix_0.1v-GGUF", MessagesFormatterType.MISTRAL],
     "Canidori-12B-v1.i1-Q4_K_M.gguf": ["mradermacher/Canidori-12B-v1-i1-GGUF", MessagesFormatterType.MISTRAL],
     "MT2-Gen4-MM-gemma-2-Rv0.4MTM-9B.Q4_K_M.gguf": ["mradermacher/MT2-Gen4-MM-gemma-2-Rv0.4MTM-9B-GGUF", MessagesFormatterType.ALPACA],
+    "Flammen-Trismegistus-7B.i1-Q5_K_M.gguf": ["mradermacher/Flammen-Trismegistus-7B-i1-GGUF", MessagesFormatterType.MISTRAL],
     "Trinas_Nectar-8B-model_stock.i1-Q4_K_M.gguf": ["mradermacher/Trinas_Nectar-8B-model_stock-i1-GGUF", MessagesFormatterType.MISTRAL],
     "ChatWaifu_Magnum_V0.2.Q4_K_M.gguf": ["mradermacher/ChatWaifu_Magnum_V0.2-GGUF", MessagesFormatterType.MISTRAL],
     "ChatWaifu_12B_v2.0.Q5_K_M.gguf": ["mradermacher/ChatWaifu_12B_v2.0-GGUF", MessagesFormatterType.MISTRAL],
@@ -79,6 +80,79 @@ llm_models = {
     #"": ["", MessagesFormatterType.OPEN_CHAT],
     #"": ["", MessagesFormatterType.CHATML],
     #"": ["", MessagesFormatterType.PHI_3],
+    "WONMSeverusDevilv2-TIES.i1-Q5_K_M.gguf": ["mradermacher/WONMSeverusDevilv2-TIES-i1-GGUF", MessagesFormatterType.MISTRAL],
+    "VirtuosoSmall-InstructModelStock.i1-Q4_K_M.gguf": ["mradermacher/VirtuosoSmall-InstructModelStock-i1-GGUF", MessagesFormatterType.OPEN_CHAT],
+    "VICIOUS_MESH-12B-GAMMA.Q4_K_M.gguf": ["mradermacher/VICIOUS_MESH-12B-GAMMA-GGUF", MessagesFormatterType.MISTRAL],
+    "VICIOUS_MESH-12B-BETA.Q4_K_M.gguf": ["mradermacher/VICIOUS_MESH-12B-BETA-GGUF", MessagesFormatterType.MISTRAL],
+    "TQ2.5-14B-Aletheia-v1.i1-Q4_K_M.gguf": ["mradermacher/TQ2.5-14B-Aletheia-v1-i1-GGUF", MessagesFormatterType.OPEN_CHAT],
+    "Tiger-7B-v0.1.i1-Q5_K_M.gguf": ["mradermacher/Tiger-7B-v0.1-i1-GGUF", MessagesFormatterType.MISTRAL],
+    "TaxoLlama3.1-8b-instruct.i1-Q5_K_M.gguf": ["mradermacher/TaxoLlama3.1-8b-instruct-i1-GGUF", MessagesFormatterType.LLAMA_3],
+    "RandomMergeNoNormWEIGHTED-7B-MODELSTOCK.i1-Q5_K_M.gguf": ["mradermacher/RandomMergeNoNormWEIGHTED-7B-MODELSTOCK-i1-GGUF", MessagesFormatterType.MISTRAL],
+    "RandomMergeNoNormWEIGHTED-7B-DARETIES.i1-Q5_K_M.gguf": ["mradermacher/RandomMergeNoNormWEIGHTED-7B-DARETIES-i1-GGUF", MessagesFormatterType.MISTRAL],
+    "RandomMergeNoNorm-7B-DARETIES.i1-Q5_K_M.gguf": ["mradermacher/RandomMergeNoNorm-7B-DARETIES-i1-GGUF", MessagesFormatterType.MISTRAL],
+    "R_Marco_polo.Q5_K_M.gguf": ["mradermacher/R_Marco_polo-GGUF", MessagesFormatterType.OPEN_CHAT],
+    "Qwenvergence-14B-v3-Reason.i1-Q4_K_M.gguf": ["mradermacher/Qwenvergence-14B-v3-Reason-i1-GGUF", MessagesFormatterType.OPEN_CHAT],
+    "Qwenvergence-14B-v3-Prose.i1-Q4_K_M.gguf": ["mradermacher/Qwenvergence-14B-v3-Prose-i1-GGUF", MessagesFormatterType.OPEN_CHAT],
+    "Qwenvergence-14B-v3.i1-Q4_K_M.gguf": ["mradermacher/Qwenvergence-14B-v3-i1-GGUF", MessagesFormatterType.OPEN_CHAT],
+    "Qwen2.5-14B-SLERPv7.i1-Q4_K_M.gguf": ["mradermacher/Qwen2.5-14B-SLERPv7-i1-GGUF", MessagesFormatterType.OPEN_CHAT],
+    "Qwen2.5-14B-Instruct-abliterated-v2.i1-Q4_K_M.gguf": ["mradermacher/Qwen2.5-14B-Instruct-abliterated-v2-i1-GGUF", MessagesFormatterType.OPEN_CHAT],
+    "Prima-LelantaclesV5-7b.i1-Q5_K_M.gguf": ["mradermacher/Prima-LelantaclesV5-7b-i1-GGUF", MessagesFormatterType.MISTRAL],
+    "q2.5-veltha-14b-q5_k_m.gguf": ["djuna/Q2.5-Veltha-14B-Q5_K_M-GGUF", MessagesFormatterType.OPEN_CHAT],
+    "OgnoExperiment27-7B.i1-Q5_K_M.gguf": ["mradermacher/OgnoExperiment27-7B-i1-GGUF", MessagesFormatterType.MISTRAL],
+    "Neurotic-Jomainotrik-7b-slerp.i1-Q5_K_M.gguf": ["mradermacher/Neurotic-Jomainotrik-7b-slerp-i1-GGUF", MessagesFormatterType.MISTRAL],
+    "NeuralPearlBeagle.Q5_K_M.gguf": ["mradermacher/NeuralPearlBeagle-GGUF", MessagesFormatterType.MISTRAL],
+    "Mistral-7B-Instruct-Ukrainian.i1-Q5_K_M.gguf": ["mradermacher/Mistral-7B-Instruct-Ukrainian-i1-GGUF", MessagesFormatterType.MISTRAL],
+    "Maverick-7B.i1-Q5_K_M.gguf": ["mradermacher/Maverick-7B-i1-GGUF", MessagesFormatterType.MISTRAL],
+    "Hermes3-L3.1-DirtyHarry-8B.Q5_K_M.gguf": ["mradermacher/Hermes3-L3.1-DirtyHarry-8B-GGUF", MessagesFormatterType.LLAMA_3],
+    "Hermes3-L3.1-DarkPlanetSF-8B.i1-Q5_K_M.gguf": ["mradermacher/Hermes3-L3.1-DarkPlanetSF-8B-i1-GGUF", MessagesFormatterType.LLAMA_3],
+    "Hermes3-L3.1-BigTalker-8B.i1-Q5_K_M.gguf": ["mradermacher/Hermes3-L3.1-BigTalker-8B-i1-GGUF", MessagesFormatterType.LLAMA_3],
+    "Casuar-9B-Model_Stock.i1-Q4_K_M.gguf": ["mradermacher/Casuar-9B-Model_Stock-i1-GGUF", MessagesFormatterType.ALPACA],
+    "ZEUS-8B-V9.i1-Q5_K_M.gguf": ["mradermacher/ZEUS-8B-V9-i1-GGUF", MessagesFormatterType.LLAMA_3],
+    "vicious_mesh-12b-delta-q4_k_m.gguf": ["bamec66557/VICIOUS_MESH-12B-DELTA-Q4_K_M-GGUF", MessagesFormatterType.MISTRAL],
+    "Neural-Logical-Abstract-7B-slerp.Q5_K_M.gguf": ["mradermacher/Neural-Logical-Abstract-7B-slerp-GGUF", MessagesFormatterType.MISTRAL],
+    "NeuralDareBeagle-7B-slerp.Q5_K_M.gguf": ["mradermacher/NeuralDareBeagle-7B-slerp-GGUF", MessagesFormatterType.MISTRAL],
+    "Neural_Waifu_7b_V0.1.Q5_K_M.gguf": ["mradermacher/Neural_Waifu_7b_V0.1-GGUF", MessagesFormatterType.MISTRAL],
+    "Mistral-7B-Instruct_v0.2_UNA-TheBeagle-7b-v1.i1-Q5_K_M.gguf": ["mradermacher/Mistral-7B-Instruct_v0.2_UNA-TheBeagle-7b-v1-i1-GGUF", MessagesFormatterType.MISTRAL],
+    "MISCHIEVOUS-12B-Mix_Neo.i1-Q4_K_M.gguf": ["mradermacher/MISCHIEVOUS-12B-Mix_Neo-i1-GGUF", MessagesFormatterType.MISTRAL],
+    "MiaLatte-Indo-Mistral-7b.Q5_K_M.gguf": ["mradermacher/MiaLatte-Indo-Mistral-7b-GGUF", MessagesFormatterType.MISTRAL],
+    "MFANN-SFT-2.i1-Q5_K_M.gguf": ["mradermacher/MFANN-SFT-2-i1-GGUF", MessagesFormatterType.LLAMA_3],
+    "maestrale-chat-v0.3-beta-sft.i1-Q5_K_M.gguf": ["mradermacher/maestrale-chat-v0.3-beta-sft-i1-GGUF", MessagesFormatterType.MISTRAL],
+    "Llama-3-EZO-8b-Common-it.i1-Q5_K_M.gguf": ["mradermacher/Llama-3-EZO-8b-Common-it-i1-GGUF", MessagesFormatterType.LLAMA_3],
+    "Legal-Saul-Multiverse-7b.Q5_K_M.gguf": ["mradermacher/Legal-Saul-Multiverse-7b-GGUF", MessagesFormatterType.MISTRAL],
+    "kuno-royale-v3-7b.Q5_K_M.gguf": ["mradermacher/kuno-royale-v3-7b-GGUF", MessagesFormatterType.MISTRAL],
+    "Kuno-lake-slerp-7b.Q5_K_M.gguf": ["mradermacher/Kuno-lake-slerp-7b-GGUF", MessagesFormatterType.MISTRAL],
+    "KRONOS-8B-V6.i1-Q5_K_M.gguf": ["mradermacher/KRONOS-8B-V6-i1-GGUF", MessagesFormatterType.LLAMA_3],
+    "Homer-v1.0-Qwen2.5-7B.i1-Q5_K_M.gguf": ["mradermacher/Homer-v1.0-Qwen2.5-7B-i1-GGUF", MessagesFormatterType.OPEN_CHAT],
+    "FuseCyberMix-Qwen-2.5-7B-Instruct.i1-Q5_K_M.gguf": ["mradermacher/FuseCyberMix-Qwen-2.5-7B-Instruct-i1-GGUF", MessagesFormatterType.OPEN_CHAT],
+    "flammen7-mistral-7B.Q5_K_M.gguf": ["mradermacher/flammen7-mistral-7B-GGUF", MessagesFormatterType.MISTRAL],
+    "flammen6-mistral-7B.Q5_K_M.gguf": ["mradermacher/flammen6-mistral-7B-GGUF", MessagesFormatterType.MISTRAL],
+    "flammen3X-mistral-7B.i1-Q5_K_M.gguf": ["mradermacher/flammen3X-mistral-7B-i1-GGUF", MessagesFormatterType.MISTRAL],
+    "Eris-Floramix-7b.i1-Q5_K_M.gguf": ["mradermacher/Eris-Floramix-7b-i1-GGUF", MessagesFormatterType.MISTRAL],
+    "Eris-Daturamix-7b.i1-Q5_K_M.gguf": ["mradermacher/Eris-Daturamix-7b-i1-GGUF", MessagesFormatterType.MISTRAL],
+    "Eris_Remix_DPO_7B.i1-Q5_K_M.gguf": ["mradermacher/Eris_Remix_DPO_7B-i1-GGUF", MessagesFormatterType.MISTRAL],
+    "dolphin-2.6-mistral-7b-dpo-chat.Q5_K_M.gguf": ["mradermacher/dolphin-2.6-mistral-7b-dpo-chat-GGUF", MessagesFormatterType.MISTRAL],
+    "BuRP_7B.i1-Q5_K_M.gguf": ["mradermacher/BuRP_7B-i1-GGUF", MessagesFormatterType.MISTRAL],
+    "Blurred-Beagle-7b-slerp.Q5_K_M.gguf": ["mradermacher/Blurred-Beagle-7b-slerp-GGUF", MessagesFormatterType.MISTRAL],
+    "Blur-7B-slerp-v0.1.i1-Q5_K_M.gguf": ["mradermacher/Blur-7B-slerp-v0.1-i1-GGUF", MessagesFormatterType.MISTRAL],
+    "Biomistral-Clown-Slerp.Q5_K_M.gguf": ["mradermacher/Biomistral-Clown-Slerp-GGUF", MessagesFormatterType.MISTRAL],
+    "Deris-SSS-7B.Q5_K_M.gguf": ["mradermacher/Deris-SSS-7B-GGUF", MessagesFormatterType.MISTRAL],
+    "Dark-Waifu-7b.Q5_K_M.gguf": ["mradermacher/Dark-Waifu-7b-GGUF", MessagesFormatterType.MISTRAL],
+    "Brezn3.Q5_K_M.gguf": ["mradermacher/Brezn3-GGUF", MessagesFormatterType.MISTRAL],
+    "BreezeDolphin-SLERP-0.1.Q5_K_M.gguf": ["mradermacher/BreezeDolphin-SLERP-0.1-GGUF", MessagesFormatterType.MISTRAL],
+    "Blitz-v0.1.i1-Q5_K_M.gguf": ["mradermacher/Blitz-v0.1-i1-GGUF", MessagesFormatterType.MISTRAL],
+    "Asymmetric_Linearity-8B-Model_Stock.Q5_K_M.gguf": ["mradermacher/Asymmetric_Linearity-8B-Model_Stock-GGUF", MessagesFormatterType.LLAMA_3],
+    "KRONOS-8B-V5.i1-Q5_K_M.gguf": ["mradermacher/KRONOS-8B-V5-i1-GGUF", MessagesFormatterType.LLAMA_3],
+    "14b-Qwen2.5-Infermatic-Crea-v1.i1-Q4_K_M.gguf": ["mradermacher/14b-Qwen2.5-Infermatic-Crea-v1-i1-GGUF", MessagesFormatterType.OPEN_CHAT],
+    "TQ2.5-14B-Neon-v1-Q4_K_M.gguf": ["bartowski/TQ2.5-14B-Neon-v1-GGUF", MessagesFormatterType.OPEN_CHAT],
+    "TQ2.5-14B-Aletheia-v1-Q4_K_M.gguf": ["bartowski/TQ2.5-14B-Aletheia-v1-GGUF", MessagesFormatterType.OPEN_CHAT],
+    "SynthIQ-7b.i1-Q5_K_M.gguf": ["mradermacher/SynthIQ-7b-i1-GGUF", MessagesFormatterType.MISTRAL],
+    "MISCHIEVOUS-12B-Mix_III_IV_V.i1-Q4_K_M.gguf": ["mradermacher/MISCHIEVOUS-12B-Mix_III_IV_V-i1-GGUF", MessagesFormatterType.MISTRAL],
+    "KRONOS-8B-V4.i1-Q5_K_M.gguf": ["mradermacher/KRONOS-8B-V4-i1-GGUF", MessagesFormatterType.LLAMA_3],
+    "fratricide-12B-Unslop-Mell-DARKNESS.i1-Q4_K_M.gguf": ["mradermacher/fratricide-12B-Unslop-Mell-DARKNESS-i1-GGUF", MessagesFormatterType.MISTRAL],
+    "AngelSlayer-12B-Unslop-Mell-RPMax-DARKNESS-v2.i1-Q4_K_M.gguf": ["mradermacher/AngelSlayer-12B-Unslop-Mell-RPMax-DARKNESS-v2-i1-GGUF", MessagesFormatterType.MISTRAL],
+    "Llama-3-8B-ArliAI-Formax-v1.0.i1-Q5_K_M.gguf": ["mradermacher/Llama-3-8B-ArliAI-Formax-v1.0-i1-GGUF", MessagesFormatterType.LLAMA_3],
+    "MonarchLake-7B.i1-Q5_K_M.gguf": ["mradermacher/MonarchLake-7B-i1-GGUF", MessagesFormatterType.MISTRAL],
+    "Evangelion-7B.i1-Q5_K_M.gguf": ["mradermacher/Evangelion-7B-i1-GGUF", MessagesFormatterType.MISTRAL],
     "Mistral-7B-Instruct-v0.2-Neural-Story.Q5_K_M.gguf": ["mradermacher/Mistral-7B-Instruct-v0.2-Neural-Story-GGUF", MessagesFormatterType.MISTRAL],
     "Layris_9B.Q4_K_M.gguf": ["mradermacher/Layris_9B-GGUF", MessagesFormatterType.MISTRAL],
     "Eris_Remix_7B.Q5_K_M.gguf": ["mradermacher/Eris_Remix_7B-GGUF", MessagesFormatterType.MISTRAL],
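For reference, each entry added to llm_models maps a GGUF filename to the Hugging Face repo that hosts it and the MessagesFormatterType that llama-cpp-agent should use for its chat template. A sketch of how such an entry can be resolved at load time; fetch_model is a hypothetical helper, while hf_hub_download is the standard huggingface_hub API:

from huggingface_hub import hf_hub_download
from llama_cpp_agent import MessagesFormatterType

llm_models = {
    "Evangelion-7B.i1-Q5_K_M.gguf": ["mradermacher/Evangelion-7B-i1-GGUF", MessagesFormatterType.MISTRAL],
}

def fetch_model(filename: str, local_dir: str = "models"):
    # Look up the repo id and message formatter registered for this file.
    repo_id, chat_template = llm_models[filename]
    # Download only the requested GGUF file from the repo into local_dir.
    path = hf_hub_download(repo_id=repo_id, filename=filename, local_dir=local_dir)
    return path, chat_template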