Spaces: Running on Zero
Upload 3 files
- README.md +1 -1
- genimage.py +2 -4
- llmdolphin.py +3 -0
README.md
CHANGED
@@ -4,7 +4,7 @@ emoji: ππ»
 colorFrom: red
 colorTo: purple
 sdk: gradio
-sdk_version: 4.
+sdk_version: 4.39.0
 app_file: app.py
 pinned: false
 license: apache-2.0
genimage.py
CHANGED
@@ -44,10 +44,8 @@ def generate_image(prompt, neg_prompt):
     }
     try:
         images = pipe(
-            prompt=prompt,
-
-            negative_prompt=neg_prompt,
-            negative_prompt_2="lowres, (bad), text, error, fewer, extra, missing, worst quality, jpeg artifacts, low quality, watermark, unfinished, displeasing, oldest, early, chromatic aberration, signature, extra digits, artistic error, username, scan, [abstract], photo, deformed, disfigured, low contrast, photo, deformed, disfigured, low contrast",
+            prompt=prompt + ", masterpiece, best quality, very aesthetic, absurdres",
+            negative_prompt=neg_prompt + ", lowres, (bad), text, error, fewer, extra, missing, worst quality, jpeg artifacts, low quality, watermark, unfinished, displeasing, oldest, early, chromatic aberration, signature, extra digits, artistic error, username, scan, [abstract], photo, deformed, disfigured, low contrast, photo, deformed, disfigured, low contrast",
             width=1024,
             height=1024,
             guidance_scale=7.5,
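The change folds the fixed quality tags and the default negative tags directly into the user-supplied strings instead of routing the negative tags through negative_prompt_2. Below is a minimal sketch of the resulting call pattern, assuming pipe is a diffusers StableDiffusionXLPipeline; the checkpoint id, the shortened tag lists, and the step count are illustrative assumptions, not taken from the diff.

import torch
from diffusers import StableDiffusionXLPipeline  # assumption: `pipe` in genimage.py is an SDXL pipeline

# Placeholder checkpoint; the model actually loaded by the Space is not shown in this diff.
pipe = StableDiffusionXLPipeline.from_pretrained(
    "cagliostrolab/animagine-xl-3.1", torch_dtype=torch.float16
).to("cuda")

QUALITY_TAGS = ", masterpiece, best quality, very aesthetic, absurdres"
NEGATIVE_TAGS = (
    ", lowres, (bad), text, error, fewer, extra, missing, worst quality, "
    "jpeg artifacts, low quality, watermark, unfinished, displeasing"
)

def generate_image(prompt, neg_prompt):
    # Append the fixed tag strings to whatever the user typed, as the diff now does inline.
    result = pipe(
        prompt=prompt + QUALITY_TAGS,
        negative_prompt=neg_prompt + NEGATIVE_TAGS,
        width=1024,
        height=1024,
        guidance_scale=7.5,
        num_inference_steps=28,  # assumption: steps are configured elsewhere in the real file
    )
    return result.images

Concatenating the tags at call time keeps the Gradio inputs clean while still applying the same quality vocabulary to every request.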
llmdolphin.py
CHANGED
@@ -16,6 +16,9 @@ llm_models = {
     "suzume-llama-3-8B-japanese.Q4_K_M.gguf": ["PrunaAI/lightblue-suzume-llama-3-8B-japanese-GGUF-smashed", MessagesFormatterType.LLAMA_3],
     "suzume-llama-3-8B-multilingual-orpo-borda-top25.Q4_K_M.gguf": ["RichardErkhov/lightblue_-_suzume-llama-3-8B-multilingual-orpo-borda-top25-gguf", MessagesFormatterType.LLAMA_3],
     "Bungo-L3-8B.Q5_K_M.gguf": ["backyardai/Bungo-L3-8B-GGUF", MessagesFormatterType.LLAMA_3],
+    "Meta-Llama-3.1-8B-Instruct-abliterated.i1-Q5_K_M.gguf": ["mradermacher/Meta-Llama-3.1-8B-Instruct-abliterated-i1-GGUF", MessagesFormatterType.LLAMA_3],
+    "pstella-16b.Q5_K_M.gguf": ["mradermacher/pstella-16b-GGUF", MessagesFormatterType.LLAMA_3],
+    "DarkIdol-Llama-3.1-8B-Instruct-1.0-Uncensored.i1-Q5_K_M.gguf": ["mradermacher/DarkIdol-Llama-3.1-8B-Instruct-1.0-Uncensored-i1-GGUF", MessagesFormatterType.LLAMA_3],
     "L3-8B-Tamamo-v1.i1-Q5_K_M.gguf": ["mradermacher/L3-8B-Tamamo-v1-i1-GGUF", MessagesFormatterType.LLAMA_3],
     "Mistral-Nemo-Instruct-2407-Q4_K_M.gguf": ["bartowski/Mistral-Nemo-Instruct-2407-GGUF", MessagesFormatterType.MISTRAL],
     "ghost-8b-beta.q5_k.gguf": ["ZeroWw/ghost-8b-beta-GGUF", MessagesFormatterType.MISTRAL],
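Each llm_models entry maps a GGUF filename to a Hugging Face repo id plus the chat template the model expects. A rough sketch of how one of the newly added entries might be loaded, assuming llama-cpp-python and llama-cpp-agent; the download step, context size, and agent arguments below are assumptions, since the loader code is not part of this diff.

from huggingface_hub import hf_hub_download
from llama_cpp import Llama
from llama_cpp_agent import LlamaCppAgent, MessagesFormatterType
from llama_cpp_agent.providers import LlamaCppPythonProvider

# One of the entries added in this commit: filename -> [repo_id, formatter].
llm_models = {
    "Meta-Llama-3.1-8B-Instruct-abliterated.i1-Q5_K_M.gguf": ["mradermacher/Meta-Llama-3.1-8B-Instruct-abliterated-i1-GGUF", MessagesFormatterType.LLAMA_3],
}

filename = "Meta-Llama-3.1-8B-Instruct-abliterated.i1-Q5_K_M.gguf"
repo_id, formatter = llm_models[filename]

# Pull the quantized weights from the Hub, then hand the matching chat formatter to the agent.
model_path = hf_hub_download(repo_id=repo_id, filename=filename, local_dir="models")
llm = Llama(model_path=model_path, n_ctx=4096, n_gpu_layers=-1)  # assumed context/offload settings
agent = LlamaCppAgent(
    LlamaCppPythonProvider(llm),
    system_prompt="You are a helpful assistant.",
    predefined_messages_formatter_type=formatter,
)
print(agent.get_chat_response("Hello!"))

Keeping the formatter next to the repo id in the dict lets the Space switch models without touching the chat-template logic.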