Spaces:
Runtime error
Runtime error
ffreemt
committed on
Commit
•
e4b455b
1
Parent(s):
8708b41
README.md
CHANGED
@@ -1,5 +1,5 @@
|
|
1 |
---
|
2 |
-
title:
|
3 |
emoji: π
|
4 |
colorFrom: green
|
5 |
colorTo: green
|
|
|
1 |
---
|
2 |
+
title: TheBloke/WizardCoder-15B-1.0-GGML
|
3 |
emoji: π
|
4 |
colorFrom: green
|
5 |
colorTo: green
|
app.py
CHANGED
@@ -1,13 +1,13 @@
|
|
1 |
-
"""Run codes"""
|
2 |
# pylint: disable=line-too-long, broad-exception-caught, invalid-name, missing-function-docstring, too-many-instance-attributes, missing-class-docstring
|
3 |
# import gradio
|
4 |
|
5 |
# gradio.load("models/WizardLM/WizardCoder-15B-V1.0").launch()
|
6 |
|
7 |
import os
|
8 |
-
from pathlib import Path
|
9 |
import time
|
10 |
from dataclasses import asdict, dataclass
|
|
|
11 |
from types import SimpleNamespace
|
12 |
|
13 |
import gradio as gr
|
@@ -164,7 +164,7 @@ def generate(
|
|
164 |
system_prompt: str = default_system_prompt,
|
165 |
user_prompt: str = "",
|
166 |
):
|
167 |
-
"""Run model inference, will return a Generator if streaming is true"""
|
168 |
# if not user_prompt.strip():
|
169 |
return llm(
|
170 |
format_prompt(
|
@@ -211,7 +211,7 @@ logger.info("done dl")
|
|
211 |
_ = """
|
212 |
llm = AutoModelForCausalLM.from_pretrained(
|
213 |
"TheBloke/WizardCoder-15B-1.0-GGML",
|
214 |
-
model_file="",
|
215 |
model_type="starcoder",
|
216 |
threads=8
|
217 |
)
|
@@ -222,16 +222,16 @@ logger.debug(f"{os.cpu_count()=}")
|
|
222 |
if "WizardCoder" in MODEL_FILENAME:
|
223 |
_ = Path("models", MODEL_FILENAME).absolute().as_posix()
|
224 |
LLM = AutoModelForCausalLM.from_pretrained(
|
225 |
-
|
226 |
model_file=_,
|
227 |
model_type="starcoder",
|
228 |
threads=os.cpu_count() // 2, # type: ignore
|
229 |
)
|
230 |
# LLM = AutoModelForCausalLM.from_pretrained(
|
231 |
-
|
232 |
-
|
233 |
-
|
234 |
-
|
235 |
# )
|
236 |
|
237 |
cpu_count = os.cpu_count() // 2 # type: ignore
|
|
|
1 |
+
"""Run codes."""
|
2 |
# pylint: disable=line-too-long, broad-exception-caught, invalid-name, missing-function-docstring, too-many-instance-attributes, missing-class-docstring
|
3 |
# import gradio
|
4 |
|
5 |
# gradio.load("models/WizardLM/WizardCoder-15B-V1.0").launch()
|
6 |
|
7 |
import os
|
|
|
8 |
import time
|
9 |
from dataclasses import asdict, dataclass
|
10 |
+
from pathlib import Path
|
11 |
from types import SimpleNamespace
|
12 |
|
13 |
import gradio as gr
|
|
|
164 |
system_prompt: str = default_system_prompt,
|
165 |
user_prompt: str = "",
|
166 |
):
|
167 |
+
"""Run model inference, will return a Generator if streaming is true."""
|
168 |
# if not user_prompt.strip():
|
169 |
return llm(
|
170 |
format_prompt(
|
|
|
211 |
_ = """
|
212 |
llm = AutoModelForCausalLM.from_pretrained(
|
213 |
"TheBloke/WizardCoder-15B-1.0-GGML",
|
214 |
+
model_file="WizardCoder-15B-1.0.ggmlv3.q4_0.bin",
|
215 |
model_type="starcoder",
|
216 |
threads=8
|
217 |
)
|
|
|
222 |
if "WizardCoder" in MODEL_FILENAME:
|
223 |
_ = Path("models", MODEL_FILENAME).absolute().as_posix()
|
224 |
LLM = AutoModelForCausalLM.from_pretrained(
|
225 |
+
REPO_ID,
|
226 |
model_file=_,
|
227 |
model_type="starcoder",
|
228 |
threads=os.cpu_count() // 2, # type: ignore
|
229 |
)
|
230 |
# LLM = AutoModelForCausalLM.from_pretrained(
|
231 |
+
# "TheBloke/WizardCoder-15B-1.0-GGML",
|
232 |
+
# model_file=MODEL_FILENAME,
|
233 |
+
# model_type="starcoder",
|
234 |
+
# threads=os.cpu_count() // 2 # type: ignore
|
235 |
# )
|
236 |
|
237 |
cpu_count = os.cpu_count() // 2 # type: ignore
|