ffreemt committed · Commit 3d7856f · 1 Parent(s): 5b49de4
Update torch.cuda.is_available() for device/device_type
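In short, gen_local_llm now checks torch.cuda.is_available() before loading the model, so the Space falls back to a plain CPU load when no GPU is present. A minimal sketch of the pattern, assuming only torch and transformers; the load_llm wrapper and the bare pipeline call are illustrative, and app.py passes its own generation settings:

# Minimal sketch of the device-gated loading pattern (illustrative wrapper,
# not the exact gen_local_llm from app.py).
import torch
from transformers import LlamaForCausalLM, LlamaTokenizer, pipeline

def load_llm(model_id: str = "TheBloke/vicuna-7B-1.1-HF"):
    tokenizer = LlamaTokenizer.from_pretrained(model_id)
    if torch.cuda.is_available():
        # GPU present: low_cpu_mem_usage streams weights in; 8-bit / fp16 /
        # device_map can be enabled here if the card supports them.
        model = LlamaForCausalLM.from_pretrained(model_id, low_cpu_mem_usage=True)
    else:
        # CPU-only (e.g. a free Hugging Face Space): plain load.
        model = LlamaForCausalLM.from_pretrained(model_id)
    return pipeline("text-generation", model=model, tokenizer=tokenizer)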
app.py CHANGED
@@ -66,7 +66,7 @@ CHROMA_SETTINGS = Settings(
     persist_directory=PERSIST_DIRECTORY,
     anonymized_telemetry=False,
 )
-ns = SimpleNamespace(qa=None, ingest_done=None)
+ns = SimpleNamespace(qa=None, ingest_done=None, files_info=None)
 
 
 def load_single_document(file_path: str | Path) -> Document:
@@ -178,7 +178,7 @@ def upload_files(files):
 
     # flag ns.qadone
     ns.ingest_done = True
-
+    ns.files_info = res
 
     # ns.qa = load_qa()
 
@@ -257,13 +257,16 @@ def gen_local_llm(model_id="TheBloke/vicuna-7B-1.1-HF"):
     localgpt run_localgpt
     """
     tokenizer = LlamaTokenizer.from_pretrained(model_id)
-
-
-
-
-
-
-
+    if torch.cuda.is_available():
+        model = LlamaForCausalLM.from_pretrained(
+            model_id,
+            # load_in_8bit=True, # set these options if your GPU supports them!
+            # device_map=1#'auto',
+            # torch_dtype=torch.float16,
+            low_cpu_mem_usage=True
+        )
+    else:
+        model = LlamaForCausalLM.from_pretrained(model_id)
 
     pipe = pipeline(
         "text-generation",
@@ -342,6 +345,10 @@ def main():
         other text docs). It
         takes quite a while to ingest docs (10-30 min. depending
         on net, RAM, CPU etc.).
+
+        Send empty query (hit Enter) to check embedding status and files info ([filename, numb of chars])
+
+        Homepage: https://huggingface.co/spaces/mikeee/localgpt
         """
     gr.Markdown(dedent(_))
 
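For context on the ns.files_info addition and the new hint about sending an empty query: a rough, self-contained sketch of how module-level SimpleNamespace state can back that status check. upload_files and respond below are simplified stand-ins (and the character counting is illustrative), not the actual Gradio handlers in app.py.

# Rough sketch only: simplified stand-ins for app.py's callbacks.
from pathlib import Path
from types import SimpleNamespace

# Module-level state shared between the upload and query callbacks.
ns = SimpleNamespace(qa=None, ingest_done=None, files_info=None)

def upload_files(paths):
    # files_info: [filename, number of chars] per uploaded file.
    res = [[Path(p).name, len(Path(p).read_text(errors="ignore"))] for p in paths]
    ns.ingest_done = True
    ns.files_info = res
    return res

def respond(query: str) -> str:
    # An empty query (just hit Enter) reports embedding status and files info.
    if not query.strip():
        return f"ingest_done: {ns.ingest_done}\nfiles_info: {ns.files_info}"
    if ns.qa is None:
        return "Still ingesting / loading the model, please wait."
    return ns.qa(query)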