Spaces: Running on Zero
MaziyarPanahi committed · Commit d08d7a9 · Parent(s): bdaba80

Update app.py
app.py CHANGED
@@ -39,11 +39,11 @@ def bot_streaming(message, history):
         gr.Error("You need to upload an image for LLaVA to work.")
     prompt=f"<|start_header_id|>user<|end_header_id|>\n\n<image>\n{message['text']}<|eot_id|><|start_header_id|>assistant<|end_header_id|>\n\n"
     print(f"prompt: {prompt}")
-    image = Image.open(
-    inputs = processor(prompt,
+    image = Image.open(requests.get(image_file, stream=True).raw)
+    inputs = processor(prompt, raw_image, return_tensors='pt').to(0, torch.float16)
 
     streamer = TextIteratorStreamer(processor, **{"skip_special_tokens": True})
-    generation_kwargs = dict(inputs, streamer=streamer, max_new_tokens=
+    generation_kwargs = dict(inputs, streamer=streamer, max_new_tokens=200)
     generated_text = ""
 
     thread = Thread(target=model.generate, kwargs=generation_kwargs)
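For context, here is a minimal, self-contained sketch of the streaming setup this hunk feeds into: the image is fetched over HTTP, packed with the prompt by the processor, and model.generate runs in a background thread while TextIteratorStreamer yields partial text. The checkpoint id, the stream_answer wrapper, and the keyword-argument processor call are assumptions for illustration; only the prompt template, the streamer/thread wiring, and max_new_tokens=200 come from the diff itself.

```python
from threading import Thread

import requests
import torch
from PIL import Image
from transformers import AutoProcessor, LlavaForConditionalGeneration, TextIteratorStreamer

# Placeholder checkpoint: the Space's real model id is not visible in this hunk.
# This is a Llama-3-based LLaVA checkpoint that matches the prompt template below.
model_id = "xtuner/llava-llama-3-8b-v1_1-transformers"

processor = AutoProcessor.from_pretrained(model_id)
model = LlavaForConditionalGeneration.from_pretrained(model_id, torch_dtype=torch.float16).to(0)


def stream_answer(image_url: str, question: str):
    # Same prompt template the diff uses (Llama-3 style chat headers);
    # it has to match whatever checkpoint is actually loaded.
    prompt = (
        "<|start_header_id|>user<|end_header_id|>\n\n<image>\n"
        f"{question}<|eot_id|><|start_header_id|>assistant<|end_header_id|>\n\n"
    )

    # Fetch the image over HTTP and build model inputs, as in the "+" lines above
    # (keyword arguments used here to avoid relying on positional order).
    image = Image.open(requests.get(image_url, stream=True).raw)
    inputs = processor(text=prompt, images=image, return_tensors="pt").to(0, torch.float16)

    # TextIteratorStreamer turns generate() into an iterator of decoded text chunks;
    # running generate() in a background thread lets us yield as tokens arrive.
    streamer = TextIteratorStreamer(processor, **{"skip_special_tokens": True})
    generation_kwargs = dict(inputs, streamer=streamer, max_new_tokens=200)
    thread = Thread(target=model.generate, kwargs=generation_kwargs)
    thread.start()

    generated_text = ""
    for new_text in streamer:
        generated_text += new_text
        yield generated_text  # partial answer so far, suitable for a Gradio generator
    thread.join()
```

In app.py the equivalent loop lives inside bot_streaming(message, history); the hunk above shows only the middle of that function.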