merve HF staff committed on
Commit
f04732f
1 Parent(s): dad4f2a

Create app.py

Browse files
Files changed (1) hide show
  1. app.py +55 -0
app.py ADDED
@@ -0,0 +1,55 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import gradio as gr
2
+ from transformers import LlavaNextProcessor, LlavaNextForConditionalGeneration, TextIteratorStreamer
3
+ from threading import Thread
4
+ import re
5
+ import time
6
+ from PIL import Image
7
+ import torch
8
+ import spaces
9
+
10
# Load the LLaVA-NeXT (v1.6, Mistral-7B) processor and model once at startup.
MODEL_ID = "llava-hf/llava-v1.6-mistral-7b-hf"

processor = LlavaNextProcessor.from_pretrained(MODEL_ID)

# fp16 weights + low_cpu_mem_usage keep peak host memory down while loading;
# the model lives on the first CUDA device for the lifetime of the app.
model = LlavaNextForConditionalGeneration.from_pretrained(
    MODEL_ID,
    torch_dtype=torch.float16,
    low_cpu_mem_usage=True,
).to("cuda:0")
@spaces.GPU
def bot_streaming(message, history):
    """Stream a LLaVA-NeXT answer for a multimodal chat turn.

    Parameters
    ----------
    message : dict
        Gradio multimodal message with "text" and "files" keys; each file
        entry is expected to carry a "path" (presumably a gradio FileData
        dict — confirm against the installed gradio version).
    history : list
        Past chat turns; an image upload appears as a tuple in the user
        slot of a turn.

    Yields
    ------
    str
        The generated answer so far, with the echoed prompt stripped.

    Raises
    ------
    gr.Error
        If no image is attached to this turn and none exists in history.
    """
    print(message)
    image = None
    if message["files"]:
        # Most recent upload from this turn wins.
        image = message["files"][-1]["path"]
    else:
        # No upload this turn: fall back to the last image seen in any
        # earlier turn (gradio keeps uploads as tuples in the user slot).
        for hist in history:
            if isinstance(hist[0], tuple):
                image = hist[0][0]
    if image is None:
        # Previously this fell through to an UnboundLocalError; fail with a
        # clear, user-visible message instead.
        raise gr.Error("Please upload an image first so the model has something to look at.")

    prompt = f"[INST] <image>\n{message['text']} [/INST]"
    image = Image.open(image).convert("RGB")
    inputs = processor(prompt, image, return_tensors="pt").to("cuda:0")

    # Run generation on a background thread and consume tokens here as they
    # arrive through the streamer.
    streamer = TextIteratorStreamer(processor, skip_special_tokens=True)
    generation_kwargs = dict(inputs, streamer=streamer, max_new_tokens=100)
    thread = Thread(target=model.generate, kwargs=generation_kwargs)
    thread.start()

    # With special tokens skipped, the streamer echoes the prompt without the
    # <image> tag; slicing by this text removes it from the visible output.
    text_prompt = f"[INST] \n{message['text']} [/INST]"

    buffer = ""
    for new_text in streamer:
        buffer += new_text
        time.sleep(0.04)  # tiny delay smooths the UI streaming cadence
        yield buffer[len(text_prompt):]
# Build the multimodal chat UI and start the server.
example_turns = [
    {"text": "What is on the flower?", "files": ["./bee.jpg"]},
    {"text": "How to make this pastry?", "files": ["./baklava.png"]},
]

demo = gr.ChatInterface(
    fn=bot_streaming,
    title="LLaVA Next",
    examples=example_turns,
    description="Try [LLaVA Next](https://huggingface.co/papers/2310.03744) in this demo. Upload an image and start chatting about it, or simply try one of the examples below.",
    stop_btn="Stop Generation",
    multimodal=True,
)
demo.launch(debug=True)