Alaa Awad committed
Commit 185264b
Parent(s): 5cf5f21

with clip
app.py
CHANGED
@@ -1,5 +1,18 @@
 import streamlit as st
 
-x = st.slider('Select a value')
-st.write(x, 'squared is', x * x)
 
+from PIL import Image
+import requests
+
+from transformers import CLIPProcessor, CLIPModel
+
+model = CLIPModel.from_pretrained("openai/clip-vit-base-patch32")
+processor = CLIPProcessor.from_pretrained("openai/clip-vit-base-patch32")
+
+url = "http://images.cocodataset.org/val2017/000000039769.jpg"
+image = Image.open(requests.get(url, stream=True).raw)
+
+inputs = processor(text=["a photo of a cat", "a photo of a dog"], images=image, return_tensors="pt", padding=True)
+
+outputs = model(**inputs)
+st.write(outputs)
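
Note (not part of this commit): st.write(outputs) dumps the raw CLIP output object into the Streamlit app, which is hard to read. A minimal sketch of displaying per-label probabilities instead, assuming the outputs variable defined in the new app.py above:

# logits_per_image holds image-text similarity scores; softmax over the
# text axis turns them into probabilities for the two candidate captions
probs = outputs.logits_per_image.softmax(dim=1)
st.write("cat:", float(probs[0][0]), "dog:", float(probs[0][1]))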