marcelcastrobr committed
Commit 8cf7f66
1 Parent(s): b24b86c

syncing with github actions

This view is limited to 50 files because the commit contains too many changes; see the raw diff for the full file list.
.gitattributes ADDED
@@ -0,0 +1 @@
+ unsplash-25k-photos-embeddings.pkl filter=lfs diff=lfs merge=lfs -text
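Since `app.py` below unpickles this file as an `(img_names, img_emb)` tuple, a quick way to sanity-check the LFS-tracked artifact locally is a minimal sketch like the following (the 512-dimension figure assumes CLIP ViT-B/32 embeddings):

```python
import pickle

# Inspect the LFS-tracked embeddings file, assuming the
# (img_names, img_emb) tuple layout that app.py relies on.
with open("unsplash-25k-photos-embeddings.pkl", "rb") as fIn:
    img_names, img_emb = pickle.load(fIn)

print(len(img_names))  # number of photo filenames (~25k for this dataset)
print(img_emb.shape)   # embedding matrix: one 512-dim row per photo (ViT-B/32)
```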
README.md CHANGED
@@ -1,2 +1,37 @@
- # CLIP-image-search
- Deployment of CLIP model using Hugging Face space to perform image search.
+ ---
+ title: CLIP Image Search
+ emoji: 📸
+ colorFrom: pink
+ colorTo: pink
+ sdk: gradio
+ app_file: app.py
+ pinned: false
+ ---
+
+ # Configuration
+
+ `title`: _string_
+ Display title for the Space
+
+ `emoji`: _string_
+ Space emoji (emoji-only character allowed)
+
+ `colorFrom`: _string_
+ Color for the thumbnail gradient (red, yellow, green, blue, indigo, purple, pink, gray)
+
+ `colorTo`: _string_
+ Color for the thumbnail gradient (red, yellow, green, blue, indigo, purple, pink, gray)
+
+ `sdk`: _string_
+ Can be either `gradio` or `streamlit`
+
+ `sdk_version`: _string_
+ Only applicable for the `streamlit` SDK.
+ See the [doc](https://hf.co/docs/hub/spaces) for more info on supported versions.
+
+ `app_file`: _string_
+ Path to your main application file (which contains either `gradio` or `streamlit` Python code).
+ Path is relative to the root of the repository.
+
+ `pinned`: _boolean_
+ Whether the Space stays on top of your list.
app.py ADDED
@@ -0,0 +1,64 @@
+ import gradio as gr
+ from transformers import CLIPProcessor, CLIPModel, CLIPTokenizer
+ from sentence_transformers import util
+ import pickle
+ from PIL import Image
+ import os
+
+
+ ## Define model
+ model = CLIPModel.from_pretrained("openai/clip-vit-base-patch32")
+ processor = CLIPProcessor.from_pretrained("openai/clip-vit-base-patch32")
+ tokenizer = CLIPTokenizer.from_pretrained("openai/clip-vit-base-patch32")
+
+
+ # Open the precomputed image embeddings.
+ emb_filename = 'unsplash-25k-photos-embeddings.pkl'
+ with open(emb_filename, 'rb') as fIn:
+     img_names, img_emb = pickle.load(fIn)
+
+
+ def search_text(query, top_k=1):
+     """Return the images whose embeddings best match a text query.
+
+     Args:
+         query (str): the query you want to search for.
+         top_k (int, optional): number of images to return. Defaults to 1.
+
+     Returns:
+         list: list of PIL images related to the query.
+     """
+     # First, we encode the query.
+     inputs = tokenizer([query], padding=True, return_tensors="pt")
+     query_emb = model.get_text_features(**inputs)
+
+     # Then, we use the util.semantic_search function, which computes the
+     # cosine-similarity between the query embedding and all image embeddings.
+     # It then returns the top_k highest-ranked images, which we output.
+     hits = util.semantic_search(query_emb, img_emb, top_k=top_k)[0]
+
+     images = []
+     for hit in hits:
+         print(img_names[hit['corpus_id']])
+         img = Image.open(os.path.join("photos/", img_names[hit['corpus_id']]))
+         images.append(img)
+
+     return images
+
+ iface = gr.Interface(
+     title="Text to Image using CLIP Model",
+     description="Gradio demo for the CLIP model. \n This demo is based on an assessment for the 🤗 Hugging Face course 2. \n To use it, simply write which image you are looking for. Read more at the links below.",
+     fn=search_text,
+     inputs=[gr.inputs.Textbox(lines=2,
+                               label="Write what you are looking for...",
+                               placeholder="Name Here..."),
+             gr.inputs.Slider(1, 5, step=1)],
+     outputs=gr.outputs.Carousel(gr.outputs.Image(type="pil")),
+     examples=[[("Dog on the beach"), 2],
+               [("Paris during night."), 1],
+               [("A cute kangaroo"), 5],
+               [("Dois cachorros"), 2],
+               [("un homme marchant sur le parc"), 3]]
+ ).launch(debug=True)
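The app assumes `unsplash-25k-photos-embeddings.pkl` already exists. Below is a hypothetical sketch of how such a file could be produced with the sentence-transformers `clip-ViT-B-32` wrapper, which shares its ViT-B/32 weights with the `openai/clip-vit-base-patch32` checkpoint used above; the pickle shipped in this commit may have been generated differently:

```python
import os
import pickle

import torch
from PIL import Image
from sentence_transformers import SentenceTransformer

# Hypothetical sketch: the actual embeddings file in this commit may have
# been built another way. clip-ViT-B-32 encodes images into the same
# 512-dim space as the text features computed in app.py.
model = SentenceTransformer("clip-ViT-B-32")

img_names = sorted(os.listdir("photos/"))

# Encode in batches so thousands of decoded images are never held in memory.
batch_size = 128
chunks = []
for i in range(0, len(img_names), batch_size):
    batch = [Image.open(os.path.join("photos/", n)) for n in img_names[i:i + batch_size]]
    chunks.append(model.encode(batch, convert_to_tensor=True))
    for img in batch:
        img.close()

img_emb = torch.cat(chunks)

with open("unsplash-25k-photos-embeddings.pkl", "wb") as fOut:
    pickle.dump((img_names, img_emb), fOut)
```

Because both encoders come from the same CLIP model, `model.get_text_features` in `app.py` produces vectors in the same space, which is what makes the `util.semantic_search` cosine-similarity lookup meaningful.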
photos/--2IBUMom1I.jpg ADDED
photos/--6JlGcHl-w.jpg ADDED
photos/--Jy_8mvs4E.jpg ADDED
photos/--SDX4KWIbA.jpg ADDED
photos/--Tn3E5ZtfQ.jpg ADDED
photos/--e3kJUMSZw.jpg ADDED
photos/--kGuWTwn48.jpg ADDED
photos/--lzOIJ-a4U.jpg ADDED
photos/-0YZgPxq04k.jpg ADDED
photos/-0_ww2ACIw8.jpg ADDED
photos/-0eINgEiNw4.jpg ADDED
photos/-12cgSu9HW0.jpg ADDED
photos/-1a83VD65ss.jpg ADDED
photos/-1lMrIXAn6Q.jpg ADDED
photos/-1qb8SIBzKY.jpg ADDED
photos/-2ii0_ctxpQ.jpg ADDED
photos/-2loC3xzDF8.jpg ADDED
photos/-2pFSIxX9ow.jpg ADDED
photos/-3IZERJGsm4.jpg ADDED
photos/-3LtGq_RPcY.jpg ADDED
photos/-3cTY-Q6k88.jpg ADDED
photos/-3l6KX8uCAM.jpg ADDED
photos/-3qSsolbivo.jpg ADDED
photos/-3uIUqsR-Rw.jpg ADDED
photos/-43qvNitz5k.jpg ADDED
photos/-4AR-vVjAbM.jpg ADDED
photos/-4UwhAr4KYg.jpg ADDED
photos/-4WLn9giArE.jpg ADDED
photos/-4qCLz3r1s8.jpg ADDED
photos/-5WWw6DeQ8w.jpg ADDED
photos/-5_tGk7SUuM.jpg ADDED
photos/-5eZ1-Ie68w.jpg ADDED
photos/-6-uqd2hMCg.jpg ADDED
photos/-6HgkmXTWhs.jpg ADDED
photos/-6JK87e42iQ.jpg ADDED
photos/-6UNL6Ghn_c.jpg ADDED
photos/-6_jB6qjwCQ.jpg ADDED
photos/-6e6HVEzgGs.jpg ADDED
photos/-6h-oYfdZDA.jpg ADDED
photos/-6hvB84fyYA.jpg ADDED
photos/-6i6a23H5Ho.jpg ADDED
photos/-7FQ56tDBWQ.jpg ADDED
photos/-7JAEi4PhTU.jpg ADDED
photos/-7Mfzok9LNk.jpg ADDED
photos/-7QeX7V2YLY.jpg ADDED
photos/-7_yLivAnMc.jpg ADDED
photos/-7iCZvSYt6M.jpg ADDED