sashavor committed on
Commit
19cb4eb
1 Parent(s): 8fd2365

adding labels

Browse files
Files changed (1) hide show
  1. app.py +22 -12
app.py CHANGED
@@ -8,7 +8,7 @@ from transformers import AutoModel, AutoFeatureExtractor
8
  seed = 42
9
 
10
  # Only runs once when the script is first run.
11
- with open("index_768.pickle", "rb") as handle:
12
  index = pickle.load(handle)
13
 
14
  # Load model for computing embeddings.
@@ -26,21 +26,31 @@ def query(image, top_k=4):
26
  embedding = model_output.pooler_output.detach()
27
  results = index.query(embedding, k=top_k)
28
  inx = results[0][0].tolist()
 
29
  images = ds.select(inx)["image"]
30
  captions = ds.select(inx)["name"]
31
  images_with_captions = [(i, c) for i, c in zip(images,captions)]
32
- return images_with_captions
 
 
33
 
34
 
35
- title = "Find my Butterfly 🦋"
36
- description = "Use this Space to find your butterfly, based on the [iNaturalist butterfly dataset](https://huggingface.co/datasets/huggan/inat_butterflies_top10k)!"
37
 
38
 
39
- gr.Interface(
40
- query,
41
- inputs=[gr.Image(type="pil")],
42
- outputs=gr.Gallery().style(grid=[2], height="auto"),
43
- title=title,
44
- description=description,
45
- examples=[["elton.jpg"],["ken.jpg"],["gaga.jpg"],["taylor.jpg"]],
46
- ).launch()
 
 
 
 
 
 
 
 
 
 
8
  seed = 42
9
 
10
  # Only runs once when the script is first run.
11
+ with open("index_768_cosine.pickle", "rb") as handle:
12
  index = pickle.load(handle)
13
 
14
  # Load model for computing embeddings.
 
26
  embedding = model_output.pooler_output.detach()
27
  results = index.query(embedding, k=top_k)
28
  inx = results[0][0].tolist()
29
+ logits = results[1][0].tolist()
30
  images = ds.select(inx)["image"]
31
  captions = ds.select(inx)["name"]
32
  images_with_captions = [(i, c) for i, c in zip(images,captions)]
33
+ labels_with_probs = dict(zip(captions,logits))
34
+ labels_with_probs = {k: 1- v for k, v in labels_with_probs.items()}
35
+ return images_with_captions, labels_with_probs
36
 
37
 
 
 
38
 
39
 
40
+
41
+ with gr.Blocks() as demo:
42
+ gr.Markdown("# Find my Butterfly 🦋")
43
+ gr.Markdown("## Use this Space to find your butterfly, based on the [iNaturalist butterfly dataset](https://huggingface.co/datasets/huggan/inat_butterflies_top10k)!")
44
+ examples=["elton.jpg", "ken.jpg", "gaga.jpg", "taylor.jpg"]
45
+ with gr.Row():
46
+ with gr.Column(min_width= 900):
47
+ inputs = gr.Image(shape=(800, 1600))
48
+ btn = gr.Button("Find my butterfly!")
49
+ with gr.Column():
50
+ outputs=gr.Gallery().style(grid=[2], height="auto")
51
+ labels = gr.Label()
52
+
53
+ btn.click(query, inputs, [outputs, labels])
54
+
55
+ demo.launch()
56
+