vinid committed on
Commit
22b4434
Β·
1 Parent(s): 470e04b

update text

Browse files
Files changed (1) hide show
  1. app.py +49 -1
app.py CHANGED
@@ -4,7 +4,25 @@ import subprocess
4
  import streamlit as st
5
  import numpy as np
6
  from PIL import Image
 
7
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
8
  @st.cache_resource
9
  def load_embedding_file():
10
  with open("embeddings_and_paths.pkl", "rb") as filino:
@@ -16,8 +34,10 @@ def load_embedding_file():
16
 
17
  fclip = FashionCLIP('fashion-clip')
18
 
19
- subprocess.run("git clone https://github.com/alexeygrigorev/clothing-dataset", shell=True)
 
20
 
 
21
  query = st.text_input("Enter a description of the clothing item you want to find", "a red dress")
22
 
23
  images, image_embeddings = load_embedding_file()
@@ -29,3 +49,31 @@ id_of_matched_object = np.argmax(text_embedding.dot(image_embeddings.T))
29
  image = Image.open(images[id_of_matched_object])
30
 
31
  st.image(image)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
4
  import streamlit as st
5
  import numpy as np
6
  from PIL import Image
7
+ import os
8
 
9
+ st.sidebar.write("# FashionCLIP Resources")
10
+ st.sidebar.write("We have several resources related to FashionCLIP.")
11
+ st.sidebar.write("## Documentation")
12
+ st.sidebar.write("* πŸ“š [Blog Post](https://towardsdatascience.com/teaching-clip-some-fashion-3005ac3fdcc3)")
13
+ st.sidebar.write("* πŸ“š [Paper](https://www.nature.com/articles/s41598-022-23052-9)")
14
+
15
+ st.sidebar.write("## Code")
16
+ st.sidebar.write("* πŸ“š [Repo](https://github.com/patrickjohncyh/fashion-clip)")
17
+ st.sidebar.write("* πŸ“š [Colab](https://colab.research.google.com/drive/1Z1hAxBnWjF76bEi9KQ6CMBBEmI_FVDrW#scrollTo=FzUQGwS1lhGS)")
18
+ st.sidebar.write("* πŸ“š [HuggingFace Weights](https://huggingface.co/patrickjohncyh/fashion-clip)")
19
+
20
+
21
+ st.write("# FashionCLIP. A Foundation Model for Fashion.")
22
+ st.write("This web app uses FashionCLIP to find clothing items based on a query of the item you want to find.")
23
+ st.write("The model is going to find the most similar item to your query, given a list of 5000 items that have been released [here](https://github.com/alexeygrigorev/clothing-dataset).")
24
+ st.write("Note that some queries might not return anything useful. This could be both due to model's limitation or to the fact that the item you are looking for is missing from the collection.")
25
+ st.write("You can find more about FashionCLIP on the [repo](https://github.com/patrickjohncyh/fashion-clip) or on our [paper](https://www.nature.com/articles/s41598-022-23052-9)")
26
  @st.cache_resource
27
  def load_embedding_file():
28
  with open("embeddings_and_paths.pkl", "rb") as filino:
 
34
 
35
  fclip = FashionCLIP('fashion-clip')
36
 
37
+ if not os.path.exists("clothing-dataset"):
38
+ subprocess.run("git clone https://github.com/alexeygrigorev/clothing-dataset", shell=True)
39
 
40
+ st.write("## Simple FashionCLIP search engine")
41
  query = st.text_input("Enter a description of the clothing item you want to find", "a red dress")
42
 
43
  images, image_embeddings = load_embedding_file()
 
49
  image = Image.open(images[id_of_matched_object])
50
 
51
  st.image(image)
52
+
53
+
54
+ st.write("If you use FashionCLIP in your work, please cite our paper:")
55
+ st.write("""
56
+ ```
57
+ @Article{Chia2022,
58
+ title="Contrastive language and vision learning of general fashion concepts",
59
+ author="Chia, Patrick John
60
+ and Attanasio, Giuseppe
61
+ and Bianchi, Federico
62
+ and Terragni, Silvia
63
+ and Magalh{\~a}es, Ana Rita
64
+ and Goncalves, Diogo
65
+ and Greco, Ciro
66
+ and Tagliabue, Jacopo",
67
+ journal="Scientific Reports",
68
+ year="2022",
69
+ month="Nov",
70
+ day="08",
71
+ volume="12",
72
+ number="1",
73
+ pages="18958",
74
+ issn="2045-2322",
75
+ doi="10.1038/s41598-022-23052-9",
76
+ url="https://doi.org/10.1038/s41598-022-23052-9"
77
+ ```
78
+ }""")
79
+