Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
@@ -87,33 +87,37 @@ def segment_clothing(img, clothes=["Hat", "Upper-clothes", "Skirt", "Pants", "Dr
|
|
87 |
return img_with_alpha.convert("RGB"), final_mask, detected_categories # Return detected categories
|
88 |
|
89 |
def find_similar_images(query_embedding, collection, top_k=5):
|
90 |
-
|
91 |
-
|
92 |
-
|
93 |
-
|
94 |
-
|
95 |
-
|
96 |
-
|
97 |
-
|
98 |
-
|
99 |
-
|
100 |
-
|
101 |
-
|
102 |
-
|
103 |
-
|
104 |
-
|
105 |
-
|
106 |
-
representative_image_url = image_urls[0] if image_urls else None
|
107 |
-
|
108 |
-
results.append({
|
109 |
-
'info': metadata,
|
110 |
-
'similarity': similarities[top_indices[idx]],
|
111 |
-
'image_url': representative_image_url
|
112 |
-
})
|
113 |
-
return results
|
114 |
-
except Exception as e:
|
115 |
-
st.error(f"Error during finding similar images: {str(e)}")
|
116 |
return []
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
117 |
|
118 |
|
119 |
|
@@ -172,6 +176,12 @@ elif st.session_state.step == 'show_results':
|
|
172 |
|
173 |
# Get the embedding of the segmented image
|
174 |
query_embedding = get_image_embedding(st.session_state.segmented_image) # Use the segmented image from session state
|
|
|
|
|
|
|
|
|
|
|
|
|
175 |
similar_images = find_similar_images(query_embedding, collection)
|
176 |
|
177 |
st.subheader("Similar Items:")
|
|
|
87 |
return img_with_alpha.convert("RGB"), final_mask, detected_categories # Return detected categories
|
88 |
|
89 |
def find_similar_images(query_embedding, collection, top_k=5):
    """Return the top_k items most similar to the query embedding.

    Parameters
    ----------
    query_embedding : np.ndarray
        1-D embedding vector of the (segmented) query image.
    collection :
        Chroma collection holding precomputed embeddings and metadatas.
        # assumes each metadata dict has a comma-separated 'image_url' key
        # — TODO confirm against the indexing code
    top_k : int, optional
        Number of most-similar items to return (default 5).

    Returns
    -------
    list[dict]
        One dict per match with keys 'info' (full metadata dict),
        'similarity' (cosine similarity score), and 'image_url'
        (first URL of the stored comma-separated list).
        Empty list when the collection has no embeddings or metadatas.
    """
    # Fetch embeddings and metadatas in ONE query instead of two separate
    # collection.get() calls, so both views come from the same snapshot
    # and we avoid a redundant round-trip.
    data = collection.get(include=['embeddings', 'metadatas'])

    all_embeddings = data['embeddings']
    if len(all_embeddings) == 0:
        st.error("No embeddings found in the collection.")
        return []

    all_data = data['metadatas']
    if len(all_data) == 0:
        st.error("No metadatas found in the collection.")
        return []

    database_embeddings = np.array(all_embeddings)

    # Cosine similarity of every stored embedding against the query,
    # then indices of the top_k highest scores, best first.
    similarities = cosine_similarity(
        database_embeddings, query_embedding.reshape(1, -1)
    ).squeeze()
    top_indices = np.argsort(similarities)[::-1][:top_k]

    results = []
    # Iterate the collection indices directly: this replaces the original
    # double indirection (similarities[top_indices[idx]] with idx counting
    # over a pre-built top_metadatas list) with a single, obvious lookup.
    for idx in top_indices:
        metadata = all_data[idx]
        # 'image_url' stores a comma-separated list; display the first one.
        image_urls = metadata['image_url'].split(',')
        representative_image_url = image_urls[0] if image_urls else None

        results.append({
            'info': metadata,
            'similarity': similarities[idx],
            'image_url': representative_image_url
        })
    return results
|
121 |
|
122 |
|
123 |
|
|
|
176 |
|
177 |
# Get the embedding of the segmented image
|
178 |
query_embedding = get_image_embedding(st.session_state.segmented_image) # Use the segmented image from session state
|
179 |
+
|
180 |
+
# ์ฟผ๋ฆฌ ์๋ฒ ๋ฉ์ด ์ ์์ ์ผ๋ก ์์ฑ๋์๋์ง ํ์ธ
|
181 |
+
if query_embedding is None or len(query_embedding) == 0:
|
182 |
+
st.error("Failed to generate query embedding.")
|
183 |
+
else:
|
184 |
+
st.write("Query embedding generated successfully.")
|
185 |
similar_images = find_similar_images(query_embedding, collection)
|
186 |
|
187 |
st.subheader("Similar Items:")
|