"""Streamlit app: find the clothing image that best matches a text query.

Embeds the user's query with FashionCLIP and compares it against
pre-computed image embeddings loaded from ``embeddings_and_paths.pkl``;
the best-matching image (highest dot-product similarity) is displayed.
"""

import pickle
import subprocess
from pathlib import Path

import numpy as np
import streamlit as st
from PIL import Image

from fashion_clip.fashion_clip import FashionCLIP


@st.cache_resource
def load_model():
    """Load the FashionCLIP model once per server process (cached by Streamlit)."""
    return FashionCLIP('fashion-clip')


@st.cache_resource
def load_embedding_file():
    """Return ``(image_paths, embeddings)`` from the pre-computed pickle file.

    Returns:
        tuple: list of image file paths and the matching embedding matrix
        (one row per image).
    """
    # NOTE(review): pickle.load is unsafe on untrusted data; acceptable here
    # because the file is produced locally by this project's indexing step.
    with open("embeddings_and_paths.pkl", "rb") as filino:
        data = pickle.load(filino)
    images = data["images_path"]
    embeddings = data["embeddings"]
    return images, embeddings


def ensure_dataset() -> None:
    """Clone the clothing dataset once; skip if the directory already exists.

    The original code re-ran ``git clone`` (via the shell) on every Streamlit
    rerun, which failed silently once the directory existed. Using an argv
    list with ``shell=False`` avoids shell quoting/injection issues, and
    ``check=True`` surfaces a failed clone instead of hiding it.
    """
    if not Path("clothing-dataset").exists():
        subprocess.run(
            ["git", "clone", "https://github.com/alexeygrigorev/clothing-dataset"],
            check=True,
        )


# Heavy resources: model is cached so Streamlit reruns don't reload it;
# the dataset clone is guarded so it happens at most once.
fclip = load_model()
ensure_dataset()

query = st.text_input("Enter a description of the clothing item you want to find", "a red dress")

images, image_embeddings = load_embedding_file()

# Encode the single query (batch size 32 matches the original call) and take
# the index of the image embedding with the highest dot-product similarity.
text_embedding = fclip.encode_text([query], 32)[0]
id_of_matched_object = np.argmax(text_embedding.dot(image_embeddings.T))

image = Image.open(images[id_of_matched_object])
st.image(image)