from dotenv import load_dotenv

load_dotenv()

import json
import os

from langchain.embeddings.openai import OpenAIEmbeddings
from langchain.llms import OpenAI
from langchain.vectorstores import DeepLake

from names import DATASET_ID, MODEL_ID
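# Environment variables expected in .env (inferred from this script): OPENAI_API_KEY
# for OpenAIEmbeddings, ACTIVELOOP_ORG_ID for the hub:// dataset path, and an
# Activeloop token such as ACTIVELOOP_TOKEN for write access. The token name is an
# assumption; this file only reads ACTIVELOOP_ORG_ID directly.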

def create_db(dataset_path: str, json_filepath: str) -> DeepLake:
    """Create a Deep Lake vector store from the lyrics JSON file."""
    with open(json_filepath, "r") as f:
        data = json.load(f)

    # Flatten the per-movie lyric entries into parallel lists of texts and metadata.
    texts = []
    metadatas = []
    for movie, lyrics in data.items():
        for lyric in lyrics:
            texts.append(lyric["text"])
            metadatas.append(
                {
                    "movie": movie,
                    "name": lyric["name"],
                    "embed_url": lyric["embed_url"],
                }
            )

    # Embed every lyric with OpenAI and persist the vectors to the Deep Lake dataset.
    embeddings = OpenAIEmbeddings(model=MODEL_ID)
    db = DeepLake.from_texts(
        texts, embeddings, metadatas=metadatas, dataset_path=dataset_path
    )
    return db
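
# The JSON file is assumed (from the loop in create_db) to map each movie title to a
# list of lyric records, e.g.:
#   {"Mulan": [{"name": "Reflection", "text": "...", "embed_url": "https://..."}, ...]}
# The exact contents of data/emotions_with_spotify_url.json are not shown here.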

def load_db(dataset_path: str, *args, **kwargs) -> DeepLake:
    """Open an existing Deep Lake dataset, forwarding extra args to the DeepLake constructor."""
    db = DeepLake(dataset_path, *args, **kwargs)
    return db
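
# A minimal usage sketch (an assumption, not part of this script): reopen the dataset
# for querying with the same embedding model and run a similarity search.
#
#   db = load_db(
#       dataset_path,
#       embedding_function=OpenAIEmbeddings(model=MODEL_ID),
#       read_only=True,
#   )
#   docs = db.similarity_search("a hopeful song about home", k=3)
#   print([d.metadata["name"] for d in docs])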

if __name__ == "__main__":
    # Build the dataset on the Activeloop hub under the org set in ACTIVELOOP_ORG_ID.
    dataset_path = f"hub://{os.environ['ACTIVELOOP_ORG_ID']}/{DATASET_ID}"
    create_db(dataset_path, "data/emotions_with_spotify_url.json")