from llama_index.embeddings.openai import OpenAIEmbedding
from llama_index.core import VectorStoreIndex
from llama_index.core import Settings
from llama_index.core import SimpleDirectoryReader
import config

# --- Global default -------------------------------------------------------
# Register one shared embedding model as the llama_index global default so
# any component that doesn't receive an explicit model falls back to it.
embed_model = OpenAIEmbedding()
Settings.embed_model = embed_model
documents = SimpleDirectoryReader("../data").load_data()

# --- Local usage ----------------------------------------------------------
# Use the public API (get_text_embedding / get_text_embeddings) rather than
# the private _-prefixed methods: the public wrappers are the stable
# interface and run the library's callback/instrumentation hooks.
# Reuse the single embed_model instance instead of constructing a new
# OpenAIEmbedding per call.
embedding = embed_model.get_text_embedding("hello world")
print(embedding)
print("===============")
embeddings = embed_model.get_text_embeddings(
    ["hello world", "hello world"]
)
print(embeddings)
print("===============")

# --- Per-index ------------------------------------------------------------
# An embedding model can also be passed explicitly to a single index,
# overriding the global default for just that index.
index = VectorStoreIndex.from_documents(documents, embed_model=embed_model)
print(index)
print("===============")