import logging
import sys
from typing import Dict

from langchain_openai import OpenAIEmbeddings
from ollama import embeddings

# from llama_index.embeddings import HuggingFaceEmbedding
# Configure root logging to stdout with a verbose format (source path + line).
logging.basicConfig(stream=sys.stdout, level=logging.DEBUG,format='%(asctime)s %(name)s [%(pathname)s line:%(lineno)d] %(levelname)s %(message)s')
# NOTE: basicConfig already attaches a stdout StreamHandler to the root logger,
# so the previous extra `addHandler(StreamHandler(...))` call made every log
# record print twice; it has been removed.

# Local Ollama daemon: OpenAI-compatible endpoint lives under /v1;
# "bge-m3" is the embedding model served by Ollama.
DEFAULT_OPENAI_API_BASE = "http://localhost:11434/v1"
DEFAULT_OPENAI_MODEL = "bge-m3:latest"

# embedding_model=OpenAIEmbeddings(openai_api_base=DEFAULT_OPENAI_API_BASE,
#                  openai_api_key="aaa",
#                  model=DEFAULT_OPENAI_MODEL,
#                  )
#
# query= "When was artificial intelligence founded"
#
# res=embedding_model.embed_query(query)
# print(res)


# query = '北京建筑大学是一所不亚于清华大学的好学校'
# # ollama embedding请求的构建
# response = embeddings(
#     # host="127.0.0.1:11434",
#     # host="127.0.0.1:11434",
#     model=DEFAULT_OPENAI_MODEL, prompt=query)
#
# print(response)
# # response回复可查看得到的response格式
# print(response['embedding'])
# # 查看embedding里的向量
# print(len(response['embedding']))
# # 查看维度

"""
"""
from langchain_ollama import OllamaEmbeddings

# 初始化模型
#OLLAMA_HOST
embeddings = OllamaEmbeddings(base_url="http://localhost:11434",model=DEFAULT_OPENAI_MODEL)

# 嵌入查询
query_embedding = embeddings.embed_query("My query to look up")
print(query_embedding)

# 异步嵌入文档
document_embeddings =  embeddings.aembed_documents(
    ["This is a content of the document", "This is another document"]
)
print(document_embeddings)
