import asyncio
import inspect
import os

from langchain_community.embeddings import DashScopeEmbeddings
from langchain_openai import ChatOpenAI
from langchain_openai import OpenAIEmbeddings
from loguru import logger
from openai.resources import AsyncCompletions

# Inspect which constructor fields each embeddings class exposes.
print("openai", OpenAIEmbeddings.model_fields.keys())
# Fixed label: this line prints DashScopeEmbeddings fields, not OpenAI's
# (the original said "openai" for both — copy-paste mistake).
print("dashscope", DashScopeEmbeddings.model_fields.keys())

# Show the keyword arguments accepted by the async completions endpoint.
sig = inspect.signature(AsyncCompletions.create)
print(list(sig.parameters.keys()))

# Placeholder for API keys under test; currently unused in this script.
api_keys = []
# Use Alibaba Cloud Bailian / DashScope's OpenAI-compatible endpoint.

# params = {
#     'model': 'qwen-plus',
#     'base_url': 'https://dashscope.aliyuncs.com/compatible-mode/v1',
#     'api_key': 'sk-***REDACTED***',  # NOTE(review): never commit real keys; rotate this one
#     'temperature': 0.3,
#     'top_p': 1,
#     'streaming': False,
#     # 'model_kwargs': {
#     #     'stream_options': {'include_usage': True}
#     # },
# }
# Connection and sampling settings for DashScope's OpenAI-compatible
# endpoint, consumed below as ChatOpenAI(**params).
# SECURITY: the API key is read from the environment instead of being
# hard-coded in source — the previously committed literal key is leaked
# and should be rotated on the DashScope console.
params = {
    'model': 'qwen-plus',
    'base_url': 'https://dashscope.aliyuncs.com/compatible-mode/v1',
    'api_key': os.environ.get('DASHSCOPE_API_KEY', ''),
    'streaming': True,
    # Ask the server to append a final usage-stats chunk to the stream.
    'model_kwargs': {'stream_options': {'include_usage': True}},
    'temperature': 0.3,
    'top_p': 1,
}
# llm = ChatOpenAI(
#     api_key="sk-***REDACTED***",  # NOTE(review): never commit real keys
#     base_url="https://dashscope.aliyuncs.com/compatible-mode/v1",
#     model="qwen-plus",  # 对应阿里云的模型名称
#     temperature=0.7,
#     disable_streaming=True
# )

# print(llm.model_name)


async def main():
    """Stream one completion from the configured endpoint, logging each chunk.

    Builds a ChatOpenAI client from the module-level ``params`` dict and
    iterates the async token stream for a single hard-coded prompt.
    """
    chat_model = ChatOpenAI(**params)
    stream = chat_model.astream("太阳直径是多少？")
    async for res in stream:
        logger.debug(f"chunk,{type(res)}, {res}")


# asyncio.run(main())

# start = datetime.now()
# response = llm.invoke("太阳直径是多少？")
# diff = datetime.now() - start
# print(diff, diff.seconds)
# print("ChatOpenAI",response)

# Enable debug logging
# logging.basicConfig(level=logging.DEBUG)
#
# # 或者只看 httpx 的日志
# import httpx
# httpx_log = logging.getLogger("httpx")
# httpx_log.setLevel(logging.INFO)
# httpx_log.addHandler(logging.StreamHandler())


# llm = ChatTongyi(
#     dashscope_api_key="sk-***REDACTED***",  # NOTE(review): never commit real keys
#
#     model="qwq-plus",  # 对应阿里云的模型名称
#
#     streaming=True,
#     temperature=0.3,
#     top_p=1,
#     cache=False,
#     model_kwargs={
#         "temperature": 0.3,
#         'enable_search': False
#     },
#
# )
#
# start = datetime.now()
#
#
# for res in llm.stream("太阳直径是多少？"):
#     print(res)
#
# diff = datetime.now() - start
# print(diff, diff.seconds)

# llm = ChatOpenAI(
#     api_key="sk-***REDACTED***",  # NOTE(review): never commit real keys
#     base_url="https://dashscope.aliyuncs.com/compatible-mode/v1",
#     model="qwq-plus",  # 对应阿里云的模型名称
#     temperature=0.3,
# )
#
# start = datetime.now()
# for chunk in llm.stream("太阳直径是多少？"):
#     print(chunk)
# diff = datetime.now() - start
# print(diff, diff.seconds)
# print("chatopenai",response)
# # print("chatopenai",response['usage_metadata'])
# print("chatopenai",response.usage_metadata)
# print("chatopenai",response.response_metadata)
