import os

from langchain_deepseek import ChatDeepSeek

# DeepSeek-compatible endpoint (Tencent LKEAP). Read the endpoint and key from
# the environment so secrets are not committed to source control; the literal
# fallback keeps existing behavior for local runs.
# SECURITY NOTE(review): the fallback key below has been committed to version
# control and must be treated as compromised — revoke and rotate it, then
# remove the fallback entirely.
base_url = os.environ.get('DEEPSEEK_API_BASE', 'https://api.lkeap.cloud.tencent.com/v1')
api_key = os.environ.get('DEEPSEEK_API_KEY', 'sk-7EIVZor7TFz2e9ZmXEcKacVPEOcgUphCIxfeMIh9vp8Xxr4w')
# Chat model client. temperature=0 for reproducible output; max_tokens and
# timeout are left at provider defaults; a single retry keeps failures fast.
_llm_kwargs = {
    "api_base": base_url,
    "api_key": api_key,
    "model": "deepseek-v3",
    "temperature": 0,
    "max_tokens": None,
    "timeout": None,
    "max_retries": 1,
}
llm = ChatDeepSeek(**_llm_kwargs)

# Conversation to send: one system prompt plus one user turn,
# in LangChain's (role, content) tuple form.
messages = [
    ("system", "你是一个小助手"),
    ("human", "你好，你是谁"),
]
# Non-streaming variant (disabled): a = llm.invoke(messages) returns a single
# message; reasoning models expose their chain of thought under
# a.additional_kwargs['reasoning_content'], which can be printed between
# <think>...</think> markers before a.content.


# Stream the reply, echoing any chain-of-thought tokens between <think> tags
# and then the answer tokens. `done_reasoning` flips when the first answer
# token arrives so the closing </think> is printed exactly once.
done_reasoning = False
print("\n<think>")
for chunk in llm.stream(messages):
    # 'reasoning_content' is only present on reasoning-model chunks and may be
    # None even when the key exists; `or ''` normalizes both cases (the
    # original double-lookup printed the literal string "None" on None).
    reasoning_chunk = chunk.additional_kwargs.get('reasoning_content') or ''
    answer_chunk = chunk.content or ''
    if reasoning_chunk:
        print(reasoning_chunk, end='', flush=True)
    elif answer_chunk:
        if not done_reasoning:
            # First answer token: close the reasoning section.
            print('\n\n </think>\n')
            done_reasoning = True
        print(answer_chunk, end='', flush=True)
# Terminate the last flushed line so the shell prompt does not glue onto it.
print()



#
# from pydantic import BaseModel, Field
#
# class GetWeather(BaseModel):
#     '''Get the current weather in a given location'''
#
#     location: str = Field(..., description="The city and state, e.g. San Francisco, CA")
#
# class GetPopulation(BaseModel):
#     '''Get the current population in a given location'''
#
#     location: str = Field(..., description="The city and state, e.g. San Francisco, CA")
#
# llm_with_tools = llm.bind_tools([GetWeather, GetPopulation])
# ai_msg = llm_with_tools.invoke("Which city is hotter today and which is bigger: LA or NY?")
# print(ai_msg.tool_calls)
