# Web UI for this deployment (the `sign` query parameter is the same short-lived
# JWT used by the script below):
# https://tf7k.prod-bd-wx-if.apps.gitee-ai.com/?sign=eyJhbGciOiJFZERTQSJ9.eyJpc3MiOiJodHRwczovL2FpLmdpdGVlLmNvbSIsInN1YiI6IntcIm5hbWVzcGFjZVwiOlwic3RyaW5naWZ5XCIsXCJwcm9qZWN0XCI6XCJnbG00LWFwaVwiLFwiYXBwSWRcIjpcInRmN2tcIn0iLCJleHAiOjE3MjM4OTAxMTh9.zU4yezCBxoN9ldAFRNzGiEs53NRxX9QRgMVEeQAcBcbpz6u0xTCw2sXGwjYibpWgZzdh_4vv60D13RV-FAirDg&__theme=light&embed_options=light_theme
import os

from langchain.callbacks.streaming_stdout import StreamingStdOutCallbackHandler
from langchain_openai import ChatOpenAI
from openai import OpenAI
# Endpoint configuration: the proxy accepts a placeholder API key ("EMPTY")
# and instead authenticates via the `sign` JWT sent as a query parameter.
openai_api_key = "EMPTY"
openai_api_base = "http://tf7k.prod-bd-wx-if.apps.gitee-ai.com/v1"

# OpenAI-compatible client pointed at the Gitee AI serving endpoint.
client = OpenAI(
    api_key=openai_api_key,
    base_url=openai_api_base,
)

# NOTE(security): a signed token is checked into source. It carries an `exp`
# claim and will stop working once expired — prefer supplying a fresh token
# via the GITEE_AI_SIGN environment variable; the literal below is kept only
# as a backward-compatible fallback.
sign = os.environ.get(
    "GITEE_AI_SIGN",
    "eyJhbGciOiJFZERTQSJ9.eyJpc3MiOiJodHRwczovL2FpLmdpdGVlLmNvbSIsInN1YiI6IntcIm5hbWVzcGFjZVwiOlwic3RyaW5naWZ5XCIsXCJwcm9qZWN0XCI6XCJnbG00LWFwaVwiLFwiYXBwSWRcIjpcInRmN2tcIn0iLCJleHAiOjE3MjM4OTAxMTh9.zU4yezCBxoN9ldAFRNzGiEs53NRxX9QRgMVEeQAcBcbpz6u0xTCw2sXGwjYibpWgZzdh_4vv60D13RV-FAirDg",
)

# Request a streamed chat completion.
# - `stop_token_ids` in `extra_body` is a vendor extension; the ids are
#   presumably GLM-4 end/role special tokens — confirm against the tokenizer.
# - `extra_query` threads the auth signature onto every request URL.
stream = client.chat.completions.create(
    model="hf-models/glm-4-9b-chat",
    messages=[{"role": "user", "content": "哈喽, 今天吃点什么?"}],
    extra_body={
        "stop_token_ids": [151329, 151336, 151338]
    },
    max_tokens=8000,
    stream=True,
    extra_query={"sign": sign},
)

# Echo tokens as they arrive; flush each piece so output is visible
# immediately even when stdout is block-buffered (e.g. when piped).
for chunk in stream:
    if (content := chunk.choices[0].delta.content) is not None:
        print(content, end="", flush=True)


# Alternative (kept for reference): the same streaming call made through
# LangChain's ChatOpenAI wrapper, optionally with a stdout streaming callback.
# callbacks=[StreamingStdOutCallbackHandler()]
# llm = ChatOpenAI(api_key="EMPTY", model_name="hf-models/glm-4-9b-chat",  base_url=openai_api_base,
#                  streaming=True, temperature=0.3, presence_penalty=0.7, top_p=0.95, max_tokens=8000,
#                  extra_body={"stop_token_ids": [151329, 151336, 151338]},
#                  default_query={"sign": sign})
# for response in llm.stream([{"role": "user", "content": "哈喽, 今天吃点什么?"}]):
#     if content := response.content or "":
#         print(content, end="")
