import requests
import json
from datetime import datetime
import time
import hashlib
import base64
from uuid import uuid4

# header 构造
def gen_proxy_header(
        appid,
        capabilityname,
        appKey
    ):

    csid = appid+capabilityname+"0"*(24-len(capabilityname))+str(uuid4()).replace("-", "")
    x_server_param = base64.urlsafe_b64encode(json.dumps({"appid": appid, "csid": csid}).encode())
    curtime = int(time.time())
    checkSum = hashlib.md5(f"{appKey}{str(curtime)}{x_server_param.decode()}".encode('utf-8')).hexdigest()
    headers = {
        "Content-Type": "application/json;charset=UTF-8",
        "X-Server-Param": f"{x_server_param.decode()}",
        "X-CurTime": f"{curtime}",
        "X-CheckSum": f"{checkSum}",
    }
    return headers

def openai_api_streaming(appid, capabilityname, appKey, url, api_key, model,
                         temperature=0.5, max_tokens=50,
                         content="hello, 介绍以下你自己"):
    """Send a streaming chat-completion request and print tokens as they arrive.

    Prints time-to-first-token, the streamed text, the usage block (if the
    server sends one) and total elapsed time.

    Args:
        appid, capabilityname, appKey: gateway auth parameters
            (see gen_proxy_header).
        url: chat-completions endpoint.
        api_key: value for the Bearer Authorization header.
        model: model name to request.
        temperature: sampling temperature.
        max_tokens: generation cap.
        content: user message to send (default kept from the original
            hard-coded prompt for backward compatibility).

    Returns:
        The full generated text joined from the streamed deltas.

    Raises:
        ValueError: if the server responds with a non-200 status.
    """
    headers = gen_proxy_header(appid, capabilityname, appKey)
    headers["Authorization"] = "Bearer " + api_key

    data = {
        "model": model,
        "messages": [
            {"role": "user", "content": content}
        ],
        "max_tokens": max_tokens,
        "temperature": temperature,
        "stream": True,
        # NOTE: soft switch for the model's "thinking" mode on the inference side.
        "chat_template_kwargs": {"enable_thinking": True}
    }
    st_time = time.time()
    response = requests.post(url, headers=headers, json=data, stream=True, timeout=100)
    if response.status_code != 200:
        raise ValueError(f"Failed to generate response: {response.text}")

    result = []
    print(datetime.now())
    first_token = True
    try:
        for chunk in response.iter_lines():
            # SSE frames look like b"data: {...}" or b"data: [DONE]";
            # skip keep-alives and anything that is not a data line.
            if not chunk or not chunk.startswith(b"data:"):
                continue
            payload = chunk[5:].strip()
            if payload == b"[DONE]":
                continue
            chunk = json.loads(payload)
            if "choices" in chunk and len(chunk["choices"]) > 0:
                if first_token:
                    # time-to-first-token
                    print(time.time() - st_time)
                    first_token = False
                choice = chunk["choices"][0]
                if choice["delta"]:
                    # "content" may be missing or null (e.g. thinking deltas);
                    # normalize to "" so the final join never sees None.
                    result.append(choice["delta"].get("content") or "")
                    print(result[-1], end="", flush=True)
            if "usage" in chunk:
                print()
                print(chunk["usage"])
                print(time.time() - st_time)
    finally:
        # Close the streaming connection even if parsing fails mid-stream.
        response.close()
    print()
    print(datetime.now())
    return "".join(result)


# --- Script configuration -------------------------------------------------
# NOTE(security): credentials are hard-coded in source; move them to
# environment variables or a secrets store before sharing this file.

# Key used for the Bearer Authorization header.
api_key = "cs-baac3b82"
# Model name.
model = "qwen3-32b-hc"
# URL: 10.217.247.48 for the office network / call pool; switch to the
# matching IP for other resource pools.
url = "http://10.217.247.48:9050/llmm-prod/v1/chat/completions"
# Gateway ("Panzhi") authentication parameters.
appid = "dsjyybai"
appKey = "bb9c5100caf0f84e22d88451e94ce2bb"
capabilityname = "llmm"

# Guard the network call so importing this module has no side effects.
if __name__ == "__main__":
    openai_api_streaming(
        url=url,
        api_key=api_key,
        model=model,
        temperature=0.8,
        max_tokens=500,
        appid=appid,
        appKey=appKey,
        capabilityname=capabilityname,
    )

