import json

import requests
from concurrent.futures import ThreadPoolExecutor
from restful import DeepSeekResponse,deepseek2openai
# Run locally:
# OLLAMA_URL = "http://192.168.35.221:11434/api/generate"
# Inside the container:
# OLLAMA_URL = "http://ollama:11434/api/generate"

# OLLAMA_URL = "http://132.10.10.121:5000/generate"
# OLLAMA_URL = "http://ollama:11434/api/generate"
OLLAMA_URL = "http://132.10.10.121:11434/api/generate"



def deepseek_stream(prompt):
    """Stream a completion for *prompt* from the Ollama generate endpoint.

    Yields one OpenAI-format chunk (via deepseek2openai) per JSON line as it
    arrives from the server.  On any HTTP/connection error the stream ends
    after printing the error (best-effort, matching the rest of this module).

    Parameters:
        prompt: the user prompt string forwarded verbatim to the model.

    Yields:
        whatever deepseek2openai() returns for each parsed server line.
    """
    payload = {
        "model": "qwen2.5:14b",  # Model name from API tags  qwen2.5:32b  qwen:1.8b，qwen2.5:32b
        "prompt": prompt,
        "stream": True,  # ask Ollama for a line-delimited JSON stream
    }

    try:
        # BUG FIX: stream=True is required here, otherwise requests buffers
        # the whole body before the first byte reaches this generator and
        # nothing is yielded until the full completion has been produced.
        with requests.post(OLLAMA_URL, json=payload, stream=True) as response:
            response.raise_for_status()  # raise on non-2xx status
            # Ollama emits one JSON object per line; convert each line to
            # the OpenAI chunk format and yield it as soon as it arrives.
            idx = 0
            for line in response.iter_lines(decode_unicode=True):
                if not line:
                    continue  # skip keep-alive / blank lines
                j_item = json.loads(line)
                dsr = DeepSeekResponse(
                    id=idx + 1,
                    model=j_item["model"],
                    created_at=j_item["created_at"],
                    response=j_item["response"],
                    done=j_item["done"],
                )
                idx += 1
                yield deepseek2openai(dsr)

    except requests.exceptions.RequestException as e:
        # Best-effort: report and end the stream rather than raising,
        # consistent with deepseek_chat below.
        print("Error:", e)


def deepseek_chat(prompt, timeout=60):
    """Return the complete (non-streaming) completion text for *prompt*.

    Parameters:
        prompt: the user prompt string forwarded verbatim to the model.
        timeout: seconds to wait for the HTTP call before aborting.  New,
            defaulted parameter — existing callers are unaffected.

    Returns:
        The model's "response" string, or None on any request error
        (best-effort behavior preserved from the original).
    """
    payload = {
        "model": "qwen2.5:14b",  # Model name from API tags
        # "model": "qwen2.5:32b",  # Model name from API tags  qwen2.5:32b  qwen:1.8b qwen2.5:14b
        "prompt": prompt,
        "stream": False,  # single JSON object, not a stream
    }

    try:
        # requests has NO default timeout; without one this call can hang
        # forever if the server stops responding.
        response = requests.post(OLLAMA_URL, json=payload, timeout=timeout)
        response.raise_for_status()  # raise on non-2xx status
        return response.json()["response"]

    except requests.exceptions.RequestException as e:
        print("Error:", e)
        return None  # explicit: callers get None on failure


def mutil_function(function, input_datas):
    """Apply *function* to each item of *input_datas* on a thread pool.

    Parameters:
        function: callable invoked once per item.
        input_datas: iterable of inputs, one per call.

    Returns:
        A list of results in input order.

    The map is materialized to a list INSIDE the ``with`` block: the
    original returned the lazy ``executor.map`` iterator after the pool was
    shut down, so any worker exception surfaced at a distant later
    iteration instead of here.  A list is still iterable, so callers that
    loop over the result are unaffected.
    """
    with ThreadPoolExecutor() as executor:
        return list(executor.map(function, input_datas))





if __name__ == '__main__':
    # Smoke test: stream a short answer and dump every chunk as JSON.
    prompt = "请简单介绍一下北京？"
    for chunk in deepseek_stream(prompt):
        print(type(chunk), chunk.json())