import copy
import random

import dashscope
from dashscope import Generation
from dashscope.api_entities.dashscope_response import Role
from flask import Flask, Response, json, request
from flask_cors import CORS
from openai import OpenAI

from utils.basic_function import get_current_time, get_current_weather
from utils.tools import detect_tool

# Kept after the flask import on purpose: the stdlib `json` must win the
# name binding (the original file shadowed flask's json the same way).
import json
import os

# import numpy as np
# SECURITY: the original source embedded a live API key in plain text.
# Read it from the environment instead — and rotate the leaked key.
dashscope.api_key = os.getenv('DASHSCOPE_API_KEY', '')
app = Flask(__name__)
# Allow cross-origin requests on every route (frontend is served elsewhere).
CORS(app, resources="/*")

# Receives the full message history.

# Tool list: when choosing a tool, the model consults each entry's name and description.
qwen_tools = [
    # Tool 1: get the current time.
    {
        "type": "function",
        "function": {
            "name": "get_current_time",
            "description": "当你想知道现在的时间时非常有用。",
            "parameters": {}
        }
    },
    # Tool 2: get weather information for a given city.
    {
        "type": "function",
        "function": {
            "name": "get_current_weather",
            "description": "获取指定城市的天气信息。",
            "parameters": {
                "type": "object",
                "properties": {
                    "city": {
                        "type": "string",
                        "description": "城市名称"
                    }
                },
                "required": ["city"]
            }
        }
    }
]
# Tool schema for the OpenAI-compatible deepseek endpoint.
# FIX: removed a stray `"type": "object"` that sat inside the `function`
# object — per the OpenAI tools spec a function entry carries only
# name / description / parameters.
openai_tools = [
    {
        "type": "function",
        "function": {
            "name": "get_current_time",
            "description": "获取当前时间",
            "parameters": {
                "type": "object",
                "properties": {}
            }
        }
    }
]
# 封装模型响应函数
def get_qwen(messages, stream):
    """Call the qwen-plus model with the shared tool list.

    Args:
        messages: conversation history in dashscope message format.
        stream: when True, the call returns an iterator of incremental chunks.

    Returns:
        The raw dashscope Generation response (or chunk iterator).
    """
    return Generation.call(
        model='qwen-plus',
        messages=messages,
        tools=qwen_tools,
        stream=stream,
        result_format='message',  # structured messages rather than raw text
    )



# NOTE(review): this list appears unused — get_deepseek sends `openai_tools`
# and get_qwen sends `qwen_tools`; presumably a leftover draft of the
# deepseek tool schema. Verify no external caller imports it before removing.
tools = [
    {
        "type": "function",
        "function": {
            "name": "get_current_time",
            "description": "get current time",
            "parameters": {
                "type": "object",
                "properties":{}
            },
        }
    },
]
def get_deepseek(messages):
    """Send one chat completion to deepseek-chat and return the first choice's message."""
    completion = client.chat.completions.create(
        model="deepseek-chat",
        messages=messages,
        tools=openai_tools,
    )
    first_choice = completion.choices[0]
    return first_choice.message

# OpenAI-compatible client pointed at the deepseek API.
# SECURITY: the original source embedded a live API key in plain text.
# Read it from the environment instead — and rotate the leaked key.
client = OpenAI(
    api_key=os.getenv("DEEPSEEK_API_KEY", ""),
    base_url="https://api.deepseek.com",
)


# Response headers for Server-Sent Events: disable caching and nginx proxy
# buffering (X-Accel-Buffering) so each chunk reaches the client immediately.
headers = {
    'Content-Type': 'text/event-stream',
    'Cache-Control': 'no-cache',
    'X-Accel-Buffering': 'no',
}

@app.route('/llm/request')
def stream_numbers():
    """SSE endpoint: streams a qwen answer, resolving a get_current_time
    tool call with a second model round when the model requests one.

    Query args:
        query: the user prompt (defaults to 'default query').
    """
    query = request.args.get('query', default='default query')
    messages = []

    def chat():
        messages.append({'role': 'user', 'content': query})
        # Buffer the first stream so the fallback path below can replay it.
        # The original code re-iterated the exhausted generator and yielded
        # nothing but the final 'done' marker.
        chunks = list(get_qwen(messages, True))
        try:
            tool_message = None
            tool_info = {}
            for chunk in chunks:
                tool_message = chunk.output.choices[0].message
                tool_calls = tool_message.tool_calls
                if tool_calls and tool_calls[0]["function"]["name"] == 'get_current_time':
                    tool_info = {"name": "get_current_time", 'role': 'tool'}
                    tool_info['content'] = get_current_time()
            if tool_message is None or not tool_info:
                # No tool call requested — hand off to the direct-streaming path.
                raise ValueError("model made no tool call")
            messages.append(dict(tool_message))
            messages.append(tool_info)
            # Second round: let the model summarize the tool output.
            for chunk in get_qwen(messages, True):
                part_message = chunk.output.choices[0].message.content
                json_data = json.dumps({"message": part_message + "\n"})
                yield f"data: {json_data}\n\n"  # SSE frame format
            yield f"data: {json.dumps({'message': 'done'})}\n\n"
        except Exception:
            # Tool-free answer (or the tool round failed): stream the
            # buffered first response as-is. Narrowed from a bare `except:`.
            for chunk in chunks:
                part_message = chunk.output.choices[0].message.content
                json_data = json.dumps({"message": part_message})
                yield f"data: {json_data}\n\n"
            yield f"data: {json.dumps({'message': 'done'})}\n\n"

    return Response(chat(), content_type='text/event-stream', headers=headers)
@app.route('/llm/direct')
def qwen_direct():
    """Non-streaming endpoint: answers a query, performing one tool
    round-trip (time or weather) when the model asks for it."""
    query = request.args.get('query', default='default query')
    messages = []

    def chat():
        messages.append({'role': 'user', 'content': query})
        response = get_qwen(messages, False)
        message = response.output.choices[0].message

        # Plain answer: no tool requested, return the content directly.
        if "tool_calls" not in dict(message):
            payload = json.dumps({"message": message.content})
            return f"data: {payload}\n\n"

        messages.append(message)
        call = message.tool_calls[0]["function"]
        tool_info = {}
        if call["name"] == "get_current_time":
            tool_info = {"name": "get_current_time", 'role': 'tool'}
            tool_info['content'] = get_current_time()
        elif call["name"] == "get_current_weather":
            tool_info = {"name": "get_current_weather", 'role': 'tool'}
            args = json.loads(call["arguments"])
            tool_info['content'] = get_current_weather(args["city"])
        messages.append(tool_info)

        # Second round: the model summarizes the tool result.
        final = get_qwen(messages, False)
        payload = json.dumps({"message": final.output.choices[0].message.content})
        return f"data: {payload}\n\n"

    return Response(chat(), content_type='text/event-stream', headers=headers)
@app.route('/llm/deepseek')
def deepseek():
    """Single-shot deepseek endpoint; resolves a get_current_time tool
    call with a second request when the model makes one.

    Query args:
        query: the user prompt (defaults to 'default query').
    """
    query = request.args.get('query', default='default query')

    def chat():
        messages = [{"role": "user", "content": query}]
        message = get_deepseek(messages)
        print(f"User>\t {messages[0]['content']}")

        # FIX: the model may answer directly without calling the tool;
        # the original code then crashed indexing `message.tool_calls[0]`.
        if not message.tool_calls:
            return f"data: {json.dumps({'message': message.content})}\n\n"

        tool = message.tool_calls[0]
        messages.append(message)
        messages.append({"role": "tool", "tool_call_id": tool.id, "content": get_current_time()})

        # Second round: the model summarizes the tool result.
        message = get_deepseek(messages)
        print(f"Model>\t {message.content}")
        return f"data: {json.dumps({'message': message.content})}\n\n"

    return Response(chat(), content_type='text/event-stream', headers=headers)
if __name__ == '__main__':
    # NOTE(review): debug=True exposes the Werkzeug debugger and reloader on
    # 0.0.0.0 — fine for local development, must be disabled before deploying.
    app.run(debug=True, host='0.0.0.0', port=5000)
