|
Function Calling |
|
================ |
|
|
|
We offer a wrapper for function calling over the DashScope API and the
OpenAI API in `Qwen-Agent <https://github.com/QwenLM/Qwen-Agent>`__.
|
|
|
Use Case |
|
-------- |
|
|
|
.. code:: py |
|
|
|
import json |
|
import os |
|
from qwen_agent.llm import get_chat_model |
|
|
|
|
|
|
|
|
|
def get_current_weather(location, unit='fahrenheit'):
    """Get the current weather in a given location"""
    # Canned reports for the demo cities; lookup order matches the
    # original if/elif chain (dicts preserve insertion order).
    canned_reports = {
        'tokyo': {
            'location': 'Tokyo',
            'temperature': '10',
            'unit': 'celsius'
        },
        'san francisco': {
            'location': 'San Francisco',
            'temperature': '72',
            'unit': 'fahrenheit'
        },
        'paris': {
            'location': 'Paris',
            'temperature': '22',
            'unit': 'celsius'
        },
    }
    query = location.lower()
    for city, report in canned_reports.items():
        if city in query:
            return json.dumps(report)
    # Unknown city: echo the location back with no temperature data.
    return json.dumps({'location': location, 'temperature': 'unknown'})
|
|
|
|
|
def test():
    """Demonstrate a full function-calling round trip with qwen-max.

    NOTE(review): performs network I/O against the DashScope service and
    requires a valid DASHSCOPE_API_KEY in the environment.
    """
    llm = get_chat_model({
        'model': 'qwen-max',
        'model_server': 'dashscope',
        'api_key': os.getenv('DASHSCOPE_API_KEY'),
    })

    messages = [{
        'role': 'user',
        'content': "What's the weather like in San Francisco?",
    }]
    # JSON-schema description of the tool the model may call.
    functions = [{
        'name': 'get_current_weather',
        'description': 'Get the current weather in a given location',
        'parameters': {
            'type': 'object',
            'properties': {
                'location': {
                    'type': 'string',
                    'description':
                    'The city and state, e.g. San Francisco, CA',
                },
                'unit': {
                    'type': 'string',
                    'enum': ['celsius', 'fahrenheit'],
                },
            },
            'required': ['location'],
        },
    }]

    print('# Assistant Response 1:')
    responses = []
    # With stream=True each yielded chunk is the full response list so
    # far, so after the loop `responses` holds the complete output.
    for responses in llm.chat(messages=messages,
                              functions=functions,
                              stream=True):
        print(responses)

    messages.extend(responses)

    last_response = messages[-1]
    if last_response.get('function_call', None):
        # Map the tool name chosen by the model to a local callable.
        available_functions = {
            'get_current_weather': get_current_weather,
        }
        function_name = last_response['function_call']['name']
        function_to_call = available_functions[function_name]
        # Arguments arrive as a JSON string produced by the model.
        function_args = json.loads(last_response['function_call']['arguments'])
        function_response = function_to_call(
            location=function_args.get('location'),
            unit=function_args.get('unit'),
        )
        print('# Function Response:')
        print(function_response)

        # Feed the tool result back so the model can compose its answer.
        messages.append({
            'role': 'function',
            'name': function_name,
            'content': function_response,
        })

        print('# Assistant Response 2:')
        for responses in llm.chat(
                messages=messages,
                functions=functions,
                stream=True,
        ):
            print(responses)
|
|
|
|
|
# Run the demo only when executed as a script, not on import.
if __name__ == '__main__':
    test()
|
|