
if __name__ == '__main__':
    # Demo: streaming chat with OpenAI-style function calling via qwen_agent.
    # Requires the DASHSCOPE_API_KEY environment variable when using the
    # DashScope model server (or point 'model_server' at a local endpoint).
    import os

    from qwen_agent.llm import get_chat_model

    llm_cfg = {
        'model': 'qwen-max',
        'model_server': 'dashscope',
        # SECURITY: never hardcode API keys in source — read from the environment.
        'api_key': os.getenv('DASHSCOPE_API_KEY', ''),
        # Alternative: a locally hosted OpenAI-compatible server.
        # 'model': 'Qwen',
        # 'model_server': 'http://127.0.0.1:7905/v1',
        'generate_cfg': {
            'top_p': 0.8
        }
    }
    llm = get_chat_model(llm_cfg)
    messages = [{
        'role': 'user',
        'content': "What's the weather like in San Francisco?"
    }]
    # JSON-Schema description of the callable tool, in the OpenAI
    # function-calling format expected by qwen_agent.
    functions = [{
        'name': 'get_current_weather',
        'description': 'Get the current weather in a given location',
        'parameters': {
            'type': 'object',
            'properties': {
                'location': {
                    'type': 'string',
                    'description':
                        'The city and state, e.g. San Francisco, CA',
                },
                'unit': {
                    'type': 'string',
                    'enum': ['celsius', 'fahrenheit']
                },
            },
            'required': ['location'],
        },
    }]

    # Demonstrate streaming output: each iteration yields the accumulated
    # response list so far, which we print as it grows.
    for responses in llm.chat(messages=messages,
                              functions=functions,
                              stream=True):
        print(responses)