#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
LLM客户端使用示例

演示如何使用LLM客户端进行对话
"""

import sys
import os

# 添加services目录到路径
sys.path.append(os.path.join(os.path.dirname(__file__), '..', 'services'))

from llm_client import LLMClient


def example_single_chat():
    """Demonstrate a one-shot question/answer exchange with the LLM client."""
    banner = "=" * 50
    print(banner)
    print(" 单次对话示例")
    print(banner)

    # Create a fresh client for this example.
    client = LLMClient()

    question = "请用简洁的语言解释什么是人工智能"
    print(f" 问题: {question}")

    answer = client.chat(question)
    # A falsy response (None / empty) is treated as a failed request.
    if not answer:
        print(" 获取回答失败")
    else:
        print(f" 回答: {answer}")


def example_chat_with_system_prompt():
    """Demonstrate steering the model's persona with a system prompt."""
    separator = "=" * 50
    print("\n" + separator)
    print(" 带系统提示词的对话示例")
    print(separator)

    # Create a fresh client for this example.
    client = LLMClient()

    # The system prompt fixes the assistant's role before the user question.
    system_prompt = "你是一个Python编程专家，请用简洁明了的方式回答编程问题。"
    question = "如何在Python中读取CSV文件？"

    print(f" 系统提示: {system_prompt}")
    print(f" 问题: {question}")

    reply = client.chat(question, system_prompt=system_prompt)
    # A falsy response (None / empty) is treated as a failed request.
    if not reply:
        print(" 获取回答失败")
    else:
        print(f" 回答: {reply}")


def example_multi_turn_chat():
    """Demonstrate a multi-turn conversation driven by an explicit message history."""
    separator = "=" * 50
    print("\n" + separator)
    print(" 多轮对话示例")
    print(separator)

    # Create a fresh client for this example.
    client = LLMClient()

    # Conversation history in role/content message format.
    messages = [
        {"role": "system", "content": "你是一个友好的助手，喜欢用简洁的语言回答问题。"},
        {"role": "user", "content": "你好！"},
        {"role": "assistant", "content": "你好！很高兴为您服务，有什么可以帮助您的吗？"},
        {"role": "user", "content": "请推荐一本好看的科幻小说"},
    ]

    # Echo only the user/assistant turns; the system prompt stays hidden.
    role_labels = {"user": " 用户: ", "assistant": " 助手: "}
    print(" 对话历史:")
    for message in messages:
        label = role_labels.get(message["role"])
        if label is not None:
            print(label + message["content"])

    print("\n 助手正在回复...")
    reply = client.chat_with_context(messages)
    # A falsy response (None / empty) is treated as a failed request.
    if not reply:
        print(" 获取回答失败")
    else:
        print(f" 助手: {reply}")


def main():
    """Run every demo in sequence, then print command-line usage hints."""
    print(" LLM客户端使用示例")

    demos = (
        example_single_chat,
        example_chat_with_system_prompt,
        example_multi_turn_chat,
    )
    try:
        for demo in demos:
            demo()

        print("\n" + "=" * 50)
        print(" 所有示例运行完成！")
        print("\n 命令行使用方法:")
        print("  python services/llm_client.py '你的问题'")
        print("  python services/llm_client.py '你的问题' -s '系统提示词'")
        print("  python services/llm_client.py -i  # 交互模式")
        print("  python services/llm_client.py '你的问题' --json  # JSON输出")

    except Exception as e:
        # Demo script: report the failure with a traceback rather than crashing.
        print(f" 示例运行失败: {e}")
        import traceback
        traceback.print_exc()


# Script entry point: run the demos only when executed directly, not on import.
if __name__ == "__main__":
    main()
