#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
大模型聊天客户端
使用OpenAI兼容的方式调用Qwen3-235B-A22B模型
"""

import json
import os
import sys
from typing import Dict, Generator, List, Optional

import openai


class ChatClient:
    """Chat client for an OpenAI-compatible large-language-model API."""

    def __init__(self, api_key: str, base_url: str, model: str):
        """
        Initialize the chat client.

        Args:
            api_key: API key used to authenticate requests.
            base_url: Base URL of the OpenAI-compatible endpoint.
            model: Name of the model to query.
        """
        self.client = openai.OpenAI(
            api_key=api_key,
            base_url=base_url
        )
        self.model = model

    def chat(self,
             messages: List[Dict[str, str]],
             stream: bool = False,
             max_tokens: int = 4096,
             temperature: float = 0.7,
             stop: Optional[List[str]] = None,
             frequency_penalty: float = 0,
             presence_penalty: float = 0,
             logprobs: bool = False,
             top_logprobs: int = 0) -> Optional[Dict]:
        """
        Send a chat-completion request.

        Args:
            messages: Message list, e.g. [{"role": "user", "content": "hi"}].
            stream: Whether to stream the response.
            max_tokens: Maximum number of tokens to generate.
            temperature: Sampling temperature.
            stop: Optional list of stop sequences.
            frequency_penalty: Frequency penalty.
            presence_penalty: Presence penalty.
            logprobs: Whether to return log probabilities.
            top_logprobs: Number of top log probabilities to return;
                only sent when ``logprobs`` is True.

        Returns:
            The model response object, or None if the request failed.
        """
        try:
            response = self.client.chat.completions.create(
                model=self.model,
                messages=messages,
                stream=stream,
                max_tokens=max_tokens,
                temperature=temperature,
                stop=stop,
                frequency_penalty=frequency_penalty,
                presence_penalty=presence_penalty,
                logprobs=logprobs,
                # top_logprobs is only meaningful alongside logprobs=True.
                top_logprobs=top_logprobs if logprobs else None
            )
            return response
        except Exception as e:
            # Best-effort by design: report the failure and return None
            # instead of raising; callers treat None as "no reply".
            print(f"请求失败: {e}")
            return None

    def chat_stream(self, messages: List[Dict[str, str]], **kwargs) -> Generator[str, None, None]:
        """
        Stream a chat response chunk by chunk.

        Args:
            messages: Message list.
            **kwargs: Extra parameters forwarded to :meth:`chat`.

        Yields:
            Incremental pieces of the response text.
        """
        kwargs['stream'] = True
        response = self.chat(messages, **kwargs)

        if response:
            for chunk in response:
                # Some providers emit keep-alive/usage chunks with an empty
                # ``choices`` list; indexing those would raise IndexError.
                if not chunk.choices:
                    continue
                content = chunk.choices[0].delta.content
                if content:
                    yield content

    def simple_chat(self, user_input: str, system_prompt: Optional[str] = None) -> str:
        """
        Single-turn convenience wrapper.

        Args:
            user_input: The user's message.
            system_prompt: Optional system prompt prepended to the conversation.

        Returns:
            The model's reply, or a fixed fallback message on failure.
        """
        messages = []
        if system_prompt:
            messages.append({"role": "system", "content": system_prompt})
        messages.append({"role": "user", "content": user_input})

        response = self.chat(messages)
        if response and response.choices:
            return response.choices[0].message.content
        return "抱歉，无法获取回复"


def main():
    """Run the interactive command-line chat loop."""
    # Configuration: prefer environment variables so credentials are not
    # forced to live in source; fall back to the original built-in values
    # for backward compatibility.
    # NOTE(review): the fallback API key is committed to source control and
    # should be rotated/removed — confirm with the repo owner.
    API_KEY = os.environ.get("LLM_API_KEY", "sk-ldcfdqyvcyw7pqp7qp37jm6c4wc3nglrzfzhqwusflrc3v5j")
    BASE_URL = os.environ.get("LLM_BASE_URL", "https://maas-api.lanyun.net/v1")
    MODEL = os.environ.get("LLM_MODEL", "/maas/qwen/Qwen3-235B-A22B")

    # Create the client
    client = ChatClient(API_KEY, BASE_URL, MODEL)

    print("=== 大模型聊天客户端 ===")
    print("输入 'quit' 或 'exit' 退出")
    print("输入 'stream' 开启流式模式")
    print("输入 'normal' 关闭流式模式")
    print("-" * 40)

    stream_mode = False

    while True:
        try:
            user_input = input("\n用户: ").strip()

            # Exit commands (English or Chinese).
            if user_input.lower() in ['quit', 'exit', '退出']:
                print("再见！")
                break

            # Toggle streaming on.
            if user_input.lower() == 'stream':
                stream_mode = True
                print("已开启流式模式")
                continue

            # Toggle streaming off.
            if user_input.lower() == 'normal':
                stream_mode = False
                print("已关闭流式模式")
                continue

            # Ignore empty input.
            if not user_input:
                continue

            if stream_mode:
                print("助手: ", end="", flush=True)
                for chunk in client.chat_stream([{"role": "user", "content": user_input}]):
                    print(chunk, end="", flush=True)
                print()  # newline after the streamed reply
            else:
                response = client.simple_chat(user_input)
                print(f"助手: {response}")

        except KeyboardInterrupt:
            # Ctrl-C exits the loop cleanly.
            print("\n\n程序被中断，再见！")
            break
        except Exception as e:
            # Keep the REPL alive on unexpected errors.
            print(f"发生错误: {e}")


def example_usage():
    """Scripted demonstration of the client's main entry points."""
    # Configuration: prefer environment variables so credentials are not
    # forced to live in source; fall back to the original built-in values
    # for backward compatibility.
    # NOTE(review): the fallback API key is committed to source control and
    # should be rotated/removed — confirm with the repo owner.
    API_KEY = os.environ.get("LLM_API_KEY", "sk-ldcfdqyvcyw7pqp7qp37jm6c4wc3nglrzfzhqwusflrc3v5j")
    BASE_URL = os.environ.get("LLM_BASE_URL", "https://maas-api.lanyun.net/v1")
    MODEL = os.environ.get("LLM_MODEL", "/maas/qwen/Qwen3-235B-A22B")

    # Create the client
    client = ChatClient(API_KEY, BASE_URL, MODEL)

    print("=== 示例用法 ===")

    # 1. Simple single-turn chat
    print("\n1. 简单聊天:")
    response = client.simple_chat("你好，请介绍一下你自己")
    print(f"回复: {response}")

    # 2. Chat with a system prompt
    print("\n2. 带系统提示词的聊天:")
    system_prompt = "你是一个专业的Python编程助手，请用简洁明了的方式回答问题。"
    response = client.simple_chat("如何创建一个Python类？", system_prompt)
    print(f"回复: {response}")

    # 3. Streaming chat
    print("\n3. 流式聊天:")
    print("回复: ", end="", flush=True)
    for chunk in client.chat_stream([{"role": "user", "content": "请写一个Python函数来计算斐波那契数列"}]):
        print(chunk, end="", flush=True)
    print()

    # 4. Advanced sampling parameters
    print("\n4. 高级参数使用:")
    messages = [{"role": "user", "content": "请解释什么是机器学习"}]
    response = client.chat(
        messages=messages,
        max_tokens=1000,
        temperature=0.8,
        frequency_penalty=0.5,
        presence_penalty=0.3
    )
    if response and response.choices:
        print(f"回复: {response.choices[0].message.content}")


if __name__ == "__main__":
    # "script.py example" runs the scripted demos; anything else starts
    # the interactive chat loop.
    wants_example = len(sys.argv) > 1 and sys.argv[1] == "example"
    if wants_example:
        example_usage()
    else:
        main()
