import requests
import json

class OllamaChatbot:
    """Chat client for a local Ollama server using the ``/api/chat`` endpoint.

    Maintains the full conversation history so each request carries the
    complete context of the session.
    """

    def __init__(self, model_name="gemma3:270m", base_url="http://localhost:11434"):
        """
        Initialize the Ollama chatbot and verify the service is reachable.

        :param model_name: model name, default ``gemma3:270m``
        :param base_url: Ollama service address, default local port 11434
        :raises ConnectionError: if the Ollama service cannot be reached
        """
        self.model_name = model_name
        self.base_url = base_url
        self.session_history = []  # list of {"role": ..., "content": ...} dicts

        # Probe the service now so failures surface at construction time
        # rather than on the first chat request.  The timeout prevents
        # hanging forever when the server is down or unreachable.
        try:
            response = requests.get(f"{base_url}/api/tags", timeout=10)
            if response.status_code != 200:
                raise ConnectionError(f"无法连接到Ollama服务，HTTP状态码: {response.status_code}")
            print(f"✅ 成功连接到Ollama服务，可用模型: {[m['name'] for m in response.json()['models']]}")
        except ConnectionError:
            # Already our error type — don't re-wrap and garble the message.
            raise
        except Exception as e:
            raise ConnectionError(f"连接Ollama服务失败: {str(e)}") from e

    def generate_response(self, prompt, stream=False):
        """
        Generate an AI reply for *prompt*, appending both turns to the history.

        :param prompt: user input text
        :param stream: if True, consume the server's NDJSON stream and return
            the concatenated reply (the non-streaming path returns a single
            JSON object)
        :return: the AI reply text
        :raises RuntimeError: on HTTP failure or a malformed response
        """
        self.session_history.append({"role": "user", "content": prompt})

        payload = {
            "model": self.model_name,
            "messages": self.session_history,
            "stream": stream,
            "options": {"temperature": 0.7}
        }

        try:
            # ``json=`` serializes the payload and sets the Content-Type
            # header for us; ``stream=`` keeps the socket open for NDJSON.
            response = requests.post(
                f"{self.base_url}/api/chat",
                json=payload,
                stream=stream,
                timeout=120
            )

            if response.status_code != 200:
                raise RuntimeError(f"API请求失败，状态码: {response.status_code}")

            if stream:
                # Streaming replies arrive as one JSON object per line;
                # calling response.json() here would raise on the NDJSON body.
                parts = []
                for line in response.iter_lines():
                    if not line:
                        continue
                    chunk = json.loads(line)
                    parts.append(chunk.get("message", {}).get("content", ""))
                    if chunk.get("done"):
                        break
                ai_response = "".join(parts)
            else:
                ai_response = response.json()["message"]["content"]

            # Record the assistant turn so future requests carry it as context.
            self.session_history.append({"role": "assistant", "content": ai_response})
            return ai_response

        except RuntimeError:
            # Roll back the user turn so a failed request doesn't leave the
            # history out of sync with the actual exchange.
            self.session_history.pop()
            raise
        except Exception as e:
            self.session_history.pop()
            raise RuntimeError(f"生成回复时出错: {str(e)}") from e

def interactive_chat():
    """Run an interactive console chat session against the local model.

    Reads user messages from stdin and prints model replies until the user
    types an exit word, presses Ctrl-C, or an error occurs.
    """
    print("🚀 Ollama本地模型聊天机器人 (gemma3:270m)")
    print("输入您的消息开始对话，输入'退出'、'exit'或'quit'结束对话\n")

    bot = OllamaChatbot()
    quit_words = {'退出', 'exit', 'quit', 'q', '再见'}

    while True:
        try:
            message = input("你: ")
            if message.lower() in quit_words:
                print("对话结束，再见！")
                break

            # Print the prefix first so the reply appears on the same line.
            print("AI: ", end="", flush=True)
            reply = bot.generate_response(message)
            print(reply + "\n")

        except KeyboardInterrupt:
            print("\n对话被用户中断")
            break
        except Exception as err:
            print(f"发生错误: {str(err)}")
            break

if __name__ == "__main__":
    # Make sure the Ollama service is already running (run `ollama serve` in a terminal).
    interactive_chat()