import json
import logging
import os
from typing import Any, Dict, List, Optional

import requests

from .base import BaseLLMService

class QwenChatModel(BaseLLMService):
    """Qwen (Tongyi Qianwen) model service backed by an Ollama-compatible HTTP API."""

    def __init__(self, config: Dict[str, Any]):
        """
        Initialize the Qwen model service.

        Args:
            config: Configuration mapping. Recognized keys:
                model_name: Ollama model tag (default 'qwen2.5:3b').
                api_base: Base URL of the Ollama server
                    (default 'http://localhost:11434').
                temperature: Sampling temperature (default 0.7).
                max_tokens: Cap on generated tokens (default 2000).
                timeout: HTTP request timeout in seconds (default 60).
        """
        self.model_name = config.get('model_name', 'qwen2.5:3b')
        self.api_base = config.get('api_base', 'http://localhost:11434')
        self.temperature = config.get('temperature', 0.7)
        self.max_tokens = config.get('max_tokens', 2000)
        # Bounded timeout so a hung or unreachable server cannot block the
        # caller forever (requests has NO default timeout).
        self.timeout = config.get('timeout', 60)

    def chat(self, messages: List[Dict[str, str]], stream: bool = False) -> str:
        """
        Send a chat request to the model and return its reply.

        Args:
            messages: Conversation as a list of {'role': ..., 'content': ...} dicts.
            stream: If True, consume the server's streamed NDJSON response and
                return the concatenated content; if False, return the single
                non-streamed reply.

        Returns:
            str: The model's reply text, or a string starting with 'Error: '
            on failure (preserved error contract for existing callers).
        """
        url = f"{self.api_base}/api/chat"

        payload = {
            "model": self.model_name,
            "messages": messages,
            "stream": stream,
            "options": {
                "temperature": self.temperature,
                # Ollama's option for the generation-length cap is
                # 'num_predict'; a 'max_tokens' option is silently ignored.
                "num_predict": self.max_tokens,
            },
        }

        try:
            response = requests.post(
                url, json=payload, stream=stream, timeout=self.timeout
            )
            response.raise_for_status()

            if stream:
                # A streaming response is NDJSON: one JSON object per line.
                # response.json() would fail on it, so accumulate the chunks.
                parts: List[str] = []
                for line in response.iter_lines():
                    if not line:
                        continue
                    chunk = json.loads(line)
                    parts.append(chunk.get('message', {}).get('content', ''))
                    if chunk.get('done'):
                        break
                return ''.join(parts)

            result = response.json()
            return result.get('message', {}).get('content', '')
        except Exception as e:
            logging.getLogger(__name__).exception("Error in QwenChatModel.chat")
            # Keep the original error-string contract rather than raising.
            return f"Error: {str(e)}"

    def get_embeddings(self, text: str) -> List[float]:
        """
        Compute the embedding vector for a piece of text.

        Args:
            text: Input text to embed.

        Returns:
            List[float]: The embedding vector, or an empty list on failure
            (preserved error contract for existing callers).
        """
        url = f"{self.api_base}/api/embeddings"

        # Ollama's /api/embeddings endpoint expects the text under 'prompt';
        # the 'input' key is only accepted by the newer /api/embed endpoint,
        # so the previous payload produced empty results.
        payload = {
            "model": self.model_name,
            "prompt": text,
        }

        try:
            response = requests.post(url, json=payload, timeout=self.timeout)
            response.raise_for_status()
            result = response.json()
            return result.get('embedding', [])
        except Exception as e:
            logging.getLogger(__name__).exception(
                "Error in QwenChatModel.get_embeddings"
            )
            return []

# Register this provider with the service factory under the key 'qwen'
# (module-level side effect: runs on import).
from .base import LLMServiceFactory
LLMServiceFactory.register('qwen', QwenChatModel)