import json
import requests
from requests.adapters import HTTPAdapter


class OllamaClient:
    """Ollama API client with connection reuse and parameterized inputs.

    Wraps the server's ``/api/generate`` endpoint behind a
    ``requests.Session`` so repeated calls reuse pooled HTTP connections.
    All public methods are best-effort: on any failure they print the error
    and return the sentinel string ``'error'`` instead of raising.
    """

    def __init__(self, base_url="http://192.168.50.175:11434", model="llama3.2-vision:11b"):
        """
        Args:
            base_url: Root URL of the Ollama server, no trailing slash.
            model: Name of the Ollama model to query.
        """
        self.base_url = base_url
        self.model = model
        self.session = requests.Session()
        # Connection pool: up to 10 pools, keep at most 5 connections per host.
        self.session.mount('http://', HTTPAdapter(pool_connections=10, pool_maxsize=5))

    def _build_prompt(self, sample_data, candidate_types):
        """Build the standardized field-type-analysis prompt.

        The template text is Chinese (model-facing instructions) and is
        deliberately kept verbatim; only ``candidate_types`` and
        ``sample_data`` are interpolated.

        Args:
            sample_data: Example JSON data (string or JSON-serializable repr).
            candidate_types: List of allowed type names the model may answer with.

        Returns:
            The fully rendered prompt string.
        """
        return f"""需求说明
 1. 请依据下面示例json数据的key和value判断每个字段的类型  
 2. 不要分析直接输出结果即可    
  
输出要求:
 1. 不需要分析和思考,直接输出结果。
 2. 必须从 候选类型列表 中选择。如果不知道选啥就选unknown。不要自己创造类型
 3. 下面的输出JSON格式必须严格遵循:
    {{ "field1": "type", "field2": "type" }}
 4. 忽略空值和异常值，根据大多数有效样本进行判断
 5. first_name 和 last_name 不是全名

        
候选类型列表: {json.dumps(candidate_types, ensure_ascii=False)}

示例json数据:
{sample_data}
                """.strip()

    def _generate(self, prompt, temperature, max_tokens, timeout):
        """POST one non-streaming ``/api/generate`` request (shared by both public methods).

        Args:
            prompt: Full prompt text to send.
            temperature: Sampling temperature forwarded to the model.
            max_tokens: Upper bound on generated tokens (``num_predict``).
            timeout: Request timeout in seconds, guards against a hung server.

        Returns:
            The model's ``"response"`` text, ``"No response"`` if that key is
            absent, or ``'error'`` on any request/HTTP failure.
        """
        payload = {
            "model": self.model,
            "prompt": prompt,
            "stream": False,
            "options": {
                "seed": 42,  # fixed seed for reproducible generations
                "temperature": temperature,
                "num_predict": max_tokens
            }
        }

        try:
            response = self.session.post(
                f"{self.base_url}/api/generate",
                json=payload,
                timeout=timeout
            )
            response.raise_for_status()
            return response.json().get("response", "No response")
        except Exception as e:
            # Best-effort contract: report and signal failure via sentinel
            # string rather than raising to the caller.
            print('{},call client error'.format(e))
            return 'error'

    def analyze_data_type(self, sample_data, candidate_types=None, temperature=0.3, max_tokens=2048):
        """Ask the model to classify the type of each field in ``sample_data``.

        Args:
            sample_data: Example JSON data embedded into the prompt.
            candidate_types: Allowed type names; defaults to the built-in list
                below when ``None`` (avoids a mutable default argument).
            temperature: Sampling temperature (low default for determinism).
            max_tokens: Generation cap forwarded as ``num_predict``.

        Returns:
            Model response text, ``"No response"``, or ``'error'`` on failure.
        """
        if candidate_types is None:
            candidate_types = ["phone_number", "address", "email", "date", "id",
                               "username", "int", "city", "ip", "unknown"]

        prompt = self._build_prompt(sample_data, candidate_types)
        return self._generate(prompt, temperature, max_tokens, timeout=240)

    def get_data_base_relationship(self, data, temperature=0.7, max_tokens=2048):
        """Send a caller-supplied prompt asking about database relationships.

        Args:
            data: The complete prompt text (used as-is, no template applied).
            temperature: Sampling temperature (higher default than type analysis).
            max_tokens: Generation cap forwarded as ``num_predict``.

        Returns:
            Model response text, ``"No response"``, or ``'error'`` on failure.
        """
        # Longer timeout than analyze_data_type: relationship prompts are
        # typically larger and slower to answer.
        return self._generate(data, temperature, max_tokens, timeout=300)


