"""
Proxy-safe helpers for generating search topics/keywords and querying Serper.

- Disables environment proxies by default (requests.Session().trust_env = False)
- If a proxy is explicitly indicated by env (HTTP_PROXY/HTTPS_PROXY/ALL_PROXY,
  or USE_PROXY/FORCE_LOCAL_PROXY/LOCAL_PROXY), force it to 127.0.0.1:7897
- Provides Google Gemini fallback for topic/keywords generation

Environment:
- AIHUBMIX_API_KEY: key for aihubmix.com OpenAI-compatible endpoint
- GEMINI_API_KEY: key for Google Generative AI SDK (fallback)
- LOCAL_PROXY (optional): proxy host:port, defaults to 127.0.0.1:7897 when proxy is enabled
- USE_PROXY / FORCE_LOCAL_PROXY / HTTP(S)_PROXY / ALL_PROXY: any of these enables using LOCAL_PROXY
"""

from __future__ import annotations

import json
import os
from typing import List

import requests


# ------------------------------
# Proxy/session helpers
# ------------------------------

def _should_use_local_proxy() -> bool:
    keys = (
        "USE_PROXY", "FORCE_LOCAL_PROXY", "LOCAL_PROXY",
        "HTTP_PROXY", "HTTPS_PROXY", "ALL_PROXY",
        "http_proxy", "https_proxy", "all_proxy",
    )
    return any(bool(os.environ.get(k)) for k in keys)


def _local_proxies() -> dict:
    addr = (os.environ.get("LOCAL_PROXY") or "127.0.0.1:7897").strip()
    return {"http": f"http://{addr}", "https": f"http://{addr}"}


def _session() -> requests.Session:
    """Create a requests session that ignores ambient proxy settings.

    trust_env=False prevents requests from reading HTTP(S)_PROXY et al.
    from the environment on its own; when the user has explicitly opted
    in (see _should_use_local_proxy), traffic is routed through the
    local proxy instead.
    """
    sess = requests.Session()
    sess.trust_env = False  # never auto-detect proxies from the environment
    if _should_use_local_proxy():
        sess.proxies = _local_proxies()
    return sess


# ------------------------------
# Public API
# ------------------------------

def generate_topic_and_keywords(input_sentence: str, api_key: str | None):
    """
    Return a dict with ASCII keys {title: str, keywords: [str,...]}.

    Priority:
    1) aihubmix (OpenAI-compatible) using model gpt-4o-mini
    2) Fallback to Google Gemini (gemini-1.5-flash)
    3) Naive last-resort
    """
    # Improved prompt: produce an angle-driven title and two groups of high-quality queries
    prompt_en = (
        "Analyze the input topic and return pure JSON with fields:\n"
        "- title (string, 6-14 English words, specific and angle-driven; no brand names; avoid 'Discover'/'Explore').\n"
        "- keywords (array of 6 English search queries):\\n"
        "  • The first 3 are broad/practical queries for blogs/media/industry guides.\\n"
        "  • The last 3 are authority-oriented queries for edu/gov/standards/papers.\n\n"
        "All queries must follow:\n"
        "- English-only, ASCII only; 8-14 words each.\n"
        "- Extract domain vocabulary from the topic; include 1-2 specific terms relevant to color/undertone/material/care/constraints/outfit/etc. (as applicable).\n"
        "- Add exclusions to reduce social/UGC/video: -site:pinterest.com -site:youtube.com -site:reddit.com -site:tiktok.com -site:instagram.com -site:medium.com -site:quora.com\n"
        "- For the last 3 authority queries, prefer hints like site:edu OR site:gov OR filetype:pdf (use them where appropriate).\n"
        "- No quotes, no emojis, no pipes/brackets; no trailing punctuation; avoid duplicates.\n\n"
        "Output JSON only, no code fences, no explanations.\n"
        "Input topic: " + str(input_sentence)
    )

    # 1) aihubmix
    if api_key:
        try:
            url = "https://aihubmix.com/v1/chat/completions"
            headers = {"Authorization": f"Bearer {api_key}", "Content-Type": "application/json"}
            data = {
                "model": "gpt-4o-mini",
                "messages": [{"role": "user", "content": prompt_en}],
            }
            resp = _session().post(url, headers=headers, data=json.dumps(data), timeout=45)
            resp.raise_for_status()
            obj = resp.json()
            content = obj.get('choices', [{}])[0].get('message', {}).get('content', '')
            if isinstance(content, str):
                t = content.strip()
                if t.startswith('```json') and t.endswith('```'):
                    t = t[7:-3].strip()
                elif t.startswith('```') and t.endswith('```'):
                    t = t[3:-3].strip()
                try:
                    data = json.loads(t)
                    out = {}
                    if isinstance(data, dict):
                        # title
                        for k, v in data.items():
                            if isinstance(v, str) and not out.get('title'):
                                kl = str(k).lower()
                                if ('title' in kl) or ('theme' in kl):
                                    out['title'] = v.strip()
                        # keywords
                        for k, v in data.items():
                            if isinstance(v, list) and all(isinstance(x, str) for x in v):
                                out['keywords'] = [x.strip() for x in v if isinstance(x, str) and x.strip()]
                                break
                    if out.get('title') and out.get('keywords'):
                        return out
                except Exception:
                    pass
        except Exception as e:
            print(f"API请求出错 (aihubmix): {e}")

    # 2) Google Gemini fallback
    try:
        try:
            import google.generativeai as genai  # type: ignore
        except Exception:
            genai = None  # type: ignore
        if genai is not None and os.environ.get("GEMINI_API_KEY"):
            genai.configure(api_key=os.environ["GEMINI_API_KEY"])
            mdl = genai.GenerativeModel("gemini-1.5-flash")
            resp = mdl.generate_content(prompt_en)
            text = getattr(resp, 'text', '').strip()
            if text.startswith('```') and text.endswith('```'):
                t = text.strip('`').strip()
                if t.lower().startswith('json'):
                    text = t[4:].strip()
                else:
                    text = t
            data = json.loads(text)
            out = {}
            if isinstance(data, dict):
                if isinstance(data.get('title'), str):
                    out['title'] = data['title'].strip()
                if isinstance(data.get('keywords'), list):
                    out['keywords'] = [str(x).strip() for x in data['keywords'] if str(x).strip()]
            if out.get('title') and out.get('keywords'):
                return out
    except Exception as e:
        print(f"Gemini 回退失败: {e}")

    # 3) Naive last resort
    base = (str(input_sentence) or '').strip() or 'General topic'
    title = base[:60]
    kws = [
        f"{base} blog",
        f"{base} practical guide",
        f"{base} case study",
        f"{base} academic research",
        f"{base} authoritative report",
        f"{base} scholarly study",
    ]
    return {"title": title, "keywords": kws}


def search_with_serper(keyword: str, api_key: str) -> List[str]:
    """Query the Serper.dev Google-search API and return result links.

    Collects links from the 'organic', 'news' and 'peopleAlsoAsk'
    sections of the response, de-duplicated while preserving first-seen
    order. Returns an empty list on any request/parse failure (the error
    is printed, not raised), so callers can treat it as best-effort.
    """
    url = "https://google.serper.dev/search"
    payload = json.dumps({"q": keyword})
    headers = {'X-API-KEY': api_key, 'Content-Type': 'application/json'}
    try:
        resp = _session().post(url, headers=headers, data=payload, timeout=45)
        resp.raise_for_status()
        result = resp.json()
        links: List[str] = []
        # All three sections share the same {'link': url, ...} item shape,
        # so one loop over the section names replaces three copies.
        for section in ('organic', 'news', 'peopleAlsoAsk'):
            for block in (result.get(section, []) or []):
                u = block.get('link')
                if isinstance(u, str):
                    links.append(u)
        # dict.fromkeys de-duplicates while preserving insertion order.
        return list(dict.fromkeys(links))
    except Exception as e:
        print(f"Serper 搜索失败: {e}")
        return []
