import json
from typing import Union
import logging
from xml.etree.ElementTree import indent

import aiohttp
from urllib.parse import urlparse
from fastapi import (
    HTTPException,
    Request,
    APIRouter,
)
from pydantic import BaseModel
from typing_extensions import Optional

from fastapi.responses import StreamingResponse
from starlette.background import BackgroundTask

import llm_service.tools.rule
from llm_service.env import (
    AIOHTTP_CLIENT_TIMEOUT,
    AIOHTTP_CLIENT_TIMEOUT_OPENAI_MODEL_LIST,
    SRC_LOG_LEVELS
)
from llm_service.tools.rule import get_function_by_name

# Module-level logger; verbosity is driven by the "OLLAMA" entry of SRC_LOG_LEVELS.
log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["OLLAMA"])


##########################################
#
# Utility functions
#
##########################################


async def send_get_request(url, key=None):
    """GET *url* and return its decoded JSON body, or ``None`` on any failure.

    When *key* is provided it is sent as a Bearer token. All exceptions
    (connection errors, timeouts, bad JSON) are logged and swallowed so
    callers can treat an unreachable backend as "no data".
    """
    timeout = aiohttp.ClientTimeout(total=AIOHTTP_CLIENT_TIMEOUT_OPENAI_MODEL_LIST)
    headers = {}
    if key:
        headers["Authorization"] = f"Bearer {key}"
    try:
        async with aiohttp.ClientSession(timeout=timeout, trust_env=True) as session:
            async with session.get(url, headers=headers) as response:
                return await response.json()
    except Exception as e:
        # Handle connection error here
        log.error(f"Connection error: {e}")
        return None


async def cleanup_response(
        response: Optional[aiohttp.ClientResponse],
        session: Optional[aiohttp.ClientSession],
):
    """Release a streamed HTTP response and its owning session, if present.

    Bug fix: the previous ``if response:`` relied on truthiness, and
    ``aiohttp.ClientResponse.__bool__`` reflects ``response.ok`` — so a
    non-2xx response was falsy and never closed, leaking the connection on
    exactly the error paths where cleanup matters. Compare against ``None``
    explicitly instead.
    """
    if response is not None:
        response.close()
    if session is not None:
        await session.close()


async def send_post_request(
        url: str,
        payload: Union[str, bytes],
        stream: bool = True,
        key: Optional[str] = None,
        content_type: Optional[str] = None,
):
    """POST *payload* (a JSON string/bytes) to *url*.

    Args:
        url: Target endpoint.
        payload: Pre-serialized JSON request body.
        stream: When True, return a ``StreamingResponse`` that relays the
            upstream body; when False, return the parsed JSON dict.
        key: Optional Bearer token for the upstream service.
        content_type: Optional override for the streamed Content-Type header.

    Raises:
        HTTPException: on any upstream or connection failure, carrying the
            upstream status (or 500 when no response was received) and the
            upstream "error" field when one is available.
    """
    r = None
    session = None
    try:
        session = aiohttp.ClientSession(
            trust_env=True, timeout=aiohttp.ClientTimeout(total=AIOHTTP_CLIENT_TIMEOUT)
        )

        r = await session.post(
            url,
            data=payload,
            headers={
                "Content-Type": "application/json",
                **({"Authorization": f"Bearer {key}"} if key else {}),
            },
        )
        r.raise_for_status()

        if stream:
            response_headers = dict(r.headers)

            if content_type:
                response_headers["Content-Type"] = content_type

            # Cleanup is deferred to the background task so the body can
            # finish streaming to the client first.
            return StreamingResponse(
                r.content,
                status_code=r.status,
                headers=response_headers,
                background=BackgroundTask(
                    cleanup_response, response=r, session=session
                ),
            )
        else:
            res = await r.json()
            await cleanup_response(r, session)
            return res

    except Exception as e:
        detail = None

        if r is not None:
            try:
                res = await r.json()
                if "error" in res:
                    detail = f"Ollama: {res.get('error', 'Unknown error')}"
            except Exception:
                detail = f"Ollama: {e}"

        # Capture the status before closing, then release the connection —
        # bug fix: the session (and failed response) used to leak on this path.
        status_code = r.status if r else 500
        await cleanup_response(r, session)

        raise HTTPException(
            status_code=status_code,
            detail=detail if detail else "Open WebUI: Server Connection Error",
        )


def get_api_key(idx, url, configs):
    """Return the API key configured for a connection, or ``None``.

    Lookup is by connection index first; for legacy configs keyed by the
    connection's origin (scheme://host:port) the base URL is tried as a
    fallback.
    """
    parts = urlparse(url)
    origin = f"{parts.scheme}://{parts.netloc}"
    entry = configs.get(str(idx), configs.get(origin, {}))  # Legacy support
    return entry.get("key", None)


router = APIRouter()


class ChatMessage(BaseModel):
    """A single message in an Ollama chat conversation."""
    role: str  # message author role (e.g. "user", "assistant", "tool") — TODO confirm full set
    content: str  # message text
    tool_calls: Optional[list[dict]] = None  # tool invocations requested by the model, if any
    images: Optional[list[str]] = None  # attached images; presumably base64-encoded per Ollama API — verify


class GenerateChatCompletionForm(BaseModel):
    """Request body for /api/chat, mirroring Ollama's chat-completion payload."""
    model: str  # model identifier to run
    messages: list[ChatMessage]  # conversation history, oldest first
    format: Optional[Union[dict, str]] = None  # response format (e.g. JSON schema or "json") — TODO confirm
    options: Optional[dict] = None  # model runtime options passed through to Ollama
    template: Optional[str] = None  # prompt template override
    stream: Optional[bool] = True  # stream the response (default) vs. return a single JSON object
    keep_alive: Optional[Union[int, str]] = None  # how long the model stays loaded after the request
    tools: Optional[list[dict]] = None  # caller-supplied tool schemas (overridden by `tools` below in the handler)


class OllamaConfigForm(BaseModel):
    """Configuration payload for the Ollama backend connections."""
    ENABLE_OLLAMA_API: Optional[bool] = None  # master switch for the Ollama API integration
    OLLAMA_BASE_URLS: list[str]  # one entry per upstream Ollama instance
    OLLAMA_API_CONFIGS: dict  # per-connection config, keyed by index or base URL (see get_api_key)


# Tool (function-calling) JSON schemas advertised to the model on every
# /api/chat request; names in these schemas are resolved back to Python
# callables via get_function_by_name in the handler below.
tools = [llm_service.tools.rule.search_knowledge_base_schema,
         llm_service.tools.rule.get_weather_schema,
         llm_service.tools.rule.send_email_schema,
         llm_service.tools.rule.create_rule_schema
         ]


@router.post("/api/chat")
async def generate_chat_completion(request: Request, form_data: dict):
    """Proxy a chat completion to the local Ollama instance with tool calling.

    The request is validated against GenerateChatCompletionForm, the module's
    tool schemas are injected, and — in non-streaming mode — the model is
    re-queried with each round of tool results until it stops requesting
    tools. The final result carries the full message history under "history".

    Raises:
        HTTPException: 400 on an invalid request body; upstream errors are
            propagated by send_post_request.
    """
    try:
        form_data = GenerateChatCompletionForm(**form_data)
    except Exception as e:
        log.exception(e)
        raise HTTPException(status_code=400, detail=str(e))

    url = "http://localhost:11434"
    payload = {
        **form_data.model_dump(exclude_none=True),
        "tools": [{"type": "function", "function": tool} for tool in tools],
    }

    log.debug("payload sent to ollama:\n%s", json.dumps(payload, indent=4, ensure_ascii=False))

    result = await send_post_request(
        url=f"{url}/api/chat",
        payload=json.dumps(payload),
        stream=form_data.stream,
        content_type="application/x-ndjson",
    )

    # Bug fix: with stream=True (the default) send_post_request returns a
    # StreamingResponse, which is not subscriptable — the old code crashed on
    # result["message"]. The tool-call loop only works on parsed JSON, so in
    # streaming mode hand the upstream stream straight back to the client.
    if isinstance(result, StreamingResponse):
        return result

    payload["messages"].append(result["message"])

    # No tool calls requested: return the final answer immediately.
    if result["message"].get("tool_calls") is None:
        result["history"] = payload["messages"]
        log.debug("result from ollama:\n%s", json.dumps(result, indent=4, ensure_ascii=False))
        return result

    # Tool calls requested: invoke the tools and re-query the model, round
    # after round, until it stops asking for tools.
    while result["message"].get("tool_calls") is not None:
        function_result = []
        for item in result["message"]["tool_calls"]:
            log.debug("tool call:\n%s", json.dumps(item, indent=4, ensure_ascii=False))
            try:
                fn = get_function_by_name(item["function"]["name"])
                fn_res = fn(**item["function"]["arguments"])
                # Feed the tool output back to the model as a "tool" message.
                function_result.append(
                    {"role": "tool", "content": fn_res, "name": item["function"]["name"]}
                )
            except Exception as e:
                # Best effort: a failing tool is skipped so the remaining
                # tool calls in this round still run.
                log.exception(e)
                continue

        if not function_result:
            # Every tool call failed — stop and return what we have.
            break

        payload["messages"].extend(function_result)
        log.debug("payload with tool results sent to ollama:\n%s",
                  json.dumps(payload, indent=4, ensure_ascii=False))
        result = await send_post_request(
            url=f"{url}/api/chat",
            payload=json.dumps(payload),
            stream=form_data.stream,
            content_type="application/x-ndjson",
        )
        # Record the model's follow-up answer in the running history.
        payload["messages"].append(result["message"])
        log.debug("tool-round result from ollama:\n%s",
                  json.dumps(result, indent=4, ensure_ascii=False))

    result["history"] = payload["messages"]
    return result
