from fastapi import FastAPI, Request, HTTPException, Query
from fastapi.responses import StreamingResponse, FileResponse
from fastapi.middleware.cors import CORSMiddleware
from fastapi.staticfiles import StaticFiles
from openai import OpenAI
import os
import json
import uuid
import urllib.parse
from datetime import datetime
import asyncio
import sqlite3
from mcp_api import router as mcp_router, get_mcp_server_details  # Import MCP router and helper
from fastmcp import Client
from fastmcp.client.transports import SSETransport
from dotenv import load_dotenv

# Fail fast when the .env configuration file is absent.
if not os.path.exists('.env'):
    raise ValueError('环境变量文件.env不存在，请检查')

load_dotenv()

# Runtime configuration pulled from the environment.
API_KEY = os.getenv('API_KEY')
BASE_URL = os.getenv('BASE_URL')
MODEL_NAME = os.getenv('MODEL_NAME')
# Bocha AI web-search key; not validated here, may be None if unset.
BOCHAAI_SEARCH_API_KEY = os.getenv("BOCHAAI_SEARCH_API_KEY")

# The three core settings are mandatory.
if not all((API_KEY, BASE_URL, MODEL_NAME)):
    raise ValueError('env文件里配置错误，请检查环境变量 .env文件')

# FastAPI application instance.
app = FastAPI()

# Register the MCP management endpoints.
app.include_router(mcp_router)

# CORS is wide open: any origin, method, and header is accepted.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

# Serve front-end assets from ./static under the /static path.
app.mount("/static", StaticFiles(directory="static"), name="static")

# OpenAI-compatible client pointed at the configured backend model service.
ai_client = OpenAI(api_key=API_KEY, base_url=BASE_URL)

def init_db(db_path: str = 'chat_history.db') -> None:
    """Create the SQLite schema (chat sessions, messages, MCP servers/tools).

    All tables are created with IF NOT EXISTS, so calling this repeatedly
    is safe.

    Args:
        db_path: Path of the SQLite database file. Defaults to the
            application's 'chat_history.db' so existing callers are
            unaffected.
    """
    conn = sqlite3.connect(db_path)
    try:
        # `with conn:` runs the DDL in one transaction and commits on success.
        with conn:
            # Chat sessions table
            conn.execute('''
            CREATE TABLE IF NOT EXISTS chat_sessions (
                id TEXT PRIMARY KEY,
                summary TEXT,
                created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
            )
            ''')

            # Messages table (one row per chat message, linked to a session)
            conn.execute('''
            CREATE TABLE IF NOT EXISTS messages (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                session_id TEXT,
                role TEXT,
                content TEXT,
                created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                FOREIGN KEY (session_id) REFERENCES chat_sessions (id)
            )
            ''')

            # Registered MCP servers
            conn.execute('''
            CREATE TABLE IF NOT EXISTS mcp_servers (
                id TEXT PRIMARY KEY,
                name TEXT NOT NULL,
                url TEXT NOT NULL,
                description TEXT,
                auth_type TEXT,
                auth_value TEXT,
                created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
            )
            ''')

            # Tools exposed by each MCP server
            conn.execute('''
            CREATE TABLE IF NOT EXISTS mcp_tools (
                id TEXT PRIMARY KEY,
                server_id TEXT,
                name TEXT NOT NULL,
                description TEXT,
                input_schema TEXT,
                created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                FOREIGN KEY (server_id) REFERENCES mcp_servers(id)
            )
            ''')
    finally:
        # Always release the connection, even if a statement fails.
        conn.close()
    print("数据库初始化完成")

async def perform_web_search(query: str):
    """Query the Bocha AI web-search API and return the result.

    Args:
        query: The search query string.

    Returns:
        str: The raw JSON response rendered with str(), or a
        human-readable (Chinese) error message on any failure.
        This function never raises.

    NOTE(review): this uses the blocking `requests` library inside an
    async function, which blocks the event loop for the duration of the
    HTTP call; consider an async HTTP client.
    """
    try:
        import requests
        headers = {
            'Content-Type': 'application/json',
            # BUG FIX: the HTTP auth scheme is "Bearer" (RFC 6750); the
            # previous value "Bear" caused authentication to fail.
            'Authorization': f'Bearer {BOCHAAI_SEARCH_API_KEY}'
        }

        payload = json.dumps({
            "query": query,
            "freshness": "noLimit",
            "summary": True,
            "count": 10
        })
        # Search API docs: https://bocha-ai.feishu.cn/wiki/RXEOw02rFiwzGSkd9mUcqoeAnNK
        url = "https://api.bochaai.com/v1/web-search"
        response = requests.post(url=url, headers=headers, data=payload)

        # Any non-200 status is reported back as a message, not raised.
        if response.status_code != 200:
            return f"搜索失败，此状态码：{response.status_code}"

        try:
            json_data = response.json()
            print(f'bocha_ai search response: {json_data}')
            return str(json_data)
        except json.JSONDecodeError as e:
            return f"搜索结果JSON解析失败: {str(e)}"
    except Exception as e:
        return f'执行网络搜索时出错: {str(e)}'

# Add message to existing session
async def add_message_to_session(session_id: str, query: str, response: str):
    """Append a user/assistant message pair to an existing session and
    bump the session's updated_at timestamp.

    Args:
        session_id: ID of an existing row in chat_sessions.
        query: The user's message text.
        response: The assistant's reply text.
    """
    # Take the timestamp once so both messages and the session update
    # agree (the original called datetime.now() three separate times).
    now = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
    conn = sqlite3.connect('chat_history.db')
    try:
        # One transaction: either both messages land and the session is
        # touched, or nothing is written.
        with conn:
            conn.executemany(
                '''
                INSERT INTO messages (session_id, role, content, created_at)
                VALUES (?, ?, ?, ?)
                ''',
                [
                    (session_id, "user", query, now),
                    (session_id, "assistant", response, now),
                ],
            )
            conn.execute(
                '''
                UPDATE chat_sessions
                SET updated_at = ?
                WHERE id = ?
                ''',
                (now, session_id),
            )
    finally:
        conn.close()

async def process_stream_request(query: str, session_id: str = None, web_search: bool = False, agent_mode: bool = False):
    print(f"query: {query}")
    print(f"session_id: {session_id}")
    print(f"web_search: {web_search}")
    print(f"agent_mode: {agent_mode}")

    conn = sqlite3.connect('chat_history.db')
    cursor = conn.cursor()
    cursor.execute("SELECT id FROM chat_sessions WHERE id = ?", (session_id,))

    has_session = cursor.fetchone()
    if not has_session:
        session_id = str(uuid.uuid4())
    
    # build context (only web search if enabled)
    context_parts = []
    if web_search:
        web_results = await perform_web_search(query)
        context_parts.append(web_search)
    
    context = "\n".json(context_parts) if context_parts else '无上下文信息'

    # Common response generator function
    async def generate(content_stream=None, initial_content=""):
        full_response = initial_content

        # Handle streaming content if provided
        if content_stream:
            try:
                for chunk in content_stream:
                    if chunk.choices[0].delta.content:
                        content =  chunk.choices[0].delta.content
                        full_response += content
                        yield f"data: {json.dumps({'content': content, 'session_id': session_id})}\n\n"
                        await asyncio.sleep(0.01)
                    if chunk.choices[0].finish_reason is not None:
                        yield f"data: {json.dumps({'content': content, 'session_id': session_id, 'done': True})}\n\n"
                        break
            except Exception as e:
                yield f"data: {json.dumps({'content': f'错误：GLM API请求失败 - {str(e)}', 'session_id': session_id, 'done': True})}\n\n"
                return
        else:
            # For direct response (non-streaming)
            yield f"data: {json.dumps({'content': full_response, 'session_id': session_id})}\n\n"
            yield f"data: {json.dumps({'content': '', 'session_id': session_id, 'done': True})}\n\n"
        
        if has_session:
            await add_message_to_session(session_id, query, full_response)
        else:
            await create_new_chat_session(session_id, query, full_response)

    if agent_mode:
        # Fetch available tools
        conn.row_factory = sqlite3.Row
        cursor = conn.cursor()
        cursor.execute("SELECT t.*, s.url from mcp_tools t LEFT JOIN mcp_servers s ON t.server_id = s.id ")
        tools = [dict(row) for row in cursor.fetchall()]
        conn.close()

        # Construct tool descriptions for the LLM
        tool_descriptions = "\n".join([
            f"server_url: {tool['url']}\n\ntool_name: {tool['name']}\nDescription: {tool['description']}\ninput_schema: {tool['input_schema']}"
            for tool in tools
        ]) if tools else "无可用工具"

        # Prompt to decide tool invocation
        agent_prompt = f"""
        上下文信息：\n{context}\n
        问题：{query} \n
        可用工具：\n{tool_descriptions}\n
        你是一个智能助手，可以根据用户问题选择合适的工具执行操作。
        如果需要使用工具，请返回以下格式的JSON：
        ```json
        {{
            "server_url: "server_url",
            "tool_name": "tool_name",
            "parameters":{{"param_name1": "param_value1", "param_name2": "param_value2"}}
        }}
        ```
        如果不需要工具，直接返回回答内容的字符串。
        """

        # Call GLM API using openai (non-streaming)
        try:
            response = ai_client.chat.completions.create(
                model= MODEL_NAME,
                messages=[
                    {"role": "system", "content": "你是一个智能助手，擅长选择合适的工具或直接回答问题。"},
                    {"role": "user", "content": agent_prompt}
                ],
                stream=False,
                response_format={"type": "json_object"}
            )
            decision = response.choices[0].message.content.strip()
        except Exception as e:
            raise HTTPException(status_code=500, detail=f"GLM API request failed: {str(e)}")

        try:
            # Check if the response is a tool invocation
            decision_json = json.loads(decision)
            if "server_url" in decision_json and "tool_name" in decision_json:
                server_url = decision_json["server_url"]
                tool_name = decision_json["tool_name"]
                parameters = decision_json["parameters"]

                try:
                    async with Client(SSETransport(server_url)) as client:
                
                except Exception as e:
                    return StreamingResponse(
                        generate(initial_content=f"工具")
                    )
        except json.JSONDecodeError:

