use crate::state::{AgentWrapper, AppState};
use anyhow::Result;
use axum::{
    Router,
    extract::{Json, Path, State},
    http::StatusCode,
    response::{IntoResponse, Response},
    routing::{get, post},
};
use bytes::Bytes;
use futures::{Stream, stream::StreamExt};
use rig::{
    completion::CompletionError,
    embeddings::{EmbedError, EmbeddingError, EmbeddingsBuilder},
    providers::anthropic::ANTHROPIC_VERSION_LATEST,
    providers::{anthropic, deepseek, gemini, moonshot, openai, xai},
    streaming::{StreamingChoice, StreamingPrompt},
    vector_store::{VectorStoreIndex, in_memory_store::InMemoryVectorStore},
};
use rig_fastembed::FastembedModel;
use serde::{Deserialize, Serialize};
use std::sync::Arc;
use std::{
    convert::Infallible,
    pin::Pin,
    task::{Context, Poll},
};
use tokio::sync::mpsc;
use tokio_stream::wrappers::ReceiverStream;

// Application-wide error type: wraps `anyhow::Error` so handlers can use `?`
// on any fallible call and still produce an HTTP response.
struct AppError(anyhow::Error);

// Render any wrapped error as a plain-text 500 response.
// NOTE(review): every error — including "agent not found" — surfaces as a
// 500; callers cannot distinguish client errors from server errors.
impl IntoResponse for AppError {
    fn into_response(self) -> Response {
        (
            StatusCode::INTERNAL_SERVER_ERROR,
            format!("Error: {}", self.0),
        )
            .into_response()
    }
}

// Allow `?` on anything returning `anyhow::Result`.
impl From<anyhow::Error> for AppError {
    fn from(err: anyhow::Error) -> Self {
        AppError(err)
    }
}

// rig completion errors (streaming/prompting) -> AppError.
impl From<CompletionError> for AppError {
    fn from(err: CompletionError) -> Self {
        AppError(anyhow::anyhow!("{}", err))
    }
}

// rig embedding-pipeline errors -> AppError.
impl From<EmbeddingError> for AppError {
    fn from(err: EmbeddingError) -> Self {
        AppError(anyhow::anyhow!("{}", err))
    }
}

// rig document-embedding errors (e.g. schema serialization) -> AppError.
impl From<EmbedError> for AppError {
    fn from(err: EmbedError) -> Self {
        AppError(anyhow::anyhow!("{}", err))
    }
}

// Request/response types for agent creation.
#[derive(Deserialize)]
struct CreateAgentPayload {
    // One of: "openai", "anthropic", "gemini", "xai", "deepseek", "moonshot".
    provider: String,
    // Provider-specific model identifier (e.g. "gpt-4o").
    model: String,
    api_key: String,
    // System prompt installed on the agent.
    preamble: String,
    // Optional provider endpoint override; each branch has its own default.
    base_url: Option<String>,
    temperature: Option<f64>,
    max_tokens: Option<u64>,
    // Anthropic-only: beta feature flags forwarded in the request headers.
    anthropic_betas: Option<Vec<String>>,
    // Anthropic-only: API version; defaults to ANTHROPIC_VERSION_LATEST.
    anthropic_version: Option<String>,
    // Caller-chosen id; a short random id is generated when absent.
    agent_id: Option<String>,
}

#[derive(Serialize)]
struct CreateAgentResponse {
    agent_id: String,
}

// Request body for the streaming chat endpoint.
#[derive(Deserialize)]
struct StreamChatPayload {
    prompt: String,
}

// Request/response types for embeddings.
#[derive(Deserialize)]
struct EmbedPayload {
    provider: String,
    text: String,
}

#[derive(Serialize)]
struct EmbedResponse {
    embedding: Vec<f32>,
}

// Request/response types for image generation (placeholder feature).
#[derive(Deserialize)]
struct ImagePayload {
    provider: String,
    prompt: String,
}

#[derive(Serialize)]
struct ImageResponse {
    image_url: String, // assumed to be a URL to the generated image
}

/// Per-agent summary returned by the list endpoint.
#[derive(Serialize)]
struct AgentInfo {
    id: String,
    provider: String,
    model: String,
}

/// GET /agent — list every registered agent.
///
/// The stored wrapper does not retain the model name the agent was created
/// with, so `model` is a coarse per-provider placeholder rather than the
/// actual model identifier.
async fn list_agents(State(state): State<Arc<AppState>>) -> Json<Vec<AgentInfo>> {
    let guard = state.agents.lock().await;

    let mut agent_list = Vec::with_capacity(guard.len());
    for (id, wrapper) in guard.iter() {
        let (provider, model) = match wrapper {
            AgentWrapper::OpenAI(_) => ("openai", "gpt"),
            AgentWrapper::Anthropic(_) => ("anthropic", "claude"),
            AgentWrapper::Xai(_) => ("xai", "xai"),
            AgentWrapper::DeepSeek(_) => ("deepseek", "deepseek"),
            AgentWrapper::Gemini(_) => ("gemini", "gemini"),
            AgentWrapper::Moonshot(_) => ("moonshot", "moonshot"),
        };

        agent_list.push(AgentInfo {
            id: id.clone(),
            provider: provider.to_string(),
            model: model.to_string(),
        });
    }

    Json(agent_list)
}

// Custom SSE response type for streaming messages.
// Wraps the receiving half of an mpsc channel; each `String` pushed into the
// channel is expected to already be a complete SSE frame ("data: ...\n\n").
pub struct SseResponse {
    rx: ReceiverStream<String>,
}

impl SseResponse {
    fn new(rx: ReceiverStream<String>) -> Self {
        Self { rx }
    }
}

impl Stream for SseResponse {
    type Item = Result<Bytes, Infallible>;

    /// Delegate polling to the inner `ReceiverStream`, converting each
    /// `String` frame into `Ok(Bytes)`. `ReceiverStream` is `Unpin`, so it is
    /// safe to take `&mut` out of the pin here.
    fn poll_next(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
        let inner = &mut self.get_mut().rx;
        match Pin::new(inner).poll_next(cx) {
            Poll::Ready(Some(frame)) => Poll::Ready(Some(Ok(Bytes::from(frame)))),
            Poll::Ready(None) => Poll::Ready(None),
            Poll::Pending => Poll::Pending,
        }
    }
}

impl IntoResponse for SseResponse {
    /// Turn the stream into an HTTP response carrying the standard SSE
    /// headers (`text/event-stream`, no caching, keep-alive).
    fn into_response(self) -> axum::response::Response {
        const SSE_HEADERS: [(&str, &str); 3] = [
            ("Content-Type", "text/event-stream"),
            ("Cache-Control", "no-cache"),
            ("Connection", "keep-alive"),
        ];

        let body = axum::body::Body::from_stream(self);

        SSE_HEADERS
            .iter()
            .fold(http::Response::builder(), |builder, (name, value)| {
                builder.header(*name, *value)
            })
            .body(body)
            .unwrap()
    }
}

// Message event types for SSE streaming.
// Serialized with an external "type" tag, e.g. {"type":"Message","message":…}.
#[derive(Debug, Serialize)]
#[serde(tag = "type")]
enum MessageEvent {
    // A chunk of assistant text.
    Message {
        message: String,
    },
    // The model requested a tool invocation.
    ToolCall {
        name: String,
        id: String,
        params: serde_json::Value,
    },
    // Provider-side streaming error; the stream ends after this.
    Error {
        error: String,
    },
    // Terminal event; `reason` is currently always "stop".
    Finish {
        reason: String,
    },
}

// Serialize `event` and push it to the channel as one SSE `data:` frame.
//
// If serialization fails (it should not for `MessageEvent`), a well-formed
// JSON error event is sent instead. The fallback is built via `json!` so a
// quote or backslash inside the error text cannot yield invalid JSON (the
// previous hand-written format string could).
async fn stream_event(
    event: MessageEvent,
    tx: &mpsc::Sender<String>,
) -> Result<(), mpsc::error::SendError<String>> {
    let json = serde_json::to_string(&event).unwrap_or_else(|e| {
        serde_json::json!({
            "type": "Error",
            "error": format!("Failed to serialize event: {}", e),
        })
        .to_string()
    });
    // SSE framing: a single `data:` line terminated by a blank line.
    tx.send(format!("data: {}\n\n", json)).await
}

// POST /agent — build an agent for the requested provider and register it
// under a (possibly generated) id.
//
// The MCP tool schemas are embedded with a local fastembed model and exposed
// through a vector-store index; currently only the "openai" branch attaches
// the dynamic tool index.
async fn create_agent(
    State(state): State<Arc<AppState>>,
    Json(payload): Json<CreateAgentPayload>,
) -> Result<Response, AppError> {
    let fastembed_client = state.fastembed.clone();
    let embedding_model = fastembed_client.embedding_model(&FastembedModel::AllMiniLML6V2Q);
    let tool_set = state.mcp_manager.get_tool_set().await?;

    // Embed every tool schema so the agent can retrieve relevant tools.
    let embeddings = EmbeddingsBuilder::new(embedding_model.clone())
        .documents(tool_set.schemas()?)?
        .build()
        .await?;

    let vector_store = InMemoryVectorStore::from_documents_with_id_f(embeddings, |f| {
        tracing::info!("store tool {}", f.name);
        f.name.clone()
    });

    // Create vector store index
    let index = vector_store.index(embedding_model);

    let agent_wrapper = match payload.provider.as_str() {
        "openai" => {
            let mut builder = openai::Client::from_url(
                &payload.api_key,
                payload
                    .base_url
                    .as_deref()
                    .unwrap_or("https://api.openai.com/v1"),
            )
            .agent(&payload.model)
            // Up to 4 dynamically retrieved tools per prompt.
            .dynamic_tools(4, index, tool_set)
            .preamble(&payload.preamble);

            if let Some(temp) = payload.temperature {
                builder = builder.temperature(temp);
            }

            if let Some(max) = payload.max_tokens {
                builder = builder.max_tokens(max);
            }

            AgentWrapper::OpenAI(builder.build())
        }
        "anthropic" => {
            let betas: Option<Vec<&str>> = payload
                .anthropic_betas
                .as_ref()
                .map(|v| v.iter().map(|s| s.as_str()).collect());

            // Honor `base_url` here too (it was previously ignored for
            // Anthropic while honored for the other providers).
            let mut builder = anthropic::Client::new(
                &payload.api_key,
                payload
                    .base_url
                    .as_deref()
                    .unwrap_or("https://api.anthropic.com"),
                betas,
                payload
                    .anthropic_version
                    .as_deref()
                    .unwrap_or(ANTHROPIC_VERSION_LATEST),
            )
            .agent(&payload.model)
            .preamble(&payload.preamble);

            if let Some(temp) = payload.temperature {
                builder = builder.temperature(temp);
            }

            if let Some(max) = payload.max_tokens {
                builder = builder.max_tokens(max);
            }

            AgentWrapper::Anthropic(builder.build())
        }
        "gemini" => {
            let mut builder = gemini::Client::from_url(
                &payload.api_key,
                payload
                    .base_url
                    .as_deref()
                    .unwrap_or("https://generativelanguage.googleapis.com"),
            )
            .agent(&payload.model)
            .preamble(&payload.preamble);

            if let Some(temp) = payload.temperature {
                builder = builder.temperature(temp);
            }

            if let Some(max) = payload.max_tokens {
                builder = builder.max_tokens(max);
            }

            AgentWrapper::Gemini(builder.build())
        }
        // temperature / max_tokens were previously silently dropped for the
        // three providers below; apply them for consistency with the rest.
        "xai" => {
            let mut builder = xai::Client::new(&payload.api_key)
                .agent(&payload.model)
                .preamble(&payload.preamble);

            if let Some(temp) = payload.temperature {
                builder = builder.temperature(temp);
            }

            if let Some(max) = payload.max_tokens {
                builder = builder.max_tokens(max);
            }

            AgentWrapper::Xai(builder.build())
        }
        "deepseek" => {
            let mut builder = deepseek::Client::new(&payload.api_key)
                .agent(&payload.model)
                .preamble(&payload.preamble);

            if let Some(temp) = payload.temperature {
                builder = builder.temperature(temp);
            }

            if let Some(max) = payload.max_tokens {
                builder = builder.max_tokens(max);
            }

            AgentWrapper::DeepSeek(builder.build())
        }
        "moonshot" => {
            let mut builder = moonshot::Client::new(&payload.api_key)
                .agent(&payload.model)
                .preamble(&payload.preamble);

            if let Some(temp) = payload.temperature {
                builder = builder.temperature(temp);
            }

            if let Some(max) = payload.max_tokens {
                builder = builder.max_tokens(max);
            }

            AgentWrapper::Moonshot(builder.build())
        }
        _ => {
            // The old message wrongly claimed only openai/xai were supported.
            return Ok((
                StatusCode::BAD_REQUEST,
                "Unsupported provider; expected one of: openai, anthropic, gemini, xai, deepseek, moonshot"
                    .to_string(),
            )
                .into_response());
        }
    };

    // Use the caller-supplied id, or the first segment of a fresh UUID
    // (8 hex chars) as a short random id.
    let agent_id = match payload.agent_id {
        Some(id) => id,
        None => {
            let uuid = uuid::Uuid::new_v4();
            uuid.to_string()
                .split('-')
                .next()
                .unwrap_or_default()
                .to_string()
        }
    };

    // NOTE(review): an existing agent with the same id is silently replaced.
    state
        .agents
        .lock()
        .await
        .insert(agent_id.clone(), agent_wrapper);

    Ok((StatusCode::CREATED, Json(CreateAgentResponse { agent_id })).into_response())
}

// POST /agent/{id}/stream — streaming chat over SSE.
//
// NOTE(review): the provider stream is fully drained *before* `SseResponse`
// is returned, so nothing is streamed live — every event is buffered in the
// channel first. The channel capacity is 100; if a conversation produces
// more than 100 events, `tx.send(..).await` blocks while nobody polls `rx`,
// deadlocking the request. Consider moving the drain loop into a spawned
// task and returning the response immediately. Also note the agents mutex
// stays locked for the entire generation, serializing all chat requests.
async fn stream_chat(
    State(state): State<Arc<AppState>>,
    Path(agent_id): Path<String>,
    Json(payload): Json<StreamChatPayload>,
) -> Result<SseResponse, AppError> {
    let agents = state.agents.lock().await;

    // Unknown id surfaces as a 500 via `AppError` — TODO confirm whether a
    // 404 would be more appropriate.
    let agent_wrapper = agents
        .get(&agent_id)
        .ok_or_else(|| anyhow::anyhow!("Agent not found"))?;

    let (tx, rx) = mpsc::channel(100);
    let stream = ReceiverStream::new(rx);

    // Start the provider-side streaming completion for the chosen backend.
    let mut response = match agent_wrapper {
        AgentWrapper::OpenAI(agent) => agent.stream_prompt(&payload.prompt).await?,
        AgentWrapper::Anthropic(agent) => agent.stream_prompt(&payload.prompt).await?,
        AgentWrapper::Gemini(agent) => agent.stream_prompt(&payload.prompt).await?,
        AgentWrapper::Xai(agent) => agent.stream_prompt(&payload.prompt).await?,
        AgentWrapper::DeepSeek(agent) => agent.stream_prompt(&payload.prompt).await?,
        AgentWrapper::Moonshot(agent) => agent.stream_prompt(&payload.prompt).await?,
    };

    // Drain the provider stream, forwarding each chunk as an SSE event.
    while let Some(chunk) = response.next().await {
        match chunk {
            Ok(StreamingChoice::Message(text)) => {
                tracing::info!("Message: {}", text);
                let _ = stream_event(MessageEvent::Message { message: text }, &tx).await;
            }
            // Tool call requested by the model: execute it via the agent's
            // tool set. NOTE(review): a tool failure aborts the whole handler
            // with `?` instead of being reported as an SSE Error event, and
            // the provider's call id (second tuple field) is discarded in
            // favor of a hardcoded "tool-call" id below.
            Ok(StreamingChoice::ToolCall(name, _, params)) => {
                let res = match agent_wrapper {
                    AgentWrapper::OpenAI(agent) => agent
                        .tools
                        .call(&name, params.to_string())
                        .await
                        .map_err(|e| AppError(anyhow::anyhow!("{}", e)))?,
                    AgentWrapper::Anthropic(agent) => agent
                        .tools
                        .call(&name, params.to_string())
                        .await
                        .map_err(|e| AppError(anyhow::anyhow!("{}", e)))?,
                    AgentWrapper::Gemini(agent) => agent
                        .tools
                        .call(&name, params.to_string())
                        .await
                        .map_err(|e| AppError(anyhow::anyhow!("{}", e)))?,
                    AgentWrapper::Xai(agent) => agent
                        .tools
                        .call(&name, params.to_string())
                        .await
                        .map_err(|e| AppError(anyhow::anyhow!("{}", e)))?,
                    AgentWrapper::DeepSeek(agent) => agent
                        .tools
                        .call(&name, params.to_string())
                        .await
                        .map_err(|e| AppError(anyhow::anyhow!("{}", e)))?,
                    AgentWrapper::Moonshot(agent) => agent
                        .tools
                        .call(&name, params.to_string())
                        .await
                        .map_err(|e| AppError(anyhow::anyhow!("{}", e)))?,
                };
                tracing::info!("Tool Result: {}", res);
                // NOTE(review): the tool *result* `res` is only logged, never
                // sent to the client; only the call metadata is emitted.
                let _ = stream_event(
                    MessageEvent::ToolCall {
                        name: name.to_string(),
                        id: "tool-call".to_string(),
                        params: serde_json::from_str(&params.to_string()).unwrap_or_default(),
                    },
                    &tx,
                )
                .await;
            }
            Err(e) => {
                tracing::info!("Error: {}", e.to_string());
                let _ = stream_event(
                    MessageEvent::Error {
                        error: e.to_string(),
                    },
                    &tx,
                )
                .await;
                break;
            }
        }
    }

    // Send finish event
    let _ = stream_event(
        MessageEvent::Finish {
            reason: "stop".to_string(),
        },
        &tx,
    )
    .await;

    Ok(SseResponse::new(stream))
}

// POST /agent/{id}/embed — return an embedding for the submitted text.
//
// NOTE(review): still a mock — it only validates that the agent exists and
// returns a fixed vector. Wire this to a real embedding model before use.
// Unused bindings are underscored to keep the build warning-free.
async fn embed(
    State(state): State<Arc<AppState>>,
    Path(agent_id): Path<String>,
    Json(_payload): Json<EmbedPayload>,
) -> Result<Response, AppError> {
    let agents = state.agents.lock().await;

    // Validate the agent id; the wrapper itself is unused by the mock.
    let _agent_wrapper = agents
        .get(&agent_id)
        .ok_or_else(|| anyhow::anyhow!("Agent not found"))?;

    let embedding = vec![0.1, 0.2, 0.3, 0.4, 0.5]; // Mock embedding

    Ok((StatusCode::OK, Json(EmbedResponse { embedding })).into_response())
}

// POST /agent/{id}/image — image generation (placeholder feature).
//
// NOTE(review): still a mock — it only validates that the agent exists and
// returns a fixed URL. Unused bindings are underscored to keep the build
// warning-free.
async fn image(
    State(state): State<Arc<AppState>>,
    Path(agent_id): Path<String>,
    Json(_payload): Json<ImagePayload>,
) -> Result<Response, AppError> {
    let agents = state.agents.lock().await;

    // Validate the agent id; the wrapper itself is unused by the mock.
    let _agent_wrapper = agents
        .get(&agent_id)
        .ok_or_else(|| anyhow::anyhow!("Agent not found"))?;

    let image_url = "https://example.com/generated_image.jpg".to_string(); // placeholder

    Ok((StatusCode::OK, Json(ImageResponse { image_url })).into_response())
}

/// Build the agent API router and attach the shared application state.
pub fn routes(state: AppState) -> Router {
    let shared_state = Arc::new(state);

    Router::new()
        // POST creates an agent, GET lists them — one method router per path.
        .route("/agent", post(create_agent).get(list_agents))
        .route("/agent/{id}/stream", post(stream_chat))
        .route("/agent/{id}/embed", post(embed))
        .route("/agent/{id}/image", post(image))
        .with_state(shared_state)
}
