use crate::common::error::SearchError;
use crate::common::register::SearchSourceRegistry;
use crate::common::search::{
    FailedRequest, MultiSourceQueryResponse, QueryHits, QuerySource, SearchQuery,
};
use crate::common::traits::SearchSource;
use crate::server::servers::logout_coco_server;
use crate::server::servers::mark_server_as_offline;
use function_name::named;
use futures::StreamExt;
use futures::stream::FuturesUnordered;
use reqwest::StatusCode;
use std::cmp::Reverse;
use std::collections::HashMap;
use std::collections::HashSet;
use std::sync::Arc;
use tauri::{AppHandle, Manager};
use tokio::time::{Duration, timeout};

#[named]
#[tauri::command]
pub async fn query_coco_fusion(
    tauri_app_handle: AppHandle,
    from: u64,
    size: u64,
    query_strings: HashMap<String, String>,
    query_timeout: u64,
) -> Result<MultiSourceQueryResponse, SearchError> {
    let registry = tauri_app_handle.state::<SearchSourceRegistry>();
    let sources = registry.get_sources().await;
    let timeout_duration = Duration::from_millis(query_timeout);

    log::debug!(
        "{}() invoked with parameters: from: [{}], size: [{}], query_strings: [{:?}], timeout: [{:?}]",
        function_name!(),
        from,
        size,
        query_strings,
        timeout_duration
    );

    let search_query = SearchQuery::new(from, size, query_strings.clone());

    // An explicit "querysource" parameter narrows the search to one source;
    // otherwise fan out to every registered source.
    match query_strings.get("querysource") {
        Some(query_source_id) => {
            query_coco_fusion_single_query_source(
                tauri_app_handle,
                sources,
                query_source_id.clone(),
                timeout_duration,
                search_query,
            )
            .await
        }
        None => {
            query_coco_fusion_multi_query_sources(
                tauri_app_handle,
                sources,
                timeout_duration,
                search_query,
            )
            .await
        }
    }
}

/// Query only 1 query source.
///
/// The logic here is much simpler than `query_coco_fusion_multi_query_sources()`
/// as we don't need to re-rank due to fact that this does not involve multiple
/// query sources.
async fn query_coco_fusion_single_query_source(
    tauri_app_handle: AppHandle,
    mut query_source_list: Vec<Arc<dyn SearchSource>>,
    id_of_query_source_to_query: String,
    timeout_duration: Duration,
    search_query: SearchQuery,
) -> Result<MultiSourceQueryResponse, SearchError> {
    // If this query source ID is specified, we only query this query source.
    log::debug!(
        "parameter [querysource={}] specified, will only query this query source",
        id_of_query_source_to_query
    );

    let opt_query_source_trait_object_index = query_source_list
        .iter()
        .position(|query_source| query_source.get_type().id == id_of_query_source_to_query);

    let Some(query_source_trait_object_index) = opt_query_source_trait_object_index else {
        // It is possible (an edge case) that the frontend invokes `query_coco_fusion()`
        // with a querysource that does not exist in the source list:
        //
        // 1. Search applications
        // 2. Navigate to the application sub page
        // 3. Disable the application extension in settings, which removes this
        //    query source from the list
        // 4. hide the search window
        // 5. Re-open the search window, you will still be in the sub page, type to search
        //    something
        //
        // The application query source is not in the source list because the extension
        // was disabled and thus removed from the query sources, but the last
        // search is indeed invoked with parameter `querysource=application`.
        return Ok(MultiSourceQueryResponse {
            failed: Vec::new(),
            hits: Vec::new(),
            total_hits: 0,
        });
    };

    let query_source_trait_object = query_source_list.remove(query_source_trait_object_index);
    let query_source = query_source_trait_object.get_type();
    let search_fut = query_source_trait_object.search(tauri_app_handle.clone(), search_query);
    let timeout_result = timeout(timeout_duration, search_fut).await;

    let mut failed_requests: Vec<FailedRequest> = Vec::new();
    let mut hits = Vec::new();
    let mut total_hits = 0;

    match timeout_result {
        // Ignore the `_timeout` variable as it won't provide any useful debugging information.
        Err(_timeout) => {
            log::warn!(
                "searching query source [{}] timed out, skip this request",
                query_source.id
            );
        }
        Ok(query_result) => match query_result {
            Ok(response) => {
                total_hits = response.total_hits;

                for (document, score) in response.hits {
                    log::debug!(
                        "document from query source [{}]: ID [{}], title [{:?}], score [{}]",
                        response.source.id,
                        document.id,
                        document.title,
                        score
                    );

                    let query_hit = QueryHits {
                        source: Some(response.source.clone()),
                        score,
                        document,
                    };

                    hits.push(query_hit);
                }
            }
            Err(search_error) => {
                query_coco_fusion_handle_failed_request(
                    tauri_app_handle.clone(),
                    &mut failed_requests,
                    query_source,
                    search_error,
                )
                .await;
            }
        },
    }

    Ok(MultiSourceQueryResponse {
        failed: failed_requests,
        hits,
        total_hits,
    })
}

/// Query every source in `query_source_trait_object_list` concurrently and fuse
/// the per-source results into one ranked response.
///
/// Fusion steps:
/// 1. Fire all searches in parallel, each bounded by `timeout_duration`;
///    timed-out sources are skipped, erroring sources are recorded in `failed`.
/// 2. Give each responding source a fair share (`size / source_count`) of the
///    result slots, de-duplicating by document ID.
/// 3. Either re-rank the fused hits by title similarity to the query keyword,
///    or (when no re-ranking happens) backfill up to `size` hits with the
///    highest-scoring leftovers.
/// 4. Sort the final hits by score, descending.
async fn query_coco_fusion_multi_query_sources(
    tauri_app_handle: AppHandle,
    query_source_trait_object_list: Vec<Arc<dyn SearchSource>>,
    timeout_duration: Duration,
    search_query: SearchQuery,
) -> Result<MultiSourceQueryResponse, SearchError> {
    log::debug!(
        "will query query sources {:?}",
        query_source_trait_object_list
            .iter()
            .map(|search_source| search_source.get_type().id.clone())
            .collect::<Vec<String>>()
    );

    // Keyword typed by the user, used for title-based re-ranking below.
    // `cloned().unwrap_or_default()` avoids the unconditional allocation the
    // old `unwrap_or(&"".to_string())` form performed even when the key existed.
    let query_keyword = search_query
        .query_strings
        .get("query")
        .cloned()
        .unwrap_or_default();
    let size = search_query.size;

    // One future per source: resolves to the source descriptor (kept for
    // logging on timeout/error) plus its timeout-wrapped search result.
    let mut futures = FuturesUnordered::new();

    for query_source_trait_object in query_source_trait_object_list {
        let query_source = query_source_trait_object.get_type().clone();
        let tauri_app_handle_clone = tauri_app_handle.clone();
        let search_query_clone = search_query.clone();

        futures.push(async move {
            (
                query_source,
                timeout(
                    timeout_duration,
                    query_source_trait_object.search(tauri_app_handle_clone, search_query_clone),
                )
                .await,
            )
        });
    }

    let mut total_hits = 0;
    // Re-ranking is currently always attempted.
    //TODO set default to false when boost supported in Pizza
    let mut need_rerank = true;
    let mut failed_requests = Vec::new();
    // Every hit in arrival order as (source ID, hit, score); used to backfill
    // extra hits when the fair per-source quota leaves `final_hits` short.
    let mut all_hits: Vec<(String, QueryHits, f64)> = Vec::new();
    let mut hits_per_source: HashMap<String, Vec<(QueryHits, f64)>> = HashMap::new();

    while let Some((query_source, timeout_result)) = futures.next().await {
        match timeout_result {
            // The `Elapsed` value carries no useful debugging information.
            Err(_timeout) => {
                log::warn!(
                    "searching query source [{}] timed out, skip this request",
                    query_source.id
                );
            }
            Ok(query_result) => match query_result {
                Ok(response) => {
                    total_hits += response.total_hits;
                    let source_id = response.source.id.clone();

                    for (document, score) in response.hits {
                        log::debug!(
                            "document from query source [{}]: ID [{}], title [{:?}], score [{}]",
                            response.source.id,
                            document.id,
                            document.title,
                            score
                        );

                        let query_hit = QueryHits {
                            source: Some(response.source.clone()),
                            score,
                            document,
                        };

                        all_hits.push((source_id.clone(), query_hit.clone(), score));

                        hits_per_source
                            .entry(source_id.clone())
                            .or_default()
                            .push((query_hit, score));
                    }
                }
                Err(search_error) => {
                    query_coco_fusion_handle_failed_request(
                        tauri_app_handle.clone(),
                        &mut failed_requests,
                        query_source,
                        search_error,
                    )
                    .await;
                }
            },
        }
    }

    // Sort hits within each source by score (descending). `total_cmp` is a
    // total order (NaN-safe); the previous `partial_cmp(..).unwrap_or(Greater)`
    // fallback could hand `sort_by` an inconsistent comparator on NaN scores.
    for hits in hits_per_source.values_mut() {
        hits.sort_by(|a, b| b.1.total_cmp(&a.1));
    }

    // Fair share of the requested `size` for each source that responded.
    let total_sources = hits_per_source.len();
    let max_hits_per_source = if total_sources > 0 {
        size as usize / total_sources
    } else {
        size as usize
    };

    let mut final_hits = Vec::new();
    let mut seen_docs = HashSet::new(); // Document IDs already in `final_hits`.

    // Distribute hits fairly across sources.
    for (_source_id, hits) in &mut hits_per_source {
        let take_count = hits.len().min(max_hits_per_source);
        for (doc, score) in hits.drain(0..take_count) {
            // `insert` returns false for duplicates, replacing the old
            // `contains` + `insert` double lookup.
            if seen_docs.insert(doc.document.id.clone()) {
                log::debug!(
                    "collect doc: {}, {:?}, {}",
                    doc.document.id,
                    doc.document.title,
                    score
                );
                final_hits.push(doc);
            }
        }
    }

    log::debug!("final hits: {:?}", final_hits.len());

    // Count the distinct sources (ignoring the built-in calculator) that made
    // it into the fused result; with none of them, there is nothing worth
    // re-ranking by title similarity.
    let mut unique_sources = HashSet::new();
    for hit in &final_hits {
        if let Some(source) = &hit.source {
            if source.id != crate::extension::built_in::calculator::DATA_SOURCE_ID {
                unique_sources.insert(&source.id);
            }
        }
    }

    log::debug!(
        "unique non-calculator sources in final hits: {:?}",
        unique_sources
    );

    if unique_sources.is_empty() {
        // Only calculator (or source-less) hits present: skip re-ranking.
        need_rerank = false;
    }

    if need_rerank && final_hits.len() > 1 {
        // Pre-collect (index-into-final_hits, title) pairs; calculator hits
        // and hits without a title are excluded from re-scoring.
        let titles_to_score: Vec<(usize, &str)> = final_hits
            .iter()
            .enumerate()
            .filter_map(|(idx, hit)| {
                let source = hit.source.as_ref()?;
                let title = hit.document.title.as_deref()?;

                (source.id != crate::extension::built_in::calculator::DATA_SOURCE_ID)
                    .then_some((idx, title))
            })
            .collect();

        // Score the titles against the query keyword.
        let mut scored_hits = boosted_levenshtein_rerank(query_keyword.as_str(), titles_to_score);

        // Sort descending by score (scaled to an integer so `sort_by_key` +
        // `Reverse` works; scores are in [0, 1], so the cast is lossless
        // enough at millis precision).
        scored_hits.sort_by_key(|&(_, score)| Reverse((score * 1000.0) as u64));

        // Overwrite the scores of the top `size` re-ranked hits; the rest
        // keep their original source-reported scores.
        for (idx, score) in scored_hits.into_iter().take(size as usize) {
            final_hits[idx].score = score;
        }
    } else if final_hits.len() < size as usize {
        // No re-ranking happened and the fair-share pass left us short of
        // `size` hits: backfill with the highest-scoring remaining ones.
        let remaining_needed = size as usize - final_hits.len();

        // Sort all collected hits by score, descending (NaN-safe total order).
        all_hits.sort_by(|a, b| b.2.total_cmp(&a.2));

        let extra_hits = all_hits
            .into_iter()
            // Only take hits from known (responding) sources.
            .filter(|(source_id, _, _)| hits_per_source.contains_key(source_id))
            // Skip documents already in `final_hits`.
            .filter_map(|(_, doc, _)| seen_docs.insert(doc.document.id.clone()).then_some(doc))
            .take(remaining_needed)
            .collect::<Vec<_>>();

        final_hits.extend(extra_hits);
    }

    // Sort the fused hits by score, descending.
    final_hits.sort_by(|a, b| b.score.total_cmp(&a.score));

    if final_hits.len() < 5 {
        //TODO: Add a recommendation system to suggest more sources
        log::info!(
            "Less than 5 hits found, consider using recommendation to find more suggestions."
        );
        //local: recent history, local extensions
        //remote: ai agents, quick links, other tasks, managed by server
    }

    Ok(MultiSourceQueryResponse {
        failed: failed_requests,
        hits: final_hits,
        total_hits,
    })
}

/// Score each `(index, title)` pair against `query`, returning scores in
/// `[0.0, 1.0]`.
///
/// The score is the sum of:
/// * a fixed containment boost — 0.4 when the title contains the query
///   case-sensitively, else 0.2 when it contains it case-insensitively — and
/// * the normalized Levenshtein similarity between the lowercased query and
///   the lowercased title,
///
/// capped at 1.0. The input indices are passed through unchanged so callers
/// can map scores back to their hit list.
fn boosted_levenshtein_rerank(query: &str, titles: Vec<(usize, &str)>) -> Vec<(usize, f64)> {
    use strsim::levenshtein;

    let query_lower = query.to_lowercase();
    // `levenshtein` operates on chars, so normalize by char counts — not
    // `str::len()`, which is a byte count and disagrees for non-ASCII text.
    // The old code also mixed the lowered query with the *un-lowered* title
    // length; both sides now use the lowered strings consistently.
    let query_char_count = query_lower.chars().count();

    titles
        .into_iter()
        .map(|(idx, title)| {
            // Lowercase once and reuse for both the containment check and the
            // edit distance (previously computed twice per title).
            let title_lower = title.to_lowercase();
            let mut score: f64 = 0.0;

            // Case-sensitive containment earns a bigger boost than
            // case-insensitive containment.
            if title.contains(query) {
                score += 0.4;
            } else if title_lower.contains(&query_lower) {
                score += 0.2;
            }

            // Normalized edit-distance similarity, computed entirely in f64
            // (the old code round-tripped through f32, losing precision).
            let dist = levenshtein(&query_lower, &title_lower);
            let max_len = query_char_count.max(title_lower.chars().count());
            if max_len > 0 {
                score += 1.0 - (dist as f64 / max_len as f64);
            }

            (idx, score.min(1.0))
        })
        .collect()
}

/// Record a failed search request and react to its HTTP status.
///
/// Shared by `query_coco_fusion_single_query_source()` and
/// `query_coco_fusion_multi_query_sources()`, which use identical error
/// handling: log the error, log out on 401, mark the server offline on any
/// other non-OK status, and append a `FailedRequest` entry either way.
async fn query_coco_fusion_handle_failed_request(
    tauri_app_handle: AppHandle,
    failed_requests: &mut Vec<FailedRequest>,
    query_source: QuerySource,
    search_error: SearchError,
) {
    log::error!(
        "searching query source [{}] failed, error [{}]",
        query_source.id,
        search_error
    );

    // 0 means "no HTTP status available" (non-HTTP errors).
    let mut status_code_num: u16 = 0;

    if let SearchError::HttpError {
        status_code: Some(status_code),
        ..
    } = &search_error
    {
        let status_code = *status_code;
        status_code_num = status_code.as_u16();

        if status_code == StatusCode::UNAUTHORIZED {
            // The login token is invalid: besides the server being
            // unavailable, a 401 also means we must log out of it.
            logout_coco_server(tauri_app_handle.clone(), query_source.id.to_string()).await.unwrap_or_else(|e| {
                panic!(
                  "the search request to Coco server [id {}, name {}] failed with status code {}, the login token is invalid, we are trying to log out, but failed with error [{}]", 
                  query_source.id, query_source.name, StatusCode::UNAUTHORIZED, e
                );
            })
        } else if status_code != StatusCode::OK {
            // Any other non-OK status: treat this Coco server as unavailable.
            mark_server_as_offline(tauri_app_handle.clone(), &query_source.id).await;
        }
    }

    failed_requests.push(FailedRequest {
        source: query_source,
        status: status_code_num,
        error: Some(search_error.to_string()),
        reason: None,
    });
}
