// Copyright (c) 2023-2025 ParadeDB, Inc.
//
// This file is part of ParadeDB - Postgres for Search and Analytics
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
//
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>.

pub mod heap_field_filter;
mod more_like_this;
pub mod pdb_query;
pub(crate) mod proximity;
mod range;
mod score;

use heap_field_filter::HeapFieldFilter;

use crate::api::operator::searchqueryinput_typoid;
use crate::api::FieldName;
use crate::api::HashMap;
use crate::postgres::customscan::explain::{format_for_explain, ExplainFormat};
use crate::postgres::utils::convert_pg_date_string;
use crate::query::more_like_this::MoreLikeThisQuery;
use crate::query::pdb_query::pdb;
use crate::query::score::ScoreFilter;
use crate::schema::SearchIndexSchema;
use anyhow::Result;
use core::panic;
use pgrx::{pg_sys, IntoDatum, PgBuiltInOids, PgOid, PostgresType};
use serde::de::{MapAccess, Visitor};
use serde::{Deserialize, Deserializer, Serialize, Serializer};
use smallvec::{smallvec, SmallVec};
use std::fmt::{Debug, Formatter};
use std::ops::Bound;
use tantivy::query::{
    AllQuery, BooleanQuery, BoostQuery, ConstScoreQuery, DisjunctionMaxQuery, EmptyQuery,
    Query as TantivyQuery, QueryParser, TermSetQuery,
};
use tantivy::DateTime;
use tantivy::{
    query_grammar::Occur,
    schema::{Field, FieldType, OwnedValue, DATE_TIME_PRECISION_INDEXED},
    Searcher, Term,
};
use thiserror::Error;

/// F64 can exactly represent integers up to 2^53 (permissive boundary).
/// This is used when converting F64 to integer types where 2^53 is losslessly convertible.
pub(crate) const F64_EXACT_INTEGER_MAX: u64 = 1u64 << 53;

/// Conservative boundary (2^53-2) for deciding when to create F64 variants from integers.
/// Matches types.rs classification logic to ensure consistency between indexing and querying.
/// Values > this threshold are stored/queried as I64/U64 only, not F64.
pub(crate) const F64_SAFE_INTEGER_MAX: u64 = (1u64 << 53) - 2;

/// Expands a numeric value into multiple Tantivy term variants to handle
/// JSON numeric type mismatches (e.g., 1 stored as I64 vs 1.0 stored as F64).
/// This enables cross-type matching for equality and IN clause queries.
/// Uses SmallVec to avoid heap allocation for typical cases (up to 3 terms).
/// Expands a numeric value into multiple Tantivy term variants to handle
/// JSON numeric type mismatches (e.g., 1 stored as I64 vs 1.0 stored as F64).
/// This enables cross-type matching for equality and IN clause queries.
/// Uses SmallVec to avoid heap allocation for typical cases (up to 3 terms).
pub(crate) fn expand_json_numeric_to_terms(
    tantivy_field: Field,
    value: &OwnedValue,
    path: Option<&str>,
) -> anyhow::Result<SmallVec<[Term; 3]>> {
    // Shorthand for building one JSON term for a single numeric representation.
    let make_term = |v: &OwnedValue| value_to_json_term(tantivy_field, v, path, true, false);

    let mut terms: SmallVec<[Term; 3]> = SmallVec::new();
    match value {
        OwnedValue::I64(i) => {
            // I64 variant (matches JSON integers like 1), then an F64 variant
            // (matches JSON floats like 1.0).
            terms.push(make_term(value)?);
            terms.push(make_term(&OwnedValue::F64(*i as f64))?);

            // Non-negative i64 values may have been stored as U64 by the
            // indexer, so add that variant too.
            if *i >= 0 {
                terms.push(make_term(&OwnedValue::U64(*i as u64))?);
            }
        }
        OwnedValue::U64(u) => {
            // U64 variant (matches large JSON integers).
            terms.push(make_term(value)?);

            // F64 variant only when the conversion is precision-safe.
            if *u <= F64_SAFE_INTEGER_MAX {
                terms.push(make_term(&OwnedValue::F64(*u as f64))?);
            }

            // I64 variant only when the value fits.
            if i64::try_from(*u).is_ok() {
                terms.push(make_term(&OwnedValue::I64(*u as i64))?);
            }
        }
        OwnedValue::F64(f) => {
            // Always emit the F64 variant itself.
            terms.push(make_term(value)?);

            // NaN/Infinity get no integer variants; likewise fractional values.
            // For whole finite numbers, add integer variants within the range
            // F64 can represent exactly (permissive 2^53 boundary — which also
            // implies the value fits in i64/u64 respectively).
            if f.is_finite() && f.fract() == 0.0 {
                let exact = F64_EXACT_INTEGER_MAX as f64;
                if (-exact..=exact).contains(f) {
                    terms.push(make_term(&OwnedValue::I64(*f as i64))?);
                }
                // U64 can't represent negatives, hence the 0.0 lower bound.
                if (0.0..=exact).contains(f) {
                    terms.push(make_term(&OwnedValue::U64(*f as u64))?);
                }
            }
        }
        _ => return Err(anyhow::anyhow!("Expected numeric value")),
    }

    Ok(terms)
}

/// The serializable query AST for ParadeDB search.
///
/// Values of this enum are constructed on the SQL side, stored/transferred as a
/// Postgres type (via `#[derive(PostgresType)]`), and later lowered to Tantivy
/// queries by `into_tantivy_query`.
#[derive(Debug, PostgresType, Deserialize, Serialize, Clone, PartialEq, Default)]
#[serde(rename_all = "snake_case")]
pub enum SearchQueryInput {
    /// Placeholder default; lowering this variant panics.
    #[default]
    Uninitialized,
    /// Matches every document in the index.
    All,
    /// Standard boolean combination of sub-queries.
    Boolean {
        #[serde(default)]
        #[serde(skip_serializing_if = "Vec::is_empty")]
        must: Vec<SearchQueryInput>,

        #[serde(default)]
        #[serde(skip_serializing_if = "Vec::is_empty")]
        should: Vec<SearchQueryInput>,

        #[serde(default)]
        #[serde(skip_serializing_if = "Vec::is_empty")]
        must_not: Vec<SearchQueryInput>,
    },
    /// Multiplies the inner query's score by `factor`.
    Boost {
        query: Box<SearchQueryInput>,
        factor: f32,
    },
    /// Replaces the inner query's score with a constant `score`.
    ConstScore {
        query: Box<SearchQueryInput>,
        score: f32,
    },
    /// Filters matches of the inner query by score ranges.
    ScoreFilter {
        bounds: Vec<(Bound<f32>, Bound<f32>)>,
        // NOTE: expected to be `Some` by the time `visit()` runs (it panics otherwise).
        query: Option<Box<SearchQueryInput>>,
    },
    /// Takes the max score over `disjuncts`, optionally blending with `tie_breaker`.
    DisjunctionMax {
        disjuncts: Vec<SearchQueryInput>,
        tie_breaker: Option<f32>,
    },
    /// Matches nothing.
    Empty,
    /// "More like this" similarity query; tuning knobs mirror Tantivy's builder.
    MoreLikeThis {
        min_doc_frequency: Option<u64>,
        max_doc_frequency: Option<u64>,
        min_term_frequency: Option<usize>,
        max_query_terms: Option<usize>,
        min_word_length: Option<usize>,
        max_word_length: Option<usize>,
        boost_factor: Option<f32>,
        stopwords: Option<Vec<String>>,
        // Either an inline document of (field, value) pairs...
        document: Option<Vec<(String, OwnedValue)>>,
        // ...or a key value identifying an existing document.
        key_value: Option<OwnedValue>,
        fields: Option<Vec<String>>,
    },
    /// Free-form query string handled by Tantivy's query parser.
    Parse {
        query_string: String,
        lenient: Option<bool>,
        conjunction_mode: Option<bool>,
    },

    /// Matches documents containing any of `terms`.
    TermSet {
        terms: Vec<TermInput>,
    },
    /// Ties the inner query to a specific index by oid.
    WithIndex {
        oid: pg_sys::Oid,
        query: Box<SearchQueryInput>,
    },
    /// A deferred Postgres expression, resolved at execution time.
    PostgresExpression {
        expr: PostgresExpression,
    },
    /// Mixed query with indexed search and heap field filters
    HeapFilter {
        indexed_query: Box<SearchQueryInput>,
        field_filters: Vec<HeapFieldFilter>,
    },

    /// A `pdb::Query` scoped to a single field; uses custom (de)serialization
    /// that flattens the field name into the query's JSON object.
    #[serde(serialize_with = "serialize_fielded_query")]
    #[serde(deserialize_with = "deserialize_fielded_query")]
    #[serde(untagged)]
    FieldedQuery {
        field: FieldName,
        query: pdb::Query,
    },
}

fn serialize_fielded_query<S>(
    field: &FieldName,
    query: &pdb::Query,
    serializer: S,
) -> Result<S::Ok, S::Error>
where
    S: Serializer,
{
    let mut query_json = serde_json::to_value(query).unwrap();

    if let Some(map) = query_json.as_object_mut() {
        let fielded_query_input_entry = map.values_mut().next().unwrap();
        fielded_query_input_entry
            .as_object_mut()
            .unwrap()
            .shift_insert(0, "field".into(), serde_json::to_value(field).unwrap());

        query_json.serialize(serializer)
    } else if let Some(variant_name) = query_json.as_str() {
        let mut map = serde_json::Map::new();
        map.insert("field".into(), serde_json::to_value(field).unwrap());

        let mut object = serde_json::Map::new();
        object.insert(variant_name.to_string(), serde_json::Value::Object(map));
        object.serialize(serializer)
    } else {
        Err(<S::Error as serde::ser::Error>::custom(
            "this does not appear to be a `pdb::Query` instance",
        ))
    }
}

fn deserialize_fielded_query<'de, D>(deserializer: D) -> Result<(FieldName, pdb::Query), D::Error>
where
    D: Deserializer<'de>,
{
    struct Visitor;
    impl<'de> serde::de::Visitor<'de> for Visitor {
        type Value = (FieldName, pdb::Query);

        fn expecting(&self, formatter: &mut Formatter) -> std::fmt::Result {
            formatter.write_str("a map")
        }

        fn visit_map<A>(self, mut map: A) -> std::result::Result<Self::Value, A::Error>
        where
            A: MapAccess<'de>,
        {
            let Some((key, mut value)) = map.next_entry::<String, serde_json::Value>()? else {
                return Err(<A::Error as serde::de::Error>::custom(
                    "this does not appear to be a `pdb::Query` instance",
                ));
            };

            if let Some(field_entry) = value.as_object_mut().unwrap().remove_entry("field") {
                // pull the field out of the object that also contains the FieldedQueryInput
                let field = field_entry.1;
                let field = serde_json::from_value::<FieldName>(field).unwrap();

                if value.as_object_mut().unwrap().is_empty() {
                    let field_query_input =
                        serde_json::from_value::<pdb::Query>(serde_json::Value::String(key))
                            .unwrap();
                    Ok((field, field_query_input))
                } else {
                    let mut reconstructed = serde_json::Map::new();
                    reconstructed.insert(key, value);

                    let field_query_input = serde_json::from_value::<pdb::Query>(
                        serde_json::Value::Object(reconstructed),
                    )
                    .unwrap();
                    Ok((field, field_query_input))
                }
            } else {
                Err(<A::Error as serde::de::Error>::custom(
                    "this does not appear to be a `pdb::Query` instance",
                ))
            }
        }
    }
    deserializer.deserialize_map(Visitor)
}

impl SearchQueryInput {
    /// Wraps a raw Postgres [`pg_sys::Node`] pointer as a
    /// [`SearchQueryInput::PostgresExpression`]. The `expr_state` pointer is
    /// left at its default (null) and is filled in later, at execution time.
    pub fn postgres_expression(node: *mut pg_sys::Node) -> Self {
        SearchQueryInput::PostgresExpression {
            expr: PostgresExpression {
                node: PostgresPointer(node.cast()),
                expr_state: PostgresPointer::default(),
            },
        }
    }

    /// Reports whether evaluating this query requires relevance scores.
    ///
    /// `MoreLikeThis` and `ScoreFilter` always need scores; compound/wrapper
    /// variants need them iff any child does; all other variants do not.
    pub fn need_scores(&self) -> bool {
        match self {
            SearchQueryInput::Boolean {
                must,
                should,
                must_not,
            } => must
                .iter()
                .chain(should.iter())
                .chain(must_not.iter())
                .any(Self::need_scores),
            SearchQueryInput::Boost { query, .. } => Self::need_scores(query),
            SearchQueryInput::ConstScore { query, .. } => Self::need_scores(query),
            SearchQueryInput::DisjunctionMax { disjuncts, .. } => {
                disjuncts.iter().any(Self::need_scores)
            }
            SearchQueryInput::WithIndex { query, .. } => Self::need_scores(query),
            SearchQueryInput::HeapFilter { indexed_query, .. } => Self::need_scores(indexed_query),
            SearchQueryInput::MoreLikeThis { .. } => true,
            SearchQueryInput::ScoreFilter { .. } => true,
            _ => false,
        }
    }

    /// Returns the target index oid if this node is a
    /// [`SearchQueryInput::WithIndex`] wrapper; does not recurse into children.
    pub fn index_oid(&self) -> Option<pg_sys::Oid> {
        match self {
            SearchQueryInput::WithIndex { oid, .. } => Some(*oid),
            _ => None,
        }
    }

    /// Returns `true` when this query is guaranteed to match every document
    /// in the index (i.e. it is semantically a full scan).
    pub fn is_full_scan_query(&self) -> bool {
        match self {
            // All by itself is a full scan
            SearchQueryInput::All => true,

            // Boolean queries - analyze based on Boolean semantics:
            // A document matches if it matches ALL Must AND NONE of MustNot AND at least one of (Must OR Should)
            SearchQueryInput::Boolean {
                must,
                should,
                must_not,
            } => {
                // For the query to be a full scan, ALL documents must match

                // Check if all Must clauses would match all documents
                let all_must_match_all =
                    must.is_empty() || must.iter().all(Self::is_full_scan_query);

                // Check if we have at least one clause that matches all documents
                // If we have Must clauses, the "at least one" is satisfied if all Must are full scans
                // If we have no Must clauses, we need at least one Should to be a full scan
                let has_matching_clause = if !must.is_empty() {
                    // If Must clauses exist and all match all docs, then we satisfy "at least one"
                    all_must_match_all
                } else {
                    // No Must clauses, so we need at least one Should to match all docs
                    should.iter().any(Self::is_full_scan_query)
                };

                // MustNot clauses must not exclude any documents
                // This means all MustNot clauses must be Empty (which matches nothing, so "not nothing" = everything)
                let must_not_excludes_nothing = must_not.is_empty()
                    || must_not
                        .iter()
                        .all(|q| matches!(q, SearchQueryInput::Empty));

                // Only a full scan if all conditions are met
                all_must_match_all && has_matching_clause && must_not_excludes_nothing
            }

            // DisjunctionMax - full scan if any disjunct is full scan (OR semantics)
            SearchQueryInput::DisjunctionMax { disjuncts, .. } => {
                disjuncts.iter().any(Self::is_full_scan_query)
            }

            // Wrapper queries - inherit from inner query
            SearchQueryInput::WithIndex { query, .. } => Self::is_full_scan_query(query),
            SearchQueryInput::Boost { query, .. } => Self::is_full_scan_query(query),
            SearchQueryInput::ConstScore { query, .. } => Self::is_full_scan_query(query),
            SearchQueryInput::ScoreFilter {
                query: Some(query), ..
            } => Self::is_full_scan_query(query),
            SearchQueryInput::HeapFilter { indexed_query, .. } => {
                Self::is_full_scan_query(indexed_query)
            }

            // All other variants are not full scans
            _ => false,
        }
    }

    /// Collects the root names of every field this query references into
    /// `field_names`, recursing through compound and wrapper variants.
    pub fn extract_field_names(&self, field_names: &mut crate::api::HashSet<String>) {
        match self {
            SearchQueryInput::Boolean {
                must,
                should,
                must_not,
            } => {
                for q in must.iter().chain(should.iter()).chain(must_not.iter()) {
                    q.extract_field_names(field_names);
                }
            }
            SearchQueryInput::Boost { query, .. } => {
                query.extract_field_names(field_names);
            }
            SearchQueryInput::ConstScore { query, .. } => {
                query.extract_field_names(field_names);
            }
            SearchQueryInput::DisjunctionMax { disjuncts, .. } => {
                for q in disjuncts {
                    q.extract_field_names(field_names);
                }
            }
            SearchQueryInput::WithIndex { query, .. } => {
                query.extract_field_names(field_names);
            }
            SearchQueryInput::HeapFilter { indexed_query, .. } => {
                indexed_query.extract_field_names(field_names);
            }
            SearchQueryInput::FieldedQuery { field, .. } => {
                field_names.insert(field.root());
            }
            // For other query types, we can't easily extract field names
            // This is a conservative approach - if we can't determine, we allow it
            _ => {}
        }
    }

    /// Applies `visitor` to this node (pre-order), then recursively to each
    /// child query.
    ///
    /// Note: `Boolean` children are visited `must_not`, then `should`, then
    /// `must` -- this order is observable to the visitor. Panics if a
    /// `ScoreFilter`'s inner query has not been set.
    pub fn visit(&mut self, visitor: &mut impl FnMut(&mut SearchQueryInput)) {
        // Visit this node.
        visitor(self);
        // Then recurse on its children.
        match self {
            SearchQueryInput::Boolean {
                must,
                should,
                must_not,
            } => {
                for q in must_not {
                    q.visit(visitor);
                }
                for q in should {
                    q.visit(visitor);
                }
                for q in must {
                    q.visit(visitor);
                }
            }
            SearchQueryInput::Boost { query, .. } => {
                query.visit(visitor);
            }
            SearchQueryInput::ConstScore { query, .. } => {
                query.visit(visitor);
            }
            SearchQueryInput::ScoreFilter { query, .. } => {
                query
                    .as_mut()
                    .expect("ScoreFilter's query should have been set")
                    .visit(visitor);
            }
            SearchQueryInput::DisjunctionMax { disjuncts, .. } => {
                for q in disjuncts {
                    q.visit(visitor);
                }
            }
            SearchQueryInput::WithIndex { query, .. } => {
                query.visit(visitor);
            }
            SearchQueryInput::HeapFilter { indexed_query, .. } => {
                indexed_query.visit(visitor);
            }

            // Leaf variants: nothing further to visit.
            SearchQueryInput::Uninitialized
            | SearchQueryInput::All
            | SearchQueryInput::Empty
            | SearchQueryInput::MoreLikeThis { .. }
            | SearchQueryInput::Parse { .. }
            | SearchQueryInput::TermSet { .. }
            | SearchQueryInput::PostgresExpression { .. }
            | SearchQueryInput::FieldedQuery { .. } => {}
        }
    }

    /// Serializes this query to JSON with run-to-run variability (index oids,
    /// raw postgres_expression pointers) stripped, yielding a stable string
    /// suitable for comparison (e.g. in EXPLAIN regression tests).
    pub fn canonical_query_string(&self) -> String {
        let mut cleaned_query = serde_json::to_value(self)
            .unwrap_or_else(|_| serde_json::Value::String("Error serializing query".to_string()));
        cleanup_variabilities_from_tantivy_query(&mut cleaned_query);
        serde_json::to_string(&cleaned_query).unwrap_or_else(|_| "Error".to_string())
    }
}

/// Removes run-to-run variability from the JSON form of a query.
///
/// Strips the `oid` from any `with_index` object and removes any
/// `postgres_expression` key, recursing through nested objects and arrays.
/// This helps to reduce the variability of the explain output used in
/// regression tests.
pub fn cleanup_variabilities_from_tantivy_query(json_value: &mut serde_json::Value) {
    match json_value {
        serde_json::Value::Object(obj) => {
            // Single `get_mut` lookup instead of `contains_key` + `get_mut`
            // (which hashed the key twice).
            if let Some(with_index_obj) =
                obj.get_mut("with_index").and_then(|v| v.as_object_mut())
            {
                with_index_obj.remove("oid");
            }

            // Remove any field named "postgres_expression" (serialized raw
            // pointers are inherently unstable).
            obj.remove("postgres_expression");

            // Recursively process all values in the object
            for value in obj.values_mut() {
                cleanup_variabilities_from_tantivy_query(value);
            }
        }
        serde_json::Value::Array(arr) => {
            // Recursively process all elements in the array
            for item in arr.iter_mut() {
                cleanup_variabilities_from_tantivy_query(item);
            }
        }
        // Base cases: primitive values don't need processing
        _ => {}
    }
}

impl ExplainFormat for SearchQueryInput {
    /// Renders this query for `EXPLAIN` output by delegating to the shared
    /// custom-scan formatting helper.
    fn explain_format(&self) -> String {
        format_for_explain(self)
    }
}

/// A single field/value pair, as used by [`SearchQueryInput::TermSet`].
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct TermInput {
    /// The search field this term applies to.
    pub field: FieldName,
    /// The term's value.
    pub value: OwnedValue,
    /// When `true`, `value` is treated as a datetime. Defaults to `false`
    /// if absent from the serialized form.
    #[serde(default)]
    pub is_datetime: bool,
}

/// Serialize a [`SearchQueryInput`] node to a Postgres [`pg_sys::Const`] node, palloc'd
/// in the current memory context.
impl From<SearchQueryInput> for *mut pg_sys::Const {
    fn from(value: SearchQueryInput) -> Self {
        unsafe {
            // SAFETY: `makeConst` copies its arguments into a freshly
            // palloc'd Const in the current memory context; `into_datum()`
            // produces the serialized representation of the query.
            // NOTE(review): `.unwrap()` assumes `into_datum()` never returns
            // `None` for this type — confirm the PostgresType impl guarantees
            // that.
            pg_sys::makeConst(
                searchqueryinput_typoid(), // consttype: SQL type oid for SearchQueryInput
                -1,                        // consttypmod: no type modifier
                pg_sys::Oid::INVALID,      // constcollid: not collatable
                -1,                        // constlen: -1 => variable-length type
                value.into_datum().unwrap(),
                false, // constisnull
                false, // constbyval: passed by reference
            )
        }
    }
}

/// Canonicalizes range bounds for a range query based on the Postgres type
/// (`typeoid`) of the value being searched.
///
/// Two kinds of rewrites happen here:
/// * Integer bounds are normalized toward half-open form: an `Excluded` lower
///   integer becomes `Included(n + 1)` and an `Included` upper integer becomes
///   `Excluded(n + 1)`.
/// * String-typed date/timestamp bounds are parsed into Tantivy `DateTime`
///   values; plain `date`/`daterange` bounds are additionally shifted by one
///   day where a bound must cover (or exclude) the whole day.
///
/// NOTE(review): the `n + 1` canonicalizations overflow when `n` is
/// `u64::MAX` / `i64::MAX` (panic in debug builds, wrap in release). Confirm
/// such extreme bounds cannot reach this function, or use checked arithmetic.
fn check_range_bounds(
    typeoid: PgOid,
    lower_bound: Bound<OwnedValue>,
    upper_bound: Bound<OwnedValue>,
) -> Result<(Bound<OwnedValue>, Bound<OwnedValue>), QueryError> {
    // Nanoseconds in one day, used to shift whole-day date bounds.
    let one_day_nanos: i64 = 86_400_000_000_000;
    let lower_bound = match (typeoid, lower_bound.clone()) {
        // Excluded U64 needs to be canonicalized
        (_, Bound::Excluded(OwnedValue::U64(n))) => Bound::Included(OwnedValue::U64(n + 1)),
        // Excluded I64 needs to be canonicalized
        (_, Bound::Excluded(OwnedValue::I64(n))) => Bound::Included(OwnedValue::I64(n + 1)),
        // Excluded Date needs to be canonicalized
        (
            PgOid::BuiltIn(PgBuiltInOids::DATEOID | PgBuiltInOids::DATERANGEOID),
            Bound::Excluded(OwnedValue::Str(date_string)),
        ) => {
            // Excluding a whole day means the range starts at the next day.
            let datetime = convert_pg_date_string(typeoid, &date_string);
            let nanos = datetime.into_timestamp_nanos();
            Bound::Included(OwnedValue::Date(DateTime::from_timestamp_nanos(
                nanos + one_day_nanos,
            )))
        }
        // Timestamp-typed strings: parse but keep the bound's exclusivity.
        (
            PgOid::BuiltIn(
                PgBuiltInOids::TIMESTAMPOID
                | PgBuiltInOids::TSRANGEOID
                | PgBuiltInOids::TIMESTAMPTZOID
                | pg_sys::BuiltinOid::TSTZRANGEOID,
            ),
            Bound::Excluded(OwnedValue::Str(date_string)),
        ) => {
            let datetime = convert_pg_date_string(typeoid, &date_string);
            Bound::Excluded(OwnedValue::Date(datetime))
        }
        // Included date/timestamp strings: parse in place, no day shift needed
        // for a lower bound.
        (
            PgOid::BuiltIn(
                PgBuiltInOids::DATEOID
                | PgBuiltInOids::DATERANGEOID
                | PgBuiltInOids::TIMESTAMPOID
                | PgBuiltInOids::TSRANGEOID
                | PgBuiltInOids::TIMESTAMPTZOID
                | pg_sys::BuiltinOid::TSTZRANGEOID,
            ),
            Bound::Included(OwnedValue::Str(date_string)),
        ) => {
            let datetime = convert_pg_date_string(typeoid, &date_string);
            Bound::Included(OwnedValue::Date(datetime))
        }
        _ => lower_bound,
    };

    let upper_bound = match (typeoid, upper_bound.clone()) {
        // Included U64 needs to be canonicalized
        (_, Bound::Included(OwnedValue::U64(n))) => Bound::Excluded(OwnedValue::U64(n + 1)),
        // Included I64 needs to be canonicalized
        (_, Bound::Included(OwnedValue::I64(n))) => Bound::Excluded(OwnedValue::I64(n + 1)),
        // Included Date needs to be canonicalized
        (
            PgOid::BuiltIn(PgBuiltInOids::DATEOID | PgBuiltInOids::DATERANGEOID),
            Bound::Included(OwnedValue::Str(date_string)),
        ) => {
            // Including a whole day means the range extends to the start of
            // the next day, exclusive.
            let datetime = convert_pg_date_string(typeoid, &date_string);
            let nanos = datetime.into_timestamp_nanos();
            Bound::Excluded(OwnedValue::Date(DateTime::from_timestamp_nanos(
                nanos + one_day_nanos,
            )))
        }
        // Timestamp-typed strings: parse but keep the bound's inclusivity.
        (
            PgOid::BuiltIn(
                PgBuiltInOids::TIMESTAMPOID
                | PgBuiltInOids::TSRANGEOID
                | PgBuiltInOids::TIMESTAMPTZOID
                | pg_sys::BuiltinOid::TSTZRANGEOID,
            ),
            Bound::Included(OwnedValue::Str(date_string)),
        ) => {
            let datetime = convert_pg_date_string(typeoid, &date_string);
            Bound::Included(OwnedValue::Date(datetime))
        }
        // Excluded date/timestamp strings: parse in place, exclusivity kept.
        (
            PgOid::BuiltIn(
                PgBuiltInOids::DATEOID
                | PgBuiltInOids::DATERANGEOID
                | PgBuiltInOids::TIMESTAMPOID
                | PgBuiltInOids::TSRANGEOID
                | PgBuiltInOids::TIMESTAMPTZOID
                | pg_sys::BuiltinOid::TSTZRANGEOID,
            ),
            Bound::Excluded(OwnedValue::Str(date_string)),
        ) => {
            let datetime = convert_pg_date_string(typeoid, &date_string);
            Bound::Excluded(OwnedValue::Date(datetime))
        }
        _ => upper_bound,
    };
    Ok((lower_bound, upper_bound))
}

/// Widens integer bound values (`U64`/`I64`) to `F64` when the target field
/// is an `F64` field, so the bound's type matches the indexed type. All other
/// bound/field combinations pass through unchanged.
fn coerce_bound_to_field_type(
    bound: Bound<OwnedValue>,
    field_type: &FieldType,
) -> Bound<OwnedValue> {
    // Only F64 fields need coercion; everything else passes through as-is.
    if !matches!(field_type, FieldType::F64(_)) {
        return bound;
    }

    // Widen integer values to F64; leave non-integers untouched.
    let widen = |v: OwnedValue| match v {
        OwnedValue::U64(n) => OwnedValue::F64(n as f64),
        OwnedValue::I64(n) => OwnedValue::F64(n as f64),
        other => other,
    };

    match bound {
        Bound::Included(v) => Bound::Included(widen(v)),
        Bound::Excluded(v) => Bound::Excluded(widen(v)),
        Bound::Unbounded => Bound::Unbounded,
    }
}

impl SearchQueryInput {
    /// Lowers this logical query AST into an executable tantivy [`TantivyQuery`].
    ///
    /// * `parser` is a constructor closure: arms that need a mutable
    ///   [`QueryParser`] (the `Parse` variant may enable conjunction mode)
    ///   build a fresh one so siblings are unaffected.
    /// * `expr_context` / `planstate` are only required by variants that
    ///   evaluate Postgres expressions during the scan (see `HeapFilter`);
    ///   for everything else they are merely threaded through recursion.
    ///
    /// Panics on `Uninitialized` and on `PostgresExpression` (both are
    /// expected to have been replaced/resolved before lowering), and on a
    /// malformed `MoreLikeThis` input.
    #[allow(clippy::too_many_arguments)]
    pub fn into_tantivy_query<QueryParserCtor: Fn() -> QueryParser>(
        self,
        schema: &SearchIndexSchema,
        parser: &QueryParserCtor,
        searcher: &Searcher,
        index_oid: pg_sys::Oid,
        relation_oid: Option<pg_sys::Oid>,
        expr_context: Option<std::ptr::NonNull<pg_sys::ExprContext>>,
        planstate: Option<std::ptr::NonNull<pg_sys::PlanState>>,
    ) -> Result<Box<dyn TantivyQuery>> {
        match self {
            SearchQueryInput::Uninitialized => {
                panic!("this `SearchQueryInput` instance is uninitialized")
            }
            // Match-all, pinned to a constant score of 0.0 so it does not
            // influence ranking when combined with scoring clauses.
            SearchQueryInput::All => Ok(Box::new(ConstScoreQuery::new(Box::new(AllQuery), 0.0))),
            // Recursively lower each clause, tagging it with its Occur kind.
            SearchQueryInput::Boolean {
                must,
                should,
                must_not,
            } => {
                let mut subqueries = vec![];
                for input in must {
                    subqueries.push((
                        Occur::Must,
                        input.into_tantivy_query(
                            schema,
                            parser,
                            searcher,
                            index_oid,
                            relation_oid,
                            expr_context,
                            planstate,
                        )?,
                    ));
                }
                for input in should {
                    subqueries.push((
                        Occur::Should,
                        input.into_tantivy_query(
                            schema,
                            parser,
                            searcher,
                            index_oid,
                            relation_oid,
                            expr_context,
                            planstate,
                        )?,
                    ));
                }
                for input in must_not {
                    subqueries.push((
                        Occur::MustNot,
                        input.into_tantivy_query(
                            schema,
                            parser,
                            searcher,
                            index_oid,
                            relation_oid,
                            expr_context,
                            planstate,
                        )?,
                    ));
                }
                Ok(Box::new(BooleanQuery::new(subqueries)))
            }
            // Multiplies the inner query's score by `factor`.
            SearchQueryInput::Boost { query, factor } => Ok(Box::new(BoostQuery::new(
                query.into_tantivy_query(
                    schema,
                    parser,
                    searcher,
                    index_oid,
                    relation_oid,
                    expr_context,
                    planstate,
                )?,
                factor,
            ))),
            // Replaces the inner query's score with a fixed value.
            SearchQueryInput::ConstScore { query, score } => Ok(Box::new(ConstScoreQuery::new(
                query.into_tantivy_query(
                    schema,
                    parser,
                    searcher,
                    index_oid,
                    relation_oid,
                    expr_context,
                    planstate,
                )?,
                score,
            ))),
            // `query` is an Option only for serialization purposes; by the
            // time we lower it, it must be present.
            SearchQueryInput::ScoreFilter { bounds, query } => Ok(Box::new(ScoreFilter::new(
                bounds,
                query
                    .expect("ScoreFilter's query should have been set")
                    .into_tantivy_query(
                        schema,
                        parser,
                        searcher,
                        index_oid,
                        relation_oid,
                        expr_context,
                        planstate,
                    )?,
            ))),
            SearchQueryInput::DisjunctionMax {
                disjuncts,
                tie_breaker,
            } => {
                // Lower every disjunct, short-circuiting on the first error.
                let disjuncts = disjuncts
                    .into_iter()
                    .map(|query| {
                        query.into_tantivy_query(
                            schema,
                            parser,
                            searcher,
                            index_oid,
                            relation_oid,
                            expr_context,
                            planstate,
                        )
                    })
                    .collect::<Result<_, _>>()?;
                if let Some(tie_breaker) = tie_breaker {
                    Ok(Box::new(DisjunctionMaxQuery::with_tie_breaker(
                        disjuncts,
                        tie_breaker,
                    )))
                } else {
                    Ok(Box::new(DisjunctionMaxQuery::new(disjuncts)))
                }
            }
            // Matches nothing.
            SearchQueryInput::Empty => Ok(Box::new(EmptyQuery)),
            SearchQueryInput::MoreLikeThis {
                min_doc_frequency,
                max_doc_frequency,
                min_term_frequency,
                max_query_terms,
                min_word_length,
                max_word_length,
                boost_factor,
                stopwords,
                document,
                key_value,
                fields,
            } => {
                let mut builder = MoreLikeThisQuery::builder();

                // default min_doc_frequency to 1, Tantivy's default is 5
                if let Some(min_doc_frequency) = min_doc_frequency {
                    builder = builder.with_min_doc_frequency(min_doc_frequency);
                } else {
                    builder = builder.with_min_doc_frequency(1);
                }
                // default min_term_frequency to 1, Tantivy's default is 2
                if let Some(min_term_frequency) = min_term_frequency {
                    builder = builder.with_min_term_frequency(min_term_frequency);
                } else {
                    builder = builder.with_min_term_frequency(1);
                }
                if let Some(max_doc_frequency) = max_doc_frequency {
                    builder = builder.with_max_doc_frequency(max_doc_frequency);
                }
                if let Some(max_query_terms) = max_query_terms {
                    builder = builder.with_max_query_terms(max_query_terms);
                }
                if let Some(min_work_length) = min_word_length {
                    builder = builder.with_min_word_length(min_work_length);
                }
                if let Some(max_work_length) = max_word_length {
                    builder = builder.with_max_word_length(max_work_length);
                }
                if let Some(boost_factor) = boost_factor {
                    builder = builder.with_boost_factor(boost_factor);
                }
                if let Some(stopwords) = stopwords {
                    builder = builder.with_stop_words(stopwords);
                }

                // The two supported input shapes are mutually exclusive:
                // either a (key, value) pointing at an indexed doc, or a
                // literal document of (field, value) pairs.
                match (key_value, fields, document) {
                    (Some(key_value), fields, None) => {
                        Ok(match builder.with_key_value(key_value, fields, index_oid) {
                            Some(query) => Box::new(query),
                            None => Box::new(EmptyQuery),
                        })
                    }
                    (None, None, Some(doc)) => {
                        // Group the provided values by resolved schema field,
                        // coercing each value to the field's type first.
                        let mut fields_map = HashMap::default();
                        for (field, mut value) in doc {
                            let search_field = schema
                                .search_field(&field)
                                .ok_or(QueryError::NonIndexedField(field.into()))?;
                            search_field.try_coerce(&mut value)?;
                            fields_map
                                .entry(search_field.field())
                                .or_insert_with(Vec::new);

                            if let Some(vec) = fields_map.get_mut(&search_field.field()) {
                                vec.push(value)
                            }
                        }
                        Ok(Box::new(
                            builder.with_document(fields_map.into_iter().collect()),
                        ))
                    }
                    _ => {
                        panic!("more_like_this must be called with either key_value or document")
                    }
                }
            }
            SearchQueryInput::Parse {
                query_string,
                lenient,
                conjunction_mode,
            } => {
                // Fresh parser per Parse node so conjunction mode does not
                // leak into sibling queries.
                let mut parser = parser();
                if let Some(true) = conjunction_mode {
                    parser.set_conjunction_by_default();
                }

                match lenient {
                    // Lenient parsing never fails; errors are discarded.
                    Some(true) => {
                        let (parsed_query, _) = parser.parse_query_lenient(&query_string);
                        Ok(Box::new(parsed_query))
                    }
                    _ => {
                        Ok(Box::new(parser.parse_query(&query_string).map_err(
                            |err| QueryError::ParseError(err, query_string),
                        )?))
                    }
                }
            }
            SearchQueryInput::TermSet { terms: fields } => {
                Ok(Box::new(TermSetQuery::new(fields.into_iter().flat_map(
                    |TermInput {
                         field,
                         value,
                         is_datetime,
                     }| {
                        let search_field = schema
                            .search_field(field.root())
                            .ok_or_else(|| QueryError::NonIndexedField(field.clone()))
                            .expect("could not find search field");
                        let field_type = search_field.field_entry().field_type();
                        let is_datetime = search_field.is_datetime() || is_datetime;

                        // Check if this is a JSON field with numeric value
                        let is_json_field = matches!(field_type, FieldType::JsonObject(_));
                        let is_numeric = matches!(
                            value,
                            OwnedValue::F64(_) | OwnedValue::I64(_) | OwnedValue::U64(_)
                        );

                        if is_json_field && is_numeric && !is_datetime {
                            // For JSON numeric fields, expand to multiple type variants
                            expand_json_numeric_to_terms(
                                search_field.field(),
                                &value,
                                field.path().as_deref(),
                            )
                            .expect("could not expand JSON numeric to terms")
                        } else {
                            // Standard term creation for non-JSON or non-numeric fields
                            smallvec![value_to_term(
                                search_field.field(),
                                &value,
                                field_type,
                                field.path().as_deref(),
                                is_datetime,
                            )
                            .expect("could not convert argument to search term")]
                        }
                    },
                ))))
            }
            // The index tag is only used at plan time; lowering just unwraps
            // the inner query.
            SearchQueryInput::WithIndex { query, .. } => query.into_tantivy_query(
                schema,
                parser,
                searcher,
                index_oid,
                relation_oid,
                expr_context,
                planstate,
            ),
            SearchQueryInput::HeapFilter {
                indexed_query,
                field_filters,
            } => {
                // Convert indexed query first
                let indexed_tantivy_query = indexed_query.into_tantivy_query(
                    schema,
                    parser,
                    searcher,
                    index_oid,
                    relation_oid,
                    expr_context,
                    planstate,
                )?;

                // Is initialized in `begin_custom_scan` if `has_heap_filters`.
                let expr_context = expr_context
                    .expect("An expression context must be provided when heap filtering.");

                // Create combined query with heap field filters
                Ok(Box::new(heap_field_filter::HeapFilterQuery::new(
                    indexed_tantivy_query,
                    field_filters,
                    relation_oid.expect("relation_oid is required for HeapFilter queries"),
                    expr_context,
                    planstate,
                )))
            }
            // NOTE(review): presumably these are resolved/replaced earlier in
            // planning; reaching this arm is a bug upstream.
            SearchQueryInput::PostgresExpression { .. } => {
                panic!("postgres expressions have not been solved")
            }
            // Field-scoped queries lower through pdb::Query's own conversion.
            SearchQueryInput::FieldedQuery { field, query } => {
                query.into_tantivy_query(field, schema, parser, searcher)
            }
        }
    }
}

/// Builds a tantivy [`Term`] for a value inside a JSON field.
///
/// The term starts from the JSON path (empty path if `path` is `None`) and
/// then has the typed value appended. String values are parsed as datetimes
/// when `is_datetime` is set; unsupported value kinds panic.
fn value_to_json_term(
    field: Field,
    value: &OwnedValue,
    path: Option<&str>,
    expand_dots: bool,
    is_datetime: bool,
) -> Result<Term> {
    let mut term = Term::from_field_json_path(field, path.unwrap_or_default(), expand_dots);
    match value {
        OwnedValue::Str(text) if is_datetime => {
            let TantivyDateTime(date) = TantivyDateTime::try_from(text.as_str())?;
            // https://github.com/quickwit-oss/tantivy/pull/2456
            // It's a footgun that date needs to truncated when creating the Term
            term.append_type_and_fast_value(date.truncate(DATE_TIME_PRECISION_INDEXED));
        }
        OwnedValue::Str(text) => term.append_type_and_str(text),
        // Prefer the i64 representation when the value fits, falling back to
        // u64 only for values above i64::MAX.
        OwnedValue::U64(v) => match i64::try_from(*v) {
            Ok(as_i64) => term.append_type_and_fast_value::<i64>(as_i64),
            Err(_) => term.append_type_and_fast_value(*v),
        },
        OwnedValue::I64(v) => term.append_type_and_fast_value(*v),
        OwnedValue::F64(v) => term.append_type_and_fast_value(*v),
        OwnedValue::Bool(v) => term.append_type_and_fast_value(*v),
        OwnedValue::Date(v) => term.append_type_and_fast_value(*v),
        unsupported => panic!(
            "Tantivy OwnedValue type {:?} not supported for JSON term",
            unsupported
        ),
    }
    Ok(term)
}

/// Converts an [`OwnedValue`] into a tantivy [`Term`] for the given field.
///
/// JSON fields are delegated to [`value_to_json_term`]; datetime strings are
/// parsed and truncated to the indexed precision. Panics for value kinds that
/// have no term representation (pre-tokenized text, objects, etc.).
pub fn value_to_term(
    field: Field,
    value: &OwnedValue,
    field_type: &FieldType,
    path: Option<&str>,
    is_datetime: bool,
) -> Result<Term> {
    // JSON fields carry a path and their own expand-dots setting.
    if let FieldType::JsonObject(options) = field_type {
        return value_to_json_term(
            field,
            value,
            path,
            options.is_expand_dots_enabled(),
            is_datetime,
        );
    }

    if is_datetime {
        if let OwnedValue::Str(text) = value {
            let TantivyDateTime(date) = TantivyDateTime::try_from(text.as_str())?;
            // https://github.com/quickwit-oss/tantivy/pull/2456
            // It's a footgun that date needs to truncated when creating the Term
            let truncated = date.truncate(DATE_TIME_PRECISION_INDEXED);
            return Ok(Term::from_field_date(field, truncated));
        }
    }

    let term = match value {
        OwnedValue::Str(text) => Term::from_field_text(field, text),
        OwnedValue::PreTokStr(_) => panic!("pre-tokenized text cannot be converted to term"),
        OwnedValue::U64(raw) => {
            // Positive numbers seem to be automatically turned into u64s even if they are i64s,
            // so we should use the field type to assign the term type
            match field_type {
                FieldType::I64(_) => Term::from_field_i64(field, *raw as i64),
                FieldType::U64(_) => Term::from_field_u64(field, *raw),
                _ => panic!("invalid field type for u64 value"),
            }
        }
        OwnedValue::I64(v) => Term::from_field_i64(field, *v),
        OwnedValue::F64(v) => Term::from_field_f64(field, *v),
        OwnedValue::Bool(v) => Term::from_field_bool(field, *v),
        OwnedValue::Date(v) => {
            Term::from_field_date(field, v.truncate(DATE_TIME_PRECISION_INDEXED))
        }
        OwnedValue::Facet(facet) => Term::from_facet(field, facet),
        OwnedValue::Bytes(bytes) => Term::from_field_bytes(field, bytes),
        OwnedValue::Object(_) => panic!("json cannot be converted to term"),
        OwnedValue::IpAddr(ip) => Term::from_field_ip_addr(field, *ip),
        _ => panic!("Tantivy OwnedValue type not supported"),
    };
    Ok(term)
}

/// Newtype that parses UTC timestamp strings (trailing `Z`) into tantivy's
/// microsecond-resolution [`DateTime`].
struct TantivyDateTime(pub DateTime);

impl TryFrom<&str> for TantivyDateTime {
    type Error = QueryError;

    fn try_from(text: &str) -> Result<Self, Self::Error> {
        // Try the whole-second format first, then the fractional-second
        // variant; anything else is reported as a type mismatch.
        let parsed = chrono::NaiveDateTime::parse_from_str(text, "%Y-%m-%dT%H:%M:%SZ")
            .or_else(|_| chrono::NaiveDateTime::parse_from_str(text, "%Y-%m-%dT%H:%M:%S%.fZ"))
            .map_err(|_| QueryError::FieldTypeMismatch)?;
        let micros = parsed.and_utc().timestamp_micros();
        Ok(TantivyDateTime(DateTime::from_timestamp_micros(micros)))
    }
}

/// Errors that can occur while building, coercing, or parsing search queries.
///
/// Messages are rendered via `thiserror`'s `#[error]` attributes.
#[allow(dead_code)]
#[derive(Debug, Error)]
pub enum QueryError {
    #[error("wrong field type for field: {0}")]
    WrongFieldType(FieldName),
    #[error("invalid field map json: {0}")]
    FieldMapJsonValue(#[source] serde_json::Error),
    #[error("field map json must be an object")]
    FieldMapJsonObject,
    #[error("invalid tokenizer setting, expected paradedb.tokenizer()")]
    InvalidTokenizer,
    #[error("field '{0}' is not part of the pg_search index")]
    NonIndexedField(FieldName),
    #[error("wrong type given for field")]
    FieldTypeMismatch,
    #[error("could not build regex with pattern '{1}': {0}")]
    RegexError(#[source] tantivy::TantivyError, String),
    #[error(
        r#"could not parse query string '{1}'.
           make sure to use column:term pairs, and to capitalize AND/OR."#
    )]
    ParseError(#[source] tantivy::query::QueryParserError, String),
    /// Catch-all wrapper for errors surfaced directly by tantivy.
    #[error("{0}")]
    TantivyError(#[source] tantivy::TantivyError),
    /// Catch-all wrapper for anyhow errors from internal helpers.
    #[error("{0}")]
    InternalError(#[source] anyhow::Error),
}

impl From<tantivy::TantivyError> for QueryError {
    /// Wraps a raw tantivy error so `?` can propagate it.
    fn from(value: tantivy::TantivyError) -> QueryError {
        Self::TantivyError(value)
    }
}

impl From<anyhow::Error> for QueryError {
    fn from(err: anyhow::Error) -> QueryError {
        QueryError::InternalError(err)
    }
}

/// A raw pointer into a Postgres `Node` tree, or null when unset.
///
/// `PartialEq` compares pointer values, not the pointed-to node structure.
#[derive(Debug, Clone, PartialEq)]
struct PostgresPointer(*mut std::os::raw::c_void);

// SAFETY: PostgresPointer is only used within PostgreSQL's single-threaded context
// during query execution. The PostgresPointer serialization/deserialization handles
// the cross-thread boundary properly via nodeToString/stringToNode.
unsafe impl Send for PostgresPointer {}
unsafe impl Sync for PostgresPointer {}

impl Default for PostgresPointer {
    fn default() -> Self {
        PostgresPointer(std::ptr::null_mut())
    }
}

impl Serialize for PostgresPointer {
    fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        if self.0.is_null() {
            serializer.serialize_none()
        } else {
            unsafe {
                let s = pg_sys::nodeToString(self.0.cast());
                let cstr = core::ffi::CStr::from_ptr(s)
                    .to_str()
                    .map_err(serde::ser::Error::custom)?;
                let string = cstr.to_owned();
                pg_sys::pfree(s.cast());
                serializer.serialize_some(&string)
            }
        }
    }
}

impl<'de> Deserialize<'de> for PostgresPointer {
    /// Deserializes the `Option<String>` produced by the `Serialize` impl:
    /// `None` becomes a null pointer, `Some(s)` is rebuilt with Postgres'
    /// `stringToNode`.
    fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        struct NodeVisitor;
        impl<'de2> Visitor<'de2> for NodeVisitor {
            type Value = PostgresPointer;

            fn expecting(&self, formatter: &mut Formatter) -> std::fmt::Result {
                write!(formatter, "a string representing a Postgres node")
            }

            fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>
            where
                E: serde::de::Error,
            {
                unsafe {
                    // CString::new fails on interior NULs, which a valid node
                    // string never contains; `cstr` outlives the FFI call, so
                    // the pointer passed to stringToNode stays valid.
                    let cstr = std::ffi::CString::new(v).map_err(E::custom)?;
                    let node = pg_sys::stringToNode(cstr.as_ptr());
                    Ok(PostgresPointer(node.cast()))
                }
            }

            // `Some(...)` — unwrap and parse the inner string.
            fn visit_some<D>(self, deserializer: D) -> Result<Self::Value, D::Error>
            where
                D: Deserializer<'de2>,
            {
                deserializer.deserialize_str(self)
            }

            // `None` — a null pointer.
            fn visit_none<E>(self) -> Result<Self::Value, E>
            where
                E: serde::de::Error,
            {
                Ok(PostgresPointer::default())
            }
        }

        deserializer.deserialize_option(NodeVisitor)
    }
}

/// A Postgres expression captured at plan time, plus its runtime-only
/// compiled expression state.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct PostgresExpression {
    /// The planned expression `Node` (round-trips via nodeToString/stringToNode).
    node: PostgresPointer,
    /// The executor's `ExprState`; never serialized — attached fresh per
    /// execution via `set_expr_state`.
    #[serde(skip)]
    expr_state: PostgresPointer,
}

impl PostgresExpression {
    pub fn new(node: *mut pg_sys::Node) -> Self {
        Self {
            node: PostgresPointer(node.cast()),
            expr_state: PostgresPointer::default(),
        }
    }

    pub fn set_expr_state(&mut self, expr_state: *mut pg_sys::ExprState) {
        self.expr_state = PostgresPointer(expr_state.cast())
    }

    #[inline]
    pub fn node(&self) -> *mut pg_sys::Node {
        self.node.0.cast()
    }

    #[inline]
    pub fn expr_state(&self) -> *mut pg_sys::ExprState {
        assert!(
            !self.expr_state.0.is_null(),
            "ExprState has not been initialized"
        );
        self.expr_state.0.cast()
    }
}

// Unit tests for `SearchQueryInput::is_full_scan_query`, which reports
// whether a query necessarily matches every document (and so behaves like a
// full scan). Covers base variants, Boolean clause combinations, disjunction
// max, wrapper variants, and nested compositions.
#[cfg(any(test, feature = "pg_test"))]
#[pgrx::pg_schema]
mod tests {
    use super::{SearchQueryInput, TermInput};
    use crate::query::pdb_query::pdb;

    use pgrx::prelude::*;
    use tantivy::schema::OwnedValue;

    // A single-term TermSet query — representative "selective" query that is
    // never a full scan.
    fn create_term_query() -> SearchQueryInput {
        SearchQueryInput::TermSet {
            terms: vec![TermInput {
                field: "test".into(),
                value: OwnedValue::Str("value".to_string()),
                is_datetime: false,
            }],
        }
    }

    // A field-scoped Match query — also never a full scan.
    fn create_match_query() -> SearchQueryInput {
        SearchQueryInput::FieldedQuery {
            field: "test".into(),
            query: pdb::Query::Match {
                value: "value".to_string(),
                tokenizer: None,
                distance: None,
                transposition_cost_one: None,
                prefix: None,
                conjunction_mode: None,
            },
        }
    }

    #[pg_test]
    fn test_is_full_scan_query_base_cases() {
        // All query is a full scan
        assert!(SearchQueryInput::All.is_full_scan_query());

        // Empty query is not a full scan
        assert!(!SearchQueryInput::Empty.is_full_scan_query());

        // Uninitialized is not a full scan
        assert!(!SearchQueryInput::Uninitialized.is_full_scan_query());

        // Term queries are not full scans
        assert!(!create_term_query().is_full_scan_query());

        // Match queries are not full scans
        assert!(!create_match_query().is_full_scan_query());

        // Exists queries are not full scans
        assert!(!SearchQueryInput::FieldedQuery {
            field: "test".into(),
            query: pdb::Query::Exists,
        }
        .is_full_scan_query());
    }

    #[pg_test]
    fn test_is_full_scan_query_boolean_must_only() {
        // Single Must clause with All → full scan
        assert!(SearchQueryInput::Boolean {
            must: vec![SearchQueryInput::All],
            should: vec![],
            must_not: vec![],
        }
        .is_full_scan_query());

        // Multiple Must clauses with all All → full scan
        assert!(SearchQueryInput::Boolean {
            must: vec![SearchQueryInput::All, SearchQueryInput::All],
            should: vec![],
            must_not: vec![],
        }
        .is_full_scan_query());

        // Must clause with All and term → not full scan (not all Must are full scan)
        assert!(!SearchQueryInput::Boolean {
            must: vec![SearchQueryInput::All, create_term_query()],
            should: vec![],
            must_not: vec![],
        }
        .is_full_scan_query());

        // Must clause with only term → not full scan
        assert!(!SearchQueryInput::Boolean {
            must: vec![create_term_query()],
            should: vec![],
            must_not: vec![],
        }
        .is_full_scan_query());
    }

    #[pg_test]
    fn test_is_full_scan_query_boolean_should_only() {
        // Should clause with All → full scan
        assert!(SearchQueryInput::Boolean {
            must: vec![],
            should: vec![SearchQueryInput::All],
            must_not: vec![],
        }
        .is_full_scan_query());

        // Should clause with All and term → full scan (any Should is full scan)
        assert!(SearchQueryInput::Boolean {
            must: vec![],
            should: vec![SearchQueryInput::All, create_term_query()],
            must_not: vec![],
        }
        .is_full_scan_query());

        // Should clause with only term → not full scan
        assert!(!SearchQueryInput::Boolean {
            must: vec![],
            should: vec![create_term_query()],
            must_not: vec![],
        }
        .is_full_scan_query());

        // Empty Should clause → not full scan
        assert!(!SearchQueryInput::Boolean {
            must: vec![],
            should: vec![],
            must_not: vec![],
        }
        .is_full_scan_query());
    }

    #[pg_test]
    fn test_is_full_scan_query_boolean_must_not() {
        // Must with All, MustNot with term → not full scan (MustNot excludes docs)
        assert!(!SearchQueryInput::Boolean {
            must: vec![SearchQueryInput::All],
            should: vec![],
            must_not: vec![create_term_query()],
        }
        .is_full_scan_query());

        // Must with All, MustNot with Empty → full scan (MustNot excludes nothing)
        assert!(SearchQueryInput::Boolean {
            must: vec![SearchQueryInput::All],
            should: vec![],
            must_not: vec![SearchQueryInput::Empty],
        }
        .is_full_scan_query());

        // Must with All, MustNot with multiple Empty → full scan
        assert!(SearchQueryInput::Boolean {
            must: vec![SearchQueryInput::All],
            should: vec![],
            must_not: vec![SearchQueryInput::Empty, SearchQueryInput::Empty],
        }
        .is_full_scan_query());

        // Must with All, MustNot with Empty and term → not full scan (one MustNot excludes docs)
        assert!(!SearchQueryInput::Boolean {
            must: vec![SearchQueryInput::All],
            should: vec![],
            must_not: vec![SearchQueryInput::Empty, create_term_query()],
        }
        .is_full_scan_query());
    }

    #[pg_test]
    fn test_is_full_scan_query_boolean_mixed() {
        // Must with All, Should with term → full scan (Must satisfies "at least one")
        assert!(SearchQueryInput::Boolean {
            must: vec![SearchQueryInput::All],
            should: vec![create_term_query()],
            must_not: vec![],
        }
        .is_full_scan_query());

        // Must with term, Should with All → not full scan (not all Must are full scan)
        assert!(!SearchQueryInput::Boolean {
            must: vec![create_term_query()],
            should: vec![SearchQueryInput::All],
            must_not: vec![],
        }
        .is_full_scan_query());
    }

    #[pg_test]
    fn test_is_full_scan_query_disjunction_max() {
        // DisjunctionMax with All → full scan
        assert!(SearchQueryInput::DisjunctionMax {
            disjuncts: vec![SearchQueryInput::All],
            tie_breaker: None,
        }
        .is_full_scan_query());

        // DisjunctionMax with All and term → full scan (any disjunct is full scan)
        assert!(SearchQueryInput::DisjunctionMax {
            disjuncts: vec![SearchQueryInput::All, create_term_query()],
            tie_breaker: None,
        }
        .is_full_scan_query());

        // DisjunctionMax with only terms → not full scan
        assert!(!SearchQueryInput::DisjunctionMax {
            disjuncts: vec![create_term_query(), create_match_query()],
            tie_breaker: None,
        }
        .is_full_scan_query());
    }

    #[pg_test]
    fn test_is_full_scan_query_wrapper_queries() {
        // WithIndex wrapping All → full scan
        assert!(SearchQueryInput::WithIndex {
            oid: 12345.into(),
            query: Box::new(SearchQueryInput::All),
        }
        .is_full_scan_query());

        // WithIndex wrapping term → not full scan
        assert!(!SearchQueryInput::WithIndex {
            oid: 12345.into(),
            query: Box::new(create_term_query()),
        }
        .is_full_scan_query());

        // Boost wrapping All → full scan
        assert!(SearchQueryInput::Boost {
            query: Box::new(SearchQueryInput::All),
            factor: 2.0,
        }
        .is_full_scan_query());

        // ConstScore wrapping All → full scan
        assert!(SearchQueryInput::ConstScore {
            query: Box::new(SearchQueryInput::All),
            score: 1.0,
        }
        .is_full_scan_query());

        // ScoreFilter with All → full scan
        assert!(SearchQueryInput::ScoreFilter {
            bounds: vec![],
            query: Some(Box::new(SearchQueryInput::All)),
        }
        .is_full_scan_query());

        // ScoreFilter without query → not full scan
        assert!(!SearchQueryInput::ScoreFilter {
            bounds: vec![],
            query: None,
        }
        .is_full_scan_query());

        // HeapFilter with All → full scan
        assert!(SearchQueryInput::HeapFilter {
            indexed_query: Box::new(SearchQueryInput::All),
            field_filters: vec![],
        }
        .is_full_scan_query());
    }

    #[pg_test]
    fn test_is_full_scan_query_nested_queries() {
        // Nested Boolean with All deep inside Should
        assert!(SearchQueryInput::Boolean {
            must: vec![],
            should: vec![SearchQueryInput::Boolean {
                must: vec![SearchQueryInput::All],
                should: vec![],
                must_not: vec![],
            }],
            must_not: vec![],
        }
        .is_full_scan_query());

        // Nested WithIndex(Boolean(All))
        assert!(SearchQueryInput::WithIndex {
            oid: 12345.into(),
            query: Box::new(SearchQueryInput::Boolean {
                must: vec![SearchQueryInput::All],
                should: vec![],
                must_not: vec![],
            }),
        }
        .is_full_scan_query());

        // Complex nesting that should not be full scan
        assert!(!SearchQueryInput::Boolean {
            must: vec![SearchQueryInput::Boolean {
                must: vec![SearchQueryInput::All, create_term_query()], // Not all Must are full scan
                should: vec![],
                must_not: vec![],
            }],
            should: vec![],
            must_not: vec![],
        }
        .is_full_scan_query());
    }
}
