import json
import math
import warnings
from asyncio import get_running_loop
from collections.abc import AsyncIterator
from datetime import datetime, timezone
from enum import Enum
from secrets import token_urlsafe
from typing import Annotated, Any, Optional, Union

import pandas as pd
import sqlalchemy as sa
from fastapi import APIRouter, Depends, Header, HTTPException, Path, Query
from pydantic import BaseModel, Field
from sqlalchemy import exists, select, update
from starlette.requests import Request
from starlette.responses import Response, StreamingResponse
from starlette.status import HTTP_404_NOT_FOUND
from strawberry.relay import GlobalID

from phoenix.config import DEFAULT_PROJECT_NAME
from phoenix.datetime_utils import normalize_datetime
from phoenix.db import models
from phoenix.db.helpers import SupportedSQLDialect, get_ancestor_span_rowids
from phoenix.db.insertion.helpers import as_kv, insert_on_conflict
from phoenix.server.api.routers.utils import df_to_bytes
from phoenix.server.api.routers.v1.annotations import SpanAnnotationData
from phoenix.server.api.types.node import from_global_id_with_expected_type
from phoenix.server.authorization import is_not_locked
from phoenix.server.bearer_auth import PhoenixUser
from phoenix.server.dml_event import SpanAnnotationInsertEvent, SpanDeleteEvent
from phoenix.trace.attributes import flatten, unflatten
from phoenix.trace.dsl import SpanQuery as SpanQuery_
from phoenix.trace.schemas import (
    Span as SpanForInsertion,
)
from phoenix.trace.schemas import (
    SpanContext as InsertionSpanContext,
)
from phoenix.trace.schemas import (
    SpanEvent as InternalSpanEvent,
)
from phoenix.trace.schemas import (
    SpanKind,
    SpanStatusCode,
)
from phoenix.utilities.json import encode_df_as_json_string

from .models import V1RoutesBaseModel
from .utils import (
    PaginatedResponseBody,
    RequestBody,
    ResponseBody,
    _get_project_by_identifier,
    add_errors_to_responses,
)

# Default maximum number of spans returned per query by the DSL endpoint.
DEFAULT_SPAN_LIMIT = 1000

# All routes in this module are grouped under the "spans" tag in the OpenAPI schema.
router = APIRouter(tags=["spans"])


class SpanQuery(V1RoutesBaseModel):
    """JSON-serializable form of one span query in the SpanQuery DSL.

    Each field is a clause of the DSL; the dict payloads are passed verbatim
    to ``SpanQuery_.from_dict`` in ``query_spans_handler``.
    """

    select: Optional[dict[str, Any]] = None
    filter: Optional[dict[str, Any]] = None
    explode: Optional[dict[str, Any]] = None
    concat: Optional[dict[str, Any]] = None
    rename: Optional[dict[str, Any]] = None
    index: Optional[dict[str, Any]] = None


class QuerySpansRequestBody(V1RoutesBaseModel):
    """Request body for the span query DSL endpoint (``POST /spans``).

    ``project_name`` and ``stop_time`` are deprecated aliases retained for
    backward compatibility; the handler prefers the query parameter and
    ``end_time`` respectively.
    """

    queries: list[SpanQuery]
    start_time: Optional[datetime] = None
    end_time: Optional[datetime] = None
    limit: int = DEFAULT_SPAN_LIMIT
    root_spans_only: Optional[bool] = None
    orphan_span_as_root_span: bool = True
    project_name: Optional[str] = Field(
        default=None,
        description=(
            "The name of the project to query. "
            "This parameter has been deprecated, use the project_name query parameter instead."
        ),
        deprecated=True,
    )
    stop_time: Optional[datetime] = Field(
        default=None,
        description=(
            "An upper bound on the time to query for. "
            "This parameter has been deprecated, use the end_time parameter instead."
        ),
        deprecated=True,
    )


################################################################################
# Autogenerated OTLP models

# These models are autogenerated from the OTLP v1 protobuf schemas
# Source: https://github.com/open-telemetry/opentelemetry-proto/
# ...blob/main/opentelemetry/proto/trace/v1/trace.proto
# ...blob/main/opentelemetry/proto/common/v1/common.proto
# ...blob/main/opentelemetry/proto/resource/v1/resource.proto

# The autogeneration is done using the `protoc` tool and the `protoc-gen-jsonschema` go plugin
# The generated JSON schemas are converted to Pydantic using `datamodel-codegen`
################################################################################


class OtlpDoubleValue(Enum):
    """String encodings of non-finite doubles in OTLP/JSON.

    ``field_Infinity`` is the autogenerated member name for ``"-Infinity"``
    (a leading ``-`` is not a valid Python identifier).
    """

    Infinity = "Infinity"
    field_Infinity = "-Infinity"
    NaN = "NaN"


class OtlpArrayValue(BaseModel):
    """OTLP ``ArrayValue``: an ordered collection of ``AnyValue`` items."""

    model_config = {"extra": "forbid"}

    values: Optional[list["OtlpAnyValue"]] = Field(
        None,
        description="Array of values. The array may be empty (contain 0 elements).",
    )


class OtlpAnyValue(BaseModel):
    """OTLP ``AnyValue``: a tagged union where at most one field is set.

    ``int_value`` accepts either an int or a decimal string because OTLP/JSON
    encodes 64-bit integers as strings; ``bytes_value`` is constrained to the
    base64 alphabet by its pattern.
    """

    model_config = {"extra": "forbid"}

    array_value: Optional[OtlpArrayValue] = None
    bool_value: Optional[bool] = None
    bytes_value: Optional[Annotated[str, Field(pattern=r"^[A-Za-z0-9+/]*={0,2}$")]] = None
    double_value: Optional[Union[float, OtlpDoubleValue, str]] = None
    int_value: Optional[
        Union[
            Annotated[int, Field(ge=-9223372036854775808, lt=9223372036854775808)],
            Annotated[str, Field(pattern=r"^-?[0-9]+$")],
        ]
    ] = None
    kvlist_value: None = None  # TODO: Add KeyValueList model
    string_value: Optional[str] = None


class OtlpKeyValue(BaseModel):
    """OTLP ``KeyValue``: a single attribute key paired with an ``AnyValue``."""

    model_config = {"extra": "forbid"}

    key: Optional[str] = None
    value: Optional[OtlpAnyValue] = None


class StatusCode(str, Enum):
    """Span status code as stored in the DB.

    Not autogenerated; exists to convert the DB's string status code into the
    OTLP status code integer.
    """

    UNSET = "UNSET"
    OK = "OK"
    ERROR = "ERROR"

    def to_int(self) -> int:
        """Return the OTLP protobuf integer for this status code."""
        if self is StatusCode.OK:
            return 1
        if self is StatusCode.ERROR:
            return 2
        return 0


class OtlpStatus(BaseModel):
    """OTLP ``Status``: an integer status code plus an optional message."""

    model_config = {"extra": "forbid"}

    code: Optional[Annotated[int, Field(ge=-2147483648, le=2147483647)]] = Field(
        None, description="The status code."
    )
    message: Optional[str] = Field(
        None, description="A developer-facing human readable error message."
    )


class OtlpKind(Enum):
    """OTLP span kind names (mirrors the protobuf ``SpanKind`` enum).

    Phoenix emits ``SPAN_KIND_INTERNAL`` by default because OpenInference
    carries its own span kind as an attribute (see ``OtlpSpan.kind``).
    """

    SPAN_KIND_UNSPECIFIED = "SPAN_KIND_UNSPECIFIED"
    SPAN_KIND_INTERNAL = "SPAN_KIND_INTERNAL"
    SPAN_KIND_SERVER = "SPAN_KIND_SERVER"
    SPAN_KIND_CLIENT = "SPAN_KIND_CLIENT"
    SPAN_KIND_PRODUCER = "SPAN_KIND_PRODUCER"
    SPAN_KIND_CONSUMER = "SPAN_KIND_CONSUMER"


class OtlpEvent(BaseModel):
    """OTLP ``Event``: a named, timestamped annotation attached to a span."""

    model_config = {"extra": "forbid"}

    attributes: Optional[list[OtlpKeyValue]] = Field(
        None,
        description=(
            "attributes is a collection of attribute key/value pairs on the event. "
            "Attribute keys MUST be unique (it is not allowed to have more than one "
            "attribute with the same key)."
        ),
    )
    dropped_attributes_count: Optional[Annotated[int, Field(ge=0, le=4294967295)]] = Field(
        None,
        description=(
            "dropped_attributes_count is the number of dropped attributes. If the value is 0, "
            "then no attributes were dropped."
        ),
    )
    name: Optional[str] = Field(
        None,
        description=(
            "name of the event. This field is semantically required to be set to non-empty string."
        ),
    )
    time_unix_nano: Optional[
        Union[
            Annotated[int, Field(ge=0, lt=18446744073709551616)],
            Annotated[str, Field(pattern=r"^[0-9]+$")],
        ]
    ] = Field(
        None,
        description=(
            "time_unix_nano is the time the event occurred. "
            "Value is UNIX Epoch time in nanoseconds since 00:00:00 UTC on 1 January 1970."
        ),
    )


class OtlpSpan(BaseModel):
    """OTLP/JSON representation of a single span.

    Field names and constraints mirror the protobuf ``Span`` message. 64-bit
    nanosecond timestamps accept either ints or decimal strings per the
    OTLP/JSON mapping.
    """

    model_config = {"extra": "forbid"}

    attributes: Optional[list[OtlpKeyValue]] = Field(
        None,
        description=(
            "attributes is a collection of key/value pairs. Note, global attributes like server "
            "name can be set using the resource API. Examples of attributes:\n\n"
            '    "/http/user_agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_2) '
            'AppleWebKit/537.36 (KHTML, like Gecko) Chrome/71.0.3578.98 Safari/537.36"\n'
            '    "/http/server_latency": 300\n'
            '    "example.com/myattribute": true\n'
            '    "example.com/score": 10.239\n\n'
            "The OpenTelemetry API specification further restricts the allowed value types:\n"
            "https://github.com/open-telemetry/opentelemetry-specification/blob/main/"
            "specification/common/README.md#attribute\n"
            "Attribute keys MUST be unique (it is not allowed to have more than one attribute "
            "with the same key)."
        ),
    )
    dropped_attributes_count: Optional[Annotated[int, Field(ge=0, le=4294967295)]] = Field(
        None,
        description=(
            "dropped_attributes_count is the number of attributes that were discarded. Attributes "
            "can be discarded because their keys are too long or because there are too many "
            "attributes. If this value is 0, then no attributes were dropped."
        ),
    )
    dropped_events_count: Optional[Annotated[int, Field(ge=0, le=4294967295)]] = Field(
        None,
        description=(
            "dropped_events_count is the number of dropped events. If the value is 0, then no "
            "events were dropped."
        ),
    )
    dropped_links_count: Optional[Annotated[int, Field(ge=0, le=4294967295)]] = Field(
        None,
        description=(
            "dropped_links_count is the number of dropped links after the maximum size was "
            "enforced. If this value is 0, then no links were dropped."
        ),
    )
    end_time_unix_nano: Optional[
        Union[
            Annotated[int, Field(ge=0, lt=18446744073709551616)],
            Annotated[str, Field(pattern=r"^[0-9]+$")],
        ]
    ] = Field(
        None,
        description=(
            "end_time_unix_nano is the end time of the span. On the client side, this is the time "
            "kept by the local machine where the span execution ends. On the server side, this is "
            "the time when the server application handler stops running.\n"
            "Value is UNIX Epoch time in nanoseconds since 00:00:00 UTC on 1 January 1970.\n\n"
            "This field is semantically required and it is expected that end_time >= start_time."
        ),
    )
    events: Optional[list[OtlpEvent]] = Field(
        None,
        description=("events is a collection of Event items. A span with no events is valid."),
    )
    flags: Optional[Annotated[int, Field(ge=0, le=4294967295)]] = Field(
        None,
        description=(
            "Flags, a bit field.\n\n"
            "Bits 0-7 (8 least significant bits) are the trace flags as defined in W3C Trace "
            "Context specification. To read the 8-bit W3C trace flag, use "
            "`flags & SPAN_FLAGS_TRACE_FLAGS_MASK`.\n\n"
            "See https://www.w3.org/TR/trace-context-2/#trace-flags for the flag definitions.\n\n"
            "Bits 8 and 9 represent the 3 states of whether a span's parent is remote. The states "
            "are (unknown, is not remote, is remote).\n"
            "To read whether the value is known, use "
            "`(flags & SPAN_FLAGS_CONTEXT_HAS_IS_REMOTE_MASK) != 0`.\n"
            "To read whether the span is remote, use "
            "`(flags & SPAN_FLAGS_CONTEXT_IS_REMOTE_MASK) != 0`.\n\n"
            "When creating span messages, if the message is logically forwarded from another "
            "source with an equivalent flags fields (i.e., usually another OTLP span message), the "
            "field SHOULD be copied as-is. If creating from a source that does not have an "
            "equivalent flags field (such as a runtime representation of an OpenTelemetry span), "
            "the high 22 bits MUST be set to zero.\n"
            "Readers MUST NOT assume that bits 10-31 (22 most significant bits) will be zero.\n\n"
            "[Optional]."
        ),
    )
    kind: Optional[Union[OtlpKind, Annotated[int, Field(ge=-2147483648, le=2147483647)]]] = Field(
        OtlpKind.SPAN_KIND_INTERNAL,  # INTERNAL because OpenInference uses its own SpanKind
        description=(
            "Distinguishes between spans generated in a particular context. For example, two spans "
            "with the same name may be distinguished using `CLIENT` (caller) and `SERVER` (callee) "
            "to identify queueing latency associated with the span."
        ),
    )
    links: None = None  # TODO: Add Link model
    name: Optional[str] = Field(
        None,
        description=(
            "A description of the span's operation.\n\n"
            "For example, the name can be a qualified method name or a file name and a line number "
            "where the operation is called. A best practice is to use the same display name at the "
            "same call point in an application. This makes it easier to correlate spans in "
            "different traces.\n\n"
            "This field is semantically required to be set to non-empty string. Empty value is "
            "equivalent to an unknown span name.\n\n"
            "This field is required."
        ),
    )
    parent_span_id: Optional[Annotated[str, Field(pattern=r"^[A-Za-z0-9+/]*={0,2}$")]] = Field(
        None,
        description=(
            "The `span_id` of this span's parent span. If this is a root span, then this field "
            "must be empty. The ID is an 8-byte array."
        ),
    )
    span_id: Optional[Annotated[str, Field(pattern=r"^[A-Za-z0-9+/]*={0,2}$")]] = Field(
        None,
        description=(
            "A unique identifier for a span within a trace, assigned when the span is created. The "
            "ID is an 8-byte array. An ID with all zeroes OR of length other than 8 bytes is "
            "considered invalid (empty string in OTLP/JSON is zero-length and thus is also "
            "invalid).\n\n"
            "This field is required."
        ),
    )
    start_time_unix_nano: Optional[
        Union[
            Annotated[int, Field(ge=0, lt=18446744073709551616)],
            Annotated[str, Field(pattern=r"^[0-9]+$")],
        ]
    ] = Field(
        None,
        description=(
            "start_time_unix_nano is the start time of the span. On the client side, this is the "
            "time kept by the local machine where the span execution starts. On the server side, "
            "this is the time when the server's application handler starts running.\n"
            "Value is UNIX Epoch time in nanoseconds since 00:00:00 UTC on 1 January 1970.\n\n"
            "This field is semantically required and it is expected that end_time >= start_time."
        ),
    )
    status: Optional[OtlpStatus] = Field(
        None,
        description=(
            "An optional final status for this span. Semantically when Status isn't set, it means "
            "span's status code is unset, i.e. assume STATUS_CODE_UNSET (code = 0)."
        ),
    )
    trace_id: Optional[Annotated[str, Field(pattern=r"^[A-Za-z0-9+/]*={0,2}$")]] = Field(
        None,
        description=(
            "A unique identifier for a trace. All spans from the same trace share the same "
            "`trace_id`. The ID is a 16-byte array. An ID with all zeroes OR of length other than "
            "16 bytes is considered invalid (empty string in OTLP/JSON is zero-length and thus is "
            "also invalid).\n\n"
            "This field is required."
        ),
    )
    trace_state: Optional[str] = Field(
        None,
        description=(
            "trace_state conveys information about request position in multiple distributed "
            "tracing graphs. It is a trace_state in w3c-trace-context format: "
            "https://www.w3.org/TR/trace-context/#tracestate-header\n"
            "See also https://github.com/w3c/distributed-tracing for more details about this "
            "field."
        ),
    )


class OtlpSpansResponseBody(PaginatedResponseBody[OtlpSpan]):
    """Paginated response where each span follows OTLP JSON structure.

    ``next_cursor`` is a Span Global ID, or None when there are no more pages.
    """

    pass


################################################################################
# Phoenix Span Models
################################################################################


class SpanContext(V1RoutesBaseModel):
    """Identifiers that locate a span within a trace."""

    trace_id: str = Field(description="OpenTelemetry trace ID")
    span_id: str = Field(description="OpenTelemetry span ID")


class SpanEvent(V1RoutesBaseModel):
    """A named, timestamped event attached to a span (Phoenix shape)."""

    name: str = Field(description="Name of the event")
    timestamp: datetime = Field(description="When the event occurred")
    attributes: dict[str, Any] = Field(default_factory=dict, description="Event attributes")


class Span(V1RoutesBaseModel):
    """Phoenix's own span representation as returned by the REST API.

    Unlike ``OtlpSpan``, attributes are a flattened dict and ``span_kind`` is
    the OpenInference span kind (not the OTLP kind).
    """

    id: str = Field(
        default="", description="Span Global ID, distinct from the OpenTelemetry span ID"
    )
    name: str = Field(description="Name of the span operation")
    context: SpanContext = Field(description="Span context containing trace_id and span_id")
    span_kind: str = Field(description="Type of work that the span encapsulates")
    parent_id: Optional[str] = Field(
        default=None, description="OpenTelemetry span ID of the parent span"
    )
    start_time: datetime = Field(description="Start time of the span")
    end_time: datetime = Field(description="End time of the span")
    status_code: str = Field(description="Status code of the span")
    status_message: str = Field(default="", description="Status message")
    attributes: dict[str, Any] = Field(default_factory=dict, description="Span attributes")
    events: list[SpanEvent] = Field(default_factory=list, description="Span events")


class SpansResponseBody(PaginatedResponseBody[Span]):
    """Paginated response whose ``data`` items are Phoenix ``Span`` models."""

    pass


# TODO: Add property details to SpanQuery schema
@router.post(
    "/spans",
    operation_id="querySpans",
    summary="Query spans with query DSL",
    responses=add_errors_to_responses([404, 422]),
    include_in_schema=False,
)
async def query_spans_handler(
    request: Request,
    request_body: QuerySpansRequestBody,
    accept: Optional[str] = Header(None),
    project_name: Optional[str] = Query(
        default=None, description="The project name to get evaluations from"
    ),
) -> Response:
    queries = request_body.queries
    project_name = (
        project_name
        or request.query_params.get("project-name")  # for backward compatibility
        or request.headers.get(
            "project-name"
        )  # read from headers/payload for backward-compatibility
        or request_body.project_name
        or DEFAULT_PROJECT_NAME
    )
    end_time = request_body.end_time or request_body.stop_time
    try:
        span_queries = [SpanQuery_.from_dict(query.model_dump()) for query in queries]
    except Exception as e:
        raise HTTPException(
            detail=f"Invalid query: {e}",
            status_code=422,
        )

    async with request.app.state.db() as session:
        results: list[pd.DataFrame] = []
        for query in span_queries:
            df = await session.run_sync(
                query,
                project_name=project_name,
                start_time=normalize_datetime(
                    request_body.start_time,
                    timezone.utc,
                ),
                end_time=normalize_datetime(
                    end_time,
                    timezone.utc,
                ),
                limit=request_body.limit,
                root_spans_only=request_body.root_spans_only,
                orphan_span_as_root_span=request_body.orphan_span_as_root_span,
            )
            results.append(df)
    if not results:
        raise HTTPException(status_code=404)

    if accept == "application/json":
        boundary_token = token_urlsafe(64)
        return StreamingResponse(
            content=_json_multipart(results, boundary_token),
            media_type=f"multipart/mixed; boundary={boundary_token}",
        )

    async def content() -> AsyncIterator[bytes]:
        for result in results:
            yield df_to_bytes(result)

    return StreamingResponse(
        content=content(),
        media_type="application/x-pandas-arrow",
    )


async def _json_multipart(
    results: list[pd.DataFrame],
    boundary_token: str,
) -> AsyncIterator[str]:
    """Stream DataFrames as the parts of a multipart/mixed body.

    Each DataFrame is JSON-encoded off the event loop (in the default
    executor) and emitted as one part; a closing boundary terminates the body.
    """
    loop = get_running_loop()
    for dataframe in results:
        yield f"--{boundary_token}\r\n"
        yield "Content-Type: application/json\r\n\r\n"
        yield await loop.run_in_executor(None, encode_df_as_json_string, dataframe)
        yield "\r\n"
    yield f"--{boundary_token}--\r\n"


def _to_array_value(values: list[Any]) -> OtlpArrayValue:
    """Convert a list of values to an OtlpArrayValue.

    If the values are not all of the same type, they will be coerced to strings.
    Nested lists/tuples are not allowed and will be stringified.
    """
    if not values:
        return OtlpArrayValue(values=[])

    # Stringify any nested list/tuple so the array stays flat.
    flattened = [str(item) if isinstance(item, (list, tuple)) else item for item in values]

    # Homogeneous arrays convert element-by-element; mixed-type arrays are
    # coerced entirely to strings.
    head_type = type(flattened[0])
    if all(isinstance(item, head_type) for item in flattened):
        converted = [_to_any_value(item) for item in flattened]
    else:
        converted = [OtlpAnyValue(string_value=str(item)) for item in flattened]
    return OtlpArrayValue(values=converted)


def _to_any_value(value: Any) -> OtlpAnyValue:
    """Convert a Python value into an OTLP ``AnyValue``.

    Booleans are checked before ints because ``bool`` is a subclass of
    ``int``. Non-finite floats are encoded as the strings "NaN", "Infinity",
    or "-Infinity" per the OTLP/JSON mapping (see ``OtlpDoubleValue``).
    Dicts are currently dropped because kvlist is not yet modeled; any other
    type is stringified.
    """
    if value is None:
        return OtlpAnyValue()
    elif isinstance(value, bool):
        return OtlpAnyValue(bool_value=value)
    elif isinstance(value, int):
        return OtlpAnyValue(int_value=value)
    elif isinstance(value, float):
        # Bug fix: the previous membership test `value in (inf, -inf, nan)`
        # could never match NaN (NaN != NaN), and str() would have produced
        # "inf"/"-inf" rather than the canonical OTLP/JSON spellings declared
        # by OtlpDoubleValue.
        if math.isnan(value):
            return OtlpAnyValue(double_value="NaN")
        if math.isinf(value):
            return OtlpAnyValue(double_value="Infinity" if value > 0 else "-Infinity")
        return OtlpAnyValue(double_value=value)
    elif isinstance(value, str):
        return OtlpAnyValue(string_value=value)
    elif isinstance(value, bytes):
        # NOTE(review): bytes are hex-encoded here; hex matches the field's
        # base64-alphabet pattern, but OTLP/JSON specifies base64 — confirm
        # consumers expect hex before changing.
        return OtlpAnyValue(bytes_value=value.hex())
    elif isinstance(value, (list, tuple)):
        return OtlpAnyValue(array_value=_to_array_value(list(value)))
    elif isinstance(value, dict):
        # TODO: Implement kvlist_value when KeyValueList model is added
        return OtlpAnyValue()
    else:
        # For any other type, convert to string
        return OtlpAnyValue(string_value=str(value))


@router.get(
    "/projects/{project_identifier}/spans/otlpv1",
    operation_id="spanSearch",
    summary="Search spans with simple filters (no DSL)",
    description="Return spans within a project filtered by time range. "
    "Supports cursor-based pagination.",
    responses=add_errors_to_responses([404, 422]),
)
async def span_search_otlpv1(
    request: Request,
    project_identifier: str = Path(
        description=(
            "The project identifier: either project ID or project name. If using a project name, "
            "it cannot contain slash (/), question mark (?), or pound sign (#) characters."
        )
    ),
    cursor: Optional[str] = Query(default=None, description="Pagination cursor (Span Global ID)"),
    limit: int = Query(default=100, gt=0, le=1000, description="Maximum number of spans to return"),
    start_time: Optional[datetime] = Query(default=None, description="Inclusive lower bound time"),
    end_time: Optional[datetime] = Query(default=None, description="Exclusive upper bound time"),
) -> OtlpSpansResponseBody:
    """Search spans with minimal filters instead of the old SpanQuery DSL.

    Returns spans as OTLP-JSON-shaped objects, newest (highest rowid) first.
    Pagination fetches ``limit + 1`` rows; the extra row, if present,
    becomes the ``next_cursor``.

    Raises:
        HTTPException: 404 if the project does not exist; 422 if ``cursor``
            is not a valid Span Global ID.
    """

    async with request.app.state.db() as session:
        project = await _get_project_by_identifier(session, project_identifier)

    project_id: int = project.id
    # Newest spans first; the cursor condition below relies on this ordering.
    order_by = [models.Span.id.desc()]

    stmt = (
        select(
            models.Span,
            models.Trace.trace_id,
        )
        .join(models.Trace, onclause=models.Trace.id == models.Span.trace_rowid)
        .where(models.Trace.project_rowid == project_id)
        .order_by(*order_by)
    )

    # Time filters apply to the span start time: start_time is inclusive,
    # end_time is exclusive; both are normalized to UTC.
    if start_time:
        stmt = stmt.where(models.Span.start_time >= normalize_datetime(start_time, timezone.utc))
    if end_time:
        stmt = stmt.where(models.Span.start_time < normalize_datetime(end_time, timezone.utc))

    if cursor:
        try:
            # The cursor is the Span Global ID of this page's first row; with
            # descending order that translates to rowid <= cursor_rowid.
            cursor_rowid = int(GlobalID.from_id(cursor).node_id)
            stmt = stmt.where(models.Span.id <= cursor_rowid)
        except Exception:
            raise HTTPException(status_code=422, detail="Invalid cursor")

    # Fetch one extra row to detect whether another page exists.
    stmt = stmt.limit(limit + 1)

    async with request.app.state.db() as session:
        rows: list[tuple[models.Span, str]] = [r async for r in await session.stream(stmt)]

    if not rows:
        return OtlpSpansResponseBody(next_cursor=None, data=[])

    next_cursor: Optional[str] = None
    if len(rows) == limit + 1:
        *rows, extra = rows  # extra is first item of next page
        span_extra, _ = extra
        next_cursor = str(GlobalID("Span", str(span_extra.id)))

    # Convert ORM rows -> OTLP-style spans
    result_spans: list[OtlpSpan] = []
    for span_orm, trace_id in rows:
        try:
            status_code_enum = StatusCode(span_orm.status_code or "UNSET")
        except ValueError:
            # Unknown status codes in the DB degrade to UNSET.
            status_code_enum = StatusCode.UNSET

        # Convert attributes to KeyValue list
        attributes_kv: list[OtlpKeyValue] = []
        if span_orm.attributes:
            for k, v in flatten(span_orm.attributes or {}, recurse_on_sequence=True):
                attributes_kv.append(OtlpKeyValue(key=k, value=_to_any_value(v)))

        # Convert events to OTLP Event list
        events: Optional[list[OtlpEvent]] = None
        if span_orm.events:
            events = []
            for event in span_orm.events:
                event_attributes: list[OtlpKeyValue] = []
                if event.get("attributes"):
                    for k, v in flatten(event["attributes"], recurse_on_sequence=True):
                        event_attributes.append(OtlpKeyValue(key=k, value=_to_any_value(v)))

                # Convert event timestamp to nanoseconds
                event_time = event.get("timestamp")
                time_unix_nano = None
                if event_time:
                    if isinstance(event_time, datetime):
                        time_unix_nano = int(event_time.timestamp() * 1_000_000_000)
                    elif isinstance(event_time, str):
                        try:
                            dt = datetime.fromisoformat(event_time)
                            time_unix_nano = int(dt.timestamp() * 1_000_000_000)
                        except ValueError:
                            # Unparseable string timestamps are dropped, not guessed.
                            pass
                    elif isinstance(event_time, (int, float)):
                        # NOTE(review): numeric timestamps are assumed to already be
                        # in nanoseconds — confirm against whatever writes events.
                        time_unix_nano = int(event_time)

                events.append(
                    OtlpEvent(
                        name=event.get("name"),
                        attributes=event_attributes,
                        time_unix_nano=time_unix_nano,
                        dropped_attributes_count=event.get("dropped_attributes_count"),
                    )
                )

        start_ns = (
            int(span_orm.start_time.timestamp() * 1_000_000_000) if span_orm.start_time else None
        )
        end_ns = int(span_orm.end_time.timestamp() * 1_000_000_000) if span_orm.end_time else None

        result_spans.append(
            OtlpSpan(
                trace_id=trace_id,
                span_id=span_orm.span_id,
                parent_span_id=span_orm.parent_id,
                name=span_orm.name,
                start_time_unix_nano=start_ns,
                end_time_unix_nano=end_ns,
                attributes=attributes_kv,
                events=events,
                status=OtlpStatus(
                    code=status_code_enum.to_int(), message=span_orm.status_message or None
                ),
            )
        )

    return OtlpSpansResponseBody(next_cursor=next_cursor, data=result_spans)


@router.get(
    "/projects/{project_identifier}/spans",
    operation_id="getSpans",
    summary="List spans with simple filters (no DSL)",
    description="Return spans within a project filtered by time range. "
    "Supports cursor-based pagination.",
    responses=add_errors_to_responses([404, 422]),
)
async def span_search(
    request: Request,
    project_identifier: str = Path(
        description=(
            "The project identifier: either project ID or project name. If using a project name, "
            "it cannot contain slash (/), question mark (?), or pound sign (#) characters."
        )
    ),
    cursor: Optional[str] = Query(default=None, description="Pagination cursor (Span Global ID)"),
    limit: int = Query(default=100, gt=0, le=1000, description="Maximum number of spans to return"),
    start_time: Optional[datetime] = Query(default=None, description="Inclusive lower bound time"),
    end_time: Optional[datetime] = Query(default=None, description="Exclusive upper bound time"),
) -> SpansResponseBody:
    """List spans within a project as Phoenix ``Span`` models.

    Mirrors ``span_search_otlpv1`` but returns Phoenix's own span shape
    instead of OTLP JSON. Spans are ordered newest first; pagination fetches
    ``limit + 1`` rows and uses the extra row as ``next_cursor``.

    Raises:
        HTTPException: 404 if the project does not exist; 422 if ``cursor``
            is not a valid Span Global ID.
    """
    async with request.app.state.db() as session:
        project = await _get_project_by_identifier(session, project_identifier)

    project_id: int = project.id
    # Newest spans first; the cursor condition below relies on this ordering.
    order_by = [models.Span.id.desc()]

    stmt = (
        select(
            models.Span,
            models.Trace.trace_id,
        )
        .join(models.Trace, onclause=models.Trace.id == models.Span.trace_rowid)
        .where(models.Trace.project_rowid == project_id)
        .order_by(*order_by)
    )

    # Time filters apply to the span start time: start_time is inclusive,
    # end_time is exclusive; both are normalized to UTC.
    if start_time:
        stmt = stmt.where(models.Span.start_time >= normalize_datetime(start_time, timezone.utc))
    if end_time:
        stmt = stmt.where(models.Span.start_time < normalize_datetime(end_time, timezone.utc))

    if cursor:
        try:
            # The cursor is the Span Global ID of this page's first row; with
            # descending order that translates to rowid <= cursor_rowid.
            cursor_rowid = int(GlobalID.from_id(cursor).node_id)
        except Exception:
            raise HTTPException(status_code=422, detail="Invalid cursor")
        stmt = stmt.where(models.Span.id <= cursor_rowid)

    # Fetch one extra row to detect whether another page exists.
    stmt = stmt.limit(limit + 1)

    async with request.app.state.db() as session:
        rows: list[tuple[models.Span, str]] = [r async for r in await session.stream(stmt)]

    if not rows:
        return SpansResponseBody(next_cursor=None, data=[])

    next_cursor: Optional[str] = None
    if len(rows) == limit + 1:
        *rows, extra = rows  # extra is first item of next page
        span_extra, _ = extra
        next_cursor = str(GlobalID("Span", str(span_extra.id)))

    # Convert ORM rows -> Phoenix spans
    result_spans: list[Span] = []
    for span_orm, trace_id in rows:
        # Convert events to Phoenix Event list
        events: list[SpanEvent] = []
        for event in span_orm.events:
            event_time = event.get("timestamp")
            parsed_time = None

            # Normalize the stored timestamp (datetime, ISO string, or epoch
            # number) to an aware UTC datetime, falling back to "now" rather
            # than dropping the event.
            # NOTE(review): an unrecognized timestamp type would leave
            # parsed_time as None and fail SpanEvent validation — confirm the
            # DB only ever stores the three types handled below.
            if event_time:
                if isinstance(event_time, datetime):
                    parsed_time = normalize_datetime(event_time, timezone.utc)
                elif isinstance(event_time, str):
                    try:
                        naive_time = datetime.fromisoformat(event_time)
                        parsed_time = normalize_datetime(naive_time, timezone.utc)
                    except ValueError:
                        # If ISO format fails, try to parse as timestamp
                        try:
                            parsed_time = datetime.fromtimestamp(float(event_time), tz=timezone.utc)
                        except (ValueError, TypeError):
                            parsed_time = datetime.now(timezone.utc)  # fallback
                elif isinstance(event_time, (int, float)):
                    try:
                        # Assume nanoseconds if very large, otherwise seconds
                        if event_time > 1e12:  # nanoseconds
                            parsed_time = datetime.fromtimestamp(
                                event_time / 1_000_000_000, tz=timezone.utc
                            )
                        else:  # seconds
                            parsed_time = datetime.fromtimestamp(event_time, tz=timezone.utc)
                    except (ValueError, OSError):
                        parsed_time = datetime.now(timezone.utc)  # fallback
            else:
                parsed_time = datetime.now(timezone.utc)  # fallback

            events.append(
                SpanEvent(
                    name=event.get("name", ""),
                    timestamp=parsed_time,
                    attributes=event.get("attributes", {}),
                )
            )

        # Flatten nested attribute dicts into dotted keys; the OpenInference
        # span kind travels as an attribute and is surfaced as its own field.
        attributes = {
            k: v for k, v in flatten(span_orm.attributes or dict(), recurse_on_sequence=True)
        }
        openinference_span_kind = attributes.pop("openinference.span.kind", "UNKNOWN")

        result_spans.append(
            Span(
                id=str(GlobalID("Span", str(span_orm.id))),
                name=span_orm.name or "",
                context=SpanContext(
                    trace_id=trace_id,
                    span_id=span_orm.span_id or "",
                ),
                span_kind=openinference_span_kind,
                parent_id=span_orm.parent_id,
                start_time=span_orm.start_time,
                end_time=span_orm.end_time,
                status_code=span_orm.status_code,
                status_message=span_orm.status_message or "",
                attributes=attributes,
                events=events,
            )
        )

    return SpansResponseBody(next_cursor=next_cursor, data=result_spans)


@router.get("/spans", include_in_schema=False, deprecated=True)
async def get_spans_handler(
    request: Request,
    request_body: QuerySpansRequestBody,
    project_name: Optional[str] = Query(
        default=None, description="The project name to get evaluations from"
    ),
) -> Response:
    return await query_spans_handler(request, request_body, project_name)


# Request envelope for POST /span_annotations; `data` carries the annotations to upsert.
# (Comment instead of docstring so the generated OpenAPI schema is unchanged.)
class AnnotateSpansRequestBody(RequestBody[list[SpanAnnotationData]]):
    data: list[SpanAnnotationData]


# Response item: the relay GlobalID of one inserted span annotation.
class InsertedSpanAnnotation(V1RoutesBaseModel):
    id: str = Field(description="The ID of the inserted span annotation")


# Response envelope for POST /span_annotations; `data` lists inserted annotation IDs
# (empty when the request was queued asynchronously).
class AnnotateSpansResponseBody(ResponseBody[list[InsertedSpanAnnotation]]):
    pass


@router.post(
    "/span_annotations",
    dependencies=[Depends(is_not_locked)],
    operation_id="annotateSpans",
    summary="Create span annotations",
    responses=add_errors_to_responses([{"status_code": 404, "description": "Span not found"}]),
    response_description="Span annotations inserted successfully",
    include_in_schema=True,
)
async def annotate_spans(
    request: Request,
    request_body: AnnotateSpansRequestBody,
    sync: bool = Query(default=False, description="If true, fulfill request synchronously."),
) -> AnnotateSpansResponseBody:
    # Upsert span annotations, either queued for background insertion
    # (sync=False, the default) or written to the database before returning
    # (sync=True). In the async path the response `data` is always empty.
    if not request_body.data:
        return AnnotateSpansResponseBody(data=[])

    # Attribute the annotations to the authenticated user, when auth is enabled.
    user_id: Optional[int] = None
    if request.app.state.authentication_enabled and isinstance(request.user, PhoenixUser):
        user_id = int(request.user.identity)

    # Annotations named "note" are not supported by this endpoint: drop them
    # and warn rather than failing the whole request.
    span_annotations = request_body.data
    filtered_span_annotations = list(filter(lambda d: d.name != "note", span_annotations))
    if len(filtered_span_annotations) != len(span_annotations):
        warnings.warn(
            (
                "Span annotations with the name 'note' are not supported in this endpoint. "
                "They will be ignored."
            ),
            UserWarning,
        )
    precursors = [d.as_precursor(user_id=user_id) for d in filtered_span_annotations]
    # Async path: hand the precursors to the background queue and return immediately.
    if not sync:
        await request.state.enqueue_annotations(*precursors)
        return AnnotateSpansResponseBody(data=[])

    # Sync path: resolve OTel span IDs to span row IDs; every referenced span must exist.
    span_ids = {p.span_id for p in precursors}
    async with request.app.state.db() as session:
        existing_spans = {
            span_id: id_
            async for span_id, id_ in await session.stream(
                select(models.Span.span_id, models.Span.id).filter(
                    models.Span.span_id.in_(span_ids)
                )
            )
        }

        missing_span_ids = span_ids - set(existing_spans.keys())
        if missing_span_ids:
            raise HTTPException(
                detail=f"Spans with IDs {', '.join(missing_span_ids)} do not exist.",
                status_code=404,
            )
        inserted_ids = []
        dialect = SupportedSQLDialect(session.bind.dialect.name)
        for p in precursors:
            # Dialect-aware upsert keyed by (name, span_rowid, identifier),
            # returning the row ID whether inserted or updated.
            values = dict(as_kv(p.as_insertable(existing_spans[p.span_id]).row))
            span_annotation_id = await session.scalar(
                insert_on_conflict(
                    values,
                    dialect=dialect,
                    table=models.SpanAnnotation,
                    unique_by=("name", "span_rowid", "identifier"),
                ).returning(models.SpanAnnotation.id)
            )
            inserted_ids.append(span_annotation_id)
    # Emitted after the session context exits (i.e. after commit) so listeners
    # observe committed data.
    request.state.event_queue.put(SpanAnnotationInsertEvent(tuple(inserted_ids)))
    return AnnotateSpansResponseBody(
        data=[
            InsertedSpanAnnotation(id=str(GlobalID("SpanAnnotation", str(id_))))
            for id_ in inserted_ids
        ]
    )


# Request envelope for POST /projects/{project_identifier}/spans; `data` is the span batch.
class CreateSpansRequestBody(RequestBody[list[Span]]):
    data: list[Span]


# Accepted (202) response for span creation: counts of spans received vs. queued.
class CreateSpansResponseBody(V1RoutesBaseModel):
    total_received: int = Field(description="Total number of spans received")
    total_queued: int = Field(description="Number of spans successfully queued for insertion")


@router.post(
    "/projects/{project_identifier}/spans",
    dependencies=[Depends(is_not_locked)],
    operation_id="createSpans",
    summary="Create spans",
    description=(
        "Submit spans to be inserted into a project. If any spans are invalid or "
        "duplicates, no spans will be inserted."
    ),
    responses=add_errors_to_responses([404, 400]),
    status_code=202,
)
async def create_spans(
    request: Request,
    request_body: CreateSpansRequestBody,
    project_identifier: str = Path(
        description=(
            "The project identifier: either project ID or project name. If using a project name, "
            "it cannot contain slash (/), question mark (?), or pound sign (#) characters."
        )
    ),
) -> CreateSpansResponseBody:
    """
    Validate a batch of spans and queue them for insertion into a project.

    All-or-nothing semantics: if any span is a duplicate (already in the
    database, or repeated within this request) or fails conversion, the whole
    batch is rejected with a 400 whose detail enumerates the offending spans.

    Raises:
        HTTPException(404): if a project GlobalID does not resolve to a project.
        HTTPException(400): if the batch contains duplicate or invalid spans.
    """

    def convert_api_span_for_insertion(api_span: Span) -> SpanForInsertion:
        """
        Convert an API Span to phoenix.trace.schemas.Span.

        Unrecognized span kinds fall back to UNKNOWN and unrecognized status
        codes to UNSET. The API `id` field is ignored (server-generated).
        """
        try:
            span_kind = SpanKind(api_span.span_kind.upper())
        except ValueError:
            span_kind = SpanKind.UNKNOWN

        try:
            status_code = SpanStatusCode(api_span.status_code.upper())
        except ValueError:
            status_code = SpanStatusCode.UNSET

        # Only events carrying a timestamp are representable internally.
        internal_events: list[InternalSpanEvent] = []
        for event in api_span.events:
            if event.timestamp:
                internal_events.append(
                    InternalSpanEvent(
                        name=event.name, timestamp=event.timestamp, attributes=event.attributes
                    )
                )

        # Add back the openinference.span.kind attribute since it's stored separately in the API
        attributes = dict(api_span.attributes)
        attributes["openinference.span.kind"] = api_span.span_kind
        attributes = unflatten(attributes.items())

        return SpanForInsertion(
            name=api_span.name,
            context=InsertionSpanContext(
                trace_id=api_span.context.trace_id, span_id=api_span.context.span_id
            ),
            span_kind=span_kind,
            parent_id=api_span.parent_id,
            start_time=api_span.start_time,
            end_time=api_span.end_time,
            status_code=status_code,
            status_message=api_span.status_message,
            attributes=attributes,
            events=internal_events,
            conversation=None,  # Unused
        )

    # Resolve the project: a parseable relay GlobalID must resolve to an
    # existing project (404 otherwise); anything else is treated as a name.
    try:
        id_ = from_global_id_with_expected_type(
            GlobalID.from_id(project_identifier),
            "Project",
        )
    except Exception:
        project_name = project_identifier
    else:
        stmt = select(models.Project).filter_by(id=id_)
        async with request.app.state.db() as session:
            project = await session.scalar(stmt)
        if project is None:
            raise HTTPException(
                status_code=HTTP_404_NOT_FOUND,
                detail=f"Project with ID {project_identifier} not found",
            )
        project_name = project.name

    total_received = len(request_body.data)
    duplicate_spans: list[dict[str, str]] = []
    invalid_spans: list[dict[str, str]] = []
    spans_to_queue: list[tuple[SpanForInsertion, str]] = []

    # Look up which of the submitted span IDs already exist in the database.
    span_ids = [span.context.span_id for span in request_body.data]
    async with request.app.state.db() as session:
        existing_result = await session.execute(
            select(models.Span.span_id).where(models.Span.span_id.in_(span_ids))
        )
        existing_span_ids: set[str] = {row[0] for row in existing_result}

    # Tracks IDs already seen in this request so intra-request duplicates are
    # also rejected, matching the route's documented all-or-nothing contract.
    seen_span_ids: set[str] = set()
    for api_span in request_body.data:
        span_id = api_span.context.span_id
        # Reject spans that exist in the DB or appear more than once in this batch.
        if span_id in existing_span_ids or span_id in seen_span_ids:
            duplicate_spans.append(
                {
                    "span_id": span_id,
                    "trace_id": api_span.context.trace_id,
                }
            )
            continue
        seen_span_ids.add(span_id)

        try:
            span_for_insertion = convert_api_span_for_insertion(api_span)
            spans_to_queue.append((span_for_insertion, project_name))
        except Exception as e:
            invalid_spans.append(
                {
                    "span_id": span_id,
                    "trace_id": api_span.context.trace_id,
                    "error": str(e),
                }
            )

    # If there are any duplicates or invalid spans, reject the entire request
    if duplicate_spans or invalid_spans:
        error_detail = {
            "error": "Request contains invalid or duplicate spans",
            "total_received": total_received,
            "total_queued": 0,  # No spans are queued when there are validation errors
            "total_duplicates": len(duplicate_spans),
            "total_invalid": len(invalid_spans),
            "duplicate_spans": duplicate_spans,
            "invalid_spans": invalid_spans,
        }
        raise HTTPException(
            status_code=400,
            detail=json.dumps(error_detail),
        )

    # All spans are valid, queue them all
    for span_for_insertion, target_project_name in spans_to_queue:
        await request.state.enqueue_span(span_for_insertion, target_project_name)

    return CreateSpansResponseBody(
        total_received=total_received,
        total_queued=len(spans_to_queue),
    )


@router.delete(
    "/spans/{span_identifier}",
    dependencies=[Depends(is_not_locked)],
    operation_id="deleteSpan",
    summary="Delete a span by span_identifier",
    description=(
        """
        Delete a single span by identifier.

        **Important**: This operation deletes ONLY the specified span itself and does NOT
        delete its descendants/children. All child spans will remain in the trace and
        become orphaned (their parent_id will point to a non-existent span).

        Behavior:
        - Deletes only the target span (preserves all descendant spans)
        - If this was the last span in the trace, the trace record is also deleted
        - If the deleted span had a parent, its cumulative metrics (error count, token counts)
          are subtracted from all ancestor spans in the chain

        **Note**: This operation is irreversible and may create orphaned spans.
        """
    ),
    responses=add_errors_to_responses([404]),
    status_code=204,  # No Content for successful deletion
)
async def delete_span(
    request: Request,
    span_identifier: str = Path(
        description="The span identifier: either a relay GlobalID or OpenTelemetry span_id"
    ),
) -> None:
    """
    Delete a single span by identifier.

    This operation deletes ONLY the specified span and preserves all its descendants,
    which may become orphaned (parent_id pointing to non-existent span).

    Steps:
    1. Find the target span to delete (supports both GlobalID and OpenTelemetry span_id)
    2. Delete only the target span (all descendants remain untouched)
    3. If trace becomes empty, delete the trace record
    4. If deleted span had a parent, subtract its cumulative metrics from ancestor chain
    5. Return 204 No Content on success

    Args:
        request: FastAPI request object
        span_identifier: Either relay GlobalID or OpenTelemetry span_id

    Raises:
        HTTPException(404): If span not found

    Returns:
        None (204 No Content status)
    """
    async with request.app.state.db() as session:
        # Determine the predicate for deletion based on identifier type
        try:
            # Identifier parses as a relay GlobalID -> match on the primary key.
            span_rowid = from_global_id_with_expected_type(
                GlobalID.from_id(span_identifier),
                "Span",
            )
            predicate = models.Span.id == span_rowid
            error_detail = f"Span with relay ID '{span_identifier}' not found"
        except Exception:
            # Otherwise treat it as an OpenTelemetry span_id.
            predicate = models.Span.span_id == span_identifier
            error_detail = f"Span with span_id '{span_identifier}' not found"

        # Delete the span and return its data in one operation
        # (DELETE ... RETURNING yields the deleted row, or None if no match).
        target_span = await session.scalar(
            sa.delete(models.Span).where(predicate).returning(models.Span)
        )

        if target_span is None:
            raise HTTPException(
                status_code=404,
                detail=error_detail,
            )

        # Store values needed for later operations
        # (read before further statements touch the session).
        trace_rowid = target_span.trace_rowid
        parent_id = target_span.parent_id
        cumulative_error_count = target_span.cumulative_error_count
        cumulative_llm_token_count_prompt = target_span.cumulative_llm_token_count_prompt
        cumulative_llm_token_count_completion = target_span.cumulative_llm_token_count_completion

        # Step 2: Check if trace is empty—if so, delete the trace record
        # (NOT EXISTS over remaining spans, evaluated after the delete above).
        trace_is_empty = await session.scalar(
            select(~exists().where(models.Span.trace_rowid == trace_rowid))
        )

        if trace_is_empty:
            # Trace is empty, delete the trace record
            await session.execute(sa.delete(models.Trace).where(models.Trace.id == trace_rowid))

        # Step 3: Propagate negative cumulative values up ancestor chain if parent_id is not null
        if not trace_is_empty and parent_id is not None:
            # Use the helper function to get all ancestor span IDs
            ancestor_ids_query = get_ancestor_span_rowids(parent_id)

            # Propagate negative cumulative values to ancestors
            # so ancestor totals no longer include the deleted span.
            await session.execute(
                update(models.Span)
                .where(models.Span.id.in_(ancestor_ids_query))
                .values(
                    cumulative_error_count=(
                        models.Span.cumulative_error_count - cumulative_error_count
                    ),
                    cumulative_llm_token_count_prompt=(
                        models.Span.cumulative_llm_token_count_prompt
                        - cumulative_llm_token_count_prompt
                    ),
                    cumulative_llm_token_count_completion=(
                        models.Span.cumulative_llm_token_count_completion
                        - cumulative_llm_token_count_completion
                    ),
                )
            )
    # Trigger cache invalidation event
    # (emitted after the session context exits, i.e. after commit).
    request.state.event_queue.put(SpanDeleteEvent((trace_rowid,)))

    return None
