from __future__ import annotations

import logging
from collections.abc import Collection, Mapping, MutableMapping, Sequence
from datetime import datetime, timezone
from typing import TYPE_CHECKING, Any
from uuid import uuid4

import urllib3
from sentry_protos.snuba.v1.trace_item_pb2 import TraceItem
from urllib3.fields import RequestField
from urllib3.filepost import encode_multipart_formdata

from sentry import quotas
from sentry.conf.types.kafka_definition import Topic, get_topic_codec
from sentry.eventstream.base import EventStream, GroupStates
from sentry.eventstream.item_helpers import serialize_event_data_as_item
from sentry.eventstream.types import EventStreamEventType
from sentry.models.project import Project
from sentry.options.rollout import in_rollout_group
from sentry.services.eventstore.models import GroupEvent
from sentry.utils import json, metrics, snuba
from sentry.utils.eap import EAP_ITEMS_INSERT_ENDPOINT
from sentry.utils.safe import get_path
from sentry.utils.sdk import set_current_event_project

KW_SKIP_SEMANTIC_PARTITIONING = "skip_semantic_partitioning"


logger = logging.getLogger(__name__)

if TYPE_CHECKING:
    from sentry.services.eventstore.models import Event


# Version 1 format: (1, TYPE, [...REST...])
#   Insert: (1, 'insert', {
#       ...event json...
#   }, {
#       ...state for post-processing...
#   })
#
#   Mutations that *should be ignored*: (1, ('delete_groups'|'unmerge'|'merge'), {...})
#
#   In short, for protocol version 1 only messages starting with (1, 'insert', ...)
#   should be processed.

# Version 2 format: (2, TYPE, [...REST...])
#   Insert: (2, 'insert', {
#       ...event json...
#   }, {
#       ...state for post-processing...
#   })
#   Delete Groups: (2, '(start_delete_groups|end_delete_groups)', {
#       'transaction_id': uuid,
#       'project_id': id,
#       'group_ids': [id1, id2, id3],
#       'datetime': timestamp,
#   })
#   Merge: (2, '(start_merge|end_merge)', {
#       'transaction_id': uuid,
#       'project_id': id,
#       'previous_group_ids': [id1, id2],
#       'new_group_id': id,
#       'group_first_seen': timestamp,
#       'datetime': timestamp,
#   })
#   Unmerge: (2, '(start_unmerge|end_unmerge)', {
#       'transaction_id': uuid,
#       'project_id': id,
#       'previous_group_id': id,
#       'new_group_id': id,
#       'hashes': [hash1, hash2],
#       'datetime': timestamp,
#   })
#   Delete Tag: (2, '(start_delete_tag|end_delete_tag)', {
#       'transaction_id': uuid,
#       'project_id': id,
#       'tag': 'foo',
#       'datetime': timestamp,
#   })


class SnubaProtocolEventStream(EventStream):
    """Event stream that speaks the versioned Snuba eventstream protocol.

    Builds protocol-version-2 payloads (see the module-level format notes)
    and delegates actual delivery to ``_send``/``_send_item``, which
    concrete subclasses implement.
    """

    # Beware! Changing this protocol (introducing a new version, or the message
    # format/fields themselves) requires consideration of all downstream
    # consumers. This includes the post-processing forwarder code!
    EVENT_PROTOCOL_VERSION = 2

    # These keys correspond to tags that are typically prefixed with `sentry:`
    # and will wreak havoc in the UI if both the `sentry:`-prefixed and
    # non-prefixed variations occur in a response.
    UNEXPECTED_TAG_KEYS = frozenset(["dist", "release", "user"])

    def _get_headers_for_insert(
        self,
        event: Event | GroupEvent,
        is_new: bool,
        is_regression: bool,
        is_new_group_environment: bool,
        primary_hash: str | None,
        received_timestamp: float | datetime,
        skip_consume: bool,
        group_states: GroupStates | None = None,
    ) -> MutableMapping[str, str]:
        """Return transport headers to attach to an ``insert`` message.

        Subclasses may override to add additional routing headers; the extra
        arguments are provided for that purpose and are unused here.
        """
        return {
            "Received-Timestamp": str(received_timestamp),
        }

    def insert(
        self,
        event: Event | GroupEvent,
        is_new: bool,
        is_regression: bool,
        is_new_group_environment: bool,
        primary_hash: str | None,
        received_timestamp: float | datetime,
        skip_consume: bool = False,
        group_states: GroupStates | None = None,
        eventstream_type: str | None = None,
        **kwargs: Any,
    ) -> None:
        """Publish an event insertion to the eventstream.

        A ``GroupEvent`` is only accepted when backed by an
        ``IssueOccurrence``; otherwise the call is logged as an error and
        dropped without sending anything.
        """
        if isinstance(event, GroupEvent) and not event.occurrence:
            logger.error(
                "`GroupEvent` passed to `EventStream.insert`. `GroupEvent` may only be passed when "
                "associated with an `IssueOccurrence`",
            )
            return
        project = event.project
        set_current_event_project(project.id)
        retention_days = quotas.backend.get_event_retention(organization=project.organization)

        event_data = event.get_raw_data(for_stream=True)

        # Detect tags that collide with their `sentry:`-prefixed variants
        # (see UNEXPECTED_TAG_KEYS); they are logged but still forwarded.
        unexpected_tags = {
            k
            for (k, v) in (get_path(event_data, "tags", filter=True) or [])
            if k in self.UNEXPECTED_TAG_KEYS
        }
        if unexpected_tags:
            logger.error("%r received unexpected tags: %r", self, unexpected_tags)

        headers = self._get_headers_for_insert(
            event,
            is_new,
            is_regression,
            is_new_group_environment,
            primary_hash,
            received_timestamp,
            skip_consume,
            group_states,
        )

        # Idiomatic equivalent of the membership-check-then-index pattern.
        skip_semantic_partitioning = kwargs.get(KW_SKIP_SEMANTIC_PARTITIONING, False)

        event_type = self._get_event_type(event)
        occurrence_data = self._get_occurrence_data(event)

        # instead of normalizing and doing custom 'contexts' processing in snuba, we elect to do it here instead to
        # avoid having to clutter up snuba code with business logic
        if event_type == EventStreamEventType.Generic:
            event_data = dict(event_data)
            contexts = event_data.setdefault("contexts", {})

            # add user.geo to contexts if it exists
            user_dict = event_data.get("user") or {}
            geo = user_dict.get("geo", {})
            if "geo" not in contexts and isinstance(geo, dict):
                contexts["geo"] = geo

            # transactions processing has a configurable 'skipped contexts' to skip writing specific contexts maps
            # to the row. for now, we're ignoring that until we have a need for it

        self._send(
            project.id,
            "insert",
            extra_data=(
                {
                    "group_id": event.group_id,
                    "group_ids": [group.id for group in getattr(event, "groups", [])],
                    "group_first_seen": (
                        json.datetime_to_str(event.group.first_seen)
                        if event.group is not None
                        else None
                    ),
                    "event_id": event.event_id,
                    "organization_id": project.organization_id,
                    "project_id": event.project_id,
                    # TODO(mitsuhiko): We do not want to send this incorrect
                    # message but this is what snuba needs at the moment.
                    "message": event.search_message,
                    "platform": event.platform,
                    "datetime": json.datetime_to_str(event.datetime),
                    "data": event_data,
                    "primary_hash": primary_hash,
                    "retention_days": retention_days,
                    "occurrence_id": occurrence_data.get("id"),
                    "occurrence_data": occurrence_data,
                },
                {
                    "is_new": is_new,
                    "is_regression": is_regression,
                    "is_new_group_environment": is_new_group_environment,
                    "skip_consume": skip_consume,
                    "group_states": group_states,
                },
            ),
            headers=headers,
            asynchronous=kwargs.get("asynchronous", True),
            skip_semantic_partitioning=skip_semantic_partitioning,
            event_type=event_type,
        )

        # Optionally dual-write the event to the EAP items pipeline,
        # gated by a gradual rollout on project_id.
        if in_rollout_group("eventstream.eap_forwarding_rate", event.project_id):
            self._forward_event_to_items(event, event_data, event_type, project)

    def _missing_required_item_fields(self, event_data: Mapping[str, Any]) -> list[str]:
        """Return the names of fields required for EAP item forwarding that
        are absent from ``event_data`` (empty list when all are present)."""
        root_level_fields = ["event_id", "timestamp"]
        missing_fields = [field for field in root_level_fields if field not in event_data]
        trace_id = get_path(event_data, "contexts", "trace", "trace_id", default=None)
        if trace_id is None:
            missing_fields.append("trace_id")

        return missing_fields

    def _forward_event_to_items(
        self,
        event: Event | GroupEvent,
        event_data: Mapping[str, Any],
        event_type: EventStreamEventType,
        project: Project,
    ) -> None:
        """Forward an Error/Generic event to the EAP items pipeline.

        Silently skips other event types and events missing the required
        fields (event_id, timestamp, trace_id).
        """
        if not (
            event_type == EventStreamEventType.Error or event_type == EventStreamEventType.Generic
        ):
            return

        missing_fields = self._missing_required_item_fields(event_data)
        if missing_fields:
            logger.debug(
                "Event data is missing required fields to forward to items: %s", missing_fields
            )
            return

        self._send_item(serialize_event_data_as_item(event, event_data, project))

    def start_delete_groups(self, project_id: int, group_ids: Sequence[int]) -> Mapping[str, Any]:
        """Begin a delete-groups transaction; returns the state that must be
        passed to ``end_delete_groups`` once the deletion is done."""
        if not group_ids:
            raise ValueError("expected groups to delete!")

        state = {
            # uuid4().hex is already a str; consistent with the other
            # transaction-id creations below.
            "transaction_id": uuid4().hex,
            "project_id": project_id,
            "group_ids": list(group_ids),
            "datetime": json.datetime_to_str(datetime.now(tz=timezone.utc)),
        }

        self._send(project_id, "start_delete_groups", extra_data=(state,), asynchronous=False)

        return state

    def end_delete_groups(self, state: Mapping[str, Any]) -> None:
        """Finish a delete-groups transaction started by ``start_delete_groups``."""
        state_copy: MutableMapping[str, Any] = {**state}
        state_copy["datetime"] = json.datetime_to_str(datetime.now(tz=timezone.utc))
        self._send(
            state_copy["project_id"],
            "end_delete_groups",
            extra_data=(state_copy,),
            asynchronous=False,
        )

    def start_merge(
        self,
        project_id: int,
        previous_group_ids: Sequence[int],
        new_group_id: int,
        new_group_first_seen: datetime | None = None,
    ) -> dict[str, Any]:
        """Begin a merge transaction; returns the state for ``end_merge``."""
        if not previous_group_ids:
            raise ValueError("expected groups to merge!")

        state = {
            "transaction_id": uuid4().hex,
            "project_id": project_id,
            "previous_group_ids": list(previous_group_ids),
            "new_group_id": new_group_id,
            "datetime": json.datetime_to_str(datetime.now(tz=timezone.utc)),
        }

        if new_group_first_seen is not None:
            state["new_group_first_seen"] = json.datetime_to_str(new_group_first_seen)

        self._send(project_id, "start_merge", extra_data=(state,), asynchronous=False)

        return state

    def end_merge(self, state: Mapping[str, Any]) -> None:
        """Finish a merge transaction started by ``start_merge``."""
        state_copy: MutableMapping[str, Any] = {**state}
        state_copy["datetime"] = json.datetime_to_str(datetime.now(tz=timezone.utc))
        self._send(
            state_copy["project_id"], "end_merge", extra_data=(state_copy,), asynchronous=False
        )

    def start_unmerge(
        self, project_id: int, hashes: Collection[str], previous_group_id: int, new_group_id: int
    ) -> Mapping[str, Any] | None:
        """Begin an unmerge transaction; returns the state for ``end_unmerge``,
        or None when there are no hashes to move (nothing is sent)."""
        if not hashes:
            return None

        state = {
            "transaction_id": uuid4().hex,
            "project_id": project_id,
            "previous_group_id": previous_group_id,
            "new_group_id": new_group_id,
            "hashes": list(hashes),
            "datetime": json.datetime_to_str(datetime.now(tz=timezone.utc)),
        }

        self._send(project_id, "start_unmerge", extra_data=(state,), asynchronous=False)

        return state

    def end_unmerge(self, state: Mapping[str, Any]) -> None:
        """Finish an unmerge transaction started by ``start_unmerge``."""
        state_copy: MutableMapping[str, Any] = {**state}
        state_copy["datetime"] = json.datetime_to_str(datetime.now(tz=timezone.utc))
        self._send(
            state_copy["project_id"], "end_unmerge", extra_data=(state_copy,), asynchronous=False
        )

    def start_delete_tag(self, project_id: int, tag: str) -> Mapping[str, Any]:
        """Begin a delete-tag transaction; returns the state for ``end_delete_tag``."""
        if not tag:
            raise ValueError("expected tag")

        state = {
            "transaction_id": uuid4().hex,
            "project_id": project_id,
            "tag": tag,
            "datetime": json.datetime_to_str(datetime.now(tz=timezone.utc)),
        }

        self._send(project_id, "start_delete_tag", extra_data=(state,), asynchronous=False)

        return state

    def end_delete_tag(self, state: Mapping[str, Any]) -> None:
        """Finish a delete-tag transaction started by ``start_delete_tag``."""
        state_copy: MutableMapping[str, Any] = {**state}
        state_copy["datetime"] = json.datetime_to_str(datetime.now(tz=timezone.utc))
        self._send(
            state_copy["project_id"], "end_delete_tag", extra_data=(state_copy,), asynchronous=False
        )

    def tombstone_events_unsafe(
        self,
        project_id: int,
        event_ids: Sequence[str],
        old_primary_hash: str | None = None,
        from_timestamp: datetime | None = None,
        to_timestamp: datetime | None = None,
    ) -> None:
        """
        Tell Snuba to eventually delete these events.

        This marks events as deleted but does not immediately exclude those
        events from all queries. Because of that limitation this is not proper,
        because not immediate, event deletion.

        "Proper" group deletion is essentially running this function for every
        event in the group, plus `exclude_groups` to make sure the changes are
        immediately user-visible.

        Reprocessing (v2) splits a group into events-to-be-reprocessed
        (re-insert with new group_id) and events-to-be-deleted
        (`tombstone_events`), then excludes the group from all queries
        (`exclude_groups`).

        :param old_primary_hash: If present, the event is only tombstoned
            to be reinserted over with a guaranteed-different primary hash.
            This is necessary with Snuba's errors table as the primary_hash is
            part of the PK/sortkey.
        """

        state = {
            "project_id": project_id,
            "event_ids": event_ids,
            "old_primary_hash": old_primary_hash,
            "from_timestamp": from_timestamp,
            "to_timestamp": to_timestamp,
        }
        self._send(project_id, "tombstone_events", extra_data=(state,), asynchronous=False)

    def replace_group_unsafe(
        self,
        project_id: int,
        event_ids: Sequence[str],
        new_group_id: int,
        from_timestamp: datetime | None = None,
        to_timestamp: datetime | None = None,
    ) -> None:
        """
        Tell Snuba to move events into a new group ID

        Same caveats as tombstone_events
        """

        state = {
            "project_id": project_id,
            "event_ids": event_ids,
            "new_group_id": new_group_id,
            "from_timestamp": from_timestamp,
            "to_timestamp": to_timestamp,
        }
        self._send(project_id, "replace_group", extra_data=(state,), asynchronous=False)

    def exclude_groups(self, project_id: int, group_ids: Sequence[int]) -> None:
        """
        Exclude a group from queries for a while until event tombstoning takes
        effect. See docstring of `tombstone_events`.

        `exclude_groups` basically makes Snuba add `where group_id not in (1,
        2, ...)` to every query.
        """
        state = {"project_id": project_id, "group_ids": group_ids}
        self._send(project_id, "exclude_groups", extra_data=(state,), asynchronous=False)

    def _send(
        self,
        project_id: int,
        _type: str,
        extra_data: tuple[Any, ...] = (),
        asynchronous: bool = True,
        headers: MutableMapping[str, str] | None = None,
        skip_semantic_partitioning: bool = False,
        event_type: EventStreamEventType = EventStreamEventType.Error,
    ) -> None:
        """Deliver one protocol message; implemented by subclasses."""
        raise NotImplementedError

    def _send_item(self, trace_item: TraceItem) -> None:
        """Deliver one EAP TraceItem; implemented by subclasses."""
        raise NotImplementedError


class SnubaEventStream(SnubaProtocolEventStream):
    """Eventstream backend that POSTs protocol messages directly to Snuba's
    test HTTP endpoints (no Kafka, no post-process forwarder) — used in
    development and test environments."""

    def _send(
        self,
        project_id: int,
        _type: str,
        extra_data: tuple[Any, ...] = (),
        asynchronous: bool = True,
        headers: MutableMapping[str, str] | None = None,
        skip_semantic_partitioning: bool = False,
        event_type: EventStreamEventType = EventStreamEventType.Error,
    ) -> None:
        """Serialize a protocol message, validate it against the topic codec,
        and POST it to Snuba's ``/tests/<entity>/eventstream`` endpoint.

        Raises:
            snuba.SnubaError: on a non-200 response or a transport error.
        """
        if headers is None:
            headers = {}

        if event_type == EventStreamEventType.Error and extra_data and "datetime" in extra_data[0]:
            # error events now have a timestamp_ms field, this does not exist on the nodestore event
            # but instead should be derived from the datetime field on regular Snuba processing.
            # Since here we insert it using the eventstream API we need to add it manually.
            # Build a copy instead of mutating extra_data[0] in place so that
            # caller-owned state dicts (e.g. the mappings returned by the
            # start_* methods) are not polluted with a timestamp_ms key.
            first = {**extra_data[0], "timestamp_ms": extra_data[0]["datetime"]}
            extra_data = (first,) + extra_data[1:]

        data = (self.EVENT_PROTOCOL_VERSION, _type) + extra_data

        # Route to the Snuba entity (and matching topic codec) for this type.
        entity = "events"
        if event_type == EventStreamEventType.Transaction:
            entity = "transactions"
        if event_type == EventStreamEventType.Generic:
            entity = "search_issues"

        serialized_data = json.dumps(data)

        topic_mapping: Mapping[str, Topic] = {
            "events": Topic.EVENTS,
            "transactions": Topic.TRANSACTIONS,
            "search_issues": Topic.EVENTSTREAM_GENERIC,
        }

        # Validate the payload against the topic schema before sending; a
        # schema violation raises here rather than failing downstream.
        codec = get_topic_codec(topic_mapping[entity])
        codec.decode(serialized_data.encode("utf-8"), validate=True)

        try:
            resp = snuba._snuba_pool.urlopen(
                "POST",
                f"/tests/{entity}/eventstream",
                body=serialized_data,
                headers={f"X-Sentry-{k}": v for k, v in headers.items()},
            )
            if resp.status != 200:
                raise snuba.SnubaError(
                    f"HTTP {resp.status} response from Snuba! {json.loads(resp.data)}"
                )
            return None
        except urllib3.exceptions.HTTPError as err:
            # Chain the transport error so the original traceback is kept.
            raise snuba.SnubaError(err) from err

    def _send_item(self, trace_item: TraceItem) -> None:
        """POST a single serialized TraceItem to the EAP items insert
        endpoint as a multipart form; failures are logged and counted in
        metrics but never raised (best-effort forwarding)."""
        try:
            serialized = trace_item.SerializeToString()
            field = RequestField(name="item_0", data=serialized, filename="item_0")
            field.make_multipart(content_type="application/octet-stream")
            body, content_type = encode_multipart_formdata([field])

            resp = snuba._snuba_pool.urlopen(
                "POST",
                EAP_ITEMS_INSERT_ENDPOINT,
                body=body,
                headers={"Content-Type": content_type},
            )

            if resp.status == 200:
                metrics.incr(
                    "eventstream.eap.occurrence_insert.success",
                    tags={"backend": "snuba_http"},
                )
            else:
                logger.warning(
                    "Failed to insert EAP occurrence item via Snuba HTTP",
                    extra={
                        "status": resp.status,
                        "organization_id": trace_item.organization_id,
                        "project_id": trace_item.project_id,
                        "item_id": trace_item.item_id.decode("utf-8"),
                        "trace_id": trace_item.trace_id,
                        "backend": "snuba_http",
                    },
                )
                metrics.incr(
                    "eventstream.eap.occurrence_insert.failure",
                    tags={"backend": "snuba_http"},
                )
        except Exception:
            logger.exception(
                "Exception while inserting EAP occurrence item via Snuba HTTP",
                extra={
                    "organization_id": trace_item.organization_id,
                    "project_id": trace_item.project_id,
                    "item_id": trace_item.item_id.decode("utf-8"),
                    "trace_id": trace_item.trace_id,
                    "backend": "snuba_http",
                },
            )
            metrics.incr(
                "eventstream.eap.occurrence_insert.failure",
                tags={"backend": "snuba_http"},
            )

    def requires_post_process_forwarder(self) -> bool:
        """Post-processing is dispatched inline (see ``insert``), so no
        separate forwarder process is required for this backend."""
        return False

    def insert(
        self,
        event: Event | GroupEvent,
        is_new: bool,
        is_regression: bool,
        is_new_group_environment: bool,
        primary_hash: str | None,
        received_timestamp: float | datetime,
        skip_consume: bool = False,
        group_states: GroupStates | None = None,
        eventstream_type: str | None = None,
        **kwargs: Any,
    ) -> None:
        """Send the insert message, then dispatch the post-process group task
        inline (this backend has no post-process forwarder)."""
        super().insert(
            event,
            is_new,
            is_regression,
            is_new_group_environment,
            primary_hash,
            received_timestamp,
            skip_consume,
            group_states,
            **kwargs,
        )
        self._dispatch_post_process_group_task(
            event.event_id,
            event.project_id,
            event.group_id,
            is_new,
            is_regression,
            is_new_group_environment,
            primary_hash,
            skip_consume,
            group_states,
            occurrence_id=event.occurrence_id if isinstance(event, GroupEvent) else None,
            eventstream_type=eventstream_type,
        )