# Copyright 2024 Iguazio
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import os
import typing
from collections.abc import Iterator
from unittest.mock import Mock, patch

import kafka.errors
import pytest
import taosws

import mlrun.common.schemas
import mlrun.runtimes
from mlrun.datastore.datastore_profile import DatastoreProfileKafkaStream

import services.api
import services.api.crud.model_monitoring.deployment as mm_dep


@pytest.fixture()
def monitoring_deployment() -> mm_dep.MonitoringDeployment:
    """Return a ``MonitoringDeployment`` whose collaborators are spec'd mocks."""
    # Spec'd mocks keep attribute access honest without touching a real DB/auth.
    project_mock = Mock(spec=str)
    auth_info_mock = Mock(spec=mm_dep.mlrun.common.schemas.AuthInfo)
    db_session_mock = Mock(spec=mm_dep.sqlalchemy.orm.Session)
    return mm_dep.MonitoringDeployment(
        project=project_mock,
        auth_info=auth_info_mock,
        db_session=db_session_mock,
        model_monitoring_access_key=None,
    )


class SecretTester(services.api.crud.secrets.Secrets):
    """In-memory stand-in for the project-secrets CRUD, keyed by project name."""

    # Class-level store: shared by all instances so stored secrets survive
    # re-instantiation while this class is patched in for the real Secrets.
    _secrets: dict[str, dict[str, str]] = {}

    def store_project_secrets(
        self,
        project: str,
        secrets: mlrun.common.schemas.SecretsData,
        allow_internal_secrets: bool = False,
        key_map_secret_key: typing.Optional[str] = None,
        allow_storing_key_maps: bool = False,
    ):
        # Only the payload matters in-memory; the k8s-related flags are ignored.
        self._secrets[project] = secrets.secrets

    def get_project_secret(
        self,
        project: str,
        provider: mlrun.common.schemas.SecretProviderName,
        secret_key: str,
        token: typing.Optional[str] = None,
        allow_secrets_from_k8s: bool = False,
        allow_internal_secrets: bool = False,
        key_map_secret_key: typing.Optional[str] = None,
    ) -> typing.Optional[str]:
        # Missing project or missing key both resolve to None, mirroring
        # the chained .get(...).get(...) lookup of the real implementation.
        project_secrets = self._secrets.get(project)
        if project_secrets is None:
            return None
        return project_secrets.get(secret_key)


class TestAppDeployment:
    """Test nominal flow of the app deployment"""

    @staticmethod
    @pytest.fixture(autouse=True)
    def _patch_build_function() -> Iterator[None]:
        # Avoid real Nuclio deployments and k8s secret storage: the deploy call
        # is mocked out and secrets are kept in the in-memory SecretTester.
        with patch(
            "services.api.api.endpoints.nuclio._deploy_function",
            new=Mock(return_value=Mock(spec=mlrun.runtimes.ServingRuntime)),
        ):
            with patch("services.api.crud.Secrets", new=SecretTester):
                yield

    @staticmethod
    def test_app_dep(monitoring_deployment: mm_dep.MonitoringDeployment) -> None:
        """The histogram data-drift app deploys without raising."""
        monitoring_deployment.deploy_histogram_data_drift_app(
            image="mlrun/mlrun", overwrite=True
        )

    @pytest.mark.skipif(
        os.getenv("V3IO_FRAMESD") is None
        or os.getenv("V3IO_ACCESS_KEY") is None
        or os.getenv("V3IO_API") is None,
        # Fixed typo: "Framsed" -> "framesd" (the V3IO frames service).
        reason="Configure framesd to access V3IO store targets",
    )
    def test_credentials(
        self,
        monitoring_deployment: mm_dep.MonitoringDeployment,
    ) -> None:
        """Exercise ``set_credentials`` error paths and the replace_creds flow."""
        # New project case: no model-monitoring function exists yet.
        with patch(
            "services.api.crud.Functions.get_function",
            side_effect=mlrun.errors.MLRunNotFoundError,
        ):
            # Credentials were never set, so the check must fail.
            with pytest.raises(mlrun.errors.MLRunBadRequestError):
                monitoring_deployment.check_if_credentials_are_set()

            # An unrecognized TSDB connection string is rejected.
            with pytest.raises(mlrun.errors.MLRunInvalidMMStoreTypeError):
                monitoring_deployment.set_credentials(
                    stream_path="kafka://stream",
                    tsdb_connection="wrong",
                )

            # A TDEngine URL with no reachable server fails at connection time.
            with pytest.raises(taosws.QueryError):
                monitoring_deployment.set_credentials(
                    stream_path="v3io",
                    tsdb_connection="taosws://",
                )

            # A Kafka stream path with no reachable broker fails at connection time.
            with pytest.raises(kafka.errors.NoBrokersAvailable):
                monitoring_deployment.set_credentials(
                    stream_path="kafka://stream",
                    tsdb_connection="v3io",
                )

            # Credentials may be set incrementally (stream first, TSDB later).
            monitoring_deployment.set_credentials(
                stream_path="v3io",
            )
            # Retrieving the mandatory secrets must not raise.
            monitoring_deployment._get_monitoring_mandatory_project_secrets()

            monitoring_deployment.set_credentials(
                tsdb_connection="v3io",
            )

            # Once fully set, re-setting without replace_creds is a conflict.
            with pytest.raises(mlrun.errors.MLRunConflictError):
                monitoring_deployment.set_credentials(
                    stream_path="v3io",
                    tsdb_connection="v3io",
                )
            # With replace_creds=True the existing credentials may be replaced.
            monitoring_deployment.set_credentials(
                replace_creds=True,
            )

            # Secrets must still be retrievable after the replacement.
            monitoring_deployment._get_monitoring_mandatory_project_secrets()
        # Existing project - upgrade from 1.6.0 case: a monitoring function exists.
        monitoring_deployment.project = "1.6.0_project"
        with patch(
            "services.api.crud.Functions.get_function", return_value=Mock(spec={})
        ):
            with pytest.raises(mlrun.errors.MLRunConflictError):
                monitoring_deployment.set_credentials(
                    stream_path="v3io",
                    tsdb_connection="v3io",
                )
            secrets = monitoring_deployment._get_monitoring_mandatory_project_secrets()
            # All mandatory secrets except the stream path must be "v3io".
            for key, value in secrets.items():
                if (
                    key
                    != mlrun.common.schemas.model_monitoring.ProjectSecretKeys.STREAM_PATH
                ):
                    assert value == "v3io"
            monitoring_deployment.set_credentials(
                stream_path="v3io",
                tsdb_connection="v3io",
                replace_creds=True,
            )

            # Secrets remain retrievable after replacing credentials.
            monitoring_deployment._get_monitoring_mandatory_project_secrets()


@pytest.mark.parametrize(
    "nuclio_annotations",
    [
        {
            "nuclio.io/kafka-metadata-retry-max": "10000",
            "nuclio.io/kafka-metadata-timeout": "1200s",
        },
        None,
    ],
)
@patch("mlrun.datastore.sources.KafkaSource.create_topics")
def test_apply_and_create_kafka_source(
    create_topics_mock: Mock,
    nuclio_annotations: typing.Optional[dict[str, str]],
    monitoring_deployment: mm_dep.MonitoringDeployment,
) -> None:
    """Test that the Kafka trigger is set correctly"""
    replication_factor = 3

    # Assemble the profile's public kwargs, optionally carrying the Nuclio
    # annotations under test.
    public_kwargs: dict[str, typing.Any] = {
        "security_protocol": "SASL_SSL",
        "api_version_auto_timeout_ms": 15_000,
        "tls": {"enable": True},
        "new_topic": {"replication_factor": replication_factor},
    }
    if nuclio_annotations:
        public_kwargs["nuclio_annotations"] = nuclio_annotations

    kafka_profile = DatastoreProfileKafkaStream(
        name="test-kafka-profile",
        brokers=["sub.confluent.cloud:9092"],
        topics=[],
        sasl_user="usr1",
        sasl_pass="pass123",
        kwargs_public=public_kwargs,
    )

    serving_fn = mlrun.runtimes.ServingRuntime()
    monitoring_deployment._apply_and_create_kafka_source(
        kafka_profile=kafka_profile,
        function=serving_fn,
        function_name="test-confluent-trigger",
        stream_args=mlrun.mlconf.model_endpoint_monitoring.serving_stream,
        ignore_stream_already_exists_failure=True,
    )

    # Topic creation happens exactly once, with the profile's replication factor.
    create_topics_mock.assert_called_once_with(
        num_partitions=8, replication_factor=replication_factor
    )

    trigger_conf = serving_fn.spec.config.get("spec.triggers.kafka")
    assert trigger_conf, "Expected a Kafka trigger"
    assert trigger_conf.get("kind") == "kafka-cluster", "Expected `kafka-cluster` kind"
    annotations = serving_fn.spec.base_spec.get("metadata", {}).get("annotations")
    assert (
        annotations == nuclio_annotations
    ), "The set annotations are different than expected"