# ===----------------------------------------------------------------------=== #
# Copyright (c) 2025, Modular Inc. All rights reserved.
#
# Licensed under the Apache License v2.0 with LLVM Exceptions:
# https://llvm.org/LICENSE.txt
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ===----------------------------------------------------------------------=== #
"""Ops for generating random numbers."""

from __future__ import annotations

import weakref
from collections.abc import MutableMapping
from dataclasses import replace

import numpy as np
from max._core.dialects import kgen, rmo
from max.dtype import DType

from .. import dtype_promotion
from ..graph import Graph
from ..type import DeviceRef, TensorType
from ..value import TensorValue, TensorValueLike
from .elementwise import _accum_type

# Per-graph seed state. Weak keys let a graph (and its stored seed tensor)
# be garbage-collected once no other references to the graph remain.
SEEDS: MutableMapping[Graph, TensorValue] = weakref.WeakKeyDictionary()
# Canonical type for seed values: a scalar (rank-0) int64 on the CPU.
SeedType = TensorType(DType.int64, [], device=DeviceRef.CPU())


def _rotate_seed(seed: TensorValue):  # noqa: ANN202
    # Let's just get some different random numbers
    # from the initial seed for now.
    return seed + 1


def assert_scalar(value: TensorValueLike) -> None:
    """Raise ``ValueError`` if ``value`` is an array-like with a non-empty shape.

    Plain Python scalars (and other non-array values) pass through untouched;
    only ``np.ndarray`` and ``TensorValue`` inputs have their shape inspected.
    """
    if not isinstance(value, (np.ndarray, TensorValue)):
        return
    if value.shape:
        raise ValueError("Expected a scalar value")


def _next_seed():  # noqa: ANN202
    """Return the current graph's seed and advance the stored seed.

    Raises ``RuntimeError`` (via ``_peek_seed``) when no seed has been set.
    """
    graph = Graph.current
    current = _peek_seed()
    # Persist the rotated value so the next caller draws a fresh seed.
    SEEDS[graph] = _rotate_seed(current)
    return current


def _peek_seed():  # noqa: ANN202
    """Return the current graph's seed without advancing it.

    Raises:
        RuntimeError: If no seed has been set for the current graph.
    """
    graph = Graph.current
    try:
        return SEEDS[graph]
    except LookupError:
        # A missing key is expected control flow here; suppress the KeyError
        # context (`from None`) so users see a single clean error that points
        # at the remedy, instead of a chained traceback.
        raise RuntimeError(
            "No seed set! Set with `ops.random.set_seed`."
        ) from None


def set_seed(seed: TensorValue | int = 0) -> None:
    """Seeds random-number generation for the current graph.

    Call this at least once per graph before using any random op.

    - With a static seed, every execution of the graph reproduces the same
      random values.
    - To vary values between executions, expose a ``seed`` input on the
      graph and pass that input here.

    Args:
        seed: The starting seed. Each random op rotates the stored seed
            automatically, so successive ops draw from different values.
    """
    assert_scalar(seed)
    promoted = dtype_promotion._promote_to_strong(
        seed, DType.int64, DeviceRef.CPU()
    )
    # Promotion of an existing TensorValue keeps its dtype, so reject
    # anything that didn't land on int64.
    if promoted.dtype != DType.int64:
        raise TypeError("Seed value must be int64")
    SEEDS[Graph.current] = promoted


def gaussian(
    like: TensorType,
    mean: TensorValueLike = 0,
    std: TensorValueLike = 1,
) -> TensorValue:
    """Returns a tensor of random values drawn from a normal distribution.

    Args:
        like: Tensor type describing the shape, dtype, and device of the
            result. Non-float dtypes are sampled in float32, rounded, then
            cast back.
        mean: Scalar mean of the distribution (promoted to float32 on CPU).
        std: Scalar spread of the distribution (promoted to float32 on CPU).

    Returns:
        A tensor of type ``like`` filled with random values.

    Raises:
        ValueError: If ``mean`` or ``std`` is not a scalar.
        RuntimeError: If no seed has been set via ``ops.random.set_seed``.
    """
    assert_scalar(mean)
    assert_scalar(std)
    # Check whether we have a seed before we add other constants to the graph.
    seed = _next_seed()
    # Sample in a float accumulation dtype; integer dtypes fall back to f32.
    accum_type = _accum_type(like) if like.dtype.is_float() else DType.float32
    random_accum = Graph.current._add_op_generated(
        rmo.MoRandomNormalOp,
        result=replace(like, dtype=accum_type),
        shape=TensorValue(like.shape),
        mean=dtype_promotion._promote_to_strong(
            mean, DType.float32, DeviceRef.CPU()
        ),
        # NOTE(review): the op parameter is named `variance` but receives
        # `std` unchanged — confirm whether the kernel expects a standard
        # deviation or a true variance (std**2) here.
        variance=dtype_promotion._promote_to_strong(
            std, DType.float32, DeviceRef.CPU()
        ),
        seed=seed,
        output_param_decls=kgen.ParamDeclArrayAttr([]),
    )[0].tensor
    if not like.dtype.is_float():
        # Integer outputs: round the float samples before casting back.
        random_accum = round(random_accum)
    return random_accum.cast(like.dtype)


# `normal` is a public alias for `gaussian`; both names bind the same function.
normal = gaussian


def uniform(
    like: TensorType,
    range: tuple[TensorValueLike, TensorValueLike] = (0, 1),
) -> TensorValue:
    """Returns a tensor of random values drawn from a uniform distribution.

    Args:
        like: Tensor type describing the shape, dtype, and device of the
            result.
        range: A ``(lower, upper)`` pair of scalar bounds; each is promoted
            to ``like``'s dtype on CPU. (The parameter name shadows the
            ``range`` builtin but is kept for API compatibility.)

    Returns:
        A tensor of type ``like`` filled with random values.

    Raises:
        ValueError: If either bound is not a scalar.
        RuntimeError: If no seed has been set via ``ops.random.set_seed``.
    """
    lower, upper = range

    assert_scalar(lower)
    assert_scalar(upper)
    # Check whether we have a seed before we add other constants to the graph.
    seed = _next_seed()
    return Graph.current._add_op_generated(
        rmo.MoRandomUniformOp,
        result=like,
        shape=TensorValue(like.shape),
        lower_bound=dtype_promotion._promote_to_strong(
            lower, like.dtype, DeviceRef.CPU()
        ),
        upper_bound=dtype_promotion._promote_to_strong(
            upper, like.dtype, DeviceRef.CPU()
        ),
        seed=seed,
        output_param_decls=kgen.ParamDeclArrayAttr([]),
    )[0].tensor
