"""
Local neural network architecture for locELM.

Implements shallow neural networks with random hidden layer weights
and trainable output layer weights.
"""

import tensorflow as tf
import numpy as np
from typing import List, Optional, Tuple


class LocalELMNetwork:
    """Local Extreme Learning Machine network for a single subdomain.

    Hidden layers carry fixed, randomly initialized weights (marked
    non-trainable); only the linear output layer (kernel + bias) holds
    trainable parameters, as in standard ELM.
    """

    def __init__(self,
                 input_dim: int,
                 output_dim: int,
                 hidden_layers: List[int],
                 activation: str = 'tanh',
                 r_max: float = 3.0,
                 seed: Optional[int] = None,
                 subdomain_region: Optional[List[Tuple[float, float]]] = None):
        """
        Initialize local ELM network.

        Parameters
        ----------
        input_dim : int
            Input dimension
        output_dim : int
            Output dimension
        hidden_layers : List[int]
            Number of nodes in each hidden layer (must be non-empty)
        activation : str
            Activation function ('tanh', 'relu', 'sigmoid')
        r_max : float
            Maximum magnitude for random weight/bias coefficients
        seed : Optional[int]
            Random seed for weight initialization
        subdomain_region : Optional[List[Tuple[float, float]]]
            Per-input-dimension (lower, upper) bounds used to affinely map
            inputs to [-1, 1]; each interval must satisfy lower < upper

        Raises
        ------
        ValueError
            If `hidden_layers` is empty, or any subdomain interval is
            degenerate (upper <= lower). An unknown `activation` raises
            later, when the network is built.
        """
        # Fail fast with clear messages instead of an opaque IndexError on
        # hidden_layers[-1] or a silent division-by-zero in normalization.
        if not hidden_layers:
            raise ValueError("hidden_layers must contain at least one layer size")
        if subdomain_region is not None:
            for lower, upper in subdomain_region:
                if upper <= lower:
                    raise ValueError(
                        f"Degenerate subdomain interval ({lower}, {upper}): "
                        "upper bound must exceed lower bound"
                    )

        self.input_dim = input_dim
        self.output_dim = output_dim
        self.hidden_layers = hidden_layers
        self.activation = activation
        self.r_max = r_max
        self.seed = seed
        self.subdomain_region = subdomain_region

        # Number of trainable parameters: last hidden layer width M times
        # output_dim for the kernel, plus output_dim for the bias.
        self.n_params = hidden_layers[-1] * output_dim + output_dim

        # Build the full network and the auxiliary model that exposes the
        # last hidden layer output.
        self.model = self._build_network()
        self.last_hidden_model = self._build_last_hidden_model()

    def _get_activation(self):
        """Return the tf activation function for `self.activation`.

        Raises
        ------
        ValueError
            If the activation name is not one of 'tanh', 'relu', 'sigmoid'.
        """
        activations = {
            'tanh': tf.nn.tanh,
            'relu': tf.nn.relu,
            'sigmoid': tf.nn.sigmoid,
        }
        try:
            return activations[self.activation]
        except KeyError:
            raise ValueError(f"Unknown activation: {self.activation}") from None

    def _build_network(self) -> tf.keras.Model:
        """Build the local neural network (frozen hidden stack + trainable output)."""
        if self.seed is not None:
            tf.random.set_seed(self.seed)

        # Input layer
        inputs = tf.keras.Input(shape=(self.input_dim,))

        # Optional normalization layer: affine map of the subdomain onto
        # [-1, 1] per dimension, which keeps random-feature activations in
        # a comparable range across subdomains.
        x = inputs
        if self.subdomain_region is not None:
            region_array = np.array(self.subdomain_region)
            a = region_array[:, 0]  # lower bounds
            b = region_array[:, 1]  # upper bounds
            center = (a + b) / 2
            scale = (b - a) / 2  # strictly positive: validated in __init__

            # NOTE(review): the Lambda closes over numpy arrays, which is
            # fine for in-process use but hampers model serialization.
            x = tf.keras.layers.Lambda(
                lambda t: (t - center) / scale,
                name='normalization'
            )(x)

        # Hidden layers with fixed random weights drawn uniformly from
        # [-r_max, r_max]; per-layer seed offsets keep layers decorrelated
        # while remaining reproducible for a fixed `seed`.
        act_fn = self._get_activation()

        for i, n_nodes in enumerate(self.hidden_layers):
            kernel_init = tf.keras.initializers.RandomUniform(
                minval=-self.r_max,
                maxval=self.r_max,
                seed=self.seed + i if self.seed is not None else None
            )
            bias_init = tf.keras.initializers.RandomUniform(
                minval=-self.r_max,
                maxval=self.r_max,
                seed=self.seed + i + 1000 if self.seed is not None else None
            )

            x = tf.keras.layers.Dense(
                n_nodes,
                activation=act_fn,
                kernel_initializer=kernel_init,
                bias_initializer=bias_init,
                trainable=False,  # hidden layers stay frozen (ELM)
                name=f'hidden_{i}'
            )(x)

        # Output layer: linear, trainable, with bias so constant offsets
        # are representable; zero-initialized so weights are set externally.
        outputs = tf.keras.layers.Dense(
            self.output_dim,
            activation=None,
            use_bias=True,
            kernel_initializer='zeros',
            bias_initializer='zeros',
            trainable=True,
            name='output'
        )(x)

        model = tf.keras.Model(inputs=inputs, outputs=outputs, name='local_elm')
        return model

    def _build_last_hidden_model(self) -> tf.keras.Model:
        """Build a model that outputs the last hidden layer activations.

        The layer is looked up by its name rather than by position
        (`layers[-2]` is fragile against changes in the layer stack).
        """
        last_hidden_layer = self.model.get_layer(
            f'hidden_{len(self.hidden_layers) - 1}'
        )
        last_hidden_model = tf.keras.Model(
            inputs=self.model.input,
            outputs=last_hidden_layer.output,
            name='last_hidden'
        )
        return last_hidden_model

    def compute_last_hidden_output(self, x: np.ndarray) -> np.ndarray:
        """
        Compute output of last hidden layer V_j(x).

        Parameters
        ----------
        x : np.ndarray
            Input points of shape (n_points, input_dim)

        Returns
        -------
        np.ndarray
            Last hidden layer output of shape (n_points, M)
        """
        return self.last_hidden_model(x, training=False).numpy()

    def set_output_weights(self, weights: np.ndarray):
        """
        Set the output layer weights and bias.

        Parameters
        ----------
        weights : np.ndarray
            Flattened weights of shape (M*output_dim + output_dim,)
            First M*output_dim are weights, last output_dim are biases

        Raises
        ------
        ValueError
            If the number of supplied parameters is not `self.n_params`.
        """
        weights = np.asarray(weights)
        # Explicit size check: a wrong-sized vector would otherwise surface
        # as a confusing reshape / Keras shape-mismatch error.
        if weights.size != self.n_params:
            raise ValueError(
                f"Expected {self.n_params} parameters, got {weights.size}"
            )

        M = self.hidden_layers[-1]
        n_weights = M * self.output_dim

        # Split into weights and biases
        weight_matrix = weights[:n_weights].reshape(M, self.output_dim)
        bias_vector = weights[n_weights:]

        # Dense.set_weights expects [kernel, bias]
        self.model.get_layer('output').set_weights([weight_matrix, bias_vector])

    def __call__(self, x: np.ndarray) -> np.ndarray:
        """Forward pass through the network; returns a numpy array."""
        return self.model(x, training=False).numpy()


class MultiSubdomainNetwork:
    """Collection of local ELM networks, one per subdomain.

    Networks are added incrementally via `add_subdomain_network`; parameter
    bookkeeping (`n_params_per_subdomain`, `n_total_params`) is fixed at
    construction from `n_subdomains` and the last hidden layer width.
    """

    def __init__(self,
                 n_subdomains: int,
                 input_dim: int,
                 output_dim: int,
                 hidden_layers: List[int],
                 activation: str = 'tanh',
                 r_max: float = 3.0,
                 seed: Optional[int] = None):
        """
        Initialize multi-subdomain network.

        Parameters
        ----------
        n_subdomains : int
            Number of subdomains
        input_dim : int
            Input dimension
        output_dim : int
            Output dimension
        hidden_layers : List[int]
            Number of nodes in each hidden layer
        activation : str
            Activation function
        r_max : float
            Maximum magnitude for random coefficients
        seed : Optional[int]
            Random seed
        """
        self.n_subdomains = n_subdomains
        self.input_dim = input_dim
        self.output_dim = output_dim
        self.hidden_layers = hidden_layers
        self.activation = activation
        self.r_max = r_max
        self.seed = seed

        # Local networks, appended by add_subdomain_network.
        self.networks = []
        # Per-subdomain trainable parameters: output kernel plus bias.
        self.n_params_per_subdomain = hidden_layers[-1] * output_dim + output_dim
        self.n_total_params = n_subdomains * self.n_params_per_subdomain

    def add_subdomain_network(self, subdomain_region: List[Tuple[float, float]],
                              subdomain_seed: Optional[int] = None):
        """Add a network for a specific subdomain.

        If no explicit seed is given, derive one from the global seed with a
        large per-subdomain offset so subdomain networks are decorrelated
        but reproducible.
        """
        if subdomain_seed is None and self.seed is not None:
            subdomain_seed = self.seed + len(self.networks) * 10000

        network = LocalELMNetwork(
            input_dim=self.input_dim,
            output_dim=self.output_dim,
            hidden_layers=self.hidden_layers,
            activation=self.activation,
            r_max=self.r_max,
            seed=subdomain_seed,
            subdomain_region=subdomain_region
        )
        self.networks.append(network)

    def get_network(self, subdomain_idx: int) -> "LocalELMNetwork":
        """Get network for a specific subdomain."""
        return self.networks[subdomain_idx]

    def set_all_weights(self, weights: np.ndarray):
        """
        Set output-layer weights for all subdomain networks.

        Parameters
        ----------
        weights : np.ndarray
            Flattened weights for all subdomains, concatenated in the order
            the networks were added

        Raises
        ------
        ValueError
            If the number of supplied parameters does not match
            `self.n_total_params` (previously, extra trailing parameters
            were silently ignored).
        """
        weights = np.asarray(weights)
        if weights.size != self.n_total_params:
            raise ValueError(
                f"Expected {self.n_total_params} parameters, "
                f"got {weights.size}"
            )

        start_idx = 0
        for network in self.networks:
            end_idx = start_idx + network.n_params
            # Pass each network its flattened slice of the global vector.
            network.set_output_weights(weights[start_idx:end_idx])
            start_idx = end_idx

    def __len__(self):
        """Return number of subdomain networks added so far."""
        return len(self.networks)
