import numpy as np


def dataAugmentation(
    x: np.ndarray,
    y: np.ndarray,
    augmentation_ratio: float = 0.5,
    remove_specific_keypoints: list = None,
    remove_rand_keypoints_nbr: int = None,
    random_noise_standard_deviation: float = None,
    scaling_factor: float = None,
    rotation_angle: float = None,
    scaling_factor_standard_deviation: float = None,
    rotation_angle_standard_deviation: float = None,
):
    """Create additional dataset entries by applying data augmentation techniques.

    Each augmentation is only applied when its corresponding argument is given
    (not None). Entries are shuffled first; when ``augmentation_ratio`` exceeds
    1.0 the (shuffled) dataset is traversed more than once.

    Args:
        x (np.ndarray): Dataset of entries for the neural network, shaped
            (size_dataset, number_keypoints, 2+). Works with either BODY25 or BODY18.
        y (np.ndarray): Labels, one per entry.
        augmentation_ratio (float, optional): Proportion of the dataset size to
            create as new entries. Defaults to 0.5.
        remove_specific_keypoints (list, optional): Indices of keypoints to zero
            out in every created entry. Defaults to None.
        remove_rand_keypoints_nbr (int, optional): Number of keypoints to zero out
            at random for each entry. Defaults to None.
        random_noise_standard_deviation (float, optional): Add per-coordinate
            Gaussian noise with this standard deviation. Defaults to None.
        scaling_factor (float, optional): Scale every keypoint by this fixed
            factor. Defaults to None.
        rotation_angle (float, optional): Rotate every keypoint by this fixed
            angle (degrees). Defaults to None.
        scaling_factor_standard_deviation (float, optional): Per-entry random
            scaling factor drawn from N(1, sd). Defaults to None.
        rotation_angle_standard_deviation (float, optional): Per-entry random
            rotation angle (degrees) drawn from N(0, sd). Defaults to None.

    Returns:
        tuple(np.ndarray, np.ndarray): the created entries and their labels.
    """

    size_dataset, number_keypoints, *_ = x.shape

    # Number of entries that will be created. Truncated to an int: the previous
    # float countdown compared with `!= 0` never terminated when
    # size_dataset * augmentation_ratio was fractional (e.g. 3 * 0.5).
    number_entries_to_create = int(size_dataset * augmentation_ratio)

    # Newly created entries and their labels
    new_x = []
    new_y = []

    # Shuffle entries and labels together
    shuffler = np.random.permutation(size_dataset)
    x = x[shuffler]
    y = y[shuffler]

    # The fixed-angle rotation matrix is loop-invariant: build it once
    # instead of once per keypoint.
    if rotation_angle is not None:
        theta = np.radians(rotation_angle)
        c, s = np.cos(theta), np.sin(theta)
        fixed_rotation_matrix = np.array(((c, -s), (s, c)))

    for entry_counter in range(number_entries_to_create):
        # If augmentation_ratio is more than 1, wrap around and go
        # through the dataset again.
        index_dataset = entry_counter % size_dataset

        entry = []

        # The random scaling factor used for this entry
        if scaling_factor_standard_deviation is not None:
            scaling_factor_random = np.random.normal(
                1, scaling_factor_standard_deviation
            )

        # The random rotation angle used for this entry
        if rotation_angle_standard_deviation is not None:
            rotation_angle_random = np.random.normal(
                0, rotation_angle_standard_deviation
            )

        # The list of keypoints that will be removed for this entry.
        # NOTE(review): sampled with replacement (as in the original design),
        # so duplicates may cause fewer than remove_rand_keypoints_nbr
        # distinct keypoints to actually be removed.
        if remove_rand_keypoints_nbr is not None:
            list_random_keypoints = [
                np.random.randint(0, number_keypoints)
                for _ in range(remove_rand_keypoints_nbr)
            ]

        # Go through the keypoints of the entry
        for i in range(number_keypoints):
            keypoint_x = x[index_dataset][i][0]
            keypoint_y = x[index_dataset][i][1]

            # Apply Gaussian noise
            if random_noise_standard_deviation is not None:
                keypoint_x += np.random.normal(0, random_noise_standard_deviation)
                keypoint_y += np.random.normal(0, random_noise_standard_deviation)

            # Apply the fixed and/or per-entry random scaling
            if scaling_factor is not None:
                keypoint_x *= scaling_factor
                keypoint_y *= scaling_factor
            if scaling_factor_standard_deviation is not None:
                keypoint_x *= scaling_factor_random
                keypoint_y *= scaling_factor_random

            # Apply the fixed and/or per-entry random rotation
            if rotation_angle is not None:
                keypoint = np.array([keypoint_x, keypoint_y])
                keypoint_x, keypoint_y = np.dot(fixed_rotation_matrix, keypoint)
            if rotation_angle_standard_deviation is not None:
                theta = np.radians(rotation_angle_random)
                c, s = np.cos(theta), np.sin(theta)
                rotation_matrix = np.array(((c, -s), (s, c)))
                keypoint = np.array([keypoint_x, keypoint_y])
                keypoint_x, keypoint_y = np.dot(rotation_matrix, keypoint)

            # Zero out removed keypoints (random and/or explicit)
            if remove_rand_keypoints_nbr is not None and i in list_random_keypoints:
                keypoint_x = 0.0
                keypoint_y = 0.0
            if remove_specific_keypoints is not None and i in remove_specific_keypoints:
                keypoint_x = 0.0
                keypoint_y = 0.0

            entry.append([keypoint_x, keypoint_y])

        new_x.append(entry)
        new_y.append(y[index_dataset])

    return (np.array(new_x), np.array(new_y))
