"""This module is for getting and operating on information from AWS buckets."""
import math

import boto3
from cki_lib.logger import get_logger

LOGGER = get_logger(__name__)


# pylint: disable=R0903
class S3BucketObjects:
    """Thin wrapper around the boto3 client for AWS bucket queries."""

    def __init__(self, client='s3'):
        """Create a boto3 client for the given service name (defaults to S3)."""
        self.bucket = boto3.Session().client(client)

    def get_bucket_size(self, bucket: str, prefix: str = '') -> int:
        """
        Get bucket size.

        :param bucket: name of the bucket
        :param prefix: path to the files
        :return: bucket size
        """
        # list_objects_v2 caps each response at 1000 keys, so paginate
        # to make sure every object under the prefix is counted.
        total = 0
        paginator = self.bucket.get_paginator('list_objects_v2')
        for page in paginator.paginate(Bucket=bucket, Prefix=prefix):
            for entry in page.get('Contents', []):
                total += entry['Size']
        return total


def convert_size(size_bytes: int) -> str:
    """
    Convert bytes into human-readable format.

    :param size_bytes: number of bytes
    :return: bytes into human-readable format
    """
    if not size_bytes:
        return "0B"
    units = ("B", "KB", "MB", "GB", "TB", "PB", "EB", "ZB", "YB")
    # Decimal (base-1000, SI) units; pick the exponent via a log instead of
    # repeated division so very large counts select the right unit directly.
    exponent = int(math.floor(math.log(size_bytes, 1000)))
    scaled = round(size_bytes / math.pow(1000, exponent), 2)
    return f"{scaled} {units[exponent]}"


def delete_bucket_object(s3_config: dict, folder_path: str) -> None:
    """
    Delete all objects in a specified folder in each bucket listed in the S3 configuration.

    :param s3_config: A dictionary containing the S3 configuration.
                      It should have a 'buckets' key containing a list of dictionaries,
                      each with a 'name' key specifying the name of a bucket.
    :param folder_path: The full path of the folder.
                        Objects in the folder in each bucket will be deleted.

    :raises AssertionError: if the S3 configuration is malformed.
    :return: None
    """
    # Explicit raise instead of a bare `assert` so the validation is not
    # stripped when Python runs with the -O flag; the exception type callers
    # may catch (AssertionError) is unchanged.
    valid_config = "buckets" in s3_config and s3_config['buckets'] and all(
        isinstance(bucket, dict) and "name" in bucket
        for bucket in s3_config["buckets"]
    )
    if not valid_config:
        raise AssertionError("Invalid S3 config")

    s3_resource = boto3.resource('s3')
    for bucket in s3_config['buckets']:
        bucket_resource = s3_resource.Bucket(bucket['name'])
        objects_to_delete = []

        for obj in bucket_resource.objects.filter(Prefix=folder_path):
            objects_to_delete.append({'Key': obj.key})
            LOGGER.info("Deleting object: %s in bucket: %s", obj.key, bucket['name'])

        if not objects_to_delete:
            LOGGER.info("No files in bucket %s for folder path %s", bucket['name'], folder_path)
            continue

        # The S3 DeleteObjects API accepts at most 1000 keys per request;
        # send the deletions in batches so folders with more than 1000
        # objects do not fail with a MalformedXML error.
        for start in range(0, len(objects_to_delete), 1000):
            bucket_resource.delete_objects(
                Delete={'Objects': objects_to_delete[start:start + 1000]})


def get_bucket_url_size(pipeline_ids: list, s3_config: dict) -> dict:
    """
    For the provided bucket regex names gets content and size for each bucket.

    :param pipeline_ids: list of pipeline ids
    :param s3_config: dict of auth parameters
    :return: bucket names, size, total size of the buckets
    """
    s3_objects = S3BucketObjects()
    grand_total = 0
    entries = []
    # One entry per (bucket, pipeline) pair; sizes are summed across all pairs.
    for bucket in s3_config['buckets']:
        name = bucket['name']
        for pipeline_id in pipeline_ids:
            prefix = f'trusted-artifacts/{pipeline_id}/'
            size = s3_objects.get_bucket_size(bucket=name, prefix=prefix)
            grand_total += size
            entries.append({
                'url': f"https://s3.amazonaws.com/{name}/index.html?prefix={prefix}",
                'size': convert_size(size),
                'bucket_name': name,
            })
    return {'buckets': entries, 'total_size': grand_total}
