from typing import Dict
import ee
import os
from google.cloud import storage

from common.logger import logger


def gee_upload_image(
    gee_id: str, gcs_filename: str, properties: Dict, bands=None, pyramidingPolicy=None
):
    """Start an ingestion task that uploads a GeoTIFF from Google Cloud
    Storage into Google Earth Engine.

    Parameters
    ----------
    gee_id: str
        destination asset path (/users/username/folder/asset_name)
    gcs_filename: str
        path to the file on GCS (gs://bucket_name/path_to_blob)
    properties: Dict
        asset properties attached to the ingested image
    bands: list, optional
        band definitions, e.g. [{"id": "band_name"}, ...]
    pyramidingPolicy: str, optional
        pyramiding policy for the asset, e.g. 'MODE'

    Returns
    -------
    dict
        response from ``ee.data.startIngestion`` describing the task.
    """
    params = {
        "id": gee_id,
        "tilesets": [{"sources": [{"primaryPath": gcs_filename}]}],
        "properties": properties,
    }

    if pyramidingPolicy is not None:
        params["pyramidingPolicy"] = pyramidingPolicy

    if bands is not None:
        params["bands"] = bands

    # A fresh task id is required for every ingestion request.
    task_id = ee.data.newTaskId()[0]
    return ee.data.startIngestion(task_id, params)


def list_blobs(bucket_name):
    """Return the names of all blobs in *bucket_name*."""
    client = storage.Client()
    # Note: Client.list_blobs requires google-cloud-storage >= 1.17.0.
    return [blob.name for blob in client.list_blobs(bucket_name)]


def download_blob(bucket_name, destination):
    """Download every blob whose name contains both "download" and "tif"
    from the bucket into a local directory.

    Parameters
    ----------
    bucket_name: str
        name of the GCS bucket to read from
    destination: str
        local directory receiving the files (created if missing)
    """
    os.makedirs(destination, exist_ok=True)
    storage_client = storage.Client()
    bucket = storage_client.bucket(bucket_name)

    for item in list_blobs(bucket_name):
        # Best-effort substring filter: matches .tif and .tiff blobs that
        # live under a "download" prefix.
        if "download" in item and "tif" in item:
            # Bucket.blob builds a client-side handle without fetching any
            # content, which is all we need before downloading.
            blob = bucket.blob(item)
            local_name = item.replace("download/", "")
            local_path = os.path.join(destination, local_name)
            # The blob name may still contain sub-directories; make sure
            # they exist locally before writing the file.
            os.makedirs(os.path.dirname(local_path) or ".", exist_ok=True)
            blob.download_to_filename(local_path)

            # Lazy %-args skip formatting when INFO logging is disabled.
            logger.info(
                "Downloaded storage object %s from bucket %s to local file %s.",
                local_name,
                bucket_name,
                local_path,
            )


def list_blobs(bucket_name):
    """Lists all the blobs in the bucket."""
    # NOTE(review): this is an exact duplicate of the list_blobs defined
    # earlier in this file; at import time this copy shadows the first.
    # Behavior is identical, but one of the two should be deleted.
    # bucket_name = "your-bucket-name"
    name_list = []
    storage_client = storage.Client()

    # Note: Client.list_blobs requires at least package version 1.17.0.
    blobs = storage_client.list_blobs(bucket_name)
    for blob in blobs:
        name_list.append(blob.name)
    return name_list


def change_name(bucket_name, blob_name, new_name):
    """Rename a blob within a bucket.

    Parameters
    ----------
    bucket_name: str
        name of the GCS bucket
    blob_name: str
        current name of the blob
    new_name: str
        new name for the blob

    Returns
    -------
    google.cloud.storage.Blob
        the renamed blob (the original bound it to an unused local and
        returned None; returning it is backward-compatible).
    """
    storage_client = storage.Client()
    bucket = storage_client.bucket(bucket_name)
    blob = bucket.blob(blob_name)
    return bucket.rename_blob(blob, new_name)


def upload_blob(bucket_name, source_file_name, destination_blob_name):
    """Upload a local file to the bucket.

    Uses ``if_generation_match=0``, so the upload only succeeds when the
    destination object does not already exist (prevents overwriting).
    """
    client = storage.Client()
    target = client.bucket(bucket_name).blob(destination_blob_name)

    # Generation 0 means "object must not exist yet".
    target.upload_from_filename(source_file_name, if_generation_match=0)

    logger.info(f"File {source_file_name} uploaded to {destination_blob_name}.")


def delete_blob(bucket_name):
    """Delete blobs whose names contain "tif" under a "download" or
    "upload" prefix.

    Parameters
    ----------
    bucket_name: str
        name of the GCS bucket to clean up
    """
    storage_client = storage.Client()
    bucket = storage_client.bucket(bucket_name)

    for item in list_blobs(bucket_name):
        # BUG FIX: the original condition was
        #     "download" or "upload " in item and "tif" in item
        # which is always truthy ("download" is a non-empty string literal),
        # so *every* blob in the bucket was deleted. Parenthesized
        # correctly, and the trailing-space typo in "upload " removed.
        if ("download" in item or "upload" in item) and "tif" in item:
            blob = bucket.blob(item)
            # Fetch current metadata so the delete is conditional on the
            # generation just observed (guards against concurrent writes).
            blob.reload()
            blob.delete(if_generation_match=blob.generation)

            # logger.info for consistency with the rest of this module
            # (original used a bare print).
            logger.info(f"Blob {item} deleted.")