import os
import zipfile
import json
from urllib import request
from urllib.parse import urlparse, unquote
import logging
logger = logging.getLogger(__name__)


class FileInfo:
    """Describe a downloadable archive plus optional caller-supplied metadata.

    Attributes:
        archive_url (str): URL pointing at the archive file.
        meta (Any, optional): Arbitrary metadata attached by the caller;
            defaults to None.
    """

    def __init__(self, archive_url, meta=None):
        """Record the archive URL and any accompanying metadata.

        Args:
            archive_url (str): URL pointing at the archive file.
            meta (Any, optional): Arbitrary metadata to carry alongside the
                URL. Defaults to None.
        """
        self.archive_url = archive_url
        self.meta = meta

    def get_archive_url(self):
        """Return the archive URL (str) this object was created with."""
        return self.archive_url

    def get_meta(self):
        """Return the attached metadata, or None if none was supplied."""
        return self.meta


class FileOperatorHelper:
    """Download the zip archive described by a FileInfo, extract it, and write
    a JSON META file recording its origin.

    Both the download and extraction happen under base directories that
    default to the class attributes below but can be overridden per instance.
    Downloads and extractions are cached: existing files/directories are
    reused rather than re-fetched.
    """

    # Default base directories, used when the constructor gets no overrides.
    extract_path = '/tmp/sh-computing/operators/'
    download_path = '/tmp/sh-computing/operators_archive/'

    # NOTE: FileInfo is declared elsewhere in this module; the string
    # annotation avoids evaluating the name at class-definition time.
    def __init__(self, file_info: 'FileInfo', extract_path=None, download_path=None):
        """
        Args:
            file_info (FileInfo): Describes the archive to fetch.
            extract_path (str, optional): Base directory for extracted
                content. Falls back to the class default when falsy.
            download_path (str, optional): Base directory for downloaded
                archives. Falls back to the class default when falsy.
        """
        self.file_info = file_info
        self.base_extract_path = extract_path or FileOperatorHelper.extract_path
        self.base_download_path = download_path or FileOperatorHelper.download_path

        logger.debug('FileOperatorHelper init with extract path: %s, download path: %s' % (self.base_extract_path, self.base_download_path))

        # Ensure both base directories exist up front.
        os.makedirs(self.base_download_path, exist_ok=True)
        os.makedirs(self.base_extract_path, exist_ok=True)

    def _clean_filename(self, url):
        """Return the percent-decoded basename of *url*'s path component.

        Returns an empty string when the URL path ends with '/'.
        """
        parsed_url = urlparse(url)
        return os.path.basename(unquote(parsed_url.path))

    def _strip_zip_suffix(self, zip_name):
        """Drop a trailing '.zip' suffix (and only the suffix) from *zip_name*.

        Fixes the former ``zip_name.replace('.zip', '')``, which also removed
        inner occurrences and mangled names like 'a.zip.v2.zip'.
        """
        return zip_name[:-len('.zip')] if zip_name.endswith('.zip') else zip_name

    def download_and_extract(self) -> str:
        """Download the archive (unless cached), extract it (unless already
        extracted), write a META file, and return the extraction directory
        name (relative to the base extract path).

        Raises:
            ValueError: If the archive URL yields no usable file name.
                (ValueError subclasses Exception, so existing callers that
                caught Exception still match.)
        """
        zip_name = self._clean_filename(self.file_info.get_archive_url())
        if not zip_name:
            raise ValueError('Invalid archive zip name: %s' % zip_name)

        download_path = os.path.join(self.base_download_path, zip_name)

        if not os.path.exists(download_path):
            self._download_file(self.file_info.get_archive_url(), download_path)
        else:
            logger.info('File {%s} already exists, no need to download' % download_path)

        extract_directory = self._strip_zip_suffix(zip_name)
        extract_path = os.path.join(self.base_extract_path, extract_directory)

        if not os.path.exists(extract_path):
            os.makedirs(extract_path)
            self._extract_file(download_path, extract_path)
        else:
            logger.info('Directory {%s} already exists, no need to extract' % extract_path)

        # META is (re)written on every call so it reflects the latest FileInfo.
        meta_file_path = os.path.join(extract_path, 'META')
        self._create_meta_file(meta_file_path)

        return extract_directory

    def _download_file(self, url, save_path):
        """Fetch *url* to *save_path* via urllib."""
        logger.debug(f'Prepare to download {url} to {save_path}')
        request.urlretrieve(url, save_path)
        logger.info(f'Downloaded {url} to {save_path}')

    def _extract_file(self, zip_path, extract_to):
        """Extract every member of the zip at *zip_path* into *extract_to*."""
        # NOTE(review): extractall trusts member names; a hostile archive can
        # place files outside extract_to on some Python versions — confirm
        # archives only come from trusted sources.
        with zipfile.ZipFile(zip_path, 'r') as zip_ref:
            zip_ref.extractall(extract_to)
        logger.info(f'Extracted {zip_path} to {extract_to}')

    def _create_meta_file(self, meta_path):
        """Write a JSON META file recording the archive URL and metadata."""
        with open(meta_path, 'w') as meta_file:
            json.dump({
                'archive_url': self.file_info.get_archive_url(),
                'meta': self.file_info.get_meta()
            }, meta_file)
        logger.info(f'Created META file at {meta_path}')

