import os
import json
import hashlib
from pathlib import Path
from typing import Union, Optional, Dict, Any, BinaryIO
from urllib.parse import urlparse


def get_file_path(url: str, directory: str, extension: Optional[str] = None) -> str:
    """Generate a file path for a URL.

    The URL is hashed (MD5) to produce a stable filename, and the domain
    labels are reversed to build a directory tree (e.g. ``www.example.com``
    becomes ``com/example/www`` under *directory*). The directory is created
    if it does not already exist.

    Args:
        url: The URL to generate a file path for.
        directory: The directory to save the file in.
        extension: The file extension to use.

    Returns:
        The file path.
    """
    parsed = urlparse(url)

    # Reversed domain labels form the on-disk hierarchy under *directory*.
    target_dir = os.path.join(directory, *reversed(parsed.netloc.split(".")))
    os.makedirs(target_dir, exist_ok=True)

    # An explicit extension wins; otherwise take it from the URL path,
    # falling back to ".html" when neither yields one.
    suffix = extension or os.path.splitext(parsed.path)[1] or ".html"
    if not suffix.startswith("."):
        suffix = f".{suffix}"

    # MD5 of the full URL gives a stable filename per URL.
    digest = hashlib.md5(url.encode()).hexdigest()
    return os.path.join(target_dir, f"{digest}{suffix}")


def _write_text_or_bytes(path: str, payload: Union[bytes, str]) -> None:
    """Write *payload* to *path*: binary mode for bytes, UTF-8 text otherwise."""
    if isinstance(payload, bytes):
        with open(path, "wb") as file:
            file.write(payload)
    else:
        with open(path, "w", encoding="utf-8") as file:
            file.write(payload)


def save_content(content: Union[bytes, str, Dict[str, Any]], 
                url: str, 
                directory: str, 
                extension: Optional[str] = None,
                metadata: Optional[Dict[str, Any]] = None) -> str:
    """Save content to a file.

    Dict content (or any content accompanied by metadata) is stored as a
    JSON record at ``<path>.json``; raw bytes/str with metadata additionally
    get a ``<path>.data`` side-car file. Plain bytes/str without metadata are
    written directly to ``<path>``.

    Args:
        content: The content to save.
        url: The URL the content was fetched from.
        directory: The directory to save the file in.
        extension: The file extension to use.
        metadata: Additional metadata to save with the content.

    Returns:
        The path to the saved file (the base path; when a JSON record is
        written it lives at ``<path>.json``).
    """
    file_path = get_file_path(url, directory, extension)

    # Defensive: get_file_path already creates this directory, but keep the
    # makedirs so this function is safe on its own.
    os.makedirs(os.path.dirname(file_path), exist_ok=True)

    # NOTE(review): an explicit empty ``metadata={}`` is falsy and therefore
    # routes to the raw branch below — confirm this is intended before
    # tightening the check to ``metadata is not None``.
    if isinstance(content, dict) or metadata:
        data = {
            "url": url,
            "content": content if isinstance(content, dict) else None,
            "metadata": metadata or {}
        }

        if not isinstance(content, dict):
            # Raw payloads are stored next to the JSON record so the record
            # itself stays valid JSON; the record points at the side-car file.
            content_file_path = f"{file_path}.data"
            _write_text_or_bytes(content_file_path, content)
            data["content_file"] = content_file_path

        # Save the JSON metadata record.
        with open(f"{file_path}.json", "w", encoding="utf-8") as file:
            json.dump(data, file, ensure_ascii=False, indent=2)

        return file_path

    # Raw content with no metadata: write it straight to the target path.
    _write_text_or_bytes(file_path, content)
    return file_path


# File extensions treated as text when reloading content; everything else is
# read as binary. NOTE(review): this is a heuristic — a saved ``str`` whose
# path has an unlisted extension comes back as ``bytes``.
_TEXT_EXTENSIONS = (".txt", ".html", ".xml", ".json", ".css", ".js")


def _read_text_or_bytes(path: str) -> Union[bytes, str]:
    """Read *path* as UTF-8 text when its extension looks textual, else bytes."""
    if path.endswith(_TEXT_EXTENSIONS):
        with open(path, "r", encoding="utf-8") as file:
            return file.read()
    with open(path, "rb") as file:
        return file.read()


def load_content(url: str, directory: str, extension: Optional[str] = None) -> Optional[Union[bytes, str, Dict[str, Any]]]:
    """Load content from a file.

    Looks for a JSON metadata record first (written by ``save_content`` when
    dict content or metadata was supplied), then falls back to a raw content
    file.

    Args:
        url: The URL the content was fetched from.
        directory: The directory the file is saved in.
        extension: The file extension to use.

    Returns:
        For metadata-backed saves: a dict — either a ``{"content",
        "metadata"}`` pair when the payload lives in a side-car ``.data``
        file, or the stored JSON record itself. For raw saves: the file
        contents as ``str`` or ``bytes``. ``None`` when nothing was saved
        for this URL.
    """
    file_path = get_file_path(url, directory, extension)

    # Check if the JSON metadata record exists.
    json_path = f"{file_path}.json"
    if os.path.exists(json_path):
        with open(json_path, "r", encoding="utf-8") as file:
            data = json.load(file)

        # Payload stored in a separate side-car file: load it and pair it
        # with the stored metadata.
        content_file_path = data.get("content_file")
        if content_file_path and os.path.exists(content_file_path):
            return {
                "content": _read_text_or_bytes(content_file_path),
                "metadata": data.get("metadata", {})
            }

        # Dict content and/or bare metadata is stored directly in the JSON
        # record; return the whole record. (The original code had two
        # redundant branches here that both returned ``data``.)
        return data

    # Fall back to a raw content file with no metadata.
    if os.path.exists(file_path):
        return _read_text_or_bytes(file_path)

    # File not found.
    return None


def list_saved_urls(directory: str, domain: Optional[str] = None) -> Dict[str, str]:
    """List all saved URLs in the directory.

    Args:
        directory: The directory to search.
        domain: Optional domain to filter by.

    Returns:
        Dictionary mapping URLs to file paths.
    """
    # Narrow the walk to the reversed-domain subdirectory when filtering.
    if domain:
        root_dir = os.path.join(directory, *reversed(domain.split(".")))
    else:
        root_dir = directory

    urls: Dict[str, str] = {}
    if not os.path.exists(root_dir):
        return urls

    # Every *.json file under the tree is a potential metadata record; keep
    # those that carry a "url" key.
    for current_dir, _, filenames in os.walk(root_dir):
        for name in filenames:
            if not name.endswith(".json"):
                continue
            record_path = os.path.join(current_dir, name)
            with open(record_path, "r", encoding="utf-8") as handle:
                record = json.load(handle)
            if "url" in record:
                urls[record["url"]] = record_path

    return urls