#!/usr/bin/env python3

import os
import sys
import threading
import signal
import resource
import time
from pathlib import Path
from concurrent.futures import ThreadPoolExecutor, as_completed
from collections import defaultdict

# Safety limits - greatly relaxed due to streaming approach.
# NOTE: these are module-level and are temporarily overridden (then restored)
# by calculate_pseudo_dir_size() when the target is under /proc.
MAX_MEMORY_MB = 1024   # Generous memory limit for safety net
MAX_FILES = 10000      # High file count limit
MAX_WORKERS = 4        # More workers with streaming
TIMEOUT_SECONDS = 60   # Longer timeout for large directories

# Dangerous paths to skip.  Matched by string prefix in is_safe_path() and
# pruned from the os.walk() traversal in calculate_pseudo_dir_size().
DANGEROUS_PATHS = [
    '/proc/kcore',
    '/proc/kmsg',
    '/proc/sched_debug',
    '/sys/kernel/debug',
    '/sys/fs/selinux',
    '/proc/vmallocinfo',
    '/proc/iomem',
    '/proc/kallsyms',
    '/proc/ksyms',
    '/proc/modules',
    '/proc/slabinfo'
]

# Dangerous file patterns - memory and virtual content files.
# Glob-like notation: in is_safe_path() each '*' is rewritten to '[0-9]*'
# and the result is applied with re.match(), so '*' effectively means
# "a run of digits" (a PID or TID component of the path).
DANGEROUS_PATTERNS = [
    '/proc/[0-9]*/mem',
    '/proc/[0-9]*/map_files',
    '/proc/[0-9]*/pagemap',
    '/proc/[0-9]*/smaps',
    '/proc/[0-9]*/numa_maps',
    '/proc/[0-9]*/clear_refs',
    '/proc/[0-9]*/syscall',
    '/proc/[0-9]*/auxv',
    '/proc/[0-9]*/environ',
    '/proc/[0-9]*/exe',
    '/proc/[0-9]*/cwd',
    '/proc/[0-9]*/root',
    '/proc/[0-9]*/fd',
    '/proc/[0-9]*/fdinfo',
    '/proc/[0-9]*/attr',
    '/proc/[0-9]*/task/[0-9]*/mem',
    '/proc/[0-9]*/task/[0-9]*/pagemap',
    '/proc/[0-9]*/task/[0-9]*/smaps',
    '/proc/[0-9]*/task/[0-9]*/numa_maps',
    '/proc/[0-9]*/task/[0-9]*/clear_refs',
    '/proc/[0-9]*/task/[0-9]*/syscall',
    '/proc/[0-9]*/task/[0-9]*/auxv',
    '/proc/[0-9]*/task/[0-9]*/environ',
    '/proc/[0-9]*/task/[0-9]*/exe',
    '/proc/[0-9]*/task/[0-9]*/cwd',
    '/proc/[0-9]*/task/[0-9]*/root',
    '/proc/[0-9]*/task/[0-9]*/fd',
    '/proc/[0-9]*/task/[0-9]*/fdinfo',
    '/proc/[0-9]*/task/[0-9]*/attr',
    '/proc/[0-9]*/maps',
    '/proc/[0-9]*/statm',
    '/proc/[0-9]*/io',
    '/proc/[0-9]*/net',
    '/proc/[0-9]*/ns',
    '/proc/[0-9]*/wchan',
    '/proc/[0-9]*/stack',
    '/proc/[0-9]*/status',
    '/proc/[0-9]*/stat',
    '/proc/[0-9]*/cmdline',
    '/proc/[0-9]*/comm',
    '/proc/[0-9]*/cgroup',
    '/proc/[0-9]*/oom_score',
    '/proc/[0-9]*/oom_adj',
    '/proc/[0-9]*/oom_score_adj',
    '/proc/[0-9]*/task/[0-9]*/maps',
    '/proc/[0-9]*/task/[0-9]*/statm',
    '/proc/[0-9]*/task/[0-9]*/io',
    '/proc/[0-9]*/task/[0-9]*/net',
    '/proc/[0-9]*/task/[0-9]*/ns',
    '/proc/[0-9]*/task/[0-9]*/wchan',
    '/proc/[0-9]*/task/[0-9]*/stack',
    '/proc/[0-9]*/task/[0-9]*/status',
    '/proc/[0-9]*/task/[0-9]*/stat',
    '/proc/[0-9]*/task/[0-9]*/cmdline',
    '/proc/[0-9]*/task/[0-9]*/comm',
    '/proc/[0-9]*/task/[0-9]*/cgroup',
    '/proc/[0-9]*/task/[0-9]*/oom_score',
    '/proc/[0-9]*/task/[0-9]*/oom_adj',
    '/proc/[0-9]*/task/[0-9]*/oom_score_adj'
]

# Unit ladder used by format_size(), ordered smallest to largest.
_SIZE_FORMATS = ['B', 'KB', 'MB', 'GB', 'TB']

def setup_memory_limit():
    """Cap this process's virtual address space at MAX_MEMORY_MB.

    Best-effort only: platforms that reject the rlimit are silently
    tolerated, since the cap is just a safety net.
    """
    limit_bytes = MAX_MEMORY_MB * 1024 * 1024  # MB -> bytes
    try:
        resource.setrlimit(resource.RLIMIT_AS, (limit_bytes, limit_bytes))
    except (ValueError, OSError):
        pass  # could not apply the limit; continue without it

def signal_handler(signum, frame):
    """SIGALRM handler: report the timeout on stderr and exit with status 1."""
    message = f"\nOperation timed out after {TIMEOUT_SECONDS} seconds"
    print(message, file=sys.stderr)
    sys.exit(1)

def is_safe_path(filepath):
    """Return True if *filepath* looks safe to read as a regular file.

    Rejects, in order: blacklisted path prefixes (DANGEROUS_PATHS),
    per-process pseudo-file patterns (DANGEROUS_PATTERNS), symlinks,
    anything that is not a plain regular file (devices, sockets, FIFOs,
    directories), and files larger than 5 MB.
    """
    import re
    import stat

    try:
        # Blacklisted prefixes (e.g. /proc/kcore, /sys/kernel/debug).
        for dangerous in DANGEROUS_PATHS:
            if filepath.startswith(dangerous):
                return False

        # Glob-like blacklist where '*' stands for a run of digits
        # (PID/TID).  The re module caches compiled patterns, so this
        # loop does not recompile on every call.
        for pattern in DANGEROUS_PATTERNS:
            if re.match(pattern.replace('*', '[0-9]*'), filepath):
                return False

        # Never follow symlinks (e.g. /proc/<pid>/exe, /proc/<pid>/cwd).
        if os.path.islink(filepath):
            return False

        # Only plain regular files are acceptable.  The previous bitmask
        # tests (mode & 0o20000, & 0o60000, & 0o14000) treated the
        # S_IFMT *field* as independent flags, which excluded
        # directories only by accidental bit overlap; S_ISREG checks
        # the file-type field correctly and covers every non-regular
        # type at once.
        try:
            mode = os.stat(filepath).st_mode
            if not stat.S_ISREG(mode):
                return False
        except OSError:
            return False

        # Skip very large files up front.  Most /proc pseudo-files
        # report st_size == 0 and therefore pass this check.
        try:
            if os.path.getsize(filepath) > 5 * 1024 * 1024:
                return False
        except OSError:
            return False

        return True
    except (OSError, PermissionError):
        return False

def get_pseudo_file_size(filepath):
    """Return the number of bytes actually readable from *filepath*.

    /proc pseudo-files usually report st_size == 0, so the only way to
    measure them is to stream the content in 64 KB chunks, counting and
    discarding bytes as they arrive (memory stays flat).

    Returns 0 for unsafe, unreadable, or non-regular files.
    """
    try:
        if not os.path.isfile(filepath) or not is_safe_path(filepath):
            return 0

        size = 0
        chunks_read = 0
        chunk_size = 65536                   # 64KB chunks for efficiency
        emergency_limit = 100 * 1024 * 1024  # 100MB absolute limit per file

        with open(filepath, 'rb') as f:
            # iter(callable, sentinel) yields chunks until EOF (b'').
            for chunk in iter(lambda: f.read(chunk_size), b''):
                size += len(chunk)
                chunks_read += 1

                # Hard per-file cap so a bottomless pseudo-file cannot
                # stall the whole scan.
                if size > emergency_limit:
                    print(f"Emergency limit reached for file: {filepath} ({size} bytes)", file=sys.stderr)
                    break

                # Periodic memory probe.  The previous trigger,
                # `size % (1024*1024) == 0`, stopped firing forever after
                # a single short read (common on /proc); counting chunks
                # is immune to short reads.
                if chunks_read % 16 == 0:  # roughly every 1 MB read
                    try:
                        # NOTE(review): on Linux ru_maxrss is in KB, which
                        # is what the comparison below assumes — on macOS
                        # it is in bytes; confirm if ever run there.
                        current_memory = resource.getrusage(resource.RUSAGE_SELF).ru_maxrss
                        if current_memory > MAX_MEMORY_MB * 1024:  # Convert to KB
                            print(f"Memory limit reached, stopping file read", file=sys.stderr)
                            break
                    except Exception:
                        # Probe failure is non-fatal; keep streaming.
                        # (Was a bare `except:`, which also swallowed
                        # SystemExit/KeyboardInterrupt.)
                        pass

        return size
    except (IOError, OSError, PermissionError, MemoryError, TimeoutError):
        # Skip files we can't read or memory errors
        return 0

def process_file_batch(file_batch, results, lock):
    """Measure every file in *file_batch* and fold the totals into *results*.

    The shared dict *results* ('total_size', 'file_count') is updated
    exactly once per batch, under *lock*, to keep contention low.
    """
    batch_total = sum(get_pseudo_file_size(path) for path in file_batch)
    batch_files = len(file_batch)

    with lock:
        results['total_size'] += batch_total
        results['file_count'] += batch_files

def _collect_safe_files(directory):
    """Walk *directory* and collect paths that pass is_safe_path().

    Traversal is capped at 2000 directories and MAX_FILES files, and
    blacklisted subtrees are pruned before descent.  Returns the list of
    paths, or None if collection was interrupted or failed.
    """
    file_paths = []
    processed_dirs = 0
    try:
        for root, dirs, files in os.walk(directory):
            processed_dirs += 1

            # Safety check: limit directory traversal (much higher with streaming)
            if processed_dirs > 2000:
                print(f"Directory traversal limit reached (2000 dirs)", file=sys.stderr)
                break

            # Prune blacklisted subtrees in place so os.walk never descends.
            dirs[:] = [d for d in dirs if not any(
                os.path.join(root, d).startswith(dangerous) for dangerous in DANGEROUS_PATHS
            )]

            for file in files:
                if len(file_paths) >= MAX_FILES:
                    print(f"File count limit reached ({MAX_FILES} files)", file=sys.stderr)
                    break

                filepath = os.path.join(root, file)
                if is_safe_path(filepath):
                    file_paths.append(filepath)

            if len(file_paths) >= MAX_FILES:
                break

    except KeyboardInterrupt:
        print("\nInterrupted by user during file collection", file=sys.stderr)
        return None
    except Exception as e:
        print(f"Error collecting files: {e}", file=sys.stderr)
        return None
    return file_paths


def _measure_files(file_paths, max_workers):
    """Stream every path in *file_paths* through a thread pool.

    Files are processed in batches of 50; progress is reported roughly
    every 100 files.  Returns (total_size, file_count), reflecting
    whatever completed even if interrupted.
    """
    batch_size = 50  # Larger batch size with streaming
    file_batches = [file_paths[i:i + batch_size] for i in range(0, len(file_paths), batch_size)]

    processed_files = 0
    lock = threading.Lock()
    results = {'total_size': 0, 'file_count': 0}

    try:
        with ThreadPoolExecutor(max_workers=max_workers) as executor:
            future_to_batch = {
                executor.submit(process_file_batch, batch, results, lock): batch
                for batch in file_batches
            }

            for future in as_completed(future_to_batch, timeout=TIMEOUT_SECONDS):
                try:
                    future.result()  # re-raises any worker exception
                    processed_files += len(future_to_batch[future])

                    # Show progress every 100 files
                    if processed_files % 100 == 0:
                        with lock:
                            current_size = results['total_size']
                            current_count = results['file_count']
                        print(f"Processed {current_count} files, current total: {current_size} bytes", file=sys.stderr)

                except Exception as e:
                    print(f"Error processing batch: {e}", file=sys.stderr)
                    continue

    except KeyboardInterrupt:
        print("\nInterrupted by user", file=sys.stderr)
    except Exception as e:
        print(f"Error in parallel processing: {e}", file=sys.stderr)

    return results['total_size'], results['file_count']


def calculate_pseudo_dir_size(directory, max_workers=MAX_WORKERS):
    """Calculate total data size of all files in a pseudo-filesystem directory.

    Args:
        directory: root of the (pseudo-)filesystem tree to measure.
        max_workers: thread pool size; overridden to 2 for /proc.

    Returns:
        (total_size_bytes, file_count); (0, 0) on error or interrupt.

    Raises:
        ValueError: if *directory* is not a directory.
    """
    if not os.path.isdir(directory):
        raise ValueError(f"{directory} is not a valid directory")

    # Tighter limits for /proc.  Save the module-level defaults so they can
    # be restored on EVERY exit path — previously the restore lived only in
    # the processing loop's finally, so an early return during file
    # collection leaked the overridden globals.
    global MAX_FILES, MAX_MEMORY_MB, MAX_WORKERS
    original_limits = (MAX_FILES, MAX_MEMORY_MB, MAX_WORKERS)

    if directory.startswith('/proc'):
        print(f"Using relaxed limits for /proc directory with streaming", file=sys.stderr)
        MAX_FILES = 5000     # tighter cap: /proc trees are huge and volatile
        MAX_MEMORY_MB = 512  # tighter memory ceiling for kernel-backed reads
        max_workers = 2      # fewer threads to limit concurrent kernel reads

    try:
        file_paths = _collect_safe_files(directory)
        if not file_paths:  # None (error/interrupt) or genuinely empty
            return 0, 0

        print(f"Found {len(file_paths)} safe files to process", file=sys.stderr)
        return _measure_files(file_paths, max_workers)
    finally:
        # Restore original limits if we changed them for /proc
        if directory.startswith('/proc'):
            MAX_FILES, MAX_MEMORY_MB, MAX_WORKERS = original_limits

def format_size(size_bytes):
    """Render a raw byte count as a human-readable string (e.g. '1.5 KB').

    Walks the same B/KB/MB/GB/TB ladder as the module's _SIZE_FORMATS;
    anything past a thousand TB is still labelled TB.
    """
    if size_bytes == 0:
        return "0 B"

    value = size_bytes
    steps = 0
    while steps < 5 and value >= 1024.0:
        value /= 1024.0
        steps += 1

    unit = ("B", "KB", "MB", "GB", "TB")[steps] if steps < 5 else "TB"
    return f"{value:.1f} {unit}"

def _main():
    """Command-line entry point; returns the process exit status."""
    # Arm the safety net before any work: hard memory cap plus an
    # alarm-based wall-clock timeout.
    setup_memory_limit()
    signal.signal(signal.SIGALRM, signal_handler)
    signal.alarm(TIMEOUT_SECONDS)

    if len(sys.argv) != 2:
        print(f"Usage: {sys.argv[0]} <pseudo-filesystem-directory>", file=sys.stderr)
        print("Example: pseudo_du.py /proc/", file=sys.stderr)
        print(f"Safety limits: max {MAX_MEMORY_MB}MB memory, {MAX_FILES} files, {MAX_WORKERS} workers", file=sys.stderr)
        return 1

    target_dir = sys.argv[1]
    if not os.path.isdir(target_dir):
        print(f"Error: {target_dir} is not a valid directory", file=sys.stderr)
        return 1

    print(f"Calculating data size for {target_dir}...", file=sys.stderr)
    print(f"Safety limits: max {MAX_MEMORY_MB}MB memory, {MAX_FILES} files, {MAX_WORKERS} workers", file=sys.stderr)

    total_size, file_count = calculate_pseudo_dir_size(target_dir)

    # Disarm the timeout once the traversal has finished.
    signal.alarm(0)

    print(f"\nResults for {target_dir}:")
    print(f"Total files processed: {file_count}")
    print(f"Total data size: {format_size(total_size)}")
    print(f"Raw bytes: {total_size}")
    return 0


if __name__ == "__main__":
    sys.exit(_main())