#!/usr/bin/env python3
"""
Archive Audit System

A comprehensive solution for archiving and auditing both structured and unstructured data
with permission controls.

Features:
1. Archive process for structured and unstructured data
2. Audit process with SQL search capabilities
3. Permission control system with owner and auditor roles
"""

import os
import json
import sqlite3
import datetime
import hashlib
import shutil
import argparse
import pandas as pd
from tabulate import tabulate
from pathlib import Path
from typing import Dict, List, Any, Union, Optional


class PermissionManager:
    """Manages authentication and per-database authorization for archive users.

    Permissions persist to a JSON file with two sections:

    * ``users``: username -> ``{role, password_hash, databases}``, where
      ``databases`` maps a database name (or the ``"*"`` wildcard) to a
      list of allowed actions.
    * ``roles``: role name -> list of actions granted to every user of
      that role, regardless of database.
    """

    def __init__(self, permission_file: str = "permissions.json"):
        self.permission_file = permission_file
        self.permissions = self._load_permissions()

    def _load_permissions(self) -> Dict:
        """Load permissions from disk, bootstrapping defaults on first run."""
        if os.path.exists(self.permission_file):
            with open(self.permission_file, 'r') as f:
                return json.load(f)

        # First run: create a default admin account.
        # SECURITY: the default admin/admin credentials must be changed
        # before real use.
        default_permissions = {
            "users": {
                "admin": {
                    "role": "admin",
                    "password_hash": self._hash_password("admin"),
                    "databases": {"*": ["read", "write", "archive", "audit"]}
                }
            },
            "roles": {
                "admin": ["read", "write", "archive", "audit"],
                "owner": ["read", "write", "archive"],
                "auditor": ["read", "audit"]
            }
        }
        self._save_permissions(default_permissions)
        return default_permissions

    def _save_permissions(self, permissions: Optional[Dict] = None) -> None:
        """Write *permissions* (default: the in-memory table) to the JSON file."""
        if permissions is None:
            permissions = self.permissions

        with open(self.permission_file, 'w') as f:
            json.dump(permissions, f, indent=4)

    def _hash_password(self, password: str) -> str:
        """Return the SHA-256 hex digest of *password*.

        NOTE(review): an unsalted fast hash is weak for real credential
        storage; a dedicated KDF (e.g. ``hashlib.scrypt``) would be safer,
        but changing it would invalidate existing permission files.
        """
        return hashlib.sha256(password.encode()).hexdigest()

    def authenticate(self, username: str, password: str) -> bool:
        """Return True when *username* exists and *password* matches its hash."""
        user = self.permissions["users"].get(username)
        if user is None:
            return False
        return user["password_hash"] == self._hash_password(password)

    def add_user(self, username: str, password: str, role: str,
                 databases: Optional[Dict] = None) -> bool:
        """Register a new user and persist the change.

        Returns False (without modifying anything) when the username is
        already taken or the role is unknown.
        """
        if username in self.permissions["users"]:
            return False
        if role not in self.permissions["roles"]:
            return False

        self.permissions["users"][username] = {
            "role": role,
            "password_hash": self._hash_password(password),
            "databases": {} if databases is None else databases,
        }
        self._save_permissions()
        return True

    def check_permission(self, username: str, database: str, action: str) -> bool:
        """Return True when *username* may perform *action* on *database*.

        Checked in order: unknown user -> False; admin role -> True;
        database-specific grant; ``"*"`` wildcard grant; role-wide grant.
        Note that role-wide grants are not scoped to any database.
        """
        if username not in self.permissions["users"]:
            return False

        user = self.permissions["users"][username]

        # Admins bypass all other checks.
        if user["role"] == "admin":
            return True

        # Explicit per-database grant.
        if database in user["databases"] and action in user["databases"][database]:
            return True

        # Wildcard grant covering every database.
        if "*" in user["databases"] and action in user["databases"]["*"]:
            return True

        # Role-based grant (applies to all databases).
        if user["role"] in self.permissions["roles"] and action in self.permissions["roles"][user["role"]]:
            return True

        return False


class StructuredDataArchiver:
    """Handles archiving and retrieval of structured data (SQLite databases).

    Each archive is a timestamped copy of the source database file; an
    index database records who archived what, when, and the per-table
    column/row-count metadata captured at archive time.
    """

    def __init__(self, archive_dir: str = "archives/structured"):
        self.archive_dir = archive_dir
        os.makedirs(self.archive_dir, exist_ok=True)

        # The index database lives alongside the archived copies.
        self.index_db = os.path.join(self.archive_dir, "archive_index.db")
        self._init_index_db()

    def _init_index_db(self) -> None:
        """Create the archive index table if it does not already exist."""
        conn = sqlite3.connect(self.index_db)
        try:
            conn.execute('''
            CREATE TABLE IF NOT EXISTS archives (
                id INTEGER PRIMARY KEY,
                original_db TEXT NOT NULL,
                archive_path TEXT NOT NULL,
                archive_date TEXT NOT NULL,
                archived_by TEXT NOT NULL,
                description TEXT,
                metadata TEXT
            )
            ''')
            conn.commit()
        finally:
            # Close even if the DDL fails (previously leaked on error).
            conn.close()

    @staticmethod
    def _quote_identifier(name: str) -> str:
        """Quote a SQL identifier; identifiers cannot be bound as parameters."""
        return '"' + name.replace('"', '""') + '"'

    def _collect_table_info(self, db_file: str) -> Dict:
        """Return ``{table: {"columns": [...], "row_count": n}}`` for *db_file*."""
        conn = sqlite3.connect(db_file)
        try:
            cursor = conn.cursor()
            cursor.execute("SELECT name FROM sqlite_master WHERE type='table';")
            table_names = [row[0] for row in cursor.fetchall()]

            table_info = {}
            for table_name in table_names:
                quoted = self._quote_identifier(table_name)
                cursor.execute(f"PRAGMA table_info({quoted})")
                columns = [col[1] for col in cursor.fetchall()]
                cursor.execute(f"SELECT COUNT(*) FROM {quoted}")
                row_count = cursor.fetchone()[0]
                table_info[table_name] = {"columns": columns, "row_count": row_count}
            return table_info
        finally:
            conn.close()

    def archive_database(self, db_path: str, description: str, user: str) -> str:
        """Archive a SQLite database file.

        Copies *db_path* into the archive directory under a timestamped
        name, captures table metadata from the copy, and records the
        archive in the index.

        Returns the path of the archived copy.
        Raises FileNotFoundError when *db_path* does not exist.
        """
        if not os.path.exists(db_path):
            raise FileNotFoundError(f"Database file not found: {db_path}")

        # Timestamped archive filename, e.g. mydb_20240101_120000.db
        timestamp = datetime.datetime.now().strftime("%Y%m%d_%H%M%S")
        db_name = os.path.basename(db_path)
        archive_name = f"{os.path.splitext(db_name)[0]}_{timestamp}.db"
        archive_path = os.path.join(self.archive_dir, archive_name)

        shutil.copy2(db_path, archive_path)

        # Snapshot the schema/row counts of the copy for the index record.
        metadata = json.dumps(self._collect_table_info(archive_path))

        conn = sqlite3.connect(self.index_db)
        try:
            conn.execute('''
            INSERT INTO archives (original_db, archive_path, archive_date, archived_by, description, metadata)
            VALUES (?, ?, ?, ?, ?, ?)
            ''', (db_path, archive_path, datetime.datetime.now().isoformat(), user, description, metadata))
            conn.commit()
        finally:
            conn.close()

        return archive_path

    def _fetch_rows(self, sql: str, params: tuple = ()) -> List[Dict]:
        """Run *sql* against the index DB and return the rows as dicts."""
        conn = sqlite3.connect(self.index_db)
        try:
            conn.row_factory = sqlite3.Row
            cursor = conn.execute(sql, params)
            return [dict(row) for row in cursor.fetchall()]
        finally:
            conn.close()

    def list_archives(self) -> List[Dict]:
        """List all archived databases, newest first."""
        return self._fetch_rows("SELECT * FROM archives ORDER BY archive_date DESC")

    def search_archives(self, query: str) -> List[Dict]:
        """Search archives by original database name or description (substring)."""
        pattern = f"%{query}%"
        return self._fetch_rows("""
        SELECT * FROM archives 
        WHERE original_db LIKE ? OR description LIKE ?
        ORDER BY archive_date DESC
        """, (pattern, pattern))

    def execute_query(self, archive_id: int, sql_query: str) -> List[Dict]:
        """Execute *sql_query* against the archived database *archive_id*.

        Returns the result rows as dicts.
        Raises ValueError for unknown IDs or SQL errors, and
        FileNotFoundError when the archived file has gone missing.

        NOTE(review): the query text is executed verbatim, so callers with
        audit access can run arbitrary SQL (including writes) against the
        archived copy.
        """
        rows = self._fetch_rows("SELECT archive_path FROM archives WHERE id = ?", (archive_id,))
        if not rows:
            raise ValueError(f"Archive with ID {archive_id} not found")

        archive_path = rows[0]["archive_path"]
        if not os.path.exists(archive_path):
            raise FileNotFoundError(f"Archive file not found: {archive_path}")

        conn = sqlite3.connect(archive_path)
        try:
            conn.row_factory = sqlite3.Row
            try:
                cursor = conn.execute(sql_query)
                return [dict(row) for row in cursor.fetchall()]
            except sqlite3.Error as e:
                raise ValueError(f"SQL error: {str(e)}")
        finally:
            # Single close point — previously duplicated across branches.
            conn.close()


class UnstructuredDataArchiver:
    """Handles archiving and retrieval of unstructured data (files/directories).

    Each archive is a timestamped copy of the source file or tree; an
    index database records one row per archive plus one row per copied
    file (relative path, size, extension, SHA-256 hash).
    """

    def __init__(self, archive_dir: str = "archives/unstructured"):
        self.archive_dir = archive_dir
        os.makedirs(self.archive_dir, exist_ok=True)

        # The index database lives alongside the archived copies.
        self.index_db = os.path.join(self.archive_dir, "unstructured_index.db")
        self._init_index_db()

    def _init_index_db(self) -> None:
        """Create the archive/file index tables if they do not already exist."""
        conn = sqlite3.connect(self.index_db)
        try:
            cursor = conn.cursor()
            cursor.execute('''
            CREATE TABLE IF NOT EXISTS archives (
                id INTEGER PRIMARY KEY,
                original_path TEXT NOT NULL,
                archive_path TEXT NOT NULL,
                archive_date TEXT NOT NULL,
                archived_by TEXT NOT NULL,
                description TEXT,
                file_count INTEGER,
                total_size INTEGER,
                file_types TEXT
            )
            ''')
            cursor.execute('''
            CREATE TABLE IF NOT EXISTS archived_files (
                id INTEGER PRIMARY KEY,
                archive_id INTEGER NOT NULL,
                relative_path TEXT NOT NULL,
                file_type TEXT,
                file_size INTEGER,
                hash TEXT,
                FOREIGN KEY (archive_id) REFERENCES archives (id)
            )
            ''')
            conn.commit()
        finally:
            # Close even if the DDL fails (previously leaked on error).
            conn.close()

    def _calculate_file_hash(self, file_path: str) -> str:
        """Return the SHA-256 hex digest of *file_path*, read in 4 KiB chunks."""
        hasher = hashlib.sha256()
        with open(file_path, 'rb') as f:
            for chunk in iter(lambda: f.read(4096), b""):
                hasher.update(chunk)
        return hasher.hexdigest()

    def _copy_and_record(self, cursor, archive_id: int, src_file: str,
                         dest_file: str, rel_path: str):
        """Copy one file into the archive, index it, return (size, extension)."""
        os.makedirs(os.path.dirname(dest_file), exist_ok=True)
        shutil.copy2(src_file, dest_file)

        file_size = os.path.getsize(dest_file)
        file_type = os.path.splitext(dest_file)[1].lower()
        file_hash = self._calculate_file_hash(dest_file)

        cursor.execute('''
        INSERT INTO archived_files (archive_id, relative_path, file_type, file_size, hash)
        VALUES (?, ?, ?, ?, ?)
        ''', (archive_id, rel_path, file_type, file_size, file_hash))
        return file_size, file_type

    def archive_files(self, source_path: str, description: str, user: str) -> str:
        """Archive a file or directory.

        Copies *source_path* (a single file, or a directory walked
        recursively) into a timestamped archive directory, indexes every
        copied file, and records aggregate statistics on the archive row.

        Returns the archive directory path.
        Raises FileNotFoundError when *source_path* does not exist.
        """
        if not os.path.exists(source_path):
            raise FileNotFoundError(f"Source path not found: {source_path}")

        # Timestamped archive directory, e.g. data_20240101_120000/
        timestamp = datetime.datetime.now().strftime("%Y%m%d_%H%M%S")
        source_name = os.path.basename(source_path)
        archive_path = os.path.join(self.archive_dir, f"{source_name}_{timestamp}")
        os.makedirs(archive_path, exist_ok=True)

        file_count = 0
        total_size = 0
        file_types: Dict[str, int] = {}

        conn = sqlite3.connect(self.index_db)
        try:
            cursor = conn.cursor()

            # Insert the archive record first so file rows can reference its ID.
            cursor.execute('''
            INSERT INTO archives (original_path, archive_path, archive_date, archived_by, description, file_count, total_size, file_types)
            VALUES (?, ?, ?, ?, ?, ?, ?, ?)
            ''', (source_path, archive_path, datetime.datetime.now().isoformat(), user, description, 0, 0, "{}"))
            archive_id = cursor.lastrowid

            # Build (source file, relative path) pairs for either a single
            # file or the full directory walk.
            if os.path.isfile(source_path):
                pairs = [(source_path, source_name)]
            else:
                pairs = []
                for root, _, files in os.walk(source_path):
                    for name in files:
                        src_file = os.path.join(root, name)
                        pairs.append((src_file, os.path.relpath(src_file, source_path)))

            for src_file, rel_path in pairs:
                dest_file = os.path.join(archive_path, rel_path)
                file_size, file_type = self._copy_and_record(
                    cursor, archive_id, src_file, dest_file, rel_path)
                file_count += 1
                total_size += file_size
                file_types[file_type] = file_types.get(file_type, 0) + 1

            # Backfill the statistics gathered during the copy.
            cursor.execute('''
            UPDATE archives 
            SET file_count = ?, total_size = ?, file_types = ?
            WHERE id = ?
            ''', (file_count, total_size, json.dumps(file_types), archive_id))
            conn.commit()
        finally:
            # Fix: close the index connection on failure too (previously
            # leaked if any copy/hash raised mid-loop).
            conn.close()

        return archive_path

    def _fetch_rows(self, sql: str, params: tuple = ()) -> List[Dict]:
        """Run *sql* against the index DB and return the rows as dicts."""
        conn = sqlite3.connect(self.index_db)
        try:
            conn.row_factory = sqlite3.Row
            cursor = conn.execute(sql, params)
            return [dict(row) for row in cursor.fetchall()]
        finally:
            conn.close()

    def list_archives(self) -> List[Dict]:
        """List all unstructured data archives, newest first."""
        return self._fetch_rows("SELECT * FROM archives ORDER BY archive_date DESC")

    def search_archives(self, query: str) -> List[Dict]:
        """Search unstructured archives by original path or description."""
        pattern = f"%{query}%"
        return self._fetch_rows("""
        SELECT * FROM archives 
        WHERE original_path LIKE ? OR description LIKE ?
        ORDER BY archive_date DESC
        """, (pattern, pattern))

    def search_files(self, query: str) -> List[Dict]:
        """Search archived files whose relative path contains *query*."""
        return self._fetch_rows("""
        SELECT af.*, a.archive_path, a.archive_date, a.archived_by
        FROM archived_files af
        JOIN archives a ON af.archive_id = a.id
        WHERE af.relative_path LIKE ?
        ORDER BY a.archive_date DESC
        """, (f"%{query}%",))

    def get_archive_files(self, archive_id: int) -> List[Dict]:
        """Return every file record belonging to archive *archive_id*."""
        return self._fetch_rows("""
        SELECT * FROM archived_files
        WHERE archive_id = ?
        ORDER BY relative_path
        """, (archive_id,))


class ArchiveAuditSystem:
    """Facade that wires the permission manager to both archivers.

    Tracks the currently logged-in user and enforces per-database
    ``archive``/``audit`` permissions before delegating to the archivers.
    """

    def __init__(self):
        self.permission_manager = PermissionManager()
        self.structured_archiver = StructuredDataArchiver()
        self.unstructured_archiver = UnstructuredDataArchiver()
        self.current_user = None

    def login(self, username: str, password: str) -> bool:
        """Authenticate *username*; on success remember it as the active user."""
        ok = self.permission_manager.authenticate(username, password)
        if ok:
            self.current_user = username
        return ok

    def logout(self) -> None:
        """Forget the active user."""
        self.current_user = None

    def check_permission(self, database: str, action: str) -> bool:
        """Return True when the active user may perform *action* on *database*."""
        if not self.current_user:
            return False
        return self.permission_manager.check_permission(self.current_user, database, action)

    def _assert_allowed(self, name: str, action: str) -> None:
        """Raise PermissionError unless the active user may *action* on *name*."""
        if not self.check_permission(name, action):
            raise PermissionError(f"User {self.current_user} does not have {action} permission for {name}")

    def _archive_source_name(self, index_db: str, column: str, archive_id: int) -> str:
        """Look up *column* for *archive_id* in *index_db*; return its basename.

        Raises ValueError when the archive ID is unknown.
        """
        conn = sqlite3.connect(index_db)
        cursor = conn.cursor()
        cursor.execute(f"SELECT {column} FROM archives WHERE id = ?", (archive_id,))
        row = cursor.fetchone()
        conn.close()

        if not row:
            raise ValueError(f"Archive with ID {archive_id} not found")
        return os.path.basename(row[0])

    def archive_structured_data(self, db_path: str, description: str) -> str:
        """Archive a structured database on behalf of the active user."""
        self._assert_allowed(os.path.basename(db_path), "archive")
        return self.structured_archiver.archive_database(db_path, description, self.current_user)

    def archive_unstructured_data(self, path: str, description: str) -> str:
        """Archive a file or directory on behalf of the active user."""
        self._assert_allowed(os.path.basename(path), "archive")
        return self.unstructured_archiver.archive_files(path, description, self.current_user)

    def audit_structured_data(self, archive_id: int, sql_query: str) -> pd.DataFrame:
        """Run *sql_query* on an archived database, returning a DataFrame."""
        # Resolve the archive first (so unknown IDs raise ValueError),
        # then enforce the audit permission.
        db_name = self._archive_source_name(
            self.structured_archiver.index_db, "original_db", archive_id)
        self._assert_allowed(db_name, "audit")
        return pd.DataFrame(self.structured_archiver.execute_query(archive_id, sql_query))

    def audit_unstructured_data(self, archive_id: int) -> pd.DataFrame:
        """Return the file inventory of an unstructured archive as a DataFrame."""
        path_name = self._archive_source_name(
            self.unstructured_archiver.index_db, "original_path", archive_id)
        self._assert_allowed(path_name, "audit")
        return pd.DataFrame(self.unstructured_archiver.get_archive_files(archive_id))

    def list_structured_archives(self) -> pd.DataFrame:
        """All structured archives as a DataFrame."""
        return pd.DataFrame(self.structured_archiver.list_archives())

    def list_unstructured_archives(self) -> pd.DataFrame:
        """All unstructured archives as a DataFrame."""
        return pd.DataFrame(self.unstructured_archiver.list_archives())

    def add_user(self, username: str, password: str, role: str, databases: Dict = None) -> bool:
        """Create a new user; only admin accounts pass the check below.

        NOTE: the pseudo-action "admin" is never granted in the default
        role table, so this check succeeds via check_permission's
        admin-role early return (or an explicit "admin" grant).
        """
        is_admin = bool(self.current_user) and self.permission_manager.check_permission(
            self.current_user, "*", "admin")
        if not is_admin:
            raise PermissionError("Only admin users can add new users")
        return self.permission_manager.add_user(username, password, role, databases)


def display_table(df: pd.DataFrame, title: str = None) -> None:
    """Pretty-print *df* as a grid table, optionally beneath an underlined title."""
    if title:
        underline = "=" * len(title)
        print(f"\n{title}")
        print(underline)

    if df.empty:
        print("No data available.")
        return

    print(tabulate(df, headers='keys', tablefmt='grid', showindex=False))


def _build_parser() -> argparse.ArgumentParser:
    """Construct the CLI parser: global credential options plus one subcommand per operation."""
    parser = argparse.ArgumentParser(description="Archive Audit System")
    # Each CLI invocation runs in a fresh process, so a session started with
    # the "login" subcommand cannot persist to later invocations.  These
    # global options let every command authenticate within one invocation,
    # e.g.:  prog --user admin --password admin archive-db my.db "snapshot"
    # (distinct dest= names avoid clashing with subcommand positionals).
    parser.add_argument("--user", dest="auth_user",
                        help="Username to authenticate as for this invocation")
    parser.add_argument("--password", dest="auth_password",
                        help="Password for --user")

    subparsers = parser.add_subparsers(dest="command", help="Command to execute")

    # Login
    login_parser = subparsers.add_parser("login", help="Login to the system")
    login_parser.add_argument("username", help="Username")
    login_parser.add_argument("password", help="Password")

    # Add user
    add_user_parser = subparsers.add_parser("add-user", help="Add a new user")
    add_user_parser.add_argument("username", help="New username")
    add_user_parser.add_argument("password", help="New password")
    add_user_parser.add_argument("role", choices=["admin", "owner", "auditor"], help="User role")

    # Archive structured data
    archive_db_parser = subparsers.add_parser("archive-db", help="Archive a structured database")
    archive_db_parser.add_argument("db_path", help="Path to the database file")
    archive_db_parser.add_argument("description", help="Description of the archive")

    # Archive unstructured data
    archive_files_parser = subparsers.add_parser("archive-files", help="Archive unstructured data")
    archive_files_parser.add_argument("path", help="Path to file or directory")
    archive_files_parser.add_argument("description", help="Description of the archive")

    # Listing
    subparsers.add_parser("list-db-archives", help="List all structured database archives")
    subparsers.add_parser("list-file-archives", help="List all unstructured data archives")

    # Auditing
    audit_db_parser = subparsers.add_parser("audit-db", help="Audit a structured database archive")
    audit_db_parser.add_argument("archive_id", type=int, help="Archive ID")
    audit_db_parser.add_argument("sql_query", help="SQL query to execute")

    audit_files_parser = subparsers.add_parser("audit-files", help="Audit an unstructured data archive")
    audit_files_parser.add_argument("archive_id", type=int, help="Archive ID")

    # Searching
    search_db_parser = subparsers.add_parser("search-db-archives", help="Search structured database archives")
    search_db_parser.add_argument("query", help="Search query")

    search_files_parser = subparsers.add_parser("search-file-archives", help="Search unstructured data archives")
    search_files_parser.add_argument("query", help="Search query")

    search_in_files_parser = subparsers.add_parser("search-in-files", help="Search for files within archives")
    search_in_files_parser.add_argument("query", help="Search query")

    return parser


def _require_login(system: "ArchiveAuditSystem") -> bool:
    """Return True when a user is logged in; otherwise print the standard error."""
    if system.current_user:
        return True
    print("Error: You must login first")
    return False


def main():
    """Main function to run the Archive Audit System CLI."""
    parser = _build_parser()
    args = parser.parse_args()

    system = ArchiveAuditSystem()

    # Bug fix: previously no command other than "login" could ever run,
    # because the login state was lost when the process exited.  Supplying
    # --user/--password authenticates before the command is dispatched.
    if args.auth_user and args.auth_password:
        if not system.login(args.auth_user, args.auth_password):
            print("Login failed: Invalid username or password")
            return

    if args.command == "login":
        if system.login(args.username, args.password):
            print(f"Logged in as {args.username}")
        else:
            print("Login failed: Invalid username or password")

    elif args.command == "add-user":
        if _require_login(system):
            # add_user raises PermissionError for non-admin callers; report
            # it like every other command instead of crashing.
            try:
                if system.add_user(args.username, args.password, args.role):
                    print(f"User {args.username} added successfully")
                else:
                    print(f"Failed to add user {args.username}")
            except Exception as e:
                print(f"Error: {str(e)}")

    elif args.command == "archive-db":
        if _require_login(system):
            try:
                archive_path = system.archive_structured_data(args.db_path, args.description)
                print(f"Database archived successfully to {archive_path}")
            except Exception as e:
                print(f"Error: {str(e)}")

    elif args.command == "archive-files":
        if _require_login(system):
            try:
                archive_path = system.archive_unstructured_data(args.path, args.description)
                print(f"Files archived successfully to {archive_path}")
            except Exception as e:
                print(f"Error: {str(e)}")

    elif args.command == "list-db-archives":
        if _require_login(system):
            display_table(system.list_structured_archives(), "Structured Database Archives")

    elif args.command == "list-file-archives":
        if _require_login(system):
            display_table(system.list_unstructured_archives(), "Unstructured Data Archives")

    elif args.command == "audit-db":
        if _require_login(system):
            try:
                df = system.audit_structured_data(args.archive_id, args.sql_query)
                display_table(df, f"Audit Results for Archive ID {args.archive_id}")
            except Exception as e:
                print(f"Error: {str(e)}")

    elif args.command == "audit-files":
        if _require_login(system):
            try:
                df = system.audit_unstructured_data(args.archive_id)
                display_table(df, f"Files in Archive ID {args.archive_id}")
            except Exception as e:
                print(f"Error: {str(e)}")

    elif args.command == "search-db-archives":
        if _require_login(system):
            archives = system.structured_archiver.search_archives(args.query)
            display_table(pd.DataFrame(archives), f"Database Archives Matching '{args.query}'")

    elif args.command == "search-file-archives":
        if _require_login(system):
            archives = system.unstructured_archiver.search_archives(args.query)
            display_table(pd.DataFrame(archives), f"File Archives Matching '{args.query}'")

    elif args.command == "search-in-files":
        if _require_login(system):
            files = system.unstructured_archiver.search_files(args.query)
            display_table(pd.DataFrame(files), f"Files Matching '{args.query}'")

    else:
        parser.print_help()


if __name__ == "__main__":
    main()
