#!/usr/bin/env python3
import sqlite3
import json
import sys
from datetime import datetime

def extract_requests_and_responses(db_path='data/crawler.db'):
    """
    Extract request/response data from a SQLite database and dump it as JSON.

    Opens the database at *db_path*, looks for tables whose names contain a
    request/response-like pattern (substring match), extracts every table's
    rows via extract_table_data(), writes the combined result to a
    timestamped JSON file in the current directory, and prints a summary.

    Returns the result dict on success, or None if any error occurred.
    """
    conn = None
    try:
        # Connect to database
        conn = sqlite3.connect(db_path)
        conn.row_factory = sqlite3.Row  # This allows accessing columns by name
        cursor = conn.cursor()

        # Get all table names
        cursor.execute("SELECT name FROM sqlite_master WHERE type='table';")
        tables = [row[0] for row in cursor.fetchall()]

        print(f"Found tables: {tables}")

        # Initialize result structure
        result = {
            "extraction_timestamp": datetime.now().isoformat(),
            "database_path": db_path,
            "tables": {}
        }

        # Common request/response table name patterns (matched as substrings)
        possible_request_tables = ['requests', 'request', 'http_requests', 'api_requests']
        possible_response_tables = ['responses', 'response', 'http_responses', 'api_responses']

        # Find actual table names; if several match, the last one wins.
        requests_table = None
        responses_table = None
        for table in tables:
            table_lower = table.lower()
            if any(pattern in table_lower for pattern in possible_request_tables):
                requests_table = table
            if any(pattern in table_lower for pattern in possible_response_tables):
                responses_table = table

        if not requests_table and not responses_table:
            # No specific tables found: dump every table under its own name.
            print("No specific request/response tables found, extracting from all tables...")
            for table in tables:
                result["tables"][table] = extract_table_data(cursor, table)
        else:
            # Matched tables are stored under normalized keys...
            if requests_table:
                print(f"Extracting requests from table: {requests_table}")
                result["tables"]["requests"] = extract_table_data(cursor, requests_table)

            if responses_table:
                print(f"Extracting responses from table: {responses_table}")
                result["tables"]["responses"] = extract_table_data(cursor, responses_table)

            # ...and every remaining table under its own name.
            for table in tables:
                if table != requests_table and table != responses_table:
                    result["tables"][table] = extract_table_data(cursor, table)

        # Serialize; default=str stringifies any non-JSON-native values.
        json_output = json.dumps(result, indent=2, ensure_ascii=False, default=str)

        # Save to a timestamped file in the current directory
        output_file = f"extracted_data_{datetime.now().strftime('%Y%m%d_%H%M%S')}.json"
        with open(output_file, 'w', encoding='utf-8') as f:
            f.write(json_output)

        print(f"\nData extracted and saved to: {output_file}")
        print(f"Total tables extracted: {len(result['tables'])}")

        # Print per-table record counts
        for table_name, table_data in result["tables"].items():
            print(f"  - {table_name}: {len(table_data.get('data', []))} records")

        return result

    except Exception as e:
        # Top-level boundary: report and signal failure via None.
        print(f"Error extracting data: {e}")
        return None
    finally:
        # BUGFIX: the original closed the connection only on the success
        # path, leaking it whenever an exception was raised mid-extraction.
        if conn is not None:
            conn.close()

def extract_table_data(cursor, table_name):
    """
    Extract all rows from *table_name* using the given sqlite3 cursor.

    Returns a dict with keys "table_name", "columns", "row_count" and
    "data" (a list of {column_name: value} dicts). BLOB values are decoded
    as UTF-8 when possible, otherwise base64-encoded to ASCII. On any
    error, returns {"table_name", "error", "data": []} instead of raising.
    """
    import base64  # hoisted: was re-imported inside the per-value loop

    try:
        # Quote the identifier (doubling embedded quotes) so table names
        # with spaces or special characters cannot break the statement.
        quoted = '"' + table_name.replace('"', '""') + '"'

        # Get table structure; column name is field 1 of each PRAGMA row.
        cursor.execute(f"PRAGMA table_info({quoted});")
        column_names = [col[1] for col in cursor.fetchall()]

        # Get all data
        cursor.execute(f"SELECT * FROM {quoted};")
        rows = cursor.fetchall()

        # Convert to list of dictionaries
        data = []
        for row in rows:
            row_dict = {}
            for col_name, value in zip(column_names, row):
                if isinstance(value, bytes):
                    # Prefer readable text; fall back to base64 for binary.
                    try:
                        row_dict[col_name] = value.decode('utf-8')
                    except UnicodeDecodeError:  # BUGFIX: was a bare except
                        row_dict[col_name] = base64.b64encode(value).decode('ascii')
                else:
                    row_dict[col_name] = value
            data.append(row_dict)

        return {
            "table_name": table_name,
            "columns": column_names,
            "row_count": len(data),
            "data": data
        }

    except Exception as e:
        # Best-effort per-table extraction: report and return an error stub
        # so one bad table does not abort the whole dump.
        print(f"Error extracting data from table {table_name}: {e}")
        return {
            "table_name": table_name,
            "error": str(e),
            "data": []
        }

def main():
    """CLI entry point: run the extraction against argv[1] or the default path."""
    db_path = sys.argv[1] if len(sys.argv) > 1 else 'data/crawler.db'

    print(f"Extracting data from: {db_path}")
    result = extract_requests_and_responses(db_path)

    # A None result means the extraction reported an error; exit non-zero.
    if not result:
        print("\nExtraction failed!")
        sys.exit(1)
    print("\nExtraction completed successfully!")

# Run the CLI only when executed as a script (not when imported as a module).
if __name__ == "__main__":
    main()