#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Token Expander for UMA/UKM Metrics

This module provides functionality to expand histogram names with tokens
and create reverse lookups for efficient querying.

Copyright 2024 Chromium Authors
Use of this source code is governed by a BSD-style license that can be
found in the LICENSE file.
"""

import json
import logging
import re
import sqlite3
from pathlib import Path
from typing import Dict, List, Optional, Set, Tuple

# Configure logging
logger = logging.getLogger(__name__)


class TokenExpander:
    """Expands histogram names with tokens and provides reverse lookup.

    Histogram names may contain ``{TokenName}`` placeholders.  Each token
    resolves (through the ``histogram_tokens`` -> ``tokens`` -> ``variants``
    tables) to a list of variant strings; expansion substitutes every
    combination of variants and records the results in the
    ``expanded_histograms`` table so concrete names can be mapped back to
    their original definition.
    """

    # Pre-compiled pattern matching {TokenName} placeholders.
    _TOKEN_PATTERN = re.compile(r'\{([^}]+)\}')

    # Safety cap on the number of combinations generated per histogram.
    _MAX_EXPANSIONS = 10000

    def __init__(self, db_path: str):
        """Store the database path; connect() must be called before use."""
        self.db_path = db_path
        # Active sqlite3 connection, or None when not connected.
        self.conn: Optional[sqlite3.Connection] = None

    def connect(self):
        """Connect to the database with name-based row access."""
        self.conn = sqlite3.connect(self.db_path)
        self.conn.row_factory = sqlite3.Row
        # SQLite disables foreign-key enforcement per connection by default,
        # which would make ON DELETE CASCADE in expanded_histograms a no-op.
        self.conn.execute("PRAGMA foreign_keys = ON")

    def close(self):
        """Close the database connection; safe to call more than once."""
        if self.conn:
            self.conn.close()
            self.conn = None  # drop the stale handle so close() is idempotent

    def create_expansion_tables(self):
        """Create the expanded_histograms table and its lookup indexes.

        The UNIQUE constraint gives the INSERT OR REPLACE in
        expand_all_histograms something to conflict on, so re-runs update
        rows instead of accumulating duplicates.  (It only applies to
        freshly created databases; CREATE TABLE IF NOT EXISTS leaves an
        existing table untouched.)
        """
        cursor = self.conn.cursor()

        # Table to store expanded histogram names.
        cursor.execute('''
            CREATE TABLE IF NOT EXISTS expanded_histograms (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                original_metric_id INTEGER NOT NULL,
                expanded_name TEXT NOT NULL,
                token_values TEXT,  -- JSON string of token key-value pairs
                FOREIGN KEY (original_metric_id) REFERENCES metrics (id) ON DELETE CASCADE,
                UNIQUE (original_metric_id, expanded_name)
            )
        ''')

        # Index for fast reverse lookup by expanded name.
        cursor.execute('''
            CREATE INDEX IF NOT EXISTS idx_expanded_histograms_name
            ON expanded_histograms(expanded_name)
        ''')

        cursor.execute('''
            CREATE INDEX IF NOT EXISTS idx_expanded_histograms_original_id
            ON expanded_histograms(original_metric_id)
        ''')

        self.conn.commit()
        print("Created expansion tables successfully")

    def extract_tokens_from_name(self, name: str) -> List[str]:
        """Return the token placeholders in *name*, de-duplicated.

        '{Mode}.Foo.{Mode}' yields ['Mode'] once: str.replace substitutes
        every occurrence of a placeholder anyway, so expanding per
        occurrence would only square the number of generated combinations
        with pure duplicates.  First-occurrence order is preserved.
        """
        return list(dict.fromkeys(self._TOKEN_PATTERN.findall(name)))

    def expand_single_histogram(self, metric_id: int, name: str) -> List[Tuple[str, Dict[str, str]]]:
        """Expand one histogram name into all concrete (name, tokens) pairs.

        Returns a list of (expanded_name, token_values) tuples; a name
        without tokens comes back unchanged with an empty mapping.
        """
        tokens = self.extract_tokens_from_name(name)
        if not tokens:
            return [(name, {})]  # No tokens, return as-is

        # Map token key -> token definition name for this metric.
        cursor = self.conn.cursor()
        cursor.execute("""
            SELECT ht.token_key, ht.token_name
            FROM histogram_tokens ht
            WHERE ht.metric_id = ?
        """, (metric_id,))
        token_map = {row['token_key']: row['token_name'] for row in cursor.fetchall()}

        # Fetch the variant lists for all tokens up front so the expansion
        # loop below never touches the database.
        all_variants: Dict[str, List[str]] = {}
        for token_key, token_name in token_map.items():
            cursor.execute("""
                SELECT v.variant_name
                FROM variants v
                JOIN tokens t ON v.token_id = t.id
                WHERE t.token_name = ?
            """, (token_name,))
            all_variants[token_key] = [row['variant_name'] for row in cursor.fetchall()]

        # Iterative cartesian product avoids recursion-depth issues.
        return self._generate_expansions_iterative(name, tokens, token_map, all_variants)

    def _generate_expansions_iterative(self, name: str, tokens: List[str],
                                      token_map: Dict[str, str],
                                      all_variants: Dict[str, List[str]]) -> List[Tuple[str, Dict[str, str]]]:
        """Iteratively build every (expanded_name, token_values) combination.

        A token with no known variants is kept as its literal '{token}'
        placeholder.  The running set of combinations is capped at
        _MAX_EXPANSIONS to bound combinatorial blow-ups.
        """
        if not tokens:
            return [(name, {})]

        # Cartesian product of variant choices, grown one token at a time.
        combos: List[Dict[str, str]] = [{}]
        for token in tokens:
            # Unknown token -> substitute with itself (i.e. leave as literal).
            variants = all_variants.get(token) or [f'{{{token}}}']
            combos = [
                {**values, token: variant}
                for values in combos
                for variant in variants
            ]

            # Early termination if the number of combinations gets too large.
            if len(combos) > self._MAX_EXPANSIONS:
                logger.warning(f"Too many expansions for {name}: {len(combos)}, truncating")
                combos = combos[:self._MAX_EXPANSIONS]

        # Generate the final expanded names from each value assignment.
        results: List[Tuple[str, Dict[str, str]]] = []
        for token_values in combos:
            expanded_name = name
            for token_key, token_value in token_values.items():
                expanded_name = expanded_name.replace(f'{{{token_key}}}', token_value)
            results.append((expanded_name, token_values))

        return results

    def _generate_expansions(self,
                           original_name: str,
                           remaining_tokens: List[str],
                           token_map: Dict[str, str],
                           current_values: Dict[str, str],
                           results: List[Tuple[str, Dict[str, str]]]):
        """Recursively generate all possible expanded names.

        NOTE(review): legacy recursive equivalent of
        _generate_expansions_iterative; nothing in this module calls it.
        Unlike the iterative path it queries variants per recursion step.
        Kept for compatibility with external callers, unchanged.
        """
        if not remaining_tokens:
            # All tokens processed, generate final name.
            expanded_name = original_name
            for token_key, token_value in current_values.items():
                expanded_name = expanded_name.replace(f'{{{token_key}}}', token_value)
            results.append((expanded_name, current_values.copy()))
            return

        # Process next token.
        current_token = remaining_tokens[0]
        had_token = current_token in current_values

        if current_token in token_map:
            token_name = token_map[current_token]

            # Get variants for this token.
            cursor = self.conn.cursor()
            cursor.execute("""
                SELECT v.variant_name
                FROM variants v
                JOIN tokens t ON v.token_id = t.id
                WHERE t.token_name = ?
            """, (token_name,))

            variants = [row['variant_name'] for row in cursor.fetchall()]

            for variant in variants:
                current_values[current_token] = variant
                self._generate_expansions(original_name, remaining_tokens[1:],
                                      token_map, current_values, results)
        else:
            # Token not found, skip it (treat as literal).
            current_values[current_token] = f'{{{current_token}}}'
            self._generate_expansions(original_name, remaining_tokens[1:],
                                  token_map, current_values, results)

        # Backtrack - only remove if we added it.
        if not had_token and current_token in current_values:
            del current_values[current_token]

    def expand_all_histograms(self):
        """Expand every histogram whose name contains a token placeholder."""
        cursor = self.conn.cursor()

        # Get all histograms with tokens ('{' only appears in placeholders).
        cursor.execute("""
            SELECT id, name FROM metrics
            WHERE name LIKE '%{%' AND type = 'histogram'
        """)

        histograms_with_tokens = cursor.fetchall()
        total_expansions = 0

        print(f"Found {len(histograms_with_tokens)} histograms with tokens")

        for hist in histograms_with_tokens:
            metric_id = hist['id']
            name = hist['name']

            expansions = self.expand_single_histogram(metric_id, name)

            # Store expansions; REPLACE keys off the table's UNIQUE
            # constraint so re-runs update rather than duplicate.
            for expanded_name, token_values in expansions:
                cursor.execute("""
                    INSERT OR REPLACE INTO expanded_histograms
                    (original_metric_id, expanded_name, token_values)
                    VALUES (?, ?, ?)
                """, (metric_id, expanded_name, json.dumps(token_values)))

            total_expansions += len(expansions)

            if len(expansions) > 1:
                print(f"  {name}: {len(expansions)} expansions")

        self.conn.commit()
        print(f"Generated {total_expansions} total expansions")

    def find_original_histogram(self, expanded_name: str) -> Optional[Dict]:
        """Return the original histogram definition for an expanded name.

        Returns a dict of metric metadata plus the decoded token values,
        or None if the expanded name is unknown.
        """
        cursor = self.conn.cursor()
        cursor.execute("""
            SELECT m.id, m.name as original_name, m.summary, m.units, m.expires_after, m.component,
                   m.file_path, m.line_number, eh.token_values
            FROM expanded_histograms eh
            JOIN metrics m ON eh.original_metric_id = m.id
            WHERE eh.expanded_name = ?
        """, (expanded_name,))

        result = cursor.fetchone()
        if result is None:
            return None
        return {
            'id': result['id'],
            'original_name': result['original_name'],
            'expanded_name': expanded_name,
            'summary': result['summary'],
            'units': result['units'],
            'expires_after': result['expires_after'],
            'component': result['component'],
            'file_path': result['file_path'],
            'line_number': result['line_number'],
            # token_values may be NULL for legacy rows; decode defensively.
            'token_values': json.loads(result['token_values']) if result['token_values'] else {}
        }

    def find_by_pattern(self, pattern: str, limit: int = 10) -> List[Dict]:
        """Return up to *limit* expanded histograms whose name contains *pattern*."""
        cursor = self.conn.cursor()
        # Substring match; pattern is wrapped in LIKE wildcards.
        cursor.execute("""
            SELECT m.id, m.name as original_name, eh.expanded_name, eh.token_values
            FROM expanded_histograms eh
            JOIN metrics m ON eh.original_metric_id = m.id
            WHERE eh.expanded_name LIKE ?
            LIMIT ?
        """, (f'%{pattern}%', limit))

        return [{
            'id': row['id'],
            'original_name': row['original_name'],
            'expanded_name': row['expanded_name'],
            'token_values': json.loads(row['token_values']) if row['token_values'] else {}
        } for row in cursor.fetchall()]

    def get_expansion_statistics(self) -> Dict:
        """Return summary statistics about histogram expansions.

        Keys: 'histograms_with_tokens', 'total_expansions',
        'avg_expansions', and (when any expansions exist)
        'most_expansions' with the top histogram and its count.
        """
        cursor = self.conn.cursor()

        stats = {}

        # Total histograms with tokens.
        cursor.execute("""
            SELECT COUNT(DISTINCT original_metric_id) FROM expanded_histograms
        """)
        stats['histograms_with_tokens'] = cursor.fetchone()[0]

        # Total expanded names.
        cursor.execute("""
            SELECT COUNT(*) FROM expanded_histograms
        """)
        stats['total_expansions'] = cursor.fetchone()[0]

        # Average expansions per histogram (guard against empty table).
        if stats['histograms_with_tokens'] > 0:
            stats['avg_expansions'] = stats['total_expansions'] / stats['histograms_with_tokens']
        else:
            stats['avg_expansions'] = 0

        # Histogram with most expansions.
        cursor.execute("""
            SELECT m.name, COUNT(*) as expansion_count
            FROM expanded_histograms eh
            JOIN metrics m ON eh.original_metric_id = m.id
            GROUP BY eh.original_metric_id, m.name
            ORDER BY expansion_count DESC
            LIMIT 1
        """)
        row = cursor.fetchone()
        if row:
            stats['most_expansions'] = {
                'histogram': row[0],
                'count': row[1]
            }

        return stats

    def create_reverse_lookup_function(self):
        """Register find_original() as a custom SQL function on this connection."""
        def find_original_func(expanded_name):
            # Scalar wrapper: return just the original name, or NULL.
            result = self.find_original_histogram(expanded_name)
            if result:
                return result['original_name']
            return None

        # Register the function (1 argument, deterministic per DB contents).
        self.conn.create_function("find_original", 1, find_original_func)
        print("Created find_original() SQL function")


def main():
    """Command-line entry point: expand, look up, search, or report stats."""
    import argparse

    parser = argparse.ArgumentParser(description='Expand histogram names with tokens')
    parser.add_argument('--db', default='./uma_ukm_metrics.db',
                        help='Database file path')
    parser.add_argument('--expand', action='store_true',
                        help='Expand all histograms with tokens')
    parser.add_argument('--lookup',
                        help='Lookup original histogram by expanded name')
    parser.add_argument('--pattern',
                        help='Find histograms matching pattern')
    parser.add_argument('--stats', action='store_true',
                        help='Show expansion statistics')
    args = parser.parse_args()

    expander = TokenExpander(args.db)
    expander.connect()

    # Exactly one action runs per invocation; each guard returns early,
    # and the finally clause closes the connection on every path.
    try:
        if args.expand:
            expander.create_expansion_tables()
            expander.expand_all_histograms()
            return

        if args.lookup:
            match = expander.find_original_histogram(args.lookup)
            if match is None:
                print(f"No histogram found for: {args.lookup}")
            else:
                print(f"Found original histogram:")
                print(f"  Original: {match['original_name']}")
                print(f"  Expanded:  {match['expanded_name']}")
                print(f"  Tokens:    {match['token_values']}")
                print(f"  Summary:   {match['summary']}")
            return

        if args.pattern:
            matches = expander.find_by_pattern(args.pattern)
            if not matches:
                print(f"No histograms found matching: {args.pattern}")
            else:
                print(f"Found {len(matches)} histograms matching '{args.pattern}':")
                for match in matches:
                    print(f"  {match['expanded_name']}")
                    print(f"    Original: {match['original_name']}")
                    print(f"    Tokens: {match['token_values']}")
            return

        if args.stats:
            # Ensure the tables exist so the stats queries don't fail.
            expander.create_expansion_tables()
            stats = expander.get_expansion_statistics()
            print("Expansion Statistics:")
            print(f"  Histograms with tokens: {stats['histograms_with_tokens']}")
            print(f"  Total expansions: {stats['total_expansions']}")
            print(f"  Average expansions per histogram: {stats['avg_expansions']:.2f}")
            top = stats.get('most_expansions')
            if top:
                print(f"  Most expansions: {top['histogram']} ({top['count']} expansions)")
            return

        print("Use --help to see available options")

    finally:
        expander.close()


if __name__ == '__main__':
    main()