import requests
import json
import sys
import os
from datetime import datetime
from concurrent.futures import ThreadPoolExecutor, as_completed
import threading
from collections import defaultdict

def load_config(config_file='config.json'):
    """Read and parse the JSON configuration file.

    Args:
        config_file: Path to the configuration file.

    Returns:
        The parsed configuration as a dict.

    Exits the process with status 1 when the file is missing or its
    contents are not valid JSON.
    """
    try:
        with open(config_file, 'r', encoding='utf-8') as handle:
            raw_text = handle.read()
        return json.loads(raw_text)
    except FileNotFoundError:
        print(f"Configuration file {config_file} not found!")
        sys.exit(1)
    except json.JSONDecodeError:
        print(f"Invalid JSON in configuration file {config_file}!")
        sys.exit(1)

def get_environment_config(config, env_name):
    """Return the configuration section for *env_name*.

    Args:
        config: Full configuration mapping (environment name -> settings).
        env_name: Name of the environment to look up.

    Exits the process with status 1 (after listing the available
    environments) when *env_name* is not a key of *config*.
    """
    try:
        return config[env_name]
    except KeyError:
        print(f"Environment '{env_name}' not found in configuration!")
        print(f"Available environments: {list(config.keys())}")
        sys.exit(1)

class GitLabCommitStats:
    """Collect and report per-user commit statistics for a GitLab instance.

    All API access goes through one authenticated ``requests.Session``
    (``PRIVATE-TOKEN`` header). Contributor data is fetched concurrently
    with a thread pool; a lock guards the shared progress counter that the
    worker threads update.
    """

    # Per-request timeout in seconds. Without it a stalled connection would
    # hang the whole run; a timeout raises inside the existing ``except``
    # handlers, so failures are reported the same way as other errors.
    REQUEST_TIMEOUT = 30

    def __init__(self, base_url, token):
        """Create an analyzer for the GitLab API at *base_url*.

        Args:
            base_url: Base URL of the GitLab v4 REST API.
            token: Private token sent via the ``PRIVATE-TOKEN`` header.
        """
        self.base_url = base_url
        self.headers = {'PRIVATE-TOKEN': token}
        self.session = requests.Session()
        self.session.headers.update(self.headers)
        # Protects progress_counter, which worker threads increment.
        self.lock = threading.Lock()
        self.progress_counter = 0
        self.total_projects = 0

    def get_all_projects(self):
        """Return every project visible to the token, following pagination.

        Returns:
            A list of project dicts as returned by ``GET /projects``.
            On the first HTTP or network error the method stops and returns
            whatever was collected so far instead of raising.
        """
        all_projects = []
        page = 1
        per_page = 100

        print("Fetching all projects...")

        while True:
            projects_url = f'{self.base_url}/projects?page={page}&per_page={per_page}&membership=false&owned=false'

            try:
                response = self.session.get(projects_url, timeout=self.REQUEST_TIMEOUT)
                if response.status_code != 200:
                    print(f"Failed to fetch projects: {response.status_code}")
                    print(response.text)
                    break

                projects = response.json()
                if not projects:
                    break

                all_projects.extend(projects)

                # A short page means this was the last one; skipping the
                # next request avoids an extra empty-page round trip.
                if len(projects) < per_page:
                    break

                page += 1

            except Exception as e:
                print(f"Error fetching projects: {e}")
                break

        print(f"Found {len(all_projects)} projects")
        return all_projects

    def get_project_contributors(self, project_id, project_name):
        """Fetch contributor statistics for one project (thread worker).

        Args:
            project_id: Numeric GitLab project id.
            project_name: Human-readable name, used for progress output.

        Returns:
            A ``(project_id, project_name, contributors)`` tuple where
            *contributors* is the API's list of contributor dicts, or an
            empty list on any error (including 404 for projects without a
            repository).
        """
        contributors_url = f'{self.base_url}/projects/{project_id}/repository/contributors'

        try:
            response = self.session.get(contributors_url, timeout=self.REQUEST_TIMEOUT)

            # Progress counter is shared across worker threads.
            with self.lock:
                self.progress_counter += 1
                print(f"Progress: {self.progress_counter}/{self.total_projects} - Processing: {project_name}")

            if response.status_code == 200:
                contributors = response.json()
                return project_id, project_name, contributors
            elif response.status_code == 404:
                # Project might not have a repository or be empty
                return project_id, project_name, []
            else:
                print(f"Failed to get contributors for project {project_name}: {response.status_code}")
                return project_id, project_name, []

        except Exception as e:
            print(f"Error getting contributors for project {project_name}: {e}")
            return project_id, project_name, []

    def calculate_commit_statistics(self, projects, max_workers=5):
        """Aggregate per-user commit counts across *projects*.

        Args:
            projects: List of project dicts with at least ``id`` and ``name``.
            max_workers: Number of concurrent contributor fetches.

        Returns:
            A dict keyed by lower-cased contributor email, each value holding
            ``name``, ``email``, ``total_commits``, per-project ``projects``
            mapping, and ``project_count``.
        """
        user_stats = defaultdict(lambda: {
            'name': '',
            'email': '',
            'total_commits': 0,
            'projects': {},
            'project_count': 0
        })

        self.total_projects = len(projects)
        self.progress_counter = 0

        print(f"\nAnalyzing commit statistics across {len(projects)} projects...")
        print("This may take a while depending on the number of projects...\n")

        # Use ThreadPoolExecutor for concurrent processing
        with ThreadPoolExecutor(max_workers=max_workers) as executor:
            # Submit all tasks
            future_to_project = {
                executor.submit(self.get_project_contributors, project['id'], project['name']): project
                for project in projects
            }

            # Aggregation happens only on this thread, so user_stats needs
            # no locking.
            for future in as_completed(future_to_project):
                project_id, project_name, contributors = future.result()

                for contributor in contributors:
                    email = contributor.get('email', '').lower()
                    name = contributor.get('name', '')
                    commits = contributor.get('commits', 0)

                    # Contributors without an email cannot be identified.
                    if not email:
                        continue

                    # Use email as the primary key for user identification
                    user_key = email

                    user_stats[user_key]['email'] = email
                    # Keep the first non-empty name seen for this email.
                    if not user_stats[user_key]['name'] and name:
                        user_stats[user_key]['name'] = name

                    user_stats[user_key]['total_commits'] += commits
                    user_stats[user_key]['projects'][project_name] = commits
                    user_stats[user_key]['project_count'] = len(user_stats[user_key]['projects'])

        return dict(user_stats)

    def display_statistics(self, user_stats, detailed=False, top_n=None, by_project=False):
        """Print commit statistics sorted by total commits (descending).

        Args:
            user_stats: Mapping as produced by ``calculate_commit_statistics``.
            detailed: Show the per-user detailed view instead of the table.
            top_n: If set, limit output to the top N users.
            by_project: In detailed view, also show a per-project breakdown.
        """
        if not user_stats:
            print("No commit statistics found.")
            return

        # Sort users by total commits (descending)
        sorted_users = sorted(user_stats.items(), key=lambda x: x[1]['total_commits'], reverse=True)

        if top_n:
            sorted_users = sorted_users[:top_n]

        print(f"\n{'='*80}")
        print(f"COMMIT STATISTICS SUMMARY")
        print(f"{'='*80}")
        print(f"Total users analyzed: {len(user_stats)}")
        print(f"Displaying top {len(sorted_users)} users")
        print(f"{'='*80}")

        if detailed:
            self._display_detailed_stats(sorted_users, by_project)
        else:
            self._display_simple_stats(sorted_users)

    def _display_simple_stats(self, sorted_users):
        """Print one table row per user: rank, email, name, commits, projects."""
        print(f"{'Rank':<6} {'Email':<30} {'Name':<25} {'Commits':<10} {'Projects':<10}")
        print("-" * 80)

        for rank, (email, stats) in enumerate(sorted_users, 1):
            # Truncate long values so columns stay aligned.
            name = stats['name'][:24] if stats['name'] else 'N/A'
            print(f"{rank:<6} {email[:29]:<30} {name:<25} {stats['total_commits']:<10} {stats['project_count']:<10}")

    def _display_detailed_stats(self, sorted_users, by_project=False):
        """Print a multi-line record per user, optionally with a per-project breakdown."""
        for rank, (email, stats) in enumerate(sorted_users, 1):
            print(f"\n{rank}. User: {email}")
            print(f"   Name: {stats['name'] or 'N/A'}")
            print(f"   Total Commits: {stats['total_commits']}")
            print(f"   Active Projects: {stats['project_count']}")

            if by_project and stats['projects']:
                print(f"   Project Breakdown:")
                # Sort projects by commit count
                sorted_projects = sorted(stats['projects'].items(), key=lambda x: x[1], reverse=True)
                for project_name, commits in sorted_projects[:10]:  # Show top 10 projects
                    print(f"     - {project_name}: {commits} commits")
                if len(sorted_projects) > 10:
                    print(f"     ... and {len(sorted_projects) - 10} more projects")

            print("-" * 60)

    def save_statistics_to_file(self, user_stats, filename, env_name):
        """Write ranked statistics to *filename* as UTF-8 JSON.

        Args:
            user_stats: Mapping as produced by ``calculate_commit_statistics``.
            filename: Destination path for the JSON report.
            env_name: Environment label embedded in the report header.
        """
        output_data = {
            'environment': env_name,
            'export_time': datetime.now().isoformat(),
            'total_users': len(user_stats),
            'statistics': {}
        }

        # Sort users by total commits so ranks are stable in the output.
        sorted_users = sorted(user_stats.items(), key=lambda x: x[1]['total_commits'], reverse=True)

        for rank, (email, stats) in enumerate(sorted_users, 1):
            output_data['statistics'][email] = {
                'rank': rank,
                'name': stats['name'],
                'email': stats['email'],
                'total_commits': stats['total_commits'],
                'project_count': stats['project_count'],
                'projects': dict(stats['projects'])
            }

        try:
            with open(filename, 'w', encoding='utf-8') as f:
                json.dump(output_data, f, indent=2, ensure_ascii=False)
            # Bug fix: previously printed a literal placeholder instead of
            # the actual destination path.
            print(f"\nStatistics saved to: {filename}")
        except Exception as e:
            print(f"Failed to save statistics: {e}")

def main():
    """Command-line entry point.

    Usage::

        python get_commit_stats.py <environment> [--detailed] [--save]
                                                 [--top N] [--by-project]

    Loads ``config.json`` from the script's directory, analyzes all
    projects of the selected GitLab environment, prints the statistics,
    and optionally saves them to a timestamped JSON file.
    """
    # Resolve config.json relative to this script, not the current
    # working directory, so the tool works from anywhere.
    script_dir = os.path.dirname(os.path.abspath(__file__))
    config_file = os.path.join(script_dir, 'config.json')

    # Load configuration
    config = load_config(config_file)

    # Parse command line arguments
    if len(sys.argv) < 2:
        print("Usage: python get_commit_stats.py <environment> [options]")
        print(f"Available environments: {list(config.keys())}")
        print("\nOptions:")
        print("  --detailed     Show detailed statistics")
        print("  --save         Save results to JSON file")
        print("  --top N        Show only top N users")
        print("  --by-project   Show project breakdown in detailed view")
        sys.exit(1)

    env_name = sys.argv[1]

    # Flag-style options
    detailed = '--detailed' in sys.argv
    save_to_file = '--save' in sys.argv
    by_project = '--by-project' in sys.argv

    top_n = None
    if '--top' in sys.argv:
        top_index = sys.argv.index('--top')
        # Bug fix: a trailing "--top" with no value used to be silently
        # ignored; now both a missing and a non-integer value are rejected.
        if top_index + 1 >= len(sys.argv):
            print("Invalid --top parameter. Please provide a number.")
            sys.exit(1)
        try:
            top_n = int(sys.argv[top_index + 1])
        except ValueError:
            print("Invalid --top parameter. Please provide a number.")
            sys.exit(1)

    # Get environment configuration
    env_config = get_environment_config(config, env_name)
    base_url = env_config['url']
    token = env_config['token']

    print(f"GitLab Commit Statistics Analyzer")
    print(f"Environment: {env_name}")
    print(f"API URL: {base_url}")
    print(f"Options: detailed={detailed}, save={save_to_file}, by_project={by_project}, top={top_n}")
    print()

    try:
        # Initialize GitLab stats analyzer
        stats_analyzer = GitLabCommitStats(base_url, token)

        # Get all projects
        projects = stats_analyzer.get_all_projects()

        if not projects:
            print("No projects found or failed to retrieve projects.")
            sys.exit(1)

        # Calculate commit statistics
        user_stats = stats_analyzer.calculate_commit_statistics(projects)

        # Display results
        stats_analyzer.display_statistics(user_stats, detailed, top_n, by_project)

        # Save to file if requested
        if save_to_file:
            timestamp = datetime.now().strftime('%Y%m%d_%H%M%S')
            filename = f"commit_stats_{env_name}_{timestamp}.json"
            stats_analyzer.save_statistics_to_file(user_stats, filename, env_name)

    except KeyboardInterrupt:
        print("\nOperation cancelled by user.")
        sys.exit(1)
    except Exception as e:
        print(f"Error occurred: {e}")
        sys.exit(1)

if __name__ == "__main__":
    main()
