# This program monitors the progress of group projects in
# software engineering class.
import os
import subprocess
import pandas as pd
import datetime
import matplotlib.pyplot as plt


# Repo for testing:
test_repos = [
    {'name': 'demo2025', 'url': 'https://gitee.com/hobbiton/demo2025.git'}
]

# For existing synched repos, 
# set default_local_repos_dir and let need_synching be False.

# Local directory.
default_local_repos_dir = '/home/lhuang/work1/SE2025S_repos'

# need_synching = False: local repos already synched with remote.
need_synching = False

# Where to store output files.
output_dir = 'output'

# Date format.
date_format = '%Y-%m-%d'  # This is same as 'short' in 'git log --date=short --stat' 
# --date=format:'' is not supported by old-version git.


def get_local_branches():
    # Get local branch names.
    command = 'git branch'
    output = subprocess.check_output(command, shell=True, text=True)
    local_branches = output.split()
    local_branches.remove('*')  # '*' denotes the current branch.
    return local_branches


def get_remote_branches():
    command = 'git branch -r'
    output = subprocess.check_output(command, shell=True, text=True)
    remote_branches = output.split()
    remote_branches.remove('->')
    remote_branches.remove('origin/HEAD')
    # There may be repeated names, use set to drop duplicates.
    remote_branches = list(set(remote_branches))
    # Remove 'origin/' in each branch.
    remote_branches = [ x.replace('origin/', '') for x in remote_branches]
    return remote_branches


def switch_to_branch(branch):
    command = 'git checkout ' + branch
    subprocess.check_output(command, shell=True, text=True)
    return


# Sync repos with git commands.

def update_all_branches():
    print('In dir: ' + os.getcwd())

    # Get local branch names.
    local_branches = get_local_branches()
    print('local_branches are: ' + ', '.join(local_branches))

    # Get remote branch names.
    remote_branches = get_remote_branches()
    print('remote_branches are: ' + ', '.join(remote_branches))

    # Synchronize.
    for b in local_branches:
        if b in remote_branches:
            switch_to_branch(b)
            command = 'git pull'
            subprocess.check_output(command, shell=True, text=True)
        else:
            # The branch has been deleted in remote repo.
            pass

    for b in remote_branches:
        if b not in local_branches:
            # New branch in the remote repo, set up tracking.
            command = 'git checkout --track origin/' + b
            subprocess.check_output(command, shell=True, text=True)
    
    return 

    
def sync_repos(repos, local_dir):
    print('======== Synching repos ========')
    for repo in repos:
        repo_name = repo['name']
        repo_url = repo['url']
        if not os.path.exists(os.path.join(local_dir, repo_name)):
            # Do a clone.
            print('Cloning ' + repo_url + ' as ' + repo_name)
            command = 'git clone ' + repo_url + ' ' + os.path.join(local_dir, repo_name)
            output = subprocess.check_output(command, shell=True, text=True)
            print(output.strip())
        # Repo exists, update all local branches to sync with remote branches.
        saved_dir = os.getcwd()
        os.chdir(os.path.join(local_dir, repo_name))
        update_all_branches()
        os.chdir(saved_dir)
    return


# After synching with remote repos, each local repo contains all commits.

def collect_stats_for_repo(repo_name):
    print('Collecting stats from repo: ' + repo_name)
    commits_dict = {
        'repo': [],
        'branch': [], # For debugging. A commit may appear in multiple branches.
        'id': [],
        'date': [],
        'author': [],
        'email': [],
        'files': [],
        'insertions': [],
        'deletions': [],
    }
    local_branches = get_local_branches()
    for b in local_branches:
        print('Collecting stats from branch: ' + b)
        switch_to_branch(b)
        command = "git log --date=short --stat"
        output = subprocess.check_output(command, shell=True, text=True, encoding='utf-8', errors='ignore')
        lines = output.split('\n')
        i = 0
        while i < len(lines):
            # Each commit starts with 'commit <id>'.
            # 'Date' line should be the last line of the header.
            while i < len(lines):
                if lines[i].startswith('commit'):
                    tokens = lines[i].strip().split()
                    commit_id = tokens[1]
                elif lines[i].startswith('Author:'):
                    tokens = lines[i].strip().split(':')
                    tokens1 = tokens[1].strip().split('<')
                    author = tokens1[0]
                    email = tokens1[1][:-1] # Exclude the ending '>' in the email.
                elif lines[i].startswith('Date:'):
                    tokens = lines[i].strip().split()
                    date_str = tokens[1].replace('\'', '') # Date is surrounded by ''.
                    commit_date = datetime.datetime.strptime(date_str, date_format)
                    i += 1
                    break
                i += 1

            num_files = 0
            num_insertions = 0
            num_deletions = 0
            while i < len(lines):
                if lines[i].startswith('commit'): # Next commit starts.
                    break
                else:
                    if ('file changed' in lines[i]) or ('files changed' in lines[i]):
                        changes = lines[i].split(',')
                        for c in changes:
                            if 'file' in c:
                                tokens = c.strip().split()
                                num_files = tokens[0]
                            elif 'insertion' in c:
                                tokens = c.strip().split()
                                num_insertions = tokens[0]
                            elif 'deletion' in c:
                                tokens = c.strip().split()
                                num_deletions = tokens[0]
                    i += 1

            # OK, we have collected enough info for one commit.
            # Same commit may appear in different branches, we should count only once.
            if commit_id not in commits_dict['id']:
                commits_dict['repo'].append(repo_name)
                commits_dict['branch'].append(b)
                commits_dict['id'].append(commit_id)
                commits_dict['date'].append(commit_date)
                commits_dict['author'].append(author)
                commits_dict['email'].append(email)
                commits_dict['files'].append(num_files)
                commits_dict['insertions'].append(num_insertions)
                commits_dict['deletions'].append(num_deletions)
    
    return pd.DataFrame(commits_dict)
        

# Collect stats of the repos in the repos dir.
# Assume the repos dir contains repos only, i.e., each subdir is a repo.
# This way we can separate the synching process and stats collection process.
def collect_stats(local_repos_dir):
    print('======== Collecting stats of repos in %s ========' % local_repos_dir)
    all_commits_df = None
    repos = []
    for subdir in os.listdir(local_repos_dir):
        if os.path.isdir(os.path.join(local_repos_dir, subdir)):
            repos.append(subdir)
    for repo_name in repos:
        saved_dir = os.getcwd()
        os.chdir(os.path.join(local_repos_dir, repo_name))
        commits_df = collect_stats_for_repo(repo_name)
        if all_commits_df is None:
            all_commits_df = commits_df
        else:
            all_commits_df = pd.concat([all_commits_df, commits_df], axis=0, ignore_index=True)
        os.chdir(saved_dir)

    return all_commits_df



def plot_stats_for_repo(repo_name, stats_df):
    plt.rcParams['figure.figsize'] = (20, 8)
    fig, axs = plt.subplots(2, 1, sharex=False)
    axs[0].bar(stats_df.index, stats_df['commits'], color='blue', width=0.2, label='Commits')
    axs[0].legend()
    axs[0].set_xticks(stats_df.index, stats_df['email'])
    axs[1].bar(stats_df.index, stats_df['edits'], color='red', width=0.2, label='Edits')
    axs[1].legend()
    axs[1].set_xticks(stats_df.index, stats_df['email'])
    plt.tight_layout()
    plt.savefig(os.path.join(output_dir, repo_name+".png"))
    return
    

def analyze_repos_stats(all_commits_df):
    final_stats_dict = {
        'repo': [],
        'total_commits': [],
        'commits_std': [],
        'total_edits': [],
        'edits_std': [],
    }
    # Use email to denote author, since an author may have different names
    # on different computers. We require the email should be consistent.
    grouping = all_commits_df.groupby(['repo', 'email'])
    aggregated_df = grouping.agg(
        commits = ('id', 'count'),
        insertions = ('insertions', 'sum'),
        deletions = ('deletions', 'sum')
    )
    group_df = aggregated_df.reset_index()
    # Now group_df has columns: repo, email, commits, insertions, deletions.
    # Combine insertions and deletions as edits.
    group_df['edits'] = group_df['insertions'] + group_df['deletions']
    for name, df in group_df.groupby('repo'):
        final_stats_dict['repo'].append(name)
        final_stats_dict['total_commits'].append(df['commits'].sum())
        final_stats_dict['commits_std'].append(df['commits'].std()/df['commits'].mean())
        final_stats_dict['total_edits'].append(df['edits'].sum())
        final_stats_dict['edits_std'].append(df['edits'].std()/df['edits'].mean())
        # Draw a plot for each repo.
        plot_stats_for_repo(name, df)

    final_stats_df = pd.DataFrame(final_stats_dict)
    # Rank: the worse, the lower the rank is.
    final_stats_df['total_commits_rank'] = final_stats_df['total_commits'].rank()
    final_stats_df['commits_std_rank'] = final_stats_df['commits_std'].rank(ascending=False)
    final_stats_df['total_edits_rank'] = final_stats_df['total_edits'].rank()
    final_stats_df['edits_std_rank'] = final_stats_df['edits_std'].rank(ascending=False)
    # Currently, we just add up all ranks.
    final_stats_df['ranks_sum'] = \
        final_stats_df['total_commits_rank'] + \
        final_stats_df['commits_std_rank'] + \
        final_stats_df['total_edits_rank'] + \
        final_stats_df['edits_std_rank']
    final_stats_df['overall_rank'] = final_stats_df['ranks_sum'].rank()
    sorted_df = final_stats_df.sort_values(by='overall_rank')
    sorted_df.to_csv(os.path.join(output_dir, 'repos_ranking.csv'), index=False)
    return


if __name__ == '__main__':
    if need_synching:
        sync_repos(test_repos, default_local_repos_dir)

    # Collect stats.
    all_commits_df = collect_stats(default_local_repos_dir)
    os.makedirs(output_dir, exist_ok=True)
    all_commits_df.to_csv(os.path.join(output_dir, 'all_commits.csv'), index=False)

    # Analyze the stats.
    all_commits_df = pd.read_csv(os.path.join(output_dir, 'all_commits.csv'), \
        parse_dates=['date'])
    analyze_repos_stats(all_commits_df)


