import requests
import mysql.connector
from mysql.connector import Error
import json
import os
import sys
from datetime import datetime, timedelta
import argparse

def load_config(config_path):
    try:
        with open(config_path, 'r', encoding='utf-8') as f:
            return json.load(f)
    except FileNotFoundError:
        print(f"Configuration file {config_path} not found!")
        sys.exit(1)
    except json.JSONDecodeError:
        print(f"Invalid JSON in configuration file {config_path}!")
        sys.exit(1)

def connect_to_db(host='localhost', port=3307, database='gitlog', user='gitlog_admin', password='gitlog_admin'):
    """Open a connection to the MySQL gitlog database.

    Connection parameters default to the historical hard-coded values so
    all existing callers keep working unchanged.

    NOTE(review): credentials are hard-coded defaults; consider moving
    them into config.json alongside the GitLab settings.

    Returns:
        An open mysql.connector connection, or None on any failure.
    """
    try:
        connection = mysql.connector.connect(
            host=host,
            port=port,
            database=database,
            user=user,
            password=password
        )
        if connection.is_connected():
            print("Database connection successful!")
            return connection
        # connect() succeeded but the link reports not-connected: the old
        # code fell through and returned None implicitly; make it explicit.
        return None
    except Error as e:
        print(f"Database connection failed: {e}")
        return None

def get_last_sync_info(connection, gitlab_instance, repo_id):
    """Return the most recent sync checkpoint for a repository.

    Reads the newest transmission_log row for (gitlab_instance, repo_id)
    and returns its (last_synced_at, last_synced_sha) pair, or
    (None, None) when the repository has never been synced.
    """
    query = """
    SELECT last_synced_at, last_synced_sha
    FROM transmission_log
    WHERE gitlab_instance = %s AND repository_id = %s
    ORDER BY start_time DESC LIMIT 1
    """
    cursor = connection.cursor()
    try:
        cursor.execute(query, (gitlab_instance, repo_id))
        result = cursor.fetchone()
    finally:
        # Close the cursor even if the query raises (the old code leaked it).
        cursor.close()
    return result if result else (None, None)

def get_project_web_url(base_url, token, repo_id):
    """Fetch a project's web_url from the GitLab projects API.

    Returns the web_url string, or None when the API call fails
    (the failure is printed, matching the script's logging style).
    """
    headers = {'PRIVATE-TOKEN': token}
    # timeout prevents a hung connection from stalling the whole import
    # (matches the timeout already used for commit-detail requests below).
    response = requests.get(f"{base_url}/projects/{repo_id}", headers=headers, timeout=10)
    if response.status_code == 200:
        return response.json().get('web_url')
    else:
        print(f"Failed to get project web_url: {response.text}")
        return None

def get_gitlab_branches(base_url, token, repo_id):
    """Return the names of all branches of a repository.

    Walks the paginated GitLab branches API (100 per page) until an
    empty page is returned.

    Raises:
        Exception: if any page request returns a non-200 status.
    """
    headers = {'PRIVATE-TOKEN': token}
    branches_url = f'{base_url}/projects/{repo_id}/repository/branches'
    params = {'per_page': 100}
    branches = []
    page = 1
    while True:
        params['page'] = page
        # timeout guards against a hung connection stalling the import
        response = requests.get(branches_url, headers=headers, params=params, timeout=30)
        if response.status_code != 200:
            raise Exception(f"Failed to fetch branches: {response.status_code} - {response.text}")
        data = response.json()
        if not data:
            break
        branches.extend([branch['name'] for branch in data])
        page += 1
    print(f"Fetched {len(branches)} branches for repo {repo_id}.")
    return branches

def get_gitlab_commits(base_url, token, repo_id, ref_name=None, start_date=None, end_date=None, since=None):
    """Fetch commits of a repository via the paginated GitLab commits API.

    Args:
        base_url: GitLab API base URL (e.g. ``https://host/api/v4``).
        token: Private token sent as the PRIVATE-TOKEN header.
        repo_id: Numeric project id.
        ref_name: Optional branch/tag to restrict the listing.
        start_date: Optional lower bound, passed as the API 'since' filter.
        end_date: Optional upper bound, passed as the API 'until' filter.
        since: Optional incremental-sync timestamp. NOTE: when both
            *start_date* and *since* are given, *since* wins (it overwrites
            the same 'since' query parameter).

    Returns:
        A list of commit dicts exactly as returned by the API.

    Raises:
        Exception: if any page request returns a non-200 status.
    """
    headers = {'PRIVATE-TOKEN': token}
    commits_url = f'{base_url}/projects/{repo_id}/repository/commits'
    params = {'per_page': 100}
    if ref_name:
        params['ref_name'] = ref_name
    if start_date:
        params['since'] = start_date
    if end_date:
        params['until'] = end_date
    if since:
        # Incremental sync checkpoint takes precedence over start_date.
        params['since'] = since
    commits = []
    page = 1
    while True:
        params['page'] = page
        # timeout guards against a hung connection stalling the import
        response = requests.get(commits_url, headers=headers, params=params, timeout=30)
        if response.status_code != 200:
            raise Exception(f"Failed to fetch commits: {response.status_code} - {response.text}")
        data = response.json()
        if not data:
            break
        commits.extend(data)
        page += 1
    print(f"Fetched {len(commits)} commits for repo {repo_id} on branch '{ref_name or 'default'}'.")
    return commits

def get_all_repo_ids(base_url, token, filter_updated_since=None):
    """Return the ids of all projects visible to the token.

    Args:
        base_url: GitLab API base URL.
        token: Private token sent as the PRIVATE-TOKEN header.
        filter_updated_since: Optional date; when given, only projects
            with activity after it are listed (API 'last_activity_after').

    Raises:
        Exception: if any page request returns a non-200 status.
    """
    headers = {'PRIVATE-TOKEN': token}
    projects_url = f'{base_url}/projects'
    params = {'per_page': 100, 'simple': True}
    if filter_updated_since:
        params['last_activity_after'] = filter_updated_since
    repo_ids = []
    page = 1
    while True:
        params['page'] = page
        # timeout guards against a hung connection stalling the import
        response = requests.get(projects_url, headers=headers, params=params, timeout=30)
        if response.status_code != 200:
            raise Exception(f"Failed to fetch projects: {response.status_code} - {response.text}")
        data = response.json()
        if not data:
            break
        repo_ids.extend([project['id'] for project in data])
        page += 1
    print(f"Fetched {len(repo_ids)} repository IDs.")
    return repo_ids

def insert_commits_batch(cursor, batch, repo_id, gitlab_instance, project_web_url, dry_run=False, base_url=None, token=None):
    """Upsert one batch of commits into commit_log.

    For each commit the full commit detail is fetched to obtain line
    stats; when stats are zero the per-file diff is summed as a fallback.
    Commits that fail to parse or insert are skipped with a message so
    one bad commit does not abort the batch.

    Args:
        cursor: Open DB cursor (may be None in dry-run mode).
        batch: List of commit dicts (as returned by the commits API, with
            'branch_id' pre-attached by the caller).
        repo_id: Numeric project id.
        gitlab_instance: Logical instance tag stored with each row.
        project_web_url: Project base URL used to build commit links.
        dry_run: When True, only print what would be inserted.
        base_url: GitLab API base URL, needed for the detail/diff calls.
        token: Private token for those API calls.

    Returns:
        Number of commits inserted (or that would be inserted in dry-run).
    """
    insert_query = """
    INSERT INTO commit_log 
    (id, gitlab_instance, repository_id, branch_id, email, committer_fullname, created_at, additions, deletions, total_changes, message, web_url, is_merge, updated_at) 
    VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
    ON DUPLICATE KEY UPDATE
    email = VALUES(email),
    committer_fullname = VALUES(committer_fullname),
    created_at = VALUES(created_at),
    additions = VALUES(additions),
    deletions = VALUES(deletions),
    total_changes = VALUES(total_changes),
    message = VALUES(message),
    web_url = VALUES(web_url),
    is_merge = VALUES(is_merge),
    updated_at = VALUES(updated_at)
    """

    def _parse_created_at(value):
        # GitLab usually emits '2021-01-02T03:04:05.000+08:00', but the
        # fractional-seconds part is not guaranteed — the old single-format
        # strptime raised ValueError on timestamps without it.
        for fmt in ('%Y-%m-%dT%H:%M:%S.%f%z', '%Y-%m-%dT%H:%M:%S%z'):
            try:
                return datetime.strptime(value, fmt)
            except ValueError:
                continue
        raise ValueError(f"Unrecognized created_at timestamp: {value}")

    inserted = 0
    headers = {'PRIVATE-TOKEN': token} if token else {}
    for commit in batch:
        try:
            sha = commit['id']
            email = commit['committer_email']
            fullname = commit['committer_name']
            created_at_str = commit['created_at']
            # Stored naive (offset dropped) to match the existing DB rows.
            created_at = _parse_created_at(created_at_str).replace(tzinfo=None)
            branch_id = commit.get('branch_id', '')
            parent_ids = commit.get('parent_ids', [])

            # More than one parent means a merge commit.
            is_merge = len(parent_ids) > 1

            # Fetch full commit details to get stats (compatible with v9.5)
            commit_url = f'{base_url}/projects/{repo_id}/repository/commits/{sha}'
            commit_response = requests.get(commit_url, headers=headers, timeout=10)
            if commit_response.status_code == 200:
                full_commit = commit_response.json()
                original_additions = full_commit.get('stats', {}).get('additions', 0)
                original_deletions = full_commit.get('stats', {}).get('deletions', 0)
                print(f'Fetched stats from /commits/{sha}: additions={original_additions}, deletions={original_deletions}')
            else:
                print(f'Failed to fetch commit details: {commit_response.status_code}. Falling back to diff.')
                original_additions = 0
                original_deletions = 0

            # If stats are zero, fall back to summing the per-file diff.
            if original_additions == 0 and original_deletions == 0:
                print(f'Stats are zero, fetching diff for {sha}')
                diff_url = f'{base_url}/projects/{repo_id}/repository/commits/{sha}/diff'
                response = requests.get(diff_url, headers=headers, timeout=10)
                if response.status_code == 200:
                    diff_data = response.json()
                    additions = sum(f.get('additions', 0) for f in diff_data if 'additions' in f)
                    deletions = sum(f.get('deletions', 0) for f in diff_data if 'deletions' in f)
                    print(f'Diff calculated: additions={additions}, deletions={deletions}')
                else:
                    print(f'Failed to fetch diff: {response.status_code}. Using zeros.')
                    additions = 0
                    deletions = 0
            else:
                additions = original_additions
                deletions = original_deletions

            total = additions + deletions
            message = commit['message']
            web_url = f"{project_web_url}/commit/{sha}" if project_web_url else None
            updated_at = datetime.now()

            if dry_run:
                print(f"Would insert commit {sha} for repo {repo_id} with additions={additions}, deletions={deletions}, is_merge={is_merge}")
                inserted += 1
            else:
                cursor.execute(insert_query, (sha, gitlab_instance, repo_id, branch_id, email, fullname, created_at, additions, deletions, total, message, web_url, is_merge, updated_at))
                inserted += 1
        except ValueError as ve:
            # Bad timestamp: skip this commit, keep processing the batch.
            print(f"Date parse error for commit {commit.get('id')}: {ve}")
        except Exception as e:
            # Any other failure (API/DB) skips just this commit.
            print(f"Error inserting commit {commit.get('id')}: {e}")
    if dry_run:
        print(f"Would insert {inserted} commits in this batch for repo {repo_id}")
    return inserted

def log_transmission(connection, gitlab_instance, repo_id, start_time, end_time, status, last_synced_at, last_synced_sha, transferred_count, error_message, dry_run=False):
    """Append one transmission_log row describing a sync attempt.

    In dry-run mode only prints what would be logged and returns without
    touching the database. The cursor is always closed, even when the
    INSERT or commit raises.
    """
    if dry_run:
        print(f"Would log transmission: repo={repo_id}, status={status}, count={transferred_count}, error={error_message}")
        return
    cursor = connection.cursor()
    insert_query = """
    INSERT INTO transmission_log 
    (gitlab_instance, repository_id, start_time, end_time, status, last_synced_at, last_synced_sha, transferred_count, error_message, updated_at) 
    VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
    """
    try:
        cursor.execute(insert_query, (gitlab_instance, repo_id, start_time, end_time, status, last_synced_at, last_synced_sha, transferred_count, error_message, datetime.now()))
        connection.commit()
    finally:
        # Release the cursor even on failure (the old code leaked it).
        cursor.close()

def main():
    """CLI entry point: sync GitLab commits into the MySQL gitlog DB.

    For each selected repository: read the last sync checkpoint, fetch
    branches and commits (incrementally when a checkpoint exists),
    de-duplicate commits across branches, insert them in batches, and
    record the outcome in transmission_log.
    """
    parser = argparse.ArgumentParser(description='Import GitLab commits to DB')
    parser.add_argument('--repo_ids', help='Comma-separated repository IDs (required unless --all_repos is used)')
    parser.add_argument('--all_repos', action='store_true', help='Import from all repositories (overrides --repo_ids)')
    parser.add_argument('--filter_updated_since', help='Filter repositories updated since this date (YYYY-MM-DD), only with --all_repos')
    parser.add_argument('--stats_only', action='store_true', help='Only print repository stats, no import')
    parser.add_argument('--start_date', help='Start date (YYYY-MM-DD)')
    parser.add_argument('--end_date', help='End date (YYYY-MM-DD)')
    parser.add_argument('--batch_size', type=int, default=100, help='Batch size for insertion')
    parser.add_argument('--dry_run', action='store_true', help='Dry run without inserting')
    parser.add_argument('--env_name', choices=['local', 'jwzh'], default='jwzh', help='Environment name: local or jwzh')
    args = parser.parse_args()

    # Map the environment name to the logical instance tag stored in the DB.
    gitlab_instance = 'beijing' if args.env_name == 'local' else 'chengdu'

    if not args.all_repos and not args.repo_ids:
        parser.error('--repo_ids is required unless --all_repos is used')
    if args.filter_updated_since and not args.all_repos:
        parser.error('--filter_updated_since can only be used with --all_repos')

    script_dir = os.path.dirname(os.path.abspath(__file__))
    config_path = os.path.join(script_dir, '..', 'gitlab-api-test', 'config.json')
    config = load_config(config_path)
    env_name = args.env_name
    if env_name not in config:
        print(f"Environment '{env_name}' not found!")
        sys.exit(1)
    env_config = config[env_name]
    base_url = env_config['url']
    token = env_config['token']

    # Dry runs never open a database connection.
    connection = connect_to_db() if not args.dry_run else None

    if args.all_repos:
        repo_ids = get_all_repo_ids(base_url, token, args.filter_updated_since)
    else:
        repo_ids = [int(id_str.strip()) for id_str in args.repo_ids.split(',')]

    if args.stats_only:
        print("Repository IDs to process:")
        for repo_id in repo_ids:
            print(f"- {repo_id}")
        print(f"Total: {len(repo_ids)}")
        if connection:
            connection.close()
        return

    for repo_id in repo_ids:
        print(f"\nProcessing repository {repo_id}")
        start_time = datetime.now()
        # Initialize the checkpoint before the try block: the except
        # handler below logs these names, and get_last_sync_info itself
        # can raise — the old code hit a NameError in that case.
        last_synced_at, last_synced_sha = None, None

        try:
            if connection:
                last_synced_at, last_synced_sha = get_last_sync_info(connection, gitlab_instance, repo_id)

            project_web_url = get_project_web_url(base_url, token, repo_id)
            if not project_web_url:
                raise Exception("Could not get project web_url")

            branches = get_gitlab_branches(base_url, token, repo_id)
            all_commits = []
            # Resume one second after the last synced commit so the
            # checkpoint commit itself is not re-fetched.
            sync_since = (last_synced_at + timedelta(seconds=1)).isoformat() if last_synced_at else None
            for branch in branches:
                branch_commits = get_gitlab_commits(base_url, token, repo_id, ref_name=branch, start_date=args.start_date, end_date=args.end_date, since=sync_since)
                for commit in branch_commits:
                    commit['branch_name'] = branch
                all_commits.extend(branch_commits)

            if not all_commits:
                log_transmission(connection, gitlab_instance, repo_id, start_time, datetime.now(), 'success', last_synced_at, last_synced_sha, 0, 'No commits found', args.dry_run)
                print(f"No commits found for repo {repo_id}")
                continue

            # De-duplicate by SHA; a commit reachable from several branches
            # is kept once with all its branch names collected into a set.
            commit_dict = {}
            for commit in all_commits:
                sha = commit['id']
                if sha not in commit_dict:
                    commit_dict[sha] = commit
                    commit_dict[sha]['branches'] = set()
                commit_dict[sha]['branches'].add(commit['branch_name'])

            unique_commits = list(commit_dict.values())
            for commit in unique_commits:
                commit['branch_id'] = ','.join(sorted(commit['branches']))

            # Sort by created_at ascending.
            # NOTE(review): assumes GitLab timestamps always carry
            # fractional seconds; a commit without them raises here and the
            # whole repo is logged as a failure — confirm against the API.
            unique_commits.sort(key=lambda x: datetime.strptime(x['created_at'], '%Y-%m-%dT%H:%M:%S.%f%z'))

            total_inserted = 0
            cursor = connection.cursor() if connection else None
            for i in range(0, len(unique_commits), args.batch_size):
                batch = unique_commits[i:i + args.batch_size]
                inserted = insert_commits_batch(cursor, batch, repo_id, gitlab_instance, project_web_url, args.dry_run, base_url, token)
                total_inserted += inserted
                if connection:
                    connection.commit()
                print(f"Processed batch {i//args.batch_size + 1} for repo {repo_id}, inserted {inserted} commits")
            if cursor:
                # Release the per-repo cursor (the old code leaked it).
                cursor.close()

            # Record the newest commit seen as the next run's checkpoint.
            latest_commit = max(all_commits, key=lambda x: datetime.strptime(x['created_at'], '%Y-%m-%dT%H:%M:%S.%f%z'))
            new_last_synced_at = datetime.strptime(latest_commit['created_at'], '%Y-%m-%dT%H:%M:%S.%f%z').replace(tzinfo=None)
            new_last_synced_sha = latest_commit['id']

            log_transmission(connection, gitlab_instance, repo_id, start_time, datetime.now(), 'success', new_last_synced_at, new_last_synced_sha, total_inserted, None, args.dry_run)
            print(f"Imported {total_inserted} commits for repo {repo_id}")

        except Exception as e:
            # One failing repo must not abort the remaining repos.
            print(f"Error processing repo {repo_id}: {e}")
            log_transmission(connection, gitlab_instance, repo_id, start_time, datetime.now(), 'failure', last_synced_at, last_synced_sha, 0, str(e), args.dry_run)

    if connection:
        connection.close()

# Script entry point: run the importer only when executed directly.
if __name__ == "__main__":
    main() 