# -*- coding: utf-8 -*-
import os.path
import sys  ###
import requests  ###
import csv  ###
import codecs  ### used for text encoding in config parser
import argparse  ### good argument parser
import logging  ###
from configparser import ConfigParser  ### able to read configuration file
from datetime import datetime, date         ### used for manipulations with dates
from openpyxl import load_workbook  ###
import openpyxl
from pytz import common_timezones
from tqdm import tqdm  # show progress
import time
import pymysql
from check_missing_utils import check_missing_days_main, dump_all_prs_to_excel  # missing days util
from check_missing_utils import upload_to_nexus


#from pkg_resources import empty_provider

# Names of config.ini sections ([gitcode], [global]) and option keys.
CONFIG_POINT_LOCAL = "gitcode"
CONFIG_POINT_GLOBAL = "global"
CONFIG_FILE = "config.ini"
CONFIG_BASE_URL = "gitcode-url"
# get new token https://gitcode.com/setting/token-classic
CONFIG_TOKEN = "token"
CONFIG_MEMBER_LIST = "member-list"
CONFIG_BRANCH = "branch"
CONFIG_REPOSITORY = "repository"
CONFIG_UNTIL = "date_until"
# Default values, can be overridden by config options
# per_page / pause_after_requests / pause_duration in the [gitcode] section.
PER_PAGE = 50  # PRs requested per API page
PAUSE_AFTER_REQUESTS = 20  # throttle: sleep after this many API requests
PAUSE_DURATION = 7  # seconds
# see https://docs.gitcode.com/docs/apis/get-api-v-5-repos-owner-repo-pulls
GET_LIST_PR = '{}/api/v5/repos/{}/{}/pulls?base={}&state=all&since={}&per_page={}&page={}'
#  see https://docs.gitcode.com/docs/apis/get-api-v-5-repos-owner-repo-pulls-number-files
GET_PR_FILES = '{}/api/v5/repos/{}/{}/pulls/{}/files'
# should be set as first date when statistic started to be collected
DEFAULT_CHECK_MISS_DATE_START = "2025-09-11"

def main():
    """Collect Gitcode pull-request statistics and export them.

    Merges CLI options with config.ini (CLI wins), queries the Gitcode API
    for PRs per branch, then writes CSV/Excel/MySQL outputs and runs the
    optional missing-day check, full-dump and Nexus-upload steps.

    Returns:
        0 on success (used as the process exit code).
    Raises:
        FileNotFoundError: no config file could be located.
        ValueError: mandatory settings missing after CLI+config merge.
    """
    ### argument parsing section
    tool_description = 'Track commit by using Gitcode API'
    parser = argparse.ArgumentParser(description=tool_description,
                                     formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument('-t', '--token', dest='token', help='token')
    parser.add_argument('-l', '--url-link', dest='base_url', help='the gitcode url')
    parser.add_argument('-f', '--file', dest='member_list_file', help='Group member list file')
    parser.add_argument('-b', '--branch', dest='branch',
                        help='Branch to get report for all projects. Can be comma separated list like "master,dev,main"')
    parser.add_argument('-p', '--project', dest='project',
                        help='Specify project for report path with namespace. Possible to specify several project separated by comma"')
    parser.add_argument('-d', '--date', dest='date_since', required=True,
                        help='query date string since, eg. 2019-06-25')
    parser.add_argument('-u', '--until', dest='date_until', required=False,
                        help='query date string until, eg. 2019-12-25')
    # Fixed: this option used to require a value although the help text
    # describes a simple on/off flag.
    parser.add_argument('-v', '--verbose', dest='verbose', action='store_true',
                        help='enable verbose (debug) logging')
    parser.add_argument('--config', dest='config_path', required=False,
                        help='Path to config.ini (default: ./config.ini or ~/.gitcodestat_config.ini)')
    # BooleanOptionalAction creates paired --x / --no-x switches that take no
    # value; the previous type=bool argument is deprecated and removed in
    # Python 3.14, and "--check-missing True" never worked.
    parser.add_argument('--check-missing', dest='check_missing',
                        action=argparse.BooleanOptionalAction, default=False,
                        help='Check missing days in logs (table gitcode_logs); use --check-missing / --no-check-missing, default False')
    parser.add_argument('--check-miss-date-start', dest='check_miss_start', default=None,
                        help='Starting date for --check-missing (format YYYY-MM-DD). By default see DEFAULT_CHECK_MISS_DATE_START.')
    parser.add_argument('--check-miss-date-end', dest='check_miss_end', default=None,
                        help='End date for --check-missing (format YYYY-MM-DD). By default - today.')
    parser.add_argument('--dump-all-prs-excel', dest='dump_all_prs_excel',
                        action=argparse.BooleanOptionalAction, default=False,
                        help='Export all PRs from MySQL into Excel-file; use --dump-all-prs-excel / --no-dump-all-prs-excel, default False')
    parser.add_argument('--upload-to-nexus', dest='upload_to_nexus',
                        action=argparse.BooleanOptionalAction, default=False,
                        help='Upload generated file to Nexus (--upload-to-nexus / --no-upload-to-nexus)')
    parser.add_argument('--skip-stat', dest='skip_stat',
                        action=argparse.BooleanOptionalAction, default=False,
                        help='Skip statistics collection (--skip-stat / --no-skip-stat)')

    options = parser.parse_args()
    level = logging.DEBUG if options.verbose else logging.INFO
    logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=level)

    base_url = options.base_url
    token = options.token
    since = options.date_since.strip()
    branch_list = options.branch
    member_list_file = options.member_list_file

    ### config file section
    config = ConfigParser(allow_no_value=False, comment_prefixes=('#', ';'), inline_comment_prefixes='#')
    # Resolution order: --config, then config.ini next to this script, then
    # the per-user fallback in the home directory.
    script_config = os.path.join(os.path.dirname(os.path.abspath(__file__)), CONFIG_FILE)
    if options.config_path:
        config_path = options.config_path
    elif os.path.exists(script_config):
        config_path = script_config
    else:
        config_path = os.path.expanduser("~/.gitcodestat_config.ini")

    if not os.path.isfile(config_path):
        raise FileNotFoundError(f"Configuration file not found: {config_path}")

    logging.info(f"Using config file: {config_path}")
    # utf-8-sig transparently strips a BOM; the context manager closes the
    # handle that codecs.open used to leak.
    with open(config_path, 'r', encoding='utf-8-sig') as config_file:
        config.read_file(config_file)

    # CLI values win; config fills in whatever was not given.
    if base_url is None:
        base_url = config.get(CONFIG_POINT_LOCAL, CONFIG_BASE_URL)
    if token is None:
        token = config.get(CONFIG_POINT_LOCAL, CONFIG_TOKEN)
    if branch_list is None:
        branch_list = config.get(CONFIG_POINT_LOCAL, CONFIG_BRANCH, fallback="").split(",")
    if member_list_file is None:
        member_list_file = config.get(CONFIG_POINT_GLOBAL, CONFIG_MEMBER_LIST)

    # Optional API paging/throttling overrides from config.
    global PER_PAGE, PAUSE_DURATION, PAUSE_AFTER_REQUESTS
    if config.has_option(CONFIG_POINT_LOCAL, 'per_page'):
        PER_PAGE = int(config.get(CONFIG_POINT_LOCAL, 'per_page'))
    if config.has_option(CONFIG_POINT_LOCAL, 'pause_after_requests'):
        PAUSE_AFTER_REQUESTS = int(config.get(CONFIG_POINT_LOCAL, 'pause_after_requests'))
    if config.has_option(CONFIG_POINT_LOCAL, 'pause_duration'):
        PAUSE_DURATION = int(config.get(CONFIG_POINT_LOCAL, 'pause_duration'))

    if base_url is None or token is None or member_list_file is None or branch_list is None:
        raise ValueError("url or token or file is invalid")
    until = options.date_until or config.get(CONFIG_POINT_LOCAL, CONFIG_UNTIL, fallback=None) or datetime.today().strftime('%Y-%m-%d')
    repositories = options.project or config.get(CONFIG_POINT_LOCAL, CONFIG_REPOSITORY)
    # TODO implement multiple repositories ("owner/repo" is parsed exactly
    # once here; a duplicated parse further down was removed).
    repo_string = repositories.split('/')
    repo_owner = repo_string[0]
    repo = repo_string[1]

    mysql_config = {
        'host': config.get("mysql", "host"),
        'user': config.get("mysql", "user"),
        'password': config.get("mysql", "password"),
        'database': config.get("mysql", "database")
    }
    # Output sinks are toggled in config with "yes"/"no".
    enable_csv = config.get(CONFIG_POINT_LOCAL, "enable_csv", fallback="no").lower() == "yes"
    enable_excel = config.get(CONFIG_POINT_LOCAL, "enable_excel", fallback="no").lower() == "yes"
    enable_mysql = config.get(CONFIG_POINT_LOCAL, "enable_mysql", fallback="no").lower() == "yes"

    # Normalize branches once: config supplies a list (already split), the
    # CLI supplies a comma-separated string.
    branches = branch_list if isinstance(branch_list, list) else branch_list.split(",")

    print(f"Starting to prepare report from Gitcode for branch: {branch_list}, repositories {repositories}, date since {since}, date until {until}")
    # member_list = read_member_list("members.xlsx") ### TODO add report for set members only
    s = requests.Session()
    s.headers = {'Private-Token': token}
    ### bundle-ca is a text file with certificate in Base64 format of intermediate CA and root CA. Used for self-signed certificates which does not exist in certifi
    s.verify = 'bundle-ca' if os.path.exists("bundle-ca") else True
    project_report = []

    # collect statistics
    if not options.skip_stat:
        for branch in tqdm(branches, desc=f"Processing Branches in {repositories}", position=0, leave=False, dynamic_ncols=True):
            prs = get_all_prs(s, base_url, repo_owner, repo, branch, since)

            # Progress bar over the PRs of this branch.
            with tqdm(total=len(prs), desc=f"Processing PRs in {branch}", position=2, leave=False, dynamic_ncols=True) as pbar:
                for c in prs:
                    user_name = c['user']['name'] if 'user' in c else "Anonymous"
                    user_login = c['user']['login'] if 'user' in c else "Anonymous"
                    # Line counts come straight from the list payload; the
                    # per-PR files endpoint (get_pr_size) is deliberately
                    # not called to save one request per PR.
                    reviewers = [item['login'] for item in c['assignees'] if item['accept']]
                    ### combining all data into array
                    project_report.append({
                        'pr_id': c['id'],
                        'Name': user_name,
                        'Login': user_login,
                        'PR_Name': c['title'],
                        'PR_URL': c['html_url'],
                        'PR_State': c['state'],
                        'PR_Created_Date': c['created_at'],
                        'PR_Merged_Date': c['merged_at'] if c['merged_at'] != '' else None,
                        'PR_Description': c['body'],
                        'branch': branch,
                        'Repo': repo_owner + "/" + repo,
                        'additions': c['added_lines'],
                        'deletions': c['removed_lines'],
                        'reviewer': ', '.join(reviewers)
                    })
                    pbar.update(1)
    ### TODO comments in other team member's code and KLOCs reviewed
    # get_users_comments

    ### Create a report file with headlines.
    # Join the normalized list: joining the raw CLI string used to iterate
    # over its individual characters ("m, a, s, t, e, r").
    file_name = ("gitcode-prs-since-" + since + "-until-" + until +
                 ", repo " + repositories +
                 ", branch " + ", ".join(branches)).replace("/", "-")

    # csv file
    if enable_csv:
        create_csv_file(file_name, project_report)

    # Create an Excel report file with headlines
    if enable_excel:
        create_excel_file(file_name, project_report)

    # insert data into mysql table and log this run
    if enable_mysql:
        update_mysql_table(mysql_config, project_report)
        execution_start = datetime.now()
        try:
            conn = create_mysql_connection(mysql_config)
            ensure_log_table_exists(conn)  # create the log table if needed
            # NOTE(review): branch may be a Python list here when it came from
            # config — confirm how gitee_logs.branch is expected to be stored.
            log_script_execution(
                conn,
                start_time=execution_start,
                repo=repositories,
                branch=branch_list,
                owner=repo_owner,
                url=base_url,
                date_param=since,
                result="success"
            )
        except Exception as e:
            logging.error(f"Failed to log execution to DB: {e}")

    # check missing days and create scripts for missing days
    if options.check_missing:
        check_start = options.check_miss_start or DEFAULT_CHECK_MISS_DATE_START
        check_end = options.check_miss_end or datetime.today().strftime("%Y-%m-%d")
        start_date = datetime.strptime(check_start, "%Y-%m-%d").date()
        end_date = datetime.strptime(check_end, "%Y-%m-%d").date()

        with create_mysql_connection(mysql_config) as conn:
            check_missing_days_main(conn, repositories, branches, start_date, end_date)

    if options.dump_all_prs_excel:
        file_name = f"all_prs_{datetime.today().strftime('%Y-%m-%d')}"
        # NOTE(review): the -d/-u dates are deliberately blanked here so the
        # dump always contains every PR and the filename suffixes below never
        # fire — confirm this is intended before re-enabling the filter.
        date_since = ""
        date_until = ""
        if date_since:
            file_name += f"_since-{date_since}"
        if date_until:
            file_name += f"_until-{date_until}"

        file_name += ".xlsx"
        with create_mysql_connection(mysql_config) as conn:
            dump_all_prs_to_excel(conn, file_name, date_since, date_until)
        if options.upload_to_nexus:
            repository_path = config.get("nexus", "repository-path", fallback="")
            nexus_url = config.get("nexus", "url")
            repository = config.get("nexus", "repository")
            username = config.get("nexus", "username", fallback="")
            password = config.get("nexus", "password", fallback="")
            upload_to_nexus(file_name, nexus_url, repository, username, password, repository_path)

    print("All done!")
    return 0


def get_all_prs(session, base_url, project_id, repository, branch, since):
    """Fetch every PR for one repository/branch created since the given date.

    Pages through the Gitcode list-PRs endpoint. HTTP 429 responses trigger a
    PAUSE_DURATION sleep and a retry of the same page; any other non-200
    status aborts paging. After PAUSE_AFTER_REQUESTS requests a courtesy
    pause is inserted.

    Returns a list of PR dicts decoded from the API JSON (empty on failure).
    """
    since_ts = since + "T00:00:00Z"

    def page_url(page):
        return GET_LIST_PR.format(base_url, project_id, repository, branch,
                                  since_ts, PER_PAGE, page)

    # The first request both seeds the result and yields the page count from
    # the 'total_page' response header (default 1). The previous version
    # fetched page 1 twice and never checked this response's status.
    resp = session.get(page_url(1))
    while resp.status_code == 429:
        logging.warning(f"Rate limit exceeded. Waiting for {PAUSE_DURATION} second...")
        time.sleep(PAUSE_DURATION)
        resp = session.get(page_url(1))
    if resp.status_code != 200:
        logging.error(f"Failed to fetch data for page 1. Status code: {resp.status_code}")
        return []

    total_pages = int(resp.headers.get('total_page', 1))
    res = list(resp.json())
    request_count = 1
    next_page = 2

    # Progress bar over pages.
    with tqdm(total=total_pages, desc=f"Processing Pages for {repository}/{branch}",
              position=1, leave=False, dynamic_ncols=True) as pbar:
        pbar.update(1)  # page 1 already consumed above
        while next_page <= total_pages:
            resp = session.get(page_url(next_page))
            request_count += 1

            if resp.status_code == 429:
                logging.warning(f"Rate limit exceeded. Waiting for {PAUSE_DURATION} second...")
                time.sleep(PAUSE_DURATION)
                continue  # retry the same page
            elif resp.status_code != 200:
                logging.error(f"Failed to fetch data for page {next_page}. Status code: {resp.status_code}")
                break

            res.extend(resp.json())
            pbar.update(1)
            next_page += 1

            # Courtesy pause after a batch of requests.
            if request_count >= PAUSE_AFTER_REQUESTS:
                logging.info(f"Pausing for {PAUSE_DURATION} seconds after {PAUSE_AFTER_REQUESTS} requests.")
                time.sleep(PAUSE_DURATION)
                request_count = 0

    return res


def get_pr_files(session, base_url, project_id, repository, pr):
    """Return the changed-files JSON for one pull request."""
    request_url = GET_PR_FILES.format(base_url, project_id, repository, pr)
    response = session.get(request_url)
    return response.json()

def get_pr_size(session, base_url, project_id, repository, pr):
    """Return [added_lines, removed_lines] summed over all files of a PR."""
    changed_files = get_pr_files(session, base_url, project_id, repository, pr)
    total_added = sum(int(entry['additions']) for entry in changed_files)
    total_removed = sum(int(entry['deletions']) for entry in changed_files)
    return [total_added, total_removed]


def read_member_list(member_list_file):
    """Load the member roster from an .xlsx workbook.

    The first sheet must have the header row: name, email, username,
    gitcode_account. Returns a list of dicts keyed Name / Email / Username /
    GitcodeAccount; raises ValueError if the header does not match.
    """
    sheet = load_workbook(member_list_file).active
    header = tuple(sheet.cell(row=1, column=col).value for col in range(1, 5))
    if header != ("name", "email", "username", "gitcode_account"):
        raise ValueError("The table format is incorrect")

    members = []
    for row_idx in range(2, sheet.max_row + 1):
        members.append({
            'Name': sheet.cell(row=row_idx, column=1).value,
            'Email': sheet.cell(row=row_idx, column=2).value,
            'Username': sheet.cell(row=row_idx, column=3).value,
            'GitcodeAccount': sheet.cell(row=row_idx, column=4).value,
        })
    return members


def create_csv_file(file_name, project_report):
    """Write the PR report to <file_name>.csv.

    Encoded utf-8-sig (BOM) so Excel detects the encoding. Columns match
    create_excel_file; PR_Description is intentionally omitted. The file is
    now written in a single open (the old code opened it twice: once for the
    header, then re-opened in append mode for the rows).
    """
    csv_path = file_name + ".csv"
    with open(csv_path, "w", encoding='utf-8-sig', newline='') as csvfile:
        writer = csv.writer(csvfile)
        writer.writerow(["PR ID", "Name", "Login", "PR_Name", "PR_URL", "PR_State", "PR_Created_Date",
                         "PR_Merged_Date", "branch", "Repo", "Additions", "Deletions", "Reviewers"])
        for commit in project_report:
            writer.writerow([commit['pr_id'],
                             commit['Name'],
                             commit['Login'],
                             commit['PR_Name'],
                             commit['PR_URL'],
                             commit['PR_State'],
                             str(commit['PR_Created_Date']),
                             commit['PR_Merged_Date'],
                             commit['branch'],
                             commit['Repo'],
                             commit['additions'],
                             commit['deletions'],
                             commit['reviewer']
                             ])


def create_excel_file(file_name, project_report):
    """Write the PR report to <file_name>.xlsx via openpyxl.

    Columns mirror create_csv_file (PR_Description is omitted).
    """
    columns = ["PR ID", "Name", "Login", "PR_Name", "PR_URL", "PR_State",
               "PR_Created_Date", "PR_Merged_Date", "branch", "Repo",
               "Additions", "Deletions", "Reviewers"]

    workbook = openpyxl.Workbook()
    sheet = workbook.active
    sheet.append(columns)

    for entry in project_report:
        sheet.append([entry['pr_id'],
                      entry['Name'],
                      entry['Login'],
                      entry['PR_Name'],
                      entry['PR_URL'],
                      entry['PR_State'],
                      str(entry['PR_Created_Date']),
                      entry['PR_Merged_Date'],
                      entry['branch'],
                      entry['Repo'],
                      entry['additions'],
                      entry['deletions'],
                      entry['reviewer']])

    workbook.save(file_name + ".xlsx")


def update_mysql_table(mysql_config, project_report):
    """Upsert every report row into the pull_requests table.

    Previously a failed connection left mysql_conn undefined/None yet the
    insert loop still ran, logging one spurious error per row; now we bail
    out early. The connection is closed when the loop finishes.
    """
    try:
        mysql_conn = create_mysql_connection(mysql_config)
        ensure_table_exists(mysql_conn)
    except Exception as e:
        logging.error(f"MySQL connection or table creation failed: {e}")
        return

    try:
        for pr_line in project_report:
            insert_to_mysql(pr_line, mysql_conn)
    finally:
        mysql_conn.close()

def insert_to_mysql(pr_line, connection):
    """Insert or refresh one PR row in the pull_requests table.

    ON DUPLICATE KEY UPDATE makes re-runs over the same period refresh
    existing rows instead of failing. Errors are logged, not raised, so one
    bad row does not abort the whole report.
    """
    try:
        sql = """
        INSERT INTO pull_requests (
            ID, Name, Login, PR_Name, PR_URL, PR_State,
            PR_Created_Date, PR_Merged_Date, Branch, Repo,
            Additions, Deletions, Reviewers
        ) VALUES (
            %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s
        )
        ON DUPLICATE KEY UPDATE
            Name = VALUES(Name),
            Login = VALUES(Login),
            PR_Name = VALUES(PR_Name),
            PR_State = VALUES(PR_State),
            PR_Created_Date = VALUES(PR_Created_Date),
            PR_Merged_Date = VALUES(PR_Merged_Date),
            Branch = VALUES(Branch),
            Repo = VALUES(Repo),
            Additions = VALUES(Additions),
            Deletions = VALUES(Deletions),
            Reviewers = VALUES(Reviewers)
        """
        insert_line = (pr_line['pr_id'], pr_line['Name'], pr_line['Login'], pr_line['PR_Name'], pr_line['PR_URL'],
                       pr_line['PR_State'], pr_line['PR_Created_Date'], pr_line['PR_Merged_Date'],
                       pr_line['branch'], pr_line['Repo'], pr_line['additions'], pr_line['deletions'],
                       pr_line['reviewer'])
        # Context manager closes the cursor (the old code leaked it),
        # matching the style of ensure_table_exists.
        with connection.cursor() as cursor:
            cursor.execute(sql, insert_line)
        connection.commit()
    except Exception as e:
        logging.error(f"MySQL insert failed for PR #{pr_line['pr_id']}: {e}")


def create_mysql_connection(cfg):
    """Open a pymysql connection from a config dict.

    Expects the keys host / user / password / database (see the [mysql]
    section of config.ini); the charset is pinned to utf8mb4.
    """
    required = ('host', 'user', 'password', 'database')
    params = {key: cfg[key] for key in required}
    return pymysql.connect(charset='utf8mb4', **params)


def ensure_table_exists(connection):
    """Create the pull_requests table if it does not exist yet.

    The composite primary key (ID plus a 255-char prefix of PR_URL) is what
    the ON DUPLICATE KEY UPDATE in insert_to_mysql keys on.
    """
    ddl = """
    CREATE TABLE IF NOT EXISTS pull_requests (
        ID INT NOT NULL,
        Name VARCHAR(255),
        Login VARCHAR(255),
        PR_Name TEXT,
        PR_URL TEXT NOT NULL,
        PR_State VARCHAR(20),
        PR_Created_Date DATETIME,
        PR_Merged_Date DATETIME,
        Branch VARCHAR(255),
        Repo VARCHAR(255),
        Additions INT,
        Deletions INT,
        Reviewers TEXT,
        PRIMARY KEY (ID, PR_URL(255))
    )
    """
    cursor = connection.cursor()
    try:
        cursor.execute(ddl)
    finally:
        cursor.close()
    connection.commit()


def ensure_log_table_exists(connection):
    """Create the script-execution log table if it is missing.

    Failures are logged rather than raised. NOTE(review): the table is named
    'gitee_logs' while the --check-missing help text mentions 'gitcode_logs'
    — confirm which name check_missing_utils actually queries.
    """
    ddl = """
                CREATE TABLE IF NOT EXISTS gitee_logs (
                    id INT AUTO_INCREMENT PRIMARY KEY,
                    start_time DATETIME NOT NULL,
                    repo VARCHAR(255),
                    branch VARCHAR(255),
                    owner VARCHAR(255),
                    url TEXT,
                    date_param DATE,
                    result TEXT
                )
            """
    try:
        with connection.cursor() as cursor:
            cursor.execute(ddl)
        connection.commit()
    except Exception as e:
        logging.error(f"Failed to ensure gitee_logs table exists: {e}")


def log_script_execution(connection, start_time, repo, branch, owner, url, date_param, result):
    """Insert one execution record into gitee_logs.

    DB failures are logged and swallowed so a logging problem never breaks
    the main run.
    """
    insert_sql = """
                INSERT INTO gitee_logs (
                    start_time, repo, branch, owner, url, date_param, result
                ) VALUES (%s, %s, %s, %s, %s, %s, %s)
            """
    row = (start_time, repo, branch, owner, url, date_param, result)
    try:
        with connection.cursor() as cursor:
            cursor.execute(insert_sql, row)
        connection.commit()
    except Exception as e:
        logging.error(f"Logging to DB failed: {e}")


if __name__ == '__main__':
    # main() currently always returns 0, so the process exit code is 0 on
    # success; exceptions propagate as a non-zero exit.
    sys.exit(main())  ### TODO check exit code

## TODO: add notification to telegram bot about success/fail requests
## TODO: refactor, break on modules, classes etc