import ast
from collections import Counter
from datetime import datetime
from pathlib import Path

import numpy as np
from git import Repo

# Remote locations for the APIJSON project on Gitee.
gitee_link = "https://gitee.com/"
repo_name = "APIJSON"
# Single owner/name slug so the casing cannot drift between URLs
# (the original mixed "tencent/APIJSON" and "Tencent/APIJSON").
repo_slug = "Tencent/APIJSON"
repo_link = gitee_link + repo_slug
commits_link = gitee_link + repo_slug + "/commits/master"
authors_link = gitee_link + repo_slug + "/contributors"
branches_link = gitee_link + repo_slug + "/branches"
branch_link = gitee_link + repo_slug + "/tree/"

# Markdown report text, accumulated throughout the script and written at the end.
md = f"# {repo_name} Project Data Analysis\n\n"
md += "---\n\n"

# Reuse a local checkout when one exists; otherwise clone it from Gitee.
if not Path(repo_name).exists():
    print("Cloning Repo...")
    repo = Repo.clone_from(repo_link, repo_name)
    print("Finished Cloning!")
else:
    repo = Repo(repo_name)

# Full commit history, newest first (GitPython's default ordering).
repo_commits = list(repo.iter_commits())
latest_commit, first_commit = repo_commits[0], repo_commits[-1]
start_date = datetime.fromtimestamp(first_commit.authored_date)
latest_date = datetime.fromtimestamp(latest_commit.authored_date)

md += (
    f"**Start Date:** {start_date}\n\n"
    f"**Latest Update:** {latest_date}\n\n"
    f"[**Commits:** {len(repo_commits)}]({commits_link})\n\n"
)

# Branch names come from the first remote's refs; "origin/HEAD" is a
# symbolic ref pointing at the default branch, not a real branch, so skip it.
gitee = repo.remotes[0]
gitee_branches = [
    # partition() strips the "<remote>/" prefix without relying on a
    # hard-coded slice width (the original's ref.name[7:] silently breaks
    # for any remote not named exactly "origin").
    ref.name.partition("/")[2]
    for ref in gitee.refs
    if ref.name != "origin/HEAD"
]
# Put the default branch first. Guard the removal so a repository whose
# default branch is not "master" does not raise ValueError here.
if "master" in gitee_branches:
    gitee_branches.remove("master")
repo_branches = ["master"] + gitee_branches
md += f"[**Branches:** {len(repo_branches)}]({branches_link})\n\n"

# Tally commits per author name. Counter.most_common() returns
# (name, count) pairs sorted by count descending — the same result (and the
# same stable first-seen tie order) as the original dict-accumulate +
# sorted(..., reverse=True) loop.
author_data = Counter(commit.author.name for commit in repo_commits).most_common()
md += f"[**Authors:** {len(author_data)}]({authors_link})\n\n"

# Branch index: one bullet per branch, linked to its tree page.
md += "## Branches\n\n"
for branch_name in repo_branches:
    md += f"- [{branch_name}]({branch_link + branch_name})\n\n"

# Ranked list of the ten most prolific authors.
md += "## Top 10 Authors\n\n"
for rank, (author_name, commit_count) in enumerate(author_data[:10], start=1):
    # Gitee profile URLs cannot contain spaces, so strip them from the name.
    profile = author_name.replace(" ", "")
    md += f"{rank}. [{profile}]({gitee_link + profile}): {commit_count} commits\n\n"

lines_changed = []
files_changed = []
commit_stats = []

# Computing per-commit stats is slow (GitPython shells out to git for each
# commit), so the repr() of every stats dict is cached one-per-line in a
# text file and reused on subsequent runs.
if Path("commit_stats.txt").exists():
    print("READING FROM CACHE")
    with open("commit_stats.txt", "r") as file:
        commit_stats = file.readlines()
else:
    print("WRITING TO CACHE")
    with open("commit_stats.txt", "w") as file:
        for commit in repo_commits:
            stats = str(commit.stats.total)
            commit_stats.append(stats)
            file.write(f"{stats}\n")

# Each line is the repr of a dict such as {'files': n, 'lines': n, ...};
# literal_eval parses it back safely (no arbitrary code execution).
parsed_stats = [ast.literal_eval(line) for line in commit_stats]
lines_changed = [entry['lines'] for entry in parsed_stats]
files_changed = [entry['files'] for entry in parsed_stats]

md += "## Commit Stats\n\n"

# Vectorize the per-commit totals so numpy can summarize them.
lines_changed = np.array(lines_changed)
files_changed = np.array(files_changed)
# Cast to int so the report shows whole numbers rather than numpy floats.
mean_lines_changed = int(lines_changed.mean())
median_lines_changed = int(np.median(lines_changed))
mean_files_changed = int(files_changed.mean())
median_files_changed = int(np.median(files_changed))

for label, value in (
    ("Average Lines Changed / Commit", mean_lines_changed),
    ("Median Lines Changed / Commit", median_lines_changed),
    ("Average Files Changed / Commit", mean_files_changed),
    ("Median Files Changed / Commit", median_files_changed),
):
    md += f"**{label}:** {value}\n\n"

# Persist the accumulated markdown report.
Path("report.md").write_text(md, encoding="UTF-8")