"""GitLab API 数据操作类，所有需要通过 API 的访问，都通过这个类."""

import configparser
import datetime
from typing import List, Optional

import requests

from src.git_log_import.models.account import Account
from src.git_log_import.models.commit_log import CommitLog
from src.git_log_import.models.repository import Repository

# 假设有 CommitData, DiffData, Branch, RepoId 等类型，根据需要定义或导入


class GitReader:
    """GitLab API reader: every access that goes through the API uses this class."""

    # Shared timeout (seconds) for every HTTP request. Without a timeout a
    # stalled GitLab server would hang the import run indefinitely.
    REQUEST_TIMEOUT = 10

    def __init__(self, source: str):
        """Load the GitLab connection settings for *source* from config.ini.

        Args:
            source: Suffix of the config section to read; section
                ``gitlab-<source>`` must define ``url`` (API base URL)
                and ``token`` (private access token).
        Raises:
            OSError: if config.ini cannot be opened.
            KeyError: if the section or a required key is missing.
        """
        self.source = source
        config = configparser.ConfigParser()
        with open("config.ini", "r", encoding="utf-8") as f:
            config.read_file(f)

        section = config["gitlab-" + source]
        self.base_url = section["url"]
        self.token = section["token"]
        self.headers = {
            "PRIVATE-TOKEN": self.token,
        }

    def get_users_from_gitlab(
        self, per_page: int = 100, page: int = 1
    ) -> List[Account]:
        """Fetch one page of users from the API.

        Args:
            per_page: Number of users per page.
            page: Page number (1-based).
        Returns:
            List[Account]: Accounts built from the returned JSON.
        Raises:
            Exception: if the API does not answer with HTTP 200.
        """
        users_url = f"{self.base_url}/users"
        params = {"per_page": per_page, "page": page, "simple": True}
        response = requests.get(
            users_url,
            headers=self.headers,
            params=params,
            timeout=self.REQUEST_TIMEOUT,
        )
        if response.status_code != 200:
            raise Exception(f"Failed to fetch users: {response.status_code}")

        accounts = []
        for user in response.json():
            username = user.get("username")

            # "created_at" is a full ISO-8601 timestamp; tolerate bad values.
            created_at = None
            if user.get("created_at"):
                try:
                    created_at = datetime.datetime.strptime(
                        user["created_at"], "%Y-%m-%dT%H:%M:%S.%fZ"
                    )
                except ValueError:
                    print(f"Invalid created_at format for user {username}")

            # "last_activity_on" is a bare date (no time component).
            last_activity_on = None
            if user.get("last_activity_on"):
                try:
                    last_activity_on = datetime.datetime.strptime(
                        user["last_activity_on"], "%Y-%m-%d"
                    ).date()
                except ValueError:
                    print(f"Invalid last_activity_on format for user {username}")

            accounts.append(
                Account(
                    id=int(user.get("id", 0)),
                    git_instance=self.source,
                    engineer_id=None,  # schema allows NULL; linked later
                    username=username,
                    acct_fullname=user.get("name"),
                    email=user.get("email"),
                    state=user.get("state"),
                    created_at=created_at,
                    last_activity_on=last_activity_on,
                    updated_at=datetime.datetime.now(),
                )
            )
        print(
            f"query {len(accounts)} users from {self.source} by page {page} per_page {per_page}."
        )
        return accounts

    def get_projects_from_gitlab(
        self,
        per_page: int = 100,
        page: int = 1,
        since_date: Optional[datetime.datetime] = None,
    ) -> List[Repository]:
        """Fetch one page of projects from the API.

        Args:
            per_page: Number of projects per page.
            page: Page number (1-based).
            since_date: If given, only projects with activity after this
                date are returned (GitLab ``last_activity_after`` filter).
        Returns:
            List[Repository]: Repositories built from the returned JSON.
        Raises:
            Exception: if the API does not answer with HTTP 200.
        """
        projects_url = f"{self.base_url}/projects"
        params = {"per_page": per_page, "page": page, "simple": True}
        if since_date:
            params["last_activity_after"] = since_date
        response = requests.get(
            projects_url,
            headers=self.headers,
            params=params,
            timeout=self.REQUEST_TIMEOUT,
        )
        if response.status_code != 200:
            raise Exception(f"Failed to fetch projects: {response.status_code}")

        repositories = []
        for project in response.json():
            repo_fullname = project.get("name")

            # "created_at" is a full ISO-8601 timestamp; tolerate bad values.
            created_at = None
            if project.get("created_at"):
                try:
                    created_at = datetime.datetime.strptime(
                        project["created_at"], "%Y-%m-%dT%H:%M:%S.%fZ"
                    )
                except ValueError:
                    print(f"Invalid created_at format for project {repo_fullname}")

            repositories.append(
                Repository(
                    id=int(project.get("id", 0)),
                    git_instance=self.source,
                    repo_fullname=repo_fullname,
                    project_id=None,  # schema allows NULL; linked later
                    path_with_namespace=project.get("path_with_namespace"),
                    description=project.get("description"),
                    created_at=created_at,
                    web_url=project.get("web_url"),
                    updated_at=datetime.datetime.now(),
                )
            )
        print(
            f"query {len(repositories)} projects from {self.source} by page {page} per_page {per_page}."
        )
        return repositories

    def get_all_active_project_ids(self, since_date: datetime.datetime) -> List[int]:
        """Collect the IDs of all projects active after *since_date*.

        Pages through ``get_projects_from_gitlab`` until an empty page is
        returned.

        Args:
            since_date: Lower bound for project activity.
        Returns:
            List[int]: IDs of all matching repositories.
        """
        repo_ids = []
        page = 1
        while True:
            projects = self.get_projects_from_gitlab(
                per_page=100, page=page, since_date=since_date
            )
            if not projects:
                break
            repo_ids.extend(project.id for project in projects)
            page += 1
        print(f"Fetched {len(repo_ids)} active repository IDs.")
        return repo_ids

    def get_branches(self, repo_id: str) -> List[str]:
        """Return the names of all branches of the given repository.

        Args:
            repo_id: GitLab project ID.
        Returns:
            List[str]: Branch names, across all pages.
        Raises:
            Exception: if the API does not answer with HTTP 200.
        """
        branches_url = f"{self.base_url}/projects/{repo_id}/repository/branches"
        params = {"per_page": 100}
        branches = []
        page = 1
        while True:
            params["page"] = page
            response = requests.get(
                branches_url,
                headers=self.headers,
                params=params,
                timeout=self.REQUEST_TIMEOUT,
            )
            if response.status_code != 200:
                raise Exception(f"Failed to fetch branches: {response.status_code}")
            data = response.json()
            if not data:
                break
            branches.extend(branch["name"] for branch in data)
            page += 1
        print(f"Fetched {len(branches)} branches for repo {repo_id}.")
        return branches

    def get_commits(
        self,
        repo_id: str,
        branch: str,
        start_date: datetime.datetime,
        end_date: datetime.datetime,
    ) -> List[CommitLog]:
        """For old GitLab servers (version < 10.0): fetch the commit list.

        Args:
            repo_id: GitLab project ID.
            branch: Branch name to filter by (``ref_name``); may be falsy.
            start_date: Lower bound passed as ``since``; may be falsy.
            end_date: Upper bound passed as ``until``; may be falsy.
        Returns:
            List[CommitLog]: Fully populated commit logs (not just IDs).
        Raises:
            Exception: if the API does not answer with HTTP 200.
        """
        commits_url = f"{self.base_url}/projects/{repo_id}/repository/commits"
        params = {"per_page": 100}
        if branch:
            params["ref_name"] = branch
        if start_date:
            params["since"] = start_date
        if end_date:
            params["until"] = end_date

        # Resolved once up front so each commit without a web_url can be
        # given one without an extra API round trip per commit.
        project_web_url = self.get_project_web_url(repo_id)
        commits = []
        page = 1
        while True:
            params["page"] = page
            response = requests.get(
                commits_url,
                headers=self.headers,
                params=params,
                timeout=self.REQUEST_TIMEOUT,
            )
            if response.status_code != 200:
                raise Exception(f"Failed to fetch commits: {response.status_code}")
            data = response.json()
            if not data:
                break

            for item in data:
                log = self.build_commit_log(item, repo_id)
                # The list endpoint may omit stats; fall back to the
                # single-commit endpoint to get real change counts.
                if log.total_changes == 0:
                    log = self.get_commit_by_sha(repo_id, item["id"])
                log.branch_id = branch
                if log.web_url is None or log.web_url == "":
                    log.web_url = project_web_url + "/commit/" + item["id"]
                commits.append(log)
            page += 1
        print(
            f"Fetched {len(commits)} commits for repo {repo_id} on branch '{branch}'."
        )
        return commits

    def get_commit_by_sha(self, repo_id: str, sha: str) -> CommitLog:
        """Fetch a single commit by its SHA.

        Args:
            repo_id: GitLab project ID.
            sha: Commit SHA.
        Returns:
            CommitLog: The commit log, with change counts patched from the
                diff endpoint when the stats are missing.
        Raises:
            Exception: if the API does not answer with HTTP 200.
        """
        commit_url = f"{self.base_url}/projects/{repo_id}/repository/commits/{sha}"
        commit_response = requests.get(commit_url, headers=self.headers, timeout=10)
        if commit_response.status_code == 200:
            log = self.build_commit_log(commit_response.json(), repo_id)
            if log.total_changes == 0:
                # Old GitLab API (version < 10.0) cannot return stats for
                # merge commits, so fill them in from the diff endpoint.
                self.patch_changes(log)
            return log
        else:
            raise Exception(
                f"Failed to fetch commit details: {commit_response.status_code}."
            )

    def build_commit_log(self, data: dict, repo_id: str) -> CommitLog:
        """Build a CommitLog from one commit's JSON payload.

        Args:
            data: Commit JSON as returned by the GitLab API.
            repo_id: GitLab project ID the commit belongs to.
        Returns:
            CommitLog: Log with commit type and change counts filled in
                (counts default to 0 when the payload has no stats).
        """
        log = CommitLog(
            id=data["id"],
            git_instance=self.source,
            repository_id=repo_id,
            # GitLab sends an offset-aware timestamp; stored as naive.
            created_at=datetime.datetime.strptime(
                data["created_at"], "%Y-%m-%dT%H:%M:%S.%f%z"
            ).replace(tzinfo=None),
            branch_id=data.get("branch_id", ""),
            email=data["committer_email"],
            committer_fullname=data["committer_name"],
            web_url=data.get("web_url", ""),
            message=data["message"],
            updated_at=datetime.datetime.now(),
        )

        # Classify the commit by its number of parents.
        parent_ids = data.get("parent_ids", [])
        if len(parent_ids) == 0:
            log.commit_type = 0  # root commit
        elif len(parent_ids) == 1:
            log.commit_type = 1  # normal commit
        else:
            log.commit_type = 2  # merge commit

        # Take change counts from the stats block when present; KeyError
        # covers a missing key, TypeError covers "stats": null.
        try:
            log.additions = data["stats"]["additions"]
            log.deletions = data["stats"]["deletions"]
            log.total_changes = data["stats"]["total"]
        except (KeyError, TypeError):
            log.additions = 0
            log.deletions = 0
            log.total_changes = 0
        return log

    def patch_changes(self, log: CommitLog):
        """Fill in a commit's change counts from the diff endpoint.

        Used for merge commits on old GitLab servers, where the commit
        payload carries no stats. On failure the counts are zeroed instead
        of raising, so one bad commit does not abort the whole import.
        """
        diff_url = f"{self.base_url}/projects/{log.repository_id}/repository/commits/{log.id}/diff"
        diff_response = requests.get(diff_url, headers=self.headers, timeout=10)
        if diff_response.status_code == 200:
            diff_data = diff_response.json()
            # .get(..., 0) already handles files without the key.
            log.additions = sum(f.get("additions", 0) for f in diff_data)
            log.deletions = sum(f.get("deletions", 0) for f in diff_data)
            log.total_changes = log.additions + log.deletions
        else:
            print(f"Failed to fetch diff: {diff_response.status_code}. Using zeros.")
            log.additions = 0
            log.deletions = 0
            log.total_changes = 0

    def get_project_web_url(self, repo_id: str) -> Optional[str]:
        """Return the project's web_url, or None when the lookup fails."""
        response = requests.get(
            f"{self.base_url}/projects/{repo_id}",
            headers=self.headers,
            timeout=self.REQUEST_TIMEOUT,
        )
        if response.status_code == 200:
            return response.json().get("web_url")
        else:
            print(f"Failed to get project web_url: {response.text}")
            return None