import logging
import re
from typing import List

from dotenv import load_dotenv

from openai_api import ChatCompletionModel
from data_interface import get_repository_info, get_commit_info
from prompt import generate_inference_prompt
from github_api import get_issue_or_pr_github

# Platforms for which linked-issue/PR retrieval is implemented.
SUPPORT_PLATFORMS = ["github"]


def _summarize_readme(repository_info):
    """Summarize the repository README in place.

    Feeds ``repository_info.content`` through the chat model's
    "summarize" task and stores the single resulting summary on
    ``repository_info.readme_abstract``. Returns ``None``.
    """
    summaries = ChatCompletionModel().chat(
        query=repository_info.content, task="summarize", n=1
    )
    repository_info.readme_abstract = summaries[0]


def _issue_pr_number(commit_info):
    commit_info_str = commit_info.info
    pattern = re.compile(r"#\d+")
    result = pattern.search(commit_info_str)
    if result is not None:
        return int(result.group()[1:])
    return None


def inference(repository_id: str, commit_ids: List[str]):
    """Run model inference for each commit of a repository.

    For every commit id: fetch its commit info, optionally look up the
    GitHub issue/PR referenced in the commit message (only for supported
    platforms), build the inference prompt, and collect one model answer.

    Args:
        repository_id: Identifier understood by ``get_repository_info``.
        commit_ids: Commit identifiers to run inference on.

    Returns:
        A list with one model answer per commit id, in input order.
    """
    repository_info = get_repository_info(repository_id)
    _summarize_readme(repository_info)
    # Hoisted out of the loop: one model client serves every commit.
    model = ChatCompletionModel()
    results = []
    for commit_id in commit_ids:
        commit_info = get_commit_info(repository_id, commit_id)
        issue_or_pr = None
        if repository_info.platform not in SUPPORT_PLATFORMS:
            # Log message intentionally kept verbatim (user-facing, Chinese).
            logging.warning(f"{repository_info.platform} 不是受支持的Platform,无法检索相关Issue或PR.")
        else:
            issue_or_pr_num = _issue_pr_number(commit_info)
            if issue_or_pr_num is not None:
                if repository_info.platform == "github":
                    issue_or_pr = get_issue_or_pr_github(repository_info.title, issue_or_pr_num)
        query = generate_inference_prompt(repository_info, commit_info, issue_or_pr)
        result = model.chat(query=query, task="inference", n=1)
        logging.debug(result[0])
        results.append(result[0])
    return results


if __name__ == '__main__':
    # Smoke-run entry point: verbose logging plus credentials from .env,
    # then a single inference call with placeholder ids.
    logging.basicConfig(level=logging.DEBUG)
    load_dotenv()
    print(inference("", [""]))
