import uuid

from opencann.build_access.enums import PipeLinePlatform
from opencann.build_access.models import RepoTaskConfig
from opencann.build_access.sync_build_access.build_access_syncer import RepoTaskRecorder
from opencann.common.clients.gitcode import GitCodeApiClient
import re

from opencann.common.utils.time_zone import to_iso, time_have_zone
from opencann.environ import GITCODE_TOKEN


class SyncTaskRecord:
    """Sync CI pipeline task results posted as GitCode pull-request comments
    into ``RepoTaskRecorder`` records.

    The pipeline bot posts a markdown table ("| Check Name | Build Result |
    Log Report | Package Link") as a PR comment; this class pages through
    enterprise PRs in a time window, parses those tables, and records one
    task entry per check name.
    """

    # Marker identifying the bot comment that carries the pipeline result table.
    pipeline_pattern = "| Check Name | Build Result | Log Report | Package Link"

    # Maps raw pipeline result strings to the status values stored downstream;
    # unknown results fall through unchanged (see sync()).
    status_map = {
        'failure': 'failed',
        'success': 'success',
        'aborted': 'abort'
    }

    # Regexes for parsing the markdown result table, compiled once at class
    # definition time instead of on every pipeline_tasks_detail() call.
    _check_name_re = re.compile(r'\[(.*?)\]\(.*?\)')           # [name](url) -> name
    _bold_result_re = re.compile(r'\*\*(.*?)\*\*')             # **result** -> result
    _check_mark_result_re = re.compile(r":white_check_mark:\s*(\S+)")
    _cross_mark_result_re = re.compile(r":x: NO\s*(\S+)")
    _link_re = re.compile(r'\[.*?\]\((.*?)\)')                 # [text](url) -> url

    def __init__(self, page, start_time, end_time):
        """
        :param page: highest page number to fetch (pagination stops earlier
            on the first empty page).
        :param start_time: lower bound on PR creation time.
        :param end_time: upper bound on PR creation time.
        """
        self.page = page
        self.start_time = start_time
        self.end_time = end_time
        self.enterprise = 'CANN'
        self.client = GitCodeApiClient()

    @property
    def pull_requests(self):
        """Return all enterprise pull requests created in the configured
        time window, paging until an empty page or ``self.page`` is reached.
        """
        res = []
        # GitCode pagination is 1-based: requesting page=0 is served as
        # page 1, so starting the range at 0 would fetch the first page
        # twice and duplicate its PRs in the result.
        for page in range(1, self.page + 1):
            params = {
                'access_token': GITCODE_TOKEN,
                'page': page,
                'per_page': 90,
                'created_after': to_iso(self.start_time),
                'created_before': to_iso(self.end_time),
            }
            response = self.client.query_enterprise_pull_requests(self.enterprise, params)
            # Parse the body once instead of calling .json() twice per page.
            page_data = response.json()
            if not page_data:
                print(f'page {page} finish')
                break
            res.extend(page_data)
        return res

    def pipeline_tasks_detail(self, text):
        """Parse the markdown pipeline-result table contained in *text*.

        :param text: comment body holding a table whose first line is the
            header row and whose second line is the markdown separator.
        :return: list of dicts keyed by table header ('Check Name',
            'Build Result', 'Log Report', 'Package Link'); cells that are
            missing or unparseable become ``None`` (except 'Check Name',
            which falls back to the raw cell text).
        """
        lines = text.split('\n')

        # Header row, e.g. "| Check Name | Build Result | ... |".
        headers = [header.strip() for header in lines[0].split('|') if header.strip()]

        result = []
        # Skip the header row and the markdown separator row.
        for line in lines[2:]:
            if not line.strip():
                continue
            columns = [column.strip() for column in line.split('|') if column.strip()]
            row_dict = {}
            for i, header in enumerate(headers):
                if i >= len(columns):
                    # Row is shorter than the header: fill the gap with None.
                    row_dict[header] = None
                    continue
                if header == 'Check Name':
                    match = self._check_name_re.search(columns[i])
                    row_dict[header] = match.group(1) if match else columns[i]
                elif header == 'Build Result':
                    # The result cell may be rendered as **bold**, prefixed
                    # with :white_check_mark:, or as ":x: NO <result>".
                    value = None
                    for pattern in (self._bold_result_re,
                                    self._check_mark_result_re,
                                    self._cross_mark_result_re):
                        match = pattern.search(columns[i])
                        if match:
                            value = match.group(1).lower()
                            break
                    row_dict[header] = value
                elif header in ('Log Report', 'Package Link'):
                    match = self._link_re.search(columns[i])
                    row_dict[header] = match.group(1) if match else None
            result.append(row_dict)
        return result

    def sync(self):
        """Record every pipeline task result found in the fetched PRs.

        :return: list of PR payloads whose result table contained a row with
            an unparseable 'Build Result' (these are skipped, not recorded).
        """
        fail = []
        for pull_detail in self.pull_requests:
            target, source = pull_detail['base'], pull_detail['head']
            repo_name = target['repo']['path']
            full_name = f'{self.enterprise}/{repo_name}'
            comments = self.client.page_query_pull_request_comments(full_name, pull_detail['number'])
            compile_commit = {}
            tasks_detail = {}
            for comment in comments:
                if self.pipeline_pattern in comment['body']:
                    # Later result tables overwrite earlier ones per check name,
                    # so only the latest result for each check is kept.
                    for detail in self.pipeline_tasks_detail(comment['body']):
                        tasks_detail[detail['Check Name']] = detail
                # Keep the first comment that triggered the build.
                # NOTE(review): 'compile/' looks like a typo for a '/compile'
                # variant — confirm against the bot's trigger keywords.
                if not compile_commit and comment['body'] in ['compile', '/compile', 'compile/']:
                    compile_commit = comment

            # Record only PRs that have both a result table and a trigger comment.
            if not (tasks_detail and compile_commit):
                continue
            for task in tasks_detail.values():
                task_config = RepoTaskConfig.objects.filter(
                    task_name=task['Check Name']
                ).query_repo_branch_configs(full_name, target['ref']).first()
                if not task['Build Result']:
                    # Result cell could not be parsed: report the PR rather
                    # than recording a bogus status.
                    fail.append(pull_detail)
                    continue
                task_detail = {
                    'repo_name': full_name,
                    'target_branch': target['ref'],
                    'commit_iid': source['sha'],
                    'pr_number': pull_detail['number'],
                    'task_name': task['Check Name'],
                    'pipeline_platform': task_config.pipeline_platform if task_config else PipeLinePlatform.CODEARTS.value,
                    # Map raw result to downstream status; unknown results
                    # pass through unchanged.
                    'status': self.status_map.get(task['Build Result']) or task['Build Result'],
                    'log_url': task['Log Report'] or '',
                    'pkg_url': task['Package Link'] or '',
                    # NOTE(review): no finish timestamp is available in the
                    # result comment, so start and end both use the trigger
                    # comment's creation time — confirm this is intended.
                    'start_time': time_have_zone(compile_commit['created_at']),
                    'end_time': time_have_zone(compile_commit['created_at']),
                    'build_id': str(uuid.uuid4()),
                }

                RepoTaskRecorder(task_detail).sync()
        return fail
