#!/usr/bin/env python3
# -*- coding: utf-8 -*-

import time
from typing import Dict, List, Optional

import requests

from config import REPO_OWNER, REPO_NAME, ACCESS_TOKEN, GITEE_API_BASE, REQUEST_TIMEOUT, DEFAULT_PER_PAGE
from utils.utils import setup_logging

# Module-level logger, configured by the project's shared logging helper.
logger = setup_logging("fetch_prs")

def fetch_pull_requests(access_token: str = ACCESS_TOKEN, state: str = "all",
                       per_page: int = 100, max_pages: Optional[int] = None) -> List[Dict]:
    """Fetch the pull-request list of the configured repo, page by page.

    Each page is retried up to 3 times with exponential backoff on both
    non-200 responses and request exceptions. A page that still fails after
    all retries is skipped. Fetching stops when an empty page is returned,
    when ``max_pages`` is reached, or when several consecutive pages fail
    (guards against an endless loop on persistent errors such as 401).

    Args:
        access_token: Gitee API token placed in the Authorization header.
        state: PR state filter passed to the API ("all", "open", ...).
        per_page: Number of PRs requested per page.
        max_pages: Stop after this many pages; None means fetch until an
            empty page (or persistent failure) is encountered.

    Returns:
        List of raw PR dicts exactly as returned by the Gitee API.
    """
    url = f"{GITEE_API_BASE}/repos/{REPO_OWNER}/{REPO_NAME}/pulls"
    headers = {
        "Authorization": f"token {access_token}",
        "Accept": "application/json"
    }
    params = {
        "state": state,
        "per_page": per_page
    }
    all_pull_requests = []
    page = 1
    max_retries = 3
    # Bug fix: without this guard, max_pages=None plus a permanent error
    # (e.g. bad token -> 401 on every page) made the outer loop spin forever.
    consecutive_failures = 0
    max_consecutive_failures = 3

    while True:
        params["page"] = page
        retry_count = 0
        page_success = False

        while retry_count <= max_retries:
            try:
                response = requests.get(url, headers=headers, params=params, timeout=REQUEST_TIMEOUT)
                if response.status_code != 200:
                    logger.error(f"请求失败，状态码: {response.status_code}")
                    if retry_count < max_retries:
                        retry_count += 1
                        delay = 2 ** retry_count  # exponential backoff: 2, 4, 8 s
                        logger.warning(f"状态码错误，第{retry_count}次重试，{delay}秒后重试...")
                        time.sleep(delay)
                        continue
                    logger.error(f"页面 {page} 请求失败，已达到最大重试次数，跳过此页")
                    break

                pull_requests = response.json()
                if not pull_requests:
                    # An empty page means we walked past the last PR.
                    logger.info(f"页面 {page} 无数据，数据获取完成")
                    return all_pull_requests

                all_pull_requests.extend(pull_requests)
                # Progress log only fires on round totals (multiples of 100).
                if len(all_pull_requests) % 100 == 0:
                    logger.info(f"已获取 {len(all_pull_requests)} 个PR...")
                page_success = True
                break  # page fetched successfully, leave the retry loop

            except Exception as e:
                retry_count += 1
                if retry_count <= max_retries:
                    delay = 2 ** retry_count  # exponential backoff: 2, 4, 8 s
                    logger.warning(f"请求异常: {e}，第{retry_count}次重试，{delay}秒后重试...")
                    time.sleep(delay)
                else:
                    logger.exception(f"请求异常: {e}，已达到最大重试次数({max_retries})，跳过此页")
                    break

        if page_success:
            consecutive_failures = 0
        else:
            consecutive_failures += 1
            logger.warning(f"页面 {page} 处理失败，继续处理下一页")
            if consecutive_failures >= max_consecutive_failures:
                # Several pages in a row failed: abort instead of looping
                # indefinitely when max_pages is None.
                logger.error(f"连续 {consecutive_failures} 页获取失败，停止抓取")
                break

        if max_pages is not None and page >= max_pages:
            break
        page += 1

    return all_pull_requests

def extract_pr_info(pr: Dict) -> Dict:
    """Extract the key fields of a raw Gitee PR dict into a flat record.

    Args:
        pr: One PR object from the API; must contain "number", "title",
            "created_at" and "user" -> "login". "state", "labels" and
            "body" are optional.

    Returns:
        Dict with keys: number, title, created_at, merge_status, labels
        (list of label names), user (login) and body (always a str).

    Raises:
        KeyError: If a required field is missing from ``pr``.
    """
    return {
        "number": pr["number"],
        "title": pr["title"],
        "created_at": pr["created_at"],
        "merge_status": pr.get("state", "N/A"),
        "labels": [label["name"] for label in pr.get("labels", [])],
        "user": pr["user"]["login"],
        # Bug fix: the API sends "body": null for empty descriptions;
        # .get("body", "") returned None then, breaking the str contract.
        "body": pr.get("body") or ""
    }