#!/usr/bin/env python3
import sys
import os
import re
import subprocess
import requests
import argparse
import logging
import shutil
from datetime import datetime
from commit import generate_oh_patch


# Local checkouts of the kernel trees involved in backporting.
MAINLINE_DIR = "/Volumes/Coding/linux-torvalds"  # Linus' mainline tree; source of upstream fixes
STABLE_DIR = "/Volumes/Coding/linux-stable"  # linux-stable tree (not referenced in this file)
OH_LINUX_DIR = "/Volumes/Coding/kernel_linux_5.10"  # OpenHarmony 5.10 kernel; target of the patches

# Branch names in the OpenHarmony kernel repo that receive CVE fixes.
BRANCH_MASTER = "master"
BRANCH_503 = "OpenHarmony-5.0.3-Release"
BRANCH_60 = "OpenHarmony-6.0-Release"

def setup_logger(log_file=None):
    """Build and return the shared 'CVE Bot' logger.

    Console output shows INFO and above; when *log_file* is given, a file
    handler additionally records everything down to DEBUG level.
    """
    lg = logging.getLogger('CVE Bot')
    lg.setLevel(logging.DEBUG)  # lowest level; individual handlers filter from here

    # Drop handlers left over from a previous call so messages are not duplicated.
    for stale in list(lg.handlers):
        lg.removeHandler(stale)

    fmt = logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s - %(message)s',
        datefmt='%Y-%m-%d %H:%M:%S'
    )

    # Console handler.
    console = logging.StreamHandler()
    console.setLevel(logging.INFO)
    console.setFormatter(fmt)
    lg.addHandler(console)

    # Optional file handler.
    if log_file:
        try:
            to_file = logging.FileHandler(log_file)
            to_file.setLevel(logging.DEBUG)  # the file captures every level
            to_file.setFormatter(fmt)
            lg.addHandler(to_file)
        except Exception as e:
            lg.error(f"Failed to set up file logging: {e}")

    return lg


logger = setup_logger(log_file='cvebot.log')


class CveBotError(Exception):
    """Base class for all cvebot-specific failures."""


class PatchApplyFailed(CveBotError):
    """Applying a generated patch to a branch failed."""


class RequestFailed(CveBotError):
    """An HTTP GET returned a non-200 status code."""


class CveSubjectGetFailed(CveBotError):
    """The patch subject for a CVE could not be retrieved from NVD."""


class IssueNotFound(CveBotError):
    """issue.txt has no issue number for today's date and the given branch."""


def read_cves_from_file(file_path):
    """Read CVE IDs (one per line) from *file_path*.

    Blank lines and lines not starting with 'CVE-' (case-insensitive) are
    dropped.  Exits the process on any read error.
    """
    try:
        with open(file_path, 'r') as f:
            entries = (raw.strip() for raw in f)
            cves = [entry for entry in entries
                    if entry and entry.upper().startswith('CVE-')]
            logger.debug(f"Read {len(cves)} CVEs from file")
            return cves
    except FileNotFoundError:
        logger.error(f"File not found - {file_path}")
        sys.exit(1)
    except Exception as e:
        logger.error(f"Error reading file: {e}", exc_info=True)
        sys.exit(1)


def parse_args():
    """Parse command-line arguments and return the list of CVE IDs to process.

    Exactly one of -c/--cve or -f/--file must be supplied.  Exits the
    process when no valid CVE could be collected.
    """
    parser = argparse.ArgumentParser(
        description="CVE Bot - Process CVE entries from command line or file",
        formatter_class=argparse.ArgumentDefaultsHelpFormatter
    )
    group = parser.add_mutually_exclusive_group(required=True)
    group.add_argument('-c', '--cve', help='Single CVE ID to process (e.g. CVE-2023-1234)')
    group.add_argument('-f', '--file', help='File containing list of CVE IDs (one per line)')
    args = parser.parse_args()

    cves_to_process = []
    if args.cve:
        if not args.cve.upper().startswith('CVE-'):
            logger.error(f"'{args.cve}' doesn't look like a standard CVE ID")
            sys.exit(1)
        cves_to_process.append(args.cve.upper())
        logger.debug(f"Added single CVE: {args.cve}")
    elif args.file:
        cves_to_process = read_cves_from_file(args.file)

    if not cves_to_process:
        logger.error("No valid CVEs found to process")
        # BUG FIX: this was `logger.error()` with no message, which raises
        # TypeError; the intent was clearly to stop when nothing valid remains.
        sys.exit(1)

    logger.info(f"Found {len(cves_to_process)} CVE(s) to process")
    return cves_to_process


def request_get(url, params=None, timeout=30):
    """GET *url* with optional query *params* and return the decoded JSON body.

    timeout: seconds before the request is aborted.  Without it requests
    waits indefinitely, which would hang the whole bot on a stalled
    connection; 30s keeps the retry loop in get_cve_patch_subject alive.

    Raises RequestFailed on any non-200 status code.
    """
    resp = requests.get(url, params=params, timeout=timeout)
    if resp.status_code != 200:
        raise RequestFailed(f"[GET] {url} failed: {resp.status_code}")
    return resp.json()


def _parse_subject_from_desc(desc: str):
    lines = desc.splitlines()
    if len(lines) < 3 or not lines[2].strip():
        raise ValueError(f'Can not get subject from desc:\n  {desc}')
    return lines[2].strip()
        

def get_cve_patch_subject(cve, retry=10) -> str:
    """Fetch the upstream patch subject for *cve* from the NVD REST API.

    Connection resets are retried up to *retry* times.  Raises
    CveSubjectGetFailed when the subject cannot be obtained.
    """
    attempts_allowed = retry
    while retry != 0:
        retry -= 1
        try:
            data = request_get('https://services.nvd.nist.gov/rest/json/cves/2.0',
                               {'cveId': cve})
        except requests.exceptions.ConnectionError:
            logger.warning(f"Get subject failed due to connection reset by peer, retry: {retry}")
            continue
        vulns = data.get('vulnerabilities', [])
        if vulns:
            # Only the first vulnerability entry is ever consulted.
            description = vulns[0]['cve']['descriptions'][0]['value']
            return _parse_subject_from_desc(description)
        raise CveSubjectGetFailed(f"The {cve} subject not found")
    raise CveSubjectGetFailed(f"Retried {attempts_allowed} times but still can not get subject for {cve}")


def get_issue(branch) -> str:
    """Look up today's issue number for *branch* in issue.txt.

    Each issue.txt row is comma-separated: date, then one issue id per
    branch in the order master / 5.0.3 / 6.0.  Raises ValueError for an
    unknown branch and IssueNotFound when today's row is missing.
    """
    today = datetime.now().strftime("%Y%m%d")
    column = {BRANCH_MASTER: 1, BRANCH_503: 2, BRANCH_60: 3}
    with open('issue.txt', 'r') as f:
        for raw in f:
            record = raw.strip()
            if not record:
                continue
            fields = record.split(',')
            if fields[0] != today:
                continue
            if branch not in column:
                raise ValueError(f"Invalid branch: {branch}")
            return fields[column[branch]]
    raise IssueNotFound(f"The issue of branch {branch} is not found in issue.txt, date: {today}")


def _has_git_log_cache(repo_dir, branch):
    """Return True if a git-log cache for *branch* already exists in *repo_dir*."""
    cache_path = f'{repo_dir}/{_git_log_cache_file(branch)}'
    return os.path.exists(cache_path)


def _git_log_cache_file(branch):
    return f'.git-log-oneline-{branch}.cache'


def _generate_git_log_cache(repo_dir, branch):
    """Write `git log --oneline --no-merges` for *branch* to a cache file in *repo_dir*.

    Temporarily checks out *branch* when necessary and switches back
    afterwards.  On failure the (possibly partial) cache file is removed.
    """
    cur_branch = _current_branch(repo_dir)
    if cur_branch != branch:
        _checkout_branch(repo_dir, branch)
    cachef = _git_log_cache_file(branch)
    # The shell redirect runs with cwd=repo_dir, so the cache file lands in
    # the repo.  BUG FIX: cleanup used the bare relative name, which resolved
    # against the *script's* cwd, so a partial cache was never removed.
    cache_path = os.path.join(repo_dir, cachef)
    try:
        subprocess.run(f'git log --oneline --no-merges > {cachef}',
                       cwd=repo_dir, shell=True, check=True)
    except (subprocess.CalledProcessError, KeyboardInterrupt) as e:
        logger.error(f"Generate cache file {cache_path} failed: {e}")
        if os.path.exists(cache_path):
            os.remove(cache_path)
            logger.debug(f"The cache file {cache_path} is removed due to error")
        raise  # bare raise preserves the original traceback
    if not _is_on_branch(repo_dir, cur_branch):
        _checkout_branch(repo_dir, cur_branch)


def _find_comhash_from_cache(subject, repo_dir, branch):
    """Scan the git-log cache of *branch* for a commit whose subject matches.

    An exact subject match wins; otherwise a line that merely *contains* the
    subject is accepted as long as it is not a revert.  Returns the commit
    sha, or None when no line matches.
    """
    with open(f'{repo_dir}/{_git_log_cache_file(branch)}', 'r', encoding='latin1') as f:
        for line in f:
            # BUG FIX: a blank line used to crash with IndexError on split()[0].
            parts = line.strip().split(maxsplit=1)
            if not parts:
                continue
            sha = parts[0]
            sub = parts[1] if len(parts) > 1 else ''
            if sub == subject:
                return sha
            if subject in sub and 'Revert' not in sub:
                return sha
    return None


def get_comhash(subject, repo_dir, branch) -> str:
    """Return the commit hash on *branch* whose subject matches *subject*.

    A `git log --oneline` cache is generated on first use for the branch.
    Returns None when no matching commit is found.
    """
    if _has_git_log_cache(repo_dir, branch):
        return _find_comhash_from_cache(subject, repo_dir, branch)
    logger.info(f"Generate git log cache for branch {branch} in {repo_dir}")
    _generate_git_log_cache(repo_dir, branch)
    return _find_comhash_from_cache(subject, repo_dir, branch)


def check_affected_branches(subject) -> list:
    """Return the OpenHarmony branches that do NOT yet contain *subject*."""
    repo_dir = OH_LINUX_DIR
    missing = []
    for candidate in (BRANCH_MASTER, BRANCH_503, BRANCH_60):
        checkout_branch(repo_dir, candidate)
        if not get_comhash(subject, repo_dir, candidate):
            missing.append(candidate)
    return missing


cve_patch_path = {}


def _rename_patch(patch_path, issue):
    patch_name = os.path.basename(patch_path)
    patch_dir = os.path.dirname(patch_path)
    name_without_ext, ext = os.path.splitext(patch_name)
    patch_name_update = f"{name_without_ext}.{issue}{ext}"
    updated_patch_path = f"{patch_dir}/{patch_name_update}"
    return updated_patch_path


def _modify_issue_id(patch_path, issue):
    """Rewrite the `issue:` line of a patch file to reference *issue* and save
    the result under a new, issue-tagged file name.

    The original file is left untouched; the path of the new file is returned.
    """
    with open(patch_path, 'r') as f:
        original = f.read()

    # Replace the value of every line that starts with `issue:` by `#<issue>`.
    rewritten = re.sub(
        r'^(issue:\s*)(.*)$',
        fr'\1#{issue}',
        original,
        flags=re.MULTILINE,
    )

    new_path = _rename_patch(patch_path, issue)
    with open(new_path, 'w') as f:
        f.write(rewritten)
    return new_path


def generate_patch(cve, issue, subject) -> str:
    """Produce the OpenHarmony patch file for *cve*, tagged with *issue*.

    A patch already generated for this CVE (for another branch) is reused
    with only its issue id swapped; otherwise a fresh patch is generated
    from the matching mainline commit and cached.
    """
    cached = cve_patch_path.get(cve)
    if cached:
        return _modify_issue_id(cached, issue)
    comhash = get_comhash(subject, MAINLINE_DIR, BRANCH_MASTER)
    fresh = generate_oh_patch(comhash, issue, cve)
    cve_patch_path[cve] = fresh
    return fresh


def _exist_branch(repo_dir, branch):
    """Return True if *branch* exists as a local branch in *repo_dir*.

    BUG FIX: the old substring test (`branch in stdout`) wrongly reported
    e.g. "master" as existing when only "master-fix-20240101" did.  Compare
    exact branch names per output line instead; `git branch` prefixes the
    current branch with '* ' (and worktree branches with '+ ').
    """
    ret = subprocess.run(['git', 'branch'], cwd=repo_dir, check=True,
                         capture_output=True, text=True)
    names = [line.strip().lstrip('*+ ').strip() for line in ret.stdout.splitlines()]
    return branch in names


def _current_branch(repo_dir):
    """Name of the branch currently checked out in *repo_dir*."""
    cmd = ['git', 'rev-parse', '--abbrev-ref', 'HEAD']
    result = subprocess.run(cmd, cwd=repo_dir, check=True,
                            capture_output=True, text=True)
    return result.stdout.strip()


def _is_on_branch(repo_dir, branch):
    """True when *repo_dir* currently has *branch* checked out."""
    return _current_branch(repo_dir) == branch


def _create_branch(repo_dir, branch):
    """Create *branch* in *repo_dir* as a copy of the current branch (`git branch -c`)."""
    cmd = ['git', 'branch', '-c', branch]
    subprocess.run(cmd, cwd=repo_dir, check=True)


def _checkout_branch(repo_dir, branch):
    """Check out an existing *branch* in *repo_dir*."""
    cmd = ['git', 'checkout', branch]
    subprocess.run(cmd, cwd=repo_dir, check=True)


def checkout_branch(repo_dir, branch, create=False):
    """Switch *repo_dir* onto *branch*, optionally creating it first.

    No-op when the repo is already on the branch.  When the branch does not
    exist and create is False, only a log message is emitted.
    """
    if _is_on_branch(repo_dir, branch):
        logger.info(f"The repo {repo_dir} is already on branch {branch}")
        return
    branch_known = _exist_branch(repo_dir, branch)
    if not branch_known and not create:
        logger.info(f"The branch {branch} is not in repo {repo_dir}")
        return
    if not branch_known:
        _create_branch(repo_dir, branch)
    _checkout_branch(repo_dir, branch)


def apply_patch(patch_path, repo_dir) -> bool:
    """Apply *patch_path* to *repo_dir* with `git am`.

    On failure the half-applied state is rolled back with `git am --abort`
    and False is returned; True on success.
    """
    if subprocess.run(['git', 'am', patch_path], cwd=repo_dir).returncode != 0:
        subprocess.run(['git', 'am', '--abort'], cwd=repo_dir)
        return False
    return True


def fix_branch(branch, patch_path) -> bool:
    """Apply *patch_path* on a dated fix branch forked from *branch*.

    Checks out `<branch>-fix-<YYYYMMDD>` (creating it from *branch* when
    needed) and returns whether the patch applied cleanly.
    """
    stamp = datetime.now().strftime("%Y%m%d")
    fix_name = f"{branch}-fix-{stamp}"
    checkout_branch(OH_LINUX_DIR, branch, create=False)   # base branch
    checkout_branch(OH_LINUX_DIR, fix_name, create=True)  # dated work branch
    return apply_patch(patch_path, OH_LINUX_DIR)


first_write = True


def save_patch_apply_result(cve, patch_path, result):
    """Record one `cve,patch_path,result` row in patch_apply_result.log.

    The first write of a run truncates the file; before that happens the
    previous run's log is preserved as `patch_apply_result.log.old`.
    """
    result_file = 'patch_apply_result.log'
    global first_write
    # BUG FIX: the backup used to be refreshed on *every* call, so the second
    # call of a run clobbered the previous run's backup with this run's
    # half-written log.  Back up exactly once, before the truncating write.
    if first_write and os.path.exists(result_file):
        backup_file = f'{result_file}.old'
        shutil.copy2(result_file, backup_file)
        logger.debug(f'Backup file: {result_file} -> {backup_file}')

    mode = 'w' if first_write else 'a'
    with open(result_file, mode) as f:
        f.write(f"{cve},{patch_path},{result}\n")
    first_write = False


def process_cve(cve):
    """Run the full pipeline for one CVE.

    Looks up the patch subject, finds branches still missing the fix, then
    generates and applies a patch per branch.  Every known failure mode is
    logged; nothing is raised to the caller.
    """
    try:
        subject = get_cve_patch_subject(cve)
        affected = check_affected_branches(subject)
        if not affected:
            logger.info(f"The '{cve}' is already fixed")
            return
        for branch in affected:
            try:
                issue = get_issue(branch)
                patch_path = generate_patch(cve, issue, subject)
                if fix_branch(branch, patch_path):
                    logger.info(f"Patch apply success: {patch_path}")
                    save_patch_apply_result(cve, patch_path, 'success')
                else:
                    logger.warning(f"Patch apply failed: {patch_path}")
                    save_patch_apply_result(cve, patch_path, 'fail')
            except PatchApplyFailed as e:
                logger.error(f"{cve}: The branch {branch} apply patch failed, {e}")
    except (CveSubjectGetFailed, IssueNotFound, RequestFailed) as e:
        # The three lookup failures were handled identically; one clause suffices.
        logger.error(f"{cve}: {e}")


def main():
    """Entry point: process every CVE supplied on the command line or in a file."""
    for cve in parse_args():
        process_cve(cve)


# Run only when executed as a script, not on import.
if __name__ == '__main__':
    main()