'''
Author: unknown (original header was garbled by a missing git user.name/user.email configuration)
Date: 2024-10-26 21:57:18
LastEditors: LHL 1214652981@qq.com
LastEditTime: 2024-11-21 21:49:03
FilePath: \Code  form Spyder\codeFileGeneralComment\code\verify\verify_filesgenercommet.py
Description: 这是默认设置,请设置`customMade`, 打开koroFileHeader查看配置 进行设置: https://github.com/OBKoro1/koro1FileHeader/wiki/%E9%85%8D%E7%BD%AE%E8%AE%BE%E7%BD%AE
'''
import difflib
import os
import logging
import re
import json
from datetime import datetime
import shutil

# Directory containing this script.
current_dir = os.path.dirname(os.path.abspath(__file__))
# "result" directory beside this script's parent directory; the
# replaced-files JSON reports are written here.
result_file_path = os.path.join(os.path.dirname(current_dir), "result")

def setup_verify_logging():
    """Create and return the shared 'verify' logger.

    INFO-and-above records go to ``log/verify_info.log`` and ERROR-and-above
    to ``log/verify_error.log`` inside a ``log`` directory two levels above
    this file. If that directory cannot be created, a single ``verify.log``
    in a ``log`` directory next to this file is used instead.

    Returns:
        logging.Logger: the configured 'verify' logger.

    Note: safe to call more than once — handlers are attached only on the
    first call (the original version appended new handlers on every call,
    duplicating each log line).
    """
    try:
        # "log" directory in the grandparent of this file's directory.
        main_log_dir = os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))), "log")

        # Debug aid: show where we try to create the log directory.
        print(f"Attempting to create log directory at: {main_log_dir}")

        # Ensure the log directory exists.
        os.makedirs(main_log_dir, exist_ok=True)

        logger = logging.getLogger('verify')
        logger.setLevel(logging.DEBUG)

        # Guard against duplicate handlers when called repeatedly.
        if not logger.handlers:
            info_handler = logging.FileHandler(os.path.join(main_log_dir, 'verify_info.log'), encoding='utf-8')
            error_handler = logging.FileHandler(os.path.join(main_log_dir, 'verify_error.log'), encoding='utf-8')

            formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
            info_handler.setFormatter(formatter)
            error_handler.setFormatter(formatter)

            # Level filtering per handler.
            info_handler.setLevel(logging.INFO)
            error_handler.setLevel(logging.ERROR)

            logger.addHandler(info_handler)
            logger.addHandler(error_handler)

        return logger

    except Exception as e:
        print(f"Error setting up logging: {str(e)}")
        # Fall back to a "log" directory next to this file.
        fallback_log_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), "log")
        os.makedirs(fallback_log_dir, exist_ok=True)

        basic_logger = logging.getLogger('verify')
        # Original left the level unset (effective WARNING), so .info()
        # calls were silently dropped in the fallback path.
        basic_logger.setLevel(logging.DEBUG)
        if not basic_logger.handlers:
            basic_handler = logging.FileHandler(os.path.join(fallback_log_dir, 'verify.log'), encoding='utf-8')
            # Use the same record format as the main path.
            basic_handler.setFormatter(logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s'))
            basic_logger.addHandler(basic_handler)

        return basic_logger

# Module-level logger shared by every function below.
logger = setup_verify_logging()

def remove_comments_and_whitespace(content, file_extension):
    """Strip comments and all whitespace from source text for fuzzy comparison.

    Args:
        content: raw file text.
        file_extension: lowercase extension including the dot (e.g. '.py').

    Returns:
        str: the text with comments (chosen by file type) removed and every
        whitespace character deleted.

    Note: regex stripping is approximate — comment markers inside string
    literals are also removed, and Python triple-quoted strings are treated
    as docstrings. Both files in a comparison pass through the same
    transform, so the comparison stays consistent.
    """
    if file_extension in ['.java', '.js', '.jsx', '.ts', '.tsx', '.c', '.cpp', '.cs']:
        # C-style comments. `[^\n]*` (rather than `.*?\n`) also matches a
        # line comment on the final line when the file has no trailing
        # newline — the original pattern left such comments in place.
        content = re.sub(r'//[^\n]*', '', content)
        content = re.sub(r'/\*.*?\*/', '', content, flags=re.DOTALL)
    elif file_extension in ['.py']:
        # Python '#' comments and triple-quoted strings/docstrings.
        content = re.sub(r'#[^\n]*', '', content)
        content = re.sub(r'\'\'\'.*?\'\'\'', '', content, flags=re.DOTALL)
        content = re.sub(r'""".*?"""', '', content, flags=re.DOTALL)
    elif file_extension in ['.html', '.xml']:
        # HTML/XML comments.
        content = re.sub(r'<!--.*?-->', '', content, flags=re.DOTALL)
    elif file_extension in ['.php']:
        # PHP supports both C-style and shell-style comments.
        content = re.sub(r'//[^\n]*', '', content)
        content = re.sub(r'/\*.*?\*/', '', content, flags=re.DOTALL)
        content = re.sub(r'#[^\n]*', '', content)

    # Collapse away all whitespace (spaces, tabs, newlines).
    content = re.sub(r'\s+', '', content)
    return content

def read_file(file_path):
    """Read a file and return its (possibly normalized) content.

    Files whose extension is in the supported code-type set are passed
    through remove_comments_and_whitespace() first; everything else is
    returned verbatim. On any read error the problem is logged and None
    is returned.
    """
    code_extensions = {
        '.java', '.js', '.jsx', '.ts', '.tsx',
        '.py', '.html', '.xml', '.php',
        '.c', '.cpp', '.cs', '.go', '.rb',
    }
    try:
        with open(file_path, 'r', encoding='utf-8') as handle:
            text = handle.read()
            ext = os.path.splitext(file_path)[1].lower()
            # Normalize only known code file types.
            if ext in code_extensions:
                text = remove_comments_and_whitespace(text, ext)
            return text
    except Exception as e:
        logger.error(f"读取文件出错 {file_path}: {str(e)}")
        return None

def calculate_similarity(text1, text2):
    """Return the difflib similarity ratio (0.0–1.0) of the two texts.

    An empty or None argument on either side yields 0 — there is nothing
    meaningful to compare.
    """
    if text1 and text2:
        return difflib.SequenceMatcher(None, text1, text2).ratio()
    return 0

def verify_file_general_comments(original_file_path, modified_file_path, replaced_files, similarity_threshold=1.0):
    """Compare a modified file against its original and restore it if needed.

    Both files are read in comment-stripped form, their similarity is
    computed, and when it falls below `similarity_threshold` the modified
    file is overwritten with the original's raw (comments intact) content.
    Every overwrite is recorded in `replaced_files`, which is mutated in
    place by the caller's reference.

    Returns:
        bool: True when the modified file was overwritten, False otherwise
        (including read failures and replacement errors).
    """
    normalized_original = read_file(original_file_path)
    normalized_modified = read_file(modified_file_path)

    # Bail out when either side could not be read (or stripped to nothing).
    if not normalized_original or not normalized_modified:
        return False

    similarity = calculate_similarity(normalized_original, normalized_modified)

    if similarity == 1.0:
        # Perfect match goes to the info log.
        logger.info(f"文件完全匹配 (相似度: 100%): {modified_file_path}")
    else:
        # Anything less than a perfect match goes to the error log.
        logger.error(f"文件不完全匹配 (相似度: {similarity:.2%}): {modified_file_path}")

    # Above the threshold: nothing to restore.
    if similarity >= similarity_threshold:
        return False

    try:
        # Re-read the original in raw form (comments and formatting intact)
        # and use it to overwrite the modified file.
        with open(original_file_path, 'r', encoding='utf-8') as src:
            raw_original = src.read()
        with open(modified_file_path, 'w', encoding='utf-8') as dst:
            dst.write(raw_original)
        logger.error(f"已用原始文件内容覆盖修改后文件: {modified_file_path}")
        # Record the overwrite for the JSON report.
        replaced_files[modified_file_path] = {
            "original_file": original_file_path,
            "similarity": f"{similarity:.2%}",
            "replaced_time": datetime.now().strftime("%Y-%m-%d %H:%M:%S")
        }
        return True
    except Exception as e:
        logger.error(f"替换文件时出错: {str(e)}")
        return False

def verify_directory_comments(modified_dir, file_patterns=None):
    """Scan `modified_dir` for supported code files and verify each one
    against a pristine backup, restoring any file that drifted.

    On the first run the whole tree is copied to `<modified_dir>_Original`;
    later runs compare against that backup. Overwritten files are recorded
    in a JSON report under the module-level `result_file_path` directory.

    Args:
        modified_dir: root of the (possibly modified) code tree.
        file_patterns: iterable of filename suffixes to check; defaults to
            the full list of supported code extensions.
    """
    if file_patterns is None:
        file_patterns = [
            '.java', '.js', '.jsx', '.ts', '.tsx',
            '.py', '.html', '.xml', '.php',
            '.c', '.cpp', '.cs', '.go', '.rb'
        ]
    
    # Create the pristine backup tree on the first run; reuse it afterwards.
    original_dir = modified_dir + "_Original"
    if not os.path.exists(original_dir):
        shutil.copytree(modified_dir, original_dir)
        logger.info(f"已创建原始文件备份: {original_dir}")
    
    # Make sure the result directory exists.
    os.makedirs(result_file_path, exist_ok=True)
    
    # Build the JSON report path; separators and drive colon are flattened
    # so the directory path becomes a legal file name.
    sanitized_dir_name = modified_dir.replace("\\", "_").replace(":", "").replace("/", "_")
    json_file = os.path.join(result_file_path, f"{sanitized_dir_name}_replaced_files.json")
    
    # Load any previous report; start from an empty dict when the file is
    # missing or unparsable.
    replaced_files = {}
    if os.path.exists(json_file):
        try:
            with open(json_file, 'r', encoding='utf-8') as f:
                replaced_files = json.load(f)
        except json.JSONDecodeError:
            logger.warning(f"JSON文件格式错误，将使用新的空字典: {json_file}")
    
    logger.info(f"开始扫描目录: {modified_dir}")
    
    # Walk the modified tree and verify every supported file.
    for root, _, files in os.walk(modified_dir):
        for file in files:
            if any(file.endswith(pattern) for pattern in file_patterns):
                # Relative location of this directory inside the tree.
                rel_path = os.path.relpath(root, modified_dir)
                
                # Full paths of the modified file and its backup counterpart.
                modified_file = os.path.join(root, file)
                original_file = os.path.join(original_dir, rel_path, file)
                
                if os.path.exists(original_file):
                    verify_file_general_comments(original_file, modified_file, replaced_files)
                else:
                    logger.error(f"找不到对应的原始文件: {original_file}")
    
    # Persist the report of overwritten files.
    with open(json_file, 'w', encoding='utf-8') as f:
        json.dump(replaced_files, f, ensure_ascii=False, indent=4)
    
    logger.info("目录扫描完成")
    logger.info(f"被覆盖的文件信息已保存到: {json_file}")

if __name__ == "__main__":
    # Example usage: verify a project tree against its *_Original backup.
    directory = "E:\\ws_java\\xingyun-master"
    # Either rely on the default supported extensions, or pass specific ones.
    verify_directory_comments(directory)
    # e.g. restrict to certain file types:
    # verify_directory_comments(directory, ['.java', '.py', '.js'])
