import concurrent.futures
import io
import os
import re
import shutil
import subprocess
import sys
import tempfile
import threading
import time
import uuid
import zipfile
from datetime import datetime

from flask import Flask, request, render_template, jsonify, send_file
import schedule

app = Flask(__name__)
app.config['MAX_CONTENT_LENGTH'] = 50 * 1024 * 1024  # 50MB max upload size

# Paths come from environment variables so the app works inside a Docker
# container (defaults match the image layout).
UPLOAD_FOLDER = os.environ.get('UPLOAD_FOLDER', '/app/uploads')
RESULTS_FOLDER = os.environ.get('RESULTS_FOLDER', '/app/results')

# Ensure both working directories exist at startup.
os.makedirs(UPLOAD_FOLDER, exist_ok=True)
os.makedirs(RESULTS_FOLDER, exist_ok=True)

# In-memory registry of conversion tasks, keyed by task id.
# NOTE(review): plain dict shared across request/worker threads without a
# lock — CPython dict ops are atomic enough for this usage, but confirm if
# the access pattern grows.
tasks = {}

class ConversionTask:
    """Tracks the state, progress, logs and per-file results of one job."""

    def __init__(self, file_paths):
        self.file_paths = file_paths
        self.status = "pending"     # pending / running / completed / cancelled / error
        self.progress = 0           # overall progress, 0-100
        self.logs = []              # human-readable, timestamped log lines
        self.results = {}           # input path -> {"status": ..., "output_path": ...}
        self.start_time = time.time()
        self.running = False        # cleared to request cancellation
        self.error = None

    def log(self, message):
        """Append a timestamped entry to the task log."""
        self.logs.append(f"[{time.strftime('%H:%M:%S')}] {message}")

    def update_progress(self, progress):
        """Set the overall completion percentage."""
        self.progress = progress

    def update_status(self, status):
        """Set the task lifecycle status string."""
        self.status = status

    def update_result(self, file_path, result, output_path=None):
        """Record the conversion outcome for a single input file."""
        self.results[file_path] = {"status": result, "output_path": output_path}

@app.route('/')
def index():
    """Serve the single-page upload UI."""
    return render_template('index.html')

# Health-check endpoint (e.g. for Docker / orchestrator probes)
@app.route('/health')
def health_check():
    """Report liveness plus the current server timestamp."""
    payload = {'status': 'healthy', 'timestamp': datetime.now().isoformat()}
    return jsonify(payload), 200

def safe_filename_with_preservation(filename):
    """Sanitize an uploaded filename while preserving as much of the original
    (including non-ASCII characters) as possible.

    Returns a non-empty name with no path components, no characters illegal
    on common filesystems, and at most 200 characters (extension kept).
    """
    # Strip any directory components first — defense-in-depth against path
    # traversal even before separator characters are substituted.
    filename = os.path.basename(filename)
    # Replace path separators and characters that are illegal on common
    # filesystems with underscores (':' added — it is illegal on Windows).
    filename = re.sub(r'[<>:"/\\|?*]+', '_', filename)
    # Drop ASCII control characters outright.
    filename = re.sub(r'[\x00-\x1f\x7f]', '', filename)
    # Never return an empty or purely relative name.
    if not filename or filename in ('.', '..'):
        filename = 'unnamed'
    # Cap the length at 200 characters but keep the extension intact.
    if len(filename) > 200:
        name, ext = os.path.splitext(filename)
        filename = name[:200 - len(ext)] + ext
    return filename

@app.route('/convert', methods=['POST'])
def convert():
    """Accept uploaded .ipynb files and start a background conversion task.

    Returns JSON {"task_id": ...} on success, or a 400 error when no valid
    notebook files were supplied.
    """
    files = request.files.getlist('files')

    # A uuid avoids the second-resolution collisions of the previous
    # time.time()-based ids, and keeps the upload directory name and the
    # task id in sync (they were generated at two different instants before).
    task_id = uuid.uuid4().hex
    task_dir = os.path.join(UPLOAD_FOLDER, task_id)
    os.makedirs(task_dir, exist_ok=True)

    file_paths = []
    # Persist the uploaded notebooks into the per-task directory.
    for file in files:
        # Guard against parts with an empty filename as well.
        if file and file.filename and file.filename.endswith('.ipynb'):
            # Preserve as much of the original filename as safely possible.
            safe_filename = safe_filename_with_preservation(file.filename)
            file_path = os.path.join(task_dir, safe_filename)
            file.save(file_path)
            file_paths.append(file_path)

    if not file_paths:
        shutil.rmtree(task_dir)
        return jsonify({"error": "请上传.ipynb文件"}), 400

    # Register the task before the worker starts so /status can find it.
    task = ConversionTask(file_paths)
    tasks[task_id] = task

    # Run the conversion off the request thread; clients poll /status/<id>.
    thread = threading.Thread(target=convert_files, args=(task_id, task_dir))
    thread.daemon = True
    thread.start()

    return jsonify({"task_id": task_id})

@app.route('/status/<task_id>')
def get_status(task_id):
    """Return the current status, progress, logs and results for a task."""
    task = tasks.get(task_id)
    if task is None:
        return jsonify({"error": "任务不存在"}), 404

    return jsonify({
        "status": task.status,
        "progress": task.progress,
        "logs": task.logs,
        "results": task.results,
    })

@app.route('/download/<task_id>/<filename>')
def download_file(task_id, filename):
    """Send the converted output that matches the requested *filename*.

    Bug fix: the previous version ignored *filename* entirely and served the
    first successful result, so multi-file tasks could return the wrong file.
    The requested name is matched against the output file's basename, or by
    stem (in case the client passes the original .ipynb name — TODO confirm
    which form the frontend sends).
    """
    if task_id not in tasks:
        return jsonify({"error": "任务不存在"}), 404

    task = tasks[task_id]
    requested_stem = os.path.splitext(filename)[0]

    # Find the successful result that corresponds to the requested file.
    for file_path, result in task.results.items():
        if result.get('status') != 'success':
            continue
        output_path = result.get('output_path')
        if not output_path or not os.path.exists(output_path):
            continue
        output_name = os.path.basename(output_path)
        if output_name == filename or os.path.splitext(output_name)[0] == requested_stem:
            return send_file(output_path, as_attachment=True, download_name=filename)

    return jsonify({"error": "文件不存在"}), 404

@app.route('/delete_task/<task_id>')
def delete_task(task_id):
    """Cancel a task (if running) and remove its upload/result directories."""
    task = tasks.get(task_id)
    if task is None:
        return jsonify({"error": "任务不存在"}), 404

    # Signal cancellation to the worker thread (no-op if already stopped).
    if task.running:
        task.running = False

    # Remove the per-task upload directory and its matching results directory.
    for file_path in task.file_paths:
        if not os.path.exists(file_path):
            continue
        temp_dir = os.path.dirname(file_path)
        shutil.rmtree(temp_dir, ignore_errors=True)
        result_dir = os.path.join(RESULTS_FOLDER, os.path.basename(temp_dir))
        if os.path.exists(result_dir):
            shutil.rmtree(result_dir, ignore_errors=True)

    # Forget the task entirely.
    del tasks[task_id]

    return jsonify({"success": True})

@app.route('/download_all/<task_id>')
def download_all_files(task_id):
    """Bundle every successfully converted file for *task_id* into one zip.

    The archive is built in memory (io.BytesIO). The previous implementation
    wrote a temp file and unlinked it in a ``finally`` that runs before the
    WSGI server finishes streaming the response — which fails outright on
    Windows and only worked on POSIX via the unlink-after-open semantics.
    """
    if task_id not in tasks:
        return jsonify({"error": "任务不存在"}), 404

    task = tasks[task_id]

    # Collect results that converted successfully and have an output path.
    success_results = [
        r for r in task.results.values()
        if r.get('status') == 'success' and r.get('output_path')
    ]
    if not success_results:
        return jsonify({"error": "没有可下载的文件"}), 404

    # Build the zip entirely in memory; per-task outputs share one directory,
    # so basenames are unique within the archive.
    buffer = io.BytesIO()
    with zipfile.ZipFile(buffer, 'w', zipfile.ZIP_DEFLATED) as zipf:
        for result in success_results:
            output_path = result.get('output_path')
            if os.path.exists(output_path):
                zipf.write(output_path, os.path.basename(output_path))
    buffer.seek(0)

    return send_file(
        buffer,
        as_attachment=True,
        mimetype='application/zip',
        download_name=f'converted_files_{datetime.now().strftime("%Y%m%d_%H%M%S")}.zip',
    )

def _cleanup_stale_dirs(root, cutoff, label):
    """Delete subdirectories of *root* last modified before *cutoff*.

    *label* feeds the log messages ("上传" or "结果"). Errors are printed and
    swallowed so one bad entry does not abort the sweep.
    """
    if not os.path.exists(root):
        return
    for folder_name in os.listdir(root):
        folder_path = os.path.join(root, folder_name)
        if not os.path.isdir(folder_path):
            continue
        try:
            # Use the directory's mtime as its age.
            if os.stat(folder_path).st_mtime < cutoff:
                shutil.rmtree(folder_path, ignore_errors=True)
                print(f"清理旧{label}目录: {folder_path}")
        except Exception as e:
            print(f"清理{label}目录出错: {e}")

def cleanup_old_files():
    """Remove upload and result directories older than one hour."""
    cutoff = time.time() - 3600  # one hour, in seconds
    _cleanup_stale_dirs(UPLOAD_FOLDER, cutoff, "上传")
    _cleanup_stale_dirs(RESULTS_FOLDER, cutoff, "结果")

# Schedule the cleanup job to fire at the top of every hour.
def schedule_cleanup():
    """Register the hourly cleanup job and start a daemon thread to run it."""
    schedule.every().hour.at(":00").do(cleanup_old_files)

    def _pump():
        # Poll the scheduler once a minute, forever.
        while True:
            schedule.run_pending()
            time.sleep(60)

    threading.Thread(target=_pump, daemon=True).start()
    print("定时清理任务已启动")

def convert_files(task_id, temp_dir):
    """Background worker: convert every notebook of a task via a thread pool.

    Updates the task's progress/logs/results as individual conversions
    finish, and honours cancellation requests signalled through
    ``task.running``.
    """
    task = tasks[task_id]
    task.running = True
    task.update_status("running")
    task.log("开始转换任务...")

    try:
        total = len(task.file_paths)
        done = 0

        with concurrent.futures.ThreadPoolExecutor(max_workers=4) as pool:
            pending = {
                pool.submit(convert_single_file, path, task): path
                for path in task.file_paths
            }

            for future in concurrent.futures.as_completed(pending):
                # Stop consuming results once cancellation was requested.
                if not task.running:
                    task.log("任务已取消")
                    break

                path = pending[future]
                try:
                    outcome, output_path = future.result()
                    task.update_result(path, outcome, output_path)
                except Exception as exc:
                    task.log(f"✗ 处理文件时出错: {os.path.basename(path)}")
                    task.log(f"错误详情: {str(exc)}")
                    task.update_result(path, "error")

                done += 1
                task.update_progress(int((done / total) * 100))

        if task.running:
            task.update_status("completed")
            task.log(f"转换完成！成功处理 {sum(1 for r in task.results.values() if r['status'] == 'success')} 个文件")
        else:
            task.update_status("cancelled")

    except Exception as exc:
        task.error = str(exc)
        task.update_status("error")
        task.log(f"任务发生错误: {str(exc)}")
    finally:
        task.running = False

def convert_single_file(file_path, task):
    """Convert one .ipynb file to markdown with ``jupyter nbconvert``.

    Returns a ``(status, output_path)`` tuple where status is "success",
    "failed" or "timeout" (output_path is None unless successful).
    Raises if the task was already cancelled or an unexpected error occurs.
    """
    if not task.running:
        raise Exception("任务已取消")

    file_name = os.path.basename(file_path)
    file_dir = os.path.dirname(file_path)

    # Mirror the per-task upload sub-directory layout under RESULTS_FOLDER.
    task_result_dir = os.path.join(RESULTS_FOLDER, os.path.basename(file_dir))
    os.makedirs(task_result_dir, exist_ok=True)

    task.log(f"开始转换: {file_name}")

    try:
        # The output markdown keeps the notebook's base name.
        base_name = os.path.splitext(file_name)[0]
        output_path = os.path.join(task_result_dir, base_name + '.md')

        # --output/--output-dir pin the exact output location so the file's
        # existence can be verified afterwards.
        cmd = [sys.executable, "-m", "jupyter", "nbconvert", "--to", "markdown",
               "--output", base_name,
               "--output-dir", task_result_dir,
               file_path]

        process = subprocess.run(
            cmd,
            capture_output=True,  # idiomatic replacement for stdout/stderr=PIPE
            text=True,
            timeout=300,  # 5-minute ceiling per notebook
        )

        # Success requires both a zero exit code and the expected output file.
        if process.returncode == 0 and os.path.exists(output_path):
            task.log(f"✓ 成功转换: {file_name}")
            return "success", output_path

        task.log(f"✗ 转换失败: {file_name}")
        if process.stderr:
            task.log(f"错误信息: {process.stderr}")
        return "failed", None

    except subprocess.TimeoutExpired:
        task.log(f"✗ 转换超时: {file_name}")
        return "timeout", None
    except Exception as e:
        task.log(f"✗ 转换出错: {file_name}")
        task.log(f"错误详情: {str(e)}")
        raise  # bare raise preserves the original traceback (was ``raise e``)

if __name__ == '__main__':
    # Start the hourly cleanup background thread.
    schedule_cleanup()

    # Run the built-in dev server when executed directly
    # (the Docker image serves via gunicorn instead).
    app.run(host='0.0.0.0', port=5000, debug=False)