import os
import time
import io
from datetime import datetime, timezone
from flask import Response # Make sure Response is imported from flask
import traceback # For detailed error logging
from flask import Flask, request, jsonify, send_from_directory, abort
from sqlalchemy.orm import scoped_session
from flask import request, jsonify, Response
from datetime import datetime, timezone
import traceback
import io
# New imports added for the Excel export feature
import pandas as pd
from io import BytesIO

from .database import db_session, init_db
from .models import Account
from .tasks import batch_register
from .log import logger, log_info, log_error, log_warning
from .register_api import check_send_code as api_check_send_code # Rename to avoid conflict

# Determine static folder path relative to this file's location.
BASE_APP_DIR = os.path.dirname(os.path.abspath(__file__))
# Assuming frontend is one level up and then inside 'frontend'
# (i.e. <repo>/frontend next to this package directory).
FRONTEND_DIR = os.path.abspath(os.path.join(BASE_APP_DIR, '..', 'frontend'))

# Check if frontend directory exists, adjust static_folder accordingly.
# static_url_path="/" lets the frontend be served from the site root.
if os.path.exists(FRONTEND_DIR) and os.path.isdir(FRONTEND_DIR):
    app = Flask(__name__, static_folder=FRONTEND_DIR, static_url_path="/")
    log_info(f"Serving static files from: {FRONTEND_DIR}")
else:
    # API-only mode: the app still works, just without the web UI.
    app = Flask(__name__) # No static files if directory doesn't exist
    log_warning(f"Frontend directory not found at {FRONTEND_DIR}. Static file serving disabled.")

@app.teardown_appcontext
def shutdown_session(exception=None):
    """Remove the scoped SQLAlchemy session when the app context tears down."""
    if exception:
        # Surface the failure that ended the request before cleanup.
        log_error(f"Tearing down session with exception: {exception}")
    db_session.remove()

@app.route('/')
def index():
    """Serve the frontend's index.html when static serving is configured."""
    if not app.static_folder:
        return "Backend running. Static file serving disabled.", 200

    # Static serving is on — check the entry page actually exists.
    if os.path.exists(os.path.join(app.static_folder, "index.html")):
        log_info("Serving index.html")
        return send_from_directory(app.static_folder, "index.html")

    log_warning("index.html not found in static folder.")
    return "Backend running, but index.html not found.", 404


@app.route('/api/batch_register', methods=['POST'])
def handle_batch_register():
    """Run a synchronous batch-registration pass and report its outcome."""
    if not request.is_json:
        return jsonify({"error": "Request must be JSON"}), 400

    payload = request.get_json()
    try:
        requested = int(payload.get("count", 1))
        if not (1 <= requested <= 1000):  # keep batch sizes reasonable
            raise ValueError("Count must be between 1 and 1000")
    except (ValueError, TypeError):
        return jsonify({"error": "Invalid 'count' parameter. Must be an integer between 1 and 1000."}), 400

    log_info(f"Received batch register request for {requested} accounts.")
    # Runs inline in the request; large batches would suit a task queue
    # (Celery, RQ) better.
    logs, success, fail, skip = batch_register(requested)
    log_info(f"Batch registration finished. Success: {success}, Fail: {fail}")

    return jsonify({
        "logs": logs,
        "success": success,
        "fail": fail,
        "skip": skip,  # skip is currently always 0 based on tasks.py
    })

@app.route('/api/accounts', methods=['GET'])
def get_accounts():
    """Return one page of non-deleted accounts plus pagination metadata."""
    try:
        # Clamp page >= 1 and 1 <= size <= 100.
        page = max(1, int(request.args.get("page", 1)))
        size = min(100, max(1, int(request.args.get("size", 20))))
    except ValueError:
        return jsonify({"error": "Invalid pagination parameters."}), 400

    try:
        query = db_session.query(Account).filter(Account.is_deleted == False)
        total = query.count()
        rows = (
            query.order_by(Account.id.desc())
            .offset((page - 1) * size)
            .limit(size)
            .all()
        )

        serialized = [
            {
                "id": row.id,
                "email": row.email,
                "username": row.username,
                # NOTE(review): plaintext password leaves the server here —
                # consider the security implications.
                "password": row.password,
                "invite_code": row.invite_code,
                "status": row.status,
                "err_msg": row.err_msg,
                "reg_time": row.reg_time.isoformat() if row.reg_time else None,
                "is_deleted": row.is_deleted,
            }
            for row in rows
        ]

        return jsonify({
            "data": serialized,
            "total": total,
            "page": page,
            "size": size,
            "total_pages": (total + size - 1) // size,  # ceiling division
        })
    except Exception as exc:
        log_error(f"Error fetching accounts: {exc}")
        return jsonify({"error": "Failed to retrieve accounts."}), 500


@app.route('/api/account/<int:account_id>/delete', methods=['POST'])
def delete_account(account_id):
    """Soft-delete one account; already-deleted rows make this a no-op."""
    try:
        account = db_session.query(Account).filter(Account.id == account_id).first()
        if account is None:
            return jsonify({"ok": False, "msg": "Account not found"}), 404
        if account.is_deleted:
            # Idempotent: deleting twice still reports success.
            return jsonify({"ok": True, "msg": "Account already deleted"}), 200

        account.is_deleted = True
        db_session.commit()
        log_info(f"Soft-deleted account ID: {account_id}")
        return jsonify({"ok": True})
    except Exception as exc:
        db_session.rollback()
        log_error(f"Error deleting account {account_id}: {exc}")
        return jsonify({"ok": False, "msg": "Failed to delete account"}), 500


@app.route('/api/clear_failed', methods=['POST'])
def clear_failed_accounts():
    """Soft-delete every visible account whose status is 'fail'."""
    try:
        # Bulk UPDATE so the rows never have to be loaded into the session.
        affected = db_session.query(Account).filter(
            Account.status == "fail",
            Account.is_deleted == False,
        ).update({"is_deleted": True}, synchronize_session=False)
        db_session.commit()
        log_info(f"Soft-deleted {affected} failed accounts.")
        return jsonify({"ok": True, "deleted_count": affected})
    except Exception as exc:
        db_session.rollback()
        log_error(f"Error clearing failed accounts: {exc}")
        return jsonify({"ok": False, "msg": "Failed to clear failed accounts"}), 500


@app.route('/api/clear_used', methods=['POST'])
def clear_used_accounts():
    """Soft-delete every visible account whose status is 'used'.

    The 'used' status is assigned elsewhere (e.g. by the export endpoints).
    """
    try:
        # Bulk UPDATE keeps this a single statement regardless of row count.
        affected = db_session.query(Account).filter(
            Account.status == "used",
            Account.is_deleted == False,
        ).update({"is_deleted": True}, synchronize_session=False)
        db_session.commit()
        log_info(f"Soft-deleted {affected} used accounts.")
        return jsonify({"ok": True, "deleted_count": affected})
    except Exception as exc:
        db_session.rollback()
        log_error(f"Error clearing used accounts: {exc}")
        return jsonify({"ok": False, "msg": "Failed to clear used accounts"}), 500


@app.route('/api/check_send_code', methods=['GET'])
def check_verification_api():
    """Probe the target site's verification-code API with a throwaway address."""
    # Timestamp suffix keeps each probe address unique.
    probe_email = f"13693176863a{int(time.time())}@qq.com"
    available = api_check_send_code(probe_email)
    return jsonify({"ok": available, "status": "available" if available else "unavailable"})


@app.route('/logs')
def serve_log_file():
    """Serve the application log file as inline plain text."""
    from .config import LOG_FILE

    if not os.path.exists(LOG_FILE):
        log_warning("Log file requested but not found.")
        abort(404)

    try:
        # as_attachment=False renders inline; flip to True to force download.
        return send_from_directory(
            os.path.dirname(LOG_FILE),
            os.path.basename(LOG_FILE),
            as_attachment=False,
            mimetype='text/plain',
        )
    except Exception as exc:
        log_error(f"Error serving log file: {exc}")
        abort(500)

@app.route('/api/export_accounts', methods=['GET'])
def export_accounts():
    """Export up to ``count`` successful accounts and mark them 'used'.

    Query params:
        count: required positive integer — maximum number of accounts.

    Returns:
        text/plain attachment with one "username password" pair per line
        on success; JSON with 400 for bad input, 200 with a message when
        nothing matches, 500 on database errors.
    """
    try:
        # --- Parameter validation ---
        try:
            count_str = request.args.get('count')
            if count_str is None:
                log_warning("Export request missing 'count' parameter.")
                return jsonify({"error": "缺少 'count' 参数。"}), 400
            count = int(count_str)
            if count < 1:
                raise ValueError("Count must be positive")
        except (ValueError, TypeError):
            log_warning(f"Invalid 'count' parameter received: {request.args.get('count')}")
            return jsonify({"error": "无效的 'count' 参数，必须是一个正整数。"}), 400

        log_info(f"收到导出并标记为'used'的请求，数量: {count}")

        # --- Select accounts (oldest first by id) ---
        # .with_for_update() could lock these rows if concurrent exports
        # ever race; overkill for the current usage pattern.
        accounts_to_export = db_session.query(Account).filter(
            Account.status == 'success',
            Account.is_deleted == False
        ).order_by(Account.id).limit(count).all()

        actual_exported_count = len(accounts_to_export)
        if actual_exported_count == 0:
            log_info(f"请求导出 {count} 个账号，但没有符合条件的账号可导出。")
            return jsonify({"message": "没有状态为 'success' 且未删除的账号可供导出。"}), 200

        # --- Build the export payload ---
        output = io.StringIO()
        account_ids_to_update = []  # IDs to flip to 'used' below
        for acc in accounts_to_export:
            account_ids_to_update.append(acc.id)
            # Strip embedded newlines so each account stays on one line.
            clean_username = str(acc.username).replace('\n', '').replace('\r', '')
            clean_password = str(acc.password).replace('\n', '').replace('\r', '')
            output.write(f"{clean_username} {clean_password}\n")
        export_data = output.getvalue()
        output.close()

        # --- Mark exported accounts as 'used' ---
        # (account_ids_to_update is guaranteed non-empty here because
        # actual_exported_count > 0.)
        try:
            log_info(f"准备将 {len(account_ids_to_update)} 个账号的状态更新为 'used': IDs {account_ids_to_update}")
            # Bulk update; synchronize_session=False skips in-session sync.
            db_session.query(Account).filter(
                Account.id.in_(account_ids_to_update)
            ).update({"status": "used"}, synchronize_session=False)
            db_session.commit()
            log_info(f"成功将 {len(account_ids_to_update)} 个账号的状态更新为 'used'")
        except Exception as update_err:
            # If the status flip cannot be persisted, cancel the export
            # rather than hand out accounts that stay marked 'success'.
            db_session.rollback()
            error_trace = traceback.format_exc()
            log_error(f"更新导出账号状态为 'used' 时失败: {update_err}\n{error_trace}")
            return jsonify({"error": "更新账号状态时发生内部错误，导出已取消。"}), 500

        # --- Deliver the file ---
        timestamp = datetime.now(timezone.utc).strftime('%Y%m%d_%H%M%S')
        filename = f"accounts_top{actual_exported_count}_marked_used_{timestamp}.txt"
        log_info(f"准备发送 {actual_exported_count} 个已标记为'used'的账号到文件 {filename}")

        return Response(
            export_data,
            mimetype="text/plain",
            # FIX: header previously ignored the computed filename entirely.
            headers={"Content-Disposition": f"attachment;filename={filename}"}
        )

    except Exception as e:
        # Rollback in case something failed before the commit above.
        db_session.rollback()
        error_trace = traceback.format_exc()
        log_error(f"导出账号（并标记为used）时发生意外错误: {e}\n{error_trace}")
        return jsonify({"error": "导出账号时发生内部错误。"}), 500
# --- Main Execution ---

@app.route('/api/export_accounts_xlsx', methods=['GET'])
def export_accounts_xlsx():
    """Export up to ``count`` successful accounts as Excel and mark them 'used'.

    Query params:
        count: required positive integer — maximum number of accounts.

    Returns:
        An .xlsx attachment with 账号/密码 columns on success; JSON with
        400 for bad input, 200 with a message when nothing matches, 500
        on database errors.
    """
    try:
        # --- Parameter validation ---
        try:
            count_str = request.args.get('count')
            if count_str is None:
                log_warning("Export request missing 'count' parameter.")
                return jsonify({"error": "缺少 'count' 参数。"}), 400
            count = int(count_str)
            if count < 1:
                raise ValueError("Count must be positive")
        except (ValueError, TypeError):
            log_warning(f"Invalid 'count' parameter received: {request.args.get('count')}")
            return jsonify({"error": "无效的 'count' 参数，必须是一个正整数。"}), 400

        log_info(f"收到导出并标记为'used'的请求，数量: {count}")

        # --- Select accounts (oldest first by id) ---
        # .with_for_update() could lock these rows if concurrent exports
        # ever race; overkill for the current usage pattern.
        accounts_to_export = db_session.query(Account).filter(
            Account.status == 'success',
            Account.is_deleted == False
        ).order_by(Account.id).limit(count).all()

        actual_exported_count = len(accounts_to_export)
        if actual_exported_count == 0:
            log_info(f"请求导出 {count} 个账号，但没有符合条件的账号可导出。")
            return jsonify({"message": "没有状态为 'success' 且未删除的账号可供导出。"}), 200

        # --- Build the spreadsheet rows ---
        # (pandas and BytesIO come from the module-level imports.)
        data = {'账号': [], '密码': []}
        account_ids_to_update = []  # IDs to flip to 'used' below
        for acc in accounts_to_export:
            account_ids_to_update.append(acc.id)
            # Strip embedded newlines so cells stay single-line.
            clean_username = str(acc.username).replace('\n', '').replace('\r', '')
            clean_password = str(acc.password).replace('\n', '').replace('\r', '')
            data['账号'].append(clean_username)
            data['密码'].append(clean_password)

        df = pd.DataFrame(data)

        # --- Mark exported accounts as 'used' ---
        # (account_ids_to_update is guaranteed non-empty here because
        # actual_exported_count > 0.)
        try:
            log_info(f"准备将 {len(account_ids_to_update)} 个账号的状态更新为 'used': IDs {account_ids_to_update}")
            # Bulk update; synchronize_session=False skips in-session sync.
            db_session.query(Account).filter(
                Account.id.in_(account_ids_to_update)
            ).update({"status": "used"}, synchronize_session=False)
            db_session.commit()
            log_info(f"成功将 {len(account_ids_to_update)} 个账号的状态更新为 'used'")
        except Exception as update_err:
            # If the status flip cannot be persisted, cancel the export
            # rather than hand out accounts that stay marked 'success'.
            db_session.rollback()
            error_trace = traceback.format_exc()
            log_error(f"更新导出账号状态为 'used' 时失败: {update_err}\n{error_trace}")
            return jsonify({"error": "更新账号状态时发生内部错误，导出已取消。"}), 500

        # --- Deliver the workbook ---
        timestamp = datetime.now(timezone.utc).strftime('%Y%m%d_%H%M%S')
        filename = f"accounts_top{actual_exported_count}_marked_used_{timestamp}.xlsx"
        log_info(f"准备发送 {actual_exported_count} 个已标记为'used'的账号到Excel文件 {filename}")

        output = BytesIO()
        with pd.ExcelWriter(output, engine='xlsxwriter') as writer:
            df.to_excel(writer, index=False, sheet_name='账号数据')

        output.seek(0)

        return Response(
            output.getvalue(),
            mimetype="application/vnd.openxmlformats-officedocument.spreadsheetml.sheet",
            # FIX: header previously ignored the computed filename entirely.
            headers={"Content-Disposition": f"attachment;filename={filename}"}
        )

    except Exception as e:
        # Rollback in case something failed before the commit above.
        db_session.rollback()
        error_trace = traceback.format_exc()
        log_error(f"导出账号（并标记为used）时发生意外错误: {e}\n{error_trace}")
        return jsonify({"error": "导出账号时发生内部错误。"}), 500
def run_app():
    """Initialize the database, then launch the Flask development server."""
    log_info("Initializing database...")
    init_db()
    log_info("Starting Flask application...")
    # 0.0.0.0 makes the server reachable from other machines;
    # debug=True is for development only — disable for production.
    app.run(host="0.0.0.0", port=5000, debug=True)


if __name__ == '__main__':
    run_app()