#!/bin/bash

# Scaffold the dictionary service: create the project directory and
# generate every source file inside it.
PROJECT_DIR="dictionary_part"
echo "Creating project directory: $PROJECT_DIR"
# Quote expansions so a directory name with spaces cannot word-split, and
# abort if mkdir/cd fails so the files below are never written into the
# wrong location.
mkdir -p -- "$PROJECT_DIR" || exit 1
cd "$PROJECT_DIR" || exit 1

# Generate all project files
echo "Generating project files..."

# Pinned Python dependencies for the scanner/worker/API stack.
cat <<'EOF' > requirements.txt
requests==2.28.1
pyppeteer==1.0.2
beautifulsoup4==4.11.1
celery==5.2.7
mysql-connector-python==8.0.31
python-dotenv==0.21.0
redis==4.5.1
flower==1.0.0
flask==2.2.2
EOF

# Runtime configuration module: Celery broker/backend URLs, MySQL
# credentials and the dictionary output directory, all overridable via
# environment variables or a .env file.
cat <<'EOF' > config.py
import os
from dotenv import load_dotenv

load_dotenv()

CELERY_BROKER_URL = os.getenv("CELERY_BROKER_URL", "redis://localhost:6379/0")
CELERY_RESULT_BACKEND = os.getenv("CELERY_RESULT_BACKEND", "redis://localhost:6379/0")

DB_CONFIG = {
    "host": os.getenv("DB_HOST", "localhost"),
    "user": os.getenv("DB_USER", "root"),
    "password": os.getenv("DB_PASSWORD", ""),
    "database": os.getenv("DB_NAME", "wordpress"),
}

TRANSLATIONS_DIR = os.getenv("TRANSLATIONS_DIR", "translations")
EOF

# Celery task definitions.  The dictionary file path is now derived from
# the configurable TRANSLATIONS_DIR instead of a hard-coded
# "translations/" prefix, so tasks.py stays consistent with
# config.py/scanner.py when the directory is overridden.
cat > tasks.py << 'EOF'
import os

from celery import Celery
from scanner import scan_static_page, scan_Ajax_content
from db import update_task_status
from config import CELERY_BROKER_URL, CELERY_RESULT_BACKEND, TRANSLATIONS_DIR

app = Celery("dictionary_tasks", broker=CELERY_BROKER_URL, backend=CELERY_RESULT_BACKEND)


def _dictionary_path(page_hash):
    """Path of a dictionary file inside the configured translations dir."""
    return os.path.join(TRANSLATIONS_DIR, f"pre_trans_{page_hash}.json")


@app.task
def process_static_page(domain, page_url, task_id):
    """Scan a static page and record the resulting dictionary file.

    Marks the task 'completed' on success, 'failed' (with the error
    message) on any exception, then re-raises so Celery sees the failure.
    """
    try:
        dictionary = scan_static_page(page_url)
        update_task_status(task_id, "completed", dictionary_file=_dictionary_path(dictionary['hash']))
        return dictionary
    except Exception as e:
        update_task_status(task_id, "failed", error=str(e))
        raise


@app.task
def process_Ajax_content(domain, page_url, task_id):
    """Scan a page's AJAX endpoints and record each dictionary file.

    NOTE(review): the status row is updated once per dictionary, so only
    the last dictionary_file is retained for the task — confirm the
    schema is meant to store a single file per task.
    """
    try:
        dictionaries = scan_Ajax_content(page_url)
        for dictionary in dictionaries:
            update_task_status(task_id, "completed", dictionary_file=_dictionary_path(dictionary['hash']))
        return dictionaries
    except Exception as e:
        update_task_status(task_id, "failed", error=str(e))
        raise
EOF

# Page/AJAX scanner.  Fixes over the previous version:
#  - headless Chromium is closed in a finally block (it leaked whenever
#    page.goto or the scan loop raised),
#  - TRANSLATIONS_DIR is created in the shared save helper, so an
#    AJAX-first run no longer crashes on a missing directory,
#  - the `re` module is imported once at module load instead of on every
#    is_dynamic_text() call.
cat > scanner.py << 'EOF'
import asyncio
import hashlib
import json
import os
import re

import requests
from bs4 import BeautifulSoup
from pyppeteer import launch

from config import TRANSLATIONS_DIR
from utils import normalize_url

# Compiled once at import time; used by is_dynamic_text().
_NUMERIC_RE = re.compile(r'^\d+$')


def _make_text_node(text):
    """Build an {id, text} node keyed by the MD5 of the text itself."""
    return {"id": f"text_{hashlib.md5(text.encode()).hexdigest()}", "text": text}


def _save_dictionary(dictionary):
    """Write a dictionary JSON file into TRANSLATIONS_DIR (created if
    missing) and return the output path."""
    os.makedirs(TRANSLATIONS_DIR, exist_ok=True)
    output_file = os.path.join(TRANSLATIONS_DIR, f"pre_trans_{dictionary['hash']}.json")
    with open(output_file, 'w', encoding='utf-8') as f:
        json.dump(dictionary, f, ensure_ascii=False, indent=2)
    return output_file


def _extract_html_texts(html, filter_dynamic=False):
    """Collect visible text nodes from HTML, skipping script/style content.

    With filter_dynamic=True, texts flagged by is_dynamic_text() are
    dropped (matches the AJAX path's behavior; the static path keeps all).
    """
    soup = BeautifulSoup(html, 'html.parser')
    text_nodes = []
    for text_node in soup.find_all(string=True):
        text = text_node.strip()
        if not text or text_node.parent.name in ['script', 'style']:
            continue
        if filter_dynamic and is_dynamic_text(text):
            continue
        text_nodes.append(_make_text_node(text))
    return text_nodes


def scan_static_page(page_url):
    """Fetch a static page and persist its text dictionary.

    Returns the dictionary dict {"hash", "type": "page", "texts"}.
    Raises requests.HTTPError on a non-2xx response.
    """
    response = requests.get(page_url, timeout=10)
    response.raise_for_status()

    page_hash = hashlib.md5(normalize_url(page_url).encode()).hexdigest()
    dictionary = {
        "hash": page_hash,
        "type": "page",
        "texts": _extract_html_texts(response.text),
    }
    _save_dictionary(dictionary)
    return dictionary


async def scan_Ajax_content_async(page_url):
    """Load a page in headless Chromium, capture its XHR URLs and persist
    a dictionary for every AJAX response containing translatable text."""
    browser = await launch(headless=True, args=['--no-sandbox', '--disable-dev-shm-usage'])
    try:
        page = await browser.newPage()
        ajax_requests = []

        async def capture_request(request):
            if request.resourceType == 'xhr':
                ajax_requests.append(request.url)

        page.on('request', capture_request)

        await page.goto(page_url, {'waitUntil': 'networkidle2', 'timeout': 10000})
        # Give late-firing XHR calls a chance after network idle.
        await asyncio.sleep(5)

        dictionaries = []
        for url in set(ajax_requests):
            try:
                response = requests.get(url, timeout=10)
                if response.status_code != 200:
                    continue

                content_type = response.headers.get('content-type', '')
                text_nodes = []

                if 'json' in content_type.lower():
                    text_nodes = extract_json_texts(response.json())
                elif 'html' in content_type.lower():
                    text_nodes = _extract_html_texts(response.text, filter_dynamic=True)

                if text_nodes:
                    ajax_hash = hashlib.md5(normalize_url(url).encode()).hexdigest()
                    dictionary = {"hash": ajax_hash, "type": "ajax", "texts": text_nodes}
                    _save_dictionary(dictionary)
                    dictionaries.append(dictionary)
            except Exception as e:
                # Best effort per URL: one bad endpoint must not abort the scan.
                print(f"Error processing AJAX URL {url}: {e}")

        return dictionaries
    finally:
        # Always close the browser, even when goto/scanning raises;
        # otherwise headless Chromium processes leak.
        await browser.close()


def scan_Ajax_content(page_url):
    """Synchronous wrapper around scan_Ajax_content_async (for Celery)."""
    return asyncio.run(scan_Ajax_content_async(page_url))


def extract_json_texts(data, text_nodes=None):
    """Recursively collect translatable strings from a JSON structure.

    Appends into text_nodes (created when None) and returns it.
    """
    if text_nodes is None:
        text_nodes = []

    if isinstance(data, dict):
        for value in data.values():
            extract_json_texts(value, text_nodes)
    elif isinstance(data, list):
        for item in data:
            extract_json_texts(item, text_nodes)
    elif isinstance(data, str):
        text = data.strip()
        if text and not is_dynamic_text(text):
            text_nodes.append(_make_text_node(text))

    return text_nodes


def is_dynamic_text(text):
    """Heuristic: pure numbers, template placeholders ('{') and
    percent-bearing strings are treated as non-translatable."""
    return bool(_NUMERIC_RE.match(text) or '{' in text or '%' in text)
EOF

# MySQL persistence helpers.  The connection boilerplate shared by both
# functions is factored into _connect(); behavior is otherwise unchanged.
cat > db.py << 'EOF'
import mysql.connector

from config import DB_CONFIG


def _connect():
    """Open a new MySQL connection using the configured credentials."""
    return mysql.connector.connect(**DB_CONFIG)


def update_task_status(task_id, status, dictionary_file=None, error=None):
    """Persist the status of a translation task.

    NOTE: dictionary_file and error are written unconditionally, so
    omitting either clears any previously stored value for that row.
    """
    conn = _connect()
    cursor = conn.cursor()
    try:
        query = """
        UPDATE wp_translation_tasks 
        SET status = %s, dictionary_file = %s, error = %s, updated_at = NOW()
        WHERE task_id = %s
        """
        cursor.execute(query, (status, dictionary_file, error, task_id))
        conn.commit()
    finally:
        cursor.close()
        conn.close()


def create_task_table():
    """Create the wp_translation_tasks table if it does not exist yet."""
    conn = _connect()
    cursor = conn.cursor()
    try:
        cursor.execute("""
        CREATE TABLE IF NOT EXISTS wp_translation_tasks (
            task_id VARCHAR(255) PRIMARY KEY,
            domain VARCHAR(255) NOT NULL,
            page_url TEXT NOT NULL,
            status ENUM('pending', 'completed', 'failed') DEFAULT 'pending',
            dictionary_file TEXT,
            error TEXT,
            created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
            updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP
        )
        """)
        conn.commit()
    finally:
        cursor.close()
        conn.close()
EOF

# URL normalization helper used to derive stable dictionary hashes.
cat > utils.py << 'EOF'
from urllib.parse import urlparse, parse_qs


def normalize_url(url):
    """Return a canonical form of `url` used as a stable cache key.

    Cache-busting query parameters ('time', 'nonce', '_') are removed and
    the remaining parameters are sorted so equivalent URLs hash alike.
    The fragment is discarded.

    NOTE(review): parse_qs keeps only the first value of a repeated
    parameter and drops blank-valued ones, so URLs differing only in
    those normalize to the same key — confirm this is acceptable.
    """
    parsed = urlparse(url)
    params = parse_qs(parsed.query)
    core_params = {k: v[0] for k, v in params.items() if k not in ['time', 'nonce', '_']}
    query = '&'.join(f"{k}={v}" for k, v in sorted(core_params.items()))
    return f"{parsed.scheme}://{parsed.netloc}{parsed.path}{'?' + query if query else ''}"
EOF

# Flask front-end for submitting and revoking Celery tasks.  Fix: task
# revocation must go through the Celery *app* instance
# (app.control.revoke); the previous `from celery import control` is not
# a usable API and made /revoke_task crash at request time.
cat > api.py << 'EOF'
from flask import Flask, request, jsonify

from tasks import app as celery_app, process_static_page, process_Ajax_content

app = Flask(__name__)


def _submit_task(celery_task):
    """Validate the JSON payload and enqueue the given Celery task.

    Expects {"domain", "page_url", "task_id"}; returns a Flask response
    (400 on missing fields, 200 with the Celery task id on success).
    """
    data = request.get_json()
    domain = data.get('domain')
    page_url = data.get('page_url')
    task_id = data.get('task_id')

    if not all([domain, page_url, task_id]):
        return jsonify({"error": "Missing required fields"}), 400

    task = celery_task.delay(domain, page_url, task_id)
    return jsonify({"task_id": task.id, "status": "Task submitted"}), 200


@app.route('/add_static_task', methods=['POST'])
def add_static_task():
    return _submit_task(process_static_page)


@app.route('/add_Ajax_task', methods=['POST'])
def add_Ajax_task():
    return _submit_task(process_Ajax_content)


@app.route('/revoke_task/<task_id>', methods=['DELETE'])
def revoke_task(task_id):
    celery_app.control.revoke(task_id, terminate=True)
    return jsonify({"task_id": task_id, "status": "Task revoked"}), 200


if __name__ == "__main__":
    app.run(host='0.0.0.0', port=5000)
EOF

# Make the generated Python entry points executable.  The previous
# version also chmod'ed setup_project.sh, which always failed: after the
# cd into "$PROJECT_DIR" that file is not in the current directory.
chmod +x -- *.py

echo "Project setup complete!"
echo "To install dependencies: pip install -r requirements.txt"
echo "To initialize database: python -c 'from db import create_task_table; create_task_table()'"
echo "To start Celery worker: celery -A tasks worker --loglevel=info --concurrency=4"
echo "To start Flower: celery -A tasks flower --port=5555"
echo "To start Flask API: python api.py"