import requests
import json
import os
import time
from base64 import b64decode
from Crypto.Cipher import AES
from hashlib import md5

# === Configuration ===
BASE_URL = "https://forum.duibiao.info/api/ugc_threads/?ugc_type=salary&start={}"  # paginated API endpoint; {} is the start offset
PASSWORD = "**duibiao.info**"  # shared secret used to decrypt the OpenSSL-style payload
DATA_FILE = "salary_data.json"  # accumulated thread data (pretty-printed UTF-8 JSON)
START_FILE = "last_start.txt"  # persisted pagination offset so a crawl can resume
PAGE_SIZE = 20  # fallback step when the API response omits a "next" offset
RETRY_LIMIT = 3  # attempts per page before aborting the crawl
SLEEP_BETWEEN_REQUESTS = 1  # seconds of politeness delay after each successful page


def load_last_start():
    """Return the persisted pagination offset, or 0 when unavailable.

    Reads START_FILE (written by save_last_start). A missing or
    unreadable file (OSError) or non-integer content (ValueError) falls
    back to 0 so a fresh crawl starts from the beginning.
    """
    try:
        with open(START_FILE, "r") as f:
            return int(f.read().strip())
    except (OSError, ValueError):
        # Narrowed from a bare `except:` which would also hide real
        # programming errors (NameError, etc.) and SystemExit.
        return 0

def save_last_start(start):
    """Persist the pagination offset so an interrupted crawl can resume."""
    with open(START_FILE, "w") as handle:
        handle.write(f"{start}")

def load_existing_data():
    """Return previously collected threads from DATA_FILE, or [] if absent."""
    if not os.path.exists(DATA_FILE):
        return []
    with open(DATA_FILE, encoding="utf-8") as handle:
        return json.load(handle)

def save_data(data):
    """Write the collected threads to DATA_FILE as pretty-printed UTF-8 JSON."""
    with open(DATA_FILE, mode="w", encoding="utf-8") as handle:
        json.dump(data, handle, indent=2, ensure_ascii=False)

def remove_padding(s):
    """Strip PKCS#7 padding from a decrypted AES-CBC plaintext.

    Validates the padding before stripping: the pad length must be
    1..16 (the AES block size) and every pad byte must equal that
    length. The original unchecked ``s[:-pad_len]`` silently returned
    ``b""`` when the last byte was 0 (since ``s[:-0] == s[:0]``) and
    returned truncated garbage for any other invalid padding — e.g.
    after decrypting with a wrong password.

    Raises:
        ValueError: if *s* is empty or the padding is malformed.
    """
    if not s:
        raise ValueError("Cannot remove padding from empty data.")
    pad_len = s[-1]
    if not 1 <= pad_len <= 16 or s[-pad_len:] != bytes([pad_len]) * pad_len:
        raise ValueError("Invalid PKCS#7 padding.")
    return s[:-pad_len]

def decrypt_payload(ciphertext_b64, password):
    """Decrypt an OpenSSL ``enc``-style base64 payload with AES-256-CBC.

    The decoded blob uses OpenSSL's salted layout: the literal bytes
    ``Salted__``, an 8-byte salt, then the ciphertext. The key and IV
    are derived with OpenSSL's legacy EVP_BytesToKey KDF (single-round
    MD5), matching what the server side produces.

    Raises:
        ValueError: if the blob does not carry the ``Salted__`` header.
    """
    blob = b64decode(ciphertext_b64)
    if blob[:8] != b"Salted__":
        raise ValueError("Invalid ciphertext format.")
    salt = blob[8:16]
    body = blob[16:]

    # Legacy OpenSSL KDF: keep hashing MD5(prev_digest + password + salt)
    # until we have 48 bytes of material — a 32-byte key plus 16-byte IV.
    secret = password.encode("utf-8")
    material = b""
    digest = b""
    while len(material) < 48:
        digest = md5(digest + secret + salt).digest()
        material += digest
    key = material[:32]
    iv = material[32:48]

    plaintext = AES.new(key, AES.MODE_CBC, iv).decrypt(body)
    return remove_padding(plaintext)



def crawl_duibiao():
    """Crawl all salary threads from the forum API, resuming from cache.

    Walks the paginated, encrypted API starting from the offset stored
    in START_FILE. Each page is decrypted, de-duplicated against thread
    ids already on disk, and both the data file and the offset are
    persisted after every page so an interrupted run loses at most one
    page of work. Stops when the API reports no next page, or gives up
    after a page fails RETRY_LIMIT consecutive attempts.
    """
    print("开始抓取")
    all_threads = load_existing_data()
    # Ids already collected in earlier runs — used to skip duplicates.
    existing_ids = {t['id'] for t in all_threads}
    start = load_last_start()
    print(f"从缓存读取起点: start = {start}（已收集 {len(existing_ids)} 条）")

    while True:
        url = BASE_URL.format(start)
        print(f"\n正在抓取起点 {start} ...\n请求: {url}")
        success = False

        # Retry the same page up to RETRY_LIMIT times before giving up.
        for attempt in range(RETRY_LIMIT):
            try:
                resp = requests.get(url, timeout=10)
                print(f"状态码: {resp.status_code}")
                if resp.status_code != 200:
                    raise Exception(f"HTTP {resp.status_code}")
                # The JSON body wraps an OpenSSL-encrypted payload string.
                encrypted_payload = resp.json()["payload"]
                decrypted = decrypt_payload(encrypted_payload, PASSWORD)
                data = json.loads(decrypted.decode("utf-8"))
                threads = data.get("threads", [])
                new_threads = [t for t in threads if t["id"] not in existing_ids]
                print(f"本页共 {len(threads)} 条，新增 {len(new_threads)} 条")
                all_threads.extend(new_threads)
                for t in new_threads:
                    existing_ids.add(t["id"])
                # Server supplies the next offset; fall back to a fixed
                # PAGE_SIZE step if the field is absent.
                start = data.get("next", start + PAGE_SIZE)
                # Persist progress immediately so a crash can resume here.
                save_last_start(start)
                save_data(all_threads)
                success = True
                time.sleep(SLEEP_BETWEEN_REQUESTS)  # politeness delay
                if not data.get("hasNext"):
                    print("\n抓取完成，无更多数据。")
                    return
                break
            except Exception as e:
                # NOTE(review): broad catch treats network, JSON and
                # decryption failures alike; waits 2s before retrying.
                print(f"抓取失败（尝试 {attempt+1}/{RETRY_LIMIT}）: {e}")
                time.sleep(2)

        if not success:
            print("失败")
            break

    print(f"\n共抓取 {len(all_threads)} 条，已保存到 {DATA_FILE}")

if __name__ == "__main__":
    # Entry point: run the crawler when executed as a script.
    crawl_duibiao()
