"""
Launch multiple Selenium-powered scraper workers in parallel.

Each worker runs `scraper.py --selenium` over a non-overlapping page range
and writes to its own output directory.

Example:
    python3 launch_selenium_workers.py --workers 6 --start 1 --end 3811 \
        --out_root ./icrc_out --sleep 1.05 --keep_restricted

Stop with Ctrl+C; children are terminated gracefully.
"""

import argparse
import os
import random
import shlex
import subprocess
import sys
import time
from datetime import datetime

# Each worker is given its own Chrome remote-debugging port
# (BASE_DEBUG_PORT + worker index) so parallel browser instances don't collide.
BASE_DEBUG_PORT = 9300

# Default for --end: global end page, inclusive.
DEF_END = 3811


def chunk_ranges(start: int, end: int, k: int):
    """Split [start, end] inclusive into k nearly equal contiguous ranges."""
    total = max(0, end - start + 1)
    if k <= 0:
        return []
    base = total // k
    extra = total % k
    out = []
    cur = start
    for i in range(k):
        span = base + (1 if i < extra else 0)
        s = cur
        e = cur + span - 1
        cur = e + 1  # advance before the sentinel overwrite below, so cur is never reset
        if s > end or span == 0:
            s, e = 0, -1  # sentinel for an empty range; main() skips these
        out.append((s, e))
    return out
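
# For example, chunk_ranges(1, 10, 3) returns [(1, 4), (5, 7), (8, 10)]:
# ten pages split into spans of 4, 3 and 3, with the remainder pages going
# to the lowest-numbered workers.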


def build_cmd(py: str, scraper: str, s: int, e: int, out_dir: str,
              sleep_base: float, keep_restricted: bool, headless: bool, idx: int):
    """Build the argv for one worker covering pages [s, e]."""
    # Apply a small per-worker jitter to the base sleep (see --sleep help),
    # with a floor of 0.5 s.
    jitter = random.choice([0.95, 1.0, 1.05, 1.1, 1.15])
    sleep = max(0.5, sleep_base * jitter)
    args = [
        py, scraper,
        "--selenium",
        "--start_page", str(s),
        "--end_page", str(e),
        "--sleep", f"{sleep:.2f}",
        "--out_dir", out_dir,
        "--chrome_profile_dir", os.path.join(out_dir, "chrome_profile"),
        "--remote_debug_port", str(BASE_DEBUG_PORT + idx),
    ]
    if keep_restricted:
        args.append("--keep_restricted")
    if not headless:
        args.append("--no_headless")
    return args
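
# A representative argv for worker 1 under the defaults (the sleep value
# varies with the random jitter):
#   python scraper.py --selenium --start_page 1 --end_page 636 --sleep 1.05 \
#     --out_dir ./icrc_out_w1 --chrome_profile_dir ./icrc_out_w1/chrome_profile \
#     --remote_debug_port 9301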


def main():
    ap = argparse.ArgumentParser(description="Launch multiple Selenium scraper workers")
    ap.add_argument("--python", default=sys.executable,
                    help="Python executable (default: current interpreter)")
    ap.add_argument("--scraper",
                    default=os.path.join(os.path.dirname(__file__), "scraper.py"),
                    help="Path to scraper.py")
    ap.add_argument("--workers", type=int, default=6,
                    help="Number of parallel workers")
    ap.add_argument("--start", type=int, default=1,
                    help="Global start page (inclusive)")
    ap.add_argument("--end", type=int, default=DEF_END,
                    help="Global end page (inclusive)")
    ap.add_argument("--out_root", default="./icrc_out",
                    help="Root output directory; _w{n} will be appended")
    ap.add_argument("--sleep", type=float, default=1.05,
                    help="Base sleep; small jitter is added per worker")
    ap.add_argument("--keep_restricted", action="store_true",
                    help="Include restricted/publication-limited records")
    ap.add_argument("--no_headless", action="store_true",
                    help="Run visible browser windows for debugging")
    ap.add_argument("--dry_run", action="store_true",
                    help="Print commands and exit")
    ap.add_argument("--skip", type=int, default=0,
                    help="Shift each worker's start page forward by this many pages "
                         "(e.g., --skip 60 turns [s,e] into [s+60,e])")
    args = ap.parse_args()

    ranges = chunk_ranges(args.start, args.end, args.workers)

    procs = []
    try:
        for idx, (s, e) in enumerate(ranges, start=1):
            if s <= 0 or e < s:
                print(f"[w{idx}] empty range; skipping")
                continue

            adj_s = s + max(0, args.skip)
            if adj_s > e:
                print(f"[w{idx}] range {s}-{e} -> after --skip {args.skip}, empty; skipping")
                continue

            out_dir = f"{args.out_root}_w{idx}"
            os.makedirs(out_dir, exist_ok=True)

            cmd = build_cmd(
                args.python, args.scraper, adj_s, e, out_dir,
                args.sleep, args.keep_restricted, headless=not args.no_headless, idx=idx
            )
            stamp = datetime.now().strftime("%Y%m%d_%H%M%S")
            log_path = os.path.join(out_dir, f"worker_{idx}_{adj_s}-{e}_{stamp}.log")

            print("[launch]", " ".join(shlex.quote(c) for c in cmd))
            print(f"[w{idx}] pages {s}-{e} -> start-shifted to {adj_s}-{e} "
                  f"| profile={out_dir}/chrome_profile port={BASE_DEBUG_PORT + idx}")
            if args.dry_run:
                continue

            # Line-buffered log file; the worker's stdout and stderr both go here.
            logf = open(log_path, "w", buffering=1)
            proc = subprocess.Popen(cmd, stdout=logf, stderr=subprocess.STDOUT)
            procs.append((proc, logf, log_path))
            time.sleep(0.5)  # stagger launches slightly

        if args.dry_run:
            print("Dry run complete.")
            return

        # Reap finished workers until none are left.
        while procs:
            alive = []
            for proc, logf, log_path in procs:
                ret = proc.poll()
                if ret is None:
                    alive.append((proc, logf, log_path))
                else:
                    logf.close()
                    print(f"[exit] pid={proc.pid} code={ret} log={log_path}")
            procs = alive
            time.sleep(2.0)
    except KeyboardInterrupt:
        print("\n[ctrl-c] terminating workers…")
        # Ask each child to terminate, then force-kill any stragglers.
        for proc, _logf, _ in procs:
            try:
                proc.terminate()
            except Exception:
                pass
        time.sleep(2.0)
        for proc, _logf, _ in procs:
            try:
                proc.kill()
            except Exception:
                pass
        for _, logf, _ in procs:
            try:
                logf.close()
            except Exception:
                pass
        sys.exit(130)  # conventional exit status for SIGINT
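
# Tip: use --dry_run to inspect the computed page splits and the exact
# worker commands before launching anything, e.g.:
#   python3 launch_selenium_workers.py --workers 6 --dry_run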


if __name__ == "__main__":
    main()