
# -*- coding: utf-8 -*-
"""
Python 3.9.13
完整流程（含 robot7 绕过 + 配置文件 + 登录成功后获取开奖接口 + 打印请求时的 Cookie）：
"""
import re
import sys
import time
import json
import binascii
from dataclasses import dataclass
from typing import Optional, Tuple, List
from urllib.parse import urljoin, urlparse, parse_qs, quote
from pathlib import Path
import logging

import requests
from bs4 import BeautifulSoup
from Crypto.PublicKey import RSA
from Crypto.Cipher import PKCS1_v1_5

CONFIG_FILE = "config.json"
LOCAL_PAGE1 = "/mnt/data/page1.html"
LOCAL_LOGINPAGE = "/mnt/data/loginpage.html"

logging.basicConfig(level=logging.INFO, format="%(levelname)s: %(message)s")

UA = ("Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 "
      "(KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36")

@dataclass
class LineLink:
    """One selectable "line" (mirror host) and its fully built login URL."""
    # Line number the user would have clicked (simulate_click_line1 always uses 1).
    index: int
    # Full /Member/Login URL including the millisecond cache-buster and token.
    url: str
    # Origin of the line host, taken from the page's arrLine array.
    base: str

def print_cookiejar(session: requests.Session, domain_filter: Optional[str] = None):
    """Dump every cookie in the session jar, optionally keeping only cookies
    whose domain contains *domain_filter*."""
    print("---- CookieJar Dump ----")
    for cookie in session.cookies:
        if domain_filter and domain_filter not in cookie.domain:
            continue
        secure_col = "Secure" if cookie.secure else "    - "
        print(f"{cookie.domain}\t{cookie.path}\t{secure_col}\t{cookie.name}={cookie.value}")
    print("------------------------")

def cookie_header_for(session: requests.Session, method: str, url: str, data=None, headers: Optional[dict]=None) -> str:
    """Return the Cookie header the session WOULD attach to this request,
    without actually sending anything."""
    effective_headers = headers or {"User-Agent": UA}
    request = requests.Request(method.upper(), url, data=data, headers=effective_headers)
    prepared = session.prepare_request(request)
    return prepared.headers.get("Cookie", "")

def send_with_cookie_print(session: requests.Session, method: str, url: str, *, data=None, timeout: int = 12, allow_redirects: bool = True):
    """Prepare the request, print the outgoing Cookie header, send it, then
    dump the Set-Cookie headers of the whole redirect chain.  Returns the
    final response."""
    verb = method.upper()
    prepared = session.prepare_request(
        requests.Request(verb, url, data=data, headers={"User-Agent": UA})
    )
    print(f"--> {verb} {url}")
    print("    Cookie header to send:", prepared.headers.get("Cookie", ""))
    response = session.send(prepared, timeout=timeout, allow_redirects=allow_redirects)
    print_set_cookies_chain(response, label=f"{verb} {url}")
    return response

def is_js_cookie_challenge(html: str) -> bool:
    """Heuristic: treat the page as a JS cookie challenge ("robot7") when it
    assigns document.cookie and includes a path attribute."""
    return all(marker in html for marker in ("document.cookie", "path="))

def parse_js_cookies(html: str):
    """Extract cookies set via inline ``document.cookie = '...'`` statements.

    Returns a list of ``(name, value, domain, path)`` tuples; ``domain`` is
    None when the assignment has no ``domain=`` attribute and ``path``
    defaults to ``"/"``.

    Fixes vs. the original: double-quoted assignments are matched as well as
    single-quoted ones, and a malformed assignment whose first segment has no
    ``=`` is skipped instead of raising ValueError.
    """
    cookies = []
    # Group 1 captures the quote character so both quoting styles work.
    for _quote, full in re.findall(r"document\.cookie\s*=\s*(['\"])([^'\"]+)\1", html):
        parts = [p.strip() for p in full.split(';')]
        if not parts or '=' not in parts[0]:
            continue  # malformed: no name=value pair
        name, value = parts[0].split('=', 1)
        domain = None
        path = '/'
        for attr in parts[1:]:
            lowered = attr.lower()
            if lowered.startswith('domain='):
                domain = attr.split('=', 1)[1].strip()
            elif lowered.startswith('path='):
                # An empty path attribute falls back to "/".
                path = attr.split('=', 1)[1].strip() or '/'
        cookies.append((name, value, domain, path))
    return cookies

def get_set_cookie_headers(resp: requests.Response):
    """Return every Set-Cookie header value on *resp*.

    Prefers the underlying urllib3 headers (which keep duplicate Set-Cookie
    lines separate via ``getlist``); falls back to the folded requests header.
    """
    collected = []
    try:
        raw_headers = getattr(resp.raw, "headers", None)
        if raw_headers is not None and hasattr(raw_headers, "getlist"):
            collected = raw_headers.getlist("Set-Cookie")
    except Exception:
        pass
    if collected:
        return collected
    folded = resp.headers.get("Set-Cookie")
    return [folded] if folded else []

def print_set_cookies_chain(resp: requests.Response, label: str = ""):
    """Print status, URL and all Set-Cookie headers for every hop in the
    redirect chain (history first, final response last)."""
    print(f"==== Set-Cookie Dump {label} ====")
    for hop_index, hop in enumerate([*resp.history, resp]):
        try:
            hop_url = hop.url
        except Exception:
            hop_url = "(unknown)"
        print(f"[{hop_index}] {hop.status_code} {hop_url}")
        for header_value in get_set_cookie_headers(hop):
            print("  Set-Cookie:", header_value)
    print("================================")

def get_with_robot7(session: requests.Session, url: str, *, timeout: int = 8) -> Optional[str]:
    """GET *url*, transparently bypassing the "robot7" JS cookie challenge.

    If the first response looks like a challenge page (it writes
    document.cookie via JS), the cookies it would set are injected into the
    session and the request is retried once.

    Returns the decoded body text, or None when the initial request or the
    retry fails.

    Fix vs. the original: the redundant function-local
    ``from urllib.parse import urlparse`` (already imported at module top)
    has been removed.
    """
    try:
        resp = session.get(url, headers={"User-Agent": UA}, timeout=timeout)
        print_set_cookies_chain(resp, label=f"GET {url} (initial)")
        resp.raise_for_status()
        resp.encoding = resp.apparent_encoding or "utf-8"
        txt = resp.text
    except Exception as e:
        logging.warning("GET failed for %s (%s)", url, e)
        return None

    if is_js_cookie_challenge(txt):
        # Default cookie domain: ".root-domain" of the requested host, for JS
        # assignments that name no domain themselves.
        host = urlparse(url).hostname or ""
        parts = host.split(".")
        default_domain = "." + ".".join(parts[-2:]) if len(parts) >= 2 else host

        js_cookies = parse_js_cookies(txt)
        for name, value, domain, path in js_cookies:
            session.cookies.set(name, value, domain=(domain or default_domain), path=(path or "/"))
        logging.info("Injected %d JS cookies, retrying %s", len(js_cookies), url)

        try:
            resp = session.get(url, headers={"User-Agent": UA}, timeout=timeout)
            print_set_cookies_chain(resp, label=f"GET {url} (retry)")
            resp.raise_for_status()
            resp.encoding = resp.apparent_encoding or "utf-8"
            return resp.text
        except Exception as e:
            logging.warning("Retry after injecting JS cookies failed for %s (%s)", url, e)
            return None

    return txt

def parse_member_href(html: str, base_url: str) -> Optional[str]:
    """Find the member-login anchor (id="btnMember") in *html* and return its
    href resolved against *base_url*, or None when the link is missing."""
    anchor = BeautifulSoup(html, "html.parser").find("a", id="btnMember")
    if anchor is None or not anchor.has_attr("href"):
        return None
    return urljoin(base_url, anchor["href"])

def extract_lines_and_token(page1_html: str) -> Tuple[List[str], Optional[str]]:
    """Pull the JS ``arrLine`` URL array and the login ``token`` out of the
    line-test page.  Returns ``([], None)`` pieces for anything not found."""
    lines: List[str] = []
    arr_m = re.search(r'arrLine\s*=\s*\[([^\]]+)\]', page1_html)
    if arr_m is not None:
        lines = re.findall(r'"([^"]+)"', arr_m.group(1))
    tok_m = re.search(r"token=([0-9A-F]+)", page1_html, re.IGNORECASE)
    return lines, (tok_m.group(1) if tok_m else None)

def build_line_login_url(base: str, token: str) -> str:
    """Build the /Member/Login URL for a line host, with a millisecond
    timestamp as cache-buster and the login token appended."""
    millis = int(time.time() * 1000)
    return "".join([base, "/Member/Login?_=", str(millis), "&token=", token])

def simulate_click_line1(page1_html: str) -> LineLink:
    """Mimic clicking the first line: take arrLine[0] and build its login URL.

    Raises RuntimeError when arrLine or the token cannot be parsed.
    """
    lines, token = extract_lines_and_token(page1_html)
    if not lines:
        raise RuntimeError("Failed to parse arrLine from page1.html")
    if not token:
        raise RuntimeError("Failed to parse token from page1.html")
    first_base = lines[0]
    return LineLink(index=1, url=build_line_login_url(first_base, token), base=first_base)

EXPONENT_HEX = "010001"
MODULUS_HEX = ("BECA095A9D6509E1E78AA35D7198D95DF8B308CDA5E0D202E27452D56C312A5D"
               "B35CD62E159CD1A5CDD614316495F514947AD64AE7D0FD357958E7A66DBAA4DBA"
               "A005AF246C07E992FB4C988E5751328B6D2359CB99C38CDEC45AEBD36D9210F35"
               "17C577ECDA31A48F36D46F8A872C55623DFC2C905988D6BF84BCD0D0D7A33B")

def rsa_encrypt_real(text: str) -> str:
    """URL-encode *text* (same safe set as JS encodeURIComponent), encrypt it
    with RSA PKCS#1 v1.5 using the site's public key, and return the
    ciphertext as upper-case hex."""
    encoded = quote(text, safe="~()*!.'")
    modulus = int(MODULUS_HEX, 16)
    public_exponent = int(EXPONENT_HEX, 16)
    cipher = PKCS1_v1_5.new(RSA.construct((modulus, public_exponent)))
    ciphertext = cipher.encrypt(encoded.encode("utf-8"))
    return binascii.hexlify(ciphertext).decode().upper()

def main():
    """Drive the full login flow.

    Steps: read config -> visit root -> navigate by SafeCode -> open the line
    test page -> simulate clicking line 1 -> open its login page ->
    RSA-encrypt the credentials and POST the login -> on success, fetch the
    current draw number.  Cookie traffic is printed at every step.

    Fixes vs. the original: uses the CONFIG_FILE constant instead of a
    duplicated "config.json" literal, and derives the cookie-dump domain
    filter from the configured origin instead of a hard-coded host.
    """
    cfg = json.loads(Path(CONFIG_FILE).read_text(encoding="utf-8"))
    base_origin = cfg["base_origin"].rstrip("/")
    safecode = cfg["safecode"]
    username = cfg["account"]
    password = cfg["password"]

    # Registrable domain of the configured origin, used to filter cookie dumps.
    origin_host = urlparse(base_origin).hostname or ""
    origin_domain = ".".join(origin_host.split(".")[-2:]) if "." in origin_host else origin_host

    session = requests.Session()
    session.headers.update({"User-Agent": UA})

    root_url = base_origin + "/"
    print("Step 0) Visit root:", root_url)
    root_html = get_with_robot7(session, root_url)
    print_cookiejar(session, domain_filter=origin_domain)
    if not root_html:
        print("  (warn) Failed to get root page; continue anyway.")

    navigate_url = f"{base_origin}/Navigation/NavigateByTarget?SafeCode={safecode}"
    print("Step 1) Navigate by SafeCode:", navigate_url)
    nav_html = get_with_robot7(session, navigate_url)
    print_cookiejar(session, domain_filter=origin_domain)
    if not nav_html:
        # Minimal stand-in so the rest of the flow can still be exercised.
        nav_html = """
        <html><body>
          <a id="btnMember" href="/Navigation/Speed?token=FAKE_TOKEN&isbackend=0&ismobile=0">会员登入</a>
        </body></html>
        """
        print("  (fallback) using minimal nav html")

    member_url = parse_member_href(nav_html, navigate_url)
    if not member_url:
        raise SystemExit("Could not find the 会员登入 link in NavigateByTarget page.")
    print("  Member URL:", member_url)

    print("Step 2) Open line test page ...")
    page1_html = get_with_robot7(session, member_url)
    if not page1_html:
        page1_html = Path(LOCAL_PAGE1).read_text(encoding="utf-8")
        print("  (fallback) using local page1.html")

    line1 = simulate_click_line1(page1_html)
    print("  Line1 login URL:", line1.url)

    print("Step 3) Open login page ...")
    login_html = get_with_robot7(session, line1.url)
    # The line host may live on a different root domain than base_origin.
    line_host = urlparse(line1.base).hostname or ""
    line_root_domain = "." + ".".join(line_host.split(".")[-2:]) if "." in line_host else line_host
    print_cookiejar(session, domain_filter=line_root_domain.replace("www.", ""))

    if not login_html:
        login_html = Path(LOCAL_LOGINPAGE).read_text(encoding="utf-8")
        print("  (fallback) using local loginpage.html")

    print("Step 4) Encrypt & POST login ...")
    qs = parse_qs(urlparse(line1.url).query)
    token = (qs.get("token") or [""])[0]
    post_url = urljoin(line1.base, "/Member/DoLogin")

    payload = {
        "Account": rsa_encrypt_real(username),
        "Password": rsa_encrypt_real(password),
        "Token": token,
        "Captcha": "",
    }

    print("  Cookie header (login POST):", cookie_header_for(session, "POST", post_url, data=payload))

    try:
        resp = send_with_cookie_print(session, "POST", post_url, data=payload, timeout=12)
        print("  Server status:", resp.status_code)
        body = resp.text
        print("  Response (first 800 chars):")
        print(body[:800])
    except Exception as e:
        print("  (error) POST failed:", e)
        return

    # Success looks like JSON with "Status": 1; fall back to a regex probe
    # when the body is not valid JSON.
    success = False
    try:
        success = (str(resp.json().get("Status")) == "1")
    except Exception:
        success = bool(re.search(r'"Status"\s*:\s*1\b', body))

    if success:
        print("Login success. Fetching draw number ...")
        now_ms = int(time.time() * 1000)
        draw_url = urljoin(line1.base, f"/Period/GetDrawNumber?_={now_ms}")
        print("  Cookie header (draw GET):", cookie_header_for(session, "GET", draw_url))

        try:
            draw_resp = send_with_cookie_print(session, "GET", draw_url, timeout=12)
            print("  Draw GET status:", draw_resp.status_code)
            print("  DrawNumber response:")
            print(draw_resp.text)
        except Exception as e:
            print("  (error) Fetch draw number failed:", e)
    else:
        print("Login not successful (Status != 1).")

    print("\nDone.")

if __name__ == "__main__":
    main()
