import json
import re
import urllib
from pathlib import Path

from bs4 import BeautifulSoup
from requests import Session
from requests.adapters import HTTPAdapter

import downloader
import idmapper
import translator
import utils

# Shared HTTP session for all wiki requests in this module.
session = Session()
session.trust_env = False  # ignore system/environment proxy settings
# BUG FIX: the original `session.adapters.DEFAULT_RETRIES = 10` merely set an
# unused attribute on the session's adapter dict and configured no retries at
# all. Retries must be configured on an HTTPAdapter mounted per URL scheme.
_retry_adapter = HTTPAdapter(max_retries=10)
session.mount("https://", _retry_adapter)
session.mount("http://", _retry_adapter)
# Impersonate a desktop browser so the wiki serves the full page.
session.headers = {"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/92.0.4515.131 Safari/537.36 Edg/92.0.902.67"}

def scrap_hexagon_exchange():
    """Scrape the Hexagon Exchange table from the ARK wiki's HLN-A page.

    Builds a mapping of item id -> {"id", "quantity", "hexagons"} and writes
    it to ``data/hexagon-exchanges.json`` via ``utils.save_data``.
    """
    resp = session.get("https://ark.fandom.com/wiki/HLN-A_(Genesis)")
    soup = BeautifulSoup(resp.content, "lxml")
    span = soup.find("span", id="Hexagon_Exchange")
    # The table holding the exchange data follows the section heading.
    table = span.parent.find_next_sibling("table", class_="wikitable")
    hexagon_exchanges = {}
    for tr in table.find_all("tr", align="left"):
        tds = tr.select("td")
        title = tds[0].a.get("title")
        # Renamed from `id` to avoid shadowing the builtin.
        item_id = idmapper.get_id(title)
        hexagon_exchanges[item_id] = {
            "id": item_id,
            # Strip thousands separators so values like "1,000" parse
            # instead of raising ValueError.
            "quantity": int(tds[1].get_text(strip=True).replace(",", "")),
            "hexagons": int(tds[2].get_text(strip=True).replace(",", "")),
        }
    utils.save_data(hexagon_exchanges, "data/hexagon-exchanges.json")