# encoding=utf-8
import json
import re
from pathlib import Path

from bs4 import BeautifulSoup
from requests import Session
from requests.adapters import HTTPAdapter

import translator

# Shared HTTP session: ignore proxy environment variables and retry transient failures.
session = Session()
session.trust_env = False
# BUG FIX: the previous `session.adapters.DEFAULT_RETRIES = 10` was a no-op —
# `session.adapters` is the mapping of mounted transport adapters, and setting an
# arbitrary attribute on it configures nothing. Retries must be set on an
# HTTPAdapter mounted on the session.
_retry_adapter = HTTPAdapter(max_retries=10)
session.mount("https://", _retry_adapter)
session.mount("http://", _retry_adapter)
session.headers={"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/92.0.4515.131 Safari/537.36 Edg/92.0.902.67"}

# Official maps: Chinese display name -> wiki page slug
official_maps={
    "孤岛":"The_Island",
    "焦土":"Scorched_Earth",
    "畸变":"Aberration",
    "灭绝":"Extinction",
    "创世纪1":"Genesis:_Part_1",
    "创世纪2":"Genesis:_Part_2",
    "中心岛":"The_Center",
    "仙境":"Ragnarok",
    "瓦尔盖罗":"Valguero",
    "水晶岛":"Crystal_Isles",
    "迷失岛":"Lost_Island"
}

# Modded maps: Chinese display name -> wiki page slug
# NOTE(review): the "模组"-prefixed values look like placeholders rather than
# real wiki slugs — confirm before scraping these entries.
modded_maps={
    "维京":"Fjordur",
    "卡巴勒斯":"Caballus",
    "火山岛":"模组Volcano",
    "希望岛":"模组Hope",
    "奥林匹斯":"模组Olympus",
    "通古斯":"模组Tunguska"
}

# CSS classes that identify obelisks/terminals in the map legend; legend rows
# with any other class are treated as explorer resources instead.
terminal_white_list = {"obelisk-red","obelisk-blue","obelisk-green","terminal","city-terminal","mission-terminal","rockwell-prime-terminal"}

# Quality mapping table: loot-crate CSS class -> quality-band class for the front end.
# The letters presumably encode crate tiers w/g/b/p/y/r
# (white/green/blue/purple/yellow/red) — TODO confirm against the front end.
# NOTE(review): "c0315304555" is listed twice (Scorched Earth and Ragnarok) with
# the same value; the later entry wins, so this is harmless but redundant.
# NOTE(review): several Lost Island keys map to "" — apparently not yet assigned.
qualities_map={
    # The Island & The Center
    "c031525":"quality-wgb",
    "c15253545":"quality-gbpy",
    "c25354560":"quality-bpyr",
    # Scorched Earth
    "c031530":"quality-wgb",
    "c03153045":"quality-wgbp",
    "c0315304555":"quality-wgbpy",
    "c304555":"quality-bpy",
    "c30455570":"quality-bpyr",
    # Aberration
    "c102535":"quality-wgb",
    "c10253550":"quality-wgbp",
    "c253550":"quality-gbp",
    "c355065":"quality-bpy",
    "c35506580":"quality-bpyr",
    "c506580":"quality-pyr",
    "d355065":"quality-bpy",
    "d50":"quality-p",
    "d506580":"quality-pyr",
    "d65":"quality-y",
    "d6580":"quality-yr",
    "s35506580":"quality-bpyr",
    # Extinction
    "osd":"quality-bpyr",
    "element":"quality-byr",
    # Ragnarok
    "c0315304555":"quality-wgbpy",
    "c031525354560":"quality-wgbpyr",
    "c03152535456070":"quality-wgbpyr",
    "c456070":"quality-pr",
    # Valguero
    "c25354555":"quality-bpy",
    "c0315254570":"quality-wgbpr",
    "c1525354570":"quality-gbyr",
    "c0315253545":"quality-wgbpy",
    # Crystal Isles
    "c0315253545505560657080":"quality-wgbpyr",
    # Lost Island
    "c03152535":"quality-wgb",
    "c152535":"quality-gbp",
    "c153045":"",
    "c25":"",
    "c2535":"",
    "c253545":"",
    "c354560":"",
    "c45556070":""
}

# Sequential id assigned to every dot, so the front end can show/hide single dots
dot_id = 0

def css2camel(text: str) -> str:
    """Convert a CSS kebab-case name to a JS camelCase name.

    E.g. ``"quality-wgb"`` -> ``"qualityWgb"``.

    Fixed: the original indexed one past the end of the string when *text*
    ended with ``-`` (IndexError); splitting on the dash handles trailing
    dashes and empty input safely.
    """
    head, *rest = text.split("-")
    # Capitalize only the first character of each following segment; an empty
    # segment (from a trailing/double dash) contributes nothing.
    return head + "".join(part[:1].upper() + part[1:] for part in rest)

# region 爬虫部分

def get_dots(soup:BeautifulSoup,dot_type:str,has_quality=False,is_resource=False):
    """Collect every map dot of one legend type from the wiki page.

    Args:
        soup: parsed Explorer/Resource map page.
        dot_type: CSS class of the legend entry whose dots are wanted.
        has_quality: loot crates etc. carry a second CSS class that is
            mapped through ``qualities_map``.
        is_resource: strategic resources record whether the dot is in a cave.

    Returns:
        List of dicts with id/caption/top/left/note, plus ``qualities`` or
        ``inCave`` depending on the flags.
    """
    global dot_id  # sequential id shared across calls so the front end can toggle single dots
    # FIX: use a raw string — "\d" in a plain literal is an invalid escape
    # (SyntaxWarning on Python 3.12+). Compile once, outside the loops.
    coord_pattern = re.compile(r"lat (-?\d{1,2}\.?\d{0,2}), lon (-?\d{1,2}\.?\d{0,2})")
    dots = []
    for div in soup.find_all("div",class_=dot_type):
        for dot in div.find_all("div"):
            # The title attribute holds three lines: caption, coordinates, note.
            rows=dot.get("title").split("\n")
            match=coord_pattern.match(rows[1])
            data = {
                "id":dot_id,
                "caption":rows[0],
                "top":match.group(1),   # latitude -> CSS top
                "left":match.group(2),  # longitude -> CSS left
                "note":rows[2]
            }
            dot_id+=1
            # Crates and similar items carry a quality class as the second CSS class.
            if has_quality:
                clazz=div.get("class")[1]
                if qualities_map.get(clazz) is None:
                    print("未知的品质:",clazz)
                else:
                    data["qualities"] = qualities_map[clazz]
            # Strategic resources distinguish surface vs. in-cave deposits.
            if is_resource:
                data["inCave"]=div.get("class")[1] == "cave"
            dots.append(data)
    return dots

def get_terminals(soup:BeautifulSoup):
    """Locate every obelisk and terminal listed in the map legend."""
    results = []

    for row in soup.select(".map-legend tr"):
        marker = row.td.div
        # Legend rows without a marker div carry no location data.
        if marker is None:
            continue
        kind = marker.get("class")[1]  # second CSS class identifies the marker type
        # Only whitelisted terminal classes are kept; everything else belongs
        # to the explorer-resource scraper.
        if kind not in terminal_white_list:
            print(f"【终端】已忽略:{kind}")
            continue
        results.append({
            "name": row.find("label").get_text(),
            "class": kind,
            "dots": get_dots(soup, kind),
        })

    return results

def get_explorers(soup:BeautifulSoup):
    """Collect explorer-note, dossier and supply-crate locations from the legend."""
    # Marker types whose dots carry a loot quality.
    quality_types = {"crate","crateSurface","crateCave","crateDungeon"}
    results = []

    for row in soup.select(".map-legend tr"):
        marker = row.td.div
        # Legend rows without a marker div carry no location data.
        if marker is None:
            continue
        kind = marker.get("class")[1]  # second CSS class identifies the marker type
        # Terminal classes are handled by get_terminals, not here.
        if kind in terminal_white_list:
            print(f"【探险家资源】已忽略:{kind}")
            continue
        results.append({
            "name": row.find("label").get_text(),
            "class": kind,
            "dots": get_dots(soup, kind, has_quality=kind in quality_types),
        })

    return results

def get_resources(soup:BeautifulSoup):
    """Collect every strategic resource location from the legend."""
    # Marker types to skip on the resource page (obelisks and stray classes).
    skip_types = {"obelisk-blue","obelisk-red","obelisk-green","2","3","metal-"}
    results = []

    for row in soup.select(".map-legend tr"):
        marker = row.td.div
        # Legend rows without a marker div carry no location data.
        if marker is None:
            continue
        kind = marker.get("class")[1]  # second CSS class identifies the marker type
        if kind in skip_types:
            print(f"【战略资源】已忽略:{kind}")
            continue
        results.append({
            "name": row.find("label").get_text(),
            "class": kind,
            "dots": get_dots(soup, kind, is_resource=True),
        })

    return results

def scrap_one_official_map(map:str):
    """Scrape one official map and merge its data into data/maps/<map>.json."""
    # Terminals and explorer-related points live on the Explorer Map page.
    explorer_soup = BeautifulSoup(
        session.get(f"https://ark.fandom.com/wiki/Explorer_Map_({map})").content, "lxml")
    terminals = get_terminals(explorer_soup)
    explorers = get_explorers(explorer_soup)
    # Strategic resources live on the Resource Map page.
    resource_soup = BeautifulSoup(
        session.get(f"https://ark.fandom.com/wiki/Resource_Map_({map})").content, "lxml")
    resources = get_resources(resource_soup)

    # File paths may not contain colons, so strip them only after the URLs
    # (which need them, e.g. "Genesis:_Part_1") have been fetched.
    map=map.replace(':', '')

    # Translate captions and notes in place, refreshing the translation tables.
    translator.translate_map_data(terminals, map, is_resources=False, update_translations=True)
    translator.translate_map_data(explorers, map, is_resources=False, update_translations=True)
    translator.translate_map_data(resources, map, is_resources=True, update_translations=True)

    # Merge into any existing file so unrelated keys survive.
    try:
        with open(f"data/maps/{map}.json","r",encoding="utf-8") as f:
            existing = json.load(f)
    except FileNotFoundError:
        existing = {}
    existing.update({
        "terminals": terminals,
        "explorers": explorers,
        "resources": resources,
    })
    with open(f"data/maps/{map}.json","w",encoding="utf-8") as f:
        json.dump(existing, f, ensure_ascii=False, indent=4)

def scrap_all_official_maps():
    """Scrape every official map in turn, logging progress."""
    for display_name, slug in official_maps.items():
        print(f"正在爬取{display_name}......")
        scrap_one_official_map(slug)
        print(f"{display_name}爬取完毕...")

def scrap_one_modded_map(map:str)->None:
    """Scrape one modded map and write its data to data/maps/<map>.json."""
    # Terminals and explorer-related points live on the mod's Explorer Map page.
    explorer_soup = BeautifulSoup(
        session.get(f"https://ark.fandom.com/wiki/Mod:{map}/Explorer_Map").content, "lxml")
    terminals = get_terminals(explorer_soup)
    explorers = get_explorers(explorer_soup)
    # Strategic resources live on the mod's Resource Map page.
    resource_soup = BeautifulSoup(
        session.get(f"https://ark.fandom.com/wiki/Mod:{map}/Resource_Map").content, "lxml")
    resources = get_resources(resource_soup)

    # NOTE(review): unlike official maps, modded data is written from scratch
    # (no merge with an existing file, no translation pass) — confirm intended.
    with open(f"data/maps/{map.replace(':', '')}.json","w",encoding="utf-8") as f:
        json.dump(
            {"terminals": terminals, "explorers": explorers, "resources": resources},
            f, ensure_ascii=False, indent=4)

def scrap_all_modded_map()->None:
    """Scrape every modded map."""
    for slug in modded_maps.values():
        scrap_one_modded_map(slug)
# endregion

def update_map_translations(map:str):
    """Re-run the translation pass over one map's saved JSON data."""
    map = map.replace(':', '')  # file names never contain colons
    file_path = f"data/maps/{map}.json"

    with open(file_path,"r",encoding="utf-8") as f:
        map_data = json.load(f)

    for section, is_res in (("terminals", False), ("explorers", False), ("resources", True)):
        translator.translate_map_data(map_data[section], map, is_resources=is_res)

    with open(file_path,"w",encoding="utf-8") as f:
        json.dump(map_data, f, ensure_ascii=False, indent=4, sort_keys=True)

def update_all_map_translations():
    """Refresh the translations of every official map's data file."""
    for slug in official_maps.values():
        update_map_translations(slug)

# if __name__ == "__main__":
    # scrap_all_official_maps()
    # scrap_one_official_map(official_maps["孤岛"])
    # update_all_map_translations()