import encodings.utf_8
import json

from bs4 import BeautifulSoup
import requests


def fetch_table():
    """Scrape the park/ticket table from jjjlyykt.com and print it as JSON.

    Builds {group_title: [park_dict, ...]} from the page's single <table>:
    a 1-cell row starts a new group; multi-cell rows are parks within the
    current group.  Addresses are resolved via fetch_address() (cached in
    address.json).  The result is printed to stdout as JSON.
    """
    parks = {}
    group = []

    # Pre-load the URL -> address cache so fetch_address() can use it.
    read_cache()

    html = requests.get(
        "http://www.jjjlyykt.com/nkjq/jingqumulu/2021/0909/434.html",
        timeout=30,  # avoid hanging forever on a dead server
    )
    soup = BeautifulSoup(html.content, "html.parser")
    for tr in soup.find("table").find_all("tr"):
        td = tr.find_all("td")
        if len(td) == 1:
            # Single-cell row: a group header.  Normalize whitespace and
            # strip the trailing "click the name for details" boilerplate.
            title = td[0].find("span").text
            title = title.replace("\t", "").replace("\r\n", "\t")
            title = title.removesuffix("\t点击景区名称 查看景区接待详情")
            group = []
            parks[title] = group
        else:
            # BUG FIX: _price/_discount/_description were previously only
            # assigned inside nested len-checks, so rows with fewer than 5
            # cells raised NameError at the append below.  Default them all
            # to "" up front, then fill in whatever cells exist.
            _price = _discount = _description = ""
            _id = td[0].find("span").text
            _name = td[1].find("span").text
            a = td[1].find("a")
            _url = a.attrs["href"] if a else ""
            if len(td) > 2:
                span = td[2].find("span")
                _price = span.text if span else ""
            if len(td) > 3:
                span = td[3].find("span")
                _discount = span.text if span else ""
            if len(td) > 4:
                # Guard against a missing <span>, like the other cells.
                span = td[4].find("span")
                _description = span.text if span else ""
            _address = fetch_address(_url) if _url else ""

            group.append({"id": _id, "name": _name, "url": _url, "price": _price, "discount": _discount,
                          "description": _description, "address": _address})

    # Persist any newly fetched addresses for the next run.
    cache_address()
    print(json.dumps(parks, ensure_ascii=False))


# In-memory cache mapping a park detail-page URL -> its scraped address text.
# Loaded from address.json by read_cache() and written back by cache_address();
# fetch_address() reads and populates it.
ads = {}


def read_cache():
    """Load the URL -> address cache from address.json into the module-level
    ``ads`` dict and return it.

    BUG FIX: the original bound a *local* ``ads``, so the cache file was read
    and then silently discarded — the module-level cache stayed empty.
    ``global ads`` makes the load actually take effect.  A missing cache file
    (the first run) now yields an empty cache instead of FileNotFoundError.
    """
    global ads
    try:
        with open("address.json", "rt", encoding="utf-8") as address_cache:
            ads = json.load(address_cache)
    except FileNotFoundError:
        ads = {}
    return ads


def cache_address(data=None):
    """Persist the URL -> address cache to address.json.

    data: mapping to write; defaults to the module-level ``ads`` cache
          (backward compatible — existing callers pass nothing).

    ensure_ascii=False keeps the Chinese address strings human-readable in
    the cache file instead of \\uXXXX escapes, matching the JSON output in
    fetch_table().
    """
    if data is None:
        data = ads
    with open("address.json", "wt", encoding="utf-8") as address_cache:
        json.dump(data, address_cache, ensure_ascii=False)


# Address lookups are slow, so they are cached: use the cached value when
# present, otherwise fetch fresh and store the result.
def fetch_address(url):
    """Return the street address scraped from a park's detail page at *url*.

    Results are memoized in the module-level ``ads`` cache.  Returns "" when
    no address row can be found (the original returned None, which leaked
    into the JSON output as null).
    """
    # BUG FIX: ads[url] raised KeyError on every cache miss; .get() returns
    # None (falsy) instead, so misses fall through to the live fetch.
    cached = ads.get(url)
    if cached:
        return cached

    html = requests.get(url, timeout=30)
    soup = BeautifulSoup(html.content, "html.parser")
    detail = soup.find("div", attrs={"class": "jq-details-item p-top20"})
    if detail:
        # BUG FIX: the original iterated .contents and compared find()
        # results against -1, mixing str.find (returns -1) with Tag.find
        # (returns None) semantics.  Iterate the real <div> descendants and
        # look for a "label, value" span pair whose label mentions 地址.
        for child in detail.find_all("div"):
            spans = child.find_all("span")
            if len(spans) > 1 and "地址" in spans[0].text:
                print(spans[1].text)
                ads[url] = spans[1].text
                return spans[1].text
    return ""


# Module-level side effect: pre-load the address cache on import/run.
read_cache()
# Scratch/debug calls kept for reference:
# fetch_table()
# fetch_address("http://zglynk.com/ITS/appModules/goAreaDetail.action?id=716")
# if len(td) == 1:
#     print(td[0].find("span").text)
#     print("************")
# else:
#     a = td[1].find("a")
#     if a:
#         url = td[1].find("a").attrs["href"]
#         print(url)
