import requests
from bs4 import BeautifulSoup
import json
from concurrent.futures import ThreadPoolExecutor, as_completed
import time

# Request headers sent with every call: a desktop Chrome User-Agent plus a
# Referer pointing at the ranking page, so the site serves us like a browser.
headers = {
    "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/90.0.4430.93 Safari/537.36",
    "Referer": "https://bing.ioliu.cn/ranking",
}

base_url = "https://bing.ioliu.cn"  # site root; scraped hrefs are relative to it
# One shared Session so all requests reuse the same connection pool/cookies.
session = requests.session()
url = base_url + "/ranking?p={}"  # ranking list page template; {} is the 1-based page number

def save_all_urls_to_file(last_page=156, path="./urls.txt"):
    """Crawl the ranking list pages and save every image-detail URL to *path*.

    Args:
        last_page: last ranking page to fetch, inclusive (pages 1..last_page).
            Defaults to 156, matching the original hard-coded range(1, 157).
        path: output file; written with one absolute URL per line.
    """
    urls = []
    for page in range(1, last_page + 1):
        res = session.get(url.format(page), headers=headers)
        soup = BeautifulSoup(res.text, 'html.parser')
        # Each <a class="mark"> wraps one image card and links to its detail page.
        tags = soup.find_all('a', attrs={'class': 'mark'})
        urls += [base_url + tag['href'] + "\n" for tag in tags]
    # Explicit encoding so the file is read back identically on any platform.
    with open(path, "w", encoding="utf-8") as file:
        file.writelines(urls)


def write_to_file(path, content):
    """Write *content* (bytes) to *path* in binary mode, replacing any existing file."""
    with open(path, "wb") as out:
        out.write(content)


def get_all_urls():
    """Read back the image-detail URLs saved by save_all_urls_to_file.

    Returns:
        list[str]: one URL per element, each still carrying its trailing newline
        (callers are expected to strip it before use).
    """
    # The original pre-initialized `urls = []` and rebound it immediately —
    # that assignment was dead; just return readlines() directly.
    with open("./urls.txt", "r", encoding="utf-8") as file:
        return file.readlines()


def get_image_info_task(url):
    """Fetch one image detail page and scrape its metadata.

    Args:
        url: absolute URL of the detail page. Trailing whitespace/newline is
            tolerated (URLs read via readlines() keep their "\n").

    Returns:
        dict with keys "title", "desc", "location", "background" on success,
        or None on any network/parse failure (best-effort: the error is
        printed and the page is skipped).
    """
    try:
        # Strip the trailing newline that readlines() leaves on each URL.
        res = session.get(url.strip(), headers=headers)
        soup = BeautifulSoup(res.text, 'html.parser')
        # find(...) is the idiomatic equivalent of find_all(...)[0]; a missing
        # element raises (AttributeError/TypeError) and falls into the handler
        # below, same as the original IndexError path.
        title = soup.find('p', attrs={'class': 'title'}).text
        desc = soup.find('p', attrs={'class': 'sub'}).text
        loc = soup.find('p', attrs={'class': 'location'}).text
        bgc = soup.find('img', attrs={'class': 'target'})["src"]
        return {
            "title": title,
            "desc": desc,
            "location": loc,
            "background": bgc
        }
    except Exception as e:
        # Deliberate best-effort behavior: one bad page must not kill the run.
        print(e)
        return None


if __name__ == "__main__":
    urls = get_all_urls()
    result_list = []
    # Serial fetch with a 2-second delay between requests so we do not hammer
    # the server. (A ThreadPoolExecutor fan-out with as_completed would be
    # much faster but risks rate limiting / blocking.)
    for u in urls:
        time.sleep(2)
        # readlines() keeps the trailing "\n"; strip it before requesting.
        r = get_image_info_task(u.strip())
        if r:
            result_list.append(r)
    # ensure_ascii=False keeps the Chinese titles/descriptions human-readable
    # in the JSON file; pair it with an explicit utf-8 encoding on the handle.
    with open("./data.json", "w", encoding="utf-8") as file:
        json.dump(result_list, file, ensure_ascii=False)
