import asyncio
import os
import shutil

from utils.httpClient import HttpClient
from bs4 import BeautifulSoup as bs

hc = HttpClient()

base_url = "https://pal7.cubejoy.com"
index = "/videoImg.html"

BASE_IMAGE_PATH = "images/仙剑奇侠传7"


async def save_image(url, index):
    """Download one gallery image and save it as ``<index>.jpg``.

    ``url`` is a path relative to ``base_url``; the raw response bytes
    are written under ``BASE_IMAGE_PATH`` using the shared HTTP client.
    """
    full_url = f"{base_url}/{url}"
    print(full_url)
    payload = await hc.get(full_url, parse_json=False)
    target = f"{BASE_IMAGE_PATH}/{index}.jpg"
    with open(target, 'wb') as out:
        out.write(payload)


async def main():
    """Scrape the gallery index page and download every listed image.

    Fetches the index HTML, finds all ``img`` tags with class
    ``VImgLmPicture`` and downloads each lazily-loaded image (held in
    the ``v-src`` attribute) in document order, numbering files from 0.
    The shared HTTP client is always closed, even if a request fails.
    """
    # exist_ok=True replaces the exists()/makedirs() pair and avoids
    # the race between the check and the creation.
    os.makedirs(BASE_IMAGE_PATH, exist_ok=True)

    try:
        url = f"{base_url}{index}"
        data = await hc.get(url, parse_json=False)
        soup = bs(data, features="html.parser")
        pic_divs = soup.find_all("img", class_="VImgLmPicture")
        for i, img in enumerate(pic_divs):
            await save_image(img["v-src"], i)
    finally:
        # Guarantee the client's connections are released even when a
        # download or the index fetch raises (original leaked on error).
        await hc.close()

if __name__ == '__main__':
    # asyncio.run() creates, runs and closes the event loop itself;
    # get_event_loop()/run_until_complete is deprecated for this
    # top-level use since Python 3.10.
    asyncio.run(main())
