import requests
import os
from bs4 import BeautifulSoup
# pip install fake_useragent
from fake_useragent import UserAgent

'''
Exercise: crawler for desktop-wallpaper image resources from 51miz.com
'''
class SpiderMiZhiDeskWallpaper():
    """Scraper that downloads desktop-wallpaper images from 51miz.com.

    Fetches one search-results page, extracts each thumbnail's lazy-load
    URL (``data-original``), and saves every image under
    ``./images/case_04_test/``.
    """

    def __init__(self):
        # Listing page to scrape.
        self.url = 'https://www.51miz.com/so-sucai/199239.html'
        # Browser-like headers; a randomized Chrome User-Agent lowers the
        # chance of the request being rejected as a bot.
        self.headers = {
            'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9',
            'Accept-Language': 'zh-CN,zh;q=0.9',
            'Cache-Control': 'max-age=0',
            'Connection': 'keep-alive',
            'Host': 'www.51miz.com',
            'Sec-Fetch-Dest': 'document',
            'Sec-Fetch-Mode': 'navigate',
            'Sec-Fetch-Site': 'none',
            'Sec-Fetch-User': '?1',
            'Upgrade-Insecure-Requests': '1',
            'User-Agent': UserAgent().chrome
        }

    def main(self):
        """Fetch the listing page and download every wallpaper found on it."""
        # timeout prevents the request from hanging indefinitely.
        response = requests.get(url=self.url, headers=self.headers, timeout=30)
        # Fail fast on HTTP errors instead of parsing an error page.
        response.raise_for_status()
        bs = BeautifulSoup(response.content.decode(errors="ignore"), 'lxml')
        download_path = './images/case_04_test/'
        # makedirs creates all missing parent directories in one call and is
        # a no-op when they already exist -- replaces the fragile
        # exists()/mkdir() branching.
        os.makedirs(download_path, exist_ok=True)
        for element_box in bs.select('.flex-images > .element-box'):
            img = element_box.select_one('.element-box > div > div > a > img')
            # Skip boxes without the lazy-load attribute instead of
            # raising KeyError mid-crawl.
            if img is None or not img.has_attr('data-original'):
                continue
            image_src = img['data-original']
            # Strip the "!<style>" thumbnail suffix to get the base image URL.
            image_url = "https:" + image_src.split("!")[0]
            print(image_url)
            image_name = image_url.split('/')[-1]
            self.image_download(image_url, image_name, download_path)

    def image_download(self, image_url, image_name, download_path):
        """Download one image into download_path.

        Best-effort: network failures are reported and skipped so the
        remaining images still get downloaded.
        """
        try:
            image_res = requests.get(image_url, timeout=30)
            # status_code == 200 already implies .ok; one check suffices.
            if image_res.status_code == 200:
                # Drop any query-string remnant from the file name.
                with open(download_path + image_name.split("&")[0], 'wb') as file:
                    file.write(image_res.content)
        except requests.RequestException as err:
            # Report instead of silently swallowing every exception.
            print(f"download failed for {image_url}: {err}")

if __name__ == '__main__':
    # Script entry point: build the scraper first (its __init__ prepares
    # the request headers), then run the full crawl.
    wallpaper_spider = SpiderMiZhiDeskWallpaper()
    print("开始爬取觅知网桌面壁纸图片资源")
    wallpaper_spider.main()
    print("爬取结束")
