# coding=utf-8
from threading import Thread
import os
import requests
from bs4 import BeautifulSoup
from queue import Queue

# 多线程类
# Worker thread class
class Download_Images(Thread):
    """Worker thread that pulls listing-page URLs from a shared queue and
    downloads every image found on each page into a target directory."""

    def __init__(self, threadName, queue, path):
        """
        :param threadName: human-readable label prefixed to progress output
        :param queue: queue.Queue of page URLs shared by all workers
        :param path: directory where downloaded images are saved
        """
        super().__init__()
        self.queue = queue
        self.path = path
        self.threadName = threadName
        # makedirs(..., exist_ok=True) avoids the check-then-create race of
        # `if not exists: mkdir` when several workers are constructed
        # concurrently, and also creates intermediate directories
        # (os.mkdir can only create the final path component).
        os.makedirs(self.path, exist_ok=True)

    def run(self) -> None:
        while True:
            # Blocks until a page URL becomes available on the queue.
            url = self.queue.get()
            try:
                download_images(self.threadName, url, self.path)
            except Exception:
                # Narrowed from a bare `except:` so KeyboardInterrupt and
                # SystemExit are no longer swallowed; a failed page stays
                # best-effort, matching the original behavior.
                print('下载失败')
            finally:
                # Always acknowledge the item — even on failure — so that
                # queue.join() in the main thread can eventually return.
                self.queue.task_done()


def download_images(threadName, url, path):
    """Fetch one listing page and save every matched image under *path*.

    :param threadName: label prefixed to progress messages
    :param url: listing-page URL to scrape
    :param path: destination directory; expected to end with a separator,
                 mirroring how file names are built below
    :raises requests.RequestException: when the listing page itself cannot
                 be fetched (individual image failures are swallowed)
    """
    headers = {
        'User-Agent':
            'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/84.0.4147.105 Safari/537.36'
    }
    # timeout prevents a stalled server from hanging the worker thread forever.
    response = requests.get(url, headers=headers, timeout=10)
    soup = BeautifulSoup(response.text, 'lxml')
    img_list = soup.find_all('img', class_='ui image lazy')
    for img in img_list:
        # .get() instead of [] — in the original, a tag missing 'title' or
        # 'data-original' raised KeyError outside the try block and aborted
        # every remaining image on the page. Skip malformed tags instead.
        image_title = img.get('title')
        image_url = img.get('data-original')
        if not image_title or not image_url:
            continue
        # NOTE(review): image_title comes straight from the page and may
        # contain path separators or invalid filename chars — consider
        # sanitizing before trusting it as a file name.
        filename = path + image_title + os.path.splitext(image_url)[-1]
        try:
            # Download first, then open the file: the original opened the
            # file before fetching, leaving an empty stray file whenever
            # the download failed.
            image = requests.get(image_url, headers=headers, timeout=10).content
            print(threadName, '正在保存图片:', image_title)
            with open(filename, 'wb') as f:
                f.write(image)
            print(threadName, '保存成功:', image_title)
        except (requests.RequestException, OSError):
            # Best-effort per image; narrowed from a bare `except: pass` so
            # real bugs (e.g. KeyboardInterrupt) are not silently swallowed.
            pass

if __name__ == '__main__':
    _url = 'https://fabiaoqing.com/biaoqing/lists/page/{page}.html'
    # NOTE(review): range(10) requests pages 0-9; many sites paginate from 1,
    # so page 0 may 404 — confirm before relying on the first request.
    urls = [_url.format(page=page) for page in range(10)]
    queue = Queue()
    path = './threading_images/'
    # Start the daemon workers first; they block on queue.get() until URLs
    # arrive, and daemon=True lets the process exit once queue.join() returns.
    for x in range(10):
        worker = Download_Images("线程%d" % x, queue, path)
        worker.daemon = True
        worker.start()
    for url in urls:
        queue.put(url)
    # Block until every queued page has been task_done()'d by a worker.
    queue.join()
    print('下载完成...')