import os
from queue import Empty, Queue
from threading import Thread

import requests
from bs4 import BeautifulSoup

class Page_Url(Thread):
    """Scraper thread: fetches listing pages and queues image URLs.

    Pulls listing-page URLs from ``page_queue``, scrapes each page for
    lazy-loaded ``<img class="lazy">`` tags, and pushes ``(href, title)``
    tuples onto ``img_queue`` for the downloader threads to consume.
    """

    def __init__(self, page_queue, img_queue):
        super().__init__()
        self.page_queue = page_queue  # Queue[str]: listing-page URLs to scrape
        self.img_queue = img_queue    # Queue[tuple]: (image URL, alt title)

    def run(self):
        # Drain the page queue until it is exhausted.  get_nowait() + Empty
        # avoids the empty()/get() race of the original: with several
        # scraper threads, another thread could take the last item between
        # our empty() check and our get(), blocking this thread forever.
        while True:
            try:
                url = self.page_queue.get_nowait()
            except Empty:
                break
            self.parse_url_page(url)

    def parse_url_page(self, url):
        """Scrape one listing page and enqueue every lazy-loaded image."""
        # timeout so a stalled server cannot hang the thread indefinitely
        res = requests.get(url, timeout=10).text
        soup = BeautifulSoup(res, 'lxml')
        img_list = soup.find_all('img', class_="lazy")

        for img in img_list:
            title = img.get("alt")
            href = img.get("data-original")
            # Skip tags without a real image URL: queueing href=None would
            # crash the downloader thread later.
            if href:
                self.img_queue.put((href, title))


class Img_Url(Thread):
    """Downloader thread: saves queued images into ``./img/``.

    BUGFIX: in the original code ``run`` was accidentally defined inside
    ``__init__``, so the thread inherited ``Thread.run`` and never
    downloaded anything.  The loop-exit check also lived inside the
    ``except`` branch, so it only ran after an error.
    """

    def __init__(self, page_queue, img_queue):
        super().__init__()
        self.page_queue = page_queue  # producers' queue, watched for shutdown
        self.img_queue = img_queue    # Queue[tuple]: (image URL, alt title)

    def run(self):
        # Ensure the target directory exists before the first write.
        os.makedirs("./img", exist_ok=True)
        while True:
            try:
                # Short timeout instead of a blocking get(): lets us
                # periodically re-check the shutdown condition below.
                href, title = self.img_queue.get(timeout=0.2)
            except Empty:
                # Stop only when the producers have no pages left AND no
                # image is pending; otherwise keep polling.
                if self.page_queue.empty() and self.img_queue.empty():
                    break
                continue
            try:
                # requests' network errors derive from OSError, so the
                # existing handler covers both download and file failures.
                resp = requests.get(href, timeout=10).content
                with open("./img/" + title + os.path.splitext(href)[-1], 'wb') as f:
                    f.write(resp)
                print(title, '保存成功')
            except OSError:
                print("图片资源不存在")

def main(pages=1, workers=1):
    """Scrape chinaz image listings and download the pictured images.

    Args:
        pages: number of listing pages to scrape (default 1, like the
            original script).
        workers: number of scraper/downloader thread pairs to start.

    BUGFIX: the original URL template had no ``{}`` placeholder, so
    ``.format(num)`` was a no-op and every iteration queued the same first
    page; it also created and started a fresh pair of threads on every
    loop iteration and never joined them.
    """
    page_queue = Queue()
    img_queue = Queue()

    # chinaz pagination: page 1 is index.html, page n (n >= 2) is
    # index_n.html.
    for num in range(1, pages + 1):
        if num == 1:
            url = "https://sc.chinaz.com/tupian/index.html"
        else:
            url = "https://sc.chinaz.com/tupian/index_{}.html".format(num)
        page_queue.put(url)

    # Start the threads once, after all pages are queued, and wait for
    # them to finish so main() doesn't return mid-download.
    threads = []
    for _ in range(workers):
        scraper = Page_Url(page_queue, img_queue)
        scraper.start()
        downloader = Img_Url(page_queue, img_queue)
        downloader.start()
        threads.extend((scraper, downloader))
    for t in threads:
        t.join()


if __name__ == '__main__':
    main()