import asyncio
import json
import os
import os.path as path
import queue
import socket
import threading
import time
import urllib.request as urllib
from urllib.parse import quote

import aiohttp as http
from bs4 import BeautifulSoup

# Global socket timeout (seconds) so a hung download eventually aborts.
socket.setdefaulttimeout(60)
# Thread-safe queue of image URLs: produced by load_images, consumed by down_image.
imageQueue = queue.Queue()
# Flag polled by the downloader thread; set to False to make it stop.
down = True
# Directory where downloaded images are saved.
savePath = "C:/tmp/images4"


async def load_images(page_num, keyword):
    """Scrape Bing image-search result pages and enqueue image URLs.

    Fetches `page_num` result pages (35 thumbnails each) for `keyword`
    from the Bing async image endpoint, parses each page with
    BeautifulSoup, and pushes every original-image URL ("murl" field of
    each thumbnail's metadata) onto the module-level imageQueue, where
    the down_image thread picks them up.

    Args:
        page_num: number of result pages to fetch.
        keyword: search term; URL-quoted before being placed in the query
            so spaces / non-ASCII terms form a valid URL.
    """
    url = "https://cn.bing.com/images/async?q={0}&first={1}&count=35&relp=35&scenario=ImageBasic&datsrc=N_I&layout=RowBased&mmasync=1&dgState=x*641_y*1600_h*191_c*3_i*36_r*9&IG=01A32A66D4134C7487728195DC27000E&SFX=2&iid=images.6018"
    agent = {
        'User-Agent': "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/31.0.165063 Safari/537.36 AppEngine-Google."}
    # Percent-encode the keyword once, outside the loop.
    keyword_q = quote(keyword)
    # One ClientSession reused for every page (connection pooling) instead
    # of opening and tearing down a session per request.
    async with http.ClientSession() as session:
        for i in range(0, page_num):
            print("download page: %s" % (i + 1))
            try:
                url2 = url.format(keyword_q, i * 35 + 1)
                async with session.get(url2, headers=agent) as resp:
                    content = await resp.read()
                    soup = BeautifulSoup(content, 'html.parser')
                    for a in soup.select('.iusc'):
                        attr = json.loads(a.attrs['m'])
                        if "murl" in attr:
                            imageQueue.put(attr.get("murl"))
            except Exception as e:
                # Best-effort scraping: log the failure and continue with
                # the next page rather than aborting the whole crawl.
                print("Error: ", e)


def down_image():
    """Downloader loop, run in a background thread.

    Pops image URLs from the module-level imageQueue and saves each one
    under savePath (file name taken from the URL path, query string
    stripped), skipping files that already exist. Loops until the module
    flag `down` is set to False.
    """
    global down, imageQueue, savePath
    # makedirs creates missing parent directories too and tolerates a
    # pre-existing directory (os.mkdir would raise in both cases).
    os.makedirs(savePath, exist_ok=True)
    while down:
        try:
            # Block with a short timeout instead of busy-spinning on
            # imageQueue.empty(); the timeout lets the loop re-check `down`.
            img = imageQueue.get(timeout=1)
        except queue.Empty:
            continue
        print("waiting for download: %s" % imageQueue.qsize())
        name = path.split(img)[1]
        # Strip a query string from the candidate file name, if present.
        i = name.find("?")
        if i > -1:
            name = name[0:i]
        fullname = path.join(savePath, name)
        if path.exists(fullname):
            continue
        print("begin download photo: %s, %s" % (img, fullname))
        try:
            urllib.urlretrieve(img, fullname)
            print("finish download photo: %s" % name)
        except Exception as e:
            # socket.timeout is an OSError subclass, so a single handler
            # covers both the timeout case and any other download failure.
            print("error download photo: %s" % name)
            print(e)


if __name__ == '__main__':
    # Start the background downloader thread before producing URLs, so the
    # queue is drained while scraping is still in progress.
    worker = threading.Thread(target=down_image, name="down_image_thread")
    worker.start()
    page = 1000
    # Keyword to search for.
    word = 'forklift'
    # asyncio.run replaces the deprecated get_event_loop/run_until_complete pair.
    asyncio.run(load_images(page, word))
    # Scraping is done: wait for the downloader to drain the queue, then
    # clear the flag so the (non-daemon) thread exits and the process can
    # terminate instead of hanging forever.
    while not imageQueue.empty():
        time.sleep(1)
    down = False
    worker.join()
