import random
import time
import urllib
import urllib.request

import requests
import urllib3
from bs4 import BeautifulSoup

# Suppress InsecureRequestWarning: every request below is made with
# verify=False, which would otherwise spam a warning per call.
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)

# Desktop Chrome User-Agent so the target site serves the normal HTML page
# instead of a bot-detection / mobile variant.
heads = {'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/79.0.3945.130 Safari/537.36'}

# Search-results page to scrape (keyword is URL-encoded in the query string).
# NOTE(review): a taobao.com search URL was previously assigned here and
# immediately overwritten by this value; the dead assignment was removed.
url = "https://search.smzdm.com/?c=home&s=%E6%96%87%E8%83%B8&v=b"

# Filled in __main__ with the image URLs returned by getsubimg().
suburl = []

def getimg(url):
    """Fetch *url*, extract a page title, and record it as a Markdown link.

    Builds a line of the form ``[title](url)\n`` — preferring the page's
    ``<title>`` text and falling back to the first ``<h1>`` when the title
    is missing or empty — prints a colourised copy to stdout, appends the
    line to the module-level file handle ``des`` and returns it.

    Returns '' (and writes '') when the request fails.

    NOTE(review): ``des`` is not defined anywhere in this file; it must be
    an opened, writable file object supplied at module level before this
    function is called — confirm against the caller.
    """
    result = ''
    try:
        res = requests.get(url, timeout=30, headers=heads, verify=False)
        res.encoding = 'UTF-8'
        soup = BeautifulSoup(res.text, 'html.parser')

        # Prefer <title>; fall back to <h1>, then to the URL itself, so a
        # page with neither tag cannot raise AttributeError on .text.
        title = soup.find('title')
        target = title.text if title is not None else ''
        if not target:
            h1 = soup.find('h1')
            target = h1.text if h1 is not None else url

        result = "[" + target + "](" + url + ")\n"
        # Green title, blue URL via ANSI escape codes.
        print("[\033[32m%s\033[0m](\033[34m%s\033[0m)" % (target, url))
    except requests.RequestException:
        # Network/HTTP failure: report in red and fall through with ''.
        print("[\033[31m############   Something Error  #############\033[0m](\033[31m%s\033[0m)" % (url))
        result = ''

    des.write(result)
    return result

def getsubimg(url):
    """Return a list of image URLs scraped from the feed list of *url*.

    Fetches the page, locates the ``<ul id="feed-main-list">`` element and
    collects every ``<img>`` ``src`` attribute, prefixed with ``https:``
    (the site uses protocol-relative ``//...`` sources).

    Returns an empty list when the feed list is absent (layout change or
    blocked request).
    """
    res = requests.get(url, timeout=30, headers=heads, verify=False)
    res.encoding = 'UTF-8'
    soup = BeautifulSoup(res.text, 'html.parser')

    feed = soup.find('ul', id="feed-main-list")
    if feed is None:
        # Nothing to scrape; avoid AttributeError on .find_all(None).
        return []

    tmp_suburl = []
    for img in feed.find_all('img'):
        src = img.get('src')
        # Lazy-loaded <img> tags may have no src; skip them instead of
        # raising TypeError on "https:" + None.
        if src:
            tmp_suburl.append("https:" + src)
    return tmp_suburl


if __name__ == '__main__':
    # Scrape the image URLs from the search page, then download each one
    # to the working directory as <index>.img.
    suburl = getsubimg(url)
    for i, link in enumerate(suburl):
        # Skip obviously-bogus entries (e.g. "https:" plus an empty or
        # near-empty src) — anything shorter than "https://x".
        if len(link) < 9:
            continue
        urllib.request.urlretrieve(link, filename=str(i) + ".img")

    print("get img ok!")