import random
import threading
import time
import urllib.request
from queue import Queue

import requests
from bs4 import BeautifulSoup


num = 1  # Next listing-page number to fetch; incremented by each call to get().

# Pool of desktop browser User-Agent strings; one is picked at random per
# request so consecutive requests look less uniform. Hoisted to module level
# so the list is not rebuilt on every call.
USER_AGENT_LIST = [
        "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.1 (KHTML, like Gecko) Chrome/22.0.1207.1 Safari/537.1",
        "Mozilla/5.0 (X11; CrOS i686 2268.111.0) AppleWebKit/536.11 (KHTML, like Gecko) Chrome/20.0.1132.57 Safari/536.11",
        "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.6 (KHTML, like Gecko) Chrome/20.0.1092.0 Safari/536.6",
        "Mozilla/5.0 (Windows NT 6.2) AppleWebKit/536.6 (KHTML, like Gecko) Chrome/20.0.1090.0 Safari/536.6",
        "Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.1 (KHTML, like Gecko) Chrome/19.77.34.5 Safari/537.1",
        "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/536.5 (KHTML, like Gecko) Chrome/19.0.1084.9 Safari/536.5",
        "Mozilla/5.0 (Windows NT 6.0) AppleWebKit/536.5 (KHTML, like Gecko) Chrome/19.0.1084.36 Safari/536.5",
        "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1063.0 Safari/536.3",
        "Mozilla/5.0 (Windows NT 5.1) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1063.0 Safari/536.3",
        "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_8_0) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1063.0 Safari/536.3",
        "Mozilla/5.0 (Windows NT 6.2) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1062.0 Safari/536.3",
        "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1062.0 Safari/536.3",
        "Mozilla/5.0 (Windows NT 6.2) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1061.1 Safari/536.3",
        "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1061.1 Safari/536.3",
        "Mozilla/5.0 (Windows NT 6.1) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1061.1 Safari/536.3",
        "Mozilla/5.0 (Windows NT 6.2) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1061.0 Safari/536.3",
        "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/535.24 (KHTML, like Gecko) Chrome/19.0.1055.1 Safari/535.24",
        "Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/535.24 (KHTML, like Gecko) Chrome/19.0.1055.1 Safari/535.24"
]


def get():
    """Scrape one budejie video listing page and queue (title, url) pairs.

    Downloads page ``num`` with a randomized User-Agent, extracts every
    video entry (the title text plus the ``data-mp4`` link) and puts
    ``[title, video]`` items onto the shared ``url_list`` queue created in
    ``__main__``. Increments the module-level page counter ``num``.

    Returns:
        The shared ``url_list`` queue (callers may ignore it).
    """
    global num
    headers = {'User-Agent': random.choice(USER_AGENT_LIST)}

    url = "http://www.budejie.com/video/" + str(num)
    # A timeout keeps one stalled server response from hanging the crawl.
    html = requests.get(url, headers=headers, timeout=10).text

    soup = BeautifulSoup(html, 'lxml')
    for item in soup.find_all('div', attrs={'class': 'j-r-list-c'}):
        desc = item.find('div', attrs={'class': 'j-r-list-c-desc'})
        video_div = item.find('div', attrs={'class': 'j-video'})
        # Skip malformed entries instead of letting one AttributeError/
        # KeyError abort the entire page.
        if desc is None or desc.find('a') is None or video_div is None:
            continue
        video = video_div.attrs.get('data-mp4')
        if video is None:
            continue
        title = desc.find('a').get_text()
        url_list.put([title, video])  # consumed by the write() workers

    num += 1
    return url_list

def geteveryurl():
    """Crawl the first 50 listing pages, pausing 3 seconds between requests."""
    page_count = 50
    for _ in range(page_count):
        time.sleep(3)  # throttle so the site is not hammered
        get()


def write():
    """Worker: download queued videos until the shared queue is empty.

    Repeatedly takes ``[title, video_url]`` items off the module-global
    ``url_list`` queue and saves each video under ``./video/``. Uses a
    non-blocking ``get_nowait`` so the worker exits when the queue drains
    instead of blocking forever on ``Queue.get()`` (which deadlocked the
    joining thread in the original code).
    """
    from queue import Empty  # local import keeps the module import block untouched
    while True:
        try:
            title, video_url = url_list.get_nowait()
        except Empty:
            return  # queue drained — worker is done
        # Path separators in a scraped title would otherwise break the
        # destination path; replace them before building the filename.
        safe_title = title.replace('/', '_').replace('\\', '_')
        urllib.request.urlretrieve(video_url, './video/' + safe_title + '.mp4')

def start():
    """Drain the shared ``url_list`` queue with batches of download threads.

    While items remain, spawns a batch of 12 ``write`` worker threads and
    waits for the whole batch before checking the queue again. The batch
    list is created fresh each iteration — the original accumulated every
    thread in one list and re-joined already-finished threads on every pass.
    """
    while not url_list.empty():
        batch = []
        for _ in range(12):  # 12 concurrent download threads per batch
            t = threading.Thread(target=write)
            t.start()
            batch.append(t)
        for t in batch:
            t.join()  # wait for this batch only


if __name__ == "__main__":
    url_list = Queue()
    geteveryurl()
    start()
    