# -*- coding: utf-8 -*-
# @Time : 2019/7/1 15:02
# @Author : 李天超
# @Site : 多线程启动爬虫任务
# @File : multithread_startup_task.py
# @Software: PyCharm
from multiprocessing import Pool
# import run
import Task
from Spider.BaiduSpider import Baidu


def start_up_process(word):
    """Worker entry: crawl Baidu search results for a single keyword.

    Runs inside a worker process spawned by the pool in `start()`.

    :param word: search keyword to crawl (one task).
    """
    print('启动任务：', word)
    # Kick off the crawl; the return value is not used here, so don't
    # keep a misleadingly-named local for it.
    Baidu().start_baidu_search_unique_code(word=word)


def start():
    """Fetch all pending tasks and crawl each keyword in a 16-process pool.

    Each task string is split on spaces and only the last token is used
    as the keyword (task lines are assumed to end with the keyword —
    TODO confirm against Task.get_all_task()'s format).
    """
    all_task = Task.get_all_task()
    print(all_task)

    pool = Pool(16)
    # Keep the AsyncResult handles: apply_async swallows worker
    # exceptions unless .get() is called on its result.
    pending = []
    for word in all_task:
        word = word.split(' ')[-1]
        pending.append(pool.apply_async(start_up_process, args=(word,)))
        print('生成任务：' + word)

    # Stop accepting new tasks, then wait for all workers to finish.
    pool.close()
    pool.join()

    # Surface any exception raised inside a worker instead of dropping
    # it silently; report and continue so one bad task doesn't hide
    # the rest.
    for result in pending:
        try:
            result.get()
        except Exception as exc:
            print('Task failed:', exc)

    print("结束爬取")


# Script entry point: launch the crawl of all pending tasks.
if __name__ == '__main__':
    start()

# if __name__ == '__main__':
#
#     page = 5
#
#     p = Pool(4)
#
#     for i in range(1, page + 1):
#         a = p.apply_async(start_up_process, args=(i,))
#         print('启动任务：' + i)
#
#     # 关闭进程池,不在接收新的任务
#     p.close()
#     # 等待所有的进程都走完了
#     p.join()
#
#     print("结束爬取")
