# coding=utf-8
from scrapy import cmdline
from multiprocessing import Pool as Process_Pool
from time import sleep


def run_spider(number, spider_name="spider"):
    """Run a Scrapy crawl in the current (worker) process.

    NOTE: ``cmdline.execute`` does not return — it calls ``sys.exit()`` when
    the crawl finishes — so this function must only be invoked inside a
    dedicated child process, never in the parent.

    :param number: index of this worker slot; used only for the progress line.
    :param spider_name: name of the Scrapy spider to crawl (default: "spider").
    """
    print("线程" + str(number))
    # Pass argv as a list; equivalent to the old "scrapy crawl spider".split()
    # for the default, but safe for any spider_name.
    cmdline.execute(["scrapy", "crawl", spider_name])

if __name__ == '__main__':
    # maxtasksperchild=1 gives every crawl a fresh process: cmdline.execute()
    # ends the worker via sys.exit() and Twisted's reactor cannot be
    # restarted, so a reused pool worker would fail on its second task.
    p_pool = Process_Pool(maxtasksperchild=1)
    # NOTE(review): the AsyncResult objects are dropped, so crawl failures
    # are silent; calling .get() here is unsafe because the worker exits
    # before reporting a result. Inspect scrapy's own logs instead.
    for i in range(5):
        p_pool.apply_async(run_spider, args=(i,))
        sleep(1)  # stagger spider start-ups
    p_pool.close()
    p_pool.join()
    sleep(5)  # grace period for child output to flush before exiting
