from blog_spider import urls, crawl
import threading
import time

def single_thread():
    """Crawl every URL sequentially on the main thread."""
    print('single thread start')
    # Each request blocks until it completes before the next one begins.
    for target in urls:
        crawl(target)
    print('single thread end')

def multi_thread():
    """Crawl all URLs concurrently, one thread per URL, and wait for completion."""
    print('multi thread start')
    # Build one worker thread per URL up front.
    workers = [
        threading.Thread(target=crawl, args=(target,))
        for target in urls
    ]
    # Launch every worker before joining any, so all requests overlap.
    for worker in workers:
        worker.start()
    # Block until every crawl has finished.
    for worker in workers:
        worker.join()
    print('multi thread end')

if __name__ == '__main__':
    # Benchmark the sequential crawl.
    # Use perf_counter() rather than time(): it is monotonic and
    # high-resolution, so the measurement cannot be skewed by system
    # clock adjustments (NTP sync, DST, manual changes).
    start = time.perf_counter()
    single_thread()
    end = time.perf_counter()
    print(f'single thread cost: {end - start:.2f}s')

    # Benchmark the concurrent crawl for comparison.
    start = time.perf_counter()
    multi_thread()
    end = time.perf_counter()
    print(f'multi thread cost: {end - start:.2f}s')