import threading
from DataAcquisition import download_repos
import json
from multiprocessing import Pool
import os

def one_process(url_and_name, start, end):
	"""Download repositories url_and_name[start:end] using worker threads.

	The slice is cut into chunks of at most 8000 entries; each chunk is
	downloaded by its own thread via download_repos. Blocks until every
	thread has finished, so the enclosing pool task only completes once
	all downloads in this slice are done.

	Args:
		url_and_name: sequence of repository descriptors (indexed by download_repos).
		start: first index of the slice (inclusive).
		end: last index of the slice (exclusive).
	"""
	CHUNK_SIZE = 8000
	# Empty slice: the original code crashed here with IndexError on indexs[-1].
	if start >= end:
		return
	boundaries = list(range(start, end, CHUNK_SIZE))
	# range() never includes its stop value, so always close the last chunk.
	boundaries.append(end)
	threads = []
	for i in range(len(boundaries) - 1):
		t = threading.Thread(
			target=download_repos,
			args=(url_and_name, boundaries[i], boundaries[i + 1]),
			name=str(i),
		)
		t.start()
		threads.append(t)
	# Join every thread so this function (and thus the pool task) does not
	# return while downloads are still in flight.
	for t in threads:
		t.join()

if __name__=='__main__':
	print('Parent process %s.' % os.getpid())

	# Load the full list of repositories; the file can be closed immediately
	# after parsing — no need to hold it open for the whole download run.
	with open(os.path.join('RepoJson', 'all_repo.json'), 'r') as fr:
		url_and_name = json.load(fr)

	total = len(url_and_name)
	num_tasks = 10
	# Split the work into num_tasks contiguous slices computed from the
	# actual list length, instead of hard-coded boundaries (0, 40000, ...,
	# 360000) that break whenever the JSON does not hold ~400k entries.
	step = max(1, -(-total // num_tasks))  # ceil(total / num_tasks)
	indexs = list(range(0, total, step))
	indexs.append(total)

	p = Pool(8)
	results = []
	for i in range(len(indexs) - 1):
		# Keep the AsyncResult so worker exceptions are not silently dropped.
		results.append(p.apply_async(one_process, args=(url_and_name, indexs[i], indexs[i+1])))
	print('Waiting for all subprocesses done...')
	p.close()
	p.join()
	# Re-raise any exception that occurred inside a worker process.
	for r in results:
		r.get()
	print('All subprocesses done.')
