from multiprocessing import Pool
from page_parsing import get_item_info_from,url_list,item_info,get_links_from
from channel_extracing import channel_list

# URLs already discovered in the listing index vs. URLs whose detail page
# has been parsed; the difference is what still needs to be crawled.
db_urls = [record['url'] for record in url_list.find()]
index_urls = [record['url'] for record in item_info.find()]
x = set(db_urls)
y = set(index_urls)
# Discovered-but-not-yet-parsed URLs.
rest_of_urls = x.difference(y)

def get_all_links_from(channel, start_page=1, end_page=100):
    """Collect item links for every listing page of *channel*.

    Calls ``get_links_from(channel, page)`` for each page number in
    ``range(start_page, end_page)``.  The defaults (1..99) reproduce the
    original hard-coded behavior; the parameters generalize the previously
    fixed page range.

    Args:
        channel: Channel URL/identifier forwarded to ``get_links_from``.
        start_page: First page number to fetch (inclusive, default 1).
        end_page: Stop page number (exclusive, default 100).
    """
    for page in range(start_page, end_page):
        get_links_from(channel, page)


if __name__ == '__main__':
    # Crawl every channel's listing pages with a small worker pool.
    #
    # BUG FIX: the original called pool.map(get_links_from, get_all_links_from),
    # which passes a *function* as the iterable and raises TypeError at runtime.
    # The intent (visible in the commented-out draft) was to map the per-channel
    # worker over the actual channel URLs.
    pool = Pool(processes=2)
    try:
        pool.map(get_all_links_from, channel_list.split())
    finally:
        # Always release the worker processes, even if a worker raises.
        pool.close()
        pool.join()


# NOTE(review): dead module-level string expression (looks like a leftover
# checksum/marker from a notebook or VCS tool); it has no runtime effect.
'''
'sha1:70ecb3481b0f:019a4e6f39e649c33c9701ae2d09f54d5be0087d'
'''

