from Registry import Registry
from Common import Common
from WSJob import WSJob
import queue, time, sys

class SpiderJob(WSJob):  # TODO: make debug output configurable via an option
	"""Job queue for the spider, backed by the ``spider_urls`` Mongo collection.

	Pulls batches of unchecked, not-yet-taken links from MongoDB into the
	in-memory queue on demand, marking each one as taken (``getted``) so
	concurrent workers do not pick up the same link twice.
	"""

	def __init__(self, maxsize=0):
		"""Create the queue (``maxsize`` as in queue.Queue) and bind the Mongo handle."""
		super().__init__(maxsize)
		self.db = Registry.get('mongo')

	def _fill_queue(self):
		"""Fetch up to 50 fresh links from Mongo, mark them taken, enqueue them.

		Links with ``checked == 0`` and ``getted == 0`` are claimed by setting
		``getted = 1`` in the DB *before* any of them are enqueued, so a
		blocking ``put`` cannot leave half a batch unclaimed.
		"""
		links = self.db.spider_urls.find({'checked': 0, 'getted': 0}).limit(50)
		links = Common.mongo_result_to_list(links)

		if links:
			# Claim the whole batch in the DB first, then enqueue.
			# NOTE(review): collection.update() is the deprecated pymongo API
			# (update_one is the modern equivalent) — kept for driver compat.
			for link in links:
				link['getted'] = 1
				self.db.spider_urls.update({'hash': link['hash']}, {'$set': {'getted': 1}})

			print(f"extracted {len(links)}")

			for link in links:
				self.put(link)

	def get(self, block=True, timeout=None):
		"""Return the next link, refilling from Mongo when the queue is empty.

		Raises queue.Empty when the DB has no more work either.
		"""
		if self.empty():
			self._fill_queue()

		# Still empty after a refill attempt => no work left in the DB.
		if self.empty():
			raise queue.Empty

		return super().get(block, timeout)

	def get_many(self):
		"""Return a list of up to ``links_one_time_in_work`` links.

		Raises queue.Empty only when not a single link could be obtained;
		a partially filled batch is returned as-is.
		"""
		# Hoist the loop-invariant config lookup out of the loop: the limit
		# does not change between iterations.
		limit = int(Registry.get('config')['spider']['links_one_time_in_work'])

		result = []
		try:
			while len(result) < limit:
				result.append(self.get())
		except queue.Empty:
			if not result:
				raise  # bare re-raise preserves the original traceback

		return result

	def have_work_links(self):
		"""Return True if any unchecked links remain in the collection."""
		# NOTE(review): cursor.count() is deprecated in modern pymongo
		# (count_documents is the replacement) — kept for driver compat.
		return bool(self.db.spider_urls.find({'checked': 0}).count())

