from Registry import Registry
from Common import Common
from SpiderCommon import SpiderCommon
from WSThread import WSThread


import time, queue, requests, sys, re, urllib, time

class SpiderThread(WSThread):
	"""Worker thread that crawls batches of links for a single host and
	records the results (HTML on disk, metadata in MongoDB)."""

	# NOTE(review): class-level mutable list is shared by ALL instances;
	# it is unused in this file — confirm intent before relying on it.
	_external_hosts = []

	def __init__(self, job, host):
		"""
		:param job: queue-like object whose get_many() yields link batches
		            and raises queue.Empty when exhausted.
		:param host: host being crawled (passed through to SpiderCommon).
		"""
		super().__init__(None, None)
		self.db = Registry.get('mongo')
		self.job = job
		self.host = host

	def run(self):
		"""Consume link batches until the job queue is exhausted."""
		while True:
			# Keep the try body minimal: only get_many() is expected to
			# raise queue.Empty; an accidental Empty from scan_links()
			# must not silently terminate the loop.
			try:
				links = self.job.get_many()
			except queue.Empty:
				break
			self.scan_links(links)
		self.running = False

	def _checked(self, link):
		"""Persist one scanned link back into the spider_urls collection."""
		# '_id' is immutable in MongoDB and must not appear in $set;
		# pop() (unlike del) tolerates its absence.
		link.pop('_id', None)
		self.db.spider_urls.update({'hash': link['hash']}, {'$set': link})

	def scan_links(self, links):
		"""Fetch each link, store its HTML, harvest new links, and stamp
		each link dict with size/referer/code/time before marking the
		whole batch checked."""
		# Session reuses connections across requests; 'with' closes the
		# connection pool when the batch is done.
		with requests.Session() as session:
			for link in links:
				url = SpiderCommon.gen_url(link, self.host)

				start_time = int(round(time.time() * 1000))
				# NOTE(review): no timeout= here — a stalled server blocks
				# this thread indefinitely; consider adding one.
				response = session.get(url)
				print(url)
				sys.stdout.flush()
				result_time = int(round(time.time() * 1000)) - start_time

				self._put_html_in_map(link['hash'], response.text)

				new_links = SpiderCommon.parse_links(str(response.content), '')
				SpiderCommon.insert_links(new_links, url, self.host)

				link['size'] = len(response.content)
				link['referer'] = url
				link['code'] = response.status_code
				link['time'] = result_time  # wall-clock fetch time, ms

		SpiderCommon.links_checked(links, self.host)

	def _put_html_in_map(self, link_hash, html):
		"""Write fetched HTML to the on-disk map file named by link_hash."""
		path = Registry.get('data_path') + '/' + Registry.get('map') + '/' + link_hash
		# 'with' guarantees the handle is closed even if write() raises;
		# explicit UTF-8 (with replacement) avoids platform-default
		# encoding errors on arbitrary fetched pages.
		with open(path, 'w', encoding='utf-8', errors='replace') as fh:
			fh.write(html)

