# Description: insulation_crawler.py implements a crawler wrapped in the insulation harness.
# Collaborators: Saulo Ricci
# Date: 2010/08/23
import commands
from insulation.insulation import insulation
from settings import *
import sys

class insulation_crawler(insulation):
	"""Crawler wrapped in the project's "insulation" harness.

	Drives an external crawler script through shell commands (Python 2
	``commands`` module): it wipes the working directories, runs the
	script over a seeds file, moves finished artifacts from
	``auxiliary_dir/ar`` into ``output_dir`` (renamed with a
	``lastfm.ar.`` prefix), and rewrites the seeds file with whatever
	is still missing.

	NOTE(review): every path from ``input_set`` is interpolated into a
	shell command string unquoted — paths containing spaces or shell
	metacharacters will break (or escape) these commands. Confirm the
	configured paths are always shell-safe.
	"""

	def __init__(self, input_set, output_set):
		# input_set / output_set are dicts of paths and crawler options
		# (keys used here: queue_dir, auxiliary_dir, output_dir,
		# seeds_filename, log_filename, log_output_filename,
		# crawler_script, crawler_type, sleep_time, number_threads,
		# and output_set['output_files_list']).
		insulation.__init__(self, input_set, output_set)

	def execute(self):
		"""Entry point: reset the working area, then run the crawl loop."""
		# First empty the files and directories to do crawling job
		self.check_empties_structures()

		# Now is ready to executes the crawling
		self.safety_crawling()	

	def check_empties_structures(self):
		"""Wipe the crawler's working directories and truncate its logs.

		Each shell probe (``ls`` / ``cat``) guards the destructive
		command, so nothing runs when the structure is already empty.
		The ``result`` assignments only capture shell output; it is
		never inspected (failures are silently ignored).
		"""
		# First check the directories
		# Check queue directory
		if commands.getoutput("ls " + self.input_set['queue_dir']) != "":
			result = commands.getoutput("rm -rf " + self.input_set['queue_dir'] + "/*")
		
		# Check auxiliary directory
		if commands.getoutput("ls " + self.input_set['auxiliary_dir']) != "":
			result = commands.getoutput("rm -rf " + self.input_set['auxiliary_dir'] + "/*")

		# Second check the files
		# Check log file
		# "> file" run through the shell truncates the file to zero bytes.
		if commands.getoutput("cat " + self.input_set['log_filename']) != "":
			result = commands.getoutput("> " + self.input_set['log_filename'])

		# Check log output file
		if commands.getoutput("cat " + self.input_set['log_output_filename']) != "":
			result = commands.getoutput("> " + self.input_set['log_output_filename'])

	def safety_crawling(self):
		"""Crawl every seed in the seeds file, looping until all are done.

		Seeds are stored as ``Ar:<name>``; a crawled artifact appears as
		``auxiliary_dir/ar/<name>`` and ends up in ``output_dir`` renamed
		``lastfm.ar.<name>``, which is also appended to
		``output_set['output_files_list']``.

		NOTE(review): the unconditional ``sys.exit(0)`` at the bottom of
		the loop terminates the whole process after a single crawler
		invocation, so the ``while True`` retry loop can never run twice
		— this looks like leftover debugging; confirm before relying on
		the retry behaviour.
		"""
		print "Gonna crawl!"
		# Drop seeds whose output already exists (and register them).
		self.check_seeds(self.input_set['seeds_filename'])
		list_seeds = []
		with open(self.input_set['seeds_filename'], 'r') as file:
			for line in file:
				list_seeds.append(line.strip())
		
		# Crawling
		while True:
			# Verify which seeds the previous crawler run completed.
			crawled_objects = self.get_list_crawled()
			aux_list_seeds = []
			for seed in list_seeds:
				# Seeds carry an "Ar:" prefix; files on disk do not.
				if seed.replace("Ar:", "").strip() not in crawled_objects:
					aux_list_seeds.append(seed.strip())
				else:
					# Move the finished artifact into output_dir, rename it
					# with the "lastfm.ar." prefix, and record the final
					# path for the downstream mapper to transform.
					result = commands.getoutput("mv " + self.input_set['auxiliary_dir'] + '/ar/' + seed.replace("Ar:", "").strip() + " " + self.input_set["output_dir"] + "/")
					result = commands.getoutput("rename.ul " + seed.replace("Ar:", "").strip() + " lastfm.ar." + seed.replace("Ar:", "").strip() + " " + self.input_set['output_dir'] + "/" + seed.replace("Ar:", "").strip())
					self.output_set['output_files_list'].append(self.input_set['output_dir'] + "/" + "lastfm.ar." + seed.replace("Ar:", "").strip())

			# All objects have gotten
			if aux_list_seeds == []:
				break
	
			# Rewrite the seeds file with only the still-missing seeds
			# (truncate via shell, then write the survivors back).
			result = commands.getoutput("> " + self.input_set['seeds_filename'])
			list_seeds = aux_list_seeds
			with open(self.input_set['seeds_filename'], 'w') as file:
				for entry in list_seeds:
					file.write(entry.strip() + '\n')

			# Empties all structures again
			self.check_empties_structures()
			print "Try to craw"
			# Run the external crawler script once over the remaining
			# seeds, redirecting its console output into
			# log_output_filename.
			result = commands.getoutput(self.input_set['crawler_script'] + " -c " + self.input_set['crawler_type']+ " -e " + self.input_set['seeds_filename'] + " -l " + self.input_set['log_filename'] + " -s " + self.input_set['sleep_time'] + " -t " + self.input_set['number_threads'] + " -w " + self.input_set['queue_dir'] + " -o " + self.input_set['auxiliary_dir'] + " > " + self.input_set['log_output_filename'])
			print "Crawled more one!"
			print result
			# Echo the exact command line for debugging.
			print self.input_set['crawler_script'] + " -c " + self.input_set['crawler_type']+ " -e " + self.input_set['seeds_filename'] + " -l " + self.input_set['log_filename'] + " -s " + self.input_set['sleep_time'] + " -t " + self.input_set['number_threads'] + " -w " + self.input_set['queue_dir'] + " -o " + self.input_set['auxiliary_dir'] + " > " + self.input_set['log_output_filename']
			sys.exit(0)

	def check_seeds(self, seeds_filename):
		"""Prune seeds whose output already exists in ``output_dir``.

		Already-crawled seeds are appended to
		``output_set['output_files_list']``; the seeds file is then
		rewritten (back in ``Ar:`` form) with only the seeds still to
		crawl. No-op when the output directory is empty.
		"""
		seeds_crawled = commands.getoutput("ls " + self.input_set['output_dir'])
		if seeds_crawled != "":
			seeds_crawled_list = seeds_crawled.split('\n')
			# Requested seeds are "Ar:<name>" while crawled files are
			# "lastfm.ar.<name>", so normalise before comparing.
			seeds_requested = commands.getoutput("cat " + seeds_filename)
			seeds_requested = seeds_requested.replace("Ar:", "lastfm.ar.")
			seeds_requested_list = seeds_requested.split('\n')
			seeds_gonna_crawl_list = []
			for seed in seeds_requested_list:
				if seed not in seeds_crawled_list:
					seeds_gonna_crawl_list.append(seed)
				# Seeds are crawled, so append in output_set to mapper transform the crawled seed
				else:
					self.output_set['output_files_list'].append(self.input_set['output_dir'] + "/" + seed.strip())
			# Update seeds
			with open(seeds_filename, 'w') as seeds_file:
				for updated_seed in seeds_gonna_crawl_list:
					seeds_file.write(updated_seed.replace("lastfm.ar.", "Ar:") + '\n')	

	def get_list_crawled(self):
		"""Return the filenames found in ``auxiliary_dir/ar``.

		Returns an empty list when the auxiliary directory (or its
		``ar`` subdirectory) is empty or missing.
		"""
		result = commands.getoutput("ls " + self.input_set['auxiliary_dir'])
		if result != "":
			result = commands.getoutput("ls " + self.input_set['auxiliary_dir'] + "/ar")
			if result != "":
				return result.split('\n')
		
		return []
