#!/usr/bin/env python
import spade, urllib2, time, sys, sqlite3, re, random, json
from bs4 import BeautifulSoup, Comment
from collections import deque

class locationCrawler(spade.Agent.Agent):
	__urls__ = []
	class informWhitepages(spade.Behaviour.OneShotBehaviour):	# inform service directory
		def _process(self):
			sd = spade.DF.ServiceDescription()
			sd.setName("location")
			sd.setType("crawler")
			dad = spade.DF.DfAgentDescription()
			dad.addService(sd)
			dad.setAID(self.myAgent.getAID())
			res = self.myAgent.registerService(dad)
			print "Started location crawler"
	class check_for_empty_list(spade.Behaviour.PeriodicBehaviour):	# if no more urls in list, ask for more
		def _onTick(self):
			if not (len(self.myAgent.__urls__) > 0):
				dad = spade.DF.DfAgentDescription()
				ds = spade.DF.ServiceDescription()
				ds.setType("provider")
				ds.setName("location")
				dad.addService(ds)
				agents = self.myAgent.searchService(dad)
				if len(agents) > 0:
					request = spade.ACLMessage.ACLMessage()
					request.setPerformative("request")
					request.setOntology("location")
					request.addReceiver(random.choice(agents).getAID())
					self.myAgent.send(request)
	class parseMainURL(spade.Behaviour.EventBehaviour):		# receive a request to crawl a location
		def _process(self):
			_hotels_ = []
			msg = self._receive(True)
			self.myAgent.coordinator = msg.getSender()
			basic_url = msg.getContent()
			print "Got location: " + basic_url
			print "Visiting page: " + basic_url
			soop = BeautifulSoup(urllib2.urlopen("http://www.tripadvisor.com"+basic_url).read(),'html.parser')
			for hotel in soop.find("div",{"id":"ACCOM_OVERVIEW"}).findAll('div', id=re.compile('^hotel_')):
				_hotels_.append(hotel.a['href'])
			print str(len(_hotels_)) + " Hotels found"
			reply = spade.ACLMessage.ACLMessage()
			reply.setOntology("findings")
			reply.addReceiver(self.myAgent.getAID())
			reply.setContent(json.dumps(_hotels_))
			self.myAgent.send(reply)
			try:
				for i in xrange(30,int("".join(soop.find('span',{'class':'tab_count'}).text.strip('()').split(','))),30):
					self.myAgent.__urls__.append("-".join(basic_url.split('-')[:2]) + "-oa" + str(i) + "-" + "-".join(basic_url.split('-')[2:]))
			except:
				print "failed in extra pages"
				print ("".join(soop.find('span',{'class':'tab_count'}).text.strip('()').split('.')))

	class parseUrlList(spade.Behaviour.PeriodicBehaviour):
		def onStart(self):
			self.reply = spade.ACLMessage.ACLMessage()
			self.reply.setOntology("findings")
			self.reply.addReceiver(self.myAgent.getAID())
		def _onTick(self):					# get extra pages of hotels in location to parse
			if len(self.myAgent.__urls__) > 0:
				url = self.myAgent.__urls__.pop().strip()
				_hotels_ = []
				print "Visiting page: " + url
				soop = BeautifulSoup(urllib2.urlopen("http://www.tripadvisor.com"+url).read(),'html.parser')
				for hotel in soop.find("div",{"id":"ACCOM_OVERVIEW"}).findAll('div', id=re.compile('^hotel_')):
					_hotels_.append(hotel.a['href'])
				print str(len(_hotels_)) + " Hotels found"
				self.reply.setContent(json.dumps(_hotels_))
				self.myAgent.send(self.reply)

	class forwardFindings(spade.Behaviour.EventBehaviour):
		def _process(self):
			incoming = self._receive(True)
			outgoing = spade.ACLMessage.ACLMessage()
			outgoing.setOntology("location")
			outgoing.setPerformative("inform")
			outgoing.addReceiver(self.myAgent.coordinator)
			outgoing.setContent(incoming.getContent())
			self.myAgent.send(outgoing)
			print "Reply sent."
	def _setup(self):
		forward_template = spade.Behaviour.ACLTemplate()
		request_template = spade.Behaviour.ACLTemplate()
		forward_template.setOntology("findings")
		forward_template.setSender(self.getAID())
		request_template.setOntology("location")
		request_template.setPerformative("request")
		self.addBehaviour(self.forwardFindings(),spade.Behaviour.MessageTemplate(forward_template))
		self.addBehaviour(self.parseUrlList(2),None)
		self.addBehaviour(self.parseMainURL(),spade.Behaviour.MessageTemplate(request_template))
		self.addBehaviour(self.informWhitepages(),None)
		self.addBehaviour(self.check_for_empty_list(60),None)

if __name__ == "__main__":
	# Launch the crawler agent and idle until the user interrupts.
	agent = locationCrawler("locationcrawrler@127.0.0.1","secret")
	agent.start()
	try:
		while True:
			time.sleep(1)
	except KeyboardInterrupt:
		pass
	agent.stop()
	sys.exit(0)


