#!/usr/bin/env python
import spade, urllib2, time, sys, sqlite3, re, random
from bs4 import BeautifulSoup, Comment
from collections import deque

class hotelCrawler(spade.Agent.Agent):
	class informWhitepages(spade.Behaviour.OneShotBehaviour):
		def _process(self):					# inform service directory
			sd = spade.DF.ServiceDescription()
			sd.setName("hotel")
			sd.setType("crawler")
			dad = spade.DF.DfAgentDescription()
			dad.addService(sd)
			dad.setAID(self.myAgent.getAID())
			res = self.myAgent.registerService(dad)
			print "Started hotel crawler"

	class askForURL(spade.Behaviour.PeriodicBehaviour):	# if no more urls in list, ask for more
		def _onTick(self):
			dad = spade.DF.DfAgentDescription()
			ds = spade.DF.ServiceDescription()
			ds.setType("provider")
			ds.setName("hotelurl")
			dad.addService(ds)
			self.agents = self.myAgent.searchService(dad)
			if len(self.agents) > 0:
				request = spade.ACLMessage.ACLMessage()
				request.setPerformative("request")
				request.setOntology("hotel")
				request.addReceiver(random.choice(self.agents).getAID())
				self.myAgent.send(request)
				print "Asked For hotelurl"
			else: print "no known agents"
	class parseMainURL(spade.Behaviour.EventBehaviour):
		def _process(self):					# receive a request to crawl a hotel
			_reviews_ = []
			msg = self._receive(True)
			self.myAgent.coordinator = msg.getSender()
			basic_url = msg.getContent().strip()
			print "Visiting Motel: " + basic_url
			soop = BeautifulSoup(urllib2.urlopen("http://www.tripadvisor.com" + basic_url).read(),'html.parser')
			for review in soop.find("div",{"id":"REVIEWS"}).findAll('div', id=re.compile('^review_')):
				_reviews_.append(review.findAll('a')[1]['href'])
			print str(len(_reviews_)) + " Reviews found"
			reply = spade.ACLMessage.ACLMessage()
			reply.setOntology("findings")
			reply.addReceiver(self.myAgent.getAID())
			reply.setContent(_reviews_)
			self.myAgent.send(reply)
			limit = int(soop.find('label',{'for': 'sortOrder'}).text.split()[0])
			print "Total reviews: " + str(limit)
			for i in xrange(10,limit,10):
				url = ("-".join(basic_url.split('-')[:4]) + "-or" + str(i) + "-" + "-".join(basic_url.split('-')[4:]))
				_reviews_ = []
				print "Visiting Hotel: " + url
				soop = BeautifulSoup(urllib2.urlopen("http://www.tripadvisor.com"+url).read(),'html.parser')
				for review in soop.find("div",{"id":"REVIEWS"}).findAll('div', id=re.compile('^review_')):
					_reviews_.append(review.findAll('a')[1]['href'])
				print str(len(_reviews_)) + " Reviews found"
				reply = spade.ACLMessage.ACLMessage()
				reply.setOntology("findings")
				reply.addReceiver(self.myAgent.getAID())
				reply.setContent(_reviews_)
				self.myAgent.send(reply)
	class forwardFindings(spade.Behaviour.EventBehaviour):
		def _process(self):
			incoming = self._receive(True)
			outgoing = spade.ACLMessage.ACLMessage()
			outgoing.setOntology("hotel")
			outgoing.setPerformative("inform")
			outgoing.addReceiver(self.myAgent.coordinator)
			outgoing.setContent(incoming.getContent())
			self.myAgent.send(outgoing)
			print "Reply sent."
	def _setup(self):
		forward_template = spade.Behaviour.ACLTemplate()
		request_template = spade.Behaviour.ACLTemplate()
		forward_template.setOntology("findings")
		forward_template.setSender(self.getAID())
		request_template.setOntology("hotel")
		request_template.setPerformative("request")
		self.addBehaviour(self.forwardFindings(),spade.Behaviour.MessageTemplate(forward_template))
		self.addBehaviour(self.parseMainURL(),spade.Behaviour.MessageTemplate(request_template))
		self.addBehaviour(self.informWhitepages(),None)
		self.addBehaviour(self.askForURL(45),None)

if __name__ == "__main__":
	# Launch the crawler agent and idle until interrupted from the keyboard.
	agent = hotelCrawler("hc1@127.0.0.1", "secret")
	agent.start()
	running = True
	while running:
		try:
			time.sleep(1)
		except KeyboardInterrupt:
			running = False
	agent.stop()
	sys.exit(0)


