# -*- coding: cp1254 -*-
import Queue
import threading
import time

import db
import pageOps

# Threaded web crawler: pulls URLs from a shared queue, records finished
# pages in the database, and enqueues every link discovered on each page.
class Crawler(threading.Thread):
	ns = 0                       # pages successfully stored in the DB (shared across threads)
	tocrawl = Queue.Queue()      # URLs waiting to be crawled (thread-safe queue)
	dbLock = threading.Lock()    # serializes all database access

	@staticmethod
	def callback(p):
		"""pageOps success hook: store the finished page and queue its links.

		p is expected to expose .done, .url and .links -- TODO confirm
		against pageOps.
		"""
		if p.done:
			# Hold dbLock for the whole connect/insert/close sequence and
			# release it even if a DB call raises; the original leaked the
			# lock on error, which would deadlock every other thread.
			Crawler.dbLock.acquire()
			try:
				DBHelper = db.DBOps()
				DBHelper.connDB()
				DBHelper.insertSite(p.url)
				DBHelper.closeDB()
			finally:
				Crawler.dbLock.release()

			# NOTE(review): += on a class attribute is not atomic across
			# threads, so this count may drift slightly under contention.
			Crawler.ns += 1
			with open('dbadded.log', 'a+') as f:
				f.write(p.url + " added to db\n")

			for link in p.links:
				Crawler.tocrawl.put(link)

	@staticmethod
	def error(e, url):
		"""pageOps error hook: append the failed URL and the error to errors.log."""
		with open('errors.log', 'a+') as f:
			f.write("{0} - {1}\n".format(url, e))

	def crawlPage(self, url):
		"""Start crawling url; results arrive via Crawler.callback / Crawler.error."""
		p = pageOps.pageOps(url, Crawler.callback, Crawler.error)
		self.nc += 1

	def run(self):
		"""Main loop: pop URLs from the shared queue and crawl unseen ones
		until stop() is called or the queue stays empty for 10 seconds."""
		DBHelper = db.DBOps()
		DBHelper.connDB()
		Crawler.tocrawl.put(self.seed)
		# Defined before the loop so the ThreadError handler can requeue
		# safely; the original raised NameError if the error fired before
		# the first successful get().
		page = None
		while not self._stop:
			try:
				page = self.tocrawl.get(True, 10)
				self.lastPage = page
				if page:
					Crawler.dbLock.acquire()
					# try/finally so an exception inside crawlPage cannot
					# leave dbLock held forever.
					try:
						if not DBHelper.isSiteExists(page):
							self.crawlPage(page)
					finally:
						Crawler.dbLock.release()
			except threading.ThreadError as terr:
				with open('errors.log', 'a+') as f:
					f.write("{0}, waiting..({1})\n".format(terr, threading.activeCount()))
				if page is not None:
					Crawler.tocrawl.put(page)  # retry the URL we were handling
				time.sleep(5)  # public API; the original used private threading._sleep
			except Queue.Empty:
				# get() timed out: nothing left to crawl, shut this thread down.
				with open('errors.log', 'a+') as f:
					f.write("No site left to crawl!\n")
				break
		DBHelper.closeDB()
		self._stop = True

	def getStatus(self):
		"""Return a one-line progress summary, or a done message after stop."""
		if self._stop:
			return "Crawler Done!"
		return "Threads:{0} Success:{1}/{2} Crawling:{3}".format(threading.activeCount(), Crawler.ns, self.nc, self.lastPage)

	def stop(self):
		"""Ask run() to exit after its current iteration (cooperative stop)."""
		self._stop = True

	def done(self):
		"""Return True once the crawler has stopped."""
		return self._stop

	def __init__(self, seed):
		"""Create a crawler thread that starts from the given seed URL."""
		threading.Thread.__init__(self)
		self.nc = 0          # pages this thread has started crawling
		self.lastPage = ""   # most recent URL taken from the queue
		self._stop = False   # cooperative shutdown flag
		self.seed = seed
		Crawler.ns = 0       # reset the shared success counter for a fresh run
		# NOTE(review): Crawler.lock is never used in this file (dbLock is);
		# kept only in case external code references it -- verify and remove.
		Crawler.lock = threading.Lock()

		# Truncate both log files so each run starts clean.
		open('errors.log', 'w').close()
		open('dbadded.log', 'w').close()

