from crawler_base import *  # star import kept first so later stdlib imports win any name clash

import httplib
import logging
import sys
import traceback
import urllib2
from xml.sax import ContentHandler, parseString, make_parser

class HTTPDownloader(object):
    """Fetches pages over a persistent httplib connection to a single host.

    The class itself is handed around as a ``downloader_factory`` callable
    (see DBLPCrawler._getWorker), so ``__init__(host)`` is the factory
    signature every downloader implementation must honour.
    """

    def __init__(self, host):
        # 15s timeout so a stalled DBLP response cannot hang a worker forever.
        self.conn = httplib.HTTPConnection(host, timeout=15)
        #self.conn.set_debuglevel(10)

    def getPage(self, url):
        """Issue a GET for *url* (path only) and return the raw HTTPResponse."""
        self.conn.request("GET", url)
        res = self.conn.getresponse()
        return res

    def close(self):
        """Close the underlying connection.

        DBLPWorker._run calls close() before recreating the downloader when
        the connection gets out of sync (CannotSendRequest); without this
        method that recovery path raised AttributeError instead.
        """
        self.conn.close()


class HTTPDownloaderUrllib(object):
    """Downloader backed by urllib2; opens a fresh connection per request.

    Drop-in alternative to HTTPDownloader: same factory signature
    ``__init__(host)`` and the same getPage/close interface.
    """

    def __init__(self, host):
        self.host = 'http://'+host

    def getPage(self, url):
        """GET host+url and return the file-like response object."""
        res = urllib2.urlopen(self.host+url)
        return res

    def close(self):
        # No persistent connection to tear down; provided so this class is
        # interchangeable with HTTPDownloader (DBLPWorker may call close()
        # on whatever the factory produced).
        pass


class DBLPXmlAuthorsHandler(ContentHandler):
    """SAX handler that collects co-author entries from a DBLP '/xc' page.

    After parsing, ``is_valid`` reports whether a <coauthors> root element
    was seen (and no malformed <author> was encountered afterwards), while
    ``coauthors`` holds one CrawledDataNode per well-formed <author>.
    """

    def __init__(self):
        self.reset()

    def reset(self):
        """Clear accumulated state so the handler can parse the next page."""
        self.is_valid = False
        self.coauthors = []

    def startElement(self, name, attrs):
        if name == 'coauthors':
            self.is_valid = True
        elif name == 'author':
            try:
                urlpt, count = attrs['urlpt'], attrs['count']
            except KeyError:
                # Malformed <author> element: flag the whole page invalid.
                self.is_valid = False
            else:
                self.coauthors.append(CrawledDataNode(urlpt, count))

class DBLPWorker(CrawlerWorker):
    """Crawler worker that downloads and parses DBLP co-author XML pages."""

    def __init__(self, downloader_factory):
        CrawlerWorker.__init__(self)
        self._host = 'dblp.uni-trier.de'
        #self._host = 'dblp.org'
        self._downloader_factory = downloader_factory
        self._downloader = None  # created lazily in __call__, in the worker process

        self._handler = DBLPXmlAuthorsHandler()
        self._parser = make_parser()
        self._parser.setContentHandler(self._handler)

    def __call__(self, inq, outq, stop_flag):
        # Build the downloader (and its socket) in the process that actually
        # runs the worker, not in the parent that constructed this object.
        self._downloader = self._downloader_factory(self._host)
        CrawlerWorker.__call__(self, inq, outq, stop_flag)

    def _run(self, key):
        """Fetch and parse the co-author page for *key*.

        Returns a CrawledData in state DATA (with the co-author list),
        INVALID (page had no <coauthors> root), or RETRY (connection
        trouble or unexpected failure).
        """
        path = '/rec/pers/'+key+'/xc'
        try:
            xmldata = self._downloader.getPage(path)
            self._handler.reset()
            self._parser.parse(xmldata)
            if self._handler.is_valid:
                return CrawledData(CrawledState.DATA, key, '', self._handler.coauthors)
            return CrawledData(CrawledState.INVALID, key, None, None)
        # Was a bare `CannotSendRequest`, an undefined name: any exception
        # reaching this handler raised NameError instead of being matched.
        except httplib.CannotSendRequest:
            # Connection got out of sync: rebuild it and requeue the key.
            self._downloader.close()
            self._downloader = self._downloader_factory(self._host)
            print("Ricreo connessione")
            return CrawledData(CrawledState.RETRY, key, None, None)
        except Exception:
            # Log the unexpected failure but keep the worker alive; requeue
            # the key instead of silently dropping it (the original fell
            # through here and returned None).
            traceback.print_exc(file=sys.stdout)
            return CrawledData(CrawledState.RETRY, key, None, None)

    def _queueTimeout(self):
        # Back off linearly with consecutive failures, never below 30ms.
        return max(.03, self._fails*(.03))


class DBLPCrawlerFeeder(CrawlerFeeder):
    """Feeder that moves pending DBLP keys from the DB into the input queue."""

    def __init__(self, inq, stop_flag, try_stop, active_tasks, db):
        CrawlerFeeder.__init__(self, inq, stop_flag, try_stop, active_tasks, db)

    def _maxFails(self):
        # Give up after 20 consecutive failed attempts.
        return 20

    def _queueTimeout(self):
        # 30ms base poll interval, growing linearly with consecutive failures.
        return self._fails * .03 if self._fails else .03

    def _dbTimeout(self):
        # DB polling backs off at the same rate as the queue.
        return self._queueTimeout()


class DBLPCrawlerCollector(CrawlerCollector):
    """Collector that drains worker results from the output queue into the DB."""

    def __init__(self, outq, stop_flag, try_stop, active_tasks, db):
        CrawlerCollector.__init__(self, outq, stop_flag, try_stop, active_tasks, db)

    def _maxFails(self):
        # Give up after 20 consecutive failed attempts.
        return 20

    def _queueTimeout(self):
        # 30ms base poll interval, growing linearly with consecutive failures.
        return self._fails * .03 if self._fails else .03
        #return max(.005, self._fails*(.005))


class DBLPCrawler(CrawlerBase):
    """Concrete crawler wiring the DBLP worker, feeder and collector together."""

    def __init__(self, num_processes):
        CrawlerBase.__init__(self, num_processes)

    def _getWorker(self):
        # The HTTPDownloader class itself serves as the downloader factory;
        # each worker instantiates it with the DBLP host in its own process.
        return DBLPWorker(HTTPDownloader)

    def _getFeeder(self, inq, stop_flag, try_stop, active_tasks, db):
        return DBLPCrawlerFeeder(inq, stop_flag, try_stop, active_tasks, db)

    def _getCollector(self, inq, stop_flag, try_stop, active_tasks, db):
        return DBLPCrawlerCollector(inq, stop_flag, try_stop, active_tasks, db)

