#!/usr/bin/env python

from crawler_dblp import DBLPCrawler
from crawler_db import CrawlerDbTcIndexed
from crawler_utils import *
from threading import Thread, Event
import argparse
import logging
import signal
import sys
import select

# Global crawler instance.
# Needed by the stopCrawling signal handler.
_CRAWLER=None

class printStatsOnCtrlD(object):
    """Invoke a callback whenever an EOF (Ctrl-D) is read on stdin.

    A daemon thread polls stdin with epoll; when stdin becomes readable
    but yields no data (EOF), the callback *fun* is called.
    """

    def __init__(self, fun):
        self.stop_ev = Event()
        self.t = Thread(target=self.runOnEOF, args=[fun, self.stop_ev])
        # BUG FIX: was 'self.t.deamon' — a typo that silently created an
        # unused attribute and left the thread non-daemonic.
        self.t.daemon = True

    def runOnEOF(self, fun, stop):
        """Poll stdin until *stop* is set; call *fun* on each EOF."""
        epoll = select.epoll()
        stdin_no = sys.stdin.fileno()
        epoll.register(stdin_no, select.EPOLLIN)
        while not stop.is_set():
            # 1-second timeout so the stop event is re-checked regularly.
            events = epoll.poll(1, 1)
            for fileno, event in events:
                if fileno == stdin_no and event & select.EPOLLIN \
                       and len(sys.stdin.readlines()) == 0:
                    # Readable but no data available -> EOF (Ctrl-D).
                    fun()
        epoll.unregister(stdin_no)
        epoll.close()

    def start(self):
        """Start the background polling thread."""
        self.t.start()

    def stop(self):
        """Ask the polling thread to exit and wait for it to finish."""
        self.stop_ev.set()
        self.t.join()

def stopCrawling(signum, frame):
    """Signal handler: log the shutdown request and stop the crawler."""
    logging.info('Stopping...')
    _CRAWLER.stop()

def posInt(string):
    """argparse type: parse *string* as a non-negative integer.

    Raises argparse.ArgumentTypeError if the value is not an integer
    or is negative.
    """
    try:
        value = int(string)
    # Narrowed from a bare 'except:', which would also have swallowed
    # KeyboardInterrupt and SystemExit.
    except (TypeError, ValueError):
        raise argparse.ArgumentTypeError("'" + string + "' must be an integer")
    if value < 0:
        raise argparse.ArgumentTypeError("'" + string + "' must be greater or equal to zero")
    return value

def strictPosInt(string):
    """argparse type: parse *string* as a strictly positive integer.

    Raises argparse.ArgumentTypeError if the value is not an integer
    or is not greater than zero.
    """
    try:
        value = int(string)
    # Narrowed from a bare 'except:', which would also have swallowed
    # KeyboardInterrupt and SystemExit.
    except (TypeError, ValueError):
        raise argparse.ArgumentTypeError("'" + string + "' must be an integer")
    if value <= 0:
        # BUG FIX: message previously said "greater or equal to zero",
        # which contradicts the strictly-positive check.
        raise argparse.ArgumentTypeError("'" + string + "' must be greater than zero")
    return value

def configLogging(args):
    """Configure the root logger from parsed command-line arguments.

    Uses args.loglevel (case-insensitive standard level name) and
    args.logfile (None means log to the console instead of a file).
    """
    loglevel = getattr(logging, args.loglevel.upper())
    # Renamed from 'format' to avoid shadowing the builtin.
    fmt = '%(asctime)s %(levelname)s %(message)s'

    # The '<>' operator is Python-2-only; 'is not None' works everywhere.
    if args.logfile is not None:
        logging.basicConfig(filename=args.logfile, format=fmt,
                            datefmt='%m-%d-%Y %I:%M:%S', level=loglevel)
    else:
        logging.basicConfig(format=fmt, datefmt='%m-%d-%Y %I:%M:%S',
                            level=loglevel)

if __name__ == '__main__':

    # Ignore termination signals during setup; the real handlers are
    # installed just before the crawler starts.
    signal.signal(signal.SIGINT, signal.SIG_IGN)
    signal.signal(signal.SIGTERM, signal.SIG_IGN)

    parser = argparse.ArgumentParser(description='Crawler for dblp.uni-trier.de')
    parser.add_argument('filename', action='store',
                        help='path to the database')
    parser.add_argument('-k', '--key', action='store', dest='key',
                        help='key to enter in the database as to visit')
    parser.add_argument('-m', '--max', action='store', dest='max', type=posInt,
                        help='number of keys to retrieve in the execution')
    parser.add_argument('-p', '--processes', action='store', dest='processes', type=strictPosInt, default=1,
                        help='number of workers to run in parallel')
    parser.add_argument('-c', '--cache', action='store', dest='cache', type=posInt, default=0,
                        help='size of the cache')
    parser.add_argument('-f', '--flush', action='store', metavar='TIME', dest='flush', type=posInt, default=691,
                        help='interval (in seconds) between flush. 0 disable (default %(default)s)')
    parser.add_argument('-s', '--statistics', action='store',  metavar='TIME', dest='statistics', type=posInt, default=0,
                        help='interval (in seconds) between prints of statistics. 0 disable (default %(default)s).\n'\
                             'Statistics can be printed anytime with SIGUSR1')
    parser.add_argument('-o', '--optimize', action='store',  metavar='TIME', dest='optimize', type=posInt, default=3307,
                        help='interval (in seconds) between optimization. 0 disable (default %(default)s)')
    group = parser.add_argument_group('logging')
    group.add_argument('--loglevel', action="store", dest='loglevel', default='INFO', choices=['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'],
                       help='Log level to use. (default: %(default)s)')
    # BUG FIX: the option string was misspelled '--lofile'.
    group.add_argument('--logfile', action="store",  metavar='FILENAME', dest='logfile',
                       help='File onto save logs. If not specified logs are printed on stdout')

    args = parser.parse_args()

    configLogging(args)

    maxdnld = args.max
    cachesize = args.cache
    dbfile = args.filename
    processes = args.processes

    flush_time = args.flush
    stats_time = args.statistics
    optimize_time = args.optimize

    db = CrawlerDbTcIndexed(dbfile, maxdownload=maxdnld, cache=cachesize)

    if args.key is not None:
        db.addToVisit(args.key)

    # Periodic maintenance helpers; each is disabled when its interval is 0.
    _STATS  = ContinuousPrintDbStats(db, stats_time)
    flush = ContinuousFlushDb(db, flush_time)
    optimize = ContinuousOptimizeDb(db, optimize_time)
    ctrld = printStatsOnCtrlD(_STATS.fun)

    _CRAWLER = DBLPCrawler(processes)

    # Print initial statistics once before crawling starts.
    _STATS.fun()

    if stats_time > 0:
        _STATS.start()
    if flush_time > 0:
        flush.start()
    if optimize_time > 0:
        optimize.start()

    ctrld.start()

    # Everything is running: let SIGINT/SIGTERM stop the crawler cleanly.
    signal.signal(signal.SIGINT, stopCrawling)
    signal.signal(signal.SIGTERM, stopCrawling)

    _CRAWLER.start(db)

    # Crawler has finished: tear down helpers and print final statistics.
    ctrld.stop()
    _STATS.stop()
    flush.stop()
    optimize.stop()

    _STATS.fun()
