import Queue
import time
from collections import namedtuple
from math import floor
from multiprocessing.managers import SyncManager
#from multiprocessing import Pool
from multiprocessing.pool import ThreadPool as Pool
from threading import Lock, Thread, Event
import traceback
import sys
import signal
import logging

class CrawlerProcess_Base(object):
    """Shared failure-tracking behaviour for the feeder/collector loops.

    Keeps a count of consecutive failures and raises the ``try_stop``
    event once a subclass-defined threshold is exceeded; any success
    resets the streak and withdraws the stop attempt.
    """

    def __init__(self, queue, stop_flag, try_stop, active_tasks):
        self._fails = 0
        self._queue = queue
        self._active_tasks = active_tasks
        self._stop_flag = stop_flag
        self._try_stop = try_stop

    def _commonFail(self):
        # One more consecutive failure; past the limit, ask for a stop.
        self._fails += 1
        if self._fails <= self._maxFails():
            return
        self._try_stop.set()

    def _queueFail(self):
        # A queue timeout counts as a failure; back off before retrying.
        self._commonFail()
        time.sleep(self._queueTimeout())

    def _noFail(self):
        # Success: reset the failure streak and withdraw any stop attempt.
        self._fails = 0
        self._try_stop.clear()

    def _maxFails(self):
        # Subclasses define the tolerated number of consecutive failures.
        raise NotImplementedError

    def _queueTimeout(self):
        # Subclasses define the queue wait / backoff interval in seconds.
        raise NotImplementedError


class CrawledState(object):
    # Enumeration of outcomes for one crawled item (see CrawledData.state).
    DATA = 0     # crawl succeeded: kdata/nodes carry the result
    INVALID = 1  # item is invalid; the db marks it via invalidData()
    RETRY = 2    # transient failure; the db re-schedules it via toRetry()

# Result record produced by a worker: the outcome state (a CrawledState
# value), the crawled key, its payload, and the neighbours found from it.
CrawledData = namedtuple('CrawledData', ['state','key','kdata','nodes'])
# One discovered neighbour: its key and the data associated with it.
CrawledDataNode = namedtuple('CrawledDataNode', ['key','data'])

import cProfile

class CrawlerCollector(CrawlerProcess_Base):
    """Drains worker results from the output queue and persists them.

    Runs in its own thread; loops until ``stop_flag`` is set or an
    unexpected error occurs, and signals ``try_stop`` on the way out.
    """

    def __init__(self, queue, stop_flag, try_stop, active_tasks, db):
        CrawlerProcess_Base.__init__(self, queue, stop_flag, try_stop, active_tasks)
        self._db = db

    def __call__(self):
        try:
            while not self._stop_flag.is_set():
                try:
                    data = self._queue.get(timeout=self._queueTimeout())
                    # One in-flight task has completed its round trip.
                    self._active_tasks.decr()
                    if data is not None:
                        self._toDb(data)
                        self._noFail()
                except Queue.Empty:
                    self._queueFail()
        except Exception:
            print("Collector chiude per eccezzione")
            self._stop_flag.set()
            # Equivalent to the manual sys.exc_info() unpack + print_exception.
            traceback.print_exc(file=sys.stderr)
        print("Collector prova a fermare")
        self._try_stop.set()

    def _maxFails(self):
        # Subclasses define the tolerated number of consecutive failures.
        raise NotImplementedError

    def _queueTimeout(self):
        # Subclasses define the queue wait interval in seconds.
        raise NotImplementedError

    def _toDb(self, data):
        """Persist one CrawledData record according to its state."""
        key = data.key
        if data.state == CrawledState.DATA:
            key_data = data.kdata
            for n in data.nodes:
                # Schedule each discovered neighbour and record the edge.
                self._db.addToVisit(n.key)
                self._db.addEdge(key, n.key, key_data)
            self._db.addVisited(key, key_data)
        elif data.state == CrawledState.INVALID:
            self._db.invalidData(key)
        elif data.state == CrawledState.RETRY:
            self._db.toRetry(key)


class CrawlerFeeder(CrawlerProcess_Base):
    """Pulls keys to visit from the db and feeds them to the input queue.

    Runs in its own thread; loops until ``stop_flag`` is set or an
    unexpected error occurs, and signals ``try_stop`` on the way out.
    """

    def __init__(self, inq, stop_flag, try_stop, active_tasks, db):
        CrawlerProcess_Base.__init__(self, inq, stop_flag, try_stop, active_tasks)
        self._db = db

    def __call__(self):
        try:
            while not self._stop_flag.is_set():
                data = self._fromDb()
                if data is not None:
                    # Retry the put until it succeeds or we are stopped.
                    while not self._stop_flag.is_set():
                        try:
                            self._queue.put(data, timeout=self._queueTimeout())
                            self._noFail()
                            # Count the task as in flight only once enqueued.
                            self._active_tasks.incr()
                            break
                        except Queue.Full:
                            self._queueFail()
                else:
                    self._fromDbFail()
        except Exception:
            print("Feeder chiude per eccezzione")
            self._stop_flag.set()
            # Equivalent to the manual sys.exc_info() unpack + print_exception.
            traceback.print_exc(file=sys.stderr)
        print("Feeder prova a fermare")
        self._try_stop.set()

    def _fromDbFail(self):
        # An empty db read counts as a failure; back off before polling again.
        self._commonFail()
        time.sleep(self._dbTimeout())

    def _maxFails(self):
        # Subclasses define the tolerated number of consecutive failures.
        raise NotImplementedError

    def _queueTimeout(self):
        # Subclasses define the queue wait interval in seconds.
        raise NotImplementedError

    def _fromDb(self):
        # Next key to crawl, or None when nothing is currently available.
        return self._db.getToVisit()

    def _dbTimeout(self):
        # Subclasses define the db polling backoff in seconds.
        raise NotImplementedError


class CrawlerWorker(object):
    """Pool task: consumes items from the input queue, processes each via
    ``_run`` and pushes the results onto the output queue until
    ``stop_flag`` is set."""

    def __init__(self):
        self._fails = 0

    def __call__(self, inq, outq, stop_flag):
        try:
            self._fails = 0
            while not stop_flag.is_set():
                try:
                    data = inq.get(timeout=self._queueTimeout())
                    self._noFail()
                    if data is not None:
                        res = self._run(data)
                        # Retry the put until it succeeds or we are stopped.
                        while not stop_flag.is_set():
                            try:
                                outq.put(res, timeout=self._queueTimeout())
                                self._noFail()
                                break
                            except Queue.Full:
                                self._queueFail()
                except Queue.Empty:
                    self._queueFail()
        except KeyboardInterrupt:
            pass
        except Exception:
            print("Worker chiude per eccezzione")
            # BUG FIX: stop_flag is a parameter; this class has no
            # self._stop_flag attribute, so the original raised
            # AttributeError inside the handler.
            stop_flag.set()
            exc_type, exc_value, exc_traceback = sys.exc_info()
            # BUG FIX: the original referenced misspelled names
            # (exc_tpye, ecx_value) which raised NameError here.
            logging.info('Type %s   Value %s', exc_type, exc_value)
            traceback.print_exception(exc_type, exc_value, exc_traceback, file=sys.stderr)
        print("Worker esce")

    def _run(self, data):
        # BUG FIX: callers invoke self._run(data); the abstract signature
        # must accept the item to process. Subclasses implement the crawl.
        raise NotImplementedError

    def _queueTimeout(self):
        # Subclasses define the queue wait / backoff interval in seconds.
        raise NotImplementedError

    def _queueFail(self):
        self._fails += 1
        time.sleep(self._queueTimeout())

    def _noFail(self):
        self._fails = 0

class SharedCounter(object):
    """Thread-safe integer counter used to track in-flight crawl tasks."""

    def __init__(self):
        self._lock = Lock()
        self._counter = 0

    def incr(self):
        with self._lock:
            self._counter += 1

    def decr(self):
        with self._lock:
            self._counter -= 1

    def value(self):
        with self._lock:
            return self._counter

    def reset(self):
        # BUG FIX: take the lock here too, like every other accessor, so a
        # reset cannot interleave with a concurrent incr/decr.
        with self._lock:
            self._counter = 0

class FakeCounter(object):
    """Lock-free stand-in for SharedCounter for single-threaded use."""

    def __init__(self):
        self._counter = 0

    def incr(self):
        self._counter += 1

    def decr(self):
        # BUG FIX: the original incremented here as well, so the counter
        # never returned to zero and completion could never be detected.
        self._counter -= 1

    def value(self):
        return self._counter

    def reset(self):
        # Added for interface parity with SharedCounter: CrawlerBase calls
        # reset() on whichever counter implementation it is given.
        self._counter = 0

class CrawlerBase(object):
    """Wires together a worker pool, a feeder thread and a collector thread.

    Subclasses provide the concrete worker/feeder/collector factories via
    _getWorker/_getFeeder/_getCollector.
    """

    def __init__(self, num_processes):
        self._num_processes = num_processes
        # Bounded queues: keep a small backlog proportional to the pool size.
        # floor() returns a float; queue sizes should be real ints.
        self._inqueue_size = int(floor(self._num_processes * 1.5)) + 1
        self._outqueue_size = int(floor(self._num_processes * 2.0)) + 1

        self._manager = SyncManager()
        self._manager.start(self._init_process)
        self._pool = None
        self._inq = Queue.Queue(self._inqueue_size)
        self._outq = Queue.Queue(self._outqueue_size)
        self._stop_flag = Event()
        self._collector_try_stop = Event()
        self._feeder_try_stop = Event()
        self._active_tasks = SharedCounter()

        self._feeder_thread = None
        self._collector_thread = None

    def _init_process(self):
        # Child processes must ignore Ctrl-C; the parent drives shutdown.
        signal.signal(signal.SIGINT, signal.SIG_IGN)

    def start(self, db):
        """Start workers, feeder and collector, then block until the crawl
        finishes; flushes the db before returning."""
        self._pool = Pool(self._num_processes)
        self._stop_flag.clear()
        self._collector_try_stop.clear()
        self._feeder_try_stop.clear()
        self._active_tasks.reset()

        for _ in range(self._num_processes):
            w = self._getWorker()
            self._pool.apply_async(w, (self._inq, self._outq, self._stop_flag))
        self._pool.close()

        feeder = self._getFeeder(self._inq, self._stop_flag, self._feeder_try_stop, self._active_tasks, db)
        self._feeder_thread = Thread(target=feeder, args=())

        collector = self._getCollector(self._outq, self._stop_flag, self._collector_try_stop, self._active_tasks, db)
        self._collector_thread = Thread(target=collector, args=())

        self._feeder_thread.start()
        self._collector_thread.start()

        self._run()
        db.flush()

    def _run(self):
        # Wait until both the feeder and the collector have asked to stop
        # AND no task is still in flight, then shut everything down.
        while not self._stop_flag.is_set():
            self._feeder_try_stop.wait()
            self._collector_try_stop.wait()

            # Grace period: lets in-flight items drain and avoids a busy
            # spin while the try_stop flags remain set.
            time.sleep(20)
            if self._feeder_try_stop.is_set() and \
                    self._collector_try_stop.is_set() and \
                    self._active_tasks.value() == 0:
                break

        self.stop()

    def stop(self):
        """Signal every component to stop and wait for all of them."""
        self._stop_flag.set()
        self._feeder_try_stop.set()
        self._collector_try_stop.set()

        self._pool.join()
        self._feeder_thread.join()
        self._collector_thread.join()

    def _getWorker(self):
        # Subclasses return a CrawlerWorker instance.
        raise NotImplementedError

    def _getFeeder(self, inq, stop_flag, try_stop, active_tasks, db):
        # BUG FIX: signature now matches the five arguments passed in start().
        raise NotImplementedError

    def _getCollector(self, outq, stop_flag, try_stop, active_tasks, db):
        # BUG FIX: signature now matches the five arguments passed in start().
        raise NotImplementedError


# Documentation only: declares the db interface the crawler expects.
class CrawlerDbInterface(object):
    """Abstract persistence interface used by the crawler components.

    The feeder reads via getToVisit(); the collector writes via
    addToVisit/addEdge/addVisited/invalidData/toRetry; flush() is invoked
    once after the crawl finishes.
    """

    def flush(self):
        # Persist any buffered state; called once the crawl has completed.
        raise NotImplementedError
    
    def getToVisit(self):
        # Return the next key to crawl, or None when nothing is pending.
        raise NotImplementedError

    def addToVisit(self, data):
        # Schedule key `data` for a future visit.
        raise NotImplementedError

    def isVisited(self, data):
        # Presumably returns whether `data` was already crawled; only
        # referenced from commented-out collector code — TODO confirm.
        raise NotImplementedError

    def addVisited(self, key, data):
        # Record that `key` was crawled successfully with payload `data`.
        raise NotImplementedError

    def addEdge(self, n1, n2, data):
        # Record an edge from node n1 to node n2 carrying `data`.
        raise NotImplementedError

    def invalidData(self, data):
        # Mark key `data` as invalid (CrawledState.INVALID outcome).
        raise NotImplementedError

    def toRetry(self, data):
        # Re-schedule key `data` after a transient failure
        # (CrawledState.RETRY outcome).
        raise NotImplementedError

