"""
Classes for data pipelines.
"""

# Imports
# -------
# Stdlib
import datetime
import random
import struct
import sys
import threading
import time
# Third-party
from amqplib import client_0_8 as amqp
from netlogger.nllog import DoesLogging

# Constants
# ---------

# Logger name
# Base of the netlogger hierarchy; classes append their own suffix,
# e.g. '.amqpbench.WorkMessage', '.amqpbench.worker'.
LOG_NAME = '.amqpbench'

# Master/worker exchange and queues
WORK_EX = "worker_exchange"  # direct exchange carrying work/result messages
WORK_EX_TYPE = "direct"
WORK_MSG_TYPE = "WORK"  # AMQP message 'type' field set on WorkMessage
# Shared declare options: durable, not auto-deleted.
WORK_QUEUE_KW = dict(durable=True, auto_delete=False)
RESULT_QUEUE = "RESULT"  # final queue, consumed by MasterConsumer
STAGE_QUEUE_PREFIX = "STAGE" # prefix of queue name STAGE<n>

# Management (broadcast) exchange
MGMT_EX = "mgmt"  # fanout exchange for control broadcasts
MGMT_DONE_MSG = "DONE"  # body of the shutdown broadcast message

# Functions
# ---------

def declare_main_exchange(channel):
    """Idempotently declare the direct exchange shared by the work
    producers and workers.

    Parameters:
      - channel: An open AMQP channel.
    """
    kwargs = dict(WORK_QUEUE_KW)
    kwargs.update(exchange=WORK_EX, type=WORK_EX_TYPE)
    channel.exchange_declare(**kwargs)

def declare_mgmt_exchange(channel):
    """Idempotently declare the management broadcast exchange:
    a fanout exchange, non-durable and auto-deleted when unused.

    Parameters:
      - channel: An open AMQP channel.
    """
    channel.exchange_declare(exchange=MGMT_EX,
                             type="fanout",
                             durable=False,
                             auto_delete=True)

def declare_mgmt_queue(channel):
    """Declare a server-named, exclusive, transient queue for
    management broadcasts.

    Returns the broker-assigned queue name (the first element of the
    declare result tuple).
    """
    declared = channel.queue_declare("",
                                     durable=False,
                                     exclusive=True,
                                     auto_delete=True)
    return declared[0]

def declare_work_queue(channel, name):
    """Idempotently declare a durable work queue.

    Parameters:
      - channel: An open AMQP channel.
      - name (str): Name of the queue to declare.
    """
    opts = dict(WORK_QUEUE_KW)
    channel.queue_declare(queue=name, **opts)

# Classes
# -------

class Sequence:
    """Thread-safe monotonically increasing sequence number.

    Starts at 0; the first call to next() returns 1.
    """
    def __init__(self):
        # Current value; guarded by the lock below.
        self.__seq = 0
        self.__lck = threading.Lock()

    def next(self):
        """Increment and return the next sequence number.

        The returned value is captured while still holding the lock:
        the original read self.__seq after release(), so a concurrent
        increment could make two callers both observe the later value.
        The lock is also released in a finally block so an exception
        cannot leave it held.
        """
        self.__lck.acquire()
        try:
            self.__seq += 1
            value = self.__seq
        finally:
            self.__lck.release()
        return value

class Progress:
    """
    Displays an in-place text progress bar on stdout when running the
    master consumer, e.g. ``[#####     ] 50% complete``.
    """
    def __init__(self, count=10, width=50):
        """Initialize and render the bar at 0%.

        Parameters:
          - count (int): Number of items representing 100%. Values
                        below 1 are clamped to 1 so update() never
                        divides by zero (the original crashed on 0).
          - width (int): Total bar width in characters, including the
                        enclosing square brackets.
        """
        self.maxCount = max(count, 1)
        self.width = width
        self._full = self.width - 2 # [square brackets]
        self.currentCount = 0
        self.barstring = ''
        # Cache the last rendered '#' count so update()/write() only
        # rebuild/redraw when the bar visibly changes.
        self.last_hnum, self._dirty = -1, True
        self.update(0)

    def update(self, newCount):
        """Recompute the bar string for newCount completed items.

        Rebuilds (and marks the bar dirty) only when the number of
        visible '#' characters actually changes.
        """
        hnum = int(1. * self._full * newCount / self.maxCount)
        if hnum != self.last_hnum:
            pct = 100.0 * newCount / self.maxCount
            self.barstring = '[' + '#'*hnum + ' '* (self._full-hnum) + ']' + \
                            ' %d%% complete' % pct
            self.last_hnum, self._dirty = hnum, True
        self.currentCount = newCount

    def write(self):
        """Draw the bar if it changed since the last write; the
        trailing '\\r' returns the cursor so the next write overdraws
        the bar in place.
        """
        if self._dirty:
            sys.stdout.write(self.barstring + '\r')
            sys.stdout.flush()
            self._dirty = False

    def done(self):
        """Finish the display: emit a newline so subsequent output
        does not overwrite the bar.

        Uses sys.stdout for consistency with write(), replacing the
        old print statement (which emitted the same '\\r' + newline).
        """
        sys.stdout.write('\r\n')
        sys.stdout.flush()

class WorkMessage(amqp.Message, DoesLogging):
    """Message representing a unit of work.

    Work parameters travel in the AMQP application headers
    ('exec_time_ms', 'prob_fail_pct', 'timestamps'); the body is
    zero-filled padding used only to control message size.
    """
    _SEQNO = Sequence() # shared generator for message_id sequence numbers
    _TSZ = 8 # size in bytes of one serialized timestamp (big-endian double)

    def __init__(self, exec_time=1.0, prob_fail=0.0, body_size=1024,
                 ts_list=None,
                 amqp_message=None):
        """Create a new message describing work to be done.

        Parameters:
          - exec_time (float): Time in seconds for the task to pretend
                               to execute the work.

          - prob_fail (float): Probability (0 <= p <= 1) of the task
                               failing for a given execution.

          - body_size (int): Size in bytes of the zero-filled message
                        body. (Not rounded, despite what an earlier
                        version of this docstring claimed.)

          - ts_list (list): List of timestamps, each seconds since
                            1/1/1970 as a double. Defaults to empty.
                            (The default is None rather than [] to
                            avoid sharing one mutable list between
                            calls.)

          - amqp_message (amqp.Message): If not None, ignore all
            the other parameters and encapsulate the contents
            of this messsage.
        """
        DoesLogging.__init__(self, LOG_NAME + '.WorkMessage')
        if amqp_message:
            # Re-wrap an existing message: copy its properties verbatim.
            props = amqp_message.properties.copy()
            if self._trace:
                self.log.trace("amqp.props", value=props)
            amqp.Message.__init__(self, **props)
        else:
            if ts_list is None:
                ts_list = []
            # serialize timestamps into a string
            ts_buf = "".join([struct.pack(">d", ts) for ts in ts_list])
            # set header fields
            # strangely, floats can't be handled, so convert them to ints
            hdr = { 'exec_time_ms' : int(1000*exec_time),
                    'prob_fail_pct' : int(prob_fail*100),
                    'timestamps' : ts_buf }
            # set body buffer (contents irrelevant; only size matters)
            body_buf = body_size * chr(0)
            # message id is seq. number
            seqno = self._SEQNO.next()
            msgid = "%d" % seqno
            # Create message
            amqp.Message.__init__(self, application_headers=hdr,
                                  body=body_buf,
                                  message_id=msgid,
                                  type=WORK_MSG_TYPE,
                                  timestamp=datetime.datetime.now())

    def add_timestamp(self, t=None):
        """Add a timestamp to the stored list.

        Parameters:

          - t (float): Timestamp, as seconds since 1/1/1970. If None,
                       the current time is used.
        """
        if t is None:
            t = time.time()
        # Append the packed double to the serialized header buffer.
        timestamps = self.application_headers[u'timestamps']
        timestamps += struct.pack(">d", t)
        self.application_headers[u'timestamps'] = timestamps

    @property
    def timestamp(self):
        """Most recently added work timestamp as a float (seconds
        since 1/1/1970), or None if none has been recorded.

        NOTE(review): this property shadows the AMQP 'timestamp'
        property passed to amqp.Message.__init__ -- attribute access
        yields the last work timestamp, not the AMQP one; confirm
        that is intended.
        """
        buf = self.application_headers[u'timestamps']
        if len(buf) < self._TSZ:
            return None
        # struct.unpack returns a 1-tuple; extract the float itself.
        # (The original returned the tuple -- a bug.)
        return struct.unpack('>d', buf[-self._TSZ:])[0]

    @property
    def seqno(self):
        """Sequence number (int) assigned at creation via message_id."""
        return int(self.message_id)

    @property
    def prob_fail(self):
        """Failure probability in [0, 1] (header stores a percent)."""
        return self.application_headers['prob_fail_pct'] / 100.

    @property
    def exec_time(self):
        """Simulated execution time in seconds (header stores ms)."""
        return self.application_headers['exec_time_ms'] / 1000.

class HasConnection(DoesLogging):
    """Base class holding one AMQP connection and channel, with shared
    management-queue setup and close/teardown logic.
    """
    def __init__(self, whoami, conn_kw):
        """Open the connection and a channel.

        Parameters:
          - whoami (str): Component name, appended to LOG_NAME to form
                          the logger name.
          - conn_kw (dict): Keywords passed to amqp.Connection().
        """
        DoesLogging.__init__(self, LOG_NAME + "." + whoami)
        self.conn = amqp.Connection(**conn_kw)
        self.channel = self.conn.channel()

    def declare_mgmt_queue(self):
        """Declare and bind a private queue on the management fanout
        exchange and start consuming it with self.done() as the
        callback (subclasses must define done(msg)).
        """
        declare_mgmt_exchange(self.channel)
        self.mq = declare_mgmt_queue(self.channel)
        self.channel.queue_bind(queue=self.mq, exchange=MGMT_EX)
        self.channel.basic_consume(queue=self.mq,
                                   callback=self.done, no_ack=True)
    def close(self):
        """Close the channel (tolerating channel-level errors), then
        close the connection.
        """
        self.log.info("channel.close.start")
        try:
            self.channel.close()
            self.log.info("channel.close.end", status=0)
        except amqp.exceptions.AMQPChannelException, e:
            # Channel may already be closed; log and continue so the
            # connection below is still closed.
            self.log.warn('channel.close.end', status=-1, 
                          msg='Error closing channel: %s' % e[1])
        self.log.info("conn.close.start")
        self.conn.close()
        self.log.info("conn.close.end", status=0)

class MasterProducer(HasConnection):
    """Produce 'work' items and place them on
    the given queue for workers to process.
    """
    def __init__(self, q_first, **conn_kw):
        """Create master that produces work items.

        Parameters:
          - q_first (str): Routing key (queue name) of the first
                           pipeline stage.
          - conn_kw (kw): amqp.Connection() keywords.
        """
        HasConnection.__init__(self, "master_producer", conn_kw)
        # Make sure both exchanges exist before we publish anything.
        declare_main_exchange(self.channel)
        declare_mgmt_exchange(self.channel)
        self._queue_name = q_first

    def send_work(self, count=10, delay=0.0, **msg_param):
        """Produce some number of messages.

        Parameters:
          - count (int): How many messages to produce

          - delay (float): How long to sleep between messages

          - msg_param (kw): Additional keywords passed
                            to WorkMessage constructor.
        """
        route = self._queue_name
        for seq in xrange(count):
            is_last = (seq == count - 1)
            if self._dbg:
                self.log.debug("work.send.start", route=route,
                               final=is_last)
            self.channel.basic_publish(WorkMessage(**msg_param),
                                       exchange=WORK_EX,
                                       routing_key=route)
            if self._dbg:
                self.log.debug("work.send.end", route=route)
            if delay > 0:
                time.sleep(delay)
        # tell all workers: done (deliberately disabled; the consumer
        # broadcasts the hangup instead)
        #amsg = amqp.Message(MGMT_DONE_MSG, content_type='text/plain')
        #self.channel.basic_publish(amsg, exchange=MGMT_EX)

class MasterConsumer(HasConnection,threading.Thread):
    """Consume the final 'result' message from the data pipeline
    workers.

    Runs as a thread: run() blocks in channel.wait(), dispatching to
    got_result() until the expected number of results has arrived,
    then a DONE message is broadcast on the management exchange, which
    (via this object's own management queue) triggers done().
    """
    def __init__(self, num_expected=1, **conn_kw):
        """Constructor.

        Parameters:

          - num_expected (int): Number of result messages to consume
                                before broadcasting the hangup.

          - conn_kw (kw): amqp.Connection() keywords
        """
        HasConnection.__init__(self, "master_consumer", conn_kw)
        threading.Thread.__init__(self)
        # Management queue
        self.declare_mgmt_queue()
        # Result queue
        declare_main_exchange(self.channel)
        q = RESULT_QUEUE
        declare_work_queue(self.channel, q)
        self.channel.queue_bind(queue=q, exchange=WORK_EX, routing_key=q)
        self.tag = self.channel.basic_consume(queue=q,
                                              callback=self.got_result,
                                              no_ack=False)
        self._q = q
        self._done = False
        # Received vs. expected result counts.
        self._ne, self._n = num_expected, 0
        # Progress bar sized to the expected count; draw it at 0%.
        self._progress = Progress(self._ne)
        self._progress.write()

    def run(self):
        """Consume from result queue and report results.
        """
        while not self._done:
            self.log.debug("channel.wait.start")
            self.channel.wait()
            self.log.debug("channel.wait.end", status=0)

    def got_result(self, amsg):
        """Callback for one result message: ack it, advance the
        progress bar, and broadcast DONE when all results are in.
        """
        if self._trace:
            self.log.trace("got_result.start")
        # Wrap so WorkMessage accessors are available if needed.
        msg = WorkMessage(amqp_message=amsg)
        self._n += 1
        status=0
        self.channel.basic_ack(amsg.delivery_tag)
        self._progress.update(self._n)
        self._progress.write()
        self.log.debug('got_result', got=self._n, expect=self._ne)
        if self._n == self._ne:
            # All expected results received: broadcast hangup to the
            # workers (and to ourselves, via the management queue).
            self.log.debug('got_result.hangup', msg='Sending hangup')
            hmsg = amqp.Message(MGMT_DONE_MSG, content_type='text/plain')
            self.channel.basic_publish(hmsg, exchange=MGMT_EX)
        if self._trace:
            self.log.trace("got_result.end", status=status)

    def close(self):
        """Close the channel/connection and stop the run() loop."""
        self.log.debug('close.start')
        # XXX: ought to add in cancel code for tidyness but this works
        HasConnection.close(self)
        self._done = True
        self.log.debug('close.end')

    def done(self, msg_in):
        """Management-queue callback: DONE broadcast arrived; shut
        down and finish the progress display.
        """
        self.log.debug("done.start")
        self.close()
        self._progress.done()
        self.log.debug("done.end")


class Worker(threading.Thread, HasConnection):
    """Consume 'work' items from one or more queues
    and place the results back on a 'result' queue.

    Runs as a daemon thread: run() blocks in channel.wait(),
    dispatching to do_work() for work messages and to done() when a
    management broadcast arrives.
    """

    # Class-wide counter giving each worker a unique id for logging.
    WORKER_NUM = Sequence()

    def __init__(self, **conn_kw):
        """Create a new worker, connected to queues.

        Parameters:

          - conn_kw (kw): amqp.Connection() keywords
        """
        threading.Thread.__init__(self)
        self.daemon = True
        HasConnection.__init__(self, "worker", conn_kw)
        # Exchanges
        declare_main_exchange(self.channel)
        # Management queue
        self.declare_mgmt_queue()
        # Set flag for main event loop
        self._done = False
        # number
        self._num = self.WORKER_NUM.next()
        # Extra keyword merged into every log call to tag this worker.
        self._logkw = {'worker.id' : self._num}
        # delivery tags for queues
        self._tags = [ ]
        # mapping from input to output queue
        self._routes = { }

    def add_queue_pair(self, q_in, q_out):
        """Add queue to read from and write to.

        Declares both queues idempotently, binds the input queue to
        the work exchange, starts consuming it with do_work() as the
        callback, and records the input->output route used by
        do_work() to forward results.

        Parameters:

          - q_in (str): Queue from which to receive messages.

          - q_out (str): Queue on which to place results.
        """
        self.log.debug("queue_pair", q__in=q_in, q__out=q_out)
        # Create work queue
        declare_work_queue(self.channel, q_in)
        self.channel.queue_bind(queue=q_in, exchange=WORK_EX,
                                routing_key=q_in)
        tag = self.channel.basic_consume(queue=q_in,
                                         callback=self.do_work,
                                         no_ack=False)
        # Remember the consumer tag so close() can cancel it.
        self._tags.append(tag)
        # Create result queue
        declare_work_queue(self.channel, q_out)
        # update mapping
        self._routes[q_in] = q_out


    def do_work(self, amsg):
        """Do the 'work' of a message, and add the 'result' to
        the output queue.

        The 'work' is a sleep of the message's exec_time; with
        probability prob_fail it 'fails', in which case the message
        is neither acknowledged nor forwarded.
        """
        if self._dbg:
            self.log.debug("do_work.start", **self._logkw)
        # Convert to WorkMessage
        msg = WorkMessage(amqp_message=amsg)
        # Add time received.
        msg.add_timestamp()
        # Do 'work'
        if self._dbg:
            self.log.debug("work.start", sec=msg.exec_time, **self._logkw)
        if msg.exec_time > 0:
            time.sleep(msg.exec_time)
        # See if work failed.
        # NOTE(review): '<=' makes failure possible (though vanishingly
        # unlikely) even when prob_fail == 0, since random() can return
        # exactly 0.0; '<' would be exact -- confirm intent.
        failed = random.random() <= msg.prob_fail
        if self._dbg:
            self.log.debug("work.end", status=0, failed=failed, 
                           **self._logkw)
        # Acknowledge and post result if work didn't fail
        if not failed:
            if self._dbg:
                self.log.debug("work.ack", tag=amsg.delivery_tag)
            self.channel.basic_ack(amsg.delivery_tag)
            # Re-use input message for result message
            # Get routing key for next hop
            rkey = self._routes[amsg.delivery_info['routing_key']]
            # Send message
            if self._dbg:
                self.log.debug("result.publish", route=rkey, **self._logkw)
            self.channel.basic_publish(msg, exchange=WORK_EX, routing_key=rkey)
        if self._dbg:
            # exec_status: 0 on success, -1 on failure (bool indexes tuple)
            self.log.debug("do_work.end",  status=0,
                           exec_status=(0,-1)[failed], **self._logkw)

    def done(self, msg_in):
        """Management-queue callback: stop the event loop and close."""
        self._done = True
        self.log.debug("done.start", **self._logkw)
        self.close()
        self.log.debug("done.end", **self._logkw)

    def run(self):
        """Consume from source queues, put data in result queues,
        until told to stop.
        """
        while not self._done:
            self.log.debug("channel.wait.start", **self._logkw)
            self.channel.wait()
            self.log.debug("channel.wait.end", status=0, **self._logkw)
        # done!

    def close(self):
        """Close channel.

        Cancels all consumers registered in add_queue_pair(), then
        closes the channel and connection via HasConnection.close().
        """
        self.log.info("channel.cancel.start", **self._logkw)
        try:
            for tag in self._tags:
                self.channel.basic_cancel(tag)
            self.log.info("channel.cancel.end", status=0, **self._logkw)
        except KeyError, err:
            # Presumably raised by basic_cancel for an unknown tag --
            # verify against the amqplib implementation.
            self.log.info("channel.cancel.end", status=-1, **self._logkw)
        # NOTE(review): self.waiting is never read anywhere in this
        # file -- possibly vestigial.
        self.waiting = False
        HasConnection.close(self)
