using System;
using System.Text;
using Microsoft.Ccr.Core;
using System.Threading;


namespace mds.Queue.Generic
{
    /// <summary>
    /// QueueProcessor is a generic class for queueing items in memory for processing at a later time.
    /// Items queued will be dequeued in order and sent to the delegate you specify via the ProcessQueue
    /// property.  Alternatively, if your generic T is of type IQueueProcessable, it will fire the 
    /// ProcessItem delegate associated with the object queued.
    /// </summary>
    /// <remarks>	
    /// <ul>
    /// <li>Unconstrained: Default behavior, all tasks are queued with no constraints </li>
    /// <li>ConstrainQueueDepthDiscardTasks: Queue enforces maximum depth (specified at queue creation) and discards tasks enqueued after the limit is reached.</li>
    /// <li>ConstrainQueueDepthThrottleExecution: Queue enforces maximum depth (specified at queue creation) but does not discard any tasks. It forces the thread posting any tasks after the limit is reached, to sleep until the queue depth falls below the limit.</li>
    /// <li>ConstrainSchedulingRateDiscardTasks: Queue enforces the rate of task scheduling specified at queue creation and discards tasks enqueued after the current scheduling rate is above the specified rate. </li>
    /// <li>ConstrainSchedulingRateThrottleExecution: Queue enforces the rate of task scheduling specified at queue creation and forces the thread posting tasks to sleep until the current rate of task scheduling falls below the specified average rate. </li>    
    /// </ul>    
    /// </remarks>
    /// <typeparam name="T">The type of the objects you want to queue up to be processed</typeparam>
    public class QueueProcessor<T> : IDisposable, IReportable
    {
        private string queueName;
        protected Dispatcher dispatcher;
        protected DispatcherQueue dispatcherQueue;
        protected Port<T> port;
        protected Port<EmptyValue> teardownPort;
        protected int running = 0; // 0 == not started; flipped to 1 via Interlocked on first Enqueue
        protected int upperBound;

        // Reporting metrics. Counters are updated with Interlocked so they can be
        // mutated safely from multiple dispatcher worker threads.
        protected long totalSecondsProcessing = 0;
        private long totalItemsProcessed = 0;
        protected long totalTimesProcessed = 0;
        private long totalItemsQueued = 0;
        protected long totalItemsLost = 0;
        protected DateTime startTime;  // initialized on first enqueue
        protected bool disposing = false;
        protected int activateItems = 0; // batch size for MultipleItemReceive; 0 = process one item at a time



        /// <summary>
        /// The ProcessQueue property fires a delegate of this type when an item is dequeued
        /// </summary>
        /// <param name="referenceQueue">a reference to the original queue that did the processing,
        /// in case the item needs to be re-queued.
        /// </param>
        /// <param name="item">the item to process</param>
        public delegate void ProcessorDelegate(QueueProcessor<T> referenceQueue, T item);
        public ProcessorDelegate ProcessQueue;

        /// <summary>
        /// Fired when the count of processed items catches up with the count of enqueued items.
        /// </summary>
        public delegate void CompletedDelegate();
        public CompletedDelegate CompletedQueue;

        protected QueueProcessor() { }

        /// <summary>
        /// Gets the name of this queue (unique per-Dispatcher).
        /// </summary>
        public string QueueName
        {
            get { return queueName; }
        }

        /// <summary>
        /// Instantiates a QueueProcessor with the queueName specified, plus a threadCount of 1 and 
        /// no upper bound.
        /// </summary>
        /// <param name="queueName">The name of the queue, which needs to be unique per-Dispatcher.</param>
        public QueueProcessor(string queueName) :
            this(queueName, 1, TaskExecutionPolicy.Unconstrained, ThreadPriority.Highest, 0, 0) { }

        /// <summary>
        /// Instantiates a QueueProcessor with the queueName specified, with the threadCount specified, 
        /// plus no upper bound.
        /// </summary>
        /// <param name="queueName">The name of the queue, which needs to be unique per-Dispatcher.
        /// Keep this in mind if the threadCount is 0, as you're sharing a Dispatcher with potentially
        /// many QueueProcessors.
        /// </param>
        /// <param name="threadCount">the number of threads to instantiate for the Dispatcher, if 0,
        /// shares DispatcherFactory.DefaultDispatcher.
        /// </param>
        /// <see>Beisen.Queue.DispatcherFactory</see>
        /// <seealso>Microsoft.Ccr.Core.Dispatcher</seealso>
        public QueueProcessor(string queueName, int threadCount) :
            this(queueName, threadCount, TaskExecutionPolicy.Unconstrained, ThreadPriority.Highest, 0, 0) { }

        /// <summary>
        /// Instantiates a QueueProcessor with an upper bound. This upper bound is not enforced!
        /// (old doc: If the queue hits the upper bound, items that would normally be enqueued will get thrown away.)
        /// </summary>
        /// <param name="queueName">The name of the queue, which needs to be unique per-Dispatcher.
        /// Keep this in mind if the threadCount is 0, as you're sharing a Dispatcher with potentially
        /// many QueueProcessors.</param>
        /// <param name="threadCount">the number of threads to instantiate for the Dispatcher, if 0,
        /// shares DispatcherFactory.DefaultDispatcher.</param>
        /// <param name="upperBound">
        /// The upper bound of the queue. Not enforced, because the policy used is Unconstrained!
        /// </param>
        [Obsolete("Please call the proper constructor explicitly. Note current comment is NOT what code is doing!")]
        public QueueProcessor(string queueName, int threadCount, int upperBound)
            : this(queueName, threadCount, TaskExecutionPolicy.Unconstrained, ThreadPriority.Highest, upperBound, 0)
        {
        }


        /// <summary>
        /// Instantiates a QueueProcessor with the queueName specified, the threadCount, an upperBound of the queue, and a policy what to do when the queue upper bound is reached.        
        /// </summary>
        /// <param name="queueName">Name of the queue, unique per-Dispatcher.</param>
        /// <param name="threadCount">The thread count; 0 shares DispatcherFactory.DefaultDispatcher.</param>
        /// <param name="taskExecutionQueuePolicy">The task execution queue policy (depth-based constraints).</param>
        /// <param name="priority">The thread priority for a dedicated Dispatcher (ignored when threadCount is 0).</param>
        /// <param name="upperBound">The upper bound; 0 or less means unconstrained.</param>
        /// <param name="activateItems">Batch size for dequeuing; 0 processes one item at a time.</param>
        public QueueProcessor(string queueName, int threadCount, TaskExecutionQueuePolicy taskExecutionQueuePolicy, Priority priority, int upperBound, int activateItems)
        {
            this.queueName = queueName;
            this.activateItems = activateItems;

            // if the threadCount is larger than 0, this QueueProcessor gets its own Dispatcher
            if (threadCount > 0)
            {
                ThreadPriority threadPriority = ConvertThreadPriority(priority);
                this.dispatcher = DispatcherFactory.CreateDispatcher(threadCount, threadPriority, this.queueName);
            }
            else // otherwise, share the Dispatcher
            {
                this.dispatcher = DispatcherFactory.DefaultDispatcher;
            }

            if (upperBound > 0)
            {
                TaskExecutionPolicy taskExecutionPolicy = ConvertTaskExecutionPolicy(taskExecutionQueuePolicy);
                this.dispatcherQueue =
                    new DispatcherQueue(this.queueName, dispatcher, taskExecutionPolicy, upperBound);
            }
            else
            {
                //If no upperBound is defined, don't define any constraints!
                this.dispatcherQueue = new DispatcherQueue(this.queueName, dispatcher);
            }

            this.upperBound = upperBound;
            this.port = new Port<T>();
            this.teardownPort = new Port<EmptyValue>();
            ReportableObjectDirectory.Add(this.queueName, this);
        }


        /// <summary>
        /// Instantiates a QueueProcessor with the queueName specified, the threadCount, a scheduling rate of the queue, and a policy what to do when the scheduling rate is reached.        
        /// </summary>
        /// <param name="queueName">Name of the queue, unique per-Dispatcher.</param>
        /// <param name="threadCount">The thread count; 0 shares DispatcherFactory.DefaultDispatcher.</param>
        /// <param name="taskExecutionSchedulingPolicy">The task execution scheduling policy (rate-based constraints).</param>
        /// <param name="priority">The thread priority for a dedicated Dispatcher (ignored when threadCount is 0).</param>
        /// <param name="schedulingRate">The scheduling rate; 0 or less means unconstrained.</param>
        /// <param name="activateItems">Batch size for dequeuing; 0 processes one item at a time.</param>
        public QueueProcessor(string queueName, int threadCount, TaskExecutionSchedulingPolicy taskExecutionSchedulingPolicy, Priority priority, double schedulingRate, int activateItems)
        {
            this.queueName = queueName;
            this.activateItems = activateItems;

            // if the threadCount is larger than 0, this QueueProcessor gets its own Dispatcher
            if (threadCount > 0)
            {
                ThreadPriority threadPriority = ConvertThreadPriority(priority);
                this.dispatcher = DispatcherFactory.CreateDispatcher(threadCount, threadPriority, this.queueName);
            }
            else // otherwise, share the Dispatcher
            {
                this.dispatcher = DispatcherFactory.DefaultDispatcher;
            }

            if (schedulingRate > 0)
            {
                TaskExecutionPolicy taskExecutionPolicy = ConvertTaskExecutionPolicy(taskExecutionSchedulingPolicy);
                this.dispatcherQueue =
                    new DispatcherQueue(this.queueName, dispatcher, taskExecutionPolicy, schedulingRate);
            }
            else
            {
                //If no schedulingRate is defined, don't define any constraints!
                this.dispatcherQueue = new DispatcherQueue(this.queueName, dispatcher);
            }

            this.upperBound = 0;
            this.port = new Port<T>();
            this.teardownPort = new Port<EmptyValue>();
            ReportableObjectDirectory.Add(this.queueName, this);
        }


        /// <summary>
        /// Maps the project's Priority enum to System.Threading.ThreadPriority.
        /// Unknown values fall back to Normal.
        /// </summary>
        private ThreadPriority ConvertThreadPriority(Priority priority)
        {
            switch (priority)
            {
                case Priority.Lowest:
                    return ThreadPriority.Lowest;
                case Priority.BelowNormal:
                    return ThreadPriority.BelowNormal;
                case Priority.Normal:
                    return ThreadPriority.Normal;
                case Priority.AboveNormal:
                    return ThreadPriority.AboveNormal;
                case Priority.Highest:
                    return ThreadPriority.Highest;
                default:
                    return ThreadPriority.Normal;
            }
        }


        /// <summary>
        /// Map subset of TaskExecutionQueuePolicy back to the CCR value.
        /// Unknown values fall back to Unconstrained.
        /// </summary>
        /// <param name="policy">The depth-based queue policy to map.</param>
        /// <returns>The corresponding CCR TaskExecutionPolicy.</returns>
        private TaskExecutionPolicy ConvertTaskExecutionPolicy(TaskExecutionQueuePolicy policy)
        {
            switch (policy)
            {
                case TaskExecutionQueuePolicy.ConstrainQueueDepthDiscardTasks:
                    return TaskExecutionPolicy.ConstrainQueueDepthDiscardTasks;
                case TaskExecutionQueuePolicy.ConstrainQueueDepthThrottleExecution:
                    return TaskExecutionPolicy.ConstrainQueueDepthThrottleExecution;
                default:
                    return TaskExecutionPolicy.Unconstrained;
            }
        }

        /// <summary>
        /// Map subset of TaskExecutionSchedulingPolicy back to the CCR value.
        /// Unknown values fall back to Unconstrained.
        /// </summary>
        /// <param name="policy">The rate-based scheduling policy to map.</param>
        /// <returns>The corresponding CCR TaskExecutionPolicy.</returns>
        private TaskExecutionPolicy ConvertTaskExecutionPolicy(TaskExecutionSchedulingPolicy policy)
        {
            switch (policy)
            {
                case TaskExecutionSchedulingPolicy.ConstrainSchedulingRateDiscardTasks:
                    return TaskExecutionPolicy.ConstrainSchedulingRateDiscardTasks;
                case TaskExecutionSchedulingPolicy.ConstrainSchedulingRateThrottleExecution:
                    return TaskExecutionPolicy.ConstrainSchedulingRateThrottleExecution;
                default:
                    return TaskExecutionPolicy.Unconstrained;
            }
        }


        /// <summary>
        /// Core constructor: wires up the Dispatcher, the DispatcherQueue (with either a
        /// depth constraint, a rate constraint, or none), the ports, and registers the
        /// queue in the ReportableObjectDirectory.
        /// </summary>
        /// <param name="queueName">Name of the queue, unique per-Dispatcher.</param>
        /// <param name="threadCount">The thread count; 0 shares DispatcherFactory.DefaultDispatcher.</param>
        /// <param name="taskExecutionPolicy">
        /// <ul>
        /// <li>Unconstrained: Default behavior, all tasks are queued with no constraints </li>
        /// <li>ConstrainQueueDepthDiscardTasks: Queue enforces maximum depth (specified at queue creation) and discards tasks enqueued after the limit is reached.</li>
        /// <li>ConstrainQueueDepthThrottleExecution: Queue enforces maximum depth (specified at queue creation) but does not discard any tasks. It forces the thread posting any tasks after the limit is reached, to sleep until the queue depth falls below the limit.</li>
        /// <li>ConstrainSchedulingRateDiscardTasks: Queue enforces the rate of task scheduling specified at queue creation and discards tasks enqueued after the current scheduling rate is above the specified rate. </li>
        /// <li>ConstrainSchedulingRateThrottleExecution: Queue enforces the rate of task scheduling specified at queue creation and forces the thread posting tasks to sleep until the current rate of task scheduling falls below the specified average rate. </li>    
        /// </ul>
        /// </param>
        /// <param name="threadPriority">Priority for a dedicated Dispatcher's threads (ignored when threadCount is 0).</param>
        /// <param name="maximumQueueDepth">Depth limit; takes precedence over schedulingRate when both are positive.</param>
        /// <param name="schedulingRate">Rate limit; only used when maximumQueueDepth is not positive.</param>        
        private QueueProcessor(string queueName, int threadCount, TaskExecutionPolicy taskExecutionPolicy, ThreadPriority threadPriority, int maximumQueueDepth, double schedulingRate)
        {
            this.queueName = queueName;

            // if the threadCount is larger than 0, this QueueProcessor gets its own Dispatcher
            if (threadCount > 0)
            {
                this.dispatcher = DispatcherFactory.CreateDispatcher(threadCount, this.queueName);
            }
            else // otherwise, share the Dispatcher
            {
                this.dispatcher = DispatcherFactory.DefaultDispatcher;
            }

            if (maximumQueueDepth > 0)
            {
                this.dispatcherQueue =
                    new DispatcherQueue(this.queueName, dispatcher, taskExecutionPolicy, maximumQueueDepth);

            }
            else if (schedulingRate > 0)
            {
                this.dispatcherQueue =
                    new DispatcherQueue(this.queueName, dispatcher, taskExecutionPolicy, schedulingRate);
            }
            else
            {
                //If no upperBound is defined, don't define any constraints!
                this.dispatcherQueue = new DispatcherQueue(this.queueName, dispatcher);
            }

            this.upperBound = maximumQueueDepth;
            this.port = new Port<T>();
            this.teardownPort = new Port<EmptyValue>();
            ReportableObjectDirectory.Add(this.queueName, this);
        }

        /// <summary>
        /// Enqueues an item to be processed.  Performs bounds checking on the queue, and
        /// activates the process thread pool if it hasn't been activated.
        /// </summary>
        /// <remarks>Items are only dropped when an upperBound is defined (and reached) Not when a scheduling rate is defined.</remarks>
        /// <param name="item">The item you want to enqueue.</param>
        /// <returns>return true is item is queued, returns false if item is dropped.</returns>
        public virtual bool Enqueue(T item)
        {
            bool allowOnQueue;
            // do upper bound checking, if upper bound is 0, there is no upper bound
            if (this.dispatcherQueue.Policy == TaskExecutionPolicy.ConstrainQueueDepthDiscardTasks && this.upperBound > 0)
            {
                //this is only a guess to see if a message is discarded or not. The port will drop messages internally, however
                //there is no way to get an event for this.
                allowOnQueue = (dispatcher.PendingTaskCount + port.ItemCount + this.dispatcher.WorkerThreadCount) < this.upperBound;
            }
            else
            {
                allowOnQueue = true;
            }

            if (allowOnQueue)
            {
                this.port.Post(item);

                // increase the total number of items queued
                Interlocked.Increment(ref totalItemsQueued);
            }
            else
            {
                // increase the total number of items thrown away due to upper bound hits
                Interlocked.Increment(ref totalItemsLost);
            }

            // this will start the queue processing if it hasn't already started, thread safe
            if (Interlocked.CompareExchange(ref this.running, 1, 0) == 0)
            {
                // store the start time of the queue for reporting
                startTime = DateTime.Now;

                if (this.activateItems > 0)
                {
                    this.ActivateProcessQueue();
                }
                else
                {
                    this.InternalProcessQueue();
                }
            }
            return allowOnQueue;
        }

        /// <summary>
        /// This method activates the queue processor, and will get called after each item is processed
        /// to re-activate and process the next item
        /// </summary>
        protected virtual void InternalProcessQueue()
        {
            Receiver<T> receiver = Arbiter.Receive<T>(true, this.port,
                delegate(T item)
                {
                    QueueHandler(item);
                });

            // this sets up a persistent (persist: true) receive task that processes items one at a time
            Arbiter.Activate(dispatcherQueue, receiver);
        }

        /// <summary>
        /// Activates the queue processor in batch mode: items are delivered in groups of
        /// 'activateItems' and processed sequentially within each batch.
        /// </summary>
        protected virtual void ActivateProcessQueue()
        {
            // this sets up a persistent (persist: true) batched receive task
            Arbiter.Activate(
                dispatcherQueue,
                Arbiter.MultipleItemReceive<T>(true, this.port, this.activateItems, delegate(T[] items)
                {
                    foreach (T item in items)
                    {
                        this.QueueHandler(item);
                    }
                })
            );
        }

        /// <summary>
        /// Processes a single dequeued item: dispatches to the item's own IQueueProcessable
        /// handler if it implements one, otherwise to the ProcessQueue delegate; then updates
        /// the reporting counters and fires CompletedQueue when processing catches up with
        /// the enqueue count.
        /// </summary>
        /// <param name="item">The item to process.</param>
        private void QueueHandler(T item)
        {
            DateTime processStart = DateTime.Now;  // for reporting

            try
            {
                // if this item contains a delegate to process itself, run that instead
                if (item is IQueueProcessable)
                {
                    ((IQueueProcessable)item).ProcessItem(item);
                }
                else
                {
                    this.ProcessQueue(this, item);
                }

                // increment total items and times processed; use the value returned by
                // Interlocked.Increment (and Interlocked.Read below) so the completion
                // check never sees a torn 64-bit read on 32-bit platforms
                long processed = Interlocked.Increment(ref totalItemsProcessed);
                Interlocked.Increment(ref totalTimesProcessed);

                if (processed == Interlocked.Read(ref totalItemsQueued))
                {
                    if (CompletedQueue != null)
                    {
                        CompletedQueue();
                    }
                }
            }
            catch (Exception)
            {
                // Deliberate best-effort: a failure while processing one item must not kill
                // the worker thread. NOTE(review): logging here was commented out in the
                // original; consider reinstating it so failures are not completely silent.
            }

            // store the total amount of time spent processing for reporting (approx)
            TimeSpan totalTime = TimeSpan.FromTicks(DateTime.Now.Ticks - processStart.Ticks);
            Interlocked.Add(ref totalSecondsProcessing, (long)Math.Round(totalTime.TotalSeconds));
        }




        /// <summary>
        /// This method builds an report formatted with HTML tags.
        /// </summary>
        /// <returns>the report</returns>
        public virtual string CreateReport()
        {

            StringBuilder report = new StringBuilder();
            report.Append("<div>\n");
            report.AppendFormat("<b>Queue Name:</b> {0}<br />\n", this.queueName);
            report.AppendFormat("<b>Queue TypeName:</b> {0}<br />\n", typeof(T).FullName);
            try
            {
                report.AppendFormat("<b>Queue'd Items:</b> {0}<br />\n", this.port.ItemCount);
                report.AppendFormat("<b>Items Being Processed:</b> {0}<br />\n", this.dispatcherQueue.Count);
                report.AppendFormat("<b>Thread Pool Type:</b> {0}<br />\n", (this.dispatcher.Name == DispatcherFactory.DefaultDispatcher.Name) ? "Shared" : "Individual");
                report.AppendFormat("<b>Thread Count:</b> {0}<br />\n", this.dispatcher.WorkerThreadCount);
                report.AppendFormat("<b>Total Items Enqueued:</b> {0}<br />\n", this.totalItemsQueued);
                report.AppendFormat("<b>Total Items Processed:</b> {0}<br />\n", this.totalItemsProcessed);

                if (upperBound > 0)
                    report.AppendFormat("<b>Total Items Lost (due to upperbound hits)</b>: {0}<br />\n", this.totalItemsLost);

                report.AppendFormat("<b>Total Times Processed:</b> {0}<br />\n", this.totalTimesProcessed);
                // fixed: closing tag was "<b>" which produced broken (unclosed) bold markup
                report.AppendFormat("<b>Total Seconds Processing (approx):</b> {0}<br />\n", this.totalSecondsProcessing);

                // note: startTime is only set on the first Enqueue; before that this yields
                // a meaningless (huge) running time, but it cannot throw
                double totalMinutes = TimeSpan.FromTicks(DateTime.Now.Ticks - this.startTime.Ticks).TotalMinutes;
                double averageItemsQueuedAMinute = (double)this.totalItemsQueued / totalMinutes;
                double averageItemsProcessedAMinute = (double)this.totalItemsProcessed / totalMinutes;
                // fixed: was totalTimesProcessed / totalTimesProcessed, which always reported 1
                // (or NaN); seconds spent processing divided by times processed is the average
                double averageProcessingTime = (double)this.totalSecondsProcessing / this.totalTimesProcessed;

                report.AppendFormat("<b>Total Running Time:</b> {0}<br />\n", totalMinutes);
                report.AppendFormat("<b>Average Items Queued a Minute:</b> {0}<br />\n", averageItemsQueuedAMinute);
                report.AppendFormat("<b>Average Items Processed a Minute:</b> {0}<br />\n", averageItemsProcessedAMinute);
                report.AppendFormat("<b>Average Processing Time (approx, in seconds):</b> {0}<br />\n", averageProcessingTime);
            }
            catch (Exception e)
            {
                report.Append(e.Message + " " + e.StackTrace);
            }

            report.Append("<br />\n");
            report.Append("</div>\n");

            return report.ToString();
        }

        /// <summary>
        /// 	<para>Gets the number of items still to be processed in this queue.</para>
        /// </summary>
        /// <value>
        /// 	<para>An <see cref="Int32"/> value indicating the queue length.</para>
        /// </value>
        public virtual int QueueLength
        {
            get
            {
                // If the queue is being batched and a timeout has been
                // set up on it.  This number could be 1 or 2
                // greater than the correct number.  It more than likely 
                // should be close enough. [tchow 06/01/2007]
                return dispatcher.PendingTaskCount + port.ItemCount;
            }
        }

        #region IDisposable Members

        /// <summary>
        /// This will post a teardown command to the thread pool to stop processing.
        /// Disposes the DispatcherQueue, and the Dispatcher too when this queue owns it;
        /// the shared default Dispatcher is only destroyed once no queues remain on it.
        /// </summary>
        public virtual void Dispose()
        {
            this.disposing = true;
            ReportableObjectDirectory.Remove(this.queueName);

            if (this.dispatcher.Name != DispatcherFactory.DefaultDispatcher.Name)
            {
                // dedicated dispatcher: we own both the queue and the dispatcher
                this.dispatcherQueue.Dispose();
                this.dispatcher.Dispose();
            }
            else
            {
                this.dispatcherQueue.Dispose();

                // tear down the shared dispatcher only when the last queue on it is gone
                if (DispatcherFactory.DefaultDispatcher != null &&
                    DispatcherFactory.DefaultDispatcher.DispatcherQueues.Count == 0)
                {
                    this.dispatcher = null;
                    DispatcherFactory.DestroyDefaultDispatcher();
                }
            }

            GC.SuppressFinalize(this);
        }

        #endregion
    }
}
