﻿using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using Needle.Utils.Threading;
using System.Threading;

namespace Needle.Utils.Collections.Threadsafe
{

    /// <summary>
    /// A lock-free threadsafe queue.
    /// </summary>
    /// <remarks>
    /// This queue seems as fast or even slower than the  
    /// LockedQueue , but may have better performance 
    /// characteristics on multi-core machines when the queue is under 
    /// heavy load of many writing and reading threads.
    /// 
    /// Then again it may not..., testing performance gave too
    /// widely varying results to make any meaningful observations
    /// about relative performance.
    /// 
    /// If a thread is switched out while it holds a lock in the locked
    /// implementation all other threads will block when using that queue,
    /// causing a massive pile-up. 
    /// This can not happen in the lock-free implementation, but because of
    /// the generous use of Interlocked the hit on CPU cache performance 
    /// (because of the memory barriers) may well be worse than the occasional 
    /// pile-up in the locked version...
    /// 
    /// The implementation is also way more complex and subtle
    /// than the LockedQueue one; I still can't tell if this queue is bug-free.
    /// The LockedQueue one however is 100% bug-free, and written in 5 minutes,
    /// while this queue took me 2 full days to implement.
    /// 
    /// Also, trying to convert the lockfree queue to a double-ended queue
    /// will be very difficult, while the locked version will be trivial to
    /// convert...
    /// 
    /// so LockedQueue: 2, LockFreeQueue: 1, methinks...
    /// </remarks>
    public class LockFreeQueue<T> : IQueue<T>
    {

        #region Node

        /// <summary>
        /// Singly-linked list node. The list is append-only: once Next has
        /// been set to a non-null value it is never changed again, which is
        /// the invariant the lock-free algorithm below relies on.
        /// </summary>
        private class Node
        {
            public Node(T item) { this.Item = item; }

            public T Item;
            /// <summary>
            /// Next should only be written using TrySetNext.
            /// Once written with a non-null value, Next is considered read-only.
            /// </summary>
            public Node Next;
            /// <summary>
            /// Atomically sets the next node if it is still null.
            /// Returns true if the write succeeded; false means another
            /// thread already appended a node after this one.
            /// </summary>
            public bool TrySetNext(Node newNext)
            {
                // This conditional prevents the potentially costly 
                // Interlocked.CompareExchange and it works because if Next is
                // non-null, it is readonly. Also the compiler or JIT will not move
                // the CAS before the if statement, because its a memory barrier,
                // so we can't accidentally return false because of our own write
                // occurring before the null check.
                // (A stale null read here is harmless: we just fall through to
                // the CAS, which decides authoritatively.)
                if (Next != null)
                    return false;

                Node oldNext;
                //--------- memory barrier -------------
                oldNext = Interlocked.CompareExchange(ref Next, newNext, null);
                //--------- memory barrier -------------
                // CompareExchange returns the value Next held before the
                // exchange; our write took effect only if that was still null.
                return Object.ReferenceEquals(null, oldNext);
            }
        }

        #endregion
        /// <summary>
        /// Hold the old head, _head.Next holds the real head Node.
        /// _head always points at a dummy (sentinel) node, so an "empty"
        /// queue still contains one node; this is what lets Enqueue and
        /// TryDequeue operate on different fields and avoid interfering.
        /// </summary>
        private Node _head;
        /// <summary>
        /// Holds the tail, or a node close to the tail. _tail may briefly lag
        /// behind the true end of the list while an enqueue is in flight; the
        /// loop in Enqueue catches it up.
        /// </summary>
        private Node _tail;
        /// <summary>
        /// Creates an empty queue consisting of just the dummy node, with
        /// _head and _tail both pointing at it.
        /// </summary>
        public LockFreeQueue()
        {
            _head = new Node(default(T));
            _tail = _head;
        }

        private int _count;
        /// <summary>
        /// The number of items in the queue. This is a snapshot: with other
        /// threads enqueueing/dequeueing concurrently it may be stale by the
        /// time the caller reads it, so use it only as a hint.
        /// </summary>
        public int Count
        {
            get
            {
                // no need to lock because all writes are done via
                // Interlocked.Increment or Interlocked.Decrement.
                return _count;
            }
        }
        /// <summary>
        /// Enqueues an item. Always returns true: the queue is unbounded,
        /// so enqueueing cannot fail. Provided to satisfy IQueue&lt;T&gt;.
        /// </summary>
        public bool TryEnqueue(T item)
        {
            Enqueue(item);
            return true;
        }
        /// <summary>
        /// Appends an item at the tail of the queue. Safe to call from any
        /// number of threads concurrently; never blocks.
        /// </summary>
        public void Enqueue(T item)
        {
            // create a new node to enqueue.
            Node newNode = new Node(item);

            // no need to lock when reading _tail, because all writes to _tail
            // are done with Interlocked.CompareExchange.
            Node oldTail = _tail;

            // Loop until appending the new node has succeeded.
            // This is a race to the end of the queue, if multiple threads are
            // enqueueing at the same time.
            while (!oldTail.TrySetNext(newNode))
            {
                // We failed to set oldTail.Next, so it is not null and 
                // from now on considered read-only.
                var newTail = oldTail.Next;
                    
                // Avoid CompareExchange() if we already can tell 
                // someone has updated _tail.
                if (Object.ReferenceEquals(_tail, oldTail))
                {
                    //--------- memory barrier -------------
                    // set _tail to oldTail.Next, if that hasn't been done yet.
                    // This is the "helping" step: we finish the other thread's
                    // half-done enqueue so _tail can never fall arbitrarily
                    // far behind.
                    Interlocked.CompareExchange(ref _tail, newTail, oldTail);
                    //--------- memory barrier -------------
                }
                // get the new tail. This could be oldTail.Next, 
                // or some node even further down the list.
                oldTail = _tail;
            }

            // We succeeded in setting oldTail.Next, 
            // time to update _tail to newNode.
            // If it fails, some other thread is already busy appending a new 
            // tail but don't worry, _tail will get fixed in the loop above, 
            // eventually.

            // Avoid CompareExchange() if we already can tell 
            // someone has updated _tail.
            if (Object.ReferenceEquals(_tail, oldTail))
            {
                //--------- memory barrier -------------
                Interlocked.CompareExchange(ref _tail, newNode, oldTail);
                //--------- memory barrier -------------
            }
            //--------- memory barrier -------------
            Interlocked.Increment(ref _count);
            //--------- memory barrier -------------

        }
        /// <summary>
        /// Tries to remove the item at the head of the queue.
        /// Returns true and sets <paramref name="item"/> on success; returns
        /// false with <paramref name="item"/> set to default(T) when the
        /// queue is empty. Safe for concurrent use; never blocks.
        /// </summary>
        public bool TryDequeue(out T item)
        {
            // clear the item location.
            item = default(T);

            Node oldHead = null;
            Node newHead = null;

            while (true)
            {
                // _head is the dummy node; _head.Next (if any) holds the
                // first real item.
                oldHead = _head;
                newHead = oldHead.Next;
                if (newHead != null)
                { // there is an item to dequeue.
                    // NOTE(review): ThreadUtils.CAS presumably wraps
                    // Interlocked.CompareExchange with the same
                    // (ref location, value, comparand) argument order and
                    // returns whether the swap happened — confirm against
                    // Needle.Utils.Threading.
                    if (ThreadUtils.CAS(ref _head, newHead, oldHead))
                    {   
                        //dequeueing the newHead succeeded, set the item.
                        // newHead is now the new dummy node.
                        item = newHead.Item;
                        // clear the Item field, so the item reference
                        // is not held any longer than neccesary
                        newHead.Item = default(T);
                        Interlocked.Decrement(ref _count);
                        return true;
                    }
                    // A competing thread won the head swap, try again.
                    continue;
                }
                // queue empty...
                return false;
            }
        }
        /// <summary>
        /// Removes and returns the item at the head of the queue.
        /// Returns default(T) when the queue is empty — callers that need to
        /// distinguish "empty" from a queued default value should use
        /// <see cref="TryDequeue"/> instead.
        /// </summary>
        public T Dequeue()
        {
            T result;
            TryDequeue(out result);
            return result;
        }

    }
}
