﻿using System;
using System.Collections.Specialized;
using System.Runtime.Caching;
using System.Threading;
using System.Threading.Tasks;

namespace MemoryCacheLeakTest
{
    /// <summary>
    /// Test program to examine MemoryCache eviction performance under different garbage collection scenarios.
    /// </summary>
    /// <remarks>
    /// <see cref="ParallelCacheInsert"/> is the original test case, simulated parallel writes to the cache, with
    /// some working time before each write.  <c>ParallelCacheInsert</c> contains the comments for the test logic flow.
    /// 
    /// <see cref="SingleThreadCacheInsert"/> is a single threaded comparison test.  Comments from <c>ParallelCacheInsert</c>
    /// are applicable to the variables in the <c>SingleThreadCacheInsert</c> test case.
    /// 
    /// To change the garbage collection options (and effect different <see cref="MemoryCache"/> eviction behaviors):
    /// <list type="bullet">
    /// <item>Change (in the app.config), gcConcurrent enabled -- true (the default) will display leaky behavior, false evictions will occur as expected.</item>
    /// <item>Swap gcConcurrent for gcServer, with enabled = true.  Will behave close to when gcConcurrent is enabled</item>
    /// <item>Set forceGc = true in the test cases.  This will force evictions, for any gc setting.</item>
    /// </list>
    /// 
    /// <seealso href="http://stackoverflow.com/questions/5655439/net-4-memorycache-leaks-with-concurrent-garbage-collection"/>
    /// </remarks>
    internal class Program
    {
        // Number of evictions observed via RemovedCallback.  Eviction callbacks can fire
        // concurrently (cache trimming overlapping the parallel inserts), so the counter
        // is updated with Interlocked.Increment -- a bare ++ on a volatile int is a
        // read-modify-write and is NOT atomic, which under-counts evictions in the
        // parallel test.
        static int _evictCount;

        /// <summary>
        /// Forces a blocking full garbage collection (including pending finalizers) so the
        /// cache's memory monitor sees an accurate heap size and trims as configured.
        /// </summary>
        static void ForceGarbageCollection()
        {
            // let in-flight inserts settle before collecting
            Thread.Sleep(200);
            GC.Collect();
            GC.WaitForPendingFinalizers();
        }

        static void Main(string[] args)
        {
            ParallelCacheInsert();
            //SingleThreadCacheInsert();
        }

        /// <summary>
        /// Creates the cache under test: 10 MB memory limit, polled every second.
        /// Shared by both test cases so their configuration cannot drift apart.
        /// </summary>
        static MemoryCache CreateTestCache()
        {
            return new MemoryCache(
                "test-cache",
                new NameValueCollection
                {
                    { "pollingInterval", "00:00:01" },
                    { "cacheMemoryLimitMegabytes", "10" }
                });
        }

        /// <summary>
        /// Inserts one fixed-size payload under a fresh GUID key, counting its eventual
        /// eviction through <see cref="CacheItemPolicy.RemovedCallback"/>.
        /// </summary>
        /// <param name="cache">Destination cache.</param>
        /// <param name="objSize">Payload size in bytes.</param>
        static void InsertItem(MemoryCache cache, int objSize)
        {
            var buffer = new byte[objSize];
            cache.Add(
                new CacheItem(Guid.NewGuid().ToString(), buffer),
                new CacheItemPolicy { RemovedCallback = arguments => Interlocked.Increment(ref _evictCount) });
        }

        /// <summary>
        /// Original test case: simulated parallel writes to the cache, with some working
        /// time before each write, periodically reporting eviction progress.
        /// </summary>
        static void ParallelCacheInsert()
        {
            // tracks the number of evictions
            _evictCount = 0;

            // the size of the object to put in the cache
            const int objSize = 1024 * 66;

            // the "burst" of cache insert operations (used to not overload the cache)
            const int cacheOperationCount = 25;

            // total number of desired cache inserts
            const int totalOpCount = 2500;

            // degree of parallelism
            const int workerCount = 4;

            // force garbage collection (will ensure proper evictions if true)
            const bool forceGc = false;

            using (var cache = CreateTestCache())
            {
                for (var i = 0; i < totalOpCount / cacheOperationCount; i++)
                {
                    // perform the cache insert burst in parallel; sleep to simulate work
                    // (reading the file system, downloading a file, ...)
                    Parallel.For(
                        0, cacheOperationCount, new ParallelOptions { MaxDegreeOfParallelism = workerCount },
                        p =>
                        {
                            Thread.Sleep(100);
                            InsertItem(cache, objSize);
                        });

                    // report, depending on loop count
                    if (i % 10 == 0)
                    {
                        // if forceGc is true, the cache will behave as expected and evict
                        // when the size limit is reached
                        if (forceGc) ForceGarbageCollection();

                        // compute and report stats
                        Report(i, cacheOperationCount, objSize);

                        // sleep to simulate more work, allow cache polling to catch up.
                        Thread.Sleep(250);
                    }
                }
            }

            Console.WriteLine("Finished Parallel Cache Insert -- press any key to exit");
            Console.Read();
        }

        /// <summary>
        /// Writes one progress line: loop index, evicted/inserted counts, the megabytes of
        /// inserted-but-not-yet-evicted payload, and the GC's view of the managed heap.
        /// </summary>
        /// <param name="i">Current outer-loop index.</param>
        /// <param name="cacheOperationCount">Inserts performed per outer-loop iteration.</param>
        /// <param name="objSize">Payload size, in bytes, of each cached object.</param>
        static void Report(int i, int cacheOperationCount, int objSize)
        {
            // snapshot the counter once so the displayed count and the derived byte figure
            // agree; a slightly stale value is acceptable for a progress report
            var evictCount = _evictCount;
            var currentOpCount = i * cacheOperationCount;

            // widen to long before multiplying so larger test constants cannot overflow
            var totalOpMemSize = (long)currentOpCount * objSize;
            var evictedMemSize = (long)evictCount * objSize;

            // Console.WriteLine has a composite-format overload; wrapping string.Format is redundant
            Console.WriteLine(
                "{0}:evicted-> {1}/{2}: mem (MB)-> {3}: gc mem (MB)-> {4}",
                i.ToString().PadLeft(3),
                evictCount.ToString().PadLeft(5),
                currentOpCount.ToString().PadRight(5),
                ((totalOpMemSize - evictedMemSize) / 1024f / 1024f).ToString("f1").PadRight(8),
                (GC.GetTotalMemory(false) / 1024f / 1024f).ToString("f1").PadLeft(8));
        }

        /// <summary>
        /// Single-threaded comparison test; see <see cref="ParallelCacheInsert"/> for the
        /// meaning of the local constants and the test logic flow.
        /// </summary>
        static void SingleThreadCacheInsert()
        {
            _evictCount = 0;
            const int objSize = 1024 * 66;
            const int cacheOperationCount = 25;
            const int totalOpCount = 2500;
            const bool forceGc = false;

            using (var cache = CreateTestCache())
            {
                for (var i = 0; i < totalOpCount / cacheOperationCount; i++)
                {
                    for (var j = 0; j < cacheOperationCount; j++)
                    {
                        InsertItem(cache, objSize);

                        Thread.Sleep(10);
                    }

                    if (i % 10 == 0)
                    {
                        if (forceGc) ForceGarbageCollection();

                        Report(i, cacheOperationCount, objSize);
                        Thread.Sleep(250);
                    }
                }
            }
            Console.WriteLine("Finished Single Thread Cache Insert");
            Console.Read();
        }
    }
}