﻿using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Collections.Specialized;
using System.IO;
using System.Linq;
using System.Threading;
using System.Text;
using System.Threading.Tasks;

namespace Assignment
{
    /// <summary>
    /// Traverses the file system and loads the content of the files.
    /// </summary>
    /// <summary>
    /// Traverses the file system under a root directory on multiple reader
    /// threads, compares file contents, and publishes pairs of files with
    /// identical content to a shared producer/consumer queue.
    /// </summary>
    public class FileReader
    {
        /// <summary>
        /// Producer consumer queue.
        /// Producer : file reader threads (this class).
        /// Consumer : duplicate writer thread.
        /// Elements : pairs of file paths having duplicate content.
        /// </summary>
        private readonly ConcurrentQueue<IList<string>> duplicateFiles;

        /// <summary>
        /// Directory to search the files for.
        /// </summary>
        private readonly DirectoryInfo rootDirectory;

        /// <summary>
        /// Path/content pair for every file read so far. All access must hold a
        /// lock on the list itself. NOTE(review): this keeps the full content of
        /// every scanned file in memory, so memory use grows with total bytes read.
        /// </summary>
        private readonly IList<KeyValuePair<string, string>> nameContentMap = new List<KeyValuePair<string, string>>();

        /// <summary>
        /// Reader threads started by the most recent call to <see cref="Read"/>.
        /// </summary>
        private readonly IList<Thread> readerThreads = new List<Thread>();

        /// <summary>
        /// Number of reader threads to use.
        /// </summary>
        private readonly int threadCount;

        /// <summary>
        /// Work queue of files still waiting to be read.
        /// </summary>
        private readonly ConcurrentQueue<FileInfo> files = new ConcurrentQueue<FileInfo>();

        /// <summary>
        /// Signaled while the readers may run; non-signaled pauses them.
        /// </summary>
        private readonly ManualResetEvent pauseResumeEvent = new ManualResetEvent(true);

        /// <summary>
        /// Signaled to request that the reader threads stop early.
        /// </summary>
        private readonly ManualResetEvent abortEvent = new ManualResetEvent(false);

        /// <summary>
        /// Initializes a new instance of the <see cref="FileReader"/> class.
        /// </summary>
        /// <param name="duplicateFiles">Blocking queue to place the discovered matches.</param>
        /// <param name="rootDirectoryPath">Path to the root directory to search for files.</param>
        /// <param name="threadCount">Number of searcher threads to use.</param>
        /// <exception cref="ArgumentNullException">Thrown when <paramref name="duplicateFiles"/> or <paramref name="rootDirectoryPath"/> is null.</exception>
        /// <exception cref="ArgumentOutOfRangeException">Thrown when <paramref name="threadCount"/> is not positive.</exception>
        public FileReader(ConcurrentQueue<IList<string>> duplicateFiles, string rootDirectoryPath, int threadCount)
        {
            if (duplicateFiles == null)
            {
                throw new ArgumentNullException("duplicateFiles");
            }

            if (rootDirectoryPath == null)
            {
                throw new ArgumentNullException("rootDirectoryPath");
            }

            if (threadCount <= 0)
            {
                throw new ArgumentOutOfRangeException("threadCount", "At least one reader thread is required.");
            }

            this.duplicateFiles = duplicateFiles;
            rootDirectory = new DirectoryInfo(rootDirectoryPath);
            this.threadCount = threadCount;
        }

        /// <summary>
        /// Enumerates every file below the root directory and starts the reader
        /// threads that look for duplicate content.
        /// </summary>
        public void Read()
        {
            foreach (FileInfo fileInfo in rootDirectory.GetFiles("*.*", SearchOption.AllDirectories))
            {
                files.Enqueue(fileInfo);
            }

            for (int i = 0; i < threadCount; i++)
            {
                Thread t = new Thread(DoRead);

                // Background threads never keep the process alive on their own.
                t.IsBackground = true;
                readerThreads.Add(t);
                t.Start();
            }
        }

        /// <summary>
        /// Pauses the reader threads.
        /// </summary>
        public void Stop()
        {
            // Setting the state to non-signaled blocks the reader threads at the
            // top of their work loop.
            pauseResumeEvent.Reset();
        }

        /// <summary>
        /// Resumes the paused reader threads.
        /// </summary>
        public void Resume()
        {
            pauseResumeEvent.Set();
        }

        /// <summary>
        /// Requests the reader threads to stop and blocks until they have exited.
        /// Safe to call while the readers are paused: DoRead waits on the abort
        /// and pause events together, so a paused thread still observes the abort.
        /// </summary>
        public void Abort()
        {
            abortEvent.Set();

            foreach (Thread readerThread in readerThreads)
            {
                readerThread.Join();
            }
        }

        /// <summary>
        /// Stops any running readers and starts a fresh duplicate search from
        /// the root directory.
        /// </summary>
        public void Restart()
        {
            // Make sure the previous generation of threads has fully exited
            // before discarding their bookkeeping.
            Abort();
            readerThreads.Clear();

            // Clear the abort request and any pending pause, otherwise the new
            // threads would exit (or hang) immediately after starting.
            abortEvent.Reset();
            pauseResumeEvent.Set();

            // Drop unprocessed work left over from the aborted scan so files are
            // not enqueued twice by the Read() call below.
            FileInfo leftover;
            while (files.TryDequeue(out leftover))
            {
            }

            // Forget previously recorded content; otherwise every re-read file
            // would be reported as a duplicate of its own earlier entry.
            lock (nameContentMap)
            {
                nameContentMap.Clear();
            }

            Read();
        }

        /// <summary>
        /// Thread body: dequeues files, reads their content and reports every
        /// file whose content matches one read earlier. The thread exits when
        /// the work queue is drained or an abort is requested.
        /// </summary>
        private void DoRead()
        {
            // Index 0 is the abort event: when both events are signaled WaitAny
            // returns the lowest index, so abort wins over resume.
            WaitHandle[] abortOrRunning = new WaitHandle[] { abortEvent, pauseResumeEvent };

            while (true)
            {
                // Blocks here while paused, but still wakes up on abort.
                if (WaitHandle.WaitAny(abortOrRunning) == 0)
                {
                    break;
                }

                FileInfo fileInfo;
                if (!files.TryDequeue(out fileInfo))
                {
                    // Work queue drained: this thread is done.
                    break;
                }

                string content;
                try
                {
                    content = File.ReadAllText(fileInfo.FullName);
                }
                catch (IOException)
                {
                    // File vanished or is locked; skip it instead of letting an
                    // unhandled thread exception take down the process.
                    continue;
                }
                catch (UnauthorizedAccessException)
                {
                    continue;
                }

                KeyValuePair<string, string> pair = new KeyValuePair<string, string>(fileInfo.FullName, content);

                lock (nameContentMap)
                {
                    foreach (KeyValuePair<string, string> existing in nameContentMap)
                    {
                        // Check if matching content exists; report the earlier
                        // file first, then the newly read one.
                        if (string.Equals(existing.Value, content, StringComparison.Ordinal))
                        {
                            IList<string> dup = new List<string>();
                            dup.Add(existing.Key);
                            dup.Add(pair.Key);

                            duplicateFiles.Enqueue(dup);
                        }
                    }

                    nameContentMap.Add(pair);
                }
            }
        }

        /// <summary>
        /// Compares every recorded file against every other one and enqueues all
        /// duplicate pairs for the FileWriter consumer. NOTE(review): currently
        /// unreferenced; kept as a batch (post-scan) alternative to the
        /// incremental check performed in DoRead.
        /// </summary>
        private void Compute()
        {
            // Take the same lock as DoRead so this is safe even while readers run.
            lock (nameContentMap)
            {
                for (int outer = 0; outer < nameContentMap.Count; outer++)
                {
                    for (int inner = outer + 1; inner < nameContentMap.Count; inner++)
                    {
                        if (nameContentMap[outer].Value.Equals(nameContentMap[inner].Value))
                        {
                            IList<string> dup = new List<string>();

                            // Add matching pair
                            dup.Add(nameContentMap[outer].Key);
                            dup.Add(nameContentMap[inner].Key);

                            duplicateFiles.Enqueue(dup);
                        }
                    }
                }
            }
        }
    }
}
