﻿using CrawlerFramework.ConfigEntity;
using CrawlerFramework.Models;
using CrawlerFramework.PageProcessor;
using CrawlerFramework.Repository;
using MediaBot.Library;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Linq.Expressions;
using System.Threading;
using System.Threading.Tasks;

namespace CrawlerFramework.CrawlerEngine
{
    public class BaseCrawlerEngine : ICrawlerEngine
    {
        #region Properties

        /// <summary>
        /// Whether the crawler operates in parallel. Controlled by the
        /// IsRunParallelOnSite setting in the app.config file.
        /// </summary>
        public bool IsParallel
        {
            get
            {
                return ConfigurationSettings.Settings.IsRunParallelOnSite;
            }
        }

        /// <summary>
        /// Service used to record informational and error messages.
        /// </summary>
        public ILogService Logger
        {
            get;
            private set;
        }

        /// <summary>
        /// Factory that produces a page processor for each link to crawl.
        /// </summary>
        public IPageProcessorBuilder PageProcessorBuilder
        {
            get;
            private set;
        }

        #endregion Properties

        #region Constructors

        /// <summary>
        /// Creates the engine with its required collaborators.
        /// </summary>
        /// <param name="processorBuilder">Factory producing a page processor per link.</param>
        /// <param name="logService">Destination for log output.</param>
        /// <exception cref="ArgumentNullException">Either argument is null.</exception>
        public BaseCrawlerEngine(IPageProcessorBuilder processorBuilder, ILogService logService)
        {
            if (processorBuilder == null)
                throw new ArgumentNullException("processorBuilder");
            if (logService == null)
                throw new ArgumentNullException("logService");

            this.PageProcessorBuilder = processorBuilder;
            this.Logger = logService;
        }

        #endregion Constructors

        #region Public methods

        /// <summary>
        /// Runs every scheduled job whose CrawlStatus is Initial. Jobs are
        /// grouped into dependency chains (see QueueingJobs); each chain runs
        /// in FIFO order, and chains run concurrently when IsParallel is set.
        /// </summary>
        public virtual void ExecuteNewScheduledJobs()
        {
            var jobs = GetJobs(j => j.CrawlStatus == CrawlStatus.Initial);
            var queues = QueueingJobs(jobs);

            if (IsParallel)
            {
                Parallel.ForEach(queues, RunQueue);
            }
            else
            {
                foreach (var queue in queues)
                {
                    RunQueue(queue);
                }
            }
        }

        /// <summary>
        /// Run a particular scheduled job
        /// </summary>
        /// <param name="jobGuid">GUID of scheduled job to run</param>
        public virtual void ExecuteScheduledJob(Guid jobGuid)
        {
            var jobRepo = new ScheduledJobRepository();

            try
            {
                var job = jobRepo.Single(j => j.JobGuid == jobGuid);
                if (job == null)
                {
                    Logger.Info(string.Format("Job GUID {0} does not exist", jobGuid));
                    return;
                }

                // Run the prerequisite job first if it has never completed.
                // NOTE(review): a cyclic RunAfterJobGuid chain would recurse
                // forever here — assumed acyclic by scheduling; verify upstream.
                var precedenceJob = jobRepo.Single(j => j.JobGuid == job.RunAfterJobGuid);
                if (precedenceJob != null && precedenceJob.LastEnd == null)
                    ExecuteScheduledJob(precedenceJob.JobGuid);

                var linkConfig = job.GetLinkConfig();
                if (linkConfig == null)
                    throw new InvalidOperationException("Cannot deserialize the link config instance", new ArgumentException("Specified GUID: " + job.LinkConfigGuid));

                var links = linkConfig.GetLinks();

                // Mark the job as started before crawling so its status is
                // visible to observers while links are being processed.
                job.Start();
                jobRepo.Edit(job);
                jobRepo.Save();

                if (IsParallel)
                {
                    ProcessLinksParallel(linkConfig, links);
                }
                else
                {
                    foreach (var link in links)
                    {
                        var pageProcessor = PageProcessorBuilder.GetPageProcessor(this, linkConfig, link, this.Logger, null);
                        pageProcessor.Execute();
                    }
                }

                // Re-fetch the entity in case it changed while crawling, then
                // record completion.
                job = jobRepo.Single(j => j.ScheduledJobId == job.ScheduledJobId);
                if (job != null)
                {
                    job.Finish(CrawlStatus.Completed);
                    jobRepo.Edit(job);
                    jobRepo.Save();
                }
            }
            catch (Exception e)
            {
                Logger.Error(string.Format("ExecuteScheduledJob({0})", jobGuid), e);
                throw;
            }
            finally
            {
                jobRepo.Dispose();
            }
        }

        /// <summary>
        /// Execute all jobs that need to run at the time this method is called
        /// </summary>
        /// <param name="jobGuids">GUIDs of the scheduled jobs to run, in order.</param>
        public virtual void ExecuteScheduledJobs(IEnumerable<Guid> jobGuids)
        {
            foreach (var guid in jobGuids)
            {
                ExecuteScheduledJob(guid);
            }
        }

        /// <summary>
        /// Get all jobs that have been scheduled.
        /// </summary>
        /// <returns>A materialized snapshot of every scheduled job.</returns>
        public virtual IEnumerable<ScheduledJob> GetAllScheduledJobs()
        {
            var jobRepo = new ScheduledJobRepository();
            try
            {
                // Materialize before disposing the repository: a deferred
                // query enumerated by the caller after Dispose() would fail.
                return jobRepo.All.ToList();
            }
            finally
            {
                jobRepo.Dispose();
            }
        }

        /// <summary>
        /// Get all jobs currently in the Processing state.
        /// </summary>
        /// <returns>A materialized snapshot of the running jobs.</returns>
        public virtual IEnumerable<ScheduledJob> GetRunningJobs()
        {
            return GetJobs(j => j.CrawlStatus == CrawlStatus.Processing);
        }

        /// <summary>
        /// Pause all running jobs and persist the status change.
        /// </summary>
        public void PauseAllProcesses()
        {
            var jobRepo = new ScheduledJobRepository();
            try
            {
                var runningJobs = jobRepo.FindBy(j => j.CrawlStatus == CrawlStatus.Processing);
                foreach (var job in runningJobs)
                {
                    job.Pause();
                    jobRepo.Edit(job);
                }

                jobRepo.Save();
            }
            finally
            {
                jobRepo.Dispose();
            }
        }

        /// <summary>
        /// Pause a particular job if it's running.
        /// </summary>
        /// <param name="jobGuid">GUID of the job to pause.</param>
        public void PauseScheduledJob(Guid jobGuid)
        {
            var jobRepo = new ScheduledJobRepository();
            try
            {
                var job = jobRepo.Single(j => j.JobGuid == jobGuid && j.CrawlStatus == CrawlStatus.Processing);
                if (job != null)
                {
                    job.Pause();
                    jobRepo.Edit(job);
                    // BUG FIX: Save() was missing, so the pause was never
                    // persisted (PauseAllProcesses saves; this now matches).
                    jobRepo.Save();
                }
            }
            finally
            {
                jobRepo.Dispose();
            }
        }

        /// <summary>
        /// Update information of a job
        /// </summary>
        /// <param name="job">The job entity carrying the new values.</param>
        public void UpdateScheduledJob(ScheduledJob job)
        {
            var jobRepo = new ScheduledJobRepository();
            try
            {
                jobRepo.Edit(job);
                jobRepo.Save();
            }
            finally
            {
                jobRepo.Dispose();
            }
        }

        #endregion Public methods

        /// <summary>
        /// Executes each job in the queue in FIFO order.
        /// BUG FIX: the previous loop dequeued once and then spun on
        /// "while (job != null)" without ever dequeuing again, re-executing
        /// the same job forever; Queue&lt;T&gt;.Dequeue also throws on an empty
        /// queue rather than returning null, so the old exit condition was
        /// unreachable.
        /// </summary>
        /// <param name="queue">Dependency-ordered chain of jobs to run.</param>
        private void RunQueue(Queue<ScheduledJob> queue)
        {
            while (queue.Count > 0)
            {
                var job = queue.Dequeue();
                ExecuteScheduledJob(job.JobGuid);
            }
        }

        /// <summary>
        /// Removes and returns the first job in the collection whose
        /// RunAfterJobGuid equals <paramref name="targetGuid"/> (i.e. the
        /// direct successor of that job), or null if none exists.
        /// </summary>
        protected virtual ScheduledJob FindAndRemove(Guid targetGuid, IList<ScheduledJob> collection)
        {
            var target = collection.FirstOrDefault(j => j.RunAfterJobGuid == targetGuid);
            if (target == null)
                return null;

            collection.Remove(target);
            return target;
        }

        /// <summary>
        /// Fetches the jobs matching <paramref name="condition"/> as a
        /// materialized list, so callers can enumerate after the repository
        /// is disposed.
        /// </summary>
        protected virtual IEnumerable<ScheduledJob> GetJobs(Expression<Func<ScheduledJob, bool>> condition)
        {
            var jobRepo = new ScheduledJobRepository();
            try
            {
                // Materialize before disposing the repository (see
                // GetAllScheduledJobs for rationale).
                return jobRepo.FindBy(condition).ToList();
            }
            finally
            {
                jobRepo.Dispose();
            }
        }

        /// <summary>
        /// Crawls every link concurrently, bounded by the configured
        /// NumberOfThreadsInPool setting.
        /// </summary>
        protected virtual void ProcessLinksParallel(LinkConfig linkConfig, IEnumerable<Link> links)
        {
            var maxConcurrent = ConfigurationSettings.Settings.NumberOfThreadsInPool;
            var option = new ParallelOptions { MaxDegreeOfParallelism = maxConcurrent };
            Parallel.ForEach(links, option, (link) =>
            {
                var processor = PageProcessorBuilder.GetPageProcessor(this, linkConfig, link, this.Logger, null);
                processor.Execute();
            });

            Logger.Info("ProcessLinksParallel finished");
        }

        /// <summary>
        /// Groups jobs into dependency chains: each chain starts with a job
        /// that has no prerequisite (RunAfterJobGuid == null) and is followed
        /// by its successors in run order.
        /// </summary>
        /// <param name="jobs">Jobs to arrange; enumerated exactly once.</param>
        /// <returns>One FIFO queue per independent chain.</returns>
        protected virtual IEnumerable<Queue<ScheduledJob>> QueueingJobs(IEnumerable<ScheduledJob> jobs)
        {
            var queues = new List<Queue<ScheduledJob>>();

            // Materialize once: the original code enumerated `jobs` twice
            // (Where + Except), which re-runs a deferred query.
            var allJobs = jobs.ToList();
            var priorJobs = allJobs.Where(j => j.RunAfterJobGuid == null).ToList();
            var remainJobs = allJobs.Except(priorJobs).ToList();

            foreach (var job in priorJobs)
            {
                var queue = new Queue<ScheduledJob>();
                queue.Enqueue(job);
                queues.Add(queue);

                // Follow the successor chain until no job names the tail as
                // its prerequisite.
                var child = FindAndRemove(job.JobGuid, remainJobs);
                while (child != null)
                {
                    queue.Enqueue(child);
                    child = FindAndRemove(child.JobGuid, remainJobs);
                }
            }

            return queues;
        }
    }
}