﻿using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.IO;
using System.Net;
using System.Threading.Tasks;

namespace Kaspersky.Test.RecursiveLinkLogger
{
    /// <summary>
    /// Recursive link searcher
    /// </summary>
    /// <summary>
    /// Recursively crawls pages starting from a given URL, extracting links with
    /// a <typeparamref name="TLinkExtractor"/> and following each newly found
    /// link on a separate task. Results are exposed via <see cref="Links"/>,
    /// failures via <see cref="Errors"/>.
    /// </summary>
    /// <typeparam name="TLinkExtractor">
    /// Parameterless-constructible extractor used to pull links out of a page's HTML.
    /// </typeparam>
    public class LinkLogger<TLinkExtractor> where TLinkExtractor : ILinkExtractor, new()
    {
        /// <summary>
        /// The resulting set of links. Keys are compared with an ordinal,
        /// case-insensitive comparer so the same URL in a different case is not
        /// visited twice; values keep the original casing for reporting.
        /// </summary>
        private ConcurrentDictionary<string, string> _links;

        /// <summary>
        /// The resulting set of exceptions collected from all crawl tasks.
        /// </summary>
        private ConcurrentBag<Exception> _errors;

        /// <summary>
        /// A base URL part prepended to every URL before downloading.
        /// Null when the parameterless constructor was used; string
        /// concatenation treats null as empty, so that is safe.
        /// </summary>
        private readonly string _baseUrl;

        /// <summary>
        /// The object used for extracting links from a downloaded page.
        /// </summary>
        private readonly ILinkExtractor _linkExtractor = new TLinkExtractor();

        /// <summary>
        /// Fires the <see cref="LinkFoundEvent"/> for a newly discovered link.
        /// </summary>
        /// <param name="link">The link that was found</param>
        private void FireLinkFoundEvent(string link)
        {
            // Copy to a local so a concurrent unsubscribe cannot null out the
            // delegate between the null check and the invocation.
            var handler = this.LinkFoundEvent;
            if (handler != null)
            {
                handler(link);
            }
        }

        /// <summary>
        /// Downloads one page, records its links, and recurses into each new
        /// link on a child task. Exceptions from child tasks are unwrapped and
        /// stored in <see cref="_errors"/> rather than propagated.
        /// </summary>
        /// <param name="url">URL (relative to <see cref="_baseUrl"/>) to crawl</param>
        private void RecursiveLogLinks(string url)
        {
            // child tasks spawned for every link found on this page
            var taskList = new List<Task>();

            // downloading the page
            // NOTE(review): WebClient is obsolete in modern .NET; HttpClient is
            // preferred, but it is kept here to preserve the synchronous
            // download semantics this method depends on.
            using (var client = new WebClient())
            {
                // Add a user agent header in case the requested URI contains a query
                client.Headers.Add("user-agent", "Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.2; .NET CLR 1.0.3705;)");

                string html = client.DownloadString(this._baseUrl + url);
                foreach (var curUrl in this._linkExtractor.ExtractLinksFromString(html))
                {
                    // Preventing cross-references: _links uses an ordinal
                    // case-insensitive comparer, so TryAdd dedupes without the
                    // culture-sensitive ToLower() the original code used (which
                    // misbehaves e.g. under the Turkish 'I' casing rules) and
                    // without allocating a lowered copy of every URL.
                    if (!this._links.TryAdd(curUrl, curUrl))
                    {
                        continue;
                    }

                    this.FireLinkFoundEvent(curUrl);

                    // recursively getting links on a child task; curUrl is
                    // passed as task state to avoid capturing the loop variable
                    taskList.Add(Task.Factory.StartNew(l => this.RecursiveLogLinks((string)l), curUrl));
                }
            }

            // waiting for all child tasks to finish
            try
            {
                Task.WaitAll(taskList.ToArray());
            }
            catch (AggregateException ex)
            {
                // Unwrap so callers see the real failures, not the Task wrapper.
                foreach (var inEx in ex.InnerExceptions)
                {
                    this._errors.Add(inEx);
                }
            }
            catch (Exception ex)
            {
                this._errors.Add(ex);
            }
        }

        /// <summary>
        /// Creates a logger with no base URL; every URL passed to
        /// <see cref="LogLinks"/> must then be absolute.
        /// </summary>
        public LinkLogger()
        {
        }

        /// <summary>
        /// Creates a logger that prepends a base URL part to all URLs to be opened.
        /// </summary>
        /// <param name="baseUrl">A base URL part</param>
        /// <exception cref="ArgumentNullException">baseUrl is null</exception>
        public LinkLogger(string baseUrl)
        {
            if (baseUrl == null)
            {
                throw new ArgumentNullException("baseUrl");
            }
            this._baseUrl = baseUrl;
        }

        /// <summary>
        /// Recursively searches for URLs on a page.
        /// Stores results in <see cref="Links"/>.
        /// Stores exceptions (if any) in <see cref="Errors"/>.
        /// Resets both collections on every call.
        /// </summary>
        /// <param name="url">URL to start with</param>
        /// <exception cref="ArgumentNullException">url is null</exception>
        public void LogLinks(string url)
        {
            if (url == null)
            {
                throw new ArgumentNullException("url");
            }

            // Ordinal, case-insensitive keys replace the former ToLower()
            // normalization: correct across all cultures and avoids a per-link
            // string allocation.
            this._links = new ConcurrentDictionary<string, string>(StringComparer.OrdinalIgnoreCase);
            this._errors = new ConcurrentBag<Exception>();

            try
            {
                this.RecursiveLogLinks(url);
            }
            catch (Exception ex)
            {
                this._errors.Add(ex);
            }
        }

        /// <summary>
        /// URLs found by <see cref="LogLinks"/>; null before the first call.
        /// </summary>
        public IEnumerable<string> Links { get { return this._links == null ? null : this._links.Values; } }

        /// <summary>
        /// Exceptions that occurred during <see cref="LogLinks"/>; null before the first call.
        /// </summary>
        public IEnumerable<Exception> Errors { get { return this._errors; } }

        /// <summary>
        /// Fired once for each distinct link found by <see cref="LogLinks"/>.
        /// May be raised concurrently from multiple tasks.
        /// </summary>
        public event Action<string> LinkFoundEvent;
    }
}
