﻿using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Linq;
using System.Net;
using System.Threading.Tasks;
using Framework.Extensions;
using Microsoft.Practices.Unity;

namespace WebCrawler
{
    /// <summary>
    /// Crawls pages starting from a given URL, following links up to
    /// <see cref="RecursionDepth"/> levels deep. Child pages of each level are
    /// fetched in parallel and streamed to the caller as they complete, so the
    /// order of the returned <see cref="PageInfo"/> items is not deterministic.
    /// </summary>
    public class HttpWebCrawler : IWebCrawler
    {
        // Upper bound on concurrent child-page fetches per recursion level.
        private const int MaxParallelRequests = 20;

        private readonly IPageProcessor _pageProcessor;
        private int _recursionDepth;

        /// <summary>
        /// Creates a crawler that uses <paramref name="pageProcessor"/> to fetch
        /// and parse each page. Default recursion depth is 1 (start page plus
        /// its direct links).
        /// </summary>
        /// <exception cref="ArgumentNullException"><paramref name="pageProcessor"/> is null.</exception>
        public HttpWebCrawler(IPageProcessor pageProcessor)
        {
            if (pageProcessor == null)
                throw new ArgumentNullException("pageProcessor");
            _pageProcessor = pageProcessor;
            _recursionDepth = 1;
        }

        /// <summary>
        /// How many levels of links to follow from the start page.
        /// 0 means only the start page itself is returned.
        /// </summary>
        /// <exception cref="ArgumentOutOfRangeException">Value is negative.</exception>
        public int RecursionDepth
        {
            get { return _recursionDepth; }
            set
            {
                if (value < 0)
                    throw new ArgumentOutOfRangeException("value", "Recursion depth must be non-negative.");
                _recursionDepth = value;
            }
        }

        /// <summary>
        /// Lazily crawls from <paramref name="startUrl"/>. The start page itself
        /// is always the first item yielded.
        /// </summary>
        /// <exception cref="ArgumentNullException"><paramref name="startUrl"/> is null.</exception>
        public IEnumerable<PageInfo> StartFrom(string startUrl)
        {
            // Validate eagerly: this method is not an iterator itself, so the
            // check runs at call time rather than on first enumeration.
            if (startUrl == null)
                throw new ArgumentNullException("startUrl");
            return TraverseDepthFirst(startUrl, 0);
        }

        /// <summary>
        /// Yields the page at <paramref name="startUrl"/>, then (while below the
        /// configured depth) recursively crawls its links in parallel, streaming
        /// child results through a blocking collection as they arrive.
        /// </summary>
        private IEnumerable<PageInfo> TraverseDepthFirst(string startUrl, int level)
        {
            var descriptor = _pageProcessor.GetPageDescriptor(startUrl);
            yield return descriptor.Info;

            if (level >= _recursionDepth)
                yield break;

            // BlockingCollection is IDisposable; the original never disposed it.
            using (var results = new BlockingCollection<PageInfo>())
            {
                // Produce on a background task so the consumer loop below can
                // stream results while children are still being fetched.
                var producer = Task.Factory.StartNew(() =>
                {
                    try
                    {
                        Parallel.ForEach(
                            descriptor.Urls,
                            new ParallelOptions { MaxDegreeOfParallelism = MaxParallelRequests },
                            url =>
                            {
                                foreach (var info in TraverseDepthFirst(url, level + 1))
                                    results.Add(info);
                            });
                    }
                    finally
                    {
                        // Always unblock the consumer, even when a child fetch threw.
                        results.CompleteAdding();
                    }
                });

                foreach (var res in results.GetConsumingEnumerable())
                    yield return res;

                // GetConsumingEnumerable completes when CompleteAdding runs; Wait()
                // then rethrows any exception from the parallel traversal instead
                // of silently dropping it.
                producer.Wait();
            }
        }
    }
}