﻿using System;
using System.Linq;
using System.Threading.Tasks;
using Framework.Extensions;
using Microsoft.Practices.Unity;
using NUnit.Framework;
using Rhino.Mocks;

namespace WebCrawler.Tests
{
    /// <summary>
    /// Test-data holder for the web-crawler tests: wires Rhino.Mocks stubs for every
    /// collaborator the crawler touches (http client, url matcher, url repository,
    /// page processor) on top of the base <see cref="CrawlerTestData"/> fixture data.
    /// </summary>
    public class WebCrawlerTestData : CrawlerTestData
    {
        // Stubbed page processor, exposed so tests can set expectations on it directly.
        public IPageProcessor PageProcessor { get; private set; }

        public WebCrawlerTestData(string startUrl)
            : base(startUrl)
        {
            // _httpClient / _urlMatcher / _urlRepository are declared on the base fixture.
            _httpClient = Stub<IHttpClient>();
            _urlMatcher = Stub<IUrlMatcher>();
            _urlRepository = Stub<IUrlRepository>();
            PageProcessor = Stub<IPageProcessor>();
        }
    }

    /// <summary>
    /// Tests for <see cref="HttpWebCrawler"/>: verifies that a crawl yields each unique
    /// url exactly once and that the start url is requested exactly once over http.
    /// </summary>
    public class WebCrawlerTests : CrawlerTestFixture<WebCrawlerTestData>
    {
        [SetUp]
        public new void Setup()
        {
            // Fresh fixture data (and fresh stubs) for every test.
            Data = new WebCrawlerTestData("http://company.com");
        }

        // Builds a descriptor for a leaf page: marked as skipped, linking to no further urls.
        private static PageDescriptior SkippedPage(string url)
        {
            return new PageDescriptior { Info = PageInfo.Skip(url), Urls = new string[0] };
        }

        [Test]
        public void ShouldReturn_EachUniqueUrl_ThatIsFound()
        {
            var crawler = new HttpWebCrawler(Data.PageProcessor);

            With.Mocks(Data.Mocks).Expecting(() =>
            {
                // The start page links to Url1 and Url2; both of those are leaf pages.
                Expect.Call(Data.PageProcessor.GetPageDescriptor(Data.StartUrl))
                    .Return(new PageDescriptior
                                {
                                    Info = Data.ResultHtml,
                                    Urls = new[] { Data.Url1, Data.Url2 }
                                });
                Expect.Call(Data.PageProcessor.GetPageDescriptor(Data.Url1))
                    .Return(SkippedPage(Data.Url1));
                Expect.Call(Data.PageProcessor.GetPageDescriptor(Data.Url2))
                    .Return(SkippedPage(Data.Url2));
            }).Verify(() =>
            {
                var crawledUrls = crawler.StartFrom(Data.StartUrl).Select(page => page.Url).ToArray();
                Assert.That(crawledUrls, Is.EqualTo(new[] { Data.StartUrl, Data.Url1, Data.Url2 }));
            });
        }

        [Test]
        public void ShouldRequestStartUrl()
        {
            // Only the http client is mocked; the rest of the pipeline is real.
            var httpClient = Data.Mock<IHttpClient>();
            var pageProcessor = new PageProcessor(httpClient, new UrlMatcher(), new UrlRepository());
            var crawler = new HttpWebCrawler(pageProcessor);

            With.Mocks(Data.Mocks).Expecting(() =>
            {
                Expect.Call(httpClient.LoadPageInfoAsync(Data.StartUrl))
                    .Return(Task.Factory.StartNew(() => Data.ResultHtml)).Repeat.Once();
            }).Verify(() =>
            {
                // Enumerate the result to drive the (presumably lazy) crawl so the
                // http request is actually issued — the pages themselves are not inspected.
                foreach (var page in crawler.StartFrom(Data.StartUrl))
                {
                }
            });
        }
    }
}