﻿using System;
using System.Collections.Generic;
using System.IO;
using System.Threading.Tasks;
using Framework.Extensions;
using Framework.Unity.Extensions;
using Microsoft.Practices.Unity;
using NUnit.Framework;
using Rhino.Mocks;

namespace WebCrawler.Tests.Integration
{
    /// <summary>
    /// Integration tests for <see cref="IWebCrawler"/>: container resolution,
    /// crawl traversal from a start page, and log-file output via <see cref="IUrlLogger"/>.
    /// The HTTP layer is stubbed through <see cref="IHttpClient"/> so crawls run
    /// against the canned page graph built in <see cref="Setup"/>.
    /// </summary>
    public class WebCrawlerIntegrationTests : IntegrationTestFixture
    {
        private IWebCrawler _crawler;
        private IHttpClient _httpClient;
        private string _startUrl;
        private string _url1;
        private string _url2;
        private string _url11;
        private string _url21;
        private string _startPage;
        private string _page1;
        private string _page2;
        // Canned crawl results keyed by URL: start -> (page1, page2), page1 -> page11 (error),
        // page2 -> page21 (skip). Used to stub IHttpClient.LoadPageInfoAsync.
        private Dictionary<string, PageInfo> _results;

        /// <summary>
        /// Builds a small in-memory site graph and the expected PageInfo result for each URL.
        /// 'new' intentionally hides the base fixture's Setup; NUnit invokes this one via [SetUp].
        /// </summary>
        [SetUp]
        public new void Setup()
        {
            Data = new IntegrationTestData();
            _startUrl = "http://company.com/start";
            _url1 = "http://company.com/page1";
            _url2 = "http://company.com/page2";
            _url11 = "http://company.com/page11";
            _url21 = "http://company.com/page21";
            // Minimal HTML: each page is just anchor tags pointing at its children.
            _startPage = "<a href='" + _url1 + "'> <a href='" + _url2 + "'>";
            _page1 = "<a href='" + _url11 + "'>";
            _page2 = "<a href='" + _url21 + "'>";
            _results = new Dictionary<string, PageInfo>
            {
                { _startUrl, PageInfo.Success(_startUrl, _startPage) },
                { _url1, PageInfo.Success(_url1, _page1) },
                { _url2, PageInfo.Success(_url2, _page2) },
                // Leaf pages exercise the non-success result paths.
                { _url11, PageInfo.Error(_url11, new TimeoutException("test timeout")) },
                { _url21, PageInfo.Skip(_url21) },
            };
        }

        /// <summary>
        /// Smoke test: the crawler must be resolvable from the root Unity container
        /// using the loaded configuration alone.
        /// </summary>
        [Test]
        public void ShouldResolve_From_LoadedConfig()
        {
            _crawler = RootContainer.Resolve<IWebCrawler>();
            Assert.NotNull(_crawler);
        }

        /// <summary>
        /// Crawls a start page to depth 2 and writes successful URLs to the console.
        /// NOTE(review): this hits the live external URL http://microsoft.com and its only
        /// assertion is commented out, so the test cannot fail on crawl results — consider
        /// stubbing IHttpClient (as in Should_CreateFiles) and restoring an assertion.
        /// </summary>
        [Test]
        public void Should_Return_Urls_From_StartPage()
        {
            _crawler = RootContainer.InstanceOf<IWebCrawler>()
                .WithProperty("RecursionDepth", 2)
                .Resolve();

            foreach (var page in _crawler.StartFrom("http://microsoft.com"))
            {
                if (page.IsSuccess())
                    Console.WriteLine(page.Url + ": " + page.ResultType);
            }
            //Assert.IsTrue(_crawler.UrlRepository.Count > 0);
        }

        /// <summary>
        /// Full pipeline test: crawls the stubbed site graph, logs every visited URL
        /// through an IUrlLogger with a small FileSize so output rolls over into
        /// multiple files, then verifies the expected file count on disk.
        /// </summary>
        [Test]
        public void Should_CreateFiles()
        {
            // Register the stubbed HTTP client before resolving the crawler so it is injected.
            _httpClient = Data.Stub<IHttpClient>();
            RootContainer.RegisterInstance(_httpClient);
            _crawler = RootContainer.InstanceOf<IWebCrawler>()
                .WithProperty("RecursionDepth", 2)
                .Resolve();
            With.Mocks(Data.Mocks).Expecting(() =>
            {
                // Stub loading behavior: each known URL resolves to its canned PageInfo.
                // (The original re-looked-up each key via ContainsKey + indexer while
                // iterating the same dictionary — the fallback branch was unreachable.)
                _results.Foreach(kv =>
                    Expect.Call(_httpClient.LoadPageInfoAsync(kv.Key))
                        .Return(Task.Factory.StartNew(() => kv.Value)));
            }).Verify(() =>
            {
                // Unique directory per run so parallel/repeated runs cannot collide.
                var tempDir = "logdir_" + Guid.NewGuid();
                var logger = RootContainer.InstanceOf<IUrlLogger>()
                    .WithProperty("FileSize", 30)
                    .WithProperty("LogDirectory", tempDir)
                    .Resolve();
                logger.Start();
                foreach (var page in _crawler.StartFrom(_startUrl))
                {
                    logger.WriteLine(page.Url);
                }
                logger.Stop();

                string directory = Path.Combine(AppDomain.CurrentDomain.BaseDirectory, tempDir);
                // 5 URLs at FileSize 30 should roll into 5 files — TODO confirm against
                // IUrlLogger's rollover semantics.
                ExpectFilesInDirectory(directory, 5);
                Directory.Delete(directory, true);
            });
        }
    }
}