﻿using NUnit.Framework;

namespace WebCrawler.Tests
{
    /// <summary>
    /// Unit tests for <c>UrlParser</c>: validation of absolute http/https URLs
    /// and extraction of href targets from anchor markup.
    /// </summary>
    public class UrlParserTests
    {
        public string Url1 = "http://test.com/1";
        public string Url2 = "http://test.com/2";
        public string Text1;
        public string Text2;

        [SetUp] public void Setup()
        {
            // Fixture markup: Text1 holds a single anchor, Text2 holds two,
            // so ordering and multiplicity of matches can both be asserted.
            Text1 = "<a href='" + Url1 + "' />";
            Text2 = "<a href='" + Url1 + "' /> <a href='" + Url2 + "' />";
        }

        [Test]
        public void ShouldAccept_StandardUrls()
        {
            // Progressively richer shapes: bare host, www host, trailing slash,
            // path, file, empty query, empty/filled parameters, multiple params.
            Valid("http://company.com");
            Valid("http://www.company.com");
            Valid("http://company.com/");
            Valid("http://company.com/path");
            Valid("https://company.com/path/");
            Valid("https://company.com/path/file.txt");
            Valid("https://company.com/path/file.aspx?");
            Valid("https://company.com/path/file.aspx?param=");
            Valid("https://company.com/path/file.aspx?param=1");
            Valid("https://company.com/path/file.aspx?param=1&param2=2&param2=test");

            // Host without a top-level domain must be rejected.
            Invalid("http://company");
        }

        [Test]
        public void ShouldMatch_StandardUrls()
        {
            // Matches must come back in document order, one per anchor.
            Assert.That(UrlParser.MatchUrls(Text1), Is.EqualTo(new[] { Url1 }));
            Assert.That(UrlParser.MatchUrls(Text2), Is.EqualTo(new[] { Url1, Url2 }));
        }

        // Asserts that <paramref name="url"/> is accepted by the parser.
        private void Valid(string url)
        {
            var valid = UrlParser.IsValidUrl(url);
            Assert.That(valid, Is.True, $"should be valid url: {url}");
        }

        // Asserts that <paramref name="url"/> is rejected by the parser.
        private void Invalid(string url)
        {
            var valid = UrlParser.IsValidUrl(url);
            Assert.That(valid, Is.False, $"should NOT be valid url: {url}");
        }
    }
}