﻿using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using NetSpider.Policies;
using Xunit;

namespace NetSpider.Tests
{
    public class RobotsTxtParserFixture
    {
        [Fact]
        public void Should_parse_a_simple_robots_txt_string()
        {
            // Arrange: a parser plus the canonical example robots.txt for www.example.com,
            // retrieved at a fixed timestamp so the test is deterministic.
            RobotsTxtParser parser = new RobotsTxtParser();
            DateTime retrievedAt = new DateTime(2008, 8, 30, 14, 3, 09);

            // Act
            RobotPolicy policy = parser.ParsePolicy("www.example.com", retrievedAt, CreateExample1());

            // Assert: the policy is bound to the right domain and the wildcard
            // "Disallow: /foo.html" rule blocks the matching URL.
            const string userAgent = "NetSpider/0.1";
            Assert.Equal("www.example.com", policy.DomainName);
            Assert.Equal(PolicyDecision.VisitProhibited, policy.MakeDecision(userAgent, "http://www.example.com/foo.html"));
        }

        // Builds a simple robots.txt body: one wildcard user-agent record with
        // three disallowed paths, each line terminated like StringBuilder.AppendLine.
        private string CreateExample1()
        {
            string[] lines =
            {
                "# robots.txt for http://www.example.com/",
                string.Empty,
                "User-agent: *",
                "Disallow: /cyberworld/map/ # This is an infinite virtual URL space",
                "Disallow: /tmp/ # these will soon disappear",
                "Disallow: /foo.html",
            };

            StringBuilder builder = new StringBuilder();
            foreach (string line in lines)
            {
                builder.AppendLine(line);
            }

            return builder.ToString();
        }
    }
}
