﻿using System.Diagnostics;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using RobotRules;
using System;
using System.IO;
using RobotRules.Cache;

namespace RobotTest
{
    /// <summary>
    /// Tests for <see cref="RobotsFileParser"/>: robots.txt allow/deny evaluation,
    /// robots meta-tag policy extraction, and the <see cref="MemoryCache"/>
    /// implementation of <see cref="ICache"/>.
    /// </summary>
    [TestClass]
    public class UnitTest1
    {
        // User-agent string the parser identifies itself with when fetching robots.txt.
        private const string UserAgent = @"Mozilla/5.0 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)";

        /// <summary>
        /// Parses a live robots.txt from robotstxt.org and verifies Googlebot is allowed.
        /// NOTE(review): this test depends on network access and the remote site's
        /// current robots.txt content — consider replacing with a local fixture.
        /// </summary>
        [TestMethod]
        public void TestInitialization()
        {
            // Arrange
            var parser = new RobotsFileParser()
            {
                LocalUserAgent = UserAgent,
                Options = ParseOptions.All
            };

            // Act
            parser.Parse(new Uri("http://www.robotstxt.org/robotstxt.html"));

            // Assert
            Assert.IsTrue(parser.IsAllowed("Googlebot", new Uri("http://www.robotstxt.org/robotstxt.html")), "Allow OK");
        }

        /// <summary>
        /// An allow-all robots.txt fixture must permit Googlebot to crawl the URL.
        /// </summary>
        [TestMethod]
        public void TestValidUserAgent()
        {
            // Arrange
            var parser = new RobotsFileParser()
            {
                LocalUserAgent = UserAgent
            };

            // Act
            parser.Parse(new FileInfo("Assets/robotAllowAll.txt"), new Uri("http://www.robotstxt.org/robotstxt.html"));

            // Assert
            Assert.IsTrue(parser.IsAllowed("Googlebot", new Uri("http://www.robotstxt.org/robotstxt.html")), "Allow OK");
        }

        /// <summary>
        /// A deny-all robots.txt fixture must NOT permit Googlebot to crawl the URL.
        /// </summary>
        [TestMethod]
        public void TestInvalidValidUserAgent()
        {
            // Arrange
            var parser = new RobotsFileParser()
            {
                LocalUserAgent = UserAgent
            };

            // Act
            parser.Parse(new FileInfo("Assets/robotDenyAll.txt"), new Uri("http://www.robotstxt.org/robotstxt.html"));

            // Assert: the fixture denies everything, so access must be refused.
            // (Was Assert.IsTrue, which contradicted the deny-all fixture and the
            // "not Allow OK" failure message.)
            Assert.IsFalse(parser.IsAllowed("Googlebot", new Uri("http://www.robotstxt.org/robotstxt.html")), "not Allow OK");
        }

        /// <summary>
        /// A page with no restrictive robots meta tag may be both indexed and followed.
        /// </summary>
        [TestMethod]
        public void TestValidUserAgentMetaTag()
        {
            // Arrange
            var parser = new RobotsFileParser()
            {
                LocalUserAgent = UserAgent
            };
            parser.Parse(new FileInfo("Assets/robotAllowAll.txt"), new Uri("http://www.robotstxt.org/robotstxt.html"));

            // Act
            var policy = parser.CheckRobotControlStrategy("Googlebot", File.ReadAllText("Assets/AllowAll.html"));

            // Assert
            Assert.IsTrue(policy.CanFollow && policy.CanIndex, "The file can be index and links follow");
        }

        /// <summary>
        /// A page carrying a "noindex" robots meta tag must not be indexable.
        /// </summary>
        [TestMethod]
        public void TestDenyIndex()
        {
            // Arrange
            var parser = new RobotsFileParser()
            {
                LocalUserAgent = UserAgent
            };

            // Act
            var policy = parser.CheckRobotControlStrategy("Googlebot", File.ReadAllText("Assets/DenyIndex.html"));

            // Assert
            Assert.IsFalse(policy.CanIndex, "The file cannot be index");
        }

        /// <summary>
        /// A page carrying a "nofollow" robots meta tag must not allow link following.
        /// </summary>
        [TestMethod]
        public void TestDenyFollow()
        {
            // Arrange
            var parser = new RobotsFileParser()
            {
                LocalUserAgent = UserAgent
            };

            // Act
            var policy = parser.CheckRobotControlStrategy("Googlebot", File.ReadAllText("Assets/DenyFollow.html"));

            // Assert (message fixed: previous copy-paste said "cannot be index").
            Assert.IsFalse(policy.CanFollow, "The links cannot be followed");
        }

        /// <summary>
        /// Adding an entry to the cache makes it discoverable via Exists.
        /// </summary>
        [TestMethod]
        public void TestAddItemToCache()
        {
            // Arrange
            var key = new Uri("http://wwww.free.fr/robots.txt");
            ICache cache = new MemoryCache(TimeSpan.FromSeconds(10));

            // Act
            cache.Add(key, new[] { "Line1", "Line2" });

            // Assert
            Assert.IsTrue(cache.Exists(key));
        }

        /// <summary>
        /// An added entry can be retrieved via GetFile.
        /// </summary>
        [TestMethod]
        public void TestGetItemFromCache()
        {
            // Arrange
            var key = new Uri("http://wwww.free.fr/robots.txt");
            ICache cache = new MemoryCache(TimeSpan.FromSeconds(10));

            // Act
            cache.Add(key, new[] { "Line1", "Line2" });

            // Assert
            Assert.IsNotNull(cache.GetFile(key));
        }

        /// <summary>
        /// Updating an existing entry replaces its content.
        /// </summary>
        [TestMethod]
        public void TestUpdateItemFromCache()
        {
            // Arrange
            var key = new Uri("http://wwww.free.fr/robots.txt");
            ICache cache = new MemoryCache(TimeSpan.FromSeconds(10));
            cache.Add(key, new[] { "Line1", "Line2" });

            if (cache.Exists(key))
            {
                // Act
                cache.Update(key, new[] { "Line1", "Line2", "Line3" });

                // Assert: the updated content (3 lines) is what GetFile now returns.
                var file = cache.GetFile(key);
                Assert.IsNotNull(file);
                Assert.AreEqual(3, file.Length);
            }
            else
            {
                Assert.Fail("Unable to find the item into the cache");
            }
        }

        /// <summary>
        /// Deleting an entry makes GetFile return null for its key.
        /// </summary>
        [TestMethod]
        public void TestDeleteItemFromCache()
        {
            // Arrange
            var key = new Uri("http://wwww.free.fr/robots.txt");
            ICache cache = new MemoryCache(TimeSpan.FromSeconds(10));
            cache.Add(key, new[] { "Line1", "Line2" });

            // Act
            cache.Delete(key);

            // Assert
            Assert.IsNull(cache.GetFile(key));
        }
    }
}
