using System;
using System.Collections.Generic;
using System.Text;
using System.Text.RegularExpressions;

using Jeffrey.XFramework;
using Jeffrey.XFramework.XCrawl;

namespace Jeffrey.XProcess
{
    /// <summary>
    /// A <see cref="Filter"/> that accepts only HTTP URIs which are either
    /// well-formatted directory URIs or file pages that look like HTML.
    /// </summary>
    public class HttpFilter : Filter
    {
        /// <summary>Creates an HTTP filter.</summary>
        public HttpFilter(String name, String classname, String path)
            : base(name, classname, path) { }

        /// <summary>Creates an HTTP filter with a free-form description.</summary>
        public HttpFilter(String name, String description, String classname, String path)
            : base(name, description, classname, path) { }

        /// <summary>
        /// Accepts <paramref name="uri"/> when its scheme is "http" and it is
        /// either a directory URI (non-empty host, absolute URI ending in "/")
        /// or a file page whose absolute path matches
        /// <c>Extractor.HtmlFilePattern</c>.
        /// </summary>
        /// <param name="uri">The crawled URI to test.</param>
        /// <returns><c>true</c> when the URI passes the filter.</returns>
        protected override bool innerAccepts(CrawlUri uri)
        {
            // Compare the scheme case-insensitively so a non-canonical "HTTP"
            // is not rejected by accident; only plain HTTP is accepted here.
            if (!string.Equals(uri.Scheme, "http", StringComparison.OrdinalIgnoreCase))
                return false;

            /*
             * Directory case: URIs produced by CrawlUriManagement are assumed
             * to be well formatted, so a trailing "/" together with a
             * non-empty host marks a directory and is accepted outright.
             * (IsNullOrEmpty also guards against a null Host — TODO confirm
             * whether CrawlUri can ever yield one.)
             */
            if (!string.IsNullOrEmpty(uri.Host) &&
                uri.AbsoluteUri.EndsWith("/", StringComparison.Ordinal))
                return true;

            // File-page case: accept only paths matching the HTML-file pattern.
            // Static Regex.IsMatch avoids allocating Regex/Match objects per
            // call. Notice: use AbsolutePath (not AbsoluteUri) so the query
            // string and fragment do not interfere with the extension match.
            return Regex.IsMatch(uri.AbsolutePath, Extractor.HtmlFilePattern,
                                 RegexOptions.IgnoreCase);
        }
    }
}
