﻿using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using NewsMine.DomainObjects;
using System.Text.RegularExpressions;

namespace NewsDiscoveryEngine.Cleansing
{
    /// <summary>
    /// Makes sure all the URLs are properly cleaned before any task processes them.
    /// Some websites simply append extra query strings to track references,
    /// so we can maintain the cleansing configuration both globally and per website.
    /// </summary>
    public class UrlCleanser : ICleanser
    {
        //todo: Remove the unnecessary links from the list. like more, click, vivaralu(telugu)....

        // Compiled once per process. Matching runs for every discovered link,
        // so rebuilding the pattern array and re-compiling a Regex on each call
        // (as the previous implementation did) is wasteful.
        private static readonly Regex[] InvalidUrlRegexes =
            GetInvalidUrlPatternsToBeRemoved()
                .Select(pattern => new Regex(pattern, RegexOptions.Compiled))
                .ToArray();

        /// <summary>
        /// Filters the given links, keeping only those considered valid.
        /// </summary>
        /// <param name="htmlLinks">The links to cleanse; must not be null.</param>
        /// <returns>A new list containing only the valid links.</returns>
        /// <exception cref="ArgumentNullException">Thrown when <paramref name="htmlLinks"/> is null.</exception>
        public List<HtmlLink> Cleanse(List<HtmlLink> htmlLinks)
        {
            if (htmlLinks == null)
                throw new ArgumentNullException(nameof(htmlLinks));

            return htmlLinks.Where(IsValidHtmlLink).ToList();
        }

        /// <summary>
        /// Determines whether a link's URL matches none of the invalid-URL patterns.
        /// A null link, or a link with a null URL, is treated as invalid rather than throwing.
        /// </summary>
        /// <param name="htmlLink">The link to validate.</param>
        /// <returns>True when the link is non-null and matches no invalid pattern.</returns>
        public bool IsValidHtmlLink(HtmlLink htmlLink)
        {
            //todo: Scenarios to clean the url.
            // #comments....
            if (htmlLink == null || htmlLink.Link == null)
                return false;

            // Valid only when no invalid pattern matches the URL.
            return !InvalidUrlRegexes.Any(regex => regex.IsMatch(htmlLink.Link));
        }

        private static string[] GetInvalidUrlPatternsToBeRemoved()
        {
            //todo: These patterns can be read from the file;
            // ".+[#]+.+" rejects URLs containing a '#' with text on both sides,
            // i.e. in-page fragment/anchor links.
            return new string[] { ".+[#]+.+" };
        }
    }
}
