﻿using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using NewsMine.DomainObjects;

namespace NewsDiscoveryEngine.Classification
{
    /// <summary>
    /// Coordinates the feed-item classification process by running each
    /// registered classifier, in order, against incoming feed items.
    /// </summary>
    public static class ClassificationManager
    {
        // Classifiers applied in registration order to every feed item.
        // (Renamed from the original misspelled "classifers".)
        static readonly List<IClassifier> classifiers = new List<IClassifier>();

        static ClassificationManager()
        {
            classifiers.Add(new KeywordBasedClassifier());

            // Disabled classifiers — re-enable by uncommenting:
            //classifiers.Add(new UrlBasedTagsDetectionClassifier());
            //classifiers.Add(new LanguageBasedClassifier());
            //classifiers.Add(new RulesBasedClassifier());
            //classifiers.Add(new GeoClassifier());
            //classifiers.Add(new WikiAwareTagsClassifier());
        }

        /// <summary>
        /// Runs the feed item through every registered classifier in
        /// registration order; each classifier may enrich or replace the item.
        /// </summary>
        /// <param name="feedItem">The feed item to classify. Must not be null.</param>
        /// <returns>
        /// The classified feed item, or the original item unchanged when no
        /// classifiers are registered.
        /// </returns>
        /// <exception cref="ArgumentNullException">
        /// Thrown when <paramref name="feedItem"/> is null.
        /// </exception>
        public static FeedItem ClassifyFeedItem(FeedItem feedItem)
        {
            // TODO — classification heuristics still to be implemented:
            //  * if refUrl and the domain home url have the same length,
            //    tag the item as "headlines"
            //  * if refUrl contains only the domain (no further path segment),
            //    tag the item as "headlines"
            //  * if refUrl's domain or subdomain encodes a language,
            //    pick up the language tag
            //  * pick up the menu item and assign its text to the tags
            //  * fetch the WebSiteInfo object from the store and apply any
            //    special tags or cleansing configuration it carries

            // Fail fast at the public boundary instead of letting a null
            // propagate into a classifier and surface as a NullReferenceException.
            if (feedItem == null)
            {
                throw new ArgumentNullException(nameof(feedItem));
            }

            // The field is readonly and initialized inline, so it can never be
            // null; only the empty case needs handling.
            if (classifiers.Count == 0)
            {
                return feedItem;
            }

            foreach (var classifier in classifiers)
            {
                feedItem = classifier.Classify(feedItem);
            }

            return feedItem;
        }
    }
}
