﻿using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;

namespace NewsMine.DomainObjects
{
    [Serializable]
    public class WebSiteInfo : BasicInfo
    {
        /// <summary>
        /// Initializes a new instance with every list property set to an empty
        /// list, so callers can add items without a null check. (Previously only
        /// <see cref="Tags"/> was initialized, leaving the other collections null.)
        /// </summary>
        public WebSiteInfo()
        {
            Tags = new List<string>();
            MenuItemconfigs = new List<WebSiteMenu>();
            FeedSources = new List<FeedSourceConfig>();
            Authors = new List<AuthorInfo>();
            HtmlScrapSourceConfigs = new List<HtmlScrapSourceConfig>();
        }

        /// <summary>
        /// Stores the primary domain of the website.
        /// </summary>
        public string Domain { get; set; }

        /// <summary>
        /// The parent domain's website info key. Sometimes a subdomain can take
        /// advantage of this to fill in missing info from its primary domain.
        /// </summary>
        public string ParentDomain { get; set; }

        /// <summary>
        /// Absolute URL of the website's home page.
        /// </summary>
        public string HomePageUrl { get; set; }

        /// <summary>
        /// Description of the website.
        /// </summary>
        public string Description { get; set; }

        /// <summary>
        /// Primary language of the website's content.
        /// </summary>
        public string Language { get; set; }

        // ---- Logo information: favicon plus big/small images identifying this website ----

        /// <summary>
        /// URI of the website's favicon.
        /// </summary>
        public string FaviconIconUri { get; set; }

        public string Logo1Uri { get; set; }
        public string Logo2Uri { get; set; }
        public string Logo3Uri { get; set; }
        public string Logo4Uri { get; set; }

        /// <summary>
        /// List of the menu items available on this website, e.g. Home, Business,
        /// Cricket, Sports, Technologies.
        /// </summary>
        public List<WebSiteMenu> MenuItemconfigs { get; set; }

        /// <summary>
        /// Feed source configurations (e.g. RSS/Atom endpoints) for this website.
        /// </summary>
        public List<FeedSourceConfig> FeedSources { get; set; }

        /// <summary>
        /// All possible categories in this website. This information will be used
        /// to get the list of websites for a specific category.
        /// </summary>
        public List<string> Tags { get; set; }

        #region Unused properties for later improvement

        /// <summary>
        /// Known authors publishing on this website.
        /// </summary>
        public List<AuthorInfo> Authors { get; set; }

        /// <summary>
        /// HTML scraping configurations for this website.
        /// </summary>
        public List<HtmlScrapSourceConfig> HtmlScrapSourceConfigs { get; set; }

        /// <summary>
        /// Determines whether we need to scrape HTML from this website or not.
        /// </summary>
        public bool IsHtmlScrapingEnabled { get; set; }

        /// <summary>
        /// Determines how frequently this site gets updated; used to schedule the
        /// crawlers. The value is maintained in minutes.
        /// </summary>
        public int FrequencyOfUpdates { get; set; }

        /// <summary>
        /// Timestamp (GMT) of the last visit by our crawlers.
        /// </summary>
        public DateTime LastCrawledTimeStampGMT { get; set; }

        /// <summary>
        /// Flags adult content on the website.
        /// </summary>
        public bool IsAdultContentSite { get; set; }

        /// <summary>
        /// Internal ranking of the website.
        /// </summary>
        public int Rank { get; set; }

        /// <summary>
        /// Alexa traffic rank of the website.
        /// </summary>
        public long AlexaRank { get; set; }

        #endregion
    }
}
