using System;
using System.Collections.Generic;
using System.Xml.Linq;
using System.Linq;
using _404park.Core.Crawler;

namespace _404park.ApplicationServices.Crawler.Parser.Provider
{
	/// <summary>
	/// Parses the Huffington Post "Featured Posts" Atom feed into <see cref="Feed"/> items.
	/// </summary>
	public class HuffingtonPostParser : BaseFeedParser
	{
		/// <summary>URL of the Huffington Post featured-posts feed endpoint.</summary>
		public override string BaseUrl { get { return "http://feeds.huffingtonpost.com/FeaturedPosts"; } }

		/// <summary>
		/// Parses the raw XML feed response into a list of feed items.
		/// Returns an empty list when the response is null/blank or has no root element.
		/// </summary>
		/// <param name="responseData">Raw Atom XML returned by the feed endpoint.</param>
		/// <returns>One <see cref="Feed"/> per Atom &lt;entry&gt; element; never null.</returns>
		internal protected override IList<Feed> ParseFeed(string responseData)
		{
			var feedList = new List<Feed>();

			// XDocument.Parse throws on null/empty input; treat that as "no items".
			if (string.IsNullOrWhiteSpace(responseData))
			{
				return feedList;
			}

			var xmlDoc = XDocument.Parse(responseData);
			var root = xmlDoc.Root;
			if (root == null)
			{
				return feedList;
			}

			// Resolve the namespace only after the null check — the original code
			// dereferenced Root before guarding it, making the guard unreachable.
			var xmlns = root.GetDefaultNamespace();
			foreach (var entry in root.Descendants(xmlns + "entry"))
			{
				feedList.Add(CreateFeedItem(entry));
			}
			return feedList;
		}

		/// <summary>
		/// Maps a single Atom &lt;entry&gt; element to a <see cref="Feed"/>.
		/// Missing title/summary/link data defaults to an empty string.
		/// </summary>
		private Feed CreateFeedItem(XElement xElement)
		{
			var xmlns = xElement.GetDefaultNamespace();

			// The explicit (string) cast on XElement yields null for a missing
			// element, so ?? gives us the empty-string default without double lookups.
			var title = (string)xElement.Element(xmlns + "title") ?? string.Empty;
			var description = (string)xElement.Element(xmlns + "summary") ?? string.Empty;

			// Atom carries the URL in the link's href attribute; both the element
			// and the attribute may be absent, so guard each dereference.
			var linkElement = xElement.Element(xmlns + "link");
			var hrefAttribute = linkElement != null ? linkElement.Attribute("href") : null;
			var clickUrl = hrefAttribute != null ? hrefAttribute.Value : string.Empty;

			return new Feed
			{
				Title = title,
				Description = description,
				ClickUrl = clickUrl,
				DisplayUrl = string.Empty
			};
		}
	}
}

