﻿using System;
using System.Collections.Generic;
using System.IO;
using System.Net;
using System.Net.Cache;
using System.Xml.Linq;
using Eneta.Portal.Common;
using Eneta.Portal.Common.Helpers;
using Microsoft.SharePoint.Administration;
using Microsoft.SharePoint;
using System.Xml;
using System.ServiceModel.Syndication;
using System.Text.RegularExpressions;
using System.Web;
using System.Linq;
using Microsoft.SharePoint.Publishing;

namespace Eneta.Portal.FeedAggregator
{
    [CLSCompliant(false)]
    public class FeedAggregatorTimerJob : SPJobDefinition
    {
        private const int FeedItemContentMaxLength = 400;

        // Job settings persisted as a child object of the web application,
        // keyed by Globals.FeedAggregatorJobSettingsId.
        private FeedAggregatorJobSettings Settings
        {
            get { return WebApplication.GetChild<FeedAggregatorJobSettings>(Globals.FeedAggregatorJobSettingsId); }
        }

        /// <summary>
        /// Default constructor required by the SharePoint timer job
        /// infrastructure for deserialization.
        /// </summary>
        public FeedAggregatorTimerJob()
        {
        }

        /// <summary>
        /// Creates the timer job on the given web application with a
        /// content-database lock (one concurrent run per content database).
        /// </summary>
        /// <param name="jobName">Name of the job; also used as its display title.</param>
        /// <param name="webApplication">Web application the job is attached to.</param>
        public FeedAggregatorTimerJob(string jobName, SPWebApplication webApplication)
            : base(jobName, webApplication, null, SPJobLockType.ContentDatabase)
        {
            Title = jobName;
        }

        /// <summary>
        /// Timer job entry point: loads aggregator definitions from the root web's
        /// "Agreggator" list, re-downloads each aggregator's feeds and then rebuilds
        /// the feed-items cache (publishing pages) for the new items.
        /// </summary>
        /// <param name="targetInstanceId">Content database id the job runs against (unused here).</param>
        public override void Execute(Guid targetInstanceId)
        {
            SPWeb currentWeb;

            try
            {
                currentWeb = GetCurrentWeb();
            }
            catch (Exception ex)
            {
                Logger.LogInfo("Unable to find current web", "FeedAggregatorTimerJob");
                Logger.LogException(ex);
                return;
            }

            // BUG FIX: the web was previously only disposed on the success path,
            // leaking the SPWeb whenever any later step threw.
            try
            {
                Logger.LogInfo("Current web: " + currentWeb.Url, "FeedAggregatorTimerJob");

                //TODO: Check if lists exists and throw proper exception if not

                var aggregators = new List<Aggregator>();
                SPList aggregatorsList;
                try
                {
                    // NOTE: "Agreggator" is the actual (misspelled) title of the list
                    // in the site - do not "correct" the spelling here.
                    aggregatorsList = currentWeb.Site.RootWeb.Lists["Agreggator"];
                }
                catch (Exception ex)
                {
                    Logger.LogInfo("EX(Execute:Find aggregator list) ", "FeedAggregatorTimerJob");
                    Logger.LogException(ex);
                    throw new Exception("AggregatorList not found!", ex);
                }

                Logger.LogInfo("Getting aggregators list", "FeedAggregatorTimerJob");
                foreach (SPListItem item in aggregatorsList.Items)
                {
                    try
                    {
                        var aggregator = GetAggregator(item);
                        // BUG FIX: GetAggregator returns null when its feeds query finds
                        // nothing; adding null would crash UpdateAggregatorFeedItems below.
                        if (aggregator != null)
                            aggregators.Add(aggregator);
                    }
                    catch (Exception ex)
                    {
                        Logger.LogWarning("Execute(): Error adding aggregator", "FeedAggregatorTimerJob");
                        Logger.LogException(ex);
                    }
                }
                Logger.LogInfo("Aggregators received", "FeedAggregatorTimerJob");

                Logger.LogInfo("Updating aggregators cache", "FeedAggregatorTimerJob");
                Logger.LogInfo("Aggregators found: " + aggregators.Count, "FeedAggregatorTimerJob");
                // Update aggregators cache; one failing aggregator must not stop the rest.
                foreach (var aggr in aggregators)
                {
                    try
                    {
                        Logger.LogInfo("Updating: " + aggr.Name, "FeedAggregatorTimerJob");
                        UpdateAggregatorFeedItems(aggr);
                        Logger.LogInfo("Updating succeeded", "FeedAggregatorTimerJob");
                    }
                    catch (Exception ex)
                    {
                        Logger.LogInfo("Updating failed", "FeedAggregatorTimerJob");
                        Logger.LogException(ex);
                    }
                }
                Logger.LogInfo("Aggregators cache updated", "FeedAggregatorTimerJob");

                // Reload items cache list (creates publishing pages for new items).
                Logger.LogInfo("Reloading aggregators items cache", "FeedAggregatorTimerJob");
                aggregators.ForEach(a => UpdateListCache(currentWeb, a));
                Logger.LogInfo("Aggregators items cache reloaded", "FeedAggregatorTimerJob");
            }
            finally
            {
                // SPWeb instances obtained through AllWebs must be disposed by the caller.
                currentWeb.Dispose();
            }
        }

        /// <summary>
        /// Persists an aggregator's downloaded feed items: for every item not yet
        /// present in the feed-items cache list (matched by URL) a new publishing
        /// page is created from the BlogEntry layout, checked in and published.
        /// </summary>
        /// <param name="currentWeb">Web hosting the cache list and the "Lehed" pages list.</param>
        /// <param name="aggregator">Aggregator whose downloaded items should be stored.</param>
        private void UpdateListCache(SPWeb currentWeb, Aggregator aggregator)
        {
            SPList cacheList;

            try
            {
                cacheList = currentWeb.Lists[Settings.FeedItemsCacheListGuid];
            }
            catch (Exception)
            {
                Logger.LogInfo("Cannot find cache list: " + Settings.FeedItemsCacheListGuid, "FeedAggregatorTimerJob");
                throw;
            }

            var publishingWeb = PublishingWeb.GetPublishingWeb(currentWeb);
            var ct = currentWeb.AvailableContentTypes["BlogEntry"];

            // The first page layout available for the BlogEntry content type is
            // used for every page generated below.
            var layouts = publishingWeb.GetAvailablePageLayouts(ct.Id);
            var blogEntryLayout = layouts[0];
            var pages = publishingWeb.GetPublishingPages();
            var pageList = currentWeb.Lists["Lehed"];

            foreach (var feedItems in aggregator.AggregatedFeedItems.GroupBy(f => f.BlogId))
            {
                Logger.LogWarning("FeedItems: " + feedItems.Key + ", " + feedItems.Count(), "");

                // Add new items
                foreach (var item in feedItems)
                {
                    var queryString = "<Where><Eq><FieldRef Name='URL'/><Value Type='URL'>{0}</Value></Eq></Where>";
                    queryString = string.Format(queryString, item.Url.OriginalString);

                    Logger.LogInfo("UpdateListCache(): executing query: " + queryString, GetType().ToString());
                    Logger.LogInfo("Original url: " + item.Url.OriginalString, "");
                    Logger.LogInfo("Absolute url: " + item.Url.AbsoluteUri, "");

                    var query = new SPQuery { Query = queryString };

                    var items = cacheList.GetItems(query);
                    if (items == null)
                    {
                        Logger.LogInfo("UpdateListCache(): items is null", GetType().ToString());
                        continue;
                    }
                    Logger.LogInfo("UpdateListCache(): items found: " + items.Count, GetType().ToString());
                    if (items.Count > 0)
                    {
                        // Item with this URL is already cached - nothing to do.
                        continue;
                    }

                    var pageName = GetPageFileName(item.Title);
                    var pageUrl = pageList.RootFolder.ServerRelativeUrl + "/" + pageName;
                    var counter = 1;

                    // Probe for a free file name: title.aspx, title-1.aspx, title-2.aspx, ...
                    // (structured loop replacing the previous goto-based retry).
                    while (true)
                    {
                        var page = currentWeb.GetFile(pageUrl);
                        if (page == null || !page.Exists)
                            break;

                        pageUrl = counter == 1
                                      ? pageUrl.Replace(".aspx", "-1.aspx")
                                      : pageUrl.Replace("-" + (counter - 1) + ".aspx", "-" + counter + ".aspx");
                        counter++;
                    }

                    var publishingPage = pages.Add(pageUrl, blogEntryLayout);

                    var pubItem = publishingPage.ListItem;

                    pubItem["ContentTypeId"] = currentWeb.Site.RootWeb.ContentTypes["BlogEntry"].Id;
                    pubItem["URL"] = new SPFieldUrlValue { Url = item.Url.OriginalString };
                    pubItem["PublishDate"] = item.PublishDate;
                    pubItem["PublishingPageContent"] = item.Description;
                    pubItem["Title"] = item.Title;
                    pubItem["Blog"] = new SPFieldLookupValue { LookupId = item.BlogId };
                    pubItem.Update();

                    // Check in and publish so the page becomes visible to readers.
                    pubItem.File.CheckIn(string.Empty, SPCheckinType.MajorCheckIn);
                    pubItem.File.Update();
                    pubItem.File.Publish(string.Empty);
                }
            }
        }

        /// <summary>
        /// Returns the web (named in job settings) that the aggregator lists live on.
        /// The caller owns the returned SPWeb and is responsible for disposing it.
        /// </summary>
        /// <returns>The configured aggregator web of the web application's first site.</returns>
        private SPWeb GetCurrentWeb()
        {
            var webApplication = Parent as SPWebApplication;
            // BUG FIX: a failed cast used to surface as a NullReferenceException on
            // the next line; fail with an explicit, descriptive exception instead.
            if (webApplication == null)
                throw new InvalidOperationException("Timer job parent is not an SPWebApplication.");

            // NOTE(review): the SPSite from Sites[0] is intentionally not disposed
            // here - the returned SPWeb depends on it staying open. Confirm the
            // caller's Dispose() of the web is sufficient cleanup.
            var site = webApplication.Sites[0];
            return site.AllWebs[Settings.AggregatorListWeb];
        }

        /// <summary>
        /// Builds an Aggregator object (including feed definitions) from SPList values.
        /// Feeds come from the "Lehed" list of the "uudised/blogid" web: only approved
        /// items of content type "Blog" that carry an RssUrl are added.
        /// </summary>
        /// <param name="listItem">Aggregator definition list item.</param>
        /// <returns>The populated aggregator, or null when the feeds query returned nothing.</returns>
        private Aggregator GetAggregator(SPListItem listItem)
        {
            var aggregator = new Aggregator(listItem.Title) { UniqueId = listItem.UniqueId, ID = listItem.ID };
            Logger.LogInfo("(GetAggregator) aggregatorName: " + listItem.Title, "FeedAggregatorTimerJob");

            SPList feedsList;
            try
            {
                feedsList = listItem.Web.Site.AllWebs["uudised/blogid"].Lists["Lehed"];
            }
            catch (Exception ex)
            {
                Logger.LogInfo("(GetAggregator) Cannot find feedsList", "FeedAggregatorTimerJob");
                Logger.LogInfo("EX(GetAggregator:feedsList) " + ex, "FeedAggregatorTimerJob");
                throw;
            }

            var query = new SPQuery();
            // BUG FIX: the CAML previously contained a stray </And> closing tag with
            // no matching <And>, which made the query malformed and caused GetItems
            // to fail on every run.
            query.Query = @"
<Where>
    <Eq>
        <FieldRef Name='ContentType' />
        <Value Type='Text'>Blog</Value>
    </Eq>
</Where>";

            Logger.LogInfo("GetAggregator(): Executing query: " + query.Query, "FeedAggregatorTimerJob");

            SPListItemCollection items;

            try
            {
                items = feedsList.GetItems(query);
            }
            catch (Exception ex)
            {
                Logger.LogInfo("GetAggregator(): Error executing query", "FeedAggregatorTimerJob");
                Logger.LogException(ex);
                throw;
            }

            if (items == null)
            {
                Logger.LogInfo("GetAggregator(): items==null", "FeedAggregatorTimerJob");
                return null;
            }

            if (items.Count == 0)
            {
                Logger.LogInfo("GetAggregator(): items.Count = 0", "FeedAggregatorTimerJob");
                return null;
            }

            foreach (SPListItem item in items)
            {
                try
                {
                    if (item["RssUrl"] == null)
                    {
                        Logger.LogWarning("GetAggregator: RssUrl is null " + item.Url, GetType().ToString());
                        continue;
                    }
                    if (item.ModerationInformation == null)
                    {
                        // Without moderation info we cannot distinguish approved from
                        // pending items; original behavior stops processing entirely.
                        Logger.LogWarning("GetAggregator(): Cannot find moderation information", GetType().ToString());
                        break;
                    }
                    if (item.ModerationInformation.Status != SPModerationStatusType.Approved)
                        continue;
                }
                catch (Exception ex)
                {
                    Logger.LogWarning("GetAggregator(): Error when getting feed source for item: " + item.ID,
                                      GetType().ToString());
                    Logger.LogException(ex);
                    continue;
                }

                Logger.LogInfo("GetAggregator(): adding feed: " + item.Title + " " + item["RssUrl"], "FeedAggregatorTimerJob");
                try
                {
                    var urlValue = new SPFieldUrlValue(item["RssUrl"].ToString());
                    aggregator.AddFeed(new Feed
                                           {
                                               RssUrl = new Uri(urlValue.Url),
                                               BlogId = item.ID
                                           });
                }
                catch (Exception ex)
                {
                    Logger.LogWarning("GetAggregator(): Error adding feed", "FeedAggregatorTimerJob");
                    Logger.LogException(ex);
                }
            }
            return aggregator;
        }

        /// <summary>
        /// Rebuilds the aggregator's item cache: clears the currently held items,
        /// downloads every feed again and stores the combined result sorted by
        /// publish date with the newest item first. Failures of individual feeds
        /// or items are logged and skipped.
        /// </summary>
        /// <param name="aggregator">Aggregator whose feeds should be reloaded.</param>
        private void UpdateAggregatorFeedItems(Aggregator aggregator)
        {
            Logger.LogInfo("Update feed items: " + aggregator.Name, "FeedAggregatorTimerJob");
            var collected = new List<FeedItem>();

            // Drop the previously cached items before downloading fresh ones.
            Logger.LogInfo("Clearing aggregated feed items", "FeedAggregatorTimerJob");
            aggregator.AggregatedFeedItems.Clear();
            Logger.LogInfo("Aggregated feed items removed", "FeedAggregatorTimerJob");

            // Download each feed; a broken feed only loses its own items.
            Logger.LogInfo("Reloading new items", "FeedAggregatorTimerJob");
            foreach (var feed in aggregator.Feeds)
            {
                Logger.LogInfo("Feed: " + feed.RssUrl, "FeedAggregatorTimerJob");
                try
                {
                    var downloaded = BuildItemsForFeed(feed);
                    if (downloaded == null)
                    {
                        Logger.LogWarning("Items are null!", "");
                    }
                    else
                    {
                        Logger.LogInfo("Item count in feed: " + downloaded.Count, "");
                        collected.AddRange(downloaded);
                    }
                }
                catch (Exception ex)
                {
                    Logger.LogException(ex);
                }
            }
            Logger.LogInfo("New items reloaded", "FeedAggregatorTimerJob");

            // Move the combined result into the aggregator, item by item.
            Logger.LogInfo("Adding feed items", "FeedAggregatorTimerJob");
            foreach (var feedItem in collected)
            {
                try
                {
                    Logger.LogInfo("Adding feed item: " + feedItem.Title, "FeedAggregatorTimerJob");
                    aggregator.AggregatedFeedItems.Add(feedItem);
                    Logger.LogInfo("Item added", "FeedAggregatorTimerJob");
                }
                catch (Exception ex)
                {
                    Logger.LogInfo("Item not added", "FeedAggregatorTimerJob");
                    Logger.LogException(ex);
                }
            }
            Logger.LogInfo("Feed items added", "FeedAggregatorTimerJob");

            Logger.LogInfo("Sorting items", "FeedAggregatorTimerJob");
            try
            {
                // Ascending sort by publish date, then reverse => newest first.
                aggregator.AggregatedFeedItems.Sort((left, right) => left.PublishDate.CompareTo(right.PublishDate));
                aggregator.AggregatedFeedItems.Reverse();
                Logger.LogInfo("Items sorted", "FeedAggregatorTimerJob");
            }
            catch (Exception ex)
            {
                Logger.LogInfo("Error sorting items", "FeedAggregatorTimerJob");
                Logger.LogException(ex);
            }
            Logger.LogInfo("Feed items updated", "FeedAggregatorTimerJob");
        }

        /// <summary>
        /// Downloads the feed's URL and converts the RSS/ATOM entries into
        /// FeedItem objects tagged with the feed's blog id.
        /// </summary>
        /// <param name="feed">Feed definition (RSS URL plus owning blog id).</param>
        /// <returns>Parsed items, or null when the download failed (see LoadFeed).</returns>
        private IList<FeedItem> BuildItemsForFeed(Feed feed)
        {
            return LoadFeed(feed.RssUrl.AbsoluteUri, feed.BlogId);
        }

        /// <summary>
        /// Downloads and parses an RSS or ATOM feed into FeedItem objects. The
        /// feed type is detected from the response Content-Type header and
        /// defaults to RSS. Returns null when the download fails; parse errors
        /// inside the item loops propagate to the caller.
        /// </summary>
        /// <param name="feed">Absolute URL of the feed.</param>
        /// <param name="blogId">List item id of the blog the items belong to.</param>
        /// <returns>Parsed items, or null on download error.</returns>
        private static IList<FeedItem> LoadFeed(string feed, int blogId)
        {
            XElement rssFeed = null;
            var feedType = "RSS";

            try
            {
                // Disable caching so a refreshed feed is always fetched.
                HttpWebRequest.DefaultCachePolicy = new HttpRequestCachePolicy(HttpRequestCacheLevel.NoCacheNoStore);
                WebRequest.DefaultCachePolicy = new RequestCachePolicy(RequestCacheLevel.NoCacheNoStore);

                var request = (HttpWebRequest)WebRequest.Create(feed);
                request.Timeout = 5000;
                request.CachePolicy = new HttpRequestCachePolicy(HttpRequestCacheLevel.NoCacheNoStore);

                // BUG FIX: GetResponse() was previously called twice and the
                // WebResponse was never disposed; obtain it once and dispose it.
                using (var response = request.GetResponse())
                using (var stream = response.GetResponseStream())
                using (var reader = new StreamReader(stream))
                {
                    if (response.ContentType.ToLower().StartsWith("application/atom+xml"))
                        feedType = "ATOM";

                    rssFeed = XElement.Load(reader);
                }
            }
            catch (Exception ex)
            {
                Logger.LogInfo("LoadFeed() exception: " + feed, "");
                Logger.LogException(ex);
                Console.WriteLine("ERR: " + feed);
                Console.WriteLine(ex);
                return null;
            }

            Logger.LogInfo("Feed readed: " + feed, "");
            Logger.LogInfo("Feed type: " + feedType, "");
            Logger.LogInfo("Root node: " + rssFeed.Document, "");

            IList<FeedItem> k = new List<FeedItem>();
            if (feedType == "RSS")
            {
                // RSS 2.0 nests items under <channel>; RSS 1.0 (RDF) feeds may
                // carry them directly under the root.
                var source = rssFeed.Elements("channel").Elements("item");
                if (!source.Any())
                    source = rssFeed.Elements("item");

                foreach (var item in source)
                {
                    var feedItem = new FeedItem();
                    feedItem.Title = item.Element("title").Value;

                    // Prefer the full content:encoded body; fall back to <description>.
                    var obj = item.Element("{http://purl.org/rss/1.0/modules/content/}encoded");
                    if (obj != null)
                        feedItem.Description = GetContentAsPlainText(obj.ToString());
                    else
                    {
                        obj = item.Element("description");
                        if (obj != null)
                            feedItem.Description = GetContentAsPlainText(obj.ToString());
                    }
                    feedItem.Url = new Uri(item.Element("link").Value);
                    feedItem.PublishDate = Rfc822DateTime.Parse(item.Element("pubDate").Value);
                    feedItem.BlogId = blogId;
                    k.Add(feedItem);
                }
            }
            else if (feedType == "ATOM")
            {
                var source = rssFeed.Elements("{http://www.w3.org/2005/Atom}entry");
                foreach (var item in source)
                {
                    var feedItem = new FeedItem();
                    feedItem.Title = item.Element("{http://www.w3.org/2005/Atom}title").Value;
                    var contentElem = item.Element("{http://www.w3.org/2005/Atom}content");
                    if (contentElem != null)
                        feedItem.Description = GetContentAsPlainText(contentElem.Value);
                    else
                        feedItem.Description =
                            GetContentAsPlainText(item.Element("{http://www.w3.org/2005/Atom}summary").Value);
                    feedItem.PublishDate = DateTime.Parse(item.Element("{http://www.w3.org/2005/Atom}published").Value);

                    // Use the HTML "alternate" link as the item URL.
                    var links = item.Elements("{http://www.w3.org/2005/Atom}link");
                    foreach (var link in links)
                    {
                        if (link.Attribute("rel").Value == "alternate" &&
                            link.Attribute("type").Value == "text/html")
                        {
                            feedItem.Url = new Uri(link.Attribute("href").Value);
                            break;
                        }
                    }
                    feedItem.BlogId = blogId;
                    k.Add(feedItem);
                }
            }
            else
            {
                Logger.LogWarning("Unknown feed type!", "");
            }
            Logger.LogInfo("K=" + k.Count, "");
            return k;
        }

        /// <summary>
        /// Strips HTML from feed content and limits it to FeedItemContentMaxLength
        /// characters; when truncation occurs the text is cut at the last word
        /// boundary and " ..." is appended.
        /// </summary>
        /// <param name="txtContent">Raw (possibly HTML-encoded) feed content.</param>
        /// <returns>Plain-text summary suitable for the cache list.</returns>
        private static string GetContentAsPlainText(string txtContent)
        {
            var description = RegExHelper.RemoveHtmlTags(HttpUtility.HtmlDecode(txtContent));

            // BUG FIX: the word-boundary trim and " ..." suffix used to run
            // unconditionally, chopping the final word off descriptions that
            // were never truncated. Apply them only after an actual cut.
            if (description.Length > FeedItemContentMaxLength)
            {
                description = description.Substring(0, FeedItemContentMaxLength);
                if (description.LastIndexOf(' ') > 0)
                    description = description.Substring(0, description.LastIndexOf(' ')) + " ...";
            }
            return description;
        }

        /// <summary>
        /// Derives a SharePoint-safe .aspx file name from a feed item title:
        /// transliterates Estonian vowels to ASCII, removes characters illegal in
        /// SharePoint URLs, collapses separators and whitespace, joins the words
        /// with dashes and lower-cases the result.
        /// </summary>
        /// <param name="name">Feed item title.</param>
        /// <returns>Lower-case page file name ending in ".aspx".</returns>
        private static string GetPageFileName(string name)
        {
            // Character-level substitutions: transliterations, outright removals,
            // and separators that should be treated as whitespace.
            var substitutions = new Dictionary<string, string>
                                    {
                                        {"õ", "o"}, {"ä", "a"}, {"ö", "o"}, {"ü", "u"},
                                        {"Õ", "o"}, {"Ä", "a"}, {"Ö", "o"}, {"Ü", "u"},
                                        {" ", "-"},
                                        {"#", ""}, {"%", ""}, {"&", ""}, {"*", ""},
                                        {":", ""}, {"<", ""}, {">", ""}, {"?", ""},
                                        {"/", ""}, {"{", ""}, {"|", ""}, {"}", ""},
                                        {"'", ""}, {"’", ""}, {"´", ""}, {"`", ""},
                                        {"!", ""}, {"=", ""}, {"\"", ""},
                                        {".", " "}, {",", " "}, {"_", " "}
                                    };

            var result = name;
            foreach (var pair in substitutions)
                result = result.Replace(pair.Key, pair.Value);

            // Turn dashes (both original and just-introduced) back into spaces so
            // the whitespace handling below sees a single separator kind.
            result = result.Replace("-", " ");

            // Collapse runs of whitespace, then runs of any remaining non-word
            // characters, into single spaces.
            result = Regex.Replace(result, @"\s+", " ").Trim();
            result = Regex.Replace(result, @"\W+", " ").Trim();

            // Join the words with dashes and append the page extension.
            return (result.Replace(" ", "-") + ".aspx").ToLower();
        }
    }
}