﻿using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Xml.Linq;
using System.Configuration;
using System.Net;
using HodhodNews.Extraction;
using Googler.DAL;
using System.Data;
using Googler.Utilities.Web;
using Googler.BLL;
using System.Xml.XPath;
using System.IO;

namespace HodhodNews.Aggregation
{
    public class GoogleAtomNewsAggregationProvider : NewsAggregationProvider
    {
        #region commented
        //public GoogleNewsAggregationProvider()
        //{
        //    this.Keywords = keywords;
        //    Googler = new GoogleReader();
        //    LatestNews = new List<AtomEntry>();
        //    CurrentNews = new List<AtomEntry>();
        //}
        //public void ExtractFullArticles(long newsId)
        //{
        //    DataContext context = new DataContext();
        //    var news = context.News.Where(n => n.OId == newsId).FirstOrDefault();//.First(n => n.ContentText == null);
        //    if (news == null)
        //    {
        //        return;
        //    }
        //    if (!string.IsNullOrWhiteSpace(news.ContentText))
        //    {
        //        return;
        //    }

        //    WebClient wc = new WebClient();

        //    var html = wc.DownloadString(news.Link);
        //    var ns = NewsExtractionProvider.Insance.Extract(new ExtracationOptions
        //    {
        //        NewsSource = html,
        //        NewsSummary = news.SummaryHtml.TrySubstractWords(0, 1),
        //        NewsTitle = news.Title
        //    });
        //    news.ContentText = (ns ?? new ExtractedNews()).Text;
        //    context.SaveChanges();
        //}
        //public void ExtractFullArticles()
        //{
        //    DataContext context = new DataContext();
        //    var news = context.News.First(n => n.ContentText == null);

        //    WebClient wc = new WebClient();

        //    var html = wc.DownloadString(news.Link);
        //    var ns = NewsExtractionProvider.Insance.Extract(new ExtracationOptions
        //    {
        //        NewsSource = html,
        //        NewsSummary = news.SummaryHtml.TrySubstractWords(0, 7),
        //        NewsTitle = news.Title
        //    });
        //    news.ContentText = (ns ?? new ExtractedNews()).Text;
        //    context.SaveChanges();
        //}
        //        /// <summary>
        ///// Save News List to database
        ///// </summary>
        ///// <param name="atoms">News List</param>
        //public void SaveNews(List<AtomEntry> atoms)
        //{
        //    foreach (AtomEntry atom in atoms)
        //    {
        //        AtomEntryDAL.Create(atom);
        //    }
        //}
        ////public static List<AtomEntry> GetAllNews()
        //{
        //    return AtomEntryDAL.GetAllNews();
        //}
        //public static AtomEntry GetById(int id)
        //{
        //    return AtomEntryDAL.GetByOId(id);
        //}
        //public static List<AtomEntry> GetTodayNews()
        //{
        //    return AtomEntryDAL.GetTodayNews();
        //}
        //public static List<AtomEntry> GetDayNews(DateTime date)
        //{
        //    return AtomEntryDAL.GetDayNews(date);
        //}

        //public static List<AtomEntry> GetNews(DateTime fromDate, DateTime toDate)
        //{
        //    return AtomEntryDAL.GetNews(fromDate, toDate);
        //}
        //public List<AtomEntry> LatestNews { get; set; }
        //public List<AtomEntry> CurrentNews { get; set; }
        //public string[] Keywords
        //{
        //    get;
        //    set;
        //}         
        //public GoogleReader Googler { get; set; }
        #endregion

        // Namespace of the most recently parsed feed; also read by ConvertToAtomEntries.
        XNamespace ns;

        /// <summary>
        /// Fetches the past hour of Google News Atom feeds for every keyword and merges
        /// them into a single de-duplicated list of <see cref="News"/> items. Each item
        /// is tagged with every keyword whose search produced it.
        /// </summary>
        /// <param name="keywords">Keywords to search for.</param>
        /// <returns>De-duplicated news items with Publisher and Summary populated.</returns>
        public List<News> GetLastHour(List<Keyword> keywords)
        {
            // Atom entry id -> news item; collapses entries returned by several keywords.
            Dictionary<string, News> entries = new Dictionary<string, News>();

            foreach (var key in keywords)
            {
                // Parse only this keyword's feeds. The original code accumulated all
                // feeds across iterations and re-scanned the whole list every time,
                // and indexed [0] before checking the list was non-empty.
                List<string> feeds = this.ReadPastHour(key.Name);
                foreach (string f in feeds)
                {
                    XElement feed = XElement.Parse(f);
                    ns = feed.Name.Namespace;
                    foreach (XElement elem1 in feed.Descendants(ns + "entry"))
                    {
                        string id = elem1.Element(ns + "id").Value;
                        News n;
                        if (!entries.TryGetValue(id, out n))
                        {
                            n = ConvertToNews(elem1);
                            entries.Add(id, n);
                        }
                        // Tag with the current keyword. The original only did this for
                        // newly-added entries, so an item found under an earlier keyword
                        // never received later keywords.
                        if (n.Keywords.Find(k => k.ID == key.ID) == null)
                        {
                            n.Keywords.Add(key);
                        }
                    }
                }
            }

            var atoms = entries.Values.ToList<News>();
            atoms.ForEach(a =>
            {
                a.Publisher = GetPublisherName(a);
                a.Summary = GetSummaryText(a);
            });

            return atoms;
        }
        /// <summary>
        /// Flattens a set of parsed Atom feed documents into distinct <see cref="News"/>
        /// items, keyed by Atom entry id (first occurrence wins).
        /// </summary>
        /// <param name="atoms">Parsed Atom feed roots; read using the <c>ns</c> field.</param>
        /// <returns>De-duplicated news items.</returns>
        private List<News> ConvertToAtomEntries(List<XElement> atoms)
        {
            var byId = new Dictionary<string, News>();
            foreach (XElement feed in atoms)
            {
                foreach (XElement entry in feed.Descendants(ns + "entry"))
                {
                    string id = entry.Element(ns + "id").Value;
                    if (!byId.ContainsKey(id))
                    {
                        byId.Add(id, ConvertToNews(entry));
                    }
                }
            }
            return byId.Values.ToList<News>();
        }
        /// <summary>
        /// Maps a single Atom <c>entry</c> element to a <see cref="News"/> instance.
        /// </summary>
        /// <param name="atom">The Atom <c>entry</c> element.</param>
        /// <returns>A populated news item (Summary/Keywords are filled in by callers).</returns>
        News ConvertToNews(XElement atom)
        {
            News n = new News();
            XNamespace ns = atom.Name.Namespace;
            n.EntryId = atom.Element(ns + "id").Value;
            n.Title = atom.Element(ns + "title").Value;
            // Cast the element so the Atom timestamp is parsed as an XML dateTime
            // (ISO 8601) instead of Convert.ToDateTime's current-culture parsing,
            // which breaks on machines with non-matching date formats.
            n.PublishDate = (DateTime)atom.Element(ns + "updated");
            // Atom entries here carry only "updated", so both dates get the same value.
            n.ModificationDate = n.PublishDate;
            n.Link = GetUrl(atom.Element(ns + "link").Attribute("href").Value);
            n.SummaryHtml = atom.Element(ns + "content").Value;
            n.Publisher = GetSource(n.Link);
            n.Aggregator = "Google News";
            return n;
        }
        /// <summary>
        /// Extracts the plain-text snippet from a Google News entry's summary HTML.
        /// The snippet sits inside the third <c>&lt;font&gt;</c> element after the
        /// <c>class="lh"</c> div in Google's markup (layout observed at time of writing).
        /// </summary>
        /// <param name="entry">News item whose <c>SummaryHtml</c> is scanned.</param>
        /// <returns>The snippet text, or an empty string when the expected markup is missing.</returns>
        string GetSummaryText(News entry)
        {
            string html = entry.SummaryHtml;
            if (string.IsNullOrEmpty(html))
                return string.Empty;

            // Walk to the third <font> tag after the summary div. The original code did
            // this unrolled with no -1 checks, so missing markup produced garbage text
            // or a negative-length Substring exception.
            int pos = html.IndexOf("class=\"lh\"");
            for (int i = 0; i < 3; i++)
            {
                pos = html.IndexOf("<font", pos + 1);
                if (pos < 0)
                    return string.Empty;
            }

            int startFrom = html.IndexOf(">", pos + 1);
            if (startFrom < 0)
                return string.Empty;
            startFrom += 1;

            int endOfThirdFont = html.IndexOf("</font>", startFrom);
            if (endOfThirdFont < 0)
                return string.Empty;

            return html.Substring(startFrom, endOfThirdFont - startFrom);
        }
        /// <summary>
        /// Returns the host name of an absolute URL; used as the fallback publisher name.
        /// </summary>
        /// <param name="Link">Absolute URL of the article.</param>
        private string GetSource(string Link)
        {
            return new Uri(Link).Host;
        }

        /// <summary>
        /// Unwraps the real article address from a Google News redirect link: everything
        /// after the <c>url=</c> query parameter, URL-decoded.
        /// </summary>
        /// <param name="url">Raw link taken from the Atom entry.</param>
        /// <returns>The decoded target URL, or the input unchanged when no <c>url=</c> marker exists.</returns>
        private string GetUrl(string url)
        {
            if (string.IsNullOrEmpty(url))
                return url;

            int i = url.IndexOf("url=");
            // Original code ignored a miss: IndexOf returned -1 and Substring(3) silently
            // produced a garbled link instead of leaving the URL alone.
            if (i < 0)
                return url;

            return System.Web.HttpUtility.UrlDecode(url.Substring(i + "url=".Length));
        }
        /// <summary>
        /// Pulls the publisher name out of the summary HTML: Google renders it in the
        /// first <c>&lt;font&gt;</c> element following the thumbnail <c>&lt;img&gt;</c>.
        /// Falls back to the host-derived <c>entry.Publisher</c> whenever the expected
        /// markup is absent or the extracted name is empty.
        /// </summary>
        /// <param name="entry">News item whose <c>SummaryHtml</c> is scanned.</param>
        string GetPublisherName(News entry)
        {
            string html = entry.SummaryHtml;

            int imageIndex = html.IndexOf("<img");
            if (imageIndex == -1)
                return entry.Publisher;

            // Original code checked only the <img> marker; a missing <font> or ">"
            // made the later IndexOf calls throw ArgumentOutOfRangeException.
            int fontIndex = html.IndexOf("<font", imageIndex);
            if (fontIndex == -1)
                return entry.Publisher;

            int titleIndex = html.IndexOf(">", fontIndex);
            if (titleIndex == -1)
                return entry.Publisher;

            int titleEnd = html.IndexOf("<", titleIndex);
            if (titleEnd == -1)
                return entry.Publisher;

            // Exclusive of the closing "<" (the original included it and stripped it
            // afterwards, which also defeated the empty-name fallback below).
            string title = html.Substring(titleIndex + 1, titleEnd - titleIndex - 1)
                               .Replace(">", "").Replace("<", "");

            if (title.Length == 0)
                return entry.Publisher;

            return title;
        }




        /// <summary>
        /// Aggregates the past hour of Google News results for the keywords supplied
        /// in <paramref name="options"/>.
        /// </summary>
        /// <param name="options">Aggregation options carrying the keyword list.</param>
        /// <returns>De-duplicated news items from the past hour.</returns>
        public override List<News> Aggregate(AggregationOptions options)
        {
            return GetLastHour(options.Keywords);
        }



        /// <summary>
        /// Runs a single Google News search for <paramref name="searchQuery"/> using the
        /// default query parameters (RSS output, no time restriction).
        /// </summary>
        /// <param name="searchQuery">Terms to search for.</param>
        public HttpRequestorResult Read(string searchQuery)
        {
            string[] query = GetDefaultNameValue(searchQuery);
            return postRequest(query);
        }

        /// <summary>
        /// Issues a GET against the Google News search endpoint with the given
        /// name/value query pairs.
        /// </summary>
        /// <param name="nameValue">Flat array of alternating parameter names and values.</param>
        private HttpRequestorResult postRequest(string[] nameValue)
        {
            const string url = "http://news.google.com/news/search";
            HttpRequestor requestor = new HttpRequestor();
            return requestor.Get(url, nameValue);
        }
        /// <summary>
        /// Issues a GET against an absolute feed link and returns the raw response body.
        /// </summary>
        /// <param name="directLink">Absolute URL of a child ("all articles") feed.</param>
        private string PostRequest(string directLink)
        {
            HttpRequestor requestor = new HttpRequestor();
            return requestor.Get(directLink);
        }

        /// <summary>
        /// Builds the minimal query: just the search terms plus RSS output.
        /// </summary>
        /// <param name="searchQuery">Terms to search for (q parameter).</param>
        private string[] GetDefaultNameValue(string searchQuery)
        {
            return new[]
            {
                "q", searchQuery,
                "output", "rss"
            };
        }

        /// <summary>
        /// Fetches the Atom feed(s) for news matching <paramref name="searchQuery"/>
        /// published within the past hour.
        /// </summary>
        /// <param name="searchQuery">Terms to search for.</param>
        /// <returns>The main feed followed by any child ("all articles") feeds.</returns>
        public List<string> ReadPastHour(string searchQuery)
        {
            // "h" = restrict to the past hour via the as_qdr parameter.
            HttpRequestorResult result = postRequest(GetGoogleNameValue(searchQuery, "h"));
            return AppendChildFeeds(result);
        }

        /// <summary>
        /// Collects the main feed body plus the body of every additional link the
        /// response advertises; empty child responses are skipped.
        /// </summary>
        /// <param name="result">Response of the initial search request.</param>
        /// <returns>Main feed first, then each non-empty child feed.</returns>
        private List<string> AppendChildFeeds(HttpRequestorResult result)
        {
            var feeds = new List<string> { result.Response };

            // TODO: log fetch errors instead of silently skipping empty responses.
            foreach (string link in result.MoreLinks)
            {
                string childFeed = PostRequest(link);
                if (!string.IsNullOrEmpty(childFeed))
                {
                    feeds.Add(childFeed);
                }
            }

            return feeds;
        }

        /// <summary>
        /// Fetches the Atom feed(s) for news matching <paramref name="searchQuery"/>
        /// from the past day (Google's default time window).
        /// </summary>
        /// <param name="searchQuery">Terms to search for.</param>
        /// <returns>The main feed followed by any child ("all articles") feeds.</returns>
        public List<string> ReadPastDay(string searchQuery)
        {
            // Empty as_qdr value = Google's default window (past day).
            HttpRequestorResult result = postRequest(GetGoogleNameValue(searchQuery, ""));
            return AppendChildFeeds(result);
        }

        /// <summary>
        /// Fetches the Atom feed(s) for news matching <paramref name="searchQuery"/>
        /// published within the past month.
        /// </summary>
        /// <param name="searchQuery">Terms to search for.</param>
        /// <returns>The main feed followed by any child ("all articles") feeds.</returns>
        public List<string> ReadPastMonth(string searchQuery)
        {
            // "m" = restrict to the past month via the as_qdr parameter.
            HttpRequestorResult result = postRequest(GetGoogleNameValue(searchQuery, "m"));
            return AppendChildFeeds(result);
        }
        /******************************************************************
         * None of these options will work with RSS.
         * The best approach is to fetch the results as RSS, then follow the
         * "all articles" link and fetch its target as RSS as well.
         * That link can be identified as the one whose href contains:
         * http://news.google.com/news/story?pz=1&amp;ncl
         * ****************************************************************/
        //http://news.google.com/news/search?pz=1&cf=all&ned=ar_me&hl=ar&q=test+news
        /// <summary>
        /// Builds the Google News query pairs with Atom output and a time restriction.
        /// </summary>
        /// <param name="searchQuery">Terms to search for (q = all-words query; as_epq would be exact phrase).</param>
        /// <param name="time">as_qdr value: "h" = past hour, "m" = past month, "" = default (past day).</param>
        private string[] GetGoogleNameValue(string searchQuery, string time)
        {
            return new[]
            {
                "q", searchQuery,
                "pz", "1",           // purpose unconfirmed
                "ned", "ar_me",      // edition: Middle East (ar_eg would be Egypt)
                "hl", "ar",          // interface language: Arabic (presumed, per original author's note)
                "as_qdr", time,
                "output", "atom"
            };
        }
        /// <summary>
        /// Builds the extended ("advanced search") Google News query pairs with Atom output.
        /// </summary>
        /// <param name="searchQuery">Terms to search for (as_q = all-words query; as_epq would be exact phrase).</param>
        /// <param name="time">as_qdr value: h = past hour, w = past week, m = past month; past day is the default.</param>
        private string[] GetTimelyNameValue(string searchQuery, string time)
        {
            return new[]
            {
                "as_q", searchQuery,    // plain "q" also works for the all-words query
                "pz", "1",              // purpose unconfirmed
                "cf", "all",            // purpose unconfirmed
                "ned", "ar_me",         // edition: Middle East (ar_eg would be Egypt)
                "hl", "ar",             // interface language: Arabic (presumed, per original author's note)
                "as_scoring", "n",      // n: recent first, r: related, o: oldest first, d: by date incl. repeated news
                //"btnG", "%D8%A8%D8%AD%D8%AB", // pressed-button name; the encoded value is Arabic for "search"
                "as_drrb", "q",         // purpose unconfirmed
                "as_qdr", time,         // h: hour, w: week, m: month; day is the default
                //"as_mind", "3",
                //"as_minm", "2",
                //"as_maxd", "5",
                //"as_maxm", "3",
                //"as_occt", "any",
                "output", "atom"
            };
        }
    }
}
/**************************************************************8
 * exact phrase "gun fire" all words = Usa Pentagon
 * http://news.google.com/news/search?pz=1&cf=all&ned=ar_eg&hl=ar&q=Usa+pentagon+%22gun+fire%22&output=rss
 * past hour phrase = usa only------------------------------------------------------------------------------------------------------------------------------| hour=h
 * http://news.google.com/news/search?pz=1&cf=all&ned=ar_eg&hl=ar&as_q=usa+only&as_epq=&as_oq=&as_eq=&as_scoring=n&btnG=%D8%A8%D8%AD%D8%AB&as_drrb=q&as_qdr=h&as_mind=3&as_minm=2&as_maxd=5&as_maxm=3&as_nsrc=&as_occt=any
 * اليوم الماضي فقط-----------------------------------------------------------------------------------------------------------------------------------------|Default
 * http://news.google.com/news/search?pz=1&cf=all&ned=ar_eg&hl=ar&as_q=Usa+pentagn&as_epq=gun+fire&as_oq=&as_eq=&as_scoring=r&btnG=%D8%A8%D8%AD%D8%AB&as_drrb=q&as_qdr=d&as_mind=3&as_minm=2&as_maxd=5&as_maxm=3&as_nsrc=&as_occt=any
 * past week---------------------------------------------------------------------------------------------------------------------------------------------------|
 * http://news.google.com/news/search?pz=1&cf=all&ned=ar_eg&hl=ar&as_q=brotherhood&as_epq=&as_oq=&as_eq=&as_scoring=n&btnG=%D8%A8%D8%AD%D8%AB&as_drrb=q&as_qdr=w&as_mind=3&as_minm=2&as_maxd=5&as_maxm=3&as_nsrc=&as_occt=any
 * past month-------------------------------------------------------------------------------------------------------|-------------------------------------------|--------|
 * http://news.google.com/news/search?pz=1&cf=all&ned=ar_eg&hl=ar&as_q=brotherhood&as_epq=&as_oq=&as_eq=&as_scoring=n&btnG=%D8%A8%D8%AD%D8%AB&as_drrb=q&as_qdr=m&as_mind=3&as_minm=2&as_maxd=5&as_maxm=3&as_nsrc=&as_occt=any
 * http://news.google.com/news/search?pz=1&cf=all&ned=ar_me&hl=ar&as_q=brotherhood&as_epq=&as_oq=&as_eq=&as_scoring=r&btnG=%D8%A8%D8%AD%D8%AB&as_drrb=q&as_qdr=m&as_mind=12&as_minm=2&as_maxd=5&as_maxm=3&as_nsrc=&as_occt=any
 * http://news.google.com/news/search?pz=1&cf=all&ned=ar_me&hl=ar&as_q=%D8%A7%D9%8&as_epq=&as_oq=&as_eq=&as_scoring=d&btnG=%D8%A8%D8%AD%D8%AB&as_drrb=q&as_qdr=m&as_mind=12&as_minm=2&as_maxd=5&as_maxm=3&as_nsrc=&as_occt=any
 * 
 * Studying the page-number (start) parameter
 * http://news.google.com/news/search?pz=1&cf=all&ned=ar_me&hl=ar&as_scoring=n&as_q=%D8%A7%D9%84%D9%82%D8%AF%D8%B3&btnG=%D8%A8%D8%AD%D8%AB&as_drrb=q&as_qdr=m&as_mind=12&as_minm=2&as_maxd=5&as_maxm=3&as_nsrc=&as_occt=any&start=10
 * http://news.google.com/news/search?pz=1&cf=all&ned=ar_me&hl=ar&as_scoring=n&as_maxm=3&q=%D8%A7%D9%84%D9%82%D8%AF%D8%B3&as_qdr=m&as_drrb=q&as_mind=12&as_minm=2&cf=all&as_maxd=5&start=10
 * 
 * ned => news edition/locale: ar_eg for Egypt, ar_me for the Middle East
 * hl => presumably the interface language
 * **********************************************************/
