using System;
using System.Collections.Generic;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Text;
using System.Xml;
namespace rtw
{
     

     
    /// <summary>
    /// Polls the Twitter Atom search feed for "accident" tweets: first fetches
    /// the historical backlog, then watches the live feed, persisting new
    /// entries via <c>entrywriter</c> and keeping a deduplicated, time-sorted
    /// accumulator in memory.
    /// </summary>
    class worker
    {
        /// <summary>Writes a message to the console.</summary>
        public static void log(string mes)
        {
            Console.WriteLine(mes);
        }

        /// <summary>
        /// Entry point: fetches the historical backlog, persists it, then polls
        /// the live feed (effectively) forever, surviving transient errors.
        /// </summary>
        public static void go()
        {
            // Fetch everything back to the last known timestamp first.
            List<entry> accum = cont();
            one_fetchrep(accum);

            entrywriter.write_entries("fetched", false, accum);

            log("--------------------------");
            log("Watch live...");
            int iter = 0;
            for (int i = 0; i < 10000000; i++)
            {
                try
                {
                    accum = live(accum);
                }
                catch (Exception e)
                {
                    // Log and keep polling: a transient network/parse failure
                    // must not kill the watcher.
                    log("------------EXCEPTION--------------");
                    log(iter + " iter");
                    log(e.Message);
                    log(e.StackTrace);
                    System.Threading.Thread.Sleep(5000);
                    log("--------------------------");
                }
                iter++;
            }
        }

        /// <summary>
        /// One polling iteration: trims the accumulator, fetches page 1 of the
        /// feed, persists anything not seen before and returns the merged list.
        /// </summary>
        static List<entry> live(List<entry> accum)
        {
            if (accum.Count > 1000)
            {
                // Cap memory use. NOTE(review): removal starts at index 1, so
                // the very oldest entry is kept forever — looks like an
                // off-by-one; confirm intent before changing to index 0.
                log("REMOVE");
                accum.RemoveRange(1, 1000);
            }

            List<entry> page = fetchonepage(1);
            List<entry> newEntries = findnew(accum, page);
            accum = merge(accum, page);

            System.Threading.Thread.Sleep(5000);

            if (newEntries.Count > 0)
            {
                log("new " + newEntries.Count + " items");
                entrywriter.write_entries("fetched", false, newEntries);
                log("Accumulated: ");
                one_fetchrep(accum);
            }
            return accum;
        }

        /// <summary>
        /// Fetches historical pages (newest first) until an entry older than
        /// the hard-coded last-known time is reached, persisting each page and
        /// merging everything into one deduplicated list.
        /// </summary>
        static List<entry> cont()
        {
            DateTime lastKnownTime = new DateTime(2010, 5, 15, 17, 30, 00);

            List<entry> hist = new List<entry>();

            int startpage = 1;
            int y = startpage;
            for (; y < 999; y++)
            {
                log("Fetch page  " + y);
                try
                {
                    List<entry> e1 = fetchonepage(y);
                    log("WRITE " + e1.Count + " entries");
                    entrywriter.write_entries("fetched", false, e1);

                    hist = merge(hist, e1);

                    if (e1.Count == 0)
                    {
                        // Empty page: nothing further back. The original
                        // indexed e1[e1.Count - 1] here and would have thrown.
                        break;
                    }

                    entry last = e1[e1.Count - 1];
                    if (as_dt(last) < lastKnownTime)
                    {
                        log("Reach lastknown time " + lastKnownTime.ToLongTimeString());
                        break;
                    }
                }
                catch (System.Net.WebException we)
                {
                    log("     " + we.Message);
                    // Response is null for timeouts/DNS failures; a direct
                    // cast would throw NullReferenceException inside the catch.
                    var webr = we.Response as System.Net.HttpWebResponse;
                    if (webr != null && webr.StatusCode == System.Net.HttpStatusCode.Forbidden)
                    {
                        // Rate limited / blocked: stop paging, keep what we have.
                        break;
                    }
                    throw;
                }
            }

            log("Fetched " + (y - startpage) + " historical pages");
            return hist;
        }

        /// <summary>Parses an entry's Atom timestamp into a DateTime.</summary>
        static DateTime as_dt(entry e)
        {
            // Atom timestamps are machine-formatted; parse culture-invariantly
            // so the watcher works regardless of the host locale.
            return DateTime.Parse(e.published, CultureInfo.InvariantCulture);
        }

        /// <summary>
        /// Prints a one-line summary of a batch: earliest timestamp, covered
        /// time span and item count. No-op for an empty batch.
        /// </summary>
        static void one_fetchrep(List<entry> e1)
        {
            if (e1.Count < 1)
            {
                return;
            }

            DateTime dt1 = as_dt(e1[e1.Count - 1]);
            DateTime dt2 = as_dt(e1[0]);

            // Normalize so dt1 <= dt2 regardless of the batch's sort order.
            if (dt2 < dt1)
            {
                DateTime t = dt2;
                dt2 = dt1;
                dt1 = t;
            }

            TimeSpan diap = dt2 - dt1;
            string diaprep = string.Format("{0:00}", diap.TotalMinutes) + "m";

            // NOTE(review): exact floating-point zero — true only when both
            // timestamps are identical; a 30-second span still prints "00m".
            if (diap.TotalMinutes == 0)
            {
                diaprep = string.Format("{0:00}", diap.TotalSeconds) + "s";
            }

            Console.WriteLine(dt1.ToLongTimeString() + " +" + diaprep + "  " + e1.Count + " items");
        }

        /// <summary>
        /// Returns the entries of <paramref name="newfetch"/> whose twitid does
        /// not already occur in <paramref name="accum"/>.
        /// </summary>
        static List<entry> findnew(List<entry> accum, List<entry> newfetch)
        {
            // Build the known-id set once instead of a linear scan per entry.
            var known = new HashSet<string>();
            foreach (var a in accum)
            {
                known.Add(a.twitid);
            }

            var res = new List<entry>();
            foreach (var i2 in newfetch)
            {
                if (!known.Contains(i2.twitid))
                {
                    res.Add(i2);
                }
            }
            return res;
        }

        /// <summary>
        /// Concatenates <paramref name="e1"/> with the entries of
        /// <paramref name="e2"/> not already present (by twitid) and returns
        /// the result sorted by publish time, oldest first.
        /// </summary>
        static List<entry> merge(List<entry> e1, List<entry> e2)
        {
            var res = new List<entry>(e1.Count + e2.Count);
            res.AddRange(e1);

            // Seed with e1's ids; Add() returning false also dedups within e2,
            // matching the original FindLastIndex-on-res behavior in O(n).
            var seen = new HashSet<string>();
            foreach (var a in e1)
            {
                seen.Add(a.twitid);
            }
            foreach (var i2 in e2)
            {
                if (seen.Add(i2.twitid))
                {
                    res.Add(i2);
                }
            }

            res.Sort((a, b) => as_dt(a).CompareTo(as_dt(b)));
            return res;
        }

        /// <summary>
        /// Fetches one page of the Atom search feed and parses its entry
        /// elements into <c>entry</c> objects, shortening author and id.
        /// </summary>
        static List<entry> fetchonepage(int pageNum)
        {
            System.Net.CookieCollection cookies = null;
            var res = fetcher.get("http://search.twitter.com/search.atom?q=accident&page=" + pageNum + "&rpp=50", ref cookies);
            var xml = fetcher.getAnswer(res);

            XmlDocument doc = new XmlDocument();
            doc.LoadXml(xml);
            XmlElement root = doc.DocumentElement;

            XmlNamespaceManager namespaceManager = new XmlNamespaceManager(doc.NameTable);
            namespaceManager.AddNamespace("ns", "http://www.w3.org/2005/Atom");

            var entries = new List<entry>();
            foreach (var node in root.ChildNodes)
            {
                // Skip comments/whitespace nodes; only <entry> elements matter.
                XmlElement ce = node as XmlElement;
                if (ce == null || ce.Name != "entry")
                {
                    continue;
                }

                entry ntry = new entry();
                ntry.twitid = ce.SelectSingleNode("ns:id", namespaceManager).InnerXml;
                ntry.published = ce.SelectSingleNode("ns:published", namespaceManager).InnerXml;
                ntry.title = ce.SelectSingleNode("ns:title", namespaceManager).InnerXml;
                ntry.author = ce.SelectSingleNode("ns:author/ns:name", namespaceManager).InnerXml;

                // Short author name: "name (Full Name)" -> "name". Guarded:
                // the original Substring(0, pos - 1) threw when "(" was absent.
                int parenPos = ntry.author.IndexOf("(");
                if (parenPos > 0)
                {
                    ntry.author = ntry.author.Substring(0, parenPos).TrimEnd();
                }

                // Short id: strip everything up to and including "2005:"
                // (the Atom tag-URI prefix). Guarded against a missing marker.
                int prefpos = ntry.twitid.IndexOf("2005:");
                if (prefpos >= 0)
                {
                    ntry.twitid = ntry.twitid.Substring(prefpos + 5);
                }

                entries.Add(ntry);
            }

            one_fetchrep(entries);
            return entries;
        }
    }
}
