﻿using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using Indexing;
using Common;
using System.Threading;
using Notifying;
using System.Net;
using Site_Extractor;
using Dal;
using Lucene.Net.Index;
using Lucene.Net.Analysis.Standard;
using Amib.Threading;

namespace Server
{

    public class Server
    {
        // Raised after each completed download/update pass for a site.
        public event ServerEventDelegate ServerEvent;

        // Path of the main Lucene index (also copied into _mainIndexPath below).
        public string MAIN_INDEX_PATH;
        // Worker-thread concurrency used per site by the SmartThreadPool groups.
        public const int NUM_OF_THREADS_PER_SITE = 3;
        // Inclusive lower / exclusive upper bound of the site range deep-crawled by DoDeepCrawlCycle.
        public const int MIN_SITE_INDEX = 0;
        public const int MAX_SITE_INDEX = 3;
        string _connectionString, _prefixFilePath;
        List<Site> _sites;
        List<GenericForum> _forums;
        // One work-item group per site; populated only by DoDeepCrawlCycle.
        IWorkItemsGroup[] _siteWorkItemGroups;
        // One dedicated thread per site, filled by DoDownloadCycle / DoUpdateCycle.
        Thread[] _downloaderThreads;
        Thread[] _updaterThreads;
        //public Notifier _downloaderNotifier; //Allows to plug in a notifier as needed.
        //public Notifier _updaterNotifier; //Allows to plug in a notifier as needed.
        //Notifier _notifier;
        static object Object = new object();
        string _mainIndexPath;
        LuceneManager.LuceneManager _luceneManager; //Main indexWriter - all indexes are merged into this.
        // NOTE(review): never assigned in this file — DoDeepCrawlCycle dereferences it; verify it is initialized elsewhere.
        SmartThreadPool _threadPool;
        ManualResetEvent _downloaderExitThreadEvent; //For Abort.
        ManualResetEvent _updaterExitThreadEvent; //For Abort.

        /// <summary>
        /// Builds a server over the given sites and forums, allocating one
        /// downloader/updater thread slot per site and opening the shared
        /// Lucene index writer at <paramref name="indexPath"/>.
        /// </summary>
        /// <param name="Sites">Sites to crawl; sizes the per-site thread arrays.</param>
        /// <param name="forums">Forums handled by the (currently disabled) forum jobs.</param>
        /// <param name="ConnectionString">Database connection string.</param>
        /// <param name="PrefixFilePath">Prefix file handed to the Lucene manager and workers.</param>
        /// <param name="indexPath">Directory of the main Lucene index.</param>
        public Server(List<Site> Sites, List<GenericForum> forums, string ConnectionString, string PrefixFilePath, string indexPath)
        {
            // Configuration handed in by the host.
            _sites = Sites;
            _forums = forums;
            _connectionString = ConnectionString;
            _prefixFilePath = PrefixFilePath;
            MAIN_INDEX_PATH = indexPath;
            _mainIndexPath = indexPath;

            // One thread slot and one work-item group per site.
            _downloaderThreads = new Thread[_sites.Count];
            _updaterThreads = new Thread[_sites.Count];
            _siteWorkItemGroups = new IWorkItemsGroup[_sites.Count];

            // Start unsignaled; ExitDownloader()/ExitUpdater() set these to stop the loops.
            _downloaderExitThreadEvent = new ManualResetEvent(false);
            _updaterExitThreadEvent = new ManualResetEvent(false);

            // Single index writer shared by every indexing class/method.
            _luceneManager = new LuceneManager.LuceneManager(false, false, _prefixFilePath,
                MAIN_INDEX_PATH, MAIN_INDEX_PATH);
        }

        /*  Doesnt work right now.
         * 
        public void CreateDnsLookupHash()
        {
            HtmlDownloader.DnsLookup = new System.Collections.Hashtable();
            foreach (Site site in _sites)
            {
                foreach (string domain in site.Domains)
                {
                    if (domain.Equals(string.Empty)) continue;
                    IPAddress[] addresslist = Dns.GetHostAddresses(domain);
                    foreach (IPAddress ip in addresslist)
                    {
                        if (!HtmlDownloader.DnsLookup.Contains(domain))
                            HtmlDownloader.DnsLookup.Add(domain, ip.ToString());
                    }
                }
            }
        }*/

        /// <summary>
        /// This is the main index cycle function.
        /// </summary>
        /// <param name="FromDate"></param>
        /// <summary>
        /// Runs one full download cycle: launches one downloader thread per
        /// site, waits for all of them, then optimizes the Lucene index.
        /// </summary>
        /// <param name="FromDate">Only content newer than this date is downloaded.</param>
        public void DoDownloadCycle(DateTime FromDate)
        {
            // Launch one dedicated downloader thread per site.
            for (int siteIndex = 0; siteIndex < _sites.Count; ++siteIndex)
            {
                Thread worker = new Thread(new ParameterizedThreadStart(SiteDownloaderThread));
                worker.Name = "Downloader Thread " + siteIndex;
                _downloaderThreads[siteIndex] = worker;
                worker.Start(new object[] { _sites[siteIndex], FromDate });
            }

            // Block until every site's downloader has finished.
            foreach (Thread worker in _downloaderThreads)
            {
                worker.Join();
            }

            Thread.Sleep(2000);
            _luceneManager.OptimizeIndex();
            // Indexing done for all sites.
            Console.Write("Download cycle ended at : " + DateTime.Now.ToString());
        }


        /// <summary>
        /// Runs one deep-crawl cycle over the configured site index range
        /// [MIN_SITE_INDEX, MAX_SITE_INDEX), capped at the actual site count,
        /// using a shared SmartThreadPool with one work-item group per site.
        /// </summary>
        /// <param name="FromDate">Only content newer than this date is crawled.</param>
        public void DoDeepCrawlCycle(DateTime FromDate)
        {
            Console.WriteLine("Deep Crawl Cycle Started at " + DateTime.Now.ToString());

            // Bug fix: _threadPool was never initialized anywhere in this class,
            // so CreateWorkItemsGroup below always threw NullReferenceException.
            if (_threadPool == null)
            {
                _threadPool = new SmartThreadPool();
            }

            // Clamp the hard-coded upper bound so we never index past the
            // number of sites actually configured.
            int upperBound = Math.Min(MAX_SITE_INDEX, _sites.Count);

            for (int i = MIN_SITE_INDEX; i < upperBound; ++i)
            {
                _siteWorkItemGroups[i] = _threadPool.CreateWorkItemsGroup(NUM_OF_THREADS_PER_SITE);
                SiteDeepCrawler(_sites[i], _siteWorkItemGroups[i], FromDate);
            }

            // Wait for every site's work-item group to drain.
            for (int i = MIN_SITE_INDEX; i < upperBound; ++i)
            {
                _siteWorkItemGroups[i].WaitForIdle();
            }

            Thread.Sleep(2000);
            Console.WriteLine("DeepCrawl cycle ended at : " + DateTime.Now.ToString());
        }



        /// <summary>
        /// Runs one full update cycle: launches one updater thread per site,
        /// waits for all of them, then optimizes the Lucene index.
        /// </summary>
        /// <param name="FromDate">Only content newer than this date is re-checked.</param>
        public void DoUpdateCycle(DateTime FromDate)
        {
            // Launch one dedicated updater thread per site.
            for (int siteIndex = 0; siteIndex < _sites.Count; ++siteIndex)
            {
                Thread worker = new Thread(new ParameterizedThreadStart(SiteUpdaterThread));
                worker.Name = "Updater Thread " + siteIndex;
                _updaterThreads[siteIndex] = worker;
                worker.Start(new object[] { _sites[siteIndex], FromDate });
            }

            // Block until every site's updater has finished.
            foreach (Thread worker in _updaterThreads)
            {
                worker.Join();
            }

            Thread.Sleep(2000);
            _luceneManager.OptimizeIndex();
            // Indexing done for all sites.
            Console.Write("Update Cycle ended at : " + DateTime.Now.ToString());
        }


        /// <summary>
        /// Thread entry point for one site's downloader: unpacks the
        /// { Site, DateTime } argument array and delegates to SiteDownloader.
        /// Silently returns on any malformed argument.
        /// </summary>
        /// <param name="obj">Expected to be an object[] { Site, DateTime FromDate }.</param>
        private void SiteDownloaderThread(object obj)
        {
            var args = obj as object[];
            if (args == null || args.Length != 2) return;
            // Validate both elements before using them: the original code passed
            // a null Site onward (and threw on a bad DateTime unbox) when given
            // elements of the wrong type. (Also removed the unused linkToIndex local.)
            var site = args[0] as Site;
            if (site == null) return;
            if (!(args[1] is DateTime)) return;
            SiteDownloader(site, (DateTime)args[1]);
        }

        /// <summary>
        /// Thread entry point for one site's updater: unpacks the
        /// { Site, DateTime } argument array and delegates to SiteUpdater.
        /// Silently returns on any malformed argument.
        /// </summary>
        /// <param name="obj">Expected to be an object[] { Site, DateTime FromDate }.</param>
        private void SiteUpdaterThread(object obj)
        {
            var args = obj as object[];
            if (args == null || args.Length != 2) return;
            // Validate both elements before using them: the original code passed
            // a null Site onward (and threw on a bad DateTime unbox) when given
            // elements of the wrong type. (Also removed the unused linkToIndex local.)
            var site = args[0] as Site;
            if (site == null) return;
            if (!(args[1] is DateTime)) return;
            SiteUpdater(site, (DateTime)args[1]);
        }


        /// <summary>
        /// Indexes an entire site and merges the newly indexed catalog into the main one.
        /// </summary>
        /// <param name="Site"></param>
        /// <returns></returns>
        /// <summary>
        /// Downloads and indexes one site in a loop until ExitDownloader() is
        /// signaled. Each pass downloads stories, blogs and forums through a
        /// fresh SmartThreadPool, flushes the shared index writer, and raises
        /// ServerEvent. Exceptions are logged and end the loop.
        /// </summary>
        /// <param name="Site">Site to download.</param>
        /// <param name="FromDate">Only content newer than this date is downloaded.</param>
        private void SiteDownloader(Site Site, DateTime FromDate)
        {
            try
            {
                // Loop until ExitDownloader() sets the exit event.
                while (!_downloaderExitThreadEvent.WaitOne(0, false))
                {
                    // Dedicated pool for this pass; shut it down afterwards so its
                    // threads are released (the original leaked one pool per pass).
                    SmartThreadPool stp = new SmartThreadPool(1000, 3, 0);
                    try
                    {
                        Workers.Downloader d = new Workers.Downloader(Site, stp,
                            NUM_OF_THREADS_PER_SITE, _prefixFilePath, FromDate, _luceneManager);
                        d.DownloadStories(_mainIndexPath);
                        d.DownloadBlogs(_mainIndexPath);
                        d.DownloadForums(_mainIndexPath);
                        stp.WaitForIdle();
                    }
                    finally
                    {
                        stp.Shutdown();
                    }
                    _luceneManager.FlushIndexWriter();
                    // Copy the delegate first so a concurrent unsubscribe cannot
                    // null it between the check and the invocation.
                    ServerEventDelegate handler = ServerEvent;
                    if (handler != null)
                    {
                        handler(new ServerEventArgs(Site.SiteNames.SiteName, "Downloader"));
                    }
                }
            }
            catch (Exception e)
            {
                Console.WriteLine("Server : " + Site.SiteNames.SiteName + " Downloader Thread Exception: {0}", e.Message);
            }
        }

        /// <summary>
        /// Deep-crawls one site's stories via the supplied work-item group.
        /// Runs a single pass (no exit-event loop); exceptions are logged.
        /// </summary>
        /// <param name="Site">Site to crawl.</param>
        /// <param name="workGroup">SmartThreadPool work-item group for this site.</param>
        /// <param name="FromDate">Only content newer than this date is crawled.</param>
        private void SiteDeepCrawler(Site Site, IWorkItemsGroup workGroup, DateTime FromDate)
        {
            try
            {
                //Start a thread workitemgroup for this site.
                Workers.DeepCrawler d = new Workers.DeepCrawler(Site, workGroup,
                    NUM_OF_THREADS_PER_SITE, _prefixFilePath, FromDate, _luceneManager);
                d.DeepCrawlStories(_mainIndexPath);
            }
            catch (Exception e)
            {
                // Bug fix: the message previously said "Downloader" (copy-paste),
                // mislabeling deep-crawl failures in the log.
                Console.WriteLine("Server : " + Site.SiteNames.SiteName + " DeepCrawler Thread Exception: {0}", e.Message);
            }
        }

        /// <summary>
        /// Cycles through all DB Urls and checks them for updates. merges temp index to main.
        /// </summary>
        /// <param name="Site"></param>
        /// <returns></returns>
        /// <summary>
        /// Re-checks one site's known URLs for updates in a loop until
        /// ExitUpdater() is signaled. Each pass runs an Updater through a fresh
        /// SmartThreadPool, flushes the shared index writer, and raises
        /// ServerEvent. Exceptions are logged and end the loop.
        /// </summary>
        /// <param name="Site">Site to update.</param>
        /// <param name="FromDate">Only content newer than this date is re-checked.</param>
        private void SiteUpdater(Site Site, DateTime FromDate)
        {
            try
            {
                // Loop until ExitUpdater() sets the exit event.
                while (!_updaterExitThreadEvent.WaitOne(0, false))
                {
                    // Dedicated pool for this pass; shut it down afterwards so its
                    // threads are released (the original leaked one pool per pass).
                    SmartThreadPool stp = new SmartThreadPool(1000, 3, 0);
                    try
                    {
                        Workers.Updater u = new Workers.Updater(Site, stp,
                            NUM_OF_THREADS_PER_SITE, _prefixFilePath, FromDate, _luceneManager);
                        u.Update(_mainIndexPath);
                        stp.WaitForIdle();
                    }
                    finally
                    {
                        stp.Shutdown();
                    }
                    _luceneManager.FlushIndexWriter();
                    // Copy the delegate first so a concurrent unsubscribe cannot
                    // null it between the check and the invocation.
                    ServerEventDelegate handler = ServerEvent;
                    if (handler != null)
                    {
                        handler(new ServerEventArgs(Site.SiteNames.SiteName, "Updater"));
                    }
                }
            }
            catch (Exception e)
            {
                Console.WriteLine("Server : " + Site.SiteNames.SiteName + " Updater Thread Exception: {0}", e.Message);
            }
        }

        /// <summary>
        /// Signals every SiteDownloader loop to stop after its current pass
        /// (sets the downloader exit event checked by WaitOne).
        /// </summary>
        public void ExitDownloader()
        {
            _downloaderExitThreadEvent.Set();
        }
        /// <summary>
        /// Signals every SiteUpdater loop to stop after its current pass
        /// (sets the updater exit event checked by WaitOne).
        /// </summary>
        public void ExitUpdater()
        {
            _updaterExitThreadEvent.Set();
        }


    }

    public delegate void ServerEventDelegate(ServerEventArgs e);

    /// <summary>
    /// Payload delivered through Server.ServerEvent: identifies which site
    /// completed a pass and which worker kind ("Downloader" or "Updater") ran it.
    /// </summary>
    public class ServerEventArgs
    {
        public string _siteName;
        public string _actionType;

        /// <summary>Creates an event payload for one site/worker pair.</summary>
        /// <param name="siteName">Display name of the site that finished a pass.</param>
        /// <param name="actionType">Worker kind, e.g. "Downloader" or "Updater".</param>
        public ServerEventArgs(string siteName, string actionType)
        {
            _actionType = actionType;
            _siteName = siteName;
        }
    }
}
