﻿using System;
using System.Collections.Generic;
using System.Configuration;
using System.Text;
using System.Threading;
using CrawlServiceCore.Configuration;
using Microsoft.Practices.EnterpriseLibrary.Logging;
using Microsoft.SharePoint;
using System.Linq;

namespace CrawlServiceCore.Crawl
{
    /// <summary>
    /// Coordinates asynchronous crawling of configured SharePoint sites and lists.
    /// For each site in the configuration a list-crawl is started on the thread pool,
    /// and each crawled item's requested fields are forwarded to the attached
    /// <see cref="IHandleCrawlInformation"/> handler (if any).
    /// </summary>
    public class CrawlBase
    {
        #region Properties
        /// <summary>Enterprise Library log writer used for all error reporting.</summary>
        public LogWriter TheLog { get; set; }
        /// <summary>Optional sink that receives the crawled item information.</summary>
        public IHandleCrawlInformation InfoHandler { get; set; }
        #endregion

        #region Constructors
        /// <summary>Creates a crawler with a log writer and no information handler.</summary>
        /// <param name="log">Log writer used for error reporting; must not be null.</param>
        public CrawlBase(LogWriter log)
        {
            TheLog = log;
        }

        /// <summary>Creates a crawler with a log writer and an information handler.</summary>
        /// <param name="log">Log writer used for error reporting; must not be null.</param>
        /// <param name="infoHandler">Sink that receives each crawled item's field values.</param>
        public CrawlBase(LogWriter log, IHandleCrawlInformation infoHandler)
        {
            TheLog = log;
            InfoHandler = infoHandler;
        }
        #endregion

        /// <summary>
        /// Method used to start the crawling of configured sites and lists
        /// </summary>
        /// <returns>True returned if all completed successfully; False returned if an error occurred</returns>
        public bool StartCrawling()
        {
            try
            {
                // One pending IAsyncResult per configured site crawl.
                List<IAsyncResult> pendingSites = new List<IAsyncResult>();
                CrawlListDelegate del = new CrawlListDelegate(CrawlListService);

                //Step 1: Read Configuration details
                CrawlDetailsSection configSection = ReadConfigFile();
                if (configSection != null)
                {
                    foreach (CrawlSiteElementCollection site in configSection.CrawlDetails)
                    {
                        //Step 2: Kick off Crawls for each site
                        pendingSites.Add(del.BeginInvoke(site, null, null));
                    }

                    // Step 3: Wait for every site crawl to complete. Every BeginInvoke
                    // must be paired with EndInvoke (which blocks until the call finishes
                    // and releases the resources of the async call). Unlike the previous
                    // WaitHandle.WaitAll approach, this is not limited to 64 handles and
                    // also works from an STA thread.
                    foreach (IAsyncResult pending in pendingSites)
                    {
                        del.EndInvoke(pending);
                    }
                }
                return true;
            }
            catch (Exception ex)
            {
                TheLog.Write("Error from CrawlBase::StartCrawling stack trace: " + ex.ToString(), "Error");
                return false;
            }
        }

        #region Async Methods
        protected delegate void CrawlListDelegate(CrawlSiteElementCollection siteCollection);

        /// <summary>
        /// Based on a site url an asynchronous method is started for each list found in the configuration file to be crawled.
        /// This method completes when all lists have been crawled.
        /// </summary>
        /// <param name="siteCollection">The Site element collection from the configuration file</param>
        protected void CrawlListService(CrawlSiteElementCollection siteCollection)
        {
            try
            {
                // One pending IAsyncResult per configured list in this site.
                List<IAsyncResult> pendingLists = new List<IAsyncResult>();
                GetListDataDelegate delList = new GetListDataDelegate(GetListData);
                int count = siteCollection.CollectionCount;
                for (int i = 0; i < count; i++)
                {
                    pendingLists.Add(delList.BeginInvoke(siteCollection.SiteUrl, siteCollection[i], null, null));
                }

                // Pair every BeginInvoke with EndInvoke; EndInvoke blocks until the list
                // crawl completes, so this loop returns only when all lists are done.
                // (Avoids WaitHandle.WaitAll's 64-handle/STA limitations.)
                foreach (IAsyncResult pending in pendingLists)
                {
                    delList.EndInvoke(pending);
                }
            }
            catch (Exception ex)
            {
                TheLog.Write("error from CrawlListService stack trace: " + ex.ToString(), "Error");
            }
        }

        protected delegate void GetListDataDelegate(string siteUrl, CrawlListElement listDetails);

        /// <summary>
        /// Based on the site url and the Crawl List Element details, a given list is queried for items as defined in
        /// the configuration file.
        /// The requested fields are then sent to the InfoHandler.
        /// </summary>
        /// <param name="siteUrl">List's site url</param>
        /// <param name="listDetails">Configuration details for a given list</param>
        protected void GetListData(string siteUrl, CrawlListElement listDetails)
        {
            try
            {
                using (SPSite site = new SPSite(siteUrl))
                {
                    using (SPWeb web = site.OpenWeb())
                    {
                        // NOTE(review): the SPListCollection string indexer throws
                        // (rather than returning null) when the list is missing; the
                        // null check below is defensive and the throw lands in the
                        // catch block — confirm this is the intended "skip" behavior.
                        SPList list = web.Lists[listDetails.ListName];
                        if (list != null)
                        {
                            SPQuery query = new SPQuery();
                            SPField userFld, descFld;
                            query.Query = listDetails.Query;
                            SPListItemCollection tasks = list.GetItems(query);
                            foreach (SPListItem item in tasks)
                            {
                                if (InfoHandler != null)
                                {
                                    userFld = item.Fields.GetFieldByInternalName(listDetails.UsernameField);
                                    descFld = item.Fields.GetFieldByInternalName(listDetails.DescriptionField);

                                    InfoHandler.SendInformation(userFld.GetFieldValueAsText(item[listDetails.UsernameField])
                                        , descFld.GetFieldValueAsText(item[listDetails.DescriptionField])
                                        , CreateItemDisplayLink(web.Url, item.ID, list.Forms[PAGETYPE.PAGE_DISPLAYFORM].Url));
                                }
                            }
                        }//end null check
                    }//end spweb
                }//end spsite
            }
            catch (Exception ex)
            {
                TheLog.Write("error from GetListData stack trace: " + ex.ToString(), "Error");
            }
        }
        #endregion

        #region Helper Methods
        /// <summary>
        /// Reads in the current configuration file and extracts the CrawlSites custom section to drive the list crawling.
        /// </summary>
        /// <returns>The parsed "CrawlSection" section, or null when it is absent or malformed.</returns>
        private CrawlDetailsSection ReadConfigFile()
        {
            try
            {
                // The 'as' cast yields null when the section is missing or of an
                // unexpected type; callers treat null as "nothing to crawl".
                CrawlDetailsSection myCrawl =
                   ConfigurationManager.GetSection("CrawlSection") as CrawlDetailsSection;

                return myCrawl;
            }
            catch (ConfigurationErrorsException err)
            {
                TheLog.Write("Error in CrawlBase::ReadConfigFile Stack Trace: " + err.ToString(), "Error");
            }
            return null;
        }

        /// <summary>
        /// Simple string builder used to generate the task's display link.
        /// </summary>
        /// <param name="siteUrl">SPWeb url of the current list</param>
        /// <param name="itemID">ID value for the current item</param>
        /// <param name="displayUrl">URL to the display form for the current list item.</param>
        /// <returns>The display-form URL in the form "{siteUrl}/{displayUrl}?ID={itemID}".</returns>
        private string CreateItemDisplayLink(string siteUrl, int itemID, string displayUrl)
        {
            return string.Format("{0}/{1}?ID={2}", siteUrl, displayUrl, itemID);
        }
        #endregion
    }
}
