﻿using System;
using System.Collections.Generic;
using System.Text;
using System.Configuration;
using System.IO;
using System.Text.RegularExpressions;
using System.Threading;
using PreguntaAlExperto.RulesLayer.Services;
using PreguntaAlExperto.RulesLayer.Entities;
using PreguntaAlExperto.TransporterObjectsFramework;
using System.Web;
using Common;

namespace Crawler
{
    #region Crawler Class 
    class Crawler
    {
        #region Properties

        /// <summary>Shared catalog (inverted index) that crawled keywords are added to.</summary>
        private Catalog Catalog
        {
            get { return Catalog.Instance; }
        }

        /// <summary>Folder (app-config key "FileRepository") containing the article .txt files.</summary>
        private string RepositoryPath
        {
            get { return ConfigurationManager.AppSettings["FileRepository"]; }
        }

        /// <summary>Base URL used to build the public link stored with each indexed file.</summary>
        // NOTE(review): hard-coded dev URL — consider moving to configuration next to "FileRepository".
        private string Url
        {
            get { return @"http://localhost:3071/articles/"; }
        }
        #endregion

        #region Fields

        // Hoisted out of CrawlFile so the patterns are parsed/compiled once
        // instead of once per file (and once per word for the key pattern).
        private static readonly Regex WhitespaceRegex =
            new Regex(@"\s+", RegexOptions.CultureInvariant | RegexOptions.Compiled);

        // Characters allowed in an index key; everything else is stripped.
        private static readonly Regex NonKeyCharsRegex =
            new Regex(@"[^a-z0-9,.áéíóúüñÑ]", RegexOptions.IgnoreCase | RegexOptions.Compiled);

        // WaitHandle.WaitAll rejects arrays with more than 64 handles (and empty arrays).
        private const int MaxWaitAllHandles = 64;

        #endregion

        #region Constructors
        public Crawler()
        {
        }
        #endregion

        #region Members

        /// <summary>
        /// ThreadPool callback: reads one article's text file, tokenizes it and adds
        /// every normalized keyword to the shared catalog, then signals the caller.
        /// </summary>
        /// <param name="param">A <see cref="CrawlArg"/> carrying the article and its wait handle.</param>
        private void CrawlFile(Object param)
        {
            CrawlArg arg = (CrawlArg)param;
            try
            {
                Article article = arg.Article;

                String fileName = Path.GetFileNameWithoutExtension(article.Path);
                String filePath = Path.Combine(RepositoryPath, fileName + ".txt");
                String fileUrl = Url + fileName + ".aspx";

                // NOTE(review): UTF-7 is obsolete and insecure (SYSLIB0001); kept because the
                // repository files are presumably stored in it — confirm before moving to UTF-8.
                // The using block guarantees the reader is disposed even if ReadToEnd throws
                // (the original leaked the stream on failure).
                String content;
                using (StreamReader reader = new StreamReader(filePath, Encoding.UTF7))
                {
                    content = reader.ReadToEnd();
                }

                String wordsOnly = HtmlHelpers.StripHtml(content);

                // Collapse whitespace runs so Split below sees single separators.
                wordsOnly = WhitespaceRegex.Replace(wordsOnly, " ");

                // Prepend metadata so title/description/categories/publisher are indexed too.
                wordsOnly = String.Format("{0} {1} {2} {3} {4}",
                    article.Title,
                    article.Short_Description,
                    article.AllCategories,
                    article.PublisherEntity.Name,
                    wordsOnly);

                string[] words = wordsOnly.Split(
                    new string[] { " ", ".", ",", "@" },
                    StringSplitOptions.RemoveEmptyEntries
                    );

                File inFile = new File(fileUrl
                                    , article.Title
                                    , article.Short_Description
                                    , DateTime.Now
                                    , content.Length);

                foreach (string word in words)
                {
                    // Strip disallowed characters, lower-case invariantly (culture-sensitive
                    // ToLower is wrong for index keys, e.g. the Turkish-I problem) and fold
                    // Spanish accented characters to their ASCII equivalents.
                    string key = NonKeyCharsRegex.Replace(word, "");
                    key = key.ToLowerInvariant();
                    key = key.Replace('á', 'a')
                            .Replace('é', 'e')
                            .Replace('í', 'i')
                            .Replace('ó', 'o')
                            .Replace('ú', 'u')
                            .Replace('ü', 'u')
                            .Replace('ñ', 'n');

                    Catalog.Add(key, inFile);
                }
            }
            finally
            {
                // Always signal, even when reading/parsing fails, so Crawl() cannot hang
                // forever waiting on a handle that would otherwise never be set.
                arg.Handle.Set();
            }
        }

        /// <summary>
        /// Crawls every article: queues one ThreadPool work item per article, waits for
        /// all of them to complete, then persists the catalog.
        /// </summary>
        public void Crawl()
        {
            ArticlesService service = new ArticlesService();
            BaseList<Article> articles = service.getAllArticles();

            AutoResetEvent[] autoEvents = new AutoResetEvent[articles.Count];

            int i = 0;
            foreach (Article article in articles)
            {
                autoEvents[i] = new AutoResetEvent(false);

                ThreadPool.QueueUserWorkItem(new WaitCallback(CrawlFile),
                    new CrawlArg() { Article = article, Handle = autoEvents[i++] });
            }

            // WaitHandle.WaitAll throws ArgumentException for empty arrays and for more
            // than 64 handles, so wait in batches of at most 64. This also makes a crawl
            // of zero articles a no-op instead of a crash.
            for (int offset = 0; offset < autoEvents.Length; offset += MaxWaitAllHandles)
            {
                int count = Math.Min(MaxWaitAllHandles, autoEvents.Length - offset);
                AutoResetEvent[] batch = new AutoResetEvent[count];
                Array.Copy(autoEvents, offset, batch, 0, count);
                WaitHandle.WaitAll(batch);
            }

            Catalog.Save();
        }
        #endregion
    }
    #endregion

    #region CrawlArg Class (Util)
    /// <summary>
    /// Parameter object passed to the crawler's ThreadPool callback: pairs the article
    /// to be processed with the event the worker signals on completion.
    /// </summary>
    public class CrawlArg
    {
        private AutoResetEvent _handle;
        private Article _article;

        /// <summary>Event the worker signals once its article has been crawled.</summary>
        public AutoResetEvent Handle
        {
            get { return _handle; }
            set { _handle = value; }
        }

        /// <summary>Article this work item should crawl.</summary>
        public Article Article
        {
            get { return _article; }
            set { _article = value; }
        }
    }
    #endregion
}
