﻿using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Net;
using System.IO;
using HtmlAgilityPack;
using System.Threading.Tasks;
using Chinesys.Domain.Helpers;

namespace Chinesys.Domain.Crawling
{
    public class Spider
    {
        //TODO: move these paths to the config file.
        static DateTime date = DateTime.Today;
        // All output files live in one folder and share a yyyyMM suffix so each
        // month's crawl produces its own set of files.
        static string outputFolder = @"D:\Dev\Sources\DealExtremeCrawler\DealExtremeCrawler\Outputs";
        static string monthSuffix = date.ToString("yyyyMM");
        static string productsPath = outputFolder + @"\Products_" + monthSuffix + ".txt";
        static string productsXmlPath = outputFolder + @"\Products_" + monthSuffix + ".xml";
        static string productErrorPath = outputFolder + @"\ProductParsingErrors_" + monthSuffix + ".txt";
        static string linkErrorsPath = outputFolder + @"\LinkErrors_" + monthSuffix + ".txt";

        public Spider()
        {
            // Fresh accumulators for a new crawl run.
            ParsingErrors = new List<ProductError>();
            LinkErrors = new List<LinkError>();
            SuccededCrawled = new List<string>();
            Links = new List<string>();
            Products = new List<Product>();
        }

        // Errors raised while parsing individual product fragments (see CrawlPage).
        public List<ProductError> ParsingErrors { get; set; }
        // Errors raised while collecting or crawling category links.
        public List<LinkError> LinkErrors { get; set; }
        // Human-readable success messages, one per successfully crawled category.
        public List<string> SuccededCrawled { get; set; }
        // Category URLs discovered on the landing page.
        public List<string> Links { get; set; }
        // Every product parsed across all crawled categories.
        public List<Product> Products { get; set; }

        // Root URL of the site being crawled; assigned by CrawlSite and used as
        // the prefix for relative category hrefs.
        private string siteDomain;

        /// <summary>
        /// Convenience entry point: crawls the given site and persists the results
        /// (products, product parsing errors, link errors) to the static output paths.
        /// </summary>
        /// <param name="siteUrl">Root URL of the site to crawl.</param>
        public static void CrawlAndSave(string siteUrl)
        {
            var spider = new Spider();
            spider.CrawlSite(siteUrl);

            // Persist everything gathered during the crawl.
            SerializationHelper.BinarySerialize(spider.Products, productsPath);
            SerializationHelper.GenerateTextFile(productErrorPath, spider.GetProductErrorsFormatted());
            SerializationHelper.GenerateTextFile(linkErrorsPath, spider.GetLinkErrorsFormatted());
        }

        /// <summary>
        /// Crawls the whole site: loads the landing page, collects every category
        /// link, then sequentially crawls each category page, accumulating products,
        /// successes and failures. Prints timing and summary info to the console.
        /// </summary>
        /// <param name="siteUrl">Root URL of the site; also used as the domain prefix for category links.</param>
        public void CrawlSite(string siteUrl)
        {
            DateTime startTime = DateTime.Now;
            Console.WriteLine("Starting Site Crawl at {0}:{1}:{2}", startTime.Hour, startTime.Minute, startTime.Second);

            //Gets the HtmlDocument for the Main Site
            siteDomain = siteUrl;
            string html = GetWebText(siteDomain);
            HtmlDocument doc = new HtmlDocument();
            doc.LoadHtml(html);

            //Collect all the links
            Links.AddRange(GetCategoryLinks(doc, LinkErrors));

            #region Parallel Process (Multi-Thread)
            ////Multi-Thread for opening many sockets at the same time.
            //Parallel.ForEach(Links, link =>
            //{
            //    string category = link.Substring(link.IndexOf(@"/c/") + 3, (link.IndexOf("?") - (link.IndexOf(@"/c/") + 3)));

            //    try
            //    {
            //        Console.WriteLine("Start Crawling Category: '{0}'", category);
            //        CrawlPage(link, ParsingErrors, Products);
            //        SuccededCrawled.Add(string.Format("Crawling Category '{0}' Succeded", category));
            //        Console.WriteLine("Success Crawling Category '{0}' !!!", category);
            //    }
            //    catch (Exception ex)
            //    {
            //        Console.WriteLine("Could not Crawl: {0}", category);

            //        LinkErrors.Add(new LinkError()
            //        {
            //            Url = link,
            //            Message = string.Format("Could not Crawl category: {0} - Error: {1}", category, ex.Message)
            //        });
            //    }
            //}); 
            #endregion

            #region Sequential Process
            //Sequential process of each page
            foreach (string link in Links)
            {
                // BUGFIX: the category extraction used to run here as a raw Substring
                // OUTSIDE the try block; a link without "/c/" or "?" threw and aborted
                // the whole crawl. ExtractCategory is marker-safe.
                string category = ExtractCategory(link);

                try
                {
                    Console.Write("Start Crawling Category: '{0}'... ", category);
                    CrawlPage(link, ParsingErrors, Products);
                    SuccededCrawled.Add(string.Format("Crawling Category '{0}' Succeded", category));
                    Console.WriteLine("SUCCESS!");
                }
                catch (Exception ex)
                {
                    Console.WriteLine("FAILED");

                    LinkErrors.Add(new LinkError()
                    {
                        // BUGFIX: Url was not being recorded here, unlike the parallel
                        // variant above, leaving failed links untraceable.
                        Url = link,
                        Message = string.Format("Could not Crawl category: {0} - Error: {1}", category, ex.Message)
                    });
                }
            }
            #endregion

            DateTime endTime = DateTime.Now;
            Console.WriteLine("Finished Site Crawling at {0}:{1}:{2}", endTime.Hour, endTime.Minute, endTime.Second);

            TimeSpan span = endTime - startTime;

            Console.WriteLine("Total Elapsed Time: {0} minutes.", span.TotalMinutes);

            PrintResults();
        }

        /// <summary>
        /// Extracts the category slug between "/c/" and "?" from a category URL.
        /// Falls back to the full link when either marker is missing, so logging
        /// still shows something meaningful instead of throwing.
        /// </summary>
        /// <param name="link">Category URL to inspect.</param>
        /// <returns>The category slug, or the whole link when it cannot be isolated.</returns>
        private static string ExtractCategory(string link)
        {
            int start = link.IndexOf(@"/c/");
            if (start < 0)
                return link;
            start += 3;

            int end = link.IndexOf("?", start);
            if (end < 0)
                end = link.Length;

            return link.Substring(start, end - start);
        }

        /// <summary>
        /// Downloads one category page and scrapes every product entry on it. Each
        /// product fragment (image, description, rating/reviews, price, category) is
        /// parsed inside its own try/catch, so a single malformed fragment only adds
        /// a ProductError and leaves that field empty instead of aborting the page.
        /// </summary>
        /// <param name="url">Absolute URL of the category page to download and parse.</param>
        /// <param name="parsingErrors">Accumulator receiving one ProductError per failed fragment.</param>
        /// <param name="products">Accumulator receiving every parsed (possibly partial) Product.</param>
        private void CrawlPage(string url, List<ProductError> parsingErrors, List<Product> products)
        {
            Product product;

            string html = GetWebText(url);

            HtmlDocument doc = new HtmlDocument();
            doc.LoadHtml(html);

            //Get all products in the current page
            // count is the 0-based position of the product on the page, reported as
            // the "line number" in error entries.
            // NOTE(review): SelectNodes returns null when nothing matches, which would
            // make this foreach throw; CrawlSite catches exceptions per link.
            int count = -1;
            foreach (HtmlNode productNode in doc.DocumentNode.SelectNodes("//div[@class='productsList']"))
            {
                count++;

                product = new Product();

                #region Image
                //Image
                try
                {
                    // assumes the first <a> wraps the product image and its second
                    // child node is the <img> — TODO confirm against live markup
                    List<HtmlNode> a = productNode.ChildNodes.Where(o => o.NodeType == HtmlNodeType.Element && o.Name == "a").ToList();
                    product.ImageSrc = a[0].ChildNodes[1].Attributes.Where(o => o.Name == "src").Select(o => o.Value).First();
                }
                catch (Exception e)
                {
                    parsingErrors.Add(new ProductError()
                    {
                        Url = url,
                        LineNumber = count.ToString(),
                        ProductPart = "Image",
                        Message = e.Message,
                        CallStack = e.StackTrace,
                        InnerException = e.InnerException != null ? e.InnerException.Message : "null"
                    });

                }
                #endregion

                // The first inner <div> holds the textual product data; its child divs
                // are indexed below as [0]=description, [1]=rating, [2]=price/category.
                List<HtmlNode> productDivs = productNode.ChildNodes.Where(o => o.NodeType == HtmlNodeType.Element && o.Name == "div").First()
                                                        .ChildNodes.Where(o => o.NodeType == HtmlNodeType.Element && o.Name == "div").ToList();

                #region Description
                try
                {
                    //Description
                    product.Description = productDivs[0].ChildNodes.Where(o => o.NodeType == HtmlNodeType.Element && o.Name == "a")
                                                     .Select(o => o.InnerText)
                                                     .First().Trim();
                }
                catch (Exception e)
                {
                    parsingErrors.Add(new ProductError()
                    {
                        Url = url,
                        LineNumber = count.ToString(),
                        ProductPart = "Description",
                        Message = e.Message,
                        CallStack = e.StackTrace,
                        InnerException = e.InnerException != null ? e.InnerException.Message : "null"
                    });
                }
                #endregion

                #region Rating & Reviews
                try
                {
                    HtmlNode ratingAnchor = productDivs[1].ChildNodes.Where(o => o.NodeType == HtmlNodeType.Element && o.Name == "a").First();

                    //Reviews
                    product.TotalReviews = ratingAnchor.InnerText.Trim();

                    //Rating
                    // the rating value is carried in the alt text of the stars <img>
                    product.Rating = ratingAnchor.ChildNodes.Where(o => o.NodeType == HtmlNodeType.Element && o.Name == "img").First()
                                                 .Attributes.Where(o => o.Name == "alt")
                                                 .Select(o => o.Value).First();
                }
                catch (Exception e)
                {
                    parsingErrors.Add(new ProductError()
                    {
                        Url = url,
                        LineNumber = count.ToString(),
                        ProductPart = "Rating & Review",
                        Message = e.Message,
                        CallStack = e.StackTrace,
                        InnerException = e.InnerException != null ? e.InnerException.Message : "null"
                    });
                }
                #endregion

                #region Price
                try
                {
                    //Price
                    // Single() (not First()): exactly one <b> is expected; anything
                    // else is treated as a parsing error.
                    product.Price = productDivs[2].ChildNodes.Single(o => o.NodeType == HtmlNodeType.Element && o.Name == "b").InnerText.Trim();
                }
                catch (Exception e)
                {
                    parsingErrors.Add(new ProductError()
                    {
                        Url = url,
                        LineNumber = count.ToString(),
                        ProductPart = "Price",
                        Message = e.Message,
                        CallStack = e.StackTrace,
                        InnerException = e.InnerException != null ? e.InnerException.Message : "null"
                    });
                }
                #endregion

                #region Category
                try
                {
                    product.Category = productDivs[2].ChildNodes.Single(o => o.NodeType == HtmlNodeType.Element && o.Name == "div")
                                                     .ChildNodes.Single(o => o.NodeType == HtmlNodeType.Element && o.Name == "a")
                                                     .InnerText.Trim();

                    // presumably strips an HTML-entity prefix (everything up to the
                    // first ';') from the category label — TODO confirm against live markup
                    product.Category = product.Category.Substring(product.Category.IndexOf(';') + 1).Trim();
                }
                catch (Exception e)
                {
                    parsingErrors.Add(new ProductError()
                    {
                        Url = url,
                        LineNumber = count.ToString(),
                        ProductPart = "Category",
                        Message = e.Message,
                        CallStack = e.StackTrace,
                        InnerException = e.InnerException != null ? e.InnerException.Message : "null"
                    });
                }
                #endregion


                products.Add(product);
            }

            #region Recursive fetch next page (don't used when multithreading)
            ////Fetch next page
            //string domain = "http://www.dealextreme.com";
            //HtmlNode pagerDiv = doc.DocumentNode.SelectNodes("//div[@class='ProductPager']").First();
            //HtmlNode nextPageHref = pagerDiv.ChildNodes.Where(o => o.NodeType == HtmlNodeType.Element && o.Name == "a" && o.InnerText == "Next Page").FirstOrDefault();
            //if (nextPageHref != null)
            //{
            //    string nextUrl = domain + nextPageHref.Attributes.Where(o => o.Name == "href").First().Value;
            //    if (nextUrl.Length != domain.Length)
            //        CrawlPage(nextUrl, parsingErrors, products);
            //} 
            #endregion

        }

        /// <summary>
        /// Downloads the raw text (HTML) of the given URL using a simple GET request.
        /// </summary>
        /// <param name="url">Absolute URL to fetch.</param>
        /// <returns>The full response body as a string.</returns>
        private string GetWebText(string url)
        {
            HttpWebRequest request = (HttpWebRequest)WebRequest.Create(url);
            // Some sites reject requests without a User-Agent header.
            request.UserAgent = "SomeUser";

            // BUGFIX: the response, stream and reader were never disposed, leaking
            // sockets/handles across the many requests a crawl performs.
            using (WebResponse response = request.GetResponse())
            using (Stream stream = response.GetResponseStream())
            using (StreamReader reader = new StreamReader(stream))
            {
                return reader.ReadToEnd();
            }
        }
                
        /// <summary>
        /// Collects every sub-level category link from the landing page document,
        /// prefixing each href with the site domain and appending a paging query
        /// string that requests the whole category in one page.
        /// </summary>
        /// <param name="doc">Parsed HTML of the landing page.</param>
        /// <param name="errors">Accumulator receiving one LinkError per anchor that could not be processed.</param>
        /// <returns>Absolute category URLs, one per matching anchor.</returns>
        private List<string> GetCategoryLinks(HtmlDocument doc, List<LinkError> errors)
        {
            List<string> links = new List<string>();

            // NOTE(review): SelectNodes returns null when nothing matches, which
            // would make this foreach throw — callers currently tolerate that.
            int count = -1;
            foreach (HtmlNode link in doc.DocumentNode.SelectNodes("//a[@class='sublevel' and @href]"))
            {
                // BUGFIX: count was never incremented, so every LinkError reported
                // line number "-1" regardless of which anchor failed.
                count++;

                try
                {
                    // DX drives paging through the query string, so a huge page size
                    // fetches every product of the category in a single request.
                    links.Add(siteDomain + link.Attributes.Single(o => o.Name == "href").Value + "?page=1&pagesize=10000&pagesort=relevence");
                }
                catch (Exception e)
                {
                    errors.Add(new LinkError()
                    {
                        LineNumber = count.ToString(),
                        Message = e.Message,
                        CallStack = e.StackTrace,
                        InnerException = e.InnerException != null ? e.InnerException.Message : "null"
                    });
                }
            }

            return links;
        }

        /// <summary>
        /// Downloads a category page and builds URLs for pages 2..N of that category,
        /// where N is the highest page number shown in the pager buttons.
        /// </summary>
        /// <param name="url">Category URL whose pager should be inspected.</param>
        /// <returns>URLs for every additional page; empty when the category fits on one page.</returns>
        private List<string> GetAllPageNumberLinks(string url)
        {
            string html = GetWebText(url);
            HtmlDocument doc = new HtmlDocument();
            doc.LoadHtml(html);

            string pageParameter = "~page.{0}~pagesize.52~pagesort.relevence";
            HtmlNode pagerDiv = doc.DocumentNode.SelectNodes("//div[@class='ProductPager']").First();
            List<HtmlNode> pages = pagerDiv.ChildNodes
                                           .Where(o => o.NodeType == HtmlNodeType.Element
                                                               && o.Name == "a"
                                                               && o.Attributes.Any(a => a.Name == "class" && a.Value == "ProductPagerButton"))
                                           .ToList();

            // BUGFIX: Max() throws InvalidOperationException on an empty sequence;
            // a pager with no numbered buttons now falls back to a single page.
            int maxPage = pages.Count > 0 ? pages.Max(o => TextToInt(o.InnerText)) : 1;

            List<string> allPageLinks = new List<string>();

            // Page 1 is the original url itself, so only pages 2..maxPage are added.
            for (int i = 2; i <= maxPage; i++)
            {
                allPageLinks.Add(string.Format(url + pageParameter, i));
            }

            return allPageLinks;
        }

        /// <summary>
        /// Parses the given text as an integer, returning 0 when the text is not a
        /// valid number.
        /// </summary>
        /// <param name="text">Text to parse.</param>
        /// <returns>The parsed value, or 0 on failure.</returns>
        private int TextToInt(string text)
        {
            int parsed;
            return int.TryParse(text, out parsed) ? parsed : 0;
        }



        /// <summary>
        /// Writes a one-line-per-counter summary of the crawl to the console.
        /// </summary>
        private void PrintResults()
        {
            Console.WriteLine("Total Products Crawled: {0}", this.Products.Count);
            Console.WriteLine("Total Product Errors: {0}", this.ParsingErrors.Count);
            Console.WriteLine("Total Link Errors: {0}", this.LinkErrors.Count);
        }

        /// <summary>
        /// Formats the accumulated product parsing errors grouped by the URL they
        /// occurred on: one banner per URL followed by that URL's errors, one per line.
        /// </summary>
        /// <returns>A human-readable report; empty string when there were no errors.</returns>
        public string GetProductErrorsFormatted()
        {
            StringBuilder sb = new StringBuilder();

            List<string> errorUrls = ParsingErrors.Select(o => o.Url).Distinct().ToList();

            foreach (string url in errorUrls)
            {
                sb.AppendLine("**************************************************************************");
                // BUGFIX: the URL was never substituted into the banner — the literal
                // text "Errors in URL: ({0})" was emitted verbatim.
                sb.AppendLine(string.Format("Errors in URL: ({0})", url));
                sb.AppendLine("**************************************************************************");

                // BUGFIX: previously iterated over ALL errors under every banner,
                // repeating the whole list once per URL; now only this URL's errors
                // are listed, each terminated with a newline.
                foreach (ProductError e in ParsingErrors.Where(o => o.Url == url))
                {
                    sb.AppendFormat("{0} ({1}) - {2}",
                                      e.LineNumber,
                                      e.ProductPart,
                                      e.Message);
                    sb.AppendLine();
                }
            }

            return sb.ToString();
        }

        /// <summary>
        /// Formats the accumulated link errors as one message per line.
        /// </summary>
        /// <returns>The concatenated messages; empty string when there were none.</returns>
        public string GetLinkErrorsFormatted()
        {
            StringBuilder builder = new StringBuilder();

            foreach (LinkError error in this.LinkErrors)
            {
                builder.AppendLine(error.Message);
            }

            return builder.ToString();
        }
    }
}
