﻿using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Net;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using System.Xml;
using HtmlAgilityPack;

namespace Homework1
{
    /// <summary>
    /// A single crawled web page: downloads its own HTML on construction, extracts
    /// the visible text, and records itself plus its outgoing links into the DB schema.
    /// </summary>
    public class WebPage
    {
        // Titles longer than this are truncated before being stored in the Webpages table.
        private const int MaxTitleLength = 5000;
        // Timeout (ms) for downloading a page's content.
        private const int WebClientTimeout = 5 * 1000;

        public readonly Uri Url;
        public string Title;
        public string Summary;
        public string Content; // Raw HTML source; null when the download failed.
        public string Text;    // Visible text extracted from Content (see GetTextFromContent).
        public readonly ConcurrentQueue<WebPage> InternalLinks = new ConcurrentQueue<WebPage>();
        public readonly ConcurrentQueue<WebPage> ExternalLinks = new ConcurrentQueue<WebPage>();

        private DbSchema.WebpagesRow _webpagesRow;

        /// <summary>
        /// Lazily finds or creates the Webpages row for this page's URL.
        /// Guarded by AddWebpagesLock so parallel crawls don't insert duplicate rows.
        /// </summary>
        public DbSchema.WebpagesRow WebpagesRow
        {
            get
            {
                lock (AddWebpagesLock)
                {
                    if (_webpagesRow == null)
                    {
                        // Check if there is already a Webpage row in the schema with the same URL
                        var rows = DB.Schema.Webpages.Where(r => r.Url.Equals(Url.ToString())).ToList();
                        if (rows.Count == 1)
                        {
                            _webpagesRow = rows[0];
                        }
                        else if (rows.Count == 0)
                        {
                            // No existing row: insert one. Truncation now uses MaxTitleLength
                            // (previously a hard-coded 5000, which would silently diverge if
                            // the constant changed) and tolerates a null Title.
                            var storedTitle = Title != null && Title.Length > MaxTitleLength
                                                  ? Title.Substring(0, MaxTitleLength)
                                                  : Title;
                            _webpagesRow = DB.Schema.Webpages.AddWebpagesRow(Guid.NewGuid(),
                                                                             storedTitle,
                                                                             Url.Host,
                                                                             Url.ToString(),
                                                                             Content,
                                                                             Text);
                        }
                        else
                        {
                            Console.WriteLine("The url \"{0}\" is somehow in the schema twice!", Url);
                        }
                    }
                    return _webpagesRow;
                }
            }
        }

        /// <summary>
        /// Creates the page and synchronously downloads its HTML. Content is left
        /// null when the download fails (403, timeout, unreachable host, ...).
        /// </summary>
        public WebPage(string title, Uri url, string summary)
        {
            Url = url;
            Title = title;
            Summary = summary;
            try
            {
                using (var wc = new WebClientWithTimeout(WebClientTimeout) { Proxy = null, })
                {
                    Content = wc.DownloadString(url);
                }
            }
            catch
            {
                Content = null; // Unable to download the page content (e.g. 403 error, etc...)
            }
        }

        // Serialize inserts into the Links and Webpages tables respectively;
        // the typed DataSet is written from parallel link workers.
        private static readonly object AddLinksLock = new object();
        private static readonly object AddWebpagesLock = new object();

        /// <summary>
        /// Fills Text with the page's visible text: every non-blank text node whose
        /// direct parent is not a script element. No-op when Content is null.
        /// </summary>
        /// <param name="doc">Optional pre-parsed document, to avoid re-parsing Content.</param>
        private void GetTextFromContent(HtmlDocument doc = null)
        {
            try
            {
                if (Content == null)
                {
                    return;
                }

                if (doc == null)
                {
                    doc = new HtmlDocument();
                    doc.LoadHtml(Content);
                }

                var textBuilder = new StringBuilder();
                var textNodes = doc.DocumentNode.SelectNodes("//*[not(self::script)]/text()[normalize-space(.) != '']");
                if (textNodes != null)
                {
                    foreach (var node in textNodes)
                    {
                        textBuilder.AppendLine(node.InnerText);
                    }

                    Text = textBuilder.ToString();
                }
            }
            catch (Exception ex)
            {
                Console.WriteLine(ex);
            }
        }

        /// <summary>
        /// Obtains the links for the web page, and, if currentDepth is less than
        /// maxDepth, it will recursively call GetLinks on each of the linked web pages
        /// </summary>
        /// <param name="maxDepth">Max depth to follow links to</param>
        /// <param name="maxTraverse">Maximum number of links to process on this page</param>
        /// <param name="currentDepth">Current path depth</param>
        public void GetLinks(int maxDepth, int maxTraverse, int currentDepth = 0)
        {
            // Content can be null if the webpage's source wasn't able to be downloaded
            if (Content == null)
            {
                return;
            }

            try
            {
                var doc = new HtmlDocument();
                doc.LoadHtml(Content);

                GetTextFromContent(doc);

                // Grab all <a> tags with an href attribute that is not empty and doesn't start with '#' (an anchor link)
                var links = doc.DocumentNode.SelectNodes("//a[@href != '' and not(starts-with(@href, '#'))]");
                if (links == null)
                {
                    var temp = WebpagesRow; // Touch the property so a Webpages row is still created for this page
                    return;
                }

                var linkCount = 0;
                Parallel.ForEach(links, new ParallelOptions { MaxDegreeOfParallelism = Environment.ProcessorCount }, link =>
                {
                    try
                    {
                        // Atomically claim a slot. The original compared and incremented in
                        // two separate steps, which let parallel workers exceed maxTraverse.
                        if (Interlocked.Increment(ref linkCount) > maxTraverse)
                        {
                            return;
                        }

                        var title = link.InnerText;

                        Uri linkUrl;
                        var hrefValue = link.GetAttributeValue("href", "local");

                        // Ordinal comparisons: hrefs are machine-readable, not linguistic text.
                        if (hrefValue.StartsWith("//", StringComparison.Ordinal))
                        {
                            linkUrl = new Uri(Url.Scheme + ":" + hrefValue); // Link which inherits parent protocol
                        }
                        else if (hrefValue.StartsWith("/", StringComparison.Ordinal))
                        {
                            linkUrl = new Uri(Url.GetBase() + hrefValue); // Relative path which doesn't traverse up the path
                        }
                        else if (hrefValue.StartsWith("..", StringComparison.Ordinal))
                        {
                            linkUrl = new Uri(Url.GetBaseWithPath() + "/" + hrefValue); // Relative path which traverses up
                        }
                        else if (!hrefValue.Contains("://"))
                        {
                            linkUrl = new Uri(Url.GetBase() + "/" + hrefValue); // Relative path which doesn't traverse up the path and didn't start with a / (hopefully...)
                        }
                        else
                        {
                            linkUrl = new Uri(hrefValue); // Normal link (hopefully...)
                        }

                        var result = new WebPage(title, linkUrl, Summary);

                        var isInternal = true;
                        if (linkUrl.GetRealHost() == Url.GetRealHost())
                        {
                            InternalLinks.Enqueue(result);
                        }
                        else
                        {
                            ExternalLinks.Enqueue(result);
                            isInternal = false;
                        }

                        // Get links of the results also, if we haven't reached the max depth yet
                        if (currentDepth < maxDepth)
                        {
                            result.GetLinks(maxDepth, maxTraverse, currentDepth + 1); // Grab links of the results, this will also end up creating a Webpages row
                        }
                        else
                        {
                            result.GetTextFromContent();
                            var temp = result.WebpagesRow; // Touch the property so the result's Webpages row is created
                        }

                        lock (AddLinksLock)
                        {
                            // NOTE(review): at max depth the result's Webpages row was just created
                            // above, yet null is stored as the link target — confirm leaf links are
                            // intentionally left untargeted before changing this.
                            DB.Schema.Links.AddLinksRow(Guid.NewGuid(),
                                                        WebpagesRow,
                                                        currentDepth < maxDepth ? result.WebpagesRow : null,
                                                        linkUrl.Host,
                                                        linkUrl.ToString(),
                                                        title,
                                                        isInternal);
                        }
                    }
                    catch (Exception ex)
                    {
                        Console.WriteLine(ex);
                    }
                });
            }
            catch (Exception ex)
            {
                Console.WriteLine(ex);
            }
        }
    }

    // Code partially based on answers.oreilly.com/topic/2165-how-to-search-google-and-bing-9n-c
    public static class GoogleQuery
    {
        // Timeout (ms) for the search-API request.
        private const int WebClientTimeout = 5 * 1000;

        /// <summary>
        /// Runs the Google search and calls GetLinks for the first 'maxResults' number of pages returned
        /// </summary>
        /// <param name="searchExpression">The search query</param>
        /// <param name="maxResults">Maximum number of results to get the links of</param>
        /// <param name="maxDepth">Max path depth to call GetLinks on</param>
        /// <param name="maxTraverse">Maximum number of links to follow per page</param>
        /// <returns>A list of the first 'maxResults' number of pages</returns>
        public static List<WebPage> GoogleSearch(string searchExpression, int maxResults, int maxDepth, int maxTraverse)
        {
            // SECURITY NOTE(review): the API key and search-engine id are committed in
            // source; they should be moved to configuration and the key rotated.
            const string urlTemplate = "https://www.googleapis.com/customsearch/v1?&key=AIzaSyANIiM3VBxdWb3i637RWy97U1lj-gYPS3I&cx=009981399320727436698:osjvbj1fqb4&alt=atom&q={0}";
            // Escape the query so characters like '&', '#', '+' and spaces can't corrupt
            // the request URL (the original interpolated the raw expression).
            var searchUrl = new Uri(string.Format(urlTemplate, Uri.EscapeDataString(searchExpression)));
            var resultsList = new List<WebPage>();
            try
            {
                using (var wc = new WebClientWithTimeout(WebClientTimeout) { Proxy = null })
                {
                    var page = wc.DownloadString(searchUrl);

                    using (var reader = XmlReader.Create(new StringReader(page)))
                    {
                        var resultCount = 0;
                        while (resultCount < maxResults && reader.Read())
                        {
                            // Stop cleanly when the feed has no more <entry> elements instead
                            // of spinning through empty reads until end-of-document (the
                            // original ignored this return value).
                            if (!reader.ReadToFollowing("entry"))
                            {
                                break;
                            }

                            reader.ReadToFollowing("id");
                            var idVal = reader.ReadString();
                            if (string.IsNullOrWhiteSpace(idVal))
                            {
                                continue;
                            }
                            var url = new Uri(idVal);

                            reader.ReadToFollowing("title");
                            var title = reader.ReadString();

                            reader.ReadToFollowing("summary");
                            var summary = reader.ReadString();

                            var result = new WebPage(title, url, summary);
                            resultsList.Add(result);

                            result.GetLinks(maxDepth, maxTraverse); // Grab links of the results, this will also end up creating a Webpages row
                            resultCount++;
                        }
                    }
                }
            }
            catch (Exception ex)
            {
                Console.WriteLine(ex);
            }
            return resultsList;
        }
    }

    /// <summary>
    /// WebClient subclass that applies a caller-supplied timeout to every request,
    /// since WebClient itself exposes no timeout setting.
    /// </summary>
    public class WebClientWithTimeout : WebClient
    {
        // Timeout in milliseconds applied to each request this client creates.
        private readonly int _timeout;

        /// <param name="timeout">Request timeout in milliseconds.</param>
        public WebClientWithTimeout(int timeout)
        {
            _timeout = timeout;
        }

        /// <summary>
        /// Intercepts request creation so the configured timeout is set on the
        /// underlying WebRequest before it is used.
        /// </summary>
        protected override WebRequest GetWebRequest(Uri address)
        {
            var request = base.GetWebRequest(address);
            request.Timeout = _timeout;
            return request;
        }
    }
}
