﻿using System;
using System.Collections.Generic;
using System.Text;
using System.IO;
using System.Net;
using System.Collections;
using System.Text.RegularExpressions;
using System.Threading;
using System.Threading.Tasks;

namespace Crawler {
    /// <summary>
    /// Minimal single-threaded web crawler. Starting from one absolute URL it
    /// downloads each page, saves it to a sequentially numbered local file,
    /// extracts same-host links ending in .html/.aspx/.jsp, and continues
    /// until no unvisited URLs remain or the page limit is reached.
    /// </summary>
    class Crawler {
        // Compiled once instead of being rebuilt on every Parse() call.
        // Matches href="..." where the target ends in .html/.aspx/.jsp and is
        // terminated by a closing quote, '?' or '#'.
        private static readonly Regex HrefRegex = new Regex(
            @"(href|HREF)\s*=\s*[""](?<Url>[^""]+[.](html|aspx|jsp))[""?#]",
            RegexOptions.Compiled);

        private int count;                                              // pages downloaded so far
        private Hashtable urls = new Hashtable();                       // absolute URL -> bool (true = visited)
        private readonly Queue<string> pending = new Queue<string>();   // discovered but not yet downloaded

        /// <summary>Number of pages downloaded by the last crawl.</summary>
        public int Count { get => count; }

        /// <summary>Map of discovered absolute URLs to their visited flag.</summary>
        public Hashtable Urls { get => urls; }

        /// <summary>Runs <see cref="Crawl"/> on a thread-pool thread.</summary>
        public async Task<bool> CrawlAsync(string startUrl, int maxNum) {
            return await Task.Run(() => Crawl(startUrl, maxNum));
        }

        /// <summary>
        /// Crawls starting at <paramref name="startUrl"/>, downloading at most
        /// <paramref name="maxNum"/> pages from the same host.
        /// </summary>
        /// <param name="startUrl">Absolute URL to start from.</param>
        /// <param name="maxNum">Maximum number of pages to download.</param>
        /// <returns>
        /// false when <paramref name="startUrl"/> is not a well-formed absolute
        /// URI; true otherwise (individual download failures are tolerated).
        /// </returns>
        public bool Crawl(string startUrl, int maxNum) {
            // Validate without exceptions-as-control-flow: the original built a
            // Uri and caught UriFormatException just to detect bad input.
            if (!Uri.TryCreate(startUrl, UriKind.Absolute, out _)) {
                return false;
            }
            count = 0;
            urls = new Hashtable {
                    { startUrl, false }
                };
            pending.Clear();
            pending.Enqueue(startUrl);
            // BUGFIX: the original condition `count > maxNum` downloaded
            // maxNum + 1 pages (and one page even when maxNum == 0);
            // `count < maxNum` enforces the limit exactly. The pending queue
            // replaces an O(n) rescan of urls.Keys on every iteration.
            while (pending.Count > 0 && count < maxNum) {
                string current = pending.Dequeue();
                string html = DownLoad(current);
                urls[current] = true;
                count++;
                Parse(current, html);
            }
            return true;
        }

        /// <summary>
        /// Downloads <paramref name="url"/> as UTF-8 text and saves it locally
        /// as "&lt;count&gt;&lt;ext&gt;" (extension taken from the URL, or
        /// ".html" when it has none).
        /// </summary>
        /// <returns>The page content, or "" on any failure (best-effort).</returns>
        private string DownLoad(string url) {
            try {
                // BUGFIX: WebClient is IDisposable and was never disposed.
                using (WebClient webClient = new WebClient()) {
                    webClient.Encoding = Encoding.UTF8;
                    string html = webClient.DownloadString(url);
                    string fileName = count.ToString() + (Path.HasExtension(url) ? Path.GetExtension(url) : ".html");
                    File.WriteAllText(fileName, html, Encoding.UTF8);
                    return html;
                }
            } catch (Exception e) {
                // Deliberate best-effort: a failed page must not abort the crawl.
                Console.WriteLine(e.Message);
                return "";
            }
        }

        /// <summary>
        /// Extracts crawlable links from <paramref name="html"/>, resolves them
        /// against <paramref name="rootUrl"/> (handles ./, ../, /, //host forms)
        /// and queues previously unseen same-host URLs for download.
        /// </summary>
        private void Parse(string rootUrl, string html) {
            Uri baseUri = new Uri(rootUrl);
            foreach (Match match in HrefRegex.Matches(html)) {
                try {
                    // Uri(baseUri, relative) performs standard RFC 3986 resolution.
                    Uri uri = new Uri(baseUri, match.Groups["Url"].Value);
                    // Stay on the starting host; record each URL only once.
                    if (uri.Host == baseUri.Host && urls[uri.AbsoluteUri] == null) {
                        urls[uri.AbsoluteUri] = false;
                        pending.Enqueue(uri.AbsoluteUri);
                    }
                } catch (UriFormatException) {
                    // Malformed link inside the page: skip it and keep crawling.
                }
            }
        }
    }
}
