using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.IO;
using System.Net;
using System.Text;
using System.Text.RegularExpressions;
using System.Threading;
using System.Threading.Tasks;

namespace crawler
{
    /// <summary>
    /// Simple breadth-first web crawler: starting from <see cref="startUrl"/> it downloads
    /// pages, saves them under G:\CrawlerData, and follows links whose host and file name
    /// match the configured regex filters. Up to 40 worker tasks run concurrently and the
    /// crawl stops once the frontier is empty or 100 URLs have been discovered.
    /// </summary>
    class Crawler
    {
        // Seed URL for a crawl started via Start().
        public string startUrl { get; set; }
        // Regex applied to a candidate's file name (e.g. "index.html"); non-matches are skipped.
        public string fileFilter { get; set; }
        // Regex applied to a candidate's host (e.g. "www.example.com"); non-matches are skipped.
        public string hostFilter { get; set; }
        // Raised once when Start() finishes (all workers done).
        public event Action Stopped;
        // Raised after every crawl attempt with (url, status message).
        public event Action<string, string> Downloaded;
        // All URLs seen so far; the value flips to true once the page was downloaded.
        ConcurrentDictionary<string, bool> urls = new ConcurrentDictionary<string, bool>();
        // Frontier of URLs waiting to be crawled.
        ConcurrentQueue<string> que = new ConcurrentQueue<string>();
        // Number of completed crawl attempts; also used to name the saved files.
        public int num = 0;

        // Splits an absolute URL into the site root ("url", e.g. "http://host/"),
        // the host, and the trailing file name. Dots are escaped so "." is literal;
        // the original pattern let "." match "/" and the host group swallowed path segments.
        const string UrlPattern = @"^(?<url>https?://(?<host>(www\.)?[-a-zA-Z0-9@:%._+~#=]{1,256}\.[a-zA-Z0-9()]{1,6})/?)(.*/)*(?<file>(.*(\.html|\.jsp|\.aspx))?)$";

        /// <summary>
        /// Downloads <paramref name="url"/> and saves its content under G:\CrawlerData,
        /// naming the file after the current crawl counter plus the URL's extension.
        /// </summary>
        /// <param name="url">Absolute URL to fetch.</param>
        /// <returns>The downloaded page content.</returns>
        public string DownLoad(string url)
        {
            string content;
            // Dispose the client so the underlying connection is released promptly.
            using (WebClient webClient = new WebClient())
            {
                webClient.Encoding = Encoding.UTF8;
                content = webClient.DownloadString(url);
            }
            string dir = "G://CrawlerData//";
            // BUG FIX: the original reset fileName to just the counter for extension-less
            // URLs, dropping the directory and writing into the current working directory.
            string fileName = dir + num.ToString();
            if (Regex.IsMatch(url, @"\.html?$")) fileName += ".html";
            else if (Regex.IsMatch(url, @"\.jsp?$")) fileName += ".jsp";   // NOTE(review): also matches ".js" — confirm intent
            else if (Regex.IsMatch(url, @"\.aspx?$")) fileName += ".aspx";
            Directory.CreateDirectory(dir); // no-op when the directory already exists
            File.WriteAllText(fileName, content, Encoding.UTF8);
            return content;
        }

        /// <summary>
        /// Processes one URL from the frontier: downloads it, reports the result via
        /// <see cref="Downloaded"/>, and enqueues every new link that passes the filters.
        /// Always increments <see cref="num"/> exactly once, even on failure, so
        /// <see cref="Start"/> can track completed attempts.
        /// </summary>
        public void Func()
        {
            string curUrl;
            if (!que.TryDequeue(out curUrl))
            {
                // Queue drained by a concurrent worker; still count the attempt.
                Interlocked.Increment(ref num);
                return;
            }
            try
            {
                string content = DownLoad(curUrl);
                urls[curUrl] = true; // mark as successfully downloaded
                var matchUrls = new Regex(@"<a.+?(href|HREF)=[""'](?<url>[^""'#>]+)[""'].*>").Matches(content);
                Downloaded?.Invoke(curUrl, "爬取成功");
                foreach (Match anchor in matchUrls)
                {
                    string url = anchor.Groups["url"].Value;
                    if (string.IsNullOrEmpty(url)) continue;
                    url = Parse(url, curUrl);
                    Match urlMatch = Regex.Match(url, UrlPattern);
                    string host = urlMatch.Groups["host"].Value;
                    string file = urlMatch.Groups["file"].Value;
                    if (file.Equals("")) file = "index.html"; // a bare site root counts as its index page
                    if (!Regex.IsMatch(host, hostFilter) || !Regex.IsMatch(file, fileFilter)) continue;
                    // TryAdd is atomic: only the first worker to see this URL enqueues it
                    // (the original ContainsKey + GetOrAdd pair had a race window).
                    if (urls.TryAdd(url, false)) que.Enqueue(url);
                }
            }
            catch (Exception e)
            {
                Downloaded?.Invoke(curUrl, e.Message);
            }
            finally
            {
                // Thread-safe: Func runs concurrently on several tasks.
                Interlocked.Increment(ref num);
            }
        }

        /// <summary>
        /// Resolves a possibly-relative link against the page it was found on.
        /// </summary>
        /// <param name="url">Link as it appeared in the page (absolute or relative).</param>
        /// <param name="curUrl">Absolute URL of the page containing the link.</param>
        /// <returns>An absolute URL.</returns>
        public string Parse(string url, string curUrl)
        {
            if (url.Contains("://")) return url;            // already absolute
            if (url.StartsWith("//")) return "http:" + url; // protocol-relative
            if (url.StartsWith("/"))
            {
                // Root-relative: prepend the site root of the current page.
                // BUG FIX: the original read the non-existent group "site" (always empty);
                // the capture group is named "url".
                Match urlMatch = Regex.Match(curUrl, UrlPattern);
                string site = urlMatch.Groups["url"].Value;
                return site.EndsWith("/") ? site + url.Substring(1) : site + url;
            }
            if (url.StartsWith("../"))
            {
                // Step one directory up on the current URL and retry.
                int index = curUrl.LastIndexOf('/');
                return Parse(url.Substring(3), curUrl.Substring(0, index));
            }
            if (url.StartsWith("./"))
            {
                return Parse(url.Substring(2), curUrl);
            }
            // Plain relative path: replace the last path segment of the current URL.
            int endIndex = curUrl.LastIndexOf("/");
            return curUrl.Substring(0, endIndex) + "/" + url;
        }

        /// <summary>
        /// Clears the frontier, seeds it with <see cref="startUrl"/>, and crawls with up to
        /// 40 worker tasks until the frontier is empty or more than 100 URLs are known,
        /// then waits for all workers and raises <see cref="Stopped"/>.
        /// </summary>
        public void Start()
        {
            string temp;
            while (que.Count > 0) que.TryDequeue(out temp); // drop leftovers from a previous run
            urls.GetOrAdd(startUrl, false);
            que.Enqueue(startUrl);
            ConcurrentBag<Task> tasks = new ConcurrentBag<Task>();
            while (tasks.Count < 40 && urls.Count <= 100)
            {
                if (num < tasks.Count)
                {
                    // Workers still running; yield instead of hot-spinning a core.
                    Thread.Sleep(10);
                    continue;
                }
                // All started workers finished; stop when nothing is left to crawl.
                if (que.IsEmpty) break;
                tasks.Add(Task.Run(() => Func()));
            }
            // BUG FIX: the original ended with a stray ']' (syntax error), never waited
            // for the workers, and never raised the Stopped event it declares.
            Task.WaitAll(tasks.ToArray());
            Stopped?.Invoke();
        }
    }
}
