﻿using System;
using System.Collections.Generic;
using System.IO;
using System.Net;
using System.Text;
using System.Text.RegularExpressions;

namespace crawler
{
    /// <summary>
    /// Simple breadth-first web crawler: starting from <see cref="startUrl"/>, downloads pages
    /// whose host matches <see cref="hostFilter"/> and whose file name matches
    /// <see cref="fileFilter"/>, saving each page under G://CrawlerData.
    /// Stops once 100 URLs have been discovered or the queue is empty.
    /// </summary>
    class Crawler
    {
        public string startUrl { get; set; }
        public string fileFilter { get; set; }   // regex applied to the file-name portion of each URL
        public string hostFilter { get; set; }   // regex applied to the host portion of each URL
        public event Action Stopped;                      // raised once when the crawl loop ends
        public event Action<string, string> Downloaded;   // (url, status message) per attempted page
        // url -> downloaded yet?  Also doubles as the visited set for de-duplication.
        Dictionary<string, bool> urls = new Dictionary<string, bool>();
        Queue<string> que = new Queue<string>();
        public int num = 0;   // running counter used to name saved files

        // Target directory for saved pages (note: "G://" collapses to "G:/" on Windows).
        const string SaveDir = "G://CrawlerData//";

        // Shared pattern splitting an absolute URL into site root (url), host and trailing file
        // name. Dots are escaped (the original unescaped "." matched any character).
        static readonly Regex UrlParts = new Regex(
            @"^(?<url>https?://(?<host>(www\.)?[-a-zA-Z0-9@:%._+~#=]{1,256}\.[a-zA-Z0-9()]{1,6})/?)(.*/)*(?<file>(.*(\.html|\.jsp|\.aspx))?)$");

        /// <summary>
        /// Downloads <paramref name="url"/> and writes the content to <see cref="SaveDir"/>,
        /// naming the file after the running counter with an extension inferred from the URL.
        /// </summary>
        /// <param name="url">Absolute URL to fetch.</param>
        /// <returns>The downloaded page content.</returns>
        public string DownLoad(string url)
        {
            num++;
            string content;
            // BUG FIX: WebClient is IDisposable and was never disposed.
            using (WebClient webClient = new WebClient())
            {
                webClient.Encoding = Encoding.UTF8;
                content = webClient.DownloadString(url);
            }
            string fileName = SaveDir;
            // BUG FIX: dots escaped so e.g. "xhtml"/"*js" no longer match; the original
            // else branch dropped the directory prefix and wrote to the working directory.
            if (Regex.IsMatch(url, @"\.html?$")) fileName += num + ".html";
            else if (Regex.IsMatch(url, @"\.jsp$")) fileName += num + ".jsp";
            else if (Regex.IsMatch(url, @"\.aspx?$")) fileName += num + ".aspx";
            else fileName += num.ToString();
            Directory.CreateDirectory(SaveDir);   // no-op when the directory already exists
            File.WriteAllText(fileName, content, Encoding.UTF8);
            return content;
        }

        /// <summary>
        /// Resolves a (possibly relative) href against the page it was found on.
        /// </summary>
        /// <param name="url">The raw href value.</param>
        /// <param name="curUrl">Absolute URL of the page containing the link.</param>
        /// <returns>An absolute URL.</returns>
        public string Parse(string url, string curUrl)
        {
            // Already absolute ("http://", "https://", ...).
            if (url.Contains("://"))
            {
                return url;
            }
            // Protocol-relative link.
            if (url.StartsWith("//"))
            {
                return "http:" + url;
            }
            // Site-rooted link: prepend the site root of the current page.
            if (url.StartsWith("/"))
            {
                Match urlMatch = UrlParts.Match(curUrl);
                // BUG FIX: the capture group is named "url"; the original read the
                // non-existent group "site" and always got an empty string back.
                string site = urlMatch.Groups["url"].Value;
                return site.EndsWith("/") ? site + url.Substring(1) : site + url;
            }
            // Parent-relative link: strip one path segment and recurse.
            if (url.StartsWith("../"))
            {
                int index = curUrl.LastIndexOf('/');
                return Parse(url.Substring(3), curUrl.Substring(0, index));
            }
            // Current-directory link.
            if (url.StartsWith("./"))
            {
                return Parse(url.Substring(2), curUrl);
            }
            // Plain relative link: replace the last path segment of the current URL.
            int endIndex = curUrl.LastIndexOf("/");
            return curUrl.Substring(0, endIndex) + "/" + url;
        }

        /// <summary>
        /// Runs the crawl loop: breadth-first from <see cref="startUrl"/> until the queue is
        /// empty or 100 URLs are known. Raises <see cref="Downloaded"/> for every attempted
        /// page (success or failure message) and <see cref="Stopped"/> when finished.
        /// </summary>
        public void Start()
        {
            // BUG FIX: also reset the visited set so Start() can run more than once;
            // the original Dictionary.Add threw ArgumentException on a second run.
            que.Clear();
            urls.Clear();
            urls[startUrl] = false;
            que.Enqueue(startUrl);
            while (urls.Count < 100 && que.Count > 0)
            {
                string curUrl = que.Dequeue();
                try
                {
                    string content = DownLoad(curUrl);
                    urls[curUrl] = true;
                    // BUG FIX: null-safe invocation — raising an event with no subscribers
                    // threw NullReferenceException (and then again inside the catch below).
                    Downloaded?.Invoke(curUrl, "爬取成功");
                    var matchUrls = new Regex(@"<a.+?(href|HREF)=[""'](?<url>[^""'#>]+)[""'].*>").Matches(content);
                    foreach (Match anchor in matchUrls)
                    {
                        string url = anchor.Groups["url"].Value;
                        if (string.IsNullOrEmpty(url)) continue;
                        url = Parse(url, curUrl);
                        Match urlMatch = UrlParts.Match(url);
                        string host = urlMatch.Groups["host"].Value;
                        string file = urlMatch.Groups["file"].Value;
                        if (file.Equals("")) file = "index.html";   // directory URLs default to index.html
                        if (!Regex.IsMatch(host, hostFilter) || !Regex.IsMatch(file, fileFilter) || urls.ContainsKey(url)) continue;
                        urls.Add(url, false);
                        que.Enqueue(url);
                    }
                }
                catch (Exception e)
                {
                    // Report the failure but keep crawling the remaining queue.
                    Downloaded?.Invoke(curUrl, e.Message);
                }
            }
            Stopped?.Invoke();
        }
    }
}
