﻿using System;
using System.Collections;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Drawing;
using System.Linq;
using System.Net;
using System.Text;
using System.Text.RegularExpressions;
using System.Threading.Tasks;
using System.Windows.Forms;
using System.IO;
using System.Threading;
using System.Windows.Forms;
using System.Collections.Concurrent;
using System.Net.Http;

namespace homework16
{
    /// <summary>
    /// Main window of the toy crawler: a URL input, a start button, and two
    /// list boxes showing crawled vs. failed URLs. A 1-second timer polls the
    /// running <see cref="Crawler"/> and mirrors its results into the lists.
    /// </summary>
    public partial class Form1 : Form
    {
        private TextBox urlTextBox;      // seed URL entered by the user
        private Button startButton;      // starts a new crawl
        private ListBox crawledList;     // URLs downloaded successfully
        private ListBox errorList;       // URLs that failed to download
        private Crawler crawler;         // current crawl session (null until first start)
        private System.Windows.Forms.Timer updateTimer; // UI-thread poller

        public Form1()
        {
            InitializeComponent();
            this.Text = "简易爬虫工具";
            this.Width = 800;
            this.Height = 600;

            urlTextBox = new TextBox { Left = 10, Top = 10, Width = 600 };
            startButton = new Button { Text = "开始爬取", Left = 620, Width = 100, Top = 10 };
            startButton.Click += StartButton_Click;

            this.Controls.Add(urlTextBox);
            this.Controls.Add(startButton);

            Label crawledLabel = new Label { Text = "已经爬取的URL:", TextAlign = ContentAlignment.MiddleCenter, Left = 10, Top = 30, Width = 100 };
            this.Controls.Add(crawledLabel);

            crawledList = new ListBox { Left = 10, Top = 50, Width = 370, Height = 400 };
            this.Controls.Add(crawledList);

            Label errorLabel = new Label { Text = "错误的URL:", TextAlign = ContentAlignment.MiddleCenter, Left = 390, Top = 30, Width = 100 };
            this.Controls.Add(errorLabel);

            errorList = new ListBox { Left = 390, Top = 50, Width = 370, Height = 400 };
            this.Controls.Add(errorList);

            // Poll the crawler once per second on the UI thread; the WinForms
            // timer ticks on the UI thread, so touching the ListBoxes is safe.
            updateTimer = new System.Windows.Forms.Timer();
            updateTimer.Interval = 1000;
            updateTimer.Tick += UpdateTimer_Tick;
            updateTimer.Start();

            // NOTE(review): the original called Application.EnableVisualStyles()
            // here, but per the WinForms docs it must run before any control is
            // created (normally at the top of Main), so the call had no effect
            // and was removed. Move it into Program.Main if styling is wanted.
        }

        /// <summary>
        /// Timer tick: copy any URLs the crawler has produced since the last
        /// tick into the two list boxes, skipping ones already displayed.
        /// </summary>
        private void UpdateTimer_Tick(object sender, EventArgs e)
        {
            if (crawler == null) return; // no crawl started yet

            foreach (string url in crawler.CrawledUrls)
            {
                if (!crawledList.Items.Contains(url))
                {
                    crawledList.Items.Add(url);
                }
            }

            foreach (string errorUrl in crawler.ErrorUrls)
            {
                if (!errorList.Items.Contains(errorUrl))
                {
                    errorList.Items.Add(errorUrl);
                }
            }
        }

        // Designer-wired load handler; intentionally empty.
        private void Form1_Load(object sender, EventArgs e)
        {
        }

        /// <summary>
        /// Start button: reset both result lists and launch a fresh crawl on a
        /// background thread so the UI stays responsive.
        /// </summary>
        private void StartButton_Click(object sender, EventArgs e)
        {
            crawledList.Items.Clear();
            errorList.Items.Clear();
            crawler = new Crawler(urlTextBox.Text);
            // IsBackground = true so a hung download cannot keep the process
            // alive after the window is closed (the original used a foreground
            // thread, which could).
            new Thread(crawler.Crawl) { IsBackground = true }.Start();
        }
    }


    /// <summary>
    /// Minimal breadth-style crawler: starting from a seed URL it downloads
    /// pages (saving each body to a file named after its ordinal), extracts
    /// href links, and follows them up to a page limit. Results are exposed
    /// through thread-safe snapshot properties so a UI thread can poll them
    /// while <see cref="Crawl"/> runs on a worker thread.
    /// </summary>
    public class Crawler
    {
        // Frontier/visited map: key = URL, value = true once downloaded.
        // Only ever touched from the crawl thread, so no locking here.
        private readonly Hashtable urls = new Hashtable();
        private int count = 0;              // pages attempted so far (successes and failures)
        private readonly string startUrl;   // seed URL
        private readonly int maxPages;      // hard cap on pages to attempt

        // _gate guards the two result lists: they are appended on the crawl
        // thread and read (via the snapshot properties) from the UI thread.
        // The original exposed the raw lists, which could throw
        // InvalidOperationException when enumerated during a concurrent Add.
        private readonly object _gate = new object();
        private readonly List<string> _crawledUrls = new List<string>();
        private readonly List<string> _errorUrls = new List<string>();

        // Matches href="..." / HREF='...' with optional whitespace around '='.
        // The original pattern used "[]*", a character class of the literal
        // chars ] * = [ that only matched the '=' by accident; "\s*" is the
        // intended form. Cached static so it is compiled once, not per page.
        private static readonly Regex HrefRegex =
            new Regex(@"(href|HREF)\s*=\s*[""'](?<url>[^""'#>]+)[""']");

        /// <summary>Thread-safe snapshot of successfully downloaded URLs.</summary>
        public List<string> CrawledUrls
        {
            get { lock (_gate) { return new List<string>(_crawledUrls); } }
        }

        /// <summary>Thread-safe snapshot of URLs whose download failed.</summary>
        public List<string> ErrorUrls
        {
            get { lock (_gate) { return new List<string>(_errorUrls); } }
        }

        /// <summary>Creates a crawler seeded with one URL.</summary>
        /// <param name="startUrl">URL the crawl begins from.</param>
        /// <param name="maxPages">Maximum number of pages to attempt (default 10).</param>
        public Crawler(string startUrl, int maxPages = 10)
        {
            this.startUrl = startUrl;
            this.maxPages = maxPages;
            urls[startUrl] = false; // false = not yet downloaded
        }

        /// <summary>
        /// Runs the crawl loop until the frontier is empty or the page limit
        /// is reached. Intended to run on a worker thread.
        /// </summary>
        public void Crawl()
        {
            Console.WriteLine("开始爬行了....");
            while (true)
            {
                // Pick the next URL that has not been downloaded yet.
                string currentUrl = null;
                foreach (string url in urls.Keys)
                {
                    if ((bool)urls[url]) continue; // already downloaded, skip
                    currentUrl = url;
                    break;
                }

                // Stop when nothing is pending or the limit is hit. The
                // original tested "count > 10", which attempted 11 pages
                // despite the stated limit of 10 — ">=" fixes the off-by-one.
                if (currentUrl == null || count >= maxPages) break;

                Console.WriteLine("爬行" + currentUrl + "页面!");
                string html = Download(currentUrl);
                // Mark visited even on failure so a bad URL is never retried.
                urls[currentUrl] = true;
                count++;
                if (html == "") continue; // download failed, nothing to parse
                Parse(html, currentUrl);  // enqueue newly discovered links
                Console.WriteLine("爬行结束");
            }
        }

        /// <summary>
        /// Downloads one page, saves its body to a file named after the
        /// current page ordinal, and records the URL as crawled. On any
        /// failure the URL is recorded as an error and "" is returned.
        /// </summary>
        /// <param name="url">Absolute URL to fetch.</param>
        /// <returns>The page HTML, or "" when the download failed.</returns>
        public string Download(string url)
        {
            try
            {
                // WebClient is IDisposable; the original leaked it.
                using (WebClient webClient = new WebClient())
                {
                    webClient.Encoding = Encoding.UTF8;
                    string html = webClient.DownloadString(url);
                    string fileName = count.ToString();
                    File.WriteAllText(fileName, html, Encoding.UTF8);
                    lock (_gate) { _crawledUrls.Add(url); }
                    return html;
                }
            }
            catch (Exception ex)
            {
                lock (_gate) { _errorUrls.Add(url); } // record the failed URL
                Console.WriteLine(ex.Message);
                return "";
            }
        }

        /// <summary>
        /// Extracts href links from a page and adds unseen absolute URLs to
        /// the frontier as not-yet-downloaded. Runs on the crawl thread only.
        /// </summary>
        private void Parse(string html, string pageUrl)
        {
            foreach (Match match in HrefRegex.Matches(html))
            {
                string urlValue = FixUrl(match.Groups["url"].Value, pageUrl);
                if (urlValue == null || urls.ContainsKey(urlValue)) continue;
                urls[urlValue] = false; // new URL, pending download
            }
        }

        /// <summary>
        /// Resolves a link found on <paramref name="pageUrl"/> to an absolute
        /// URL. Returns null for links that should be ignored (for example
        /// "javascript:" pseudo-links or bare fragment-less relative names).
        /// </summary>
        private string FixUrl(string url, string pageUrl)
        {
            if (url.Contains("://"))
            {
                // Already absolute.
                return url;
            }
            if (url.StartsWith("//"))
            {
                // Protocol-relative: inherit the scheme of the current page.
                Uri pageUri = new Uri(pageUrl);
                return pageUri.Scheme + ":" + url;
            }
            if (url.StartsWith("/"))
            {
                // Root-relative: resolve against the current page's host.
                Uri pageUri = new Uri(pageUrl);
                return pageUri.Scheme + "://" + pageUri.Host + url;
            }
            if (url.StartsWith("."))
            {
                // Dot-relative: let Uri perform full RFC 3986 resolution.
                Uri pageUri = new Uri(pageUrl);
                return new Uri(pageUri, url).AbsoluteUri;
            }

            // Anything else (e.g. "javascript:", "mailto:", bare names) is skipped.
            return null;
        }
    }
}