﻿using System;
using System.Collections.Generic;
using System.Net.Http;
using System.Threading.Tasks;
using HtmlAgilityPack;

/// <summary>
/// Minimal breadth-first web crawler: starts at a hard-coded URL and follows
/// only same-host HTTP(S) links, printing each page it fetches.
/// </summary>
class SimpleCrawler
{
    // Single shared HttpClient — never create one per request (socket exhaustion).
    static readonly HttpClient client = new HttpClient();

    // URLs already dequeued for fetching (successful or not), keyed by
    // normalized absolute URL without fragment.
    static readonly HashSet<string> visited = new HashSet<string>(StringComparer.Ordinal);

    // BFS frontier, plus a mirror set for O(1) "already queued?" checks.
    // Queue<T>.Contains is O(n) and made the original enqueue path quadratic.
    static readonly Queue<string> toVisit = new Queue<string>();
    static readonly HashSet<string> queued = new HashSet<string>(StringComparer.Ordinal);

    static Uri baseUri;

    static async Task Main(string[] args)
    {
        string startUrl = "https://www.example.com/";
        baseUri = new Uri(startUrl);
        Enqueue(startUrl);

        while (toVisit.Count > 0)
        {
            string url = toVisit.Dequeue();

            // Mark as visited *before* fetching. The original only marked
            // successful fetches, so a failing URL could be re-enqueued and
            // re-fetched endlessly via links from other pages.
            if (!visited.Add(url))
                continue;

            try
            {
                Console.WriteLine("Crawling: " + url);
                var response = await client.GetAsync(url);
                if (!response.IsSuccessStatusCode)
                    continue;

                string html = await response.Content.ReadAsStringAsync();

                var doc = new HtmlDocument();
                doc.LoadHtml(html);

                // Parse all <a href=""> links. SelectNodes returns null (not an
                // empty collection) when nothing matches, hence the null check.
                var anchors = doc.DocumentNode.SelectNodes("//a[@href]");
                if (anchors == null)
                    continue;

                foreach (var link in anchors)
                {
                    var href = link.GetAttributeValue("href", string.Empty);
                    if (string.IsNullOrEmpty(href))
                        continue;

                    // Resolve relative hrefs against the base URI; absolute
                    // hrefs pass through unchanged. TryCreate never throws,
                    // unlike `new Uri(baseUri, href)` on malformed input.
                    if (!Uri.TryCreate(baseUri, href, out Uri linkUri))
                        continue;

                    // Skip non-HTTP schemes such as mailto: and javascript:.
                    if (linkUri.Scheme != Uri.UriSchemeHttp &&
                        linkUri.Scheme != Uri.UriSchemeHttps)
                        continue;

                    // Only follow links on the same host as the start URL.
                    if (!string.Equals(linkUri.Host, baseUri.Host, StringComparison.OrdinalIgnoreCase))
                        continue;

                    // Drop the #fragment so "page.html" and "page.html#top"
                    // are treated as the same page.
                    string absoluteUrl = linkUri.GetLeftPart(UriPartial.Query);
                    Enqueue(absoluteUrl);
                }
            }
            catch (Exception ex)
            {
                // Best-effort crawl: log and move on to the next URL.
                Console.WriteLine("Error crawling " + url + ": " + ex.Message);
            }
        }

        Console.WriteLine("Crawling finished. Total pages crawled: " + visited.Count);
    }

    /// <summary>
    /// Adds <paramref name="url"/> to the frontier unless it was already
    /// visited or is already waiting in the queue.
    /// </summary>
    static void Enqueue(string url)
    {
        if (!visited.Contains(url) && queued.Add(url))
            toVisit.Enqueue(url);
    }
}
