package main

import (
	"fmt"
	"time"

	"gitee.com/oscstudio/web-harvester/pkg/crawler"
)

// simpleLogger is a minimal logger that satisfies the crawler's Logger
// interface by writing level-prefixed lines to standard output.
type simpleLogger struct{}

// Info logs an informational message in Printf style.
func (s *simpleLogger) Info(format string, args ...interface{}) {
	fmt.Println("[INFO] " + fmt.Sprintf(format, args...))
}

// Error logs an error message in Printf style.
func (s *simpleLogger) Error(format string, args ...interface{}) {
	fmt.Println("[ERROR] " + fmt.Sprintf(format, args...))
}

// main performs a synchronous crawl of example.com, prints every fetched
// page as Markdown, and then runs the asynchronous-mode demo.
func main() {
	// Configure the crawler via functional options: page/depth limits,
	// politeness delay, concurrency, same-domain restriction, a custom
	// User-Agent, Markdown conversion, and our stdout logger.
	spider := crawler.NewCrawler(
		crawler.WithMaxCount(150),
		crawler.WithMaxDepth(4),
		crawler.WithDelay(500*time.Millisecond),
		crawler.WithConcurrency(15),
		crawler.WithSameDomainOnly(true),
		crawler.WithHeader("User-Agent", "MyCrawlerBot/1.0"),
		crawler.WithConvertToMarkdown(true),
		crawler.WithLogger(&simpleLogger{}),
	)

	pages, err := spider.Crawl("https://example.com")
	if err != nil {
		fmt.Println("抓取出错:", err)
		return
	}

	// Print each page: URL, fetch timestamp (RFC 3339), and Markdown body.
	for idx := range pages {
		p := pages[idx]
		fmt.Printf("\n--- 第%d页 ---\nURL: %s\n抓取时间: %s\n", idx+1, p.URL, p.FetchedAt.Format(time.RFC3339))
		fmt.Printf("Markdown内容预览:\n%s\n", p.Markdown)
	}

	// Also demonstrate the asynchronous crawling mode.
	fmt.Println("\n=== 异步模式演示 ===")
	asyncDemo()
}

// asyncDemo runs the same crawl in asynchronous mode with a higher
// concurrency level and reports the elapsed time and page count.
func asyncDemo() {
	ac := crawler.NewCrawler(
		crawler.WithMaxCount(150),
		crawler.WithMaxDepth(4),
		crawler.WithDelay(300*time.Millisecond),
		crawler.WithConcurrency(20), // higher concurrency than the sync demo
		crawler.WithSameDomainOnly(true),
		crawler.WithHeader("User-Agent", "AsyncCrawlerBot/1.0"),
		crawler.WithConvertToMarkdown(true),
		crawler.WithLogger(&simpleLogger{}),
		crawler.WithAsync(true), // enable asynchronous mode
	)

	fmt.Println("开始异步抓取...")
	began := time.Now()

	pages, err := ac.Crawl("https://example.com")
	if err != nil {
		fmt.Println("异步抓取出错:", err)
		return
	}

	// Report total wall-clock time and the number of pages fetched.
	fmt.Printf("异步抓取完成，用时: %v，共抓取 %d 页\n", time.Since(began), len(pages))

	// Print each page: URL, fetch timestamp (RFC 3339), and Markdown body.
	for idx := range pages {
		p := pages[idx]
		fmt.Printf("\n--- 第%d页 ---\nURL: %s\n抓取时间: %s\n", idx+1, p.URL, p.FetchedAt.Format(time.RFC3339))
		fmt.Printf("Markdown内容预览:\n%s\n", p.Markdown)
	}
}
