package crawler

import "time"

// WithMaxCount 设置最大抓取数量
// WithMaxCount limits the total number of pages the crawler will fetch.
func WithMaxCount(n int) Option {
	return func(cr *Crawler) { cr.maxCount = n }
}

// WithMaxDepth 设置最大抓取深度
// WithMaxDepth limits how many links deep the crawler will follow.
func WithMaxDepth(n int) Option {
	return func(cr *Crawler) { cr.maxDepth = n }
}

// WithDelay 设置抓取延迟
// WithDelay sets the pause inserted between successive fetches.
func WithDelay(d time.Duration) Option {
	return func(cr *Crawler) { cr.delay = d }
}

// WithConcurrency 设置并发数
// WithConcurrency sets how many fetches may run in parallel.
func WithConcurrency(n int) Option {
	return func(cr *Crawler) { cr.concurrency = n }
}

// WithSameDomainOnly 设置是否只抓取同域名
// WithSameDomainOnly controls whether the crawler is restricted to
// links on the same domain as the starting URL.
func WithSameDomainOnly(b bool) Option {
	return func(cr *Crawler) { cr.sameDomainOnly = b }
}

// WithHeader 添加自定义 Header
// WithHeader registers a custom HTTP header sent with requests.
// Repeated calls accumulate headers; reusing a key overwrites the
// previously stored value.
func WithHeader(key, value string) Option {
	return func(cr *Crawler) {
		// Lazily initialize the map so the zero-value Crawler works.
		if cr.headers == nil {
			cr.headers = map[string]string{}
		}
		cr.headers[key] = value
	}
}

// WithRetry 设置重试次数
// WithRetry sets how many times a failed fetch is retried.
func WithRetry(n int) Option {
	return func(cr *Crawler) { cr.retry = n }
}

// WithLogger 设置日志实现
// WithLogger installs a custom Logger implementation on the crawler.
func WithLogger(logger Logger) Option {
	return func(cr *Crawler) { cr.logger = logger }
}

// WithConvertToMarkdown 控制是否将 HTML 转换为 Markdown
// WithConvertToMarkdown controls whether fetched HTML is converted
// to Markdown.
func WithConvertToMarkdown(b bool) Option {
	return func(cr *Crawler) { cr.convertToMarkdown = b }
}

// WithRandomUserAgent 启用随机 UA
// WithRandomUserAgent enables picking a random User-Agent per request.
func WithRandomUserAgent() Option {
	return func(cr *Crawler) { cr.randomUserAgent = true }
}

// WithUserAgentPool 设置自定义 UA pool
// WithUserAgentPool sets a custom pool of User-Agent strings for the
// crawler to choose from. The slice is copied so that later mutation of
// the caller's slice cannot silently change the crawler's pool; a nil
// pool leaves the field nil.
func WithUserAgentPool(pool []string) Option {
	return func(c *Crawler) {
		// Defensive copy at the API boundary: slices are reference
		// types, and storing the caller's slice directly would alias
		// the caller's backing array. append to a nil slice returns
		// nil for an empty/nil input, preserving nil semantics.
		c.userAgentPool = append([]string(nil), pool...)
	}
}

// WithAllowURLRevisit 设置是否允许重复访问同一个 URL
// 默认为 false（不允许重复访问），设置为 true 可启用重复访问
// WithAllowURLRevisit controls whether the same URL may be visited
// more than once. The default is false (no revisits); pass true to
// allow repeated visits.
func WithAllowURLRevisit(allow bool) Option {
	return func(cr *Crawler) { cr.allowURLRevisit = allow }
}

// WithAsync 启用异步处理模式
// 当启用异步模式时，抓取结果将通过 channel 异步返回，提高并发性能
// WithAsync toggles asynchronous processing mode. When enabled,
// crawl results are delivered asynchronously over a channel,
// improving concurrency throughput.
func WithAsync(async bool) Option {
	return func(cr *Crawler) { cr.async = async }
}
