package crawler

import (
	"bufio"
	"fmt"
	"net/http"
	"net/url"
	"os"
	"strings"
	"sync"
	"time"

	"golang.org/x/net/html"
)

// Crawler walks pages starting from a base URL, recording every visited
// URL to a file and restricting traversal to an allow-list of hosts.
type Crawler struct {
	baseURL      *url.URL        // root URL the crawl starts from
	maxDepth     int             // maximum recursion depth; depths beyond this are skipped
	visited      sync.Map        // set of URL strings already visited (value is always true)
	urlFile      string          // path of the file URLs are loaded from and appended to
	client       *http.Client    // HTTP client used for all page fetches
	allowedHosts map[string]bool // hosts the crawler is allowed to visit
	tagConfigs   []TagConfig     // which tag/attribute pairs to extract URLs from
}

// TagConfig 定义需要抓取的HTML标签和属性配置
// TagConfig describes one HTML tag/attribute pair whose values should be
// extracted as URLs during crawling.
type TagConfig struct {
	// Tag is the element name, e.g. "a" or "img".
	Tag string
	// Attr is the attribute whose value is extracted, e.g. "href" or "src".
	Attr string
}

// Config 结构体添加新的字段
// Config holds the user-supplied settings for constructing a Crawler.
type Config struct {
	MaxDepth     int      // maximum recursion depth for the crawl
	URLFile      string   // file used to persist visited URLs
	AllowedHosts []string // additional hosts the crawler may visit
	// TagConfigs lists the tag/attribute pairs to extract URLs from.
	TagConfigs []TagConfig
}

// New creates a Crawler rooted at baseURL. The host of baseURL is always
// allowed, in addition to any hosts listed in config.AllowedHosts.
// It returns an error if baseURL cannot be parsed.
func New(baseURL string, config Config) (*Crawler, error) {
	parsedURL, err := url.Parse(baseURL)
	if err != nil {
		return nil, fmt.Errorf("invalid base URL: %w", err)
	}

	allowedHosts := make(map[string]bool, len(config.AllowedHosts)+1)
	for _, host := range config.AllowedHosts {
		allowedHosts[host] = true
	}
	// Always allow the base URL's own host.
	allowedHosts[parsedURL.Host] = true

	return &Crawler{
		baseURL:  parsedURL,
		maxDepth: config.MaxDepth,
		urlFile:  config.URLFile,
		// A client timeout keeps one slow or unresponsive server from
		// hanging the entire crawl indefinitely.
		client:       &http.Client{Timeout: 30 * time.Second},
		allowedHosts: allowedHosts,
		tagConfigs:   config.TagConfigs,
	}, nil
}

// Start runs the crawl: it first seeds the visited set from the URL file,
// then crawls recursively from the base URL at depth zero.
func (c *Crawler) Start() error {
	err := c.loadExistingURLs()
	if err != nil {
		return err
	}
	return c.crawl(c.baseURL.String(), 0)
}

// crawl visits urlStr, records it, and recursively follows the URLs found
// on the page until maxDepth is exceeded. Errors on child pages are logged
// but do not abort the rest of the crawl.
func (c *Crawler) crawl(urlStr string, depth int) error {
	if depth > c.maxDepth {
		return nil
	}

	// LoadOrStore atomically marks the URL as visited and reports whether
	// it had been seen before; already-seen URLs are skipped.
	_, seen := c.visited.LoadOrStore(urlStr, true)
	if seen {
		return nil
	}

	// Persist the URL before fetching so it survives a crash mid-crawl.
	if err := c.saveURL(urlStr); err != nil {
		return err
	}

	links, err := c.extractURLs(urlStr)
	if err != nil {
		return fmt.Errorf("failed to extract URLs from %s: %w", urlStr, err)
	}

	for _, link := range links {
		// A failing child page should not stop its siblings.
		if err := c.crawl(link, depth+1); err != nil {
			fmt.Printf("Error crawling %s: %v\n", link, err)
		}
	}

	return nil
}

// extractURLs fetches urlStr and returns the normalized, allowed URLs found
// in the configured tag/attribute pairs of the HTML document. Non-HTML
// responses yield no URLs and no error.
//
// Fix: c.tagConfigs was previously ignored — extraction was hardcoded to
// <a href> and <img src>. Those remain the defaults when no TagConfigs are
// supplied, so existing callers keep the old behavior.
func (c *Crawler) extractURLs(urlStr string) ([]string, error) {
	resp, err := c.client.Get(urlStr)
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()

	// Only HTML documents are parsed for links.
	contentType := resp.Header.Get("Content-Type")
	if !strings.Contains(contentType, "text/html") {
		return nil, nil
	}

	doc, err := html.Parse(resp.Body)
	if err != nil {
		return nil, err
	}

	// Fall back to the historical defaults when no configuration was given.
	tagConfigs := c.tagConfigs
	if len(tagConfigs) == 0 {
		tagConfigs = []TagConfig{
			{Tag: "a", Attr: "href"},
			{Tag: "img", Attr: "src"},
		}
	}
	// Build a tag -> attribute-set lookup for the tree walk.
	wanted := make(map[string]map[string]bool, len(tagConfigs))
	for _, tc := range tagConfigs {
		if wanted[tc.Tag] == nil {
			wanted[tc.Tag] = make(map[string]bool)
		}
		wanted[tc.Tag][tc.Attr] = true
	}

	var urls []string
	var visit func(*html.Node)
	visit = func(n *html.Node) {
		if n.Type == html.ElementNode {
			if attrs, ok := wanted[n.Data]; ok {
				for _, attr := range n.Attr {
					if attrs[attr.Key] {
						if u, err := c.normalizeURL(attr.Val); err == nil && u != "" {
							urls = append(urls, u)
						}
					}
				}
			}
		}
		// Note: the child variable no longer shadows the receiver c as the
		// original loop variable did.
		for child := n.FirstChild; child != nil; child = child.NextSibling {
			visit(child)
		}
	}
	visit(doc)

	return urls, nil
}

// normalizeURL resolves rawURL against the crawler's base URL and returns the
// absolute URL string. It returns "" with a nil error when the resolved host
// is not in the allow-list. The fragment is stripped so that page#a and
// page#b are recognized as the same resource and not crawled twice.
func (c *Crawler) normalizeURL(rawURL string) (string, error) {
	u, err := url.Parse(rawURL)
	if err != nil {
		return "", err
	}

	// Resolve relative references (e.g. "/path", "../x") against the base.
	u = c.baseURL.ResolveReference(u)

	// Drop the fragment: it addresses a position inside the same document,
	// so keeping it would make the visited-set treat one page as many.
	u.Fragment = ""

	// Filter out hosts that are not allowed.
	if !c.allowedHosts[u.Host] {
		return "", nil
	}

	return u.String(), nil
}

// loadExistingURLs seeds the visited set with every line already present in
// the URL file, creating the file if it does not yet exist.
func (c *Crawler) loadExistingURLs() error {
	f, err := os.OpenFile(c.urlFile, os.O_CREATE|os.O_RDONLY, 0644)
	if err != nil {
		return err
	}
	defer f.Close()

	sc := bufio.NewScanner(f)
	for sc.Scan() {
		c.visited.Store(sc.Text(), true)
	}
	// Surface any read error the scanner encountered.
	return sc.Err()
}

// saveURL appends urlStr as one line to the URL file, creating it if needed.
func (c *Crawler) saveURL(urlStr string) error {
	f, err := os.OpenFile(c.urlFile, os.O_CREATE|os.O_APPEND|os.O_WRONLY, 0644)
	if err != nil {
		return err
	}
	defer f.Close()

	_, writeErr := fmt.Fprintln(f, urlStr)
	return writeErr
}

// 去重并保存最终结果
func (c *Crawler) Finalize() error {
	// 读取所有URL
	urls := make(map[string]bool)
	c.visited.Range(func(key, value interface{}) bool {
		urls[key.(string)] = true
		return true
	})

	// 创建临时文件
	tmpFile := c.urlFile + ".tmp"
	f, err := os.Create(tmpFile)
	if err != nil {
		return err
	}
	defer f.Close()

	// 写入去重后的URL
	for url := range urls {
		if _, err := fmt.Fprintln(f, url); err != nil {
			return err
		}
	}

	// 替换原文件
	return os.Rename(tmpFile, c.urlFile)
}
