package cmd

import (
	"downloader/internal/crawler"
	"fmt"

	"github.com/spf13/cobra"
)

// Command-line flag values for the crawl subcommand, bound in init.
var (
	maxDepth     int      // maximum link depth to follow from the base URL
	outputFile   string   // path of the file the discovered URLs are written to
	allowedHosts []string // optional host allowlist; empty means crawler default
)

// crawlCmd implements `crawl [url]`: it builds a crawler from the bound
// flags, runs it against the given base URL, and finalizes the results.
var crawlCmd = &cobra.Command{
	Use:   "crawl [url]",
	Short: "Crawl URLs from a website",
	Args:  cobra.ExactArgs(1),
	RunE: func(cmd *cobra.Command, args []string) error {
		baseURL := args[0]

		// Flag values were populated by cobra before RunE executes.
		config := crawler.Config{
			MaxDepth:     maxDepth,
			URLFile:      outputFile,
			AllowedHosts: allowedHosts,
		}

		c, err := crawler.New(baseURL, config)
		if err != nil {
			// Wrap with context; avoid the redundant "failed to" prefix
			// so chained errors stay readable.
			return fmt.Errorf("creating crawler: %w", err)
		}

		if err := c.Start(); err != nil {
			return fmt.Errorf("crawling %q: %w", baseURL, err)
		}

		// Finalize flushes/closes the result output; its error matters
		// because a partial write would silently lose URLs.
		if err := c.Finalize(); err != nil {
			return fmt.Errorf("finalizing results: %w", err)
		}

		return nil
	},
}

// init binds the crawl flags to their package-level variables.
func init() {
	flags := crawlCmd.Flags()
	flags.IntVarP(&maxDepth, "depth", "d", 3, "maximum crawling depth")
	flags.StringVarP(&outputFile, "output", "o", "urls.txt", "output file for URLs")
	flags.StringSliceVarP(&allowedHosts, "hosts", "H", nil, "allowed hosts to crawl (default is base URL host)")
}
