package scraper

import (
	"context"
	"regexp"
	"strconv"
	"strings"
	"time"

	"spam3uk/internal/models"

	"github.com/chromedp/cdproto/cdp"
	"github.com/chromedp/chromedp"
)

// scrapeClassicTrimsWithChromedp navigates to detail URL, expands promotions and extracts trims
func (ts *TrimScraper) scrapeClassicTrimsWithChromedp(modelName, detailURL string) ([]models.TrimInfo, error) {
	// 配置Chrome选项来禁用HTTP/2，强制使用HTTP/1.1
	opts := append(chromedp.DefaultExecAllocatorOptions[:],
		chromedp.Flag("disable-http2", true),                      // 禁用HTTP/2
		chromedp.Flag("disable-web-security", true),               // 禁用Web安全(某些情况下有助于连接)
		chromedp.Flag("disable-features", "VizDisplayCompositor"), // 禁用某些可能导致问题的特性
		chromedp.UserAgent("Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/122.0.0.0 Safari/537.36"),
	)

	allocCtx, cancel := chromedp.NewExecAllocator(context.Background(), opts...)
	defer cancel()

	ctx, cancel := chromedp.NewContext(allocCtx)
	defer cancel()

	// increase timeout for dynamic pages
	ctx, cancel = context.WithTimeout(ctx, 45*time.Second)
	defer cancel()

	var htmlAfter string

	tasks := chromedp.Tasks{
		chromedp.Navigate(detailURL),
		chromedp.WaitReady("body", chromedp.ByQuery),
		// Best-effort cookie accept
		chromedp.ActionFunc(func(ctx context.Context) error {
			candidates := []string{
				`#cookie-accept-button`,
				`button[aria-label*="Accept"]`,
				`button[aria-label*="accept"]`,
			}
			for _, sel := range candidates {
				_ = chromedp.Click(sel, chromedp.ByQuery).Do(ctx)
				time.Sleep(300 * time.Millisecond)
			}
			return nil
		}),
		// Expand promotions if present
		chromedp.ActionFunc(func(ctx context.Context) error {
			xpaths := []string{
				`//button[contains(translate(., 'ABCDEFGHIJKLMNOPQRSTUVWXYZ','abcdefghijklmnopqrstuvwxyz'), 'see promotion')]`,
				`//a[contains(translate(., 'ABCDEFGHIJKLMNOPQRSTUVWXYZ','abcdefghijklmnopqrstuvwxyz'), 'see promotion')]`,
			}
			for _, xp := range xpaths {
				var nodes []*cdp.Node
				if err := chromedp.Nodes(xp, &nodes, chromedp.BySearch).Do(ctx); err == nil && len(nodes) > 0 {
					_ = chromedp.MouseClickNode(nodes[0]).Do(ctx)
					time.Sleep(900 * time.Millisecond)
					break
				}
			}
			return nil
		}),
		// Capture current HTML
		chromedp.OuterHTML("html", &htmlAfter, chromedp.ByQuery),
	}

	if err := chromedp.Run(ctx, tasks); err != nil {
		return nil, err
	}

	// Parse trims from the expanded HTML: look for promotion/offer tiles
	type promo struct{ Name, Price, Href string }
	var promos []promo

	reView := regexp.MustCompile(`(?i)>\s*view\s*promotion\s*<`)
	reHeading := regexp.MustCompile(`(?is)<(h2|h3|h4)[^>]*>(.*?)</(h2|h3|h4)>`)
	rePrice := regexp.MustCompile(`£\s*([0-9][0-9,]+(?:\.[0-9]{2})?)`)
	reHref := regexp.MustCompile(`(?i)<a[^>]+href=\"([^\"]+)\"[^>]*>[^<]*view\s*promotion`)

	htmlLower := strings.ToLower(htmlAfter)
	idx := 0
	for {
		loc := reView.FindStringIndex(htmlLower[idx:])
		if loc == nil {
			break
		}
		start := idx + loc[0]
		end := start + 4000
		if end > len(htmlAfter) {
			end = len(htmlAfter)
		}
		from := start - 2000
		if from < 0 {
			from = 0
		}
		window := htmlAfter[from:end]

		name := ""
		if m := reHeading.FindStringSubmatch(window); len(m) >= 3 {
			name = htmlUnescape(stripTags(m[2]))
		}
		price := ""
		if mp := rePrice.FindStringSubmatch(window); len(mp) >= 2 {
			price = mp[1]
		}
		href := ""
		if mh := reHref.FindStringSubmatch(window); len(mh) >= 2 {
			href = mh[1]
		}
		if name != "" {
			promos = append(promos, promo{Name: strings.TrimSpace(name), Price: price, Href: href})
		}
		idx = start + 1
	}

	// Deduplicate by Name
	seen := map[string]struct{}{}
	var trims []models.TrimInfo
	for _, p := range promos {
		key := strings.ToLower(strings.TrimSpace(p.Name))
		if _, ok := seen[key]; ok {
			continue
		}
		seen[key] = struct{}{}

		var priceFrom *float64
		if p.Price != "" {
			if v, err := strconv.ParseFloat(strings.ReplaceAll(p.Price, ",", ""), 64); err == nil {
				priceFrom = &v
			}
		}
		trim := models.TrimInfo{
			ModelName:             modelName,
			TrimName:              p.Name,
			TrimDisplayName:       p.Name,
			PriceFrom:             priceFrom,
			Currency:              "GBP",
			ConfigureURL:          absolutize("https://www.ford.co.uk", p.Href),
			SeeFullSpecsAvailable: true,
			ScrapedAt:             time.Now(),
		}
		trims = append(trims, trim)
	}

	return trims, nil
}

// scrapeClassicPromoSpecs navigates to a promotion URL and extracts simple key-value specs
// reSpecListItem extracts "Name: Value" pairs from <li> elements. Compiled
// once at package scope rather than per call.
var reSpecListItem = regexp.MustCompile(`(?i)<li[^>]*>\s*([^:<]{2,}?)\s*:\s*([^<]{1,})</li>`)

// scrapeClassicPromoSpecs navigates to a promotion URL in a fresh headless
// Chrome session and extracts simple "Name: Value" specs from <li> elements.
// All specs are filed under the "General" category. This is best-effort:
// an empty URL or any browser error returns nil rather than failing the
// overall scrape.
func (ts *TrimScraper) scrapeClassicPromoSpecs(modelName, trimName, promoURL string) []models.TrimSpecsInfo {
	if promoURL == "" {
		return nil
	}

	ctx, cancel := chromedp.NewContext(context.Background())
	defer cancel()
	ctx, cancel = context.WithTimeout(ctx, 30*time.Second)
	defer cancel()

	var pageHTML string
	if err := chromedp.Run(ctx, chromedp.Tasks{
		chromedp.Navigate(promoURL),
		chromedp.WaitReady("body", chromedp.ByQuery),
		chromedp.OuterHTML("html", &pageHTML, chromedp.ByQuery),
	}); err != nil {
		// Specs are optional, so navigation failures are swallowed.
		return nil
	}

	var specs []models.TrimSpecsInfo
	for _, m := range reSpecListItem.FindAllStringSubmatch(pageHTML, -1) {
		name := strings.TrimSpace(stripTags(m[1]))
		val := strings.TrimSpace(stripTags(m[2]))
		if name == "" || val == "" {
			continue
		}
		specs = append(specs, models.TrimSpecsInfo{
			TrimName:     trimName,
			ModelName:    modelName,
			SpecCategory: "General",
			SpecName:     htmlUnescape(name),
			SpecValue:    htmlUnescape(val),
			DisplayOrder: 0,
			ScrapedAt:    time.Now(),
		})
	}
	return specs
}

// reTag matches a single HTML tag. Compiled once at package scope: stripTags
// is called per-field inside parse loops, and per-call regexp.MustCompile
// would recompile the pattern every time.
var reTag = regexp.MustCompile(`<[^>]+>`)

// stripTags removes all HTML tags from s, leaving only the text content.
func stripTags(s string) string {
	return reTag.ReplaceAllString(s, "")
}

// entityReplacer decodes the small set of HTML entities seen in scraped
// pages in a single pass. A single pass (unlike the previous sequential
// ReplaceAll chain with "&amp;" first) correctly leaves "&amp;pound;" as
// the literal text "&pound;" instead of double-unescaping it to "£".
var entityReplacer = strings.NewReplacer(
	"&amp;", "&",
	"&nbsp;", " ",
	"&pound;", "£",
	"&quot;", "\"",
	"&#39;", "'",
)

// htmlUnescape decodes the HTML entities listed in entityReplacer; any
// other entity is left untouched.
func htmlUnescape(s string) string {
	return entityReplacer.Replace(s)
}
