package prefetch

import (
	"context"
	"encoding/json"
	"fmt"
	"os"
	"path/filepath"
	"sort"
	"strings"
	"sync"

	"github.com/containerd/containerd/log"
	"onyx-snapshotter/pkg/config"
	"onyx-snapshotter/pkg/estargz"
	"onyx-snapshotter/pkg/metrics"
)

// Engine manages prefetching of files from remote layers into local
// snapshot directories, driven by a PrefetchConfig.
type Engine struct {
	// Prefetch behavior: Enabled gate, MaxFiles cap, and worker Concurrency
	// are all read from this config.
	config *config.PrefetchConfig

	// Download function injected from loader
	downloadFunc func(ctx context.Context, url string, entry *estargz.TOCEntry, dest string) error

	// NOTE(review): mu is not used by any method visible in this file —
	// confirm whether it is still needed or can be removed.
	mu sync.Mutex
}

// PrefetchList is the JSON-serializable set of files to prefetch for a
// layer, ordered by descending priority after sorting.
type PrefetchList struct {
	Files []PrefetchEntry `json:"files"`
}

// PrefetchEntry describes a single file scheduled for prefetching.
type PrefetchEntry struct {
	Path     string `json:"path"`
	Priority int    `json:"priority"` // Higher = more important
	Size     int64  `json:"size"`     // File size in bytes, taken from the TOC
}

// NewEngine constructs a prefetch Engine with the given configuration and
// the per-file download function injected by the loader.
func NewEngine(cfg *config.PrefetchConfig, downloadFunc func(context.Context, string, *estargz.TOCEntry, string) error) *Engine {
	e := &Engine{}
	e.config = cfg
	e.downloadFunc = downloadFunc
	return e
}

// Prefetch executes prefetching for a snapshot. It is a no-op when
// prefetching is disabled in the configuration. The file list comes from
// hints embedded in the TOC when available, otherwise from heuristics,
// and is capped at MaxFiles entries before downloads begin.
func (e *Engine) Prefetch(ctx context.Context, toc *estargz.TOC, layerURL, snapshotDir string) error {
	if !e.config.Enabled {
		return nil
	}

	log.G(ctx).WithField("snapshot", snapshotDir).Info("starting prefetch")

	// Prefer hints embedded in the TOC; fall back to heuristic scoring.
	list, err := e.extractPrefetchList(toc)
	if err != nil {
		log.G(ctx).WithError(err).Debug("no prefetch list found, using heuristics")
		list = e.generateHeuristicList(toc)
	}

	// Cap the list at the configured maximum number of files.
	if limit := e.config.MaxFiles; len(list.Files) > limit {
		list.Files = list.Files[:limit]
	}

	return e.executePrefetch(ctx, toc, layerURL, snapshotDir, list)
}

// extractPrefetchList looks for a special ".prefetch.json" entry embedded
// in the TOC. When present it returns a prefetch list (currently empty —
// reading and parsing the file is stubbed out); otherwise it returns an
// error so callers can fall back to heuristics.
func (e *Engine) extractPrefetchList(toc *estargz.TOC) (*PrefetchList, error) {
	for i := range toc.Entries {
		if toc.Entries[i].Name != ".prefetch.json" {
			continue
		}
		// Special file containing prefetch hints. A real implementation
		// would read and parse its contents here.
		var list PrefetchList
		return &list, nil
	}
	return nil, fmt.Errorf("no prefetch list found")
}

// generateHeuristicList builds a prefetch list from the TOC alone, scoring
// regular files by path and size heuristics and keeping only those with a
// positive priority, sorted highest-priority first.
func (e *Engine) generateHeuristicList(toc *estargz.TOC) *PrefetchList {
	out := &PrefetchList{Files: []PrefetchEntry{}}

	for i := range toc.Entries {
		ent := &toc.Entries[i]
		// Only regular files are worth prefetching.
		if ent.Type != "reg" {
			continue
		}
		if score := e.calculatePriority(ent.Name, ent.Size); score > 0 {
			out.Files = append(out.Files, PrefetchEntry{
				Path:     ent.Name,
				Priority: score,
				Size:     ent.Size,
			})
		}
	}

	// Highest priority first.
	e.sortByPriority(out.Files)
	return out
}

// calculatePriority scores a file for prefetching; higher means more
// important and non-positive scores are dropped by the caller. Scores are
// additive across categories: executables, libraries, and config files
// score high, small files get a bonus, docs and sources are penalized.
func (e *Engine) calculatePriority(path string, size int64) int {
	score := 0

	// Executables and shared libraries are likely needed at startup.
	if matches(path, "/usr/bin/", "/usr/local/bin/", "/bin/") {
		score += 10
	}
	if matches(path, "/lib/", "/usr/lib/") {
		score += 8
	}

	// Smaller files are cheap to fetch eagerly.
	switch {
	case size < 1<<20: // < 1MB
		score += 5
	case size < 10<<20: // < 10MB
		score += 2
	}

	// Configuration is usually read early in a container's life.
	if matches(path, "/etc/") {
		score += 7
	}

	// Documentation and sources are rarely read at runtime.
	if matches(path, "/usr/share/doc/", "/usr/share/man/", "/usr/src/") {
		score -= 5
	}

	return score
}

// executePrefetch downloads every file in list concurrently, bounded by
// the configured concurrency. Files named in the list but absent from the
// TOC are skipped. It records metrics and returns an error if any
// individual download failed.
func (e *Engine) executePrefetch(ctx context.Context, toc *estargz.TOC, layerURL, snapshotDir string, list *PrefetchList) error {
	// Index TOC entries by name once (O(entries)) so each prefetch path
	// resolves in O(1) instead of a linear scan per file.
	byName := make(map[string]*estargz.TOCEntry, len(toc.Entries))
	for i := range toc.Entries {
		byName[toc.Entries[i].Name] = &toc.Entries[i]
	}

	// Worker pool: at most Concurrency downloads in flight.
	semaphore := make(chan struct{}, e.config.Concurrency)
	var (
		wg                     sync.WaitGroup
		mu                     sync.Mutex // guards errs, totalFiles, totalBytes
		totalFiles, totalBytes int64
		errs                   []error
	)

	for _, pe := range list.Files {
		entry, ok := byName[pe.Path]
		if !ok {
			// Listed file not present in this layer; skip it.
			continue
		}

		wg.Add(1)
		go func(entry *estargz.TOCEntry) {
			defer wg.Done()

			// Acquire a worker slot, bailing out on cancellation rather
			// than starting more downloads against a dead context.
			select {
			case semaphore <- struct{}{}:
			case <-ctx.Done():
				mu.Lock()
				errs = append(errs, ctx.Err())
				mu.Unlock()
				return
			}
			defer func() { <-semaphore }()

			// filepath.Join inserts the missing path separator; the old
			// plain concatenation produced paths like "/snapdirusr/bin/ls"
			// whenever snapshotDir lacked a trailing slash.
			dest := filepath.Join(snapshotDir, entry.Name)
			if err := e.downloadFunc(ctx, layerURL, entry, dest); err != nil {
				mu.Lock()
				errs = append(errs, err)
				mu.Unlock()
				log.G(ctx).WithError(err).WithField("path", entry.Name).Warn("prefetch failed")
				return
			}

			mu.Lock()
			totalFiles++
			totalBytes += entry.Size
			mu.Unlock()

			log.G(ctx).WithFields(map[string]interface{}{
				"path": entry.Name,
				"size": entry.Size,
			}).Debug("prefetched file")
		}(entry)
	}

	wg.Wait()

	success := len(errs) == 0
	metrics.RecordPrefetch(int(totalFiles), totalBytes, success)

	log.G(ctx).WithFields(map[string]interface{}{
		"files":  totalFiles,
		"bytes":  totalBytes,
		"errors": len(errs),
	}).Info("prefetch completed")

	if len(errs) > 0 {
		return fmt.Errorf("prefetch had %d errors", len(errs))
	}

	return nil
}

// sortByPriority sorts entries in place by descending priority. The sort
// is stable — entries with equal priority keep their relative order, which
// matches the previous bubble-sort behavior — but runs in O(n log n)
// instead of O(n²).
func (e *Engine) sortByPriority(entries []PrefetchEntry) {
	sort.SliceStable(entries, func(i, j int) bool {
		return entries[i].Priority > entries[j].Priority
	})
}

// GeneratePrefetchList analyzes file access patterns and generates a
// prefetch list. Each accessed path's frequency in accessLog becomes its
// priority (count * 10); only regular files ("reg") present in the TOC
// are included, sorted by descending priority.
func (e *Engine) GeneratePrefetchList(accessLog []string, toc *estargz.TOC) (*PrefetchList, error) {
	// Count access frequency per path.
	frequency := make(map[string]int, len(accessLog))
	for _, path := range accessLog {
		frequency[path]++
	}

	list := &PrefetchList{
		Files: []PrefetchEntry{},
	}

	// Walk the TOC once (O(entries)) instead of scanning it per accessed
	// path (was O(paths*entries)). Iterating the TOC — not the map — also
	// keeps the pre-sort order deterministic; Go map iteration is random,
	// which previously made the order of equal-priority files unstable.
	for i := range toc.Entries {
		entry := &toc.Entries[i]
		if entry.Type != "reg" {
			continue
		}
		count, accessed := frequency[entry.Name]
		if !accessed {
			continue
		}
		list.Files = append(list.Files, PrefetchEntry{
			Path:     entry.Name,
			Priority: count * 10, // Use access count as priority
			Size:     entry.Size,
		})
	}

	e.sortByPriority(list.Files)
	return list, nil
}

// SavePrefetchList saves a prefetch list as indented JSON to the file at
// path (created or truncated, mode 0644). The previous implementation
// marshaled the list and then discarded the bytes, so nothing was ever
// persisted; it now actually writes the file.
func (e *Engine) SavePrefetchList(path string, list *PrefetchList) error {
	data, err := json.MarshalIndent(list, "", "  ")
	if err != nil {
		return err
	}
	if err := os.WriteFile(path, data, 0644); err != nil {
		return fmt.Errorf("writing prefetch list %q: %w", path, err)
	}
	return nil
}

// Helper functions

// matches reports whether path starts with any of the given prefixes.
// An empty prefix matches every path; no prefixes means no match.
func matches(path string, prefixes ...string) bool {
	for _, prefix := range prefixes {
		// strings.HasPrefix replaces the hand-rolled length check plus
		// slice comparison with the idiomatic stdlib call.
		if strings.HasPrefix(path, prefix) {
			return true
		}
	}
	return false
}
