package inference

import (
	"bytes"
	"context"
	"encoding/json"
	"fmt"
	"log"
	"os"
	"os/exec"
	"path/filepath"
	"sync"
	"time"

	"goyolo/internal/config"

	"github.com/google/uuid"
)

// InferenceEngine runs object detection by shelling out to an external C++
// binary whose paths and parameters come from config.Config.
type InferenceEngine struct {
	config      *config.Config // binary/model paths, thresholds, temp dir
	mu          sync.Mutex     // guards reads of modelInfo
	modelInfo   *ModelInfo     // model metadata (version, classes, input size)
	isWarmedUp  bool           // set once a warmup inference has succeeded
	warmupMutex sync.Mutex     // guards isWarmedUp and serializes warmup runs
}

// InferenceRequest describes a single inference job.
// NOTE(review): not referenced elsewhere in this file — presumably consumed
// by an HTTP/RPC layer; verify against callers.
type InferenceRequest struct {
	ImagePath string `json:"image_path"`
	ModelPath string `json:"model_path,omitempty"` // optional override of the configured model
	RequestID string `json:"request_id"`
}

// Detection is a single detected object reported by the C++ binary.
type Detection struct {
	ClassID    int     `json:"class_id"`
	ClassName  string  `json:"class_name"`
	Confidence float32 `json:"confidence"`
	Box        Box     `json:"box"` // bounding box; Box is declared elsewhere in this package
}

// TimingInfo records per-stage latencies, in milliseconds, for one
// inference request (see InferenceEngine.Infer for the stages).
type TimingInfo struct {
	ValidationTime  float64 `json:"validation_time_ms"`   // image-path stat check
	InferenceTime   float64 `json:"inference_time_ms"`    // C++ subprocess execution
	OutputParseTime float64 `json:"output_parse_time_ms"` // reading + unmarshaling the output JSON
	TotalTime       float64 `json:"total_time_ms"`        // end-to-end wall time
}

// InferenceResponse is the result of one inference request. Most fields are
// unmarshaled directly from the JSON the C++ binary writes; RequestID,
// ProcessTime, and Timing are filled in by the Go side.
type InferenceResponse struct {
	Status      string      `json:"status"` // "failed" on error paths; otherwise as reported by the binary
	RequestID   string      `json:"request_id"`
	ImagePath   string      `json:"image_path"`
	ImageWidth  int         `json:"image_width"`
	ImageHeight int         `json:"image_height"`
	Detections  []Detection `json:"detections"`
	ProcessTime float64     `json:"process_time_ms"`
	Timing      TimingInfo  `json:"timing"`
	Error       string      `json:"error,omitempty"` // human-readable failure reason
}

// NewInferenceEngine constructs an InferenceEngine, verifying up front that
// the C++ binary and model file referenced by cfg exist on disk. It returns
// an error if either path is missing; the engine is returned un-warmed
// (call WarmupModel to preload the model).
func NewInferenceEngine(cfg *config.Config) (*InferenceEngine, error) {
	// Fail fast if the C++ binary is missing.
	if _, err := os.Stat(cfg.CppBinaryPath); err != nil {
		return nil, fmt.Errorf("C++ binary not found at %s: %w", cfg.CppBinaryPath, err)
	}

	// Fail fast if the model file is missing.
	if _, err := os.Stat(cfg.ModelPath); err != nil {
		return nil, fmt.Errorf("model not found at %s: %w", cfg.ModelPath, err)
	}

	modelInfo := &ModelInfo{
		Version:    ParseModelVersion(cfg.ModelVersion),
		NumClasses: cfg.NumClasses,
		InputSize:  cfg.InputSize,
		ClassNames: DefaultCOCOClasses(),
	}

	// Default to the 80 COCO classes when the config does not specify a count.
	if modelInfo.NumClasses == 0 {
		modelInfo.NumClasses = 80
	}

	// isWarmedUp and warmupMutex are deliberately left at their useful zero
	// values; explicitly initializing them was redundant.
	return &InferenceEngine{
		config:    cfg,
		modelInfo: modelInfo,
	}, nil
}

// Infer runs object detection on the image at imagePath by invoking the
// external C++ binary and returns the parsed detection results.
//
// The pipeline has three timed stages: (1) validate that the image exists,
// (2) execute the C++ binary (model path, image path, and a per-request JSON
// output path are passed as positional arguments), and (3) read and parse
// the JSON the binary wrote. Per-stage timings in milliseconds are attached
// to the response. On any failure, a non-nil *InferenceResponse with Status
// "failed" is returned alongside the error so callers can still serialize a
// partial result.
func (e *InferenceEngine) Infer(imagePath string) (*InferenceResponse, error) {
	startTime := time.Now()
	requestID := uuid.New().String()
	timing := TimingInfo{}

	// fail builds the uniform error response and stamps the total time.
	fail := func(msg string, err error) (*InferenceResponse, error) {
		timing.TotalTime = time.Since(startTime).Seconds() * 1000
		return &InferenceResponse{
			Status:    "failed",
			RequestID: requestID,
			ImagePath: imagePath,
			Error:     msg,
			Timing:    timing,
		}, err
	}

	// Stage 1: Validate image path.
	validationStart := time.Now()
	if _, err := os.Stat(imagePath); err != nil {
		timing.ValidationTime = time.Since(validationStart).Seconds() * 1000
		return fail(fmt.Sprintf("image not found: %v", err), err)
	}
	timing.ValidationTime = time.Since(validationStart).Seconds() * 1000

	// Each request writes to its own output file so concurrent calls cannot
	// clobber each other; the file is removed when the request completes.
	outputPath := filepath.Join(e.config.TempDir, fmt.Sprintf("output_%s.json", requestID))
	defer os.Remove(outputPath)

	// Stage 2: Call the C++ binary with a hard 30-second deadline.
	// exec.CommandContext kills the child process on timeout, replacing the
	// previous manual goroutine+select pattern and removing the risk of
	// leaking the subprocess. Stderr is captured so failures are diagnosable.
	inferenceStart := time.Now()
	ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
	defer cancel()

	cmd := exec.CommandContext(ctx, e.config.CppBinaryPath, e.config.ModelPath, imagePath, outputPath)
	var stderr bytes.Buffer
	cmd.Stderr = &stderr

	runErr := cmd.Run()
	timing.InferenceTime = time.Since(inferenceStart).Seconds() * 1000
	if ctx.Err() == context.DeadlineExceeded {
		return fail("inference timeout", fmt.Errorf("inference timeout"))
	}
	if runErr != nil {
		return fail(fmt.Sprintf("inference failed: %v (stderr: %s)", runErr, stderr.String()), runErr)
	}

	// Stage 3: Read and parse the JSON the binary wrote.
	parseStart := time.Now()
	outputData, err := os.ReadFile(outputPath)
	if err != nil {
		timing.OutputParseTime = time.Since(parseStart).Seconds() * 1000
		return fail(fmt.Sprintf("failed to read output: %v", err), err)
	}

	var response InferenceResponse
	if err := json.Unmarshal(outputData, &response); err != nil {
		timing.OutputParseTime = time.Since(parseStart).Seconds() * 1000
		return fail(fmt.Sprintf("failed to parse output: %v", err), err)
	}
	timing.OutputParseTime = time.Since(parseStart).Seconds() * 1000

	// Overwrite metadata the binary does not know about.
	response.RequestID = requestID
	response.ProcessTime = time.Since(startTime).Seconds() * 1000
	timing.TotalTime = response.ProcessTime
	response.Timing = timing

	// Debug: Log timing information.
	log.Printf("[TIMING] Validation=%.2fms, Inference=%.2fms, Parse=%.2fms, Total=%.2fms",
		timing.ValidationTime, timing.InferenceTime, timing.OutputParseTime, timing.TotalTime)

	return &response, nil
}

// GetModelInfo returns a snapshot of the engine's configuration and model
// metadata as a generic map, suitable for JSON serialization (e.g. in a
// status endpoint).
func (e *InferenceEngine) GetModelInfo() map[string]interface{} {
	e.mu.Lock()
	defer e.mu.Unlock()

	info := make(map[string]interface{}, 7)
	info["model_path"] = e.config.ModelPath
	info["cpp_binary_path"] = e.config.CppBinaryPath
	info["confidence_threshold"] = e.config.ConfidenceThreshold
	info["nms_threshold"] = e.config.NMSThreshold
	info["input_size"] = e.config.InputSize
	info["model_version"] = e.modelInfo.Version.String()
	info["num_classes"] = e.modelInfo.NumClasses
	return info
}

// GetModelConfig returns the engine's model metadata. The returned pointer
// refers to shared state; callers should treat it as read-only.
func (e *InferenceEngine) GetModelConfig() *ModelInfo {
	e.mu.Lock()
	info := e.modelInfo
	e.mu.Unlock()
	return info
}

// WarmupModel performs a warmup inference so the C++ binary loads the model
// into memory once, reducing latency for the first real request. It is safe
// for concurrent use and is a no-op after the first successful run.
func (e *InferenceEngine) WarmupModel() error {
	e.warmupMutex.Lock()
	defer e.warmupMutex.Unlock()

	// If already warmed up, skip.
	if e.isWarmedUp {
		return nil
	}

	// Ensure the temp directory exists before writing scratch files into it.
	if err := os.MkdirAll(e.config.TempDir, 0755); err != nil {
		return fmt.Errorf("failed to create temp directory: %w", err)
	}

	// Synthesize a tiny valid JPEG to push through the full pipeline.
	warmupImagePath := filepath.Join(e.config.TempDir, "warmup_image.jpg")
	if err := createWarmupImage(warmupImagePath); err != nil {
		return fmt.Errorf("failed to create warmup image: %w", err)
	}
	defer os.Remove(warmupImagePath)

	outputPath := filepath.Join(e.config.TempDir, "warmup_output.json")
	defer os.Remove(outputPath)

	// Warmup gets a longer (60s) deadline than regular inference because it
	// includes one-time model loading. exec.CommandContext kills the child
	// on timeout, replacing the previous manual goroutine+select pattern and
	// removing the risk of leaking the subprocess.
	ctx, cancel := context.WithTimeout(context.Background(), 60*time.Second)
	defer cancel()

	cmd := exec.CommandContext(ctx, e.config.CppBinaryPath, e.config.ModelPath, warmupImagePath, outputPath)
	if err := cmd.Run(); err != nil {
		if ctx.Err() == context.DeadlineExceeded {
			return fmt.Errorf("warmup inference timeout")
		}
		return fmt.Errorf("warmup inference failed: %w", err)
	}

	e.isWarmedUp = true
	return nil
}

// IsWarmedUp reports whether a warmup inference has completed successfully.
func (e *InferenceEngine) IsWarmedUp() bool {
	e.warmupMutex.Lock()
	warmed := e.isWarmedUp
	e.warmupMutex.Unlock()
	return warmed
}

// createWarmupImage creates a simple test image for model warmup
func createWarmupImage(imagePath string) error {
	// Create a minimal valid JPEG file (640x640 blue image)
	// This is a base64-encoded minimal JPEG
	jpegData := []byte{
		0xFF, 0xD8, 0xFF, 0xE0, 0x00, 0x10, 0x4A, 0x46, 0x49, 0x46, 0x00, 0x01,
		0x01, 0x00, 0x00, 0x01, 0x00, 0x01, 0x00, 0x00, 0xFF, 0xDB, 0x00, 0x43,
		0x00, 0x08, 0x06, 0x06, 0x07, 0x06, 0x05, 0x08, 0x07, 0x07, 0x07, 0x09,
		0x09, 0x08, 0x0A, 0x0C, 0x14, 0x0D, 0x0C, 0x0B, 0x0B, 0x0C, 0x19, 0x12,
		0x13, 0x0F, 0x14, 0x1D, 0x1A, 0x1F, 0x1E, 0x1D, 0x1A, 0x1C, 0x1C, 0x20,
		0x24, 0x2E, 0x27, 0x20, 0x22, 0x2C, 0x23, 0x1C, 0x1C, 0x28, 0x37, 0x29,
		0x2C, 0x30, 0x31, 0x34, 0x34, 0x34, 0x1F, 0x27, 0x39, 0x3D, 0x38, 0x32,
		0x3C, 0x2E, 0x33, 0x34, 0x32, 0xFF, 0xC0, 0x00, 0x0B, 0x08, 0x02, 0x80,
		0x02, 0x80, 0x01, 0x01, 0x11, 0x00, 0xFF, 0xC4, 0x00, 0x1F, 0x00, 0x00,
		0x01, 0x05, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x00, 0x00, 0x00, 0x00,
		0x00, 0x00, 0x00, 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08,
		0x09, 0x0A, 0x0B, 0xFF, 0xC4, 0x00, 0xB5, 0x10, 0x00, 0x02, 0x01, 0x03,
		0x03, 0x02, 0x04, 0x03, 0x05, 0x05, 0x04, 0x04, 0x00, 0x00, 0x01, 0x7D,
		0x01, 0x02, 0x03, 0x00, 0x04, 0x11, 0x05, 0x12, 0x21, 0x31, 0x41, 0x06,
		0x13, 0x51, 0x61, 0x07, 0x22, 0x71, 0x14, 0x32, 0x81, 0x91, 0xA1, 0x08,
		0x23, 0x42, 0xB1, 0xC1, 0x15, 0x52, 0xD1, 0xF0, 0x24, 0x33, 0x62, 0x72,
		0x82, 0x09, 0x0A, 0x16, 0x17, 0x18, 0x19, 0x1A, 0x25, 0x26, 0x27, 0x28,
		0x29, 0x2A, 0x34, 0x35, 0x36, 0x37, 0x38, 0x39, 0x3A, 0x43, 0x44, 0x45,
		0x46, 0x47, 0x48, 0x49, 0x4A, 0x53, 0x54, 0x55, 0x56, 0x57, 0x58, 0x59,
		0x5A, 0x63, 0x64, 0x65, 0x66, 0x67, 0x68, 0x69, 0x6A, 0x73, 0x74, 0x75,
		0x76, 0x77, 0x78, 0x79, 0x7A, 0x83, 0x84, 0x85, 0x86, 0x87, 0x88, 0x89,
		0x8A, 0x92, 0x93, 0x94, 0x95, 0x96, 0x97, 0x98, 0x99, 0x9A, 0xA2, 0xA3,
		0xA4, 0xA5, 0xA6, 0xA7, 0xA8, 0xA9, 0xAA, 0xB2, 0xB3, 0xB4, 0xB5, 0xB6,
		0xB7, 0xB8, 0xB9, 0xBA, 0xC2, 0xC3, 0xC4, 0xC5, 0xC6, 0xC7, 0xC8, 0xC9,
		0xCA, 0xD2, 0xD3, 0xD4, 0xD5, 0xD6, 0xD7, 0xD8, 0xD9, 0xDA, 0xE1, 0xE2,
		0xE3, 0xE4, 0xE5, 0xE6, 0xE7, 0xE8, 0xE9, 0xEA, 0xF1, 0xF2, 0xF3, 0xF4,
		0xF5, 0xF6, 0xF7, 0xF8, 0xF9, 0xFA, 0xFF, 0xDA, 0x00, 0x08, 0x01, 0x01,
		0x00, 0x00, 0x3F, 0x00, 0xFB, 0xD0, 0xFF, 0xD9,
	}

	return os.WriteFile(imagePath, jpegData, 0644)
}
