package handler

import (
	"fmt"
	"net/http"
	"os"
	"path/filepath"

	"goyolo/internal/config"
	"goyolo/internal/inference"
	"goyolo/internal/textdetection"

	"github.com/gin-gonic/gin"
)

// Handler bundles the dependencies shared by all HTTP endpoint handlers:
// the inference engine that runs the model and the service configuration.
type Handler struct {
	engine *inference.InferenceEngine // shared YOLOv8 OpenVINO inference engine
	config *config.Config             // service-wide configuration (defaults, model paths, device)
}

// InferRequest is the JSON body for POST /api/v1/infer.
type InferRequest struct {
	// ImagePath is a server-local path to the image; must exist on disk.
	ImagePath string `json:"image_path" binding:"required"`
}

// BatchInferRequest is the JSON body for POST /api/v1/infer/batch.
type BatchInferRequest struct {
	// ImagePaths lists server-local image paths; at least one is required.
	ImagePaths []string `json:"image_paths" binding:"required,min=1"`
}

// HealthResponse is the JSON payload returned by GET /health.
type HealthResponse struct {
	Status  string `json:"status"`  // liveness status, e.g. "healthy"
	Message string `json:"message"` // human-readable status description
}

// NewHandler constructs a Handler backed by the given inference engine
// and service configuration.
func NewHandler(engine *inference.InferenceEngine, cfg *config.Config) *Handler {
	h := Handler{engine: engine, config: cfg}
	return &h
}

// HealthCheck handles GET /health
// HealthCheck handles GET /health and reports service liveness.
func (h *Handler) HealthCheck(c *gin.Context) {
	resp := HealthResponse{
		Status:  "healthy",
		Message: "YOLOv8 OpenVINO inference service is running",
	}
	c.JSON(http.StatusOK, resp)
}

// Infer handles POST /api/v1/infer
// Infer handles POST /api/v1/infer.
//
// It binds an InferRequest, verifies that the image exists on disk, runs
// the engine on it, and returns the inference result as JSON.
func (h *Handler) Infer(c *gin.Context) {
	var req InferRequest
	if bindErr := c.ShouldBindJSON(&req); bindErr != nil {
		c.JSON(http.StatusBadRequest, gin.H{"error": fmt.Sprintf("invalid request: %v", bindErr)})
		return
	}

	// Reject paths that do not resolve to an existing file.
	if _, statErr := os.Stat(req.ImagePath); statErr != nil {
		c.JSON(http.StatusBadRequest, gin.H{"error": fmt.Sprintf("image not found: %s", req.ImagePath)})
		return
	}

	result, inferErr := h.engine.Infer(req.ImagePath)
	if inferErr != nil {
		c.JSON(http.StatusInternalServerError, gin.H{"error": fmt.Sprintf("inference failed: %v", inferErr)})
		return
	}

	c.JSON(http.StatusOK, result)
}

// DetectText handles POST /api/v1/detect/text
// DetectText handles POST /api/v1/detect/text.
//
// It binds a TextDetectionRequest, applies configured defaults for the
// confidence threshold and output directory, validates the input image
// path, loads the text-detection model for the configured OpenVINO
// device, and runs detection. Errors are returned as structured JSON
// with a status/error/details shape.
func (h *Handler) DetectText(c *gin.Context) {
	var req textdetection.TextDetectionRequest
	if err := c.ShouldBindJSON(&req); err != nil {
		c.JSON(http.StatusBadRequest, gin.H{
			"status":  "error",
			"error":   "invalid_request",
			"details": fmt.Sprintf("Invalid request: %v", err),
		})
		return
	}

	// Fall back to configured defaults when the client omits these fields.
	if req.ConfidenceThreshold == 0 {
		req.ConfidenceThreshold = h.config.TextDetectionConfidence
	}
	if req.OutputDir == "" {
		req.OutputDir = h.config.TextDetectionOutputDir
	}

	// Reject requests for images that do not exist on disk.
	if _, err := os.Stat(req.ImagePath); err != nil {
		c.JSON(http.StatusBadRequest, gin.H{
			"status":  "error",
			"error":   "invalid_image_path",
			"details": fmt.Sprintf("Image file not found: %s", req.ImagePath),
		})
		return
	}

	// Create detector with OpenVINO device selection.
	detector, err := textdetection.NewDetector(h.config.TextDetectionModelPath, h.config.OpenVINODevice)
	if err != nil {
		c.JSON(http.StatusInternalServerError, gin.H{
			"status":  "error",
			"error":   "model_not_found",
			"details": fmt.Sprintf("Failed to load model: %v", err),
		})
		return
	}
	defer detector.Close()

	// Perform detection.
	response, err := detector.Detect(&req)
	if err != nil {
		// Detect may return a nil response alongside the error; guard before
		// reading response.Error to avoid a nil-pointer dereference panic.
		if response == nil {
			c.JSON(http.StatusInternalServerError, gin.H{
				"status":  "error",
				"error":   "detection_failed",
				"details": fmt.Sprintf("Detection failed: %v", err),
			})
			return
		}
		statusCode := http.StatusInternalServerError
		if response.Error == "invalid_request" || response.Error == "invalid_image_path" {
			statusCode = http.StatusBadRequest
		}
		c.JSON(statusCode, response)
		return
	}

	c.JSON(http.StatusOK, response)
}

// InferBatch handles POST /api/v1/infer/batch
// InferBatch handles POST /api/v1/infer/batch.
//
// Every image path is validated up front; inference then runs on each
// path in turn. A per-image inference failure is recorded as a "failed"
// entry in the results rather than aborting the whole batch.
func (h *Handler) InferBatch(c *gin.Context) {
	var req BatchInferRequest
	if bindErr := c.ShouldBindJSON(&req); bindErr != nil {
		c.JSON(http.StatusBadRequest, gin.H{"error": fmt.Sprintf("invalid request: %v", bindErr)})
		return
	}

	// All paths must exist on disk before any inference starts.
	for _, p := range req.ImagePaths {
		if _, statErr := os.Stat(p); statErr != nil {
			c.JSON(http.StatusBadRequest, gin.H{"error": fmt.Sprintf("image not found: %s", p)})
			return
		}
	}

	results := make([]*inference.InferenceResponse, 0, len(req.ImagePaths))
	for _, p := range req.ImagePaths {
		res, inferErr := h.engine.Infer(p)
		if inferErr != nil {
			// Record the failure for this image and keep going.
			res = &inference.InferenceResponse{
				Status:    "failed",
				ImagePath: p,
				Error:     inferErr.Error(),
			}
		}
		results = append(results, res)
	}

	c.JSON(http.StatusOK, gin.H{
		"results": results,
		"total":   len(results),
	})
}

// GetModelInfo handles GET /api/v1/model/info
func (h *Handler) GetModelInfo(c *gin.Context) {
	info := h.engine.GetModelInfo()
	c.JSON(http.StatusOK, info)
}

// UploadAndInfer handles POST /api/v1/infer/upload
func (h *Handler) UploadAndInfer(c *gin.Context) {
	file, err := c.FormFile("image")
	if err != nil {
		c.JSON(http.StatusBadRequest, gin.H{
			"error": fmt.Sprintf("failed to get image: %v", err),
		})
		return
	}

	// Save uploaded file to temp directory
	tempDir := "/tmp/yolov8_uploads"
	os.MkdirAll(tempDir, 0755)

	filePath := filepath.Join(tempDir, file.Filename)
	if err := c.SaveUploadedFile(file, filePath); err != nil {
		c.JSON(http.StatusInternalServerError, gin.H{
			"error": fmt.Sprintf("failed to save image: %v", err),
		})
		return
	}
	defer os.Remove(filePath)

	// Run inference
	response, err := h.engine.Infer(filePath)
	if err != nil {
		c.JSON(http.StatusInternalServerError, gin.H{
			"error": fmt.Sprintf("inference failed: %v", err),
		})
		return
	}

	c.JSON(http.StatusOK, response)
}
