package com.watermark.service.impl;

import com.watermark.config.AppConfig;
import com.watermark.model.WatermarkRequest;
import com.watermark.model.WatermarkResponse;
import com.watermark.service.WatermarkService;
import com.watermark.util.ImageUtils;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.deeplearning4j.nn.conf.MultiLayerConfiguration;
import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
import org.deeplearning4j.nn.conf.inputs.InputType;
import org.deeplearning4j.nn.conf.layers.ConvolutionLayer;
import org.deeplearning4j.nn.conf.layers.BatchNormalization;
import org.deeplearning4j.nn.conf.layers.SubsamplingLayer;
import org.deeplearning4j.nn.multilayer.MultiLayerNetwork;
import org.nd4j.linalg.activations.Activation;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;
import org.nd4j.linalg.indexing.NDArrayIndex;
import org.opencv.core.*;
import org.opencv.imgcodecs.Imgcodecs;
import org.opencv.imgproc.Imgproc;
import org.springframework.stereotype.Service;
import org.springframework.web.multipart.MultipartFile;

import javax.annotation.PostConstruct;
import java.io.IOException;
import java.nio.file.Paths;
import java.time.Instant;
import java.util.UUID;

@Slf4j
@Service
@RequiredArgsConstructor
public class WatermarkServiceImpl implements WatermarkService {
    private final AppConfig appConfig;
    private MultiLayerNetwork embedderNetwork;
    private MultiLayerNetwork extractorNetwork;
    private static final int DEFAULT_NETWORK_SIZE = 128;
    private static final int DEFAULT_QUALITY = 90;

    @PostConstruct
    public void init() throws Exception {
        log.info("Initializing watermark service...");
        embedderNetwork = createEmbedderNetwork();
        extractorNetwork = createExtractorNetwork();
        ImageUtils.createDirectoryIfNotExists(appConfig.getUpload().getDir());
        log.info("Watermark service initialized successfully");
    }

    @Override
    public WatermarkResponse embedWatermark(WatermarkRequest request) throws Exception {
        log.info("Starting watermark embedding process...");
        long startTime = Instant.now().toEpochMilli();
        
        validateRequest(request);
        
        try {
            // Load and validate original image
            Mat originalImage = loadAndValidateImage(request.getOriginalImage(), "Original");
            
            // Store original dimensions
            int originalHeight = originalImage.rows();
            int originalWidth = originalImage.cols();
            
            // Log watermark parameters
            log.info("Watermark parameters - Opacity: {}, Density: {}, Angle: {}", 
                request.getOpacity(), request.getDensity(), request.getAngle());
            
            // Create text watermark
            Mat watermarkImage = ImageUtils.createTextWatermark(
                request.getWatermarkText(),
                originalWidth,
                originalHeight,
                request.getWatermarkFont() != null ? request.getWatermarkFont() : "Arial",
                request.getFontSize() != null ? request.getFontSize() : 32,
                request.getColor() != null ? request.getColor() : "#000000",
                request.getOpacity() != null ? request.getOpacity() : 0.5,
                request.getAngle() != null ? request.getAngle() : 0,
                request.getDensity() != null ? request.getDensity() : 1.0
            );
            
            // Embed watermark
            Mat watermarkedImage = ImageUtils.embedWatermark(originalImage, watermarkImage, 0.5);
            
            // Save result
            String outputFileName = UUID.randomUUID().toString() + ".png";
            String outputPath = Paths.get(appConfig.getUpload().getDir(), outputFileName).toString();
            ImageUtils.saveImage(watermarkedImage, outputPath);
            
            // Calculate similarity
            double similarity = ImageUtils.calculateSimilarity(originalImage, watermarkedImage);
            
            long processingTime = Instant.now().toEpochMilli() - startTime;
            log.info("Watermark embedding completed in {} ms, similarity: {}", processingTime, similarity);
            
            return WatermarkResponse.builder()
                .watermarkedImageUrl("/uploads/" + outputFileName)
                .similarity(similarity)
                .processingTime(processingTime)
                .message("Watermark embedded successfully")
                .build();
                
        } catch (Exception e) {
            log.error("Error during watermark embedding: ", e);
            throw new RuntimeException("Failed to embed watermark: " + e.getMessage(), e);
        }
    }

    @Override
    public WatermarkResponse extractWatermark(MultipartFile watermarkedImage) throws Exception {
        log.info("Starting watermark text extraction process...");
        long startTime = Instant.now().toEpochMilli();
        
        try {
            // Load and process image
            Mat image = loadAndValidateImage(watermarkedImage, "Watermarked");
            
            // Save original image for reference
            String originalFileName = "debug_original_" + System.currentTimeMillis() + ".png";
            String originalPath = Paths.get(appConfig.getUpload().getDir(), originalFileName).toString();
            ImageUtils.saveImage(image, originalPath);
            
            // Process image for OCR
            Mat processedImage = ImageUtils.preprocessForOCR(image);
            
            // Extract text using OCR
            String extractedText = ImageUtils.performOCR(processedImage);
            log.info("Extracted watermark text: {}", extractedText);
            
            long processingTime = Instant.now().toEpochMilli() - startTime;
            log.info("Watermark text extraction completed in {} ms", processingTime);
            
            return WatermarkResponse.builder()
                .watermarkedImageUrl("/uploads/" + originalFileName)
                .extractedText(extractedText != null && !extractedText.isEmpty() ? extractedText : "No text detected")
                .processingTime(processingTime)
                .message("Watermark text extraction " + 
                    (extractedText != null && !extractedText.isEmpty() ? "completed successfully" : "failed to detect any text"))
                .build();
                
        } catch (Exception e) {
            log.error("Error during watermark text extraction: ", e);
            throw new RuntimeException("Failed to extract watermark text: " + e.getMessage(), e);
        }
    }

    private void validateRequest(WatermarkRequest request) {
        if (request.getOriginalImage() == null) {
            throw new IllegalArgumentException("Original image is required");
        }
        
        if (request.getWatermarkText() == null || request.getWatermarkText().trim().isEmpty()) {
            throw new IllegalArgumentException("Watermark text is required");
        }
        
        if (request.getQuality() != null && (request.getQuality() < 0 || request.getQuality() > 100)) {
            throw new IllegalArgumentException("Image quality must be between 0 and 100");
        }
        
        if (request.getOpacity() != null && (request.getOpacity() < 0 || request.getOpacity() > 1)) {
            throw new IllegalArgumentException("Watermark opacity must be between 0 and 1");
        }
        
        if (request.getFontSize() != null && request.getFontSize() <= 0) {
            throw new IllegalArgumentException("Font size must be greater than 0");
        }

        if (request.getDensity() != null && (request.getDensity() < 0.5 || request.getDensity() > 2.0)) {
            throw new IllegalArgumentException("Watermark density must be between 0.5 and 2.0");
        }

        // Log validated parameters
        log.debug("Validated parameters: opacity={}, density={}", 
            request.getOpacity(), request.getDensity());
    }

    private Mat loadAndValidateImage(MultipartFile file, String imageType) throws IOException {
        Mat image = ImageUtils.loadImage(file);
        
        if (image.empty()) {
            throw new IllegalArgumentException(imageType + " image is invalid or corrupted");
        }
        
        // Validate image size
        if (image.rows() < appConfig.getWatermark().getMinSize() || 
            image.cols() < appConfig.getWatermark().getMinSize()) {
            throw new IllegalArgumentException(imageType + " image is too small. Minimum size is " + 
                appConfig.getWatermark().getMinSize() + "x" + appConfig.getWatermark().getMinSize());
        }
        
        if (image.rows() > appConfig.getWatermark().getMaxSize() || 
            image.cols() > appConfig.getWatermark().getMaxSize()) {
            throw new IllegalArgumentException(imageType + " image is too large. Maximum size is " + 
                appConfig.getWatermark().getMaxSize() + "x" + appConfig.getWatermark().getMaxSize());
        }
        
        return image;
    }

    private Mat preprocessImageForNetwork(Mat image) {
        Mat processed = new Mat();
        
        // Convert to grayscale if needed
        if (image.channels() > 1) {
            Imgproc.cvtColor(image, processed, Imgproc.COLOR_BGR2GRAY);
        } else {
            image.copyTo(processed);
        }
        
        // Resize to network size
        processed = ImageUtils.resizeImage(processed, DEFAULT_NETWORK_SIZE, DEFAULT_NETWORK_SIZE);
        
        // Normalize to [0,1]
        processed.convertTo(processed, CvType.CV_32F);
        Core.normalize(processed, processed, 0, 1, Core.NORM_MINMAX);
        
        return processed;
    }

    private Mat postprocessImage(Mat image, int targetWidth, int targetHeight, int quality) {
        Mat processed = new Mat();
        
        // Denormalize to [0,255]
        Core.normalize(image, processed, 0, 255, Core.NORM_MINMAX);
        processed.convertTo(processed, CvType.CV_8UC1);
        
        // Resize to target dimensions
        if (image.rows() != targetHeight || image.cols() != targetWidth) {
            processed = ImageUtils.resizeImage(processed, targetWidth, targetHeight);
        }
        
        // Apply quality compression if needed
        if (quality < 100) {
            MatOfInt params = new MatOfInt(Imgcodecs.IMWRITE_JPEG_QUALITY, quality);
            MatOfByte buffer = new MatOfByte();
            Imgcodecs.imencode(".jpg", processed, buffer, params);
            processed = Imgcodecs.imdecode(buffer, Imgcodecs.IMREAD_UNCHANGED);
        }
        
        return processed;
    }

    private INDArray matToINDArray(Mat mat) {
        float[] pixels = new float[mat.rows() * mat.cols()];
        mat.get(0, 0, pixels);
        return Nd4j.create(pixels).reshape('c', 1, 1, mat.rows(), mat.cols());
    }

    private Mat indArrayToMat(INDArray array, int height, int width) {
        // Ensure array is 2D
        INDArray reshaped;
        if (array.rank() > 2) {
            // If array is [batch, channels, height, width], take first image and channel
            reshaped = array.get(NDArrayIndex.point(0), NDArrayIndex.point(0));
        } else {
            reshaped = array;
        }
        
        // Create OpenCV Mat
        Mat mat = new Mat(height, width, CvType.CV_32F);
        
        // Copy data
        float[] pixels = reshaped.dup().data().asFloat();
        mat.put(0, 0, pixels);
        
        return mat;
    }

    private MultiLayerNetwork createEmbedderNetwork() throws Exception {
        MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
            .seed(123)
            .list()
            // Encoder layers
            .layer(new ConvolutionLayer.Builder()
                .kernelSize(3,3)
                .stride(1,1)
                .padding(1,1)
                .activation(Activation.LEAKYRELU)
                .nIn(2)
                .nOut(64)
                .build())
            .layer(new BatchNormalization.Builder().build())
            .layer(new ConvolutionLayer.Builder()
                .kernelSize(3,3)
                .stride(1,1)
                .padding(1,1)
                .activation(Activation.LEAKYRELU)
                .nIn(64)
                .nOut(128)
                .build())
            .layer(new BatchNormalization.Builder().build())
            // Decoder layers
            .layer(new ConvolutionLayer.Builder()
                .kernelSize(3,3)
                .stride(1,1)
                .padding(1,1)
                .activation(Activation.LEAKYRELU)
                .nIn(128)
                .nOut(64)
                .build())
            .layer(new BatchNormalization.Builder().build())
            .layer(new ConvolutionLayer.Builder()
                .kernelSize(3,3)
                .stride(1,1)
                .padding(1,1)
                .activation(Activation.SIGMOID)
                .nIn(64)
                .nOut(1)
                .build())
            .setInputType(InputType.convolutional(DEFAULT_NETWORK_SIZE, DEFAULT_NETWORK_SIZE, 2))
            .build();

        MultiLayerNetwork model = new MultiLayerNetwork(conf);
        model.init();
        return model;
    }

    private MultiLayerNetwork createExtractorNetwork() throws Exception {
        MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
            .seed(123)
            .list()
            // Feature extraction layers
            .layer(new ConvolutionLayer.Builder()
                .kernelSize(3,3)
                .stride(1,1)
                .padding(1,1)
                .activation(Activation.LEAKYRELU)
                .nIn(1)
                .nOut(64)
                .build())
            .layer(new BatchNormalization.Builder().build())
            .layer(new ConvolutionLayer.Builder()
                .kernelSize(3,3)
                .stride(1,1)
                .padding(1,1)
                .activation(Activation.LEAKYRELU)
                .nIn(64)
                .nOut(128)
                .build())
            .layer(new BatchNormalization.Builder().build())
            // Watermark reconstruction layers
            .layer(new ConvolutionLayer.Builder()
                .kernelSize(3,3)
                .stride(1,1)
                .padding(1,1)
                .activation(Activation.LEAKYRELU)
                .nIn(128)
                .nOut(64)
                .build())
            .layer(new BatchNormalization.Builder().build())
            .layer(new ConvolutionLayer.Builder()
                .kernelSize(3,3)
                .stride(1,1)
                .padding(1,1)
                .activation(Activation.SIGMOID)
                .nIn(64)
                .nOut(1)
                .build())
            .setInputType(InputType.convolutional(DEFAULT_NETWORK_SIZE, DEFAULT_NETWORK_SIZE, 1))
            .build();

        MultiLayerNetwork model = new MultiLayerNetwork(conf);
        model.init();
        return model;
    }

    private String performOCROnFeatures(INDArray features) {
        try {
            // Convert features to image for OCR
            Mat featureImage = indArrayToMat(features, DEFAULT_NETWORK_SIZE, DEFAULT_NETWORK_SIZE);
            
            // Convert to proper format for OCR
            Mat normalizedImage = new Mat();
            Core.normalize(featureImage, normalizedImage, 0, 255, Core.NORM_MINMAX);
            normalizedImage.convertTo(normalizedImage, CvType.CV_8UC1);
            
            // Enhance image for better OCR results
            Mat enhancedImage = enhanceImageForOCR(normalizedImage);
            
            // Save debug image if needed
            String debugPath = "debug_ocr_" + System.currentTimeMillis() + ".png";
            Imgcodecs.imwrite(debugPath, enhancedImage);
            log.debug("Saved debug OCR image to: {}", debugPath);
            
            // Perform OCR using Tesseract
            try {
                String result = ImageUtils.performOCR(enhancedImage);
                log.debug("OCR result: {}", result);
                return result;
            } catch (Exception e) {
                log.error("OCR processing failed: ", e);
                return "";
            }
            
        } catch (Exception e) {
            log.error("Error during feature conversion: ", e);
            return "";
        }
    }

    private Mat enhanceImageForOCR(Mat image) {
        Mat enhanced = new Mat();
        
        // Convert to grayscale if needed
        if (image.channels() > 1) {
            Imgproc.cvtColor(image, enhanced, Imgproc.COLOR_BGR2GRAY);
        } else {
            image.copyTo(enhanced);
        }
        
        // Convert to 8-bit image
        enhanced.convertTo(enhanced, CvType.CV_8UC1);
        
        // Apply adaptive thresholding
        Mat thresholded = new Mat();
        Imgproc.adaptiveThreshold(
            enhanced, 
            thresholded, 
            255,
            Imgproc.ADAPTIVE_THRESH_GAUSSIAN_C,
            Imgproc.THRESH_BINARY,
            11,
            2
        );
        
        // Apply Gaussian blur for denoising
        Mat denoised = new Mat();
        Imgproc.GaussianBlur(thresholded, denoised, new Size(3, 3), 0);
        
        return denoised;
    }
} 