package com.example.chatbackend.controller;

import com.example.chatbackend.service.AiService;
import com.example.chatbackend.service.SpeechService;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.http.MediaType;
import org.springframework.util.StringUtils;
import org.springframework.web.bind.annotation.*;
import org.springframework.web.multipart.MultipartFile;

import java.io.*;
import java.util.Map;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import jakarta.servlet.http.HttpServletRequest;
import java.util.UUID;
import javax.sound.sampled.*;

@RestController
@RequestMapping("/api")
public class ChatController {

    /** Maximum accepted upload size in bytes (10 MiB). */
    private static final long MAX_UPLOAD_BYTES = 10L * 1024 * 1024;

    private final AiService aiService;
    private final SpeechService speechService;

    /**
     * Directory where synthesized reply audio files are written.
     * Empty by default; falls back to {@code <java.io.tmpdir>/chat-media}.
     */
    @Value("${media.baseDir:}")
    private String mediaBaseDir;

    public ChatController(AiService aiService, SpeechService speechService) {
        this.aiService = aiService;
        this.speechService = speechService;
    }

    /**
     * Plain text chat endpoint.
     *
     * @param body JSON body with optional {@code message} and {@code sessionId} keys
     *             (missing keys default to the empty string)
     * @return a single-entry map {@code {"reply": <ai reply>}}
     */
    @PostMapping("/chat")
    public Map<String, String> chat(@RequestBody Map<String, String> body) {
        String message = body.getOrDefault("message", "");
        String sessionId = body.getOrDefault("sessionId", "");
        String reply = aiService.generateReply(sessionId, message);
        return Map.of("reply", reply);
    }

    /**
     * Voice chat endpoint: accepts an uploaded audio file (mp3/aac/wav, max 10 MiB),
     * transcribes it, generates an AI reply, synthesizes the reply as 16 kHz WAV and
     * exposes it under {@code /media/<file>} on this server.
     *
     * @param file        uploaded audio (required, non-empty)
     * @param sessionId   conversation session identifier forwarded to the AI service
     * @param durationSec client-reported duration; currently unused server-side
     * @param metadata    optional client metadata; currently unused server-side
     * @param format      optional explicit format; when absent the filename extension is used
     * @return message ids, transcript, reply text, and the reply audio URL
     * @throws IllegalArgumentException on missing/oversized file or unsupported format
     */
    @PostMapping(value = "/chat/audio", consumes = MediaType.MULTIPART_FORM_DATA_VALUE)
    public Map<String, Object> chatAudio(
            @RequestParam("file") MultipartFile file,
            @RequestParam("sessionId") String sessionId,
            @RequestParam("durationSec") Double durationSec,
            @RequestParam(value = "format", required = false) String format,
            @RequestParam(value = "metadata", required = false) String metadata,
            HttpServletRequest request
    ) throws Exception {
        if (file == null || file.isEmpty()) {
            throw new IllegalArgumentException("file is required");
        }
        if (file.getSize() > MAX_UPLOAD_BYTES) {
            throw new IllegalArgumentException("file too large (<=10MB)");
        }
        // Explicit "format" param wins; otherwise fall back to the filename extension.
        String original = file.getOriginalFilename();
        String ext = (original != null && original.contains("."))
                ? original.substring(original.lastIndexOf('.') + 1).toLowerCase()
                : "";
        String declared = (format == null || format.isEmpty()) ? ext : format.toLowerCase();
        if (!("mp3".equals(declared) || "aac".equals(declared) || "wav".equals(declared))) {
            throw new IllegalArgumentException("unsupported format");
        }

        byte[] audioBytes = file.getBytes();
        byte[] pcm16kMono = convertToPcm16kMono(audioBytes, declared);
        String transcript = speechService.transcribePcm16kMono(pcm16kMono, 20000);
        if (!StringUtils.hasText(transcript)) {
            transcript = "";
        }
        String reply = aiService.generateReply(sessionId, transcript);

        // Synthesize reply audio (WAV 16k) and expose it as a URL under /media/.
        byte[] replyAudio = speechService.synthesizeWav16k(reply, 20000);
        String baseDir = (mediaBaseDir != null && !mediaBaseDir.isEmpty())
                ? mediaBaseDir
                : System.getProperty("java.io.tmpdir") + File.separator + "chat-media";
        // createDirectories is a no-op when the directory already exists, so no
        // exists() pre-check is needed (avoids a TOCTOU race on first request).
        Path dir = Paths.get(baseDir);
        Files.createDirectories(dir);
        String replyFile = "reply_" + UUID.randomUUID() + ".wav";
        Path outPath = dir.resolve(replyFile);
        Files.write(outPath, replyAudio);
        // NOTE(review): scheme/host/port come straight from the servlet request —
        // behind a reverse proxy this needs X-Forwarded-* handling; confirm deployment.
        String mediaUrl = request.getScheme() + "://" + request.getServerName()
                + (request.getServerPort() == 80 || request.getServerPort() == 443 ? "" : (":" + request.getServerPort()))
                + "/media/" + replyFile;

        String messageId = "m_" + UUID.randomUUID();
        String replyMessageId = "m_" + UUID.randomUUID();
        // NOTE(review): audioUrl and replyAudioUrl both point at the synthesized
        // reply; presumably audioUrl should reference the uploaded audio — confirm
        // with the client contract before changing.
        return Map.of(
                "messageId", messageId,
                "audioUrl", mediaUrl,
                "transcript", transcript,
                "reply", reply,
                "replyMessageId", replyMessageId,
                "replyAudioUrl", mediaUrl
        );
    }

    /**
     * Converts the given encoded audio to raw PCM signed 16-bit little-endian,
     * 16 kHz, mono. WAV input is converted directly; compressed input (mp3/aac)
     * is decoded in two stages, falling back to an external ffmpeg binary when
     * no Java SPI can handle it.
     *
     * @param inputBytes encoded audio bytes
     * @param format     lower-case declared format ("wav", "mp3" or "aac")
     * @return raw PCM s16le 16 kHz mono bytes
     * @throws IllegalArgumentException when no decoder (SPI or ffmpeg) is available
     */
    private static byte[] convertToPcm16kMono(byte[] inputBytes, String format) throws Exception {
        if ("wav".equals(format)) {
            try (AudioInputStream in = AudioSystem.getAudioInputStream(new ByteArrayInputStream(inputBytes))) {
                AudioFormat target = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED, 16000f, 16, 1, 2, 16000f, false);
                try (AudioInputStream converted = AudioSystem.getAudioInputStream(target, in)) {
                    return toByteArray(converted);
                }
            }
        }
        // For compressed formats (e.g., mp3/aac), first decode to PCM without changing
        // channel count to avoid known issues in some SPI implementations when
        // downmixing during decode, then resample/downmix in a second step.
        try (AudioInputStream compressed = AudioSystem.getAudioInputStream(new ByteArrayInputStream(inputBytes))) {
            AudioFormat sourceFormat = compressed.getFormat();
            AudioFormat intermediatePcm = new AudioFormat(
                    AudioFormat.Encoding.PCM_SIGNED,
                    sourceFormat.getSampleRate(),
                    16,
                    sourceFormat.getChannels(),
                    sourceFormat.getChannels() * 2,
                    sourceFormat.getSampleRate(),
                    false
            );
            try (AudioInputStream decodedPcm = AudioSystem.getAudioInputStream(intermediatePcm, compressed)) {
                AudioFormat target = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED, 16000f, 16, 1, 2, 16000f, false);
                try (AudioInputStream mono16k = AudioSystem.getAudioInputStream(target, decodedPcm)) {
                    return toByteArray(mono16k);
                }
            }
        } catch (UnsupportedAudioFileException | IllegalArgumentException e) {
            // UnsupportedAudioFileException: no SPI recognized the container at all.
            // IllegalArgumentException: an SPI decoded it, but no conversion chain to
            // the target format exists. Either way, fall back to ffmpeg if available.
            byte[] viaFfmpeg = tryFfmpegToPcm16kMono(inputBytes);
            if (viaFfmpeg != null && viaFfmpeg.length > 0) {
                return viaFfmpeg;
            }
            throw new IllegalArgumentException("Server missing codec to decode " + format + ", and ffmpeg not available. Please upload wav 16k mono.");
        }
    }

    /** Fully drains the given audio stream into a byte array. */
    private static byte[] toByteArray(AudioInputStream stream) throws IOException {
        // InputStream.readAllBytes (Java 9+) replaces the manual buffer-copy loop.
        return stream.readAllBytes();
    }

    /**
     * Decodes arbitrary audio to raw PCM s16le 16 kHz mono by piping it through an
     * external {@code ffmpeg} binary on the PATH.
     *
     * @param inputBytes encoded audio bytes fed to ffmpeg's stdin
     * @return raw PCM bytes, or {@code null} when ffmpeg is missing or failed
     */
    private static byte[] tryFfmpegToPcm16kMono(byte[] inputBytes) {
        Process process = null;
        try {
            ProcessBuilder pb = new ProcessBuilder(
                    "ffmpeg",
                    "-hide_banner",
                    "-loglevel", "error",
                    "-i", "pipe:0",
                    "-ac", "1",
                    "-ar", "16000",
                    "-f", "s16le",
                    "-acodec", "pcm_s16le",
                    "pipe:1"
            );
            // Do NOT merge stderr into stdout: any diagnostic text ffmpeg prints
            // would be interleaved with — and corrupt — the raw PCM we read from
            // stdout. Discard stderr at the OS level so it can never block us either.
            pb.redirectError(ProcessBuilder.Redirect.DISCARD);
            process = pb.start();

            // Feed stdin from a separate thread while this thread drains stdout.
            // Writing all input first and reading afterwards deadlocks as soon as
            // ffmpeg fills the stdout pipe buffer while we are still blocked on write.
            final Process child = process;
            Thread feeder = new Thread(() -> {
                try (OutputStream stdin = child.getOutputStream()) {
                    stdin.write(inputBytes);
                } catch (IOException ignored) {
                    // ffmpeg may close stdin early; the exit code decides success.
                }
            }, "ffmpeg-stdin-feeder");
            feeder.start();

            byte[] pcm;
            try (InputStream stdout = process.getInputStream()) {
                pcm = stdout.readAllBytes();
            }
            feeder.join();

            if (process.waitFor() == 0 && pcm.length > 0) {
                return pcm;
            }
        } catch (InterruptedException e) {
            // Restore the interrupt flag instead of swallowing it.
            Thread.currentThread().interrupt();
        } catch (IOException ignored) {
            // ffmpeg binary missing or not executable — caller treats null as
            // "decoder unavailable" and reports a clear error to the client.
        } finally {
            if (process != null) {
                process.destroy();
            }
        }
        return null;
    }
}


