package com.och.mrcp.core.rtp.session;

import com.och.mrcp.config.RtpConfig;
import com.och.mrcp.core.rtp.model.RtpPacket;
import com.och.mrcp.handler.VoiceRecognitionSessionHandler;

import lombok.Data;
import lombok.extern.slf4j.Slf4j;

import java.net.InetSocketAddress;
import java.util.concurrent.ThreadLocalRandom;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;

/**
 * RTP session management.
 * <p>
 * Tracks the transport state and statistics of a single RTP stream:
 * packetizes outbound audio (speech-synthesis output) through an
 * {@link RtpSender}, buffers inbound packets in a {@link JitterBuffer},
 * and routes inbound audio payloads to an optional
 * {@link VoiceRecognitionSessionHandler}.
 * <p>
 * Thread-safety: counters use atomics, but SSRC learning and state
 * transitions are not synchronized; callers are expected to drive a
 * given session from a single I/O thread per direction.
 */
@Data
@Slf4j
public class RtpSession {
    
    // Session identity
    private final String sessionId;
    private final InetSocketAddress localAddress;
    private final InetSocketAddress remoteAddress;
    
    // MRCP Channel-Identifier associated with this media stream
    private String channelIdentifier;
    
    // RTP parameters — taken from configuration, never hard-coded
    private final RtpConfig rtpConfig;
    private int payloadType;            // RTP payload type (may be re-learned from inbound packets)
    private int sampleRate;             // audio sample rate in Hz
    private int channels;               // number of audio channels
    private int frameSize;              // per-packet timestamp increment; also the outbound payload size in bytes
                                        // (assumes 1 byte per sample, e.g. G.711 — TODO confirm for other codecs)
    
    // Outbound sequence number / timestamp generators
    private AtomicInteger sequenceNumber = new AtomicInteger(0);
    private AtomicLong timestamp = new AtomicLong(0);
    // SSRC is null until either learned from the first inbound packet
    // (receive path) or generated lazily on first send (send path)
    private Integer ssrc;
    
    // Traffic statistics
    private AtomicLong packetsSent = new AtomicLong(0);
    private AtomicLong packetsReceived = new AtomicLong(0);
    private AtomicLong bytesSent = new AtomicLong(0);
    private AtomicLong bytesReceived = new AtomicLong(0);
    private AtomicLong lastActivityTime = new AtomicLong(System.currentTimeMillis());
    
    // Session lifecycle state
    private SessionState state = SessionState.INITIALIZED;
    private long startTime;
    private long endTime;
    
    // Jitter buffer for inbound packet reordering
    private JitterBuffer jitterBuffer;
    
    // Optional sink for inbound audio (speech recognition)
    private VoiceRecognitionSessionHandler voiceRecognitionHandler;
    
    // RTP sender for outbound audio (speech-synthesis output)
    private RtpSender rtpSender;
    
    public enum SessionState {
        INITIALIZED,    // created, not yet started
        ACTIVE,         // actively sending/receiving
        PAUSED,         // temporarily paused
        TERMINATED      // stopped; no further traffic
    }
    
    /**
     * Creates an RTP session bound to the given addresses, initializing all
     * media parameters, the jitter buffer and the sender from configuration.
     *
     * @param sessionId     unique session identifier
     * @param localAddress  local receive address
     * @param remoteAddress remote peer address (must be resolved; its host
     *                      address is logged)
     * @param rtpConfig     media configuration (payload type, sample rate, ...)
     */
    public RtpSession(String sessionId, InetSocketAddress localAddress, InetSocketAddress remoteAddress, RtpConfig rtpConfig) {
        this.sessionId = sessionId;
        this.localAddress = localAddress;
        this.remoteAddress = remoteAddress;
        this.rtpConfig = rtpConfig;
        this.ssrc = null; // learned from the first inbound packet, or generated on first send
        this.startTime = System.currentTimeMillis();
        
        // Initialize RTP parameters from configuration
        this.payloadType = rtpConfig.getPayloadType();
        this.sampleRate = rtpConfig.getSampleRate();
        this.channels = rtpConfig.getChannels();
        this.frameSize = rtpConfig.getFrameSize();
        
        // Initialize the jitter buffer with configured capacity/timeout
        this.jitterBuffer = new JitterBuffer(rtpConfig.getJitterBufferSize(), rtpConfig.getJitterBufferTimeoutMs());
        
        // Initialize the RTP sender on an ephemeral local port (port 0) to
        // avoid clashing with the receive port
        InetSocketAddress senderLocalAddress = new InetSocketAddress("0.0.0.0", 0);
        this.rtpSender = new RtpSender(senderLocalAddress, remoteAddress);
        
        log.info("RTP session created: {} -> {}:{} with config: payloadType={}, sampleRate={}, channels={}, frameSize={}", 
                localAddress, remoteAddress.getAddress().getHostAddress(), remoteAddress.getPort(),
                payloadType, sampleRate, channels, frameSize);
    }
    
    /**
     * Sets the handler that inbound audio payloads are routed to.
     */
    public void setVoiceRecognitionHandler(VoiceRecognitionSessionHandler handler) {
        this.voiceRecognitionHandler = handler;
        log.debug("Voice recognition handler set for RTP session: {}", sessionId);
    }
    
    // Legacy API removed: speech recognition is managed exclusively through
    // VoiceRecognitionSessionHandler
    
    /**
     * Generates a random SSRC identifier for outbound packets.
     * Uses ThreadLocalRandom instead of Math.random() for contention-free,
     * better-distributed randomness; same non-negative range as before.
     */
    private int generateSsrc() {
        return ThreadLocalRandom.current().nextInt(Integer.MAX_VALUE);
    }
    
    /**
     * Builds the next outbound RTP packet for the given payload, assigning
     * payload type, a wrapping 16-bit sequence number, a monotonically
     * advancing timestamp and the session SSRC.
     *
     * @param payload raw audio payload bytes
     * @return the populated packet (not yet sent)
     */
    public RtpPacket createPacket(byte[] payload) {
        // Lazily generate the send-side SSRC if it was never learned
        if (ssrc == null) {
            ssrc = generateSsrc();
            log.info("Generated SSRC for sending: {} for session {}", ssrc, sessionId);
        }
        
        RtpPacket packet = new RtpPacket();
        packet.setPayloadType(payloadType);
        // RTP sequence numbers are 16-bit (RFC 3550); mask so the counter
        // wraps at 65536 instead of growing past the field width
        packet.setSequenceNumber(sequenceNumber.getAndIncrement() & 0xFFFF);
        packet.setTimestamp(timestamp.getAndAdd(frameSize));
        packet.setSsrc(ssrc);
        packet.setPayload(payload);
        
        return packet;
    }
    
    /**
     * Sends audio data to the remote peer, splitting it into RTP packets.
     * Used for speech-synthesis audio output.
     *
     * @param audioData raw audio bytes; null/empty is a no-op
     */
    public void sendAudioData(byte[] audioData) {
        if (audioData == null || audioData.length == 0) {
            log.debug("No audio data to send for session: {}", sessionId);
            return;
        }
        
        try {
            // Packetize by the configured frame size so the payload length of
            // each packet matches the per-packet timestamp increment used in
            // createPacket() (the previous hard-coded 160 could disagree with
            // frameSize and skew timestamps). Falls back to 160 bytes
            // (20 ms of 8 kHz G.711) when frameSize is unset.
            int maxPayloadSize = frameSize > 0 ? frameSize : 160;
            int offset = 0;
            
            while (offset < audioData.length) {
                int packetSize = Math.min(maxPayloadSize, audioData.length - offset);
                byte[] payload = new byte[packetSize];
                System.arraycopy(audioData, offset, payload, 0, packetSize);
                
                // Build and transmit one RTP packet
                RtpPacket packet = createPacket(payload);
                sendRtpPacket(packet);
                
                offset += packetSize;
                packetsSent.incrementAndGet();
                bytesSent.addAndGet(packetSize);
            }
            
            lastActivityTime.set(System.currentTimeMillis());
            log.debug("Sent {} bytes of audio data in {} packets for session: {}", 
                    audioData.length, (audioData.length + maxPayloadSize - 1) / maxPayloadSize, sessionId);
            
        } catch (Exception e) {
            log.error("Error sending audio data for session: {}", sessionId, e);
        }
    }
    
    /**
     * Transmits a single RTP packet, lazily starting the sender if needed.
     * Failures are logged but not propagated (best-effort media delivery).
     */
    private void sendRtpPacket(RtpPacket packet) {
        try {
            // Lazily start the sender on first use
            if (!rtpSender.isActive()) {
                rtpSender.start();
            }
            
            rtpSender.sendPacket(packet);
            
        } catch (Exception e) {
            log.error("Failed to send RTP packet for session: {}", sessionId, e);
            // A retry/recovery mechanism could be added here
        }
    }
    
    /**
     * Starts the RTP session (including the sender) and marks it ACTIVE.
     * On failure the session is marked TERMINATED.
     */
    public void start() {
        try {
            if (rtpSender != null && !rtpSender.isActive()) {
                rtpSender.start();
            }
            // Mark ACTIVE even when the sender was already running (e.g.
            // lazily started by sendRtpPacket); previously the state was left
            // INITIALIZED in that case and isActive() wrongly returned false
            state = SessionState.ACTIVE;
            log.info("RTP session started: {}", sessionId);
        } catch (Exception e) {
            log.error("Failed to start RTP session: {}", sessionId, e);
            state = SessionState.TERMINATED;
        }
    }
    
    /**
     * Stops the RTP session (including the sender), marking it TERMINATED
     * and recording the end time.
     */
    public void stop() {
        try {
            if (rtpSender != null) {
                rtpSender.stop();
            }
            state = SessionState.TERMINATED;
            endTime = System.currentTimeMillis();
            log.info("RTP session stopped: {}", sessionId);
        } catch (Exception e) {
            log.error("Error stopping RTP session: {}", sessionId, e);
        }
    }
    
    /**
     * @return true when the session state is ACTIVE and the sender (if any)
     *         is running
     */
    public boolean isActive() {
        return state == SessionState.ACTIVE && 
               (rtpSender == null || rtpSender.isActive());
    }
    
    /**
     * Handles an inbound RTP packet: validates it, updates statistics, feeds
     * the jitter buffer, and routes the audio payload to the voice
     * recognition handler when one is set.
     *
     * @param packet the received packet; invalid packets are dropped
     */
    public void handleReceivedPacket(RtpPacket packet) {
        if (!isValidPacket(packet)) {
            log.warn("Invalid RTP packet received: {}", packet);
            return;
        }
        
        packetsReceived.incrementAndGet();
        bytesReceived.addAndGet(packet.getPacketSize());
        lastActivityTime.set(System.currentTimeMillis());
        
        // Queue for ordered playout/consumption
        jitterBuffer.addPacket(packet);
        
        // Route the audio payload to the recognition handler, if any
        byte[] audioData = packet.getPayload();
        if (audioData != null && audioData.length > 0) {
            int payloadType = packet.getPayloadType();
            log.debug("RTP audio data received: session={}, size={} bytes, payload type={} ({})", 
                    sessionId, audioData.length, payloadType, packet.getPayloadTypeDescription());
            
            if (voiceRecognitionHandler != null) {
                try {
                    // Pass raw audio plus payload type; the handler decides
                    // whether transcoding is required
                    voiceRecognitionHandler.processAudioData(this, audioData, payloadType);
                    log.debug("Audio data routed to voice recognition handler: session={}, size={} bytes, payload type={}", 
                            sessionId, audioData.length, payloadType);
                } catch (Exception e) {
                    log.error("Error routing audio data to voice recognition handler: session={}", sessionId, e);
                }
            } else {
                log.debug("No voice recognition handler set for session: {}, audio data ignored", sessionId);
            }
        } else {
            log.warn("Empty audio payload in RTP packet for session: {}", sessionId);
        }
        
        log.debug("RTP packet received: seq={}, timestamp={}, size={}", 
                packet.getSequenceNumber(), packet.getTimestamp(), packet.getPacketSize());
    }
    
    /**
     * Validates an inbound packet. NOTE: this also learns session state as a
     * side effect — the remote SSRC from the first packet, and the payload
     * type from any audio packet that differs from the current one.
     *
     * @return true when the packet belongs to this session and carries audio
     */
    private boolean isValidPacket(RtpPacket packet) {
        if (packet == null || !packet.isValid()) {
            return false;
        }
        
        // Learn the remote SSRC from the first packet, then reject others
        if (ssrc == null) {
            ssrc = packet.getSsrc();
            log.info("Learned remote SSRC: {} for session {}", ssrc, sessionId);
        }
        if (!ssrc.equals(packet.getSsrc())) {
            log.debug("Ignore packet with unexpected SSRC: {} (expected {})", packet.getSsrc(), ssrc);
            return false;
        }
        
        // Learn/track the payload type; only audio payloads are accepted
        if (packet.isAudioPacket()) {
            if (payloadType != packet.getPayloadType()) {
                log.info("Learned payload type: {} (was {}), codec: {} for session {}", 
                        packet.getPayloadType(), payloadType, 
                        packet.getPayloadTypeDescription(), sessionId);
                payloadType = packet.getPayloadType();
            }
            return true;
        } else {
            log.warn("Received non-audio RTP packet: payload type {}", packet.getPayloadType());
            return false;
        }
    }
    

    /**
     * @return the payload of the next buffered packet, or null when the
     *         jitter buffer has nothing ready
     */
    public byte[] getNextAudioFrame() {
        RtpPacket packet = jitterBuffer.getNextPacket();
        if (packet != null) {
            return packet.getPayload();
        }
        return null;
    }
    
    /**
     * @return a snapshot of the session's configuration, counters and
     *         lifecycle timestamps
     */
    public RtpSessionStats getStats() {
        RtpSessionStats stats = new RtpSessionStats();
        stats.setSessionId(sessionId);
        stats.setLocalAddress(localAddress);
        stats.setRemoteAddress(remoteAddress);
        stats.setPayloadType(payloadType);
        stats.setSampleRate(sampleRate);
        stats.setChannels(channels);
        stats.setFrameSize(frameSize);
        stats.setSsrc(ssrc);
        stats.setPacketsSent(packetsSent.get());
        stats.setPacketsReceived(packetsReceived.get());
        stats.setBytesSent(bytesSent.get());
        stats.setBytesReceived(bytesReceived.get());
        stats.setLastActivityTime(lastActivityTime.get());
        stats.setState(state);
        stats.setStartTime(startTime);
        stats.setEndTime(endTime);
        stats.setDuration(System.currentTimeMillis() - startTime);
        
        return stats;
    }
    
    /**
     * Pauses the session (ACTIVE -> PAUSED); no-op in other states.
     */
    public void pause() {
        if (state == SessionState.ACTIVE) {
            state = SessionState.PAUSED;
            log.info("RTP session paused: {}", sessionId);
        }
    }
    
    /**
     * Resumes the session (PAUSED -> ACTIVE); no-op in other states.
     */
    public void resume() {
        if (state == SessionState.PAUSED) {
            state = SessionState.ACTIVE;
            log.info("RTP session resumed: {}", sessionId);
        }
    }
    
    /**
     * Terminates the session. Delegates to stop() so the RTP sender is
     * shut down as well.
     */
    public void terminate() {
        stop();
    }
    
    /**
     * @param timeoutMs inactivity threshold in milliseconds
     * @return true when no activity has been recorded within timeoutMs
     */
    public boolean isTimeout(long timeoutMs) {
        return System.currentTimeMillis() - lastActivityTime.get() > timeoutMs;
    }
    
    /**
     * Records activity now (resets the inactivity timeout).
     */
    public void updateActivity() {
        lastActivityTime.set(System.currentTimeMillis());
    }
    
    @Override
    public String toString() {
        return String.format("RtpSession{id=%s, state=%s, local=%s, remote=%s:%d, ssrc=%d}",
                sessionId, state, localAddress, remoteAddress.getAddress().getHostAddress(), 
                remoteAddress.getPort(), ssrc);
    }
}
