package com.kafka.model;

import com.kafka.utils.MsgPackageUtil;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
import io.netty.channel.ChannelHandlerContext;
import io.netty.handler.codec.ByteToMessageDecoder;

import java.util.List;

/**
 * 网络数据接收类
 */
/**
 * Network data receiving class: a Netty frame decoder that splits the inbound
 * byte stream into complete messages. The frame boundary is derived from a
 * length field inside a fixed-size header, whose layout (offset, width,
 * encoding, endianness, whether the length includes the header itself) is
 * described by {@link TcpServerConfig}.
 */
public class TcpServerDecoder extends ByteToMessageDecoder {

    public TcpServerConfig serverConfig;

    public TcpServerDecoder(TcpServerConfig serverConfig) {
        this.serverConfig = serverConfig;
    }

    /**
     * Decodes one complete frame from {@code bufferIn}, if fully buffered.
     *
     * @param ctx      the handler context (unused here)
     * @param bufferIn cumulated inbound bytes; left untouched until a whole
     *                 frame is available, then advanced past that frame
     * @param out      receives the decoded frame as a {@code byte[]} including
     *                 the header
     * @throws Exception if the length field cannot be decoded, or it yields an
     *                   impossible frame length
     */
    @Override
    protected void decode(ChannelHandlerContext ctx, ByteBuf bufferIn, List<Object> out) throws Exception {
        int headLen = serverConfig.getHeadlength();
        // Not enough bytes for the header yet; wait for more data.
        if (bufferIn.readableBytes() < headLen) {
            return;
        }

        boolean bigendian = !"little-endian".equalsIgnoreCase(serverConfig.getEndian());

        // Copy the length field straight out of the inbound buffer. This avoids
        // the previous intermediate ByteBuf, which leaked if unpackMsgLen (or
        // the copy itself) threw before release() was reached, and which relied
        // on the heap-only ByteBuf.array() accessor.
        int startpos = serverConfig.getHeadstartpos();
        int headcopylen = serverConfig.getHeadcopylen();
        byte[] messageHead = new byte[headcopylen];
        bufferIn.getBytes(bufferIn.readerIndex() + startpos, messageHead);

        // Decode the body length value from the length field.
        int msgLength = MsgPackageUtil.unpackMsgLen(messageHead, serverConfig.getHeadcodetype(), bigendian);

        // Full frame length: the length field may or may not already include
        // the header itself.
        int msgFullLength = serverConfig.isHeadincself() ? msgLength : headLen + msgLength;

        // A corrupt length field (negative, or smaller than the header we have
        // already required) can never be satisfied; failing fast keeps the
        // stream from silently desynchronizing.
        if (msgFullLength < headLen) {
            throw new IllegalStateException(
                    "Invalid decoded frame length " + msgFullLength + " (header length " + headLen + ")");
        }

        // The frame has not fully arrived yet; wait for more data.
        if (bufferIn.readableBytes() < msgFullLength) {
            return;
        }

        byte[] buff = new byte[msgFullLength];
        bufferIn.readBytes(buff);
        out.add(buff);
    }
}
