package com.sdg.kafkastream;


import java.nio.charset.StandardCharsets;
import java.util.regex.Pattern;

import org.apache.kafka.streams.processor.Processor;
import org.apache.kafka.streams.processor.ProcessorContext;

public class LogProcessor implements Processor<byte[], byte[]> {

    // Tracked-event log format:  gao:1|23|4.5|123221423423
    public static final String PREFIX_MSG = "gao:";
    private ProcessorContext context;

    @Override
    public void init(ProcessorContext processorContext) {
        this.context = processorContext;
    }

    /**
     * Filters each incoming record: values containing {@link #PREFIX_MSG}
     * have everything up to and including the prefix stripped, are trimmed,
     * and the remaining payload is forwarded downstream; all other records
     * (including null tombstones) are dropped.
     *
     * @param key   record key (ignored; an arbitrary constant key is forwarded)
     * @param value raw record value; may be null (Kafka tombstone)
     */
    @Override
    public void process(byte[] key, byte[] value) {
        // Kafka can deliver null values (tombstones); new String(null) would NPE.
        if (value == null) {
            return;
        }
        // Decode explicitly as UTF-8: the no-arg String(byte[]) constructor
        // uses the platform default charset, which varies between hosts.
        String input = new String(value, StandardCharsets.UTF_8);
        System.out.println("对应的value--" + input);
        // Keep only records that carry the tracking prefix.
        if (input.contains(PREFIX_MSG)) {
            // String.split takes a regex, so quote the prefix to match it literally.
            String[] parts = input.split(Pattern.quote(PREFIX_MSG));
            // Guard: a value that is nothing but the prefix (e.g. "gao:")
            // yields a single-element array; indexing parts[1] unconditionally
            // used to throw ArrayIndexOutOfBoundsException here.
            if (parts.length > 1) {
                String payload = parts[1].trim();
                // The downstream key is arbitrary; only the payload matters.
                context.forward("key".getBytes(StandardCharsets.UTF_8),
                        payload.getBytes(StandardCharsets.UTF_8));
            }
        }
    }

    @Override
    public void punctuate(long l) {
        // No scheduled/periodic work for this processor.
    }

    @Override
    public void close() {
        // No resources to release.
    }
}
