package com.itzmn.tmall.kafkastream;

/*
 * @Author: 张梦楠
 * @Date: 2019/7/7 17:20
 * 简书：https://www.jianshu.com/u/d611be10d1a6
 * 码云：https://gitee.com/zhangqiye
 * @Description:  Kafka Streams processing class: extracts the payload from log lines and forwards it to the next topic
 */

import java.nio.charset.StandardCharsets;

import org.apache.kafka.streams.processor.Processor;
import org.apache.kafka.streams.processor.ProcessorContext;

/**
 * Kafka Streams processor that scans each incoming log line for the
 * {@code logger:>>>>} marker, extracts the text that follows it, and forwards
 * the trimmed payload downstream. Records without the marker are dropped
 * (only the offset is committed).
 */
public class LogProcesser implements Processor<byte[], byte[]> {

    /** Marker that separates log framing from the payload we forward. */
    private static final String LOG_MARKER = "logger:>>>>";

    private ProcessorContext context;

    @Override
    public void init(ProcessorContext context) {
        // Keep the context so process() can forward records and commit offsets.
        this.context = context;
    }

    /**
     * Core processing: decode the record, extract the payload after the
     * marker, and forward it to the downstream node.
     *
     * @param key   record key (ignored; a constant key is forwarded instead)
     * @param value raw log line bytes; may be {@code null} for tombstone records
     */
    @Override
    public void process(byte[] key, byte[] value) {
        if (value == null) {
            // Tombstone / empty record — nothing to decode or forward.
            // (The original code threw NullPointerException here.)
            return;
        }
        // Decode with an explicit charset: new String(byte[]) uses the
        // platform default and corrupts non-ASCII log text on some hosts.
        String line = new String(value, StandardCharsets.UTF_8);
        if (line.contains(LOG_MARKER)) {
            System.out.println("LogProcess process data:" + line);
            String[] split = line.split(LOG_MARKER);
            // split() drops trailing empty strings, so a line ending exactly
            // at the marker yields a length-1 array; the original split[1]
            // threw ArrayIndexOutOfBoundsException in that case.
            if (split.length > 1) {
                context.forward("LogProcess".getBytes(StandardCharsets.UTF_8),
                        split[1].trim().getBytes(StandardCharsets.UTF_8));
            }
        }
        // Commit after every record, marker or not, so offsets keep advancing.
        context.commit();
    }

    @Override
    public void punctuate(long timestamp) {
        // Deprecated scheduled callback — no periodic work needed.
    }

    @Override
    public void close() {
        // No resources held beyond the framework-managed context.
    }
}
