package com.bw.data_summary;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import net.bwie.realtime.jtp.KafkaUtil;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.util.Collector;

public class Kafkasource {

    /**
     * Flink job entry point: consumes CDC-style JSON records from the
     * {@code dwd_entrance_guard_log} Kafka topic, extracts the {@code "after"}
     * payload (the post-change image of a record), and writes the cleaned
     * payloads to the {@code clean_dwd_entrance_guard_log} topic.
     *
     * @param args command-line arguments (unused)
     * @throws Exception if the Flink job fails to start or execute
     */
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        // Source: raw entrance-guard log records from Kafka.
        DataStream<String> entranceGuardLog = KafkaUtil.consumerKafka(env, "dwd_entrance_guard_log");

        // Extract the "after" field from each JSON record; drop records
        // that are malformed or have no "after" payload (e.g. deletes).
        SingleOutputStreamOperator<String> processed = entranceGuardLog.process(new ProcessFunction<String, String>() {
            @Override
            public void processElement(String s, ProcessFunction<String, String>.Context context, Collector<String> collector) throws Exception {
                JSONObject jsonObject;
                try {
                    jsonObject = JSON.parseObject(s);
                } catch (RuntimeException ignored) {
                    // Best-effort cleaning: skip invalid JSON instead of
                    // failing the whole streaming job on one bad record.
                    return;
                }
                if (jsonObject == null) {
                    // parseObject returns null for inputs such as "null".
                    return;
                }
                String after = jsonObject.getString("after");
                if (after != null) {
                    collector.collect(after);
                }
            }
        });

        // Sink: publish the cleaned payloads back to Kafka.
        KafkaUtil.producerKafka(processed, "clean_dwd_entrance_guard_log");

        env.execute("KFKsourceClean");
    }
}
