package com.zzw.demo.text_A;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.zzw.demo.util.KafkaUtil;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;

public class Text03 {

    /**
     * Flink job: reads CDC log records from the ODS Kafka topic {@code ods_db_cdc},
     * discards dirty records (anything that fails JSON parsing), splits the stream
     * by source table using side outputs, and writes each sub-stream to its DWD
     * Kafka topic:
     * <ul>
     *   <li>{@code user_info}    -&gt; main stream -&gt; {@code dwd_user_info}</li>
     *   <li>{@code order_info}   -&gt; side output -&gt; {@code dwd_order_info}</li>
     *   <li>{@code order_detail} -&gt; side output -&gt; {@code dwd_order_detail}</li>
     *   <li>unparseable records  -&gt; side output -&gt; {@code error}</li>
     * </ul>
     * Records from any other table are intentionally dropped.
     *
     * @param args unused
     * @throws Exception if job submission or execution fails
     */
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Parallelism 1 keeps console output ordered for this demo job.
        env.setParallelism(1);

        FlinkKafkaConsumer<String> consumer = KafkaUtil.kafkaSource("ods_db_cdc", "text03");
        DataStreamSource<String> source = env.addSource(consumer);

        // Side-output tags. The anonymous-subclass bodies ({}) are required so
        // Flink can capture the String type argument at runtime (type erasure).
        OutputTag<String> orderInfoTag = new OutputTag<String>("order_info"){};
        OutputTag<String> orderDetailTag = new OutputTag<String>("order_detail"){};
        // Side output for dirty (unparseable) records.
        OutputTag<String> dirtyTag = new OutputTag<String>("dirty"){};

        // Split the stream: user_info stays on the main stream, order tables go
        // to side outputs, and anything unparseable goes to the dirty stream.
        SingleOutputStreamOperator<String> process = source.process(new ProcessFunction<String, String>() {

            @Override
            public void processElement(String value, ProcessFunction<String, String>.Context context, Collector<String> collector) throws Exception {
                try {
                    JSONObject jsonObject = JSON.parseObject(value);
                    if (jsonObject == null) {
                        // parseObject returns null for null/empty/"null" input;
                        // treat it as dirty explicitly rather than relying on a
                        // NullPointerException below to reach the catch block.
                        context.output(dirtyTag, value);
                        return;
                    }
                    String table = jsonObject.getString("table");
                    if ("user_info".equals(table)) {
                        collector.collect(value);
                    } else if ("order_info".equals(table)) {
                        context.output(orderInfoTag, value);
                    } else if ("order_detail".equals(table)) {
                        context.output(orderDetailTag, value);
                    }
                    // Records from any other table are intentionally dropped.
                } catch (Exception e) {
                    // JSON parsing failed -> route the raw record to the dirty stream.
                    context.output(dirtyTag, value);
                }
            }
        });

        // Print the main (user_info) stream.
        process.print();

        // Extract and print the side-output streams.
        DataStream<String> orderInfoStream = process.getSideOutput(orderInfoTag);
        orderInfoStream.print("orderInfoStream>>>");
        DataStream<String> orderDetailStream = process.getSideOutput(orderDetailTag);
        orderDetailStream.print("orderDetailStream>>>");

        // Extract and print the dirty-data stream.
        DataStream<String> dirtyStream = process.getSideOutput(dirtyTag);
        dirtyStream.print("dirtyStream>>>");

        // Sink each stream to its DWD Kafka topic.
        process.addSink(KafkaUtil.kafkaProducer("dwd_user_info"));
        orderInfoStream.addSink(KafkaUtil.kafkaProducer("dwd_order_info"));
        orderDetailStream.addSink(KafkaUtil.kafkaProducer("dwd_order_detail"));
        dirtyStream.addSink(KafkaUtil.kafkaProducer("error"));

        env.execute();
    }
}
