package com.dwd;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.utils.KafkaUtils;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;


/**
 * DWD-layer splitter job.
 *
 * <p>Reads ODS change-log records (JSON strings) from the {@code ods_base_db}
 * Kafka topic, drops dirty records that cannot be parsed, then uses side
 * outputs to route {@code order_info} / {@code order_detail} / {@code user_info}
 * rows to their respective DWD Kafka topics. Unrecognized tables stay on the
 * main stream.
 */
public class Three {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        // Source: raw ODS change-log topic.
        FlinkKafkaConsumer<String> ods_base_db = KafkaUtils.createConsumer("ods_base_db", "sdsa");
        DataStreamSource<String> stream = env.addSource(ods_base_db);
        stream.print();

        // Side-output tags for the three tables that get their own DWD topic.
        OutputTag<JSONObject> order_infoTag = new OutputTag<JSONObject>("order_info88") {};
        OutputTag<JSONObject> order_detailTag = new OutputTag<JSONObject>("order_detail88") {};
        OutputTag<JSONObject> user_infoTag = new OutputTag<JSONObject>("user_info88") {};

        // Parse, clean, and split in a single ProcessFunction:
        //  - malformed JSON (dirty data) is dropped instead of crashing the job;
        //  - known tables go to their side outputs, everything else to the main stream.
        SingleOutputStreamOperator<JSONObject> tableDs = stream.process(
                new ProcessFunction<String, JSONObject>() {
                    @Override
                    public void processElement(String value, ProcessFunction<String, JSONObject>.Context context, Collector<JSONObject> collector) throws Exception {
                        JSONObject jsonObject;
                        try {
                            jsonObject = JSON.parseObject(value);
                        } catch (Exception ignored) {
                            // Dirty record (unparseable JSON) — discard it.
                            // TODO(review): the spec also mentions invalid "date"-typed
                            // fields; add a date-format check here once the record
                            // schema is confirmed.
                            return;
                        }
                        if (jsonObject == null) {
                            // fastjson returns null for blank input — also dirty.
                            return;
                        }
                        // NOTE(review): assumes the table name lives under key
                        // "tableName" — confirm against the upstream CDC format.
                        String tableName = jsonObject.getString("tableName");
                        if ("order_info".equals(tableName)) {
                            context.output(order_infoTag, jsonObject);
                        } else if ("order_detail".equals(tableName)) {
                            context.output(order_detailTag, jsonObject);
                        } else if ("user_info".equals(tableName)) {
                            context.output(user_infoTag, jsonObject);
                        } else {
                            collector.collect(jsonObject);
                        }
                    }
                });

        DataStream<JSONObject> orderInfoDs = tableDs.getSideOutput(order_infoTag);
        DataStream<JSONObject> orderDetailDs = tableDs.getSideOutput(order_detailTag);
        DataStream<JSONObject> userInfoDs = tableDs.getSideOutput(user_infoTag);

        // Sink each split stream to its DWD topic as a JSON string.
        orderInfoDs.map(x -> JSON.toJSONString(x)).addSink(KafkaUtils.createProduer("dwd_orderInfo_db"));
        orderDetailDs.map(x -> JSON.toJSONString(x)).addSink(KafkaUtils.createProduer("dwd_orderDetail_db"));
        userInfoDs.map(x -> JSON.toJSONString(x)).addSink(KafkaUtils.createProduer("dwd_userInfo_db"));

        env.execute("Three");
    }
}
