package yuekao8.dim;

import com.alibaba.fastjson.JSON;
import org.apache.flink.runtime.state.filesystem.FsStateBackend;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;

import java.util.HashMap;
import java.util.Map;

import yuekao8.util.KafkaUtil;
import yuekao8.util.MyHbase;

public class ReadKafka {

    /**
     * Flink streaming job that consumes CDC-style records from the Kafka topic
     * {@code ods_base_topic}, splits them by the {@code "table"} field via side outputs,
     * and routes each slice to its sink:
     * <ul>
     *   <li>order_info / order_detail &rarr; Kafka topics {@code dwd_order_info_topic} /
     *       {@code dwd_order_detail_topic}</li>
     *   <li>region / product_spu / product_sku / shop &rarr; HBase (via {@link MyHbase})</li>
     * </ul>
     *
     * @param args unused
     * @throws Exception if the Flink job fails to start or execute
     */
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        // Requirement: checkpoint every 5 seconds with an FsStateBackend so Kafka offsets
        // and operator state survive restarts.
        env.enableCheckpointing(5000);
        // TODO(review): checkpoint path is a placeholder — point at the cluster's HDFS/FS path.
        env.setStateBackend(new FsStateBackend("file:///tmp/flink/checkpoints"));

        // 3.2) Read ods_base_topic and split order_info / order_detail into their own
        //      Kafka topics via side outputs.
        DataStreamSource<String> streamSource = env.addSource(KafkaUtil.kafkaSource("ods_base_topic"));

        // One side-output tag per source table name carried in each record.
        // (Anonymous subclasses so the generic type survives erasure.)
        final OutputTag<String> orderInfoTag = new OutputTag<String>("order_info") {
        };
        final OutputTag<String> orderDetailTag = new OutputTag<String>("order_detail") {
        };
        final OutputTag<String> regionTag = new OutputTag<String>("region") {
        };
        final OutputTag<String> productSpuTag = new OutputTag<String>("product_spu") {
        };
        final OutputTag<String> shopTag = new OutputTag<String>("shop") {
        };
        final OutputTag<String> productSkuTag = new OutputTag<String>("product_sku") {
        };

        // Dispatch table: table name -> side-output tag. Replaces six copy-pasted ifs
        // and stops checking once a record's table is known.
        final Map<String, OutputTag<String>> tagByTable = new HashMap<>();
        tagByTable.put("order_info", orderInfoTag);
        tagByTable.put("order_detail", orderDetailTag);
        tagByTable.put("region", regionTag);
        tagByTable.put("product_spu", productSpuTag);
        tagByTable.put("shop", shopTag);
        tagByTable.put("product_sku", productSkuTag);

        SingleOutputStreamOperator<String> process = streamSource.process(new ProcessFunction<String, String>() {
            @Override
            public void processElement(String s, ProcessFunction<String, String>.Context context, Collector<String> collector) throws Exception {
                // Route by the "table" field; records for unknown tables are dropped,
                // matching the original behavior.
                String table = JSON.parseObject(s).getString("table");
                OutputTag<String> tag = tagByTable.get(table);
                if (tag != null) {
                    context.output(tag, s);
                }
            }
        });

        process.getSideOutput(orderInfoTag).addSink(KafkaUtil.kafkaSink("dwd_order_info_topic"));
        process.getSideOutput(orderDetailTag).addSink(KafkaUtil.kafkaSink("dwd_order_detail_topic"));

        // 3.3) Dimension tables go to HBase (dim_region, dim_product_spu, ...).
        process.getSideOutput(productSpuTag).addSink(new MyHbase());
        process.getSideOutput(productSkuTag).addSink(new MyHbase());
        process.getSideOutput(regionTag).addSink(new MyHbase());
        process.getSideOutput(shopTag).addSink(new MyHbase());

        env.execute();
    }
}
