package com.bw.dim;

import com.alibaba.fastjson.JSON;
import com.bw.utils.DimRedisSink;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.runtime.state.filesystem.FsStateBackend;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.CheckpointConfig;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;

import java.util.Properties;

/*
 * Pipeline: jar -> MySQL -> Kafka topic "topic-db" -> this class -> Kafka / Redis
 *
 * Reads CDC-captured fact and dimension tables:
 *   - Fact tables order_info and order_detail are written to Kafka.
 *   - Dimension tables sku_info, user_info and base_province are routed via
 *     side outputs and stored in Redis.
 */

public class FlinkTM2 {

    /**
     * Job entry point.
     *
     * <p>Consumes CDC change records (JSON, one per message) from the Kafka topic
     * {@code topic-db-yk1}, splits them by their {@code "tb"} (source table) field
     * using side outputs, then:
     * <ul>
     *   <li>writes the fact tables {@code order_info} and {@code order_detail}
     *       (the {@code "data"} payload only) to Kafka topics
     *       {@code order_info_yk1} / {@code order_detail_yk1};</li>
     *   <li>sinks the dimension tables {@code sku_info}, {@code user_info} and
     *       {@code base_province} to Redis via {@link DimRedisSink}
     *       (value type String, key format e.g. {@code user:1}).</li>
     * </ul>
     *
     * @param args unused
     * @throws Exception if the Flink job fails to start or execute
     */
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        // 1) Checkpoint every 5 seconds to a filesystem state backend, and retain
        //    externalized checkpoints on cancellation so the job can be resumed
        //    with `flink run -s <checkpoint-path> ...`.
        env.enableCheckpointing(5000);
        env.setStateBackend(new FsStateBackend("file:///D:\\javaProject\\FlinkMonth2203A\\ck1"));
        env.getCheckpointConfig().enableExternalizedCheckpoints(
                CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);

        // Example resume command:
        // ./bin/flink run -t yarn-per-job --detached -c com.bw.yk01.FlinkTM2 ./examples/streaming/TopSpeedWindowing.jar -s :/opt/modle/ck/21302714979264324

        Properties properties = new Properties();
        properties.setProperty("bootstrap.servers", "hadoop102:9092");
        properties.setProperty("group.id", "group11");
        FlinkKafkaConsumer<String> flinkKafkaConsumer =
                new FlinkKafkaConsumer<>("topic-db-yk1", new SimpleStringSchema(), properties);
        flinkKafkaConsumer.setStartFromEarliest();
        DataStream<String> stream = env.addSource(flinkKafkaConsumer);
        //stream.print();

        // 2) Route each record by its "tb" field into side outputs.
        // Sample record:
        // {"op":"c","data":{"area_code":"410000","name":"河南","region_id":"4","iso_3166_2":"CN-HA","id":23,"iso_code":"CN-41"},"db":"gmall","tb":"base_province"}
        OutputTag<String> tagSku = new OutputTag<String>("sku"){};
        OutputTag<String> tagUser = new OutputTag<String>("user"){};
        OutputTag<String> tagProvince = new OutputTag<String>("province"){};
        OutputTag<String> tagDetail = new OutputTag<String>("od"){};
        SingleOutputStreamOperator<String> processDS = stream.process(new ProcessFunction<String, String>() {
            @Override
            public void processElement(String s, ProcessFunction<String, String>.Context context, Collector<String> collector) throws Exception {
                String tb = JSON.parseObject(s).getString("tb");
                if ("sku_info".equals(tb)) {
                    context.output(tagSku, s);
                } else if ("user_info".equals(tb)) {
                    context.output(tagUser, s);
                } else if ("base_province".equals(tb)) {
                    context.output(tagProvince, s);
                } else if ("order_detail".equals(tb)) {
                    context.output(tagDetail, s);
                } else if ("order_info".equals(tb)) {
                    // Main output carries order_info only. Previously any unmatched
                    // table fell through here and was written to the order_info_yk1
                    // topic; unexpected tables are now dropped instead.
                    collector.collect(s);
                }
            }
        });
        //processDS.print("order_info>>>>");
        //processDS.getSideOutput(tagSku).print("sku_info>>>>");

        // 3) Fact tables -> Kafka. Only the "data" payload (the row itself) is
        //    written, not the full CDC envelope.
        FlinkKafkaProducer<String> myProducer1 = new FlinkKafkaProducer<>("order_info_yk1", new SimpleStringSchema(), properties);
        processDS.map(x -> JSON.parseObject(x).getString("data")).addSink(myProducer1);
        FlinkKafkaProducer<String> myProducer2 = new FlinkKafkaProducer<>("order_detail_yk1", new SimpleStringSchema(), properties);
        processDS.getSideOutput(tagDetail).map(x -> JSON.parseObject(x).getString("data")).addSink(myProducer2);

        // Dimension tables -> Redis. DimRedisSink derives the key/value from the
        // record (e.g. key "user:1", value = row JSON) — see its implementation.
        processDS.getSideOutput(tagSku).addSink(new DimRedisSink());
        processDS.getSideOutput(tagUser).addSink(new DimRedisSink());
        processDS.getSideOutput(tagProvince).addSink(new DimRedisSink());

        env.execute("FlinkTM2-dim-routing");
    }
}
