package yuekao1.dim;

import com.alibaba.fastjson.JSON;
import org.apache.flink.runtime.state.filesystem.FsStateBackend;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;
import yuekao1.util.KafkaUtil;
import yuekao1.util.RedisUtil;

public class TagTable {
    /**
     * Flink streaming job that consumes raw business-table change records
     * (JSON strings) from the Kafka topic {@code topic_db}, routes each record
     * to a side output according to its source table (sku_info / user_info /
     * base_province), and sinks every dimension stream into Redis.
     */
    public static void main(String[] args) throws Exception {
        // 1) Streaming environment: checkpoint every 5 seconds with a
        //    filesystem state backend, as required by the task description.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        env.enableCheckpointing(5000L);
        // TODO(review): replace with the real checkpoint directory (e.g. HDFS) for production.
        env.setStateBackend(new FsStateBackend("file:///tmp/flink-checkpoints"));

        // 2) Split the single Kafka stream into per-table side outputs.
        DataStreamSource<String> streamSource = env.addSource(KafkaUtil.kafkaSource("topic_db"));

        // Anonymous subclasses so the generic type survives erasure.
        OutputTag<String> skuInfoTag = new OutputTag<String>("sku_info") {};
        // BUG FIX: this tag was previously created with the id "sku_info",
        // making it equal to skuInfoTag (OutputTag identity is id + type),
        // so user_info records were merged into the sku_info side output.
        OutputTag<String> userInfoTag = new OutputTag<String>("user_info") {};
        OutputTag<String> baseProvinceTag = new OutputTag<String>("base_province") {};

        SingleOutputStreamOperator<String> process = streamSource.process(new ProcessFunction<String, String>() {
            @Override
            public void processElement(String value, ProcessFunction<String, String>.Context ctx, Collector<String> out) throws Exception {
                // Skip empty messages instead of letting parseObject throw.
                if (value == null || value.isEmpty()) {
                    return;
                }
                // NOTE(review): assumes each record is a JSON object whose "tb"
                // field names the source table — confirm against KafkaUtil's producer format.
                String table = JSON.parseObject(value).getString("tb");
                // A record belongs to exactly one table, so the branches are exclusive.
                if ("sku_info".equals(table)) {
                    ctx.output(skuInfoTag, value);
                } else if ("user_info".equals(table)) {
                    ctx.output(userInfoTag, value);
                } else if ("base_province".equals(table)) {
                    ctx.output(baseProvinceTag, value);
                }
            }
        });

        // 3) Persist each dimension stream into Redis. Values are stored as
        //    plain strings, e.g. key "user:1" -> the raw JSON row.
        process.getSideOutput(skuInfoTag).addSink(new RedisUtil());
        process.getSideOutput(userInfoTag).addSink(new RedisUtil());
        process.getSideOutput(baseProvinceTag).addSink(new RedisUtil());

        env.execute("TagTable");
    }
}
