package mn10;


import com.alibaba.fastjson.JSONObject;
import com.bw.gmall.realtime.utils.JedisUtil;
import com.bw.gmall.realtime.utils.MyKafkaUtil;
import com.bw.gmall.realtime.utils.MysqlUtil;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.restartstrategy.RestartStrategies;
import org.apache.flink.runtime.state.CheckpointStorage;
import org.apache.flink.runtime.state.filesystem.FsStateBackend;
import org.apache.flink.runtime.state.hashmap.HashMapStateBackend;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;

public class DemoTest02 {

    /**
     * Flink job: consumes business change records from Kafka topic {@code topic_db},
     * caches the dimension tables ({@code sku_info}, {@code user_info},
     * {@code province_info}) in Redis, and routes each dimension record to its own
     * side output. Records from any other table stay on the main stream and are
     * printed for inspection.
     *
     * @param args unused
     * @throws Exception if the Flink job fails to submit or execute
     */
    public static void main(String[] args) throws Exception {

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // 1) Exactly-once checkpointing every 6 seconds, state kept in a
        //    file-system backend so Kafka offsets survive restarts.
        env.enableCheckpointing(6000L, CheckpointingMode.EXACTLY_ONCE);
        // Abort any single checkpoint that takes longer than 10 minutes.
        env.getCheckpointConfig().setCheckpointTimeout(10 * 60 * 1000L);
        env.setStateBackend(new FsStateBackend("file:///path/to/checkpoint/directory"));

        // Needed when running locally against an HDFS checkpoint path:
        // System.setProperty("HADOOP_USER_NAME", "root");

        // 2) Side-output tags for the dimension tables. The anonymous subclasses
        //    are required so Flink can capture the generic JSONObject type.
        final OutputTag<JSONObject> skuInfoTag = new OutputTag<JSONObject>("sku_info") {};
        final OutputTag<JSONObject> userInfoTag = new OutputTag<JSONObject>("user_info") {};
        final OutputTag<JSONObject> provinceInfoTag = new OutputTag<JSONObject>("province_info") {};

        DataStreamSource<String> dataStreamSource =
                env.addSource(MyKafkaUtil.getFlinkKafkaConsumer("topic_db", "DemoTest02xx"));

        // 3) For each dimension record: write it to Redis (key built from the table
        //    name and the row id, e.g. "user_info" + id 1 -> value is the full JSON
        //    record) and emit it on the matching side output.
        SingleOutputStreamOperator<JSONObject> process =
                dataStreamSource.process(new ProcessFunction<String, JSONObject>() {
                    @Override
                    public void processElement(String value, Context ctx, Collector<JSONObject> out)
                            throws Exception {
                        JSONObject jsonObject = JSONObject.parseObject(value);
                        String table = jsonObject.getString("table");

                        // Constant-first equals avoids an NPE when the record has no
                        // "table" field; such records fall through to the main stream.
                        // NOTE(review): getJSONObject("data") is assumed non-null for
                        // dimension records — confirm against the topic_db schema.
                        if ("sku_info".equals(table)) {
                            JedisUtil.insertRedis(table,
                                    jsonObject.getJSONObject("data").getString("id"), jsonObject);
                            ctx.output(skuInfoTag, jsonObject);
                        } else if ("user_info".equals(table)) {
                            JedisUtil.insertRedis(table,
                                    jsonObject.getJSONObject("data").getString("id"), jsonObject);
                            ctx.output(userInfoTag, jsonObject);
                        } else if ("province_info".equals(table)) {
                            JedisUtil.insertRedis(table,
                                    jsonObject.getJSONObject("data").getString("id"), jsonObject);
                            ctx.output(provinceInfoTag, jsonObject);
                        } else {
                            out.collect(jsonObject);
                        }
                    }
                });

        process.print("---------->");
        env.execute("DemoTest02");
    }
}
