package com.bw.demo;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.bw.bean.Disaply;
import com.bw.bean.TableProcess;
import com.bw.func.TableProcessHoKFunction;
import com.bw.utils.ClickHouseUtils;
import com.bw.utils.MyKafkaUtil;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.runtime.state.filesystem.FsStateBackend;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;
import org.apache.flink.streaming.connectors.kafka.KafkaSerializationSchema;
import org.apache.flink.util.OutputTag;
import org.apache.kafka.clients.producer.ProducerRecord;

import javax.annotation.Nullable;

/**
 * @ClassName Demo1
 * @Description Flink streaming job: reads business/log data from Kafka,
 *              prints it, and writes exposure (display) logs to ClickHouse.
 * @Author SXLWTT
 * @Date 2022/4
 * @Version 1.0
 **/
public class Demo1 {

    /**
     * Job entry point. Builds a Flink streaming pipeline that:
     * <ol>
     *   <li>creates a stream environment with parallelism 1 (checkpointing
     *       to HDFS is present but currently commented out),</li>
     *   <li>consumes business data from Kafka topic {@code ods_base_db_m},</li>
     *   <li>consumes display (exposure) logs from topic {@code dwd_display_log},
     *       prints them, maps them to {@code Disaply} beans, and</li>
     *   <li>sinks the beans into the ClickHouse {@code display} table.</li>
     * </ol>
     *
     * @param args unused command-line arguments
     * @throws Exception propagated from {@link StreamExecutionEnvironment#execute()}
     */
    public static void main(String[] args) throws Exception {

        // 4) Build the stream environment with parallelism 1. The exercise also
        //    asks for a 10s exactly-once checkpoint with HDFS state backend;
        //    that configuration is kept below but disabled.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
//        env.enableCheckpointing(10000);
//        env.getCheckpointConfig().setCheckpointingMode(CheckpointingMode.EXACTLY_ONCE);
//        env.setStateBackend(new FsStateBackend("hdfs://hadoop102:8020/"));

        // Kafka topics and consumer group used by this job.
        String topic = "ods_base_db_m";
        String topic2 = "dwd_display_log";
        String topic3 = "ods_base_log";
        String groupId = "Sxl005";

        // 5) Read business data from Kafka and (optionally) print it.
        FlinkKafkaConsumer<String> kafkaSource = MyKafkaUtil.getKafkaSource(topic, groupId);
        kafkaSource.setStartFromEarliest();
        DataStreamSource<String> stringDataStreamSource = env.addSource(kafkaSource);

        // Parse each record into a JSONObject.
        SingleOutputStreamOperator<JSONObject> map = stringDataStreamSource.map(json -> JSON.parseObject(json));
        //map.print("打印到控制台");

        // 6) Route fact-table data to the main stream and write it back to Kafka;
        // 7) route dimension-table data to a side output destined for HBase.
        //    (Kept for reference, currently disabled.)
//        OutputTag<JSONObject> outputTag = new OutputTag<JSONObject>(TableProcess.SINK_TYPE_HBASE){};
//        SingleOutputStreamOperator<JSONObject> process = map.process(new TableProcessHoKFunction(outputTag));
//        DataStream<JSONObject> sideOutput = process.getSideOutput(outputTag);
//        sideOutput.print("hbase>>>");
//
//        FlinkKafkaProducer<JSONObject> kafkaSinkBySchema = MyKafkaUtil.getKafkaSinkBySchema(new KafkaSerializationSchema<JSONObject>() {
//            @Override
//            public ProducerRecord<byte[], byte[]> serialize(JSONObject jsonObject, @Nullable Long aLong) {
//                String sink_table = jsonObject.getString("sink_table");
//                JSONObject data = jsonObject.getJSONObject("data");
//                return new ProducerRecord<>(sink_table, data.toString().getBytes());
//            }
//        });
//        process.addSink(kafkaSinkBySchema);

        // 8) Read log data (topic "ods_base_log") and print it. (Disabled.)
        //FlinkKafkaConsumer<String> kafkaSource2 = MyKafkaUtil.getKafkaSource(topic3, groupId);
        //kafkaSource2.setStartFromEarliest();
        //DataStreamSource<String> stringDataStreamSource2 = env.addSource(kafkaSource2);
        //SingleOutputStreamOperator<JSONObject> map2 = stringDataStreamSource2.map(json -> JSON.parseObject(json));
        //map2.print("打印到控制台");

        // 9) Log level is configured to WARN via log4j properties (not in code).
        // 10) Write exposure logs to ClickHouse.
        FlinkKafkaConsumer<String> kafkaSource3 = MyKafkaUtil.getKafkaSource(topic2, groupId);
        // FIX: was kafkaSource.setStartFromEarliest() — a copy-paste bug that left
        // the display-log consumer without an explicit start offset.
        kafkaSource3.setStartFromEarliest();
        DataStreamSource<String> stringDataStreamSource3 = env.addSource(kafkaSource3);

        // Parse each display-log record into a JSONObject and print it.
        SingleOutputStreamOperator<JSONObject> map3 = stringDataStreamSource3.map(json -> JSON.parseObject(json));
        map3.print("打印到控制台");

        // 11) Convert JSON to Disaply beans for the JDBC sink.
        SingleOutputStreamOperator<Disaply> map1 = map3.map(new MapFunction<JSONObject, Disaply>() {
            @Override
            public Disaply map(JSONObject jsonObject) throws Exception {
                return jsonObject.toJavaObject(Disaply.class);
            }
        });
        // NOTE(review): "order" is a reserved keyword in ClickHouse SQL — if this
        // insert fails, the column likely needs back-quoting (`order`). Verify
        // against the actual table DDL before changing the statement.
        map1.addSink(ClickHouseUtils.getJdbcSink("insert into display(display_type," +
                "page_id," +
                "item," +
                "item_type," +
                "pos_id," +
                "order) values(?,?,?,?,?,?);"));

        env.execute();
    }
}
