package cn._51doit.live.jobs;

import cn._51doit.live.deserializer.MyKafkaDeserializationSchema;
import cn._51doit.live.pojo.DataBean;
import cn._51doit.live.udf.JsonToBeanFunctionV2;
import cn._51doit.live.utils.Constants;
import cn._51doit.live.utils.FlinkUtils;
import com.alibaba.fastjson.JSON;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.utils.ParameterTool;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;

import java.util.Properties;

/**
 * Pre-processes Kafka data with Flink before it is consumed downstream.
 *
 * Typical pre-processing steps include:
 * data cleansing/filtering, dimension joins (broadcast state, async I/O), data completion,
 * type conversion (csv -> json or avro), data masking (encrypting fields with e.g. md5),
 * and topic splitting.
 */
public class PreETL {

    public static void main(String[] args) throws Exception {

        // Job configuration is read from the properties file passed as the first program argument.
        ParameterTool parameterTool = ParameterTool.fromPropertiesFile(args[0]);
        DataStream<Tuple2<String, String>> tpStream = FlinkUtils.createKafkaStreamWithId(parameterTool, MyKafkaDeserializationSchema.class);
        // Type conversion and filtering: raw (id, json) tuples -> DataBean records.
        SingleOutputStreamOperator<DataBean> beanStream = tpStream.process(new JsonToBeanFunctionV2());

        // Split the stream into per-topic substreams using side outputs.
        OutputTag<DataBean> launchTag = new OutputTag<DataBean>("launch-tag") {};
        OutputTag<DataBean> productTag = new OutputTag<DataBean>("product-tag") {};
        // ... many more tags would be defined here

        SingleOutputStreamOperator<DataBean> mainStream = beanStream.process(new ProcessFunction<DataBean, DataBean>() {
            @Override
            public void processElement(DataBean bean, Context ctx, Collector<DataBean> out) throws Exception {
                String eventId = bean.getEventId();
                if (Constants.APP_LAUNCH.equals(eventId)) {
                    ctx.output(launchTag, bean);
                } else if (eventId != null && eventId.startsWith("product")) {
                    // BUGFIX: product events were mistakenly routed to launchTag, which left
                    // productStream (and the "topic-product" sink) permanently empty.
                    // The null check guards startsWith: getEventId() may return null, and the
                    // preceding null-safe equals() would let null fall through to here.
                    ctx.output(productTag, bean);
                } //...
                // Every record also flows into the main stream.
                // NOTE(review): the original comment claimed only untagged data is collected,
                // but the collect is unconditional (tagged records are duplicated into the
                // main stream) — confirm whether this is intended for the ClickHouse sink.
                out.collect(bean);
            }
        });

        DataStream<DataBean> launchStream = mainStream.getSideOutput(launchTag);

        DataStream<DataBean> productStream = mainStream.getSideOutput(productTag);

        // Write the split streams back to Kafka, one topic per side output.
        String bootstrapServers = parameterTool.getRequired("bootstrap.servers");
        Properties properties = new Properties();
        properties.setProperty("bootstrap.servers", bootstrapServers);
        FlinkKafkaProducer<String> kafkaProducer1 = new FlinkKafkaProducer<>("topic-launch", new SimpleStringSchema(), properties);
        launchStream.map(JSON::toJSONString).addSink(kafkaProducer1);


        FlinkKafkaProducer<String> kafkaProducer2 = new FlinkKafkaProducer<>("topic-product", new SimpleStringSchema(), properties);
        productStream.map(JSON::toJSONString).addSink(kafkaProducer2);
        //...


        // Write the main stream (all records, see note above) to ClickHouse.
        //mainStream.addSink()

        FlinkUtils.env.execute();

    }
}
