










package com.ry.flink.process;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.ry.flink.function.IndicatorProcessMainByTransportTypeApplySumFun;
import com.ry.flink.utils.Constants;
import com.ry.flink.utils.FlinkKafkaUtils;
import com.ry.flink.watermark.IndicatorProcessMainByTransportTypeWaterMark;

import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.java.tuple.Tuple3;
import org.apache.flink.api.java.tuple.Tuple4;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.TimeCharacteristic;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.CheckpointConfig;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.assigners.TumblingProcessingTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer011;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer011;
import org.apache.flink.streaming.connectors.kafka.internals.KeyedSerializationSchemaWrapper;
import org.apache.flink.util.OutputTag;

import java.text.SimpleDateFormat;
import java.time.LocalDateTime;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.util.Properties;

/**
 * Requirement: count the number of waybills issued per area and transport type,
 * aggregated over tumbling event-time windows.
 * <ul>
 *   <li>Watermarks keep event-time order, allowing records to arrive up to 30 seconds late
 *       (see {@link IndicatorProcessMainByTransportTypeWaterMark}).</li>
 *   <li>Records arriving later than the allowed lateness are routed to a side output and
 *       written back to Kafka so the data can be back-filled downstream.</li>
 * </ul>
 * NOTE(review): the stated requirement is a 5-minute window, but the code uses a 3-second
 * window (presumably for testing) — confirm the window size before production use.
 */
public class IndicatorProcessMainByTransportType {

    /**
     * Legacy formatter kept for package compatibility.
     * WARNING: {@link SimpleDateFormat} is not thread-safe; parsing now goes through
     * the thread-safe {@link #DATETIME_FORMAT} below.
     */
    static SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");

    /** Thread-safe formatter for the "datetime" field of incoming JSON records. */
    private static final DateTimeFormatter DATETIME_FORMAT =
            DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss");

    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Parallelism chosen to match the number of Kafka topic partitions.
        env.setParallelism(1);
        // Enable checkpointing every 20s for failure recovery.
        env.enableCheckpointing(20000);
        // Exactly-once processing semantics.
        env.getCheckpointConfig().setCheckpointingMode(CheckpointingMode.EXACTLY_ONCE);
        // Minimum pause between two consecutive checkpoints.
        env.getCheckpointConfig().setMinPauseBetweenCheckpoints(2000);
        // Only one checkpoint may be in flight at a time.
        env.getCheckpointConfig().setMaxConcurrentCheckpoints(1);
        // Abort any checkpoint that takes longer than 60s.
        env.getCheckpointConfig().setCheckpointTimeout(60000);
        // Keep externalized checkpoint data when the job is cancelled, so it can be restored.
        env.getCheckpointConfig().enableExternalizedCheckpoints(
                CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);
        // State backend / restart strategy intentionally left at cluster defaults.
//        env.setStateBackend(new FsStateBackend());
//        env.setRestartStrategy();
        // Use event time so the assigned watermarks (and late-data handling) take effect.
        env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);

        /*
         * Source: JSON records from Kafka.
         */
        String topic = "realtime_indicators2";
        // NOTE(review): producer properties are reused for the consumer here — verify that
        // FlinkKafkaUtils.getProducerProperties also carries the consumer settings
        // (e.g. group.id, deserializers) or switch to a dedicated consumer-properties helper.
        Properties properties = FlinkKafkaUtils.getProducerProperties(Constants.BROKERS);
        FlinkKafkaConsumer011<String> flinkKafkaConsumer011 =
                new FlinkKafkaConsumer011<>(topic, new SimpleStringSchema(), properties);
        DataStreamSource<String> dataSource = env.addSource(flinkKafkaConsumer011);

        /*
         * Parse: JSON line -> (eventTimeMillis, area, transportType),
         * e.g. (125123123, 上海, 空运).
         */
        SingleOutputStreamOperator<Tuple3<Long, String, String>> mapSource =
                dataSource.map(new MapFunction<String, Tuple3<Long, String, String>>() {
            @Override
            public Tuple3<Long, String, String> map(String line) {
                JSONObject jsonObject = JSON.parseObject(line);
                // Area and transport type are the grouping dimensions.
                String area = jsonObject.getString("area");
                String type = jsonObject.getString("type");
                // Convert the "datetime" string to epoch millis; unparseable records keep
                // timestamp 0 and are dropped by the filter below.
                String datetime = jsonObject.getString("datetime");
                long timestamp = 0;
                try {
                    timestamp = LocalDateTime.parse(datetime, DATETIME_FORMAT)
                            .atZone(ZoneId.systemDefault())
                            .toInstant()
                            .toEpochMilli();
                } catch (Exception e) {
                    e.printStackTrace();
                }
                return Tuple3.of(timestamp, area, type);
            }
        });
        // Drop records whose timestamp could not be parsed.
        SingleOutputStreamOperator<Tuple3<Long, String, String>> filterSource =
                mapSource.filter(x -> x.f0 != 0);

        /*
         * Side output: records arriving after the allowed lateness are tagged here
         * and later written to Kafka for back-filling.
         */
        OutputTag<Tuple3<Long, String, String>> lateDataTag =
                new OutputTag<Tuple3<Long, String, String>>("lateData") {};

        // Event-time tumbling window keyed by (area, type).
        // FIX: this must be an event-time window — with the previous
        // TumblingProcessingTimeWindows the assigned watermarks were ignored and
        // sideOutputLateData could never emit anything.
        SingleOutputStreamOperator<Tuple4<String, String, String, Long>> resultDataSource =
                filterSource.assignTimestampsAndWatermarks(new IndicatorProcessMainByTransportTypeWaterMark())
                .keyBy(1, 2)
                .window(TumblingEventTimeWindows.of(Time.seconds(3)))
                .sideOutputLateData(lateDataTag)
                .apply(new IndicatorProcessMainByTransportTypeApplySumFun());

        /*
         * Print aggregated results for verification.
         */
        resultDataSource.print();


        /*
         * Sink: write late records back to Kafka so the missed data can be back-filled.
         */
        String late_data_topic = "realtime_indicators2_late_data";
        FlinkKafkaProducer011<String> kafkaProducer011 = new FlinkKafkaProducer011<>(
                late_data_topic,
                new KeyedSerializationSchemaWrapper<String>(new SimpleStringSchema()),
                properties);
        resultDataSource.getSideOutput(lateDataTag).map(t -> t.toString()).addSink(kafkaProducer011);
        env.execute("IndicatorProcessMainByTransportType");
    }
}


