package com.flink.ttest.flinkj;

import com.flink.ttest.cofig.*;
import org.apache.flink.api.common.RuntimeExecutionMode;
import org.apache.flink.api.common.eventtime.WatermarkGenerator;
import org.apache.flink.api.common.eventtime.WatermarkGeneratorSupplier;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.connector.kafka.source.KafkaSource;
import org.apache.flink.connector.kafka.source.enumerator.initializer.OffsetsInitializer;
import org.apache.flink.connector.kafka.source.reader.deserializer.KafkaRecordDeserializationSchema;
import org.apache.flink.streaming.api.datastream.DataStreamSink;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.windowing.assigners.SlidingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.triggers.ContinuousEventTimeTrigger;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumerBase;
import org.apache.flink.streaming.connectors.kafka.shuffle.FlinkKafkaShuffleConsumer;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.common.serialization.StringDeserializer;

import java.util.Properties;

/**
 * Flink streaming job that consumes order events from Kafka, keys them by
 * order, and aggregates order prices over event-time windows, printing the
 * results to stdout.
 *
 * @author: LCG
 * @date: 2022-06-27 17:47:12
 * @description: Kafka -> keyBy -> tumbling event-time window aggregation demo.
 **/
public class TestFlinkStream {

    /**
     * Entry point: builds and launches the streaming job.
     *
     * <p>Pipeline: Kafka source -> map (parse order) -> keyBy (order key) ->
     * 100-second tumbling event-time window with a 5-second continuous
     * event-time trigger -> aggregate order price -> print.
     *
     * @param args unused command-line arguments
     * @throws Exception if job construction or execution fails
     */
    public static void main(String[] args) throws Exception {
        // Obtain the stream-processing execution environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setRuntimeMode(RuntimeExecutionMode.STREAMING);
        env.setParallelism(1);

        // Define the Kafka source (new KafkaSource API; the legacy
        // FlinkKafkaConsumer-based setup was removed as dead code).
        KafkaSource<String> kafkaSource = KafkaSource.<String>builder()
                .setBootstrapServers("47.94.152.49:9092")
                .setGroupId("consumer1")
                .setTopics("TEST-TOPIC")
                // Start consuming from the latest offsets.
                .setStartingOffsets(OffsetsInitializer.latest())
                .setDeserializer(KafkaRecordDeserializationSchema.valueOnly(StringDeserializer.class))
                .build();

        // BUG FIX: this job uses event-time windows and an event-time trigger
        // below, but the source previously declared WatermarkStrategy.noWatermarks().
        // Without watermarks, event-time windows never fire and the job emits
        // nothing. forMonotonousTimestamps() derives watermarks from the Kafka
        // record timestamps (assumes timestamps are ascending per partition —
        // TODO confirm against the producer; if records can arrive out of order,
        // switch to WatermarkStrategy.forBoundedOutOfOrderness(...)).
        DataStreamSource<String> stringDataStreamSource = env.fromSource(
                kafkaSource,
                WatermarkStrategy.forMonotonousTimestamps(),
                "My--Kafka Source");
        stringDataStreamSource.setParallelism(1);

        // Parse each record into an order, key by order, and aggregate the
        // price over a 100-second tumbling event-time window, emitting early
        // (partial) results every 5 seconds of event time.
        stringDataStreamSource
                .map(new MyMapFunction())
                .keyBy(new MyOrderKeySelecter())
                .window(TumblingEventTimeWindows.of(Time.seconds(100)))
                .trigger(ContinuousEventTimeTrigger.of(Time.seconds(5)))
                .aggregate(new OrderPriceCount(), new OrderPriceCountWindow())
                .print();

        // Launch the job (job-name typo "mintus" corrected to "minutes").
        env.execute("Order Price by minutes");
    }
}
