package com.flink.ttest.flinkj;

import com.alibaba.fastjson.JSON;
import com.flink.ttest.cofig.MyOrderKeySelecter;
import com.flink.ttest.cofig.Order;
import org.apache.flink.api.common.RuntimeExecutionMode;
import org.apache.flink.api.common.eventtime.Watermark;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.tuple.Tuple3;
import org.apache.flink.connector.kafka.source.KafkaSource;
import org.apache.flink.connector.kafka.source.enumerator.initializer.OffsetsInitializer;
import org.apache.flink.connector.kafka.source.reader.deserializer.KafkaRecordDeserializationSchema;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.sink.SinkFunction;
import org.apache.flink.util.Collector;
import org.apache.kafka.common.serialization.StringDeserializer;

import java.util.Map;

/**
 * @author: LCG
 * @date: 2022-06-29 22:54:46
 * @description: Unbounded-stream aggregation demo — consumes order JSON records
 *               from Kafka and maintains a running per-user order count and
 *               total order price, printing each update to stdout.
 **/
public class WuJieFlinkStream {
    public static void main(String[] args) throws Exception {

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setRuntimeMode(RuntimeExecutionMode.STREAMING);
        env.setParallelism(1);

        // New-style (FLIP-27) Kafka source definition.
        KafkaSource<String> kafkaSource = KafkaSource.<String>builder()
                .setBootstrapServers("47.94.152.49:9092")
                .setGroupId("consumer1")
                .setTopics("TEST-TOPIC")
                // Start consuming from the latest committed offsets.
                .setStartingOffsets(OffsetsInitializer.latest())
                .setDeserializer(KafkaRecordDeserializationSchema.valueOnly(StringDeserializer.class))
                .build();

        // Attach the Kafka source; no watermarks needed for this processing-time job.
        DataStreamSource<String> stringDataStreamSource =
                env.fromSource(kafkaSource, WatermarkStrategy.noWatermarks(), "My--Kafka Source");

        // Parse each JSON record into (userId, 1, orderPrice) for per-user aggregation.
        SingleOutputStreamOperator<Tuple3<String, Long, Double>> orderStats =
                stringDataStreamSource.flatMap(new FlatMapFunction<String, Tuple3<String, Long, Double>>() {
                    @Override
                    public void flatMap(String s, Collector<Tuple3<String, Long, Double>> collector) throws Exception {
                        Order order = JSON.parseObject(s, Order.class);
                        collector.collect(Tuple3.of(order.userId, 1L, order.getOrderPrice()));
                    }
                });

        // Incrementally aggregate per user: running order count (f1) and total price (f2).
        // keyBy(int) is deprecated (removed in newer Flink) — key with an explicit selector.
        SingleOutputStreamOperator<Tuple3<String, Long, Double>> sum = orderStats
                .keyBy(t -> t.f0)
                .reduce(new ReduceFunction<Tuple3<String, Long, Double>>() {
                    @Override
                    public Tuple3<String, Long, Double> reduce(Tuple3<String, Long, Double> acc,
                                                               Tuple3<String, Long, Double> next) throws Exception {
                        // Return a fresh tuple instead of mutating an input element:
                        // Flink may reuse input objects, so in-place mutation is unsafe.
                        return Tuple3.of(acc.f0, acc.f1 + next.f1, acc.f2 + next.f2);
                    }
                });

        // Print each running aggregate (stand-in for a real database sink).
        sum.addSink(new SinkFunction<Tuple3<String, Long, Double>>() {
            @Override
            public void invoke(Tuple3<String, Long, Double> value, Context context) throws Exception {
                System.out.println("用户："+value.f0+" =订单数："+value.f1+" =总金额："+value.f2);
            }
        });


        env.execute("start job ");
    }
}
