package com.atguigu.app;

import com.atguigu.bean.Bean3;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;

/**
 * Flink DataStream demo: reads comma-separated events ({@code id,vc,ts}) from a
 * socket, assigns event-time timestamps with a monotonous watermark strategy,
 * and sums the {@code vc} field per key over 5-second tumbling event-time windows.
 *
 * <p>Usage: {@code WMTransTest2 [host [port]]} — defaults to {@code hadoop102:8888}
 * so existing invocations keep working unchanged.
 */
public class WMTransTest2 {

    public static void main(String[] args) throws Exception {

        // Host/port are optional CLI args; defaults preserve the original behavior.
        final String host = args.length > 0 ? args[0] : "hadoop102";
        final int port = args.length > 1 ? Integer.parseInt(args[1]) : 8888;

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(2);

        // Parse each line "id,vc,ts" into a Bean3.
        // NOTE(review): a malformed line (missing fields / non-numeric values) throws
        // inside map() and fails the job — acceptable for a demo; a production job
        // would filter or route bad records to a side output.
        SingleOutputStreamOperator<Bean3> bean3DS = env.socketTextStream(host, port)
                .map(line -> {
                    String[] fields = line.split(",");
                    return new Bean3(fields[0],
                            Double.parseDouble(fields[1]),
                            Long.parseLong(fields[2]));
                })
                // Monotonous timestamps: watermark = max seen timestamp, no lateness
                // allowance — assumes the source emits events in ascending ts order.
                .assignTimestampsAndWatermarks(WatermarkStrategy.<Bean3>forMonotonousTimestamps()
                        .withTimestampAssigner(new SerializableTimestampAssigner<Bean3>() {
                            @Override
                            public long extractTimestamp(Bean3 element, long recordTimestamp) {
                                // ts is in seconds; Flink event time is milliseconds.
                                return element.getTs() * 1000L;
                            }
                        }));

        // Key by id, then sum "vc" per 5-second tumbling event-time window.
        KeyedStream<Bean3, String> keyedStream = bean3DS.keyBy(Bean3::getId);

        SingleOutputStreamOperator<Bean3> result = keyedStream.window(TumblingEventTimeWindows.of(Time.seconds(5)))
                .sum("vc");

        result.print();

        env.execute();

    }

}
