package com.lizhiyu.flink.demo5_window;

import com.alibaba.fastjson.JSON;
import com.lizhiyu.flink.model.VideoOrder;
import org.apache.commons.collections.IteratorUtils;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.tuple.Tuple3;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.windowing.WindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingProcessingTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;

import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
import java.util.stream.Collectors;


/**
 * Full-window function demo: {@code apply(new WindowFunction)} buffers every element of a
 * window and fires the computation only once, when the window closes — unlike
 * {@code aggregate}, which folds each element into its accumulator as it arrives.
 *
 * <p>{@link WindowFunction} may be deprecated in later Flink versions; prefer
 * {@code process(new ProcessWindowFunction)}, which additionally exposes the window
 * context (watermarks, state, etc.).
 */
public class WindowDemo3Apply {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        DataStreamSource<String> ds = env.addSource(new CustomSource3());
        ds.setParallelism(1);
        ds.print("------------source:");

        // Parse each raw "title:money:createTime" record into a typed Tuple3.
        SingleOutputStreamOperator<Tuple3<String, Integer, String>> mapDs = ds.map(new MapFunction<String, Tuple3<String, Integer, String>>() {
            @Override
            public Tuple3<String, Integer, String> map(String value) throws Exception {
                String[] split = value.split(":");
                // Parameterized constructor instead of the original raw `Tuple3` local
                // that shadowed the class name (unchecked-raw-type warning).
                return new Tuple3<>(split[0], Integer.parseInt(split[1]), split[2]);
            }
        });

        // Key the stream by title (f0) so windows are evaluated per key.
        KeyedStream<Tuple3<String, Integer, String>, String> keyByDs = mapDs.keyBy(new KeySelector<Tuple3<String, Integer, String>, String>() {
            @Override
            public String getKey(Tuple3<String, Integer, String> value) throws Exception {
                return value.f0;
            }
        });

        // 5-second tumbling processing-time window; apply() runs once per window
        // with the complete list of buffered elements.
        SingleOutputStreamOperator<VideoOrder> apply = keyByDs.window(TumblingProcessingTimeWindows.of(Time.seconds(5)))
                // Type parameters: <IN, OUT, KEY, W extends Window>
                .apply(new WindowFunction<Tuple3<String, Integer, String>, VideoOrder, String, TimeWindow>() {
                    @Override
                    public void apply(String key, TimeWindow window, Iterable<Tuple3<String, Integer, String>> input, Collector<VideoOrder> out) throws Exception {
                        // Materialize the window contents as a typed list instead of the
                        // raw List returned by commons' IteratorUtils.toList (unchecked).
                        List<Tuple3<String, Integer, String>> list = new ArrayList<>();
                        input.forEach(list::add);
                        // apply() computes once over the whole window when it closes,
                        // rather than incrementally per element like aggregate().
                        // (JSON.toJSONString already returns a String; the original
                        // redundant .toString() is dropped.)
                        System.out.println(JSON.toJSONString(list) + "执行了一次");

                        VideoOrder videoOrder = new VideoOrder();
                        // Primitive stream sum avoids the boxing of Collectors.summingInt.
                        videoOrder.setMoney(list.stream().mapToInt(obj -> obj.f1).sum());
                        // A fired window is never empty, so list.get(0) is safe here.
                        videoOrder.setTitle(list.get(0).f0);
                        videoOrder.setCreateTimeStr(list.get(0).f2);
                        out.collect(videoOrder);
                    }
                });
        apply.print("sink:");
        env.execute();
    }
}
