package com.hujf.flink.DataStreamAPI;

import org.apache.flink.api.common.functions.FilterFunction;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.typeutils.TupleTypeInfo;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

/**
 * @Author hujf
 * @Date 2022/3/17 16:02
 * @Description Flink DataStream transformation examples: flatMap-based word count
 *              with keyBy/reduce aggregation, and a filter that drops zeros.
 */
/**
 * Demonstrates basic Flink DataStream transformations: a flatMap word count
 * aggregated with keyBy/reduce, and a filter that drops zero elements.
 *
 * <p>NOTE(review): the class name {@code Map} shadows {@link java.util.Map};
 * consider renaming (e.g. {@code MapDemo}) once callers can be updated.
 */
public class Map {
    public static void main(String[] args) throws Exception {
        // Runs the flatMap/word-count demo; swap in Fileter() to try the filter demo.
        FaltMap();
      //  Fileter();
    }

    /**
     * Word-count demo: one input line fans out into many (word, 1) tuples via
     * flatMap, which are then keyed by word and summed with a rolling reduce.
     *
     * <p>NOTE(review): the name is a typo for "flatMap" and violates Java
     * lowerCamelCase method naming; kept unchanged for backward compatibility.
     *
     * @throws Exception if the Flink job fails to execute
     */
    public static void FaltMap() throws Exception {
        StreamExecutionEnvironment environment = StreamExecutionEnvironment.getExecutionEnvironment();
        DataStreamSource<String> source =
                environment.fromElements("hello how are you my name is hujf and how about you");

        // flatMap phase: split each line on spaces and emit a (word, 1) tuple per
        // word. The explicit returns(...) is required because type erasure hides
        // the lambda's Tuple2 element type from Flink's type extraction.
        DataStream<Tuple2<String, Integer>> words = source
                .flatMap((FlatMapFunction<String, Tuple2<String, Integer>>) (line, out) -> {
                    for (String word : line.split(" ")) {
                        out.collect(new Tuple2<>(word, 1));
                    }
                })
                .returns(TupleTypeInfo.getBasicTupleTypeInfo(String.class, Integer.class));

        // reduce phase: per-key (per-word) rolling sum of the counts.
        DataStream<Tuple2<String, Integer>> counts = words
                .keyBy(tuple -> tuple.f0)
                .reduce(new ReduceFunction<Tuple2<String, Integer>>() {
                    @Override
                    public Tuple2<String, Integer> reduce(Tuple2<String, Integer> value1,
                                                          Tuple2<String, Integer> value2) {
                        return new Tuple2<>(value1.f0, value1.f1 + value2.f1);
                    }
                });

        // Parallelism 1 on the sink so output lines are not interleaved.
        counts.print().setParallelism(1);
        environment.execute("执行job");
    }

    /**
     * Filter demo: keeps only the non-zero elements (i.e. drops zeros).
     *
     * <p>NOTE(review): the name is a typo for "filter"; kept unchanged for
     * backward compatibility.
     *
     * @throws Exception if the Flink job fails to execute
     */
    public static void Fileter() throws Exception {
        StreamExecutionEnvironment environment = StreamExecutionEnvironment.getExecutionEnvironment();
        DataStream<Integer> dataStream = environment.fromElements(1, 2, 30, 0, 9, 0);

        // Keep elements that are non-zero; zeros are discarded.
        // (Original comment claimed the opposite of what the predicate does.)
        SingleOutputStreamOperator<Integer> filtered =
                dataStream.filter((FilterFunction<Integer>) num -> num != 0);

        filtered.print();
        environment.execute("执行job");
    }
}
