package com.xxxx;

import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.functions.RichFlatMapFunction;
import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.api.java.tuple.Tuple;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.util.Collector;

import java.util.Arrays;

/**
 * Word count (wc) implemented in several alternative styles:
 * anonymous inner classes, lambda expressions, and a rich function.
 *
 * @program: flink19Test
 * @author: CoreDao
 * @create: 2021-03-31 19:35
 **/

public class StreamingJobWc {

    public static void main(String[] args) throws Exception {
        // Step 1: environment — local cluster with the Flink web UI enabled.
        StreamExecutionEnvironment env =
                StreamExecutionEnvironment.createLocalEnvironmentWithWebUI(new Configuration());

        // Step 2: source — read text lines from a local socket (e.g. `nc -lk 9999`).
        DataStreamSource<String> socket = env.socketTextStream("localhost", 9999);

        // Step 3: transformation — three equivalent word-count implementations.

        // Variant 1: anonymous inner classes.
        //anonymousInnerClass(socket);

        // Variant 2: lambda expressions.
        // Note: returns(...) is mandatory because generic lambdas lose their
        // type information to erasure.
        //lambdaTest(socket);

        // Variant 3: rich function (lifecycle hooks + runtime context).
        richFunction(socket);

        // Step 4: trigger execution of the job graph built above.
        env.execute();
    }

    /**
     * Word count implemented with a {@link RichFlatMapFunction}, which exposes
     * the open/close lifecycle hooks and the runtime context (roughly analogous
     * to Spark's SparkContext: accumulators, broadcast variables, subtask info).
     *
     * @param socket stream of text lines to tokenize and count
     */
    private static void richFunction(DataStreamSource<String> socket) {
        socket.flatMap(new RichFlatMapFunction<String, Tuple2<String, Integer>>() {

            private String jobName = null;

            @Override
            public void open(Configuration parameters) throws Exception {
                // Invoked once per subtask before any record is processed.
                jobName = "my first flink job";
            }

            @Override
            public void close() throws Exception {
                // Invoked once per subtask after processing finishes.
                jobName = null;
            }

            @Override
            public void flatMap(String value, Collector<Tuple2<String, Integer>> out) throws Exception {
                // The runtime context provides subtask metadata, accumulators, etc.
                System.out.println(getRuntimeContext().getIndexOfThisSubtask());
                for (String word : value.split(" ")) {
                    // Skip empty tokens produced by leading/repeated spaces,
                    // otherwise "" would be counted as a word.
                    if (!word.isEmpty()) {
                        out.collect(Tuple2.of(word, 1));
                    }
                }
            }
        })
        // keyBy(int) is deprecated; use a KeySelector lambda on the word field.
        .keyBy(t -> t.f0)
        .sum(1)
        .print();
    }

    /**
     * Word count implemented with lambda expressions.
     *
     * <p>Generic lambdas are subject to type erasure, so the output type of the
     * flatMap/map stages must be declared explicitly via {@code returns(...)}.
     *
     * @param socket stream of text lines to tokenize and count
     */
    private static void lambdaTest(DataStreamSource<String> socket) {
        SingleOutputStreamOperator<String> words = socket
                .flatMap((String value, Collector<String> out) ->
                        Arrays.stream(value.split(" "))
                                // Skip empty tokens from leading/repeated spaces.
                                .filter(word -> !word.isEmpty())
                                .forEach(out::collect))
                // Required: the lambda's generic output type is erased.
                .returns(Types.STRING);

        words.map(word -> Tuple2.of(word, 1))
                .returns(Types.TUPLE(Types.STRING, Types.INT))
                // keyBy(String field) is deprecated; use a KeySelector lambda.
                .keyBy(t -> t.f0)
                .sum(1)
                .print();
    }

    /**
     * Word count implemented with anonymous inner classes — the most explicit
     * (pre-Java-8) style; no {@code returns(...)} hints are needed because the
     * type arguments are fully declared.
     *
     * @param socket stream of text lines to tokenize and count
     */
    private static void anonymousInnerClass(DataStreamSource<String> socket) {
        // Split each line into words.
        SingleOutputStreamOperator<String> words = socket.flatMap(new FlatMapFunction<String, String>() {
            @Override
            public void flatMap(String value, Collector<String> out) throws Exception {
                for (String word : value.split(" ")) {
                    // Skip empty tokens produced by leading/repeated spaces.
                    if (!word.isEmpty()) {
                        out.collect(word);
                    }
                }
            }
        });

        // Pair every word with an initial count of 1.
        SingleOutputStreamOperator<Tuple2<String, Integer>> pairs =
                words.map(new MapFunction<String, Tuple2<String, Integer>>() {
                    @Override
                    public Tuple2<String, Integer> map(String value) throws Exception {
                        return Tuple2.of(value, 1);
                    }
                });

        // keyBy(int) is deprecated; key by the word itself via a KeySelector.
        KeyedStream<Tuple2<String, Integer>, String> keyed = pairs.keyBy(t -> t.f0);
        keyed.sum(1).print();
    }
}
