package com.lagou.bak;

import org.apache.commons.lang3.SystemUtils;
//import org.apache.flink.api.common.RuntimeExecutionMode;
import org.apache.flink.api.common.restartstrategy.RestartStrategies;
import org.apache.flink.api.common.time.Time;
import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.utils.ParameterTool;
import org.apache.flink.runtime.state.filesystem.FsStateBackend;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.CheckpointConfig;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.util.Collector;

import java.util.Arrays;
import java.util.concurrent.TimeUnit;
import java.util.stream.Stream;

/**
 * Streaming word count with checkpointing: reads lines from a socket source,
 * splits them on spaces, and prints a running count per word.
 *
 * <p>The checkpoint/state-backend path defaults to an HDFS location and can be
 * overridden with {@code --output <path>}; on Windows a local file path is
 * always used instead.
 *
 * @author lzj
 * @date 2021/6/15 9:45
 */
public class WordCount {

    public static void main(String[] args) throws Exception {
        ParameterTool parameterTool = ParameterTool.fromArgs(args);
        String output = resolveCheckpointPath(parameterTool);

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
//        env.setRuntimeMode(RuntimeExecutionMode.BATCH);
//        env.setRuntimeMode(RuntimeExecutionMode.STREAMING);
//        AUTOMATIC picks stream or batch processing based on the source; the default is STREAMING.
//        env.setRuntimeMode(RuntimeExecutionMode.AUTOMATIC);

        // Checkpoint every 2 s; require at least 500 ms between checkpoints and
        // tolerate up to 10 checkpoint failures before failing the job.
        env.enableCheckpointing(2000);
        env.setStateBackend(new FsStateBackend(output));
        env.getCheckpointConfig().setMinPauseBetweenCheckpoints(500);
        env.getCheckpointConfig().setTolerableCheckpointFailureNumber(10);
        // Retain externalized checkpoints when the job is cancelled so it can be resumed later.
        env.getCheckpointConfig().enableExternalizedCheckpoints(
                CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);
        // On failure, restart up to 3 times with a 5-second delay between attempts.
        env.setRestartStrategy(RestartStrategies.fixedDelayRestart(
                3,
                Time.of(5, TimeUnit.SECONDS) // delay between restart attempts
        ));

        DataStreamSource<String> source = env.socketTextStream("hadoop2", 9999);

        // The return type must be declared explicitly via .returns(...): Flink cannot
        // recover the generic type parameter of Collector from a lambda (type erasure)
        // and would otherwise throw:
        // org.apache.flink.api.common.functions.InvalidTypesException: The generic type parameters of 'Collector' are missing...
        SingleOutputStreamOperator<String> words = source.flatMap(
                (String in, Collector<String> out) -> Arrays.stream(in.split(" ")).forEach(out::collect)
        ).returns(Types.STRING);

        SingleOutputStreamOperator<Tuple2<String, Integer>> wordAndCnt =
                words.map(in -> Tuple2.of(in, 1)).returns(Types.TUPLE(Types.STRING, Types.INT));

        KeyedStream<Tuple2<String, Integer>, String> keyedStream = wordAndCnt.keyBy(tuple2 -> tuple2.f0);

        // Running sum over the count field (f1) per word key.
        SingleOutputStreamOperator<Tuple2<String, Integer>> result = keyedStream.sum(1);

        result.print();

        // Both batch and streaming jobs are triggered with execute().
        env.execute();
    }

    /**
     * Resolves the checkpoint/state path.
     *
     * <p>Uses {@code --output} when provided, otherwise falls back to an HDFS
     * default; on Windows the path is always overridden with a local file URI.
     *
     * @param parameterTool parsed command-line arguments
     * @return the checkpoint path to hand to the state backend
     */
    private static String resolveCheckpointPath(ParameterTool parameterTool) {
        String output;
        if (parameterTool.has("output")) {
            output = parameterTool.get("output");
            System.out.println("指定了输出路径使用:" + output);
        } else {
            // HDFS NameNode address
            output = "hdfs://linux121:9000/wordcount";
            System.out.println("可以指定输出路径使用 --output, 没有指定使用默认的:" + output);
        }

        if (SystemUtils.IS_OS_WINDOWS) {
            output = "file:///D:/ckp";
        }
        return output;
    }

}
