package operator;

import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.operators.StreamMap;

/**
 * Demonstrates Flink's map operator, implemented via the low-level {@code transform} API.
 *
 * <p>Background (translated from the original notes): a Task is split into parallel
 * subTasks according to the configured parallelism; operators of the same job with
 * matching parallelism are chained together into a single Task (operator chaining).
 * NOTE(review): the original comment likened a Flink Task to a Spark stage — the analogy
 * is loose; confirm before relying on it.
 */
public class MapDemo1 {

    public static void main(String[] args) throws Exception {
        // Flink configuration for the local environment.
        Configuration conf = new Configuration();
        // Fix: port 80 is a privileged port on Linux/macOS (binding requires root), so the
        // demo would fail to start the web UI. Use Flink's default REST/WebUI port 8081.
        conf.setInteger("rest.port", 8081);
        // Create a local execution environment with the web UI enabled.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.createLocalEnvironmentWithWebUI(conf);
        // Unbounded source: read text lines from a socket
        // (start a server first, e.g. `nc -lk 8888` on 192.168.56.10).
        DataStreamSource<String> data = env.socketTextStream("192.168.56.10", 8888);

        // Equivalent high-level form using the map operator directly:
        // SingleOutputStreamOperator<Integer> maped = data.map(s -> Integer.parseInt(s) * 10);

        // Parse each incoming line as an integer and multiply it by 10.
        MapFunction<String, Integer> mapFunction = new MapFunction<String, Integer>() {
            @Override
            public Integer map(String o) throws Exception {
                return Integer.parseInt(o) * 10;
            }
        };
        // map() delegates to transform() under the hood; calling transform() directly makes
        // the operator name, output TypeInformation, and StreamMap operator explicit.
        SingleOutputStreamOperator<Integer> maped = data.transform("MyMap", TypeInformation.of(Integer.class), new StreamMap<>(mapFunction));

        // Sink: print each transformed element to stdout.
        maped.print();

        // DataStream programs are lazy — execute() submits and runs the job.
        env.execute("map operator");
    }
}
