package cn._51doit.flink.day02.transformations;

import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.typeinfo.TypeHint;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

/**
 * Demonstrates the map transformation implemented with a Java lambda expression.
 *
 * When a Java lambda produces a DataStream whose element type itself carries generics
 * (i.e. nested generics, such as Tuple2&lt;String, Integer&gt;), the generic type
 * information is erased at compile time, so the returns() method must be called to
 * declare the result type explicitly.
 *
 * Example: DataStream&lt;Boy&gt; — no returns() needed (non-generic element type)
 * Example: DataStream&lt;MyList&lt;Boy&gt;&gt; — returns() required (nested generics)
 */
public class MapDemo3 {

    public static void main(String[] args) throws Exception {

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Each input line looks like "word,count", e.g.:
        //   spark,1
        //   flink,3
        DataStreamSource<String> source = env.socketTextStream("localhost", 8888);

        // Map each CSV line to a (word, count) tuple. Because a Java lambda erases
        // the nested generic type of Tuple2<String, Integer>, the result type must
        // be declared explicitly via returns(); Types.TUPLE is equivalent to
        // TypeInformation.of(new TypeHint<Tuple2<String, Integer>>(){}).
        SingleOutputStreamOperator<Tuple2<String, Integer>> wordAndCount = source
                .map(line -> {
                    String[] parts = line.split(",");
                    return Tuple2.of(parts[0], Integer.parseInt(parts[1]));
                })
                .returns(Types.TUPLE(Types.STRING, Types.INT));

        // Print the tuples to stdout and block until the job terminates.
        wordAndCount.print();

        env.execute();
    }

}
