package cn._51doit.flink.day02.transformations;

import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.operators.AbstractStreamOperator;
import org.apache.flink.streaming.api.operators.OneInputStreamOperator;
import org.apache.flink.streaming.api.operators.StreamMap;
import org.apache.flink.streaming.runtime.streamrecord.StreamRecord;

/**
 * Demonstrates implementing map-like behavior without calling {@code map} directly:
 * instead, {@code DataStream.transform} is invoked with a hand-written operator.
 *
 * <p>This mirrors the internal implementation of Flink's {@code StreamMap} operator.
 *
 * <p>Low-level Flink operators expose a {@code processElement} method that is invoked
 * once per input record; results are emitted through the inherited {@code output}
 * collector.
 */
public class MapDemo5 {

    /** Default socket source host, used when no CLI argument is given. */
    private static final String DEFAULT_HOST = "localhost";

    /** Default socket source port, used when no CLI argument is given. */
    private static final int DEFAULT_PORT = 8888;

    /**
     * Entry point: reads lines from a socket, upper-cases them with a custom
     * transform operator, and prints the result.
     *
     * @param args optional: {@code args[0]} = source host, {@code args[1]} = source port.
     *             Defaults to {@code localhost:8888}, preserving the original behavior.
     * @throws Exception if the Flink job fails to execute
     */
    public static void main(String[] args) throws Exception {

        // Generalized: host/port may be overridden from the command line.
        String host = args.length > 0 ? args[0] : DEFAULT_HOST;
        int port = args.length > 1 ? Integer.parseInt(args[1]) : DEFAULT_PORT;

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Example input: "spark"
        DataStreamSource<String> lines = env.socketTextStream(host, port);

        // Example output: "SPARK"
        SingleOutputStreamOperator<String> res =
                lines.transform("MyMap", TypeInformation.of(String.class), new MyStreamMap());

        res.print();

        env.execute();
    }

    /**
     * A low-level operator equivalent to Flink's built-in {@code StreamMap}:
     * upper-cases every incoming {@code String} record.
     */
    private static class MyStreamMap extends AbstractStreamOperator<String> implements OneInputStreamOperator<String, String> {

        @Override
        public void processElement(StreamRecord<String> element) throws Exception {
            String in = element.getValue();
            String out = in.toUpperCase();
            // Emit via the inherited output collector. Reusing the incoming record
            // with replace(...) preserves its timestamp and avoids allocating a new
            // StreamRecord (the alternative would be: output.collect(new StreamRecord<>(out))).
            output.collect(element.replace(out));
        }
    }

}
