package processFunction;

import beans.SenSorReading;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;

/**
 * Flink streaming job demonstrating side outputs ("stream splitting"):
 * reads comma-separated sensor readings from a socket, routes readings with
 * temperature > 30 to the main (high-temp) stream and everything else to a
 * side-output (low-temp) stream, then prints both.
 *
 * <p>Expected input line format: {@code id,timestamp,temperature}
 * e.g. {@code sensor_1,1547718199,35.8}.
 */
public class Process_SideOutput {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        // Raw text source: one reading per line from host "hadoop", port 9999.
        DataStreamSource<String> inputStream = env.socketTextStream("hadoop", 9999);

        // Parse each CSV line into a SenSorReading(id, timestamp, temperature).
        // NOTE: parseLong/parseDouble replace the deprecated Long(String)/Double(String)
        // boxing constructors (deprecated since Java 9) and avoid needless boxing.
        SingleOutputStreamOperator<SenSorReading> dataStream = inputStream.map(line -> {
            String[] fields = line.split(",");
            return new SenSorReading(fields[0], Long.parseLong(fields[1]), Double.parseDouble(fields[2]));
        });

        // OutputTag for the low-temperature side output. The anonymous subclass
        // ({}) is required so Flink can capture the generic type at runtime.
        OutputTag<SenSorReading> lowTemp = new OutputTag<SenSorReading>("low-temp") {
        };

        // Split the stream via a ProcessFunction: readings above 30 degrees go to
        // the main output, the rest to the lowTemp side output.
        SingleOutputStreamOperator<SenSorReading> highTemp = dataStream.process(new ProcessFunction<SenSorReading, SenSorReading>() {
            @Override
            public void processElement(SenSorReading senSorReading, Context context, Collector<SenSorReading> collector) throws Exception {
                if (senSorReading.getTemperature() > 30) {
                    collector.collect(senSorReading);       // main output: high temperature
                } else {
                    context.output(lowTemp, senSorReading); // side output: low temperature
                }
            }
        });

        highTemp.print("high-temp");
        // Retrieve the side-output stream by its tag and print it as well.
        highTemp.getSideOutput(lowTemp).print("low-temp");
        env.execute();
    }
}
