package streaming.api.sink;

import streaming.api.beans.SensorReading;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.serialization.SimpleStringEncoder;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.core.fs.Path;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.sink.filesystem.StreamingFileSink;
import org.apache.flink.util.Collector;
import utils.PropertiesReader;

/**
 * file -> file (local filesystem)
 * Data source: text file [sensor1.txt]
 * Sink target: row-format files written under the configured output directory
 */
public class SinkTest3_file_1 {

    /** Path of the input sensor CSV file (lines of the form "id,timestamp,temperature"). */
    private static final String fromPath = PropertiesReader.get("default.file.from.path");
    /** Directory the StreamingFileSink writes its part files into. */
    private static final String toPathDir = PropertiesReader.get("default.file.to.pathDir");

    /**
     * Reads sensor CSV lines from a local file, converts them to (id, temperature)
     * tuples, and writes the tuples as text rows to a local directory via a
     * row-format StreamingFileSink.
     *
     * @param args unused
     * @throws Exception if the Flink job fails to execute
     */
    public static void main(String[] args) throws Exception {

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Parallelism 1 so the sink produces a single part file per bucket.
        env.setParallelism(1);

        DataStream<String> inputStream = env.readTextFile(fromPath);

        // Parse each CSV line into a SensorReading POJO.
        // Long.valueOf / Double.parseDouble replace the deprecated boxed constructors.
        // NOTE(review): dataStream1 is built but never sunk — kept for parity with
        // the original example; remove or sink it if unneeded.
        DataStream<SensorReading> dataStream1 = inputStream.map(line -> {
            String[] fields = line.split(",");
            return new SensorReading(fields[0], Long.valueOf(fields[1]), Double.parseDouble(fields[2]));
        });

        // Convert to (id, temperature) tuples. Declared with its full generic type:
        // the original raw DataStream hid a Tuple2/SensorReading mismatch in the
        // sink's encoder type parameter below.
        DataStream<Tuple2<String, Double>> dataStream2 =
            inputStream.flatMap(new FlatMapFunction<String, Tuple2<String, Double>>() {
                @Override
                public void flatMap(String s, Collector<Tuple2<String, Double>> out) throws Exception {
                    String[] fields = s.split(",");
                    out.collect(new Tuple2<>(fields[0], Double.valueOf(fields[2])));
                }
            });

        // Write tuples as text rows; the encoder's type now matches the stream's
        // element type (SimpleStringEncoder just calls toString() on each element).
        dataStream2.addSink(
            StreamingFileSink.forRowFormat(
                new Path(toPathDir),
                new SimpleStringEncoder<Tuple2<String, Double>>()
            ).build()
        );

        env.execute();
    }
}
