package com.alison.datastream.chapter2_transform;

import com.alison.tableapisql.chapter1_tableapiandsql.model.SensorReading;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.tuple.Tuple3;
import org.apache.flink.streaming.api.datastream.ConnectedStreams;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.co.CoMapFunction;
import org.apache.flink.streaming.api.functions.co.CoProcessFunction;
import org.apache.flink.util.Collector;

import java.util.Collections;

/**
 * @ClassName: T5_TransformTest4_MultipleStreams
 * @Description: Examples of combining multiple DataStreams (connect vs. union).
 * @Author: wushengran on 2020/11/7 16:14
 * @Version: 1.0
 */
public class T5_TransformTest4_MultipleStreams {

    // Default sample files; kept for backward compatibility when no args are given.
    private static final String DEFAULT_INPUT_1 =
            "D:/workspace/lab/learnbigdata/learnflink/flink-datastream/src/main/resources/datastream/sensor.txt";
    private static final String DEFAULT_INPUT_2 =
            "D:/workspace/lab/learnbigdata/learnflink/flink-datastream/src/main/resources/dataset/words.txt";

    /**
     * Reads two text files as streams and demonstrates merging them with {@code union}.
     * A commented-out {@code connect}/{@code CoMapFunction} variant is kept for reference.
     *
     * @param args optional: args[0] = first input path, args[1] = second input path;
     *             falls back to the original hard-coded sample paths when absent
     * @throws Exception if the Flink job fails to execute
     */
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        // Allow paths to be supplied on the command line so the example is portable.
        String inputPath = args.length > 0 ? args[0] : DEFAULT_INPUT_1;
        String inputPath2 = args.length > 1 ? args[1] : DEFAULT_INPUT_2;

        // Read input data from the two files.
        DataStream<String> input1 = env.readTextFile(inputPath);
        DataStream<String> input2 = env.readTextFile(inputPath2);

        // 2. connect: merges exactly two streams whose element types may differ;
        //    each side is handled by its own map function.
//        ConnectedStreams<String, String> connectedStreams = input1.connect(input2);
//        connectedStreams.map(new CoMapFunction<String, String, String>() {
//            @Override
//            public String map1(String s) throws Exception {
//                return "input1," + s;
//            }
//
//            @Override
//            public String map2(String s) throws Exception {
//                return "input2," + s;
//            }
//        }).print();

        /*
        union is a row-wise merge (appends records), not a column-wise join.
        connect: element types may differ, but only two streams can be combined;
        union: can merge many streams, but all must have the same element type.
         */
        // union — input1 is unioned twice on purpose, so its records appear twice.
        DataStream<String> union = input1.union(input2).union(input1);
        union.print();

        env.execute();
    }
}