package com.atguigu.day02.transform;

import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.ConnectedStreams;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.co.CoProcessFunction;
import org.apache.flink.util.Collector;

/**
 * Demonstrates {@code DataStream#connect}: joining two streams of DIFFERENT
 * element types ({@code Integer} and {@code String}) into a single
 * {@link ConnectedStreams}, then unifying them to one output type via a
 * {@link CoProcessFunction}.
 *
 * <p>Note: {@code connect} does not truly merge the streams — the two inputs
 * keep their own types and are handled by separate callbacks
 * ({@code processElement1} / {@code processElement2}).
 */
public class Flink08_Connect {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Pin the Flink web UI to a fixed port instead of a random one.
        conf.setInteger("rest.port", 20000);
        // Create the stream execution environment (local, with web UI config).
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(conf);
        // Set the default parallelism for all operators.
        env.setParallelism(2);

        DataStreamSource<Integer> first = env.fromElements(1, 2, 4, 5, 8, 9, 10);
        DataStreamSource<String> second = env.fromElements("a", "b", "c", "d");
        // connect() can join two streams whose element types differ.
        // The connection is not a real merge; each side is processed separately.
        ConnectedStreams<Integer, String> cs = first.connect(second);

        cs.process(new CoProcessFunction<Integer, String, String>() {
            // Convert each side's elements to a single common output type (String).
            @Override
            public void processElement1(Integer value, Context context, Collector<String> out) throws Exception {
                out.collect(value.toString());
            }

            @Override
            public void processElement2(String value, Context context, Collector<String> out) throws Exception {
                out.collect(value);
            }
        }).print();

        // Let failures propagate: swallowing the exception with printStackTrace()
        // would hide job-submission errors and exit with status 0.
        env.execute();
    }
}
