package flink;

import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.rabbitmq.RMQSource;
import org.apache.flink.streaming.connectors.rabbitmq.common.RMQConnectionConfig;
import org.apache.flink.util.Collector;

import java.nio.charset.StandardCharsets;
import java.util.Arrays;
public class StreamWordCount {

    /**
     * Flink streaming job that consumes text messages from the RabbitMQ
     * "vehicle-location" queue and prints each message to stdout.
     *
     * @param args command-line arguments (unused)
     * @throws Exception if the Flink job fails to start or execute
     */
    public static void main(String[] args) throws Exception {
        // 1. Create the streaming execution environment.
        StreamExecutionEnvironment env =
                StreamExecutionEnvironment.getExecutionEnvironment();

        // The RMQSource below is built with usesCorrelationId = true, which only
        // provides de-duplication (at-least/exactly-once semantics) when
        // checkpointing is enabled — so enable it (10s interval).
        env.enableCheckpointing(10_000);

        // 2. RabbitMQ connection settings.
        // NOTE(review): credentials are hard-coded; move host/user/password to
        // external configuration or a secrets store before deploying.
        RMQConnectionConfig connectionConfig = new RMQConnectionConfig.Builder()
                .setHost("192.168.56.103")
                .setPort(5672)
                .setUserName("admin")
                .setPassword("123456")
                .setVirtualHost("/")
                .build();

        // 3. Source: consume from the "vehicle-location" queue.
        DataStreamSource<String> stringDataStreamSource = env.addSource(new RMQSource<>(
                // MQ connection configuration
                connectionConfig,
                // queue name
                "vehicle-location",
                // usesCorrelationId: producers must set a correlation id on every
                // message; Flink uses it to de-duplicate on checkpoint recovery.
                true,
                // deserialization schema: raw bytes -> UTF-8 String
                new SimpleStringSchema(StandardCharsets.UTF_8)));

        // 4. Sink: print each message to stdout.
        stringDataStreamSource.print();

        // 5. Submit and run the job.
        env.execute();
    }
}