package com.doit.demo.day11;

import static org.apache.flink.table.api.Expressions.$;

import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.TableResult;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.util.Collector;

/**
 * Flink SQL implementation of WordCount: reads "word,count" lines from a
 * socket, registers the stream as a relational view, and aggregates the
 * counts per word with a GROUP BY query.
 *
 * @author MDK
 * @since 2022/2/28
 */
public class SQLWordCount {
    public static void main(String[] args) {
        // Create the streaming execution environment and its Table API bridge.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        StreamTableEnvironment tableEnvironment = StreamTableEnvironment.create(env);

        // Build a plain DataStream from the socket source. Each input line is
        // expected to be "word,count" (e.g. "spark,3").
        DataStreamSource<String> lines = env.socketTextStream("linux01", 8888);
        SingleOutputStreamOperator<Tuple2<String, Integer>> wordAndOne =
                lines.flatMap(new FlatMapFunction<String, Tuple2<String, Integer>>() {
                    @Override
                    public void flatMap(String value, Collector<Tuple2<String, Integer>> out) {
                        // Drop malformed lines instead of letting a single bad
                        // record (missing field or non-numeric count) throw and
                        // kill the long-running streaming job.
                        String[] fields = value.split(",");
                        if (fields.length < 2) {
                            return;
                        }
                        try {
                            String word = fields[0].trim();
                            int count = Integer.parseInt(fields[1].trim());
                            out.collect(Tuple2.of(word, count));
                        } catch (NumberFormatException ignored) {
                            // Count field is not an integer: skip the record.
                        }
                    }
                });

        // Attach a schema to the plain stream, turning it into a relational view.
        tableEnvironment.createTemporaryView(
                "t_wordcount",
                wordAndOne,
                $("word"),
                $("counts"));

        // Aggregate via the SQL API; print() triggers execution and emits the
        // continuously updating result to stdout.
        TableResult tableResult = tableEnvironment.executeSql(
                "SELECT word, sum(counts) as counts from t_wordcount group by word");

        tableResult.print();

    }
}
