package com.fwmagic.flink.tablesql.batch;

import org.apache.flink.api.java.DataSet;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.java.BatchTableEnvironment;

/**
 * Word-count example on Flink's (legacy) batch Table API.
 *
 * <p>Builds an in-memory {@code DataSet} of (word, 1) occurrences, aggregates
 * the counts per word, keeps only words seen at least twice, and prints the
 * result ordered by total count, highest first. {@code print()} on the
 * resulting DataSet triggers job execution, so no explicit
 * {@code env.execute()} call is needed.
 */
public class BatchTableWordCount {

    public static void main(String[] args) throws Exception {
        final ExecutionEnvironment executionEnv = ExecutionEnvironment.getExecutionEnvironment();
        final BatchTableEnvironment tableEnv = BatchTableEnvironment.create(executionEnv);

        // Sample input: each element is one occurrence of a word with count 1.
        DataSet<WordCount> occurrences = executionEnv.fromElements(
                new WordCount("Spark", 1L),
                new WordCount("Spark", 1L),
                new WordCount("Flink", 1L),
                new WordCount("Flink", 1L),
                new WordCount("Flink", 1L),
                new WordCount("Flink", 1L),
                new WordCount("Java", 1L),
                new WordCount("Spark", 1L)
        );

        // Aggregate per word, drop rare words, and sort by total count.
        Table aggregated = tableEnv.fromDataSet(occurrences)
                .groupBy("word")                       // group rows by word
                .select("word, counts.sum as counts")  // sum occurrence counts per group
                .filter("counts>=2")                   // keep words appearing at least twice
                .orderBy("counts.desc");               // highest totals first

        // Map the table rows back onto the POJO and print; this runs the job.
        DataSet<WordCount> results = tableEnv.toDataSet(aggregated, WordCount.class);
        results.print();
    }

    /**
     * Row type for the word-count table. Public fields plus a public
     * no-argument constructor make this a valid Flink POJO, so the Table API
     * can map rows to and from it by field name ({@code word}, {@code counts}).
     */
    public static class WordCount {
        public String word;
        public Long counts;

        /** Required by Flink's POJO type extraction. */
        public WordCount() {
        }

        public WordCount(String word, Long counts) {
            this.word = word;
            this.counts = counts;
        }

        @Override
        public String toString() {
            return "WordCount{word='" + word + '\'' + ", counts=" + counts + '}';
        }
    }
}
