package cn._51doit.flink.day11;

import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.typeinfo.TypeHint;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.TableResult;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * Real-time WordCount implemented with Flink SQL over a socket text stream.
 */
public class SqlWordCount4 {

    public static void main(String[] args) throws Exception {

        // Create the streaming execution environment and wrap it in a
        // StreamTableEnvironment so SQL can be executed on top of DataStreams.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        StreamTableEnvironment tableEnvironment = StreamTableEnvironment.create(env);

        // Read raw lines from a socket. Each line is expected in the form
        // "word,count", e.g.:
        //   spark,1
        //   spark,5
        DataStreamSource<String> lines = env.socketTextStream("localhost", 8888);

        SingleOutputStreamOperator<WCBean> wordAndOne = lines.map(new MapFunction<String, WCBean>() {
            @Override
            public WCBean map(String line) throws Exception {
                String[] fields = line.split(",");
                // Guard against malformed input: without this check a line with
                // no comma kills the whole job with a bare
                // ArrayIndexOutOfBoundsException and no context.
                if (fields.length < 2) {
                    throw new IllegalArgumentException(
                            "Malformed input line, expected \"word,count\": " + line);
                }
                // trim() tolerates whitespace around the count ("spark, 1").
                return new WCBean(fields[0], Integer.parseInt(fields[1].trim()));
            }
        });

        // Register the stream as a temporary view so SQL can query it.
        tableEnvironment.createTemporaryView("tb_wc", wordAndOne);

        // Continuous word-count query: aggregate the running sum per word.
        // (The original "select *" merely echoed the input and performed no
        // counting, contradicting the class's stated purpose.)
        TableResult tableResult = tableEnvironment.executeSql(
                "SELECT word, SUM(counts) AS counts FROM tb_wc GROUP BY word");

        // print() consumes the unbounded result and blocks until the job ends.
        // executeSql() already submitted the job, so no env.execute() call is
        // needed here; the original env.execute() would fail with
        // "No operators defined in streaming topology" because the DataStream
        // pipeline registered via createTemporaryView has no sink of its own.
        tableResult.print();
    }

    /**
     * Word/count pair. Public no-arg constructor plus public fields make this
     * a valid Flink POJO, so it can back a table view with named columns
     * {@code word} and {@code counts}.
     */
    public static class WCBean {

        public String word;

        public Integer counts;

        public WCBean() {}

        public WCBean(String word, Integer counts) {
            this.word = word;
            this.counts = counts;
        }

        @Override
        public String toString() {
            return "WCBean{word='" + word + "', counts=" + counts + "}";
        }
    }
}
