package com.lkh.real;

import com.lkh.map.WordCountFlatMapFuncation;
import com.lkh.sink.WordCountRedisSink;
import org.apache.flink.api.java.tuple.Tuple3;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.java.StreamTableEnvironment;
import org.apache.flink.table.descriptors.Csv;
import org.apache.flink.table.descriptors.Kafka;
import org.apache.flink.table.descriptors.Schema;
import org.apache.flink.types.Row;

/**
 * Streaming word-count job: reads delimited comment records from the Kafka
 * topic {@code comment1} via the Table API, tokenizes each comment into
 * (sentenceId, word, 1) tuples, keeps a running count per (sentenceId, word)
 * key, and writes the results to Redis.
 */
public class ComputeWordCount {
    public static void main(String[] args) throws Exception {

        // Streaming execution environment plus its Table API wrapper.
        final StreamExecutionEnvironment streamEnv =
                StreamExecutionEnvironment.getExecutionEnvironment();
        final StreamTableEnvironment tableEnvironment = StreamTableEnvironment.create(streamEnv);

        // Register a Kafka-backed table over the raw comment records.
        // Records are '|'-delimited CSV; all fields are ingested as strings.
        tableEnvironment
                .connect(
                        new Kafka()
                                .version("universal")
                                .topic("comment1")
                                .startFromEarliest()
                                .property("zookeeper.connect", "master:2181,node1:2181,node2:2181")
                                .property("bootstrap.servers", "node2:9092")
                                // The group id must be changed on each run; after a change
                                // consumption restarts from the beginning of the topic.
                                .property("group.id", "2"))
                .withFormat(new Csv().fieldDelimiter('|'))
                .withSchema(
                        new Schema()
                                .field("id", DataTypes.STRING())
                                .field("sentId", DataTypes.STRING())
                                .field("date", DataTypes.STRING())
                                .field("likeCount", DataTypes.STRING())
                                .field("userId", DataTypes.STRING())
                                .field("text", DataTypes.STRING()))
                .inAppendMode()
                .createTemporaryTable("t_comment");

        // Project just the sentence id and the comment text from the table.
        Table projected = tableEnvironment.from("t_comment").select("sentId,text");
        DataStream<Row> comments = tableEnvironment.toAppendStream(projected, Row.class);

        // Tokenize each comment row into (sentId, word, 1) tuples.
        SingleOutputStreamOperator<Tuple3<String, String, Integer>> tokenized =
                comments.flatMap(new WordCountFlatMapFuncation());

        // Running count per (sentId, word) key: key on fields 0 and 1, sum field 2.
        SingleOutputStreamOperator<Tuple3<String, String, Integer>> counted =
                tokenized.keyBy(0, 1).sum(2);

        // Persist the continuously-updated counts to Redis.
        counted.addSink(new WordCountRedisSink());

        streamEnv.execute();
    }
}
