package com.flinksql.test;

import com.flinksql.bean.WaterSensor;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.table.descriptors.Elasticsearch;
import org.apache.flink.table.descriptors.Json;
import org.apache.flink.table.descriptors.Schema;

import static org.apache.flink.table.api.Expressions.$;

/**
 * @author: Lin
 * @create: 2021-06-16 10:21
 * @description: Flink Table API example of sinking to Elasticsearch; shows both the
 *               legacy 1.10 connect-descriptor style and the SQL DDL style.
 **/
public class FlinkTableAPI_Test7 {
    public static void main(String[] args) throws Exception {

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment().setParallelism(1);
        DataStreamSource<String> source = env.socketTextStream("hadoop102", 9999);

        // Parse each incoming "id,ts,vc" line into a WaterSensor POJO.
        // A plain MapFunction is sufficient: this function uses no rich lifecycle
        // hooks (open/close) and no RuntimeContext, so RichMapFunction adds nothing.
        SingleOutputStreamOperator<WaterSensor> mapDS = source.map(new MapFunction<String, WaterSensor>() {
            @Override
            public WaterSensor map(String value) throws Exception {
                String[] split = value.split(",");
                return new WaterSensor(split[0]
                        , Long.parseLong(split[1])
                        , Integer.parseInt(split[2]));
            }
        });

        // 1. Create the table execution environment.
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);
        // 2. Convert the stream into a dynamic table.
        Table table = tableEnv.fromDataStream(mapDS);
        // 3. Query the dynamic table (mind the clause order: where, groupBy, aggregate, select).
        Table selectTable = table.select($("id"), $("ts"), $("vc"));

        // 4. Register the output (sink) table.
        // Legacy 1.10 connect-descriptor style.
        tableEnv.connect(new Elasticsearch()
                .index("sensor")
                .documentType("_doc")
                .version("7")
                .host("localhost", 9200, "http")
                .bulkFlushMaxActions(1)) // flush on every record — demo setting only
                .withSchema(new Schema()
                        .field("id", DataTypes.STRING())
                        .field("ts", DataTypes.BIGINT())
                        .field("vc", DataTypes.INT()))
                .withFormat(new Json())
                .inAppendMode()
                .createTemporaryTable("sensor");

        // Equivalent SQL DDL style (left here intentionally as the alternative this
        // example demonstrates):
/*        String str = "CREATE TABLE sensor (" +
                "  id STRING," +
                "  ts BIGINT," +
                "  vc INT," +
                "  PRIMARY KEY (id) NOT ENFORCED" +
                ") WITH (" +
                "  'connector' = 'elasticsearch-7'," +
                "  'hosts' = 'http://localhost:9200'," +
                "  'index' = 'users'," +
                "  'sink.bulk-flush.max-actions' = '1')";
        tableEnv.executeSql(str);*/

/*        DataStream<Row> rowDataStream = tableEnv.toAppendStream(selectTable, Row.class);
        rowDataStream.print();*/

        // 5. Write the query result into the sink table.
        // executeInsert() submits the job itself, so env.execute() is not required.
        selectTable.executeInsert("sensor");
    }
}
