package com.atguigu.flink.sql;

import com.atguigu.flink.function.WaterSensorMapFunction;
import com.atguigu.flink.pojo.WaterSensor;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * Created by Smexy on 2022/12/21
 *
 */
/**
 * Flink SQL demo: registers a Kafka-backed source table and continuously
 * prints every row read from the topic.
 *
 * <p>Requires a reachable Kafka broker at {@code hadoop103:9092} with topic
 * {@code topicD} carrying JSON records shaped like
 * {@code {"id": ..., "ts": ..., "vc": ...}}.
 */
public class Demo4_ReadKafka
{
    public static void main(String[] args) {

        // Single-task pipeline so printed rows appear in one ordered stream.
        StreamExecutionEnvironment environment = StreamExecutionEnvironment.getExecutionEnvironment();
        environment.setParallelism(1);

        // Bridge the DataStream runtime into the Table/SQL API.
        StreamTableEnvironment tableEnvironment = StreamTableEnvironment.create(environment);

        /*
            DDL for the source table: the 'kafka' connector consumes topic
            'topicD' as consumer group 'test1', resuming from committed
            group offsets, and decodes each record as JSON.
         */
        String sourceTableDdl =
            " CREATE TABLE t1( id string, ts bigint , vc int  ) "
                + "                       WITH (  "
                + "                         'connector' = 'kafka', "
                + "                          'properties.bootstrap.servers' = 'hadoop103:9092'    , "
                + "                           'properties.group.id' = 'test1' ,      "
                + "                           'scan.startup.mode' = 'group-offsets' ,"
                + "                         'topic' = 'topicD',   "
                + "                         'format' = 'json'    "
                + "                            )      ";

        // Register the table in the catalog (connects to the external Kafka cluster).
        tableEnvironment.executeSql(sourceTableDdl);

        // Continuous query over the Kafka table; execute() submits the
        // streaming job and print() emits each row to stdout.
        tableEnvironment
                .sqlQuery("select * from t1 ")
                .execute()
                .print();
    }
}
