package com.atguigu.flink.sql;

import com.atguigu.flink.function.WaterSensorMapFunction;
import com.atguigu.flink.pojo.WaterSensor;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * Created by Smexy on 2022/12/21
 *
 */
/**
 * Demo: writing a continuously-updated aggregation result to Kafka via the
 * {@code upsert-kafka} connector. The grouped query below produces an
 * update/retract stream, which a plain {@code kafka} sink cannot accept;
 * {@code upsert-kafka} encodes updates as key-based upserts instead.
 */
public class Demo6_WriteUpsertKafka2
{
    public static void main(String[] args) {

        // Streaming environment with a single parallel task (demo setting).
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // Read raw lines from a socket and parse each into a WaterSensor POJO.
        SingleOutputStreamOperator<WaterSensor> sensorStream =
            env.socketTextStream("hadoop103", 8888)
               .map(new WaterSensorMapFunction());

        // Expose the stream to SQL as the temporary view "source".
        Table inputTable = tableEnv.fromDataStream(sensorStream);
        tableEnv.createTemporaryView("source", inputTable);

        /*
         * NOTE on the PRIMARY KEY clause in the DDL below.
         *
         * Declaring the key as ENFORCED fails with:
         *   org.apache.flink.table.api.ValidationException:
         *   "Flink doesn't support ENFORCED mode for PRIMARY KEY constraint."
         *
         * ENFORCED / NOT ENFORCED controls whether constraint checks are
         * performed on incoming/outgoing data. Flink does not own the data
         * (Kafka does), so it cannot guarantee key uniqueness on write —
         * the only supported mode is NOT ENFORCED.
         */
        String upsertKafkaDdl = " CREATE TABLE t1( id string , ts bigint , sumVC double ," +
            "                                        primary key (id,ts) NOT ENFORCED ) " +
            "                       WITH (  " +
            "                         'connector' = 'upsert-kafka', " +
            "                          'properties.bootstrap.servers' = 'hadoop103:9092'    , " +
            "                         'topic' = 'topicE',   " +
            "                         'key.format' = 'json'," +
            "                         'value.format' = 'json'    " +
            "                            )      ";

        // Register the sink table backed by the upsert-kafka connector.
        tableEnv.executeSql(upsertKafkaDdl);

        // Continuously aggregate vc per (id, ts) and upsert results into Kafka.
        // executeSql on an INSERT submits the job itself; no env.execute() needed.
        tableEnv.executeSql(" insert into t1 select  id ,ts ,sum(vc) a  from source group by id, ts ");
    }
}
