package com.luoy.hadoop;

import org.apache.flink.runtime.state.filesystem.FsStateBackend;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * @author weiwei
 * @Title: HudiTest
 * @ProjectName flink_hudi_metrics
 * @Description: TODO
 * @date 2022/7/21 16:27
 */
/**
 * Streaming pipeline: consumes JSON vehicle-sighting events from a Kafka topic
 * and continuously upserts them into a Hudi MERGE_ON_READ table on HDFS.
 *
 * <p>Flow: Kafka source table ({@code KafkaTable}) -&gt; {@code INSERT INTO} -&gt;
 * Hudi sink table ({@code t1}). Checkpointing drives Hudi commit instants, so it
 * must be enabled for data to become visible in the Hudi table.
 *
 * <p>NOTE(review): hostnames/paths (taia-*.novalocal, HDFS paths, metastore URI)
 * are environment-specific — confirm against the target cluster before running.
 */
public class HudiTest {

    public static void main(String[] args) throws Exception {
        // ---- Execution environment -------------------------------------------------
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        // Hudi derives its commit "instants" from Flink checkpoints, so checkpointing
        // is mandatory here; 20 s interval => roughly one Hudi commit per 20 s.
        env.enableCheckpointing(20000);
        env.setStateBackend(new FsStateBackend("hdfs://taia-1.novalocal:8020/user/weiwei/hudi_on_flink_cp"));

        // Blink planner table environment (configured env above is captured at job submission).
        EnvironmentSettings settings = EnvironmentSettings.newInstance().useBlinkPlanner().inStreamingMode().build();
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env, settings);

        /*
         * Manual smoke-test helpers (run on the cluster):
         *  /usr/sdp/5.0.0.RELEASE/kafka/bin/kafka-console-producer.sh --broker-list  taia-5.novalocal:9092 --topic flink_ww_01
         *  /usr/sdp/5.0.0.RELEASE/kafka/bin/kafka-console-consumer.sh --topic flink_ww_02  --bootstrap-server 172.22.5.15:9092  --from-beginning
         * Sample event:
         * {"plate_color_name": "蓝", "plate_no": "京A57561", "zone": "气象局大院", "location": "116.476915,39.809484", "createTime": "2021-09-21 15:38:30", "endTime": "2021-09-21 15:56:30"}
         */

        // Kafka source: JSON events, starting from the latest offset. Parse errors are
        // skipped and missing fields tolerated (lenient ingest).
        String sourceDdl = "CREATE TABLE KafkaTable (\n" +
                "  `plate_color_name` STRING,\n" +
                "  `plate_no` STRING,\n" +
                "  `zone` STRING,\n" +
                "  `location` STRING,\n" +
                "  `createTime` STRING,\n" +
                "  `endTime` STRING \n" +
                "  ) WITH (\n" +
                "  'connector' = 'kafka',\n" +
                "  'topic' = 'flink_ww_01',\n" +
                "  'properties.bootstrap.servers' = 'taia-5.novalocal:9092',\n" +
                "  'properties.group.id' = 'testGroup2',\n" +
                "  'scan.startup.mode' = 'latest-offset',\n" +
                "  'format' = 'json',\n" +
                "  'json.ignore-parse-errors' = 'true',\n" +
                "  'json.fail-on-missing-field' = 'false'\n" +
                ")";
        tableEnv.executeSql(sourceDdl);

        // Hudi sink: MOR table keyed on plate_no, deduplicated by createTime.
        // Hive sync is disabled ('hive_sync.enable' = 'false'); the remaining
        // hive_sync.* options (hms mode, metastore URI, table/db) are inert until it
        // is re-enabled. Console metrics reporting is on for local debugging.
        String sinkDdl = "CREATE TABLE t1(\n" +
                "  `plate_color_name` VARCHAR(10),\n" +
                "  `plate_no` VARCHAR(10),\n" +
                "  `zone` VARCHAR(10),\n" +
                "  `location` VARCHAR(10),\n" +
                "  `createTime` VARCHAR(10),\n" +
                "  `endTime` VARCHAR(10)\n" +
                ")\n" +
                "WITH (\n" +
                "  'connector' = 'hudi',\n" +
                "  'path' = 'hdfs://taia-1.novalocal:8020/user/weiwei/wwcs062',\n" +
                "  'hoodie.datasource.write.recordkey.field'='plate_no',\n" +
                "  'write.tasks' = '3',\n" +
                "  'compaction.tasks' = '1',\n" +
                "  'write.precombine.field'='createTime',\n" +
                "  'hive_sync.enable'= 'false',\n " +
                "  'hive_sync.mode'= 'hms', \n" +
                "  'hive_sync.metastore.uris' = 'thrift://172.22.5.13:9083',\n" +
                "  'hive_sync.table'= 'wwcs062' ,\n" +
                "  'hive_sync.db'= 'ww_cs',\n" +
                "  'hoodie.metrics.on'='true',\n" +
                "  'hoodie.metrics.reporter.type'='CONSOLE',\n" +
                "  'table.type' = 'MERGE_ON_READ'\n" +
                ")";
        tableEnv.executeSql(sinkDdl);

        // executeSql submits the streaming INSERT asynchronously; without await()
        // main would return immediately and a local MiniCluster would shut the job
        // down before it processed anything. await() blocks until the (unbounded)
        // job terminates.
        tableEnv.executeSql("insert into t1  select * from  KafkaTable").await();
    }
}
