// NOTE(review): This entire file is intentionally commented out. It is a disabled
// demo: a Flink DataStream streaming read of a Hudi MERGE_ON_READ table ("t1" on
// HDFS) built with the HoodiePipeline builder API, printing each RowData to stdout.
// It references Hudi/Flink classes that are presumably not on this module's
// classpath — likely why it was disabled — so do NOT uncomment it as-is.
//
// Review notes, in case this code is ever re-enabled:
//   - basePath hardcodes a private IP (hdfs://192.168.197.131:8020/...); it should
//     come from configuration or program arguments, not source code.
//   - The disabled READ_START_COMMIT line sets the value to "'20210316134557'",
//     which embeds literal single quotes inside the Java string. FlinkOptions
//     values are plain strings, so the correct value is "20210316134557".
//   - targetTable and basePath are mutable statics; they should be final (or read
//     from args[]), and TAG is effectively the Flink job name passed to execute().
//   - READ_AS_STREAMING=true makes builder.source(env) an unbounded source, so
//     env.execute(TAG) runs until cancelled.
//
//package com.imooc.flink.hudi;
//
//import org.apache.flink.streaming.api.datastream.DataStream;
//import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
//import org.apache.flink.table.api.Table;
//import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
//import org.apache.flink.table.data.RowData;
//import org.apache.flink.types.Row;
//import org.apache.hudi.common.model.HoodieTableType;
//import org.apache.hudi.configuration.FlinkOptions;
//import org.apache.hudi.util.HoodiePipeline;
//
//import java.util.HashMap;
//import java.util.Map;
//
///**
// * @author: Chen Yixing
// * @since: 2023/10/31 19:31:54
// * @description:
// */
//public class HudiDataStreamApp {
//    private final static String TAG = "HudiDataStreamApp";
//    private static String targetTable = "t1";
//    private static String basePath = "hdfs://192.168.197.131:8020/user/hudi/warehouse/hudi_db/t1";
//
//    public static void main(String[] args) throws Exception {
//        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
//
//        Map<String, String> options = new HashMap<>();
//        options.put(FlinkOptions.PATH.key(), basePath);
//        options.put(FlinkOptions.TABLE_TYPE.key(), HoodieTableType.MERGE_ON_READ.name());
//        options.put(FlinkOptions.READ_AS_STREAMING.key(), "true"); // this option enable the streaming read
////        options.put(FlinkOptions.READ_START_COMMIT.key(), "'20210316134557'"); // specifies the start commit instant time
//
//        HoodiePipeline.Builder builder = HoodiePipeline.builder(targetTable)
//                .column("uuid VARCHAR(20)")
//                .column("name VARCHAR(10)")
//                .column("age INT")
//                .column("ts TIMESTAMP(3)")
//                .column("`partition` VARCHAR(20)")
//                .pk("uuid")
//                .partition("partition")
//                .options(options);
//
//        DataStream<RowData> rowDataDataStream = builder.source(env);
//        rowDataDataStream.print();
//        env.execute(TAG);
//    }
//}
