package com.flink.paimon;

import org.apache.paimon.catalog.Catalog;
import org.apache.paimon.catalog.Identifier;
import org.apache.paimon.flink.FlinkCatalogFactory;
import org.apache.paimon.options.Options;
import org.apache.paimon.schema.Schema;
import org.apache.paimon.types.DataTypes;

/**
 * CreateTable — creates a Paimon table on HDFS using the Paimon Java API
 * (via a filesystem catalog created through the Flink catalog factory).
 *
 * @author caizhiyang
 * @since 2024-04-19
 */
public class CreateTable {

    /**
     * Creates a Paimon table ({@code default.remote_table2}) using the Paimon Java API.
     *
     * <p>The table has two columns ({@code order_id BIGINT}, {@code price DOUBLE})
     * with {@code order_id} as the primary key, stored in a filesystem catalog
     * whose warehouse lives on HDFS.
     *
     * @param args unused command-line arguments
     * @throws Exception if the catalog cannot be loaded or the table already
     *         exists ({@code ignoreIfExists = false} below)
     */
    public static void main(String[] args) throws Exception {
        // HDFS access needs the hadoop client dependency and an explicit user;
        // Hadoop's UserGroupInformation reads HADOOP_USER_NAME from this
        // system property (or the environment variable of the same name).
        System.setProperty("HADOOP_USER_NAME", "hadoop");

        // Table schema: declare the columns, then mark order_id as primary key.
        Schema schema = Schema.newBuilder()
                .column("order_id", DataTypes.BIGINT())
                .column("price", DataTypes.DOUBLE())
                .primaryKey("order_id")
                .build();

        // database = "default", table = "remote_table2"
        Identifier identifier = Identifier.create("default", "remote_table2");

        Options catalogOptions = new Options();
        // NOTE(review): a "name" option has no effect here — the catalog name
        // only matters when the catalog is registered in Flink SQL.
        catalogOptions.set("warehouse", "hdfs://node1:8020/paimon/fs");

        Catalog.Loader catalogLoader =
                () -> FlinkCatalogFactory.createPaimonCatalog(catalogOptions);

        // Catalog is AutoCloseable: use try-with-resources so filesystem
        // handles are released even when table creation fails.
        try (Catalog catalog = catalogLoader.load()) {
            // ignoreIfExists = false: throws TableAlreadyExistException if the
            // table is already present.
            catalog.createTable(identifier, schema, false);
        }
    }
}
