package com.dada.cn.paimon;

import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.types.DataType;
import org.apache.flink.types.Row;
import org.apache.flink.types.RowKind;
import org.apache.paimon.catalog.Catalog;
import org.apache.paimon.catalog.Identifier;
import org.apache.paimon.flink.FlinkCatalogFactory;
import org.apache.paimon.flink.sink.FlinkSinkBuilder;
import org.apache.paimon.options.Options;
import org.apache.paimon.schema.Schema;
import org.apache.paimon.table.Table;

/**
 * One-shot utility that provisions a Paimon database and a primary-keyed,
 * partitioned table in a local filesystem warehouse.
 */
public class CreateTable {

    public static void main(String[] args) throws Exception {
        writeTo();
    }

    /**
     * Creates (idempotently) the database {@code my_db} and the table
     * {@code my_db.T} in a Paimon catalog backed by the local warehouse
     * directory {@code /opt/warehouse}.
     *
     * <p>The table has three columns ({@code apply_no}, {@code partition_key},
     * {@code apply_id}), is partitioned by {@code partition_key}, and uses
     * the composite primary key ({@code apply_no}, {@code partition_key}).
     *
     * @throws Exception if the catalog cannot be opened or the database/table
     *     creation fails
     */
    public static void writeTo() throws Exception {
        Options catalogOptions = new Options();
        catalogOptions.set("warehouse", "/opt/warehouse");

        // Catalog is AutoCloseable — close it so any held resources
        // (connections, file handles) are released when we are done.
        try (Catalog catalog = FlinkCatalogFactory.createPaimonCatalog(catalogOptions)) {
            // Second argument (ignoreIfExists = true): no-op if it already exists.
            catalog.createDatabase("my_db", true);

            Schema.Builder schemaBuilder = Schema.newBuilder();
            // Declare columns first; key declarations below reference them by name.
            schemaBuilder.column("apply_no", org.apache.paimon.types.DataTypes.STRING());
            schemaBuilder.column("partition_key", org.apache.paimon.types.DataTypes.STRING());
            schemaBuilder.column("apply_id", org.apache.paimon.types.DataTypes.INT());
            // Paimon requires every partition key to also be part of the primary key.
            schemaBuilder.primaryKey("apply_no", "partition_key");
            schemaBuilder.partitionKeys("partition_key");
            Schema schema = schemaBuilder.build();

            Identifier identifier = Identifier.create("my_db", "T");
            // Third argument (ignoreIfExists = true): no-op if the table already exists.
            catalog.createTable(identifier, schema, true);
        }
    }
}