package com.deep.flink;

import org.apache.flink.streaming.api.functions.source.SourceFunction;
import org.apache.flink.table.data.GenericRowData;
import org.apache.flink.table.data.RowData;

import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.data.util.DataFormatConverters;
import org.apache.flink.table.types.logical.*;
import org.apache.flink.table.types.utils.TypeConversions;
import org.apache.flink.types.Row;
import org.apache.hudi.common.model.HoodieTableType;
import org.apache.hudi.configuration.FlinkOptions;
import org.apache.hudi.util.HoodiePipeline;

import java.time.LocalDateTime;
import java.util.*;


/**
 * Demo Flink job: generates random rows from an in-memory source and sinks
 * them into a Hudi MERGE_ON_READ table at {@code file:///tmp/t1} via
 * {@link HoodiePipeline}.
 */
public class Test152 {

    /**
     * Logical row type of the generated stream: uuid, name, age, ts, partition.
     *
     * <p>The timestamp uses precision 3 so the source row type stays consistent
     * with the {@code ts TIMESTAMP(3)} column declared in the pipeline DDL in
     * {@link #main}.
     *
     * @return the {@link RowType} describing one generated record
     */
    public static RowType getKafkaRowType() {
        return new RowType(Arrays.asList(
                new RowType.RowField("uuid", new VarCharType(VarCharType.MAX_LENGTH)),
                new RowType.RowField("name", new VarCharType(VarCharType.MAX_LENGTH)),
                new RowType.RowField("age", new IntType()),
                // Precision 3 matches the TIMESTAMP(3) column in the Hudi table schema.
                new RowType.RowField("ts", new TimestampType(3)),
                new RowType.RowField("partition", new VarCharType(VarCharType.MAX_LENGTH))));
    }

    /** Converts external {@link Row} values into Flink's internal {@link RowData} format. */
    public static final DataFormatConverters.DataFormatConverter<RowData, Row> CONVERTER =
            DataFormatConverters.getConverterForDataType(
                    TypeConversions.fromLogicalToDataType(getKafkaRowType())
            );

    public static void main(String[] args) throws Exception {

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        String targetTable = "t1";
        String basePath = "file:///tmp/t1";

        Map<String, String> options = new HashMap<>();
        options.put(FlinkOptions.PATH.key(), basePath);
        options.put(FlinkOptions.TABLE_TYPE.key(), HoodieTableType.MERGE_ON_READ.name());
        // "ts" resolves duplicate records with the same key: highest ts wins.
        options.put(FlinkOptions.PRECOMBINE_FIELD.key(), "ts");

        // Unbounded demo source: emits one random record per second until cancelled.
        DataStream<RowData> dataStream = env.addSource(new SourceFunction<RowData>() {
            // volatile: cancel() is called from a different thread than run(),
            // so the write must be visible to the emitting loop.
            private volatile boolean running = true;

            @Override
            public void run(SourceContext<RowData> sourceContext) throws Exception {
                Random random = new Random();
                Integer[] ages = {1, 2, 3, 4};
                String[] users = {"mary", "Alice", "Bob", "Cary"};
                String[] urls = {"./home", "./cart", "./prod?id=100", "./prod?id=10"};

                while (running) {
                    String uuid = users[random.nextInt(users.length)];
                    String name = urls[random.nextInt(urls.length)];
                    Integer age = ages[random.nextInt(ages.length)];
                    LocalDateTime currentDateTime = LocalDateTime.now();
                    String partition = uuid.toLowerCase();

                    // Field order must match getKafkaRowType(): uuid, name, age, ts, partition.
                    Row row = Row.of(uuid, name, age, currentDateTime, partition);
                    sourceContext.collect(CONVERTER.toInternal(row));

                    // Throttle so the demo does not busy-loop and flood the sink.
                    Thread.sleep(1000);
                }
            }

            @Override
            public void cancel() {
                running = false;
            }
        });

        // Table schema must agree with getKafkaRowType() above.
        HoodiePipeline.Builder builder = HoodiePipeline.builder(targetTable)
                .column("uuid VARCHAR(20)")
                .column("name VARCHAR(10)")
                .column("age INT")
                .column("ts TIMESTAMP(3)")
                .column("`partition` VARCHAR(20)")
                .pk("uuid")
                .partition("partition")
                .options(options);

        builder.sink(dataStream, false); // The second parameter indicating whether the input data stream is bounded
        env.execute("Api_Sink");
    }
}
