package com.edata.bigdata.viewmain;

import com.edata.bigdata.basic.Manager;
import com.edata.bigdata.entity.Workspace;
import com.edata.bigdata.spark.streaming.StreamSink;
import com.edata.bigdata.spark.streaming.StreamSource;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.streaming.OutputMode;

import java.util.Properties;


/**
 * Streaming job: reads records from a Kafka topic, applies a sliding window
 * with a watermark, maps the windowed rows to {@link Workspace} beans, and
 * writes them to PostgreSQL via a JDBC upsert sink.
 *
 * <p>NOTE(review): broker addresses, HDFS paths, and database credentials are
 * hard-coded below — move them to external configuration (args/env/properties
 * file) before this leaves a test environment.
 */
public class StreamK2Pg {

    /**
     * Entry point: wires source &rarr; window/watermark &rarr; bean mapping
     * &rarr; JDBC sink, then blocks until the streaming query terminates.
     *
     * @param args unused
     * @throws Exception propagated from Spark session creation or the streaming query
     */
    public static void main(String[] args) throws Exception {
        Manager manager = new Manager();
        manager.APPNAME = "StreamReaderTesting";
        manager.MASTER = "local[*]";
        manager.createSparkSession();

        StreamSource streamSource = new StreamSource(manager.SPARKSESSION);
        Dataset<Row> srcStream = streamSource
                .createDataStreamReader(StreamSource.SourceType.KAFKA, kafkaSourceOptions())
                .load();

        // 5s tumbling window (window == slide) with a 10s watermark for late data.
        // The original passed "5 second"/"5 seconds" inconsistently; Spark parses
        // both to the same interval — normalized here for readability.
        Dataset<Row> windowedStream = streamSource.applyWindowAndWatermark(
                srcStream,
                "5 seconds",   // window duration
                "5 seconds",   // slide duration
                "10 seconds"   // watermark delay threshold
        );

        Dataset<Workspace> workspaceStream = streamSource.toBeanMapper(windowedStream, Workspace.class);

        StreamSink streamSink = new StreamSink();
        streamSink.createDataStreamWriter(workspaceStream,
                        StreamSink.SinkType.JDBC,
                        OutputMode.Append(),
                        "2 seconds",   // micro-batch trigger interval
                        jdbcSinkOptions(),
                        Workspace.class)
                .start()
                .awaitTermination();
    }

    /** Kafka reader options: broker address, topic, consumer group, and offset policy. */
    private static Properties kafkaSourceOptions() {
        Properties options = new Properties();
        options.setProperty("kafka.bootstrap.servers", "172.16.11.97:9092");
        options.setProperty("subscribe", "test");
        options.setProperty("group.id", "test");
        options.setProperty("startingOffsets", "earliest");
        options.setProperty("kafka.session.timeout.ms", "30000");
        return options;
    }

    /**
     * JDBC sink options: HDFS checkpoint location plus PostgreSQL connection
     * settings and the upsert conflict column.
     *
     * <p>SECURITY(review): credentials are committed in source — replace with
     * external configuration before production use.
     */
    private static Properties jdbcSinkOptions() {
        Properties options = new Properties();
        options.setProperty("checkpointLocation", "hdfs://172.16.11.97:8082/testing/checkpoint");
        options.setProperty("jdbc.url", "jdbc:postgresql://172.16.11.117:5432/elearning");
        options.setProperty("jdbc.user", "postgres");
        options.setProperty("jdbc.password", "123456");
        options.setProperty("jdbc.upsert.conflict.field.name", "id");
        return options;
    }
}
