package com.edata.bigdata.viewmain;

import com.edata.bigdata.basic.Manager;
import com.edata.bigdata.entity.Workspace;
import com.edata.bigdata.spark.HdfsConnector;
import com.edata.bigdata.spark.HdfsReader;
import com.edata.bigdata.spark.KafkaConnector;
import com.edata.bigdata.spark.KafkaWriter;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.sql.Dataset;


/**
 * Pipeline driver: reads raw records from HDFS, maps them to {@link Workspace}
 * beans, and publishes the resulting dataset to a Kafka topic.
 *
 * <p>Optional CLI arguments (each falls back to the historical hard-coded value
 * when absent or empty, so running with no arguments behaves as before):
 * <ol start="0">
 *   <li>HDFS entry point (host:port)</li>
 *   <li>HDFS input path</li>
 *   <li>Kafka bootstrap server (host:port)</li>
 *   <li>Kafka topic</li>
 * </ol>
 */
public class Hdfs2Kafka {

    // Defaults preserve the original hard-coded configuration.
    private static final String DEFAULT_HDFS_ENTRYPOINT = "172.16.11.97:8082";
    private static final String DEFAULT_INPUT_PATH = "/testing/workspace";
    private static final String DEFAULT_KAFKA_BOOTSTRAP = "172.16.11.97:9092";
    private static final String DEFAULT_KAFKA_TOPIC = "test";

    public static void main(String[] args) {
        String hdfsEntrypoint = argOrDefault(args, 0, DEFAULT_HDFS_ENTRYPOINT);
        String inputPath = argOrDefault(args, 1, DEFAULT_INPUT_PATH);
        String kafkaBootstrap = argOrDefault(args, 2, DEFAULT_KAFKA_BOOTSTRAP);
        String kafkaTopic = argOrDefault(args, 3, DEFAULT_KAFKA_TOPIC);

        Manager manager = new Manager();
        manager.APPNAME = "HDFSReadTesting";
        manager.MASTER = "local[*]";
        manager.createSparkSession();

        try {
            // Read raw lines from HDFS and map each line onto a Workspace bean.
            HdfsConnector hdfsConnector = new HdfsConnector();
            hdfsConnector.ENTRYPOINT = hdfsEntrypoint;
            HdfsReader hdfsReader = new HdfsReader(manager.SPARKSESSION);
            hdfsReader.hdfsConnector = hdfsConnector;
            JavaRDD<String> data = hdfsReader.findData(inputPath);
            Dataset<Workspace> datasets = hdfsReader.toBeanMapper(data, Workspace.class);

            // Publish the mapped dataset to the configured Kafka topic.
            KafkaConnector kafkaConnector = new KafkaConnector();
            kafkaConnector.BOOTSTRAP = kafkaBootstrap;
            kafkaConnector.TOPIC = kafkaTopic;

            KafkaWriter producer = new KafkaWriter();
            producer.kafkaConnector = kafkaConnector;
            producer.createDataFrameWriter(datasets).save();
        } finally {
            // Always release the Spark session, even when the pipeline fails;
            // the original code leaked the session on any exception.
            manager.stop();
        }
    }

    /** Returns {@code args[index]} when present and non-empty; otherwise {@code fallback}. */
    private static String argOrDefault(String[] args, int index, String fallback) {
        if (args != null && args.length > index
                && args[index] != null && !args[index].isEmpty()) {
            return args[index];
        }
        return fallback;
    }
}
