package com.edata.bigdata.viewmain;

import com.edata.bigdata.basic.Manager;
import com.edata.bigdata.entity.Workspace;
import com.edata.bigdata.spark.HdfsConnector;
import com.edata.bigdata.spark.HdfsReader;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.sql.Dataset;

/**
 * Manual smoke-test driver: reads records from HDFS with Spark, maps them onto
 * {@link Workspace} beans, and prints the resulting Dataset.
 *
 * <p>Runs locally ({@code local[*]}) against a hard-coded HDFS endpoint and path,
 * so it is intended for ad-hoc testing rather than production use.
 */
public class HdfsRead {

    /** HDFS namenode endpoint used for this test run. */
    private static final String HDFS_ENTRYPOINT = "172.16.11.97:8082";

    /** HDFS path holding the workspace test data. */
    private static final String DATA_PATH = "/testing/workspace";

    public static void main(String[] args) {
        Manager manager = new Manager();
        manager.APPNAME = "HDFSReadTesting";
        manager.MASTER = "local[*]";
        manager.createSparkSession();

        // Ensure the SparkSession is always stopped, even if the read,
        // bean mapping, or show() below throws — otherwise the session
        // (and its local executor threads) would be leaked.
        try {
            HdfsConnector hdfsConnector = new HdfsConnector();
            hdfsConnector.ENTRYPOINT = HDFS_ENTRYPOINT;

            // NOTE(review): HdfsReader.findData presumably reads text lines from
            // the given HDFS path — confirm against HdfsReader's implementation.
            HdfsReader hdfsReader = new HdfsReader(manager.SPARKSESSION);
            hdfsReader.hdfsConnector = hdfsConnector;
            JavaRDD<String> data = hdfsReader.findData(DATA_PATH);

            // Map raw lines onto Workspace beans and print a sample to stdout.
            Dataset<Workspace> datasets = hdfsReader.toBeanMapper(data, Workspace.class);
            datasets.show();
        } finally {
            manager.stop();
        }
    }
}
