package com.edata.bigdata.viewmain;

import com.edata.bigdata.basic.Manager;
import com.edata.bigdata.entity.Workspace;
import com.edata.bigdata.spark.HdfsConnector;
import com.edata.bigdata.spark.HdfsReader;
import com.edata.bigdata.spark.PgConnector;
import com.edata.bigdata.spark.PgWriter;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.sql.Dataset;


/**
 * Batch driver: reads newline-delimited records from HDFS, maps them onto the
 * {@link Workspace} bean, and bulk-inserts them into a PostgreSQL table.
 *
 * <p>Connection settings default to the values below but each can be
 * overridden positionally on the command line:
 * {@code Hdfs2Pg [hdfsEntrypoint] [hdfsPath] [pgIp] [pgPort] [pgDatabase] [pgUser] [pgPassword]}.
 * Running with no arguments behaves exactly as before.
 */
public class Hdfs2Pg {

    /**
     * Runs the HDFS-to-PostgreSQL pipeline.
     *
     * @param args optional positional overrides for the connection defaults
     *             (see class Javadoc); missing/blank entries keep the default
     */
    public static void main(String[] args) {
        // Positional overrides with hard-coded fallbacks (kept for backward compatibility).
        String hdfsEntrypoint = argOrDefault(args, 0, "172.16.11.97:8082");
        String hdfsPath       = argOrDefault(args, 1, "/testing/workspace");
        String pgIp           = argOrDefault(args, 2, "172.16.11.117");
        String pgPort         = argOrDefault(args, 3, "5432");
        String pgDatabase     = argOrDefault(args, 4, "elearning");
        String pgUser         = argOrDefault(args, 5, "postgres");
        // NOTE(review): default credentials are hard-coded; consider moving them
        // to environment variables or a secrets store before production use.
        String pgPassword     = argOrDefault(args, 6, "123456");

        Manager manager = new Manager();
        manager.APPNAME = "FromHDFSToPG";
        manager.MASTER = "local[*]";
        manager.createSparkSession();

        // Ensure the Spark session is always stopped, even when a stage throws;
        // previously an exception anywhere in the pipeline leaked the session.
        try {
            // Source: raw lines from HDFS, mapped onto the Workspace bean.
            HdfsConnector hdfsConnector = new HdfsConnector();
            hdfsConnector.ENTRYPOINT = hdfsEntrypoint;

            HdfsReader hdfsReader = new HdfsReader(manager.SPARKSESSION);
            hdfsReader.hdfsConnector = hdfsConnector;
            JavaRDD<String> data = hdfsReader.findData(hdfsPath);
            Dataset<Workspace> datasets = hdfsReader.toBeanMapper(data, Workspace.class);

            // Sink: PostgreSQL via JDBC.
            PgConnector pgConnector = new PgConnector();
            pgConnector.JDBC_IP = pgIp;
            pgConnector.JDBC_PORT = pgPort;
            pgConnector.JDBC_DATABASE = pgDatabase;
            pgConnector.JDBC_USER = pgUser;
            pgConnector.JDBC_PASSWORD = pgPassword;

            PgWriter pgWriter = new PgWriter(manager.SPARKSESSION);
            pgWriter.connector = pgConnector;
            pgWriter.insert(datasets, Workspace.class);
        } finally {
            manager.stop();
        }
    }

    /**
     * Returns {@code args[index]} when present and non-blank, otherwise the default.
     */
    private static String argOrDefault(String[] args, int index, String defaultValue) {
        if (args != null && index < args.length
                && args[index] != null && !args[index].trim().isEmpty()) {
            return args[index];
        }
        return defaultValue;
    }
}
