package com.edata.bigdata.viewmain;

import com.edata.bigdata.basic.Manager;
import com.edata.bigdata.entity.Workspace;
import com.edata.bigdata.spark.PgConnector;
import com.edata.bigdata.spark.PgReader;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;


/**
 * Smoke-test driver: opens a local Spark session, reads every row of the
 * {@code workspace} table from Postgres, maps the rows to {@link Workspace}
 * beans, and prints the first 10.
 *
 * <p>Run directly via {@code main}; no arguments are used.
 */
public class PgRead {
    public static void main(String[] args) {
        Manager manager = new Manager();
        manager.APPNAME = "PGReadTesting";
        manager.MASTER = "local[*]";   // local mode, all cores — test-only config
        manager.createSparkSession();

        // Ensure the SparkSession is always released, even if the read fails.
        try {
            // NOTE(review): credentials are hard-coded in source; move to
            // configuration/environment before using outside local testing.
            PgConnector connector = new PgConnector();
            connector.JDBC_IP = "172.16.11.117";
            connector.JDBC_PORT = "5432";
            connector.JDBC_DATABASE = "elearning";
            connector.JDBC_USER = "postgres";
            connector.JDBC_PASSWORD = "123456";

            PgReader pgReader = new PgReader(manager.SPARKSESSION);
            pgReader.connector = connector;

            String query = "select * from workspace";
            Dataset<Row> datas = pgReader.findData(query);
            // Convert the generic Row dataset into typed Workspace beans.
            Dataset<Workspace> workspaces = pgReader.toBeanMapper(datas, Workspace.class);
            workspaces.show(10);
        } finally {
            manager.stop();
        }
    }
}
