/**
 * Demo class for the Greenplum-Spark Connector (GSC): reads a Greenplum table into
 * a Spark {@code Dataset}, runs a couple of actions ({@code take}, {@code count}),
 * and writes a filtered subset back to a timestamped table.
 *
 * <p>NOTE(review): the entire demo body is currently disabled (kept below as a plain
 * block comment). As written it will not compile if re-enabled verbatim: it calls a
 * {@code gpInfo()} helper that is not defined in this file — restore or import it
 * before uncommenting.
 */
public class GSCDemo {

    // Target table name used by the disabled demo's write step (never reassigned,
    // so declared final). Referenced as `table` in the commented code below.
    private static final String table = "y_53_1608106352039";

    /**
     * Entry point. Currently a no-op: the original demo logic is preserved below
     * as a disabled block comment for reference.
     *
     * @param args command-line arguments; the disabled code reads {@code args[1]}
     *             as the source table name — NOTE(review): the first argument is
     *             {@code args[0]}, so this looks like an off-by-one; confirm the
     *             intended invocation before re-enabling.
     */
    public static void main(String[] args) {

        /*
         * Disabled demo body — changed from a stray Javadoc comment to a plain
         * block comment so doc tooling does not pick it up.
         *
         * System.out.println("####################################################begin");
         * System.out.println(System.currentTimeMillis());
         *
         * SparkSession spark = SparkSession
         *     .builder()
         *     .appName("GSC_Test")
         *     .getOrCreate();
         * String tableName = args[1];   // NOTE(review): likely should be args[0]
         * Dataset<Row> dataset = spark.read().format("greenplum")
         *     .options(gpInfo())        // gpInfo() is not defined in this file
         *     .option("dbtable", tableName)
         *     .option("partitionColumn", "_record_id_")
         *     .load();
         *
         * System.out.println("#####################################################end");
         * System.out.println(System.currentTimeMillis());
         *
         * dataset.take(1);
         *
         * System.out.println("#####################################################end2");
         * System.out.println(System.currentTimeMillis());
         * dataset.count();
         *
         * System.out.println("#####################################################end2");
         * System.out.println(System.currentTimeMillis());
         *
         * Dataset<Row> updateDataset = dataset.filter("id > 100");
         * updateDataset.write().format("greenplum")
         *     .options(gpInfo()).option("dbtable", table + "_" + System.currentTimeMillis())
         *     .mode(SaveMode.Append)
         *     .save();
         *
         * spark.close();
         */
    }

}
