package io.example.demo

import java.util.Properties

import org.apache.spark.sql.Row
import org.apache.spark.sql.types.{IntegerType, StructField, StructType}
import org.apache.spark.{SparkConf, SparkContext}
import org.postgresql.Driver
/**
 * Minimal Spark SQL demo: reads table `t1` from PostgreSQL over JDBC,
 * registers it as a temp view, runs an aggregate query, and prints the result.
 */
object SparkSql {

  def main(args: Array[String]): Unit = {
    import org.apache.spark.sql.SparkSession

    // JDBC connection options. NOTE(review): credentials are hard-coded for
    // demo purposes only — in real code load them from configuration or a
    // secrets store, never from source.
    val pg = Map(
      "url" -> "jdbc:postgresql://myserver:5432/test",
      "driver" -> classOf[Driver].getName,
      "dbtable" -> "t1",
      "user" -> "postgres",
      "password" -> "root"
    )

    val spark = SparkSession
      .builder()
      .master("local[1]")
      .appName("Spark SQL basic example")
      .getOrCreate()

    try {
      // Load table t1 from PostgreSQL as a DataFrame.
      val df = spark.read.format("jdbc").options(pg).load()

      // Example: build a DataFrame from an in-memory RDD and append it back:
      //   val rdd = spark.sparkContext.parallelize(Array(Row(3, 3, 3)))
      //   val schema = StructType(Array(
      //     StructField("c1", IntegerType),
      //     StructField("c2", IntegerType, nullable = true),
      //     StructField("c3", IntegerType, nullable = false)))
      //   val toWrite = spark.createDataFrame(rdd, schema)
      //   toWrite.write.mode("append").format("jdbc").options(pg).save()

      // Register as a temp view so it can be queried with SQL.
      df.createOrReplaceTempView("t1")

      // spark.sql is the SparkSession-era API; spark.sqlContext.sql is the
      // legacy SQLContext path kept only for backward compatibility.
      val result = spark.sql("select max(c1) from t1")

      // collect() brings the (single) result row to the driver before printing.
      // A bare DataFrame.foreach(println) would run println on the executors,
      // making the output invisible on the driver in any non-local deployment.
      result.collect().foreach(row => println(row(0)))
    } finally {
      // Always release Spark resources, even if the query fails.
      spark.stop()
    }
  }
}
