
import org.apache.spark.SparkConf
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{DataFrame, Dataset, SaveMode, SparkSession}
object xuetangsql {

  /** Reads the cleaned diabetes table `tnb_cleaned` from MySQL, selects the rows
    * whose blood-sugar value (`xuetang`) exceeds 6.1, and overwrites the result
    * into the `xuetang_table` MySQL table.
    *
    * NOTE(review): DB credentials are hard-coded; consider moving them to
    * configuration before this leaves local development.
    */
  def main(args: Array[String]): Unit = {
    // 1. Spark configuration object (local single-JVM mode).
    val conf = new SparkConf().setAppName("xuetangsql").setMaster("local")
    // 2. SparkSession entry point.
    val spark: SparkSession = SparkSession.builder().config(conf).getOrCreate()

    try {
      // Load the source table over JDBC; `val` — the frame is never reassigned.
      val testtableDf: DataFrame = spark.read.format("jdbc")
        .option("url", "jdbc:mysql://localhost:3306/health_monitoring")
        .option("user", "root")
        .option("password", "root")
        .option("dbtable", "tnb_cleaned")
        .load()

      testtableDf.createOrReplaceTempView("tnb")

      // 6.1 is the threshold above which a reading is kept — TODO confirm the
      // clinical cutoff with the data owner.
      val result = spark.sql(
        """select id, xuetang
          |  from tnb
          | where xuetang > 6.1""".stripMargin)

      result.show()

      // Write back over JDBC; URL keeps the explicit :3306 port so it stays
      // consistent with the read URL above. Overwrite replaces the target table.
      result.write
        .format("jdbc")
        .option("url", "jdbc:mysql://localhost:3306/health_monitoring")
        .option("user", "root")
        .option("password", "root")
        .option("dbtable", "xuetang_table")
        .mode(SaveMode.Overwrite)
        .save()
    } finally {
      // Release the SparkContext even if the job fails.
      spark.stop()
    }
  }
}
