package SQL_L

import org.apache.spark.sql.{SaveMode, SparkSession}
import org.apache.spark.sql.types.{FloatType, IntegerType, StringType, StructField, StructType}

/**
 * Spark job: reads a tab-delimited student dataset from HDFS, keeps
 * students with age > 50, and overwrites the MySQL table `spark.student`
 * via JDBC.
 *
 * BUG FIX: declared as an `object` (was `class`) — the JVM / spark-submit
 * require a static `main`, which in Scala only an object provides; a
 * `class` main is never invoked as an entry point.
 */
object JDBC {
  def main(args: Array[String]): Unit = {
    // 1. Create the SparkSession (local mode, 6 threads).
    val spark = SparkSession.builder()
      .appName("Mysql")
      .master("local[6]")
      .getOrCreate()

    // Enables the symbol column syntax ('age) used in the filter below.
    import spark.implicits._

    // 2. Read the dataset with an explicit schema: name, age, gpa.
    val schema = StructType(
      List(
        StructField("name", StringType),
        StructField("age", IntegerType),
        StructField("gpa", FloatType)
      )
    )
    val df = spark.read
      .option("delimiter", "\t")
      .schema(schema)
      .csv("hdfs:///dataset/student.csv")

    // 3. Filter: keep only students older than 50.
    val result = df.where('age > 50)

    // 4. Write the result to MySQL.
    //    BUG FIX: the original chain ended at .mode(...) and never called
    //    an action — without .save() the JDBC write never executes.
    result.write
      .format("jdbc")
      .option("url", "jdbc:mysql://node1:3306/spark")
      .option("dbtable", "student")
      .option("user", "spark")
      .option("password", "123456")
      .mode(SaveMode.Overwrite)
      .save()

    // Release Spark resources before exiting.
    spark.stop()
  }
}
