import org.apache.spark.sql.{SparkSession, DataFrame}

/**
 * Example job: appends sample student rows to a MySQL `students` table
 * through Spark's JDBC writer, then reads back the latest rows to verify.
 */
object SparkSQLInsert {
  def main(args: Array[String]): Unit = {
    // Create the SparkSession (local master for this example).
    val spark = SparkSession.builder()
      .appName("Spark SQL Insert Example")
      .config("spark.master", "local")
      .getOrCreate()

    // BUGFIX: `implicits` (providing `toDF` and the `$"col"` syntax) is a member
    // of the SparkSession *instance*, so this import must come AFTER `spark`
    // exists. A file-level `import spark.implicits._` does not compile.
    import spark.implicits._

    try {
      // Sample student records: (first name, last name, age, major, enrollment date).
      val studentsData: Seq[(String, String, Int, String, String)] = Seq(
        ("Alice", "Smith", 20, "Computer Science", "2022-09-01"),
        ("Bob", "Johnson", 22, "Electrical Engineering", "2021-08-15"),
        ("Charlie", "Williams", 21, "Mechanical Engineering", "2023-01-20"),
        ("David", "Brown", 19, "Biology", "2024-03-10"),
        ("Emma", "Jones", 20, "Physics", "2022-06-05")
      )
      val studentsDF: DataFrame =
        studentsData.toDF("first_name", "last_name", "age", "major", "enrollment_date")

      // JDBC connection settings for the target MySQL database.
      val url = "jdbc:mysql://localhost:3306/sparksql_practice"
      val properties = new java.util.Properties()
      properties.setProperty("user", "lsz")
      properties.setProperty("password", "lsz")

      // Append the rows to the `students` table (existing rows are kept).
      studentsDF.write.mode("append").jdbc(url, "students", properties)

      // Verify the insert by reading back the last few records.
      // NOTE(review): assumes `students` has an auto-increment `student_id`
      // column not present in the written DataFrame — confirm against the schema.
      val lastRecords = spark.read.jdbc(url, "students", properties)
        .orderBy($"student_id".desc)
        .limit(5)
      lastRecords.show()
    } finally {
      // BUGFIX: always release the session, even when the JDBC write/read throws.
      spark.stop()
    }
  }
}

