// 实验一：创建MySQL数据库和表，并填充实验数据
// 数据库名称：sparksql_practice
// 表名称：students
// 表结构设计：
//     • student_id INT PRIMARY KEY 自增，表示学生ID。
//     • first_name VARCHAR(50)，学生名字。
//     • last_name VARCHAR(50)，学生姓氏。
//     • age INT，学生年龄。
//     • major VARCHAR(100)，学生专业。
//     • enrollment_date DATE，入学日期。


// NOTE(review): assumes `spark` (SparkSession), `url` (JDBC URL for the MySQL
// server), `user` and `password` are defined earlier in the file — confirm.
import java.sql.{Date, DriverManager}
import java.util.Properties

// Connection properties shared by every JDBC call below.
val connProps = new Properties()
connProps.setProperty("user", user)
connProps.setProperty("password", password)

// MySQL-specific DDL for the students table. This cannot go through
// spark.sql(): Spark SQL does not understand AUTO_INCREMENT, and the original
// bare `CREATE DATABASE` / `USE` lines were not valid Scala at all. All DDL
// must be executed on the MySQL server itself over JDBC.
val createTableQuery =
  """CREATE TABLE IF NOT EXISTS students (
    |    student_id INT PRIMARY KEY AUTO_INCREMENT,
    |    first_name VARCHAR(50),
    |    last_name VARCHAR(50),
    |    age INT,
    |    major VARCHAR(100),
    |    enrollment_date DATE
    |)""".stripMargin

// Create the database and table, closing the statement/connection even on error.
val conn = DriverManager.getConnection(url, connProps)
try {
  val stmt = conn.createStatement()
  try {
    stmt.executeUpdate("CREATE DATABASE IF NOT EXISTS sparksql_practice")
    stmt.executeUpdate("USE sparksql_practice")
    stmt.executeUpdate(createTableQuery)
  } finally stmt.close()
} finally conn.close()

// Sample rows. Dates are java.sql.Date values so Spark maps them to the DATE
// column instead of writing plain strings (which MySQL would store as text).
val data = Seq(
  (1, "Alice", "KK", 22, "Computer Science", Date.valueOf("2022-01-01")),
  (2, "Bob", "EE", 21, "Electrical Engineering", Date.valueOf("2022-01-02")),
  (3, "Charlie", "CC", 22, "Data Science", Date.valueOf("2022-01-03")),
  (4, "David", "DD", 23, "Chemistry", Date.valueOf("2022-01-04")),
  (5, "Eve", "AA", 24, "Biology", Date.valueOf("2022-01-05"))
)
val studentsDF = spark.createDataFrame(data)
  .toDF("student_id", "first_name", "last_name", "age", "major", "enrollment_date")

// Append into the pre-created table. mode("overwrite") would DROP and recreate
// the table from the DataFrame schema, silently discarding the PRIMARY KEY /
// AUTO_INCREMENT / DATE definitions set up above.
studentsDF.write.mode("append").jdbc(url, "students", connProps)

// Read the table back through JDBC and display every row.
val queryResult = spark.read.jdbc(url, "students", connProps)
queryResult.show()

// Release Spark resources once the experiment is done.
spark.stop()