package com.spark.mooc.ch6_sparksql.part05_sparksqlReadWriteDataBase

import java.util.Properties

import org.apache.spark.rdd.RDD
import org.apache.spark.sql.types.{IntegerType, StringType, StructField, StructType}
import org.apache.spark.sql.{DataFrame, Row, SparkSession}

/**
 * Demonstrates reading from and writing to a MySQL database through
 * Spark SQL's JDBC data source.
 *
 * @time: 2020/11/29 12:12
 * @author: lhy
 */
object JDBC {
    /**
     * Entry point: reads the `student` table from MySQL, appends two new
     * student rows to it, then re-reads the table to show the result.
     *
     * The SparkSession is stopped in a `finally` block so its resources
     * (and the underlying SparkContext) are released even if the JDBC
     * round-trip fails.
     */
    def main(args: Array[String]): Unit = {
        val spark: SparkSession = SparkSession.builder().appName("JDBC").master("local").getOrCreate()
        try {
            /*
             Read data from the MySQL database
             */
            val jdbcDF: DataFrame = spark.read.format("jdbc")
                                   .option("url","jdbc:mysql://192.168.21.104:3306/spark")
                                   .option("driver","com.mysql.jdbc.Driver")
                                   .option("dbtable","student")
                                   .option("user","root")
                                   .option("password","bigdata")
                                   .load()
            jdbcDF.show()
            /*
             Write data into the MySQL database
             */
            // Two space-separated records, each representing one student
            val studentRDD: RDD[Array[String]] = spark.sparkContext.parallelize(Array("5 KunGe M 34","6 Tom M 27")).map(_.split(" "))
            // Schema describing the target table's columns
            val schema: StructType = StructType(List(StructField("id",IntegerType,nullable = false),
                                                     StructField("name",StringType,nullable = true),
                                                     StructField("gender",StringType,nullable = true),
                                                     StructField("age",IntegerType,nullable = true)))
            // Convert each String array into a Row matching the schema
            val rowRDD: RDD[Row] = studentRDD.map(p => Row(p(0).toInt, p(1).trim, p(2).trim, p(3).toInt))
            // Bind the rows to the schema, producing a typed DataFrame
            val studentDF: DataFrame = spark.createDataFrame(rowRDD,schema)
            // JDBC connection properties for the write path
            val prop = new Properties()
            prop.setProperty("user","root")
            prop.setProperty("password","bigdata")
            prop.setProperty("driver","com.mysql.jdbc.Driver")
            // Append the new records to the `student` table in database `spark`
            studentDF.write.mode("append").jdbc("jdbc:mysql://192.168.21.104:3306/spark","spark.student",prop)
            // Re-read to verify the rows were appended
            jdbcDF.show()
        } finally {
            // Always release Spark resources, even on failure
            spark.stop()
        }
    }
}
