package SQL

import java.util.Properties

import org.apache.spark.rdd.RDD
import org.apache.spark.sql.types.{IntegerType, StringType, StructType}
import org.apache.spark.sql.{DataFrame, Row, SparkSession}

object JdbcDemo {

  // Connection target for the demo MySQL instance. Hoisted so the read (L22)
  // and write (L34) paths cannot drift apart.
  private val JdbcUrl: String = "jdbc:mysql://spark:3306/test"
  private val TableName: String = "employee"

  /**
   * Demonstrates a Spark SQL JDBC round trip: reads the `employee` table from
   * MySQL and prints it, then builds a small DataFrame from two in-memory
   * records and appends it back to the same table.
   *
   * @param args unused command-line arguments
   */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession
      .builder().master("local[2]")
      .appName("Spark SQL basic example")
      .getOrCreate()
    // Reduce console noise. spark.sparkContext is the direct accessor;
    // going through sqlContext first was redundant.
    spark.sparkContext.setLogLevel("WARN")

    // NOTE(review): credentials are hard-coded for the demo only — load them
    // from configuration (not source) in real code.
    val connectionProperties = new Properties()
    connectionProperties.put("user", "hive")
    connectionProperties.put("password", "hive")

    // Read the existing table over JDBC and show its contents.
    val jdbcDF: DataFrame = spark.read.jdbc(JdbcUrl, TableName, connectionProperties)
    jdbcDF.show()

    // Build an RDD of "name gender age" fields from two in-memory records.
    val employeeRDD: RDD[Array[String]] =
      spark.sparkContext.parallelize(Seq("zhangsan F 23", "lisi M 34")).map(_.split(" "))
    // Convert to Rows matching the schema below; age becomes an Int.
    val rowRDD: RDD[Row] = employeeRDD.map(fields => Row(fields(0), fields(1), fields(2).toInt))

    val schema = new StructType()
      .add("name", StringType)
      .add("gender", StringType)
      .add("age", IntegerType)

    val employeeDF = spark.createDataFrame(rowRDD, schema)

    // Append the new rows back into the same MySQL table.
    employeeDF.write.mode("append")
      .jdbc(JdbcUrl, TableName, connectionProperties)

    // stop() is the canonical SparkSession shutdown (close() merely aliases it).
    spark.stop()
  }

}
