package org.example

import org.apache.spark.sql.SQLContext
import org.apache.spark.{SparkConf, SparkContext}

import java.util.Properties

object SparkSqlTest {

  /** Sample record. NOTE: the field names (`str`, `i`) become the DataFrame
    * column names and therefore the columns of the JDBC target table —
    * rename only together with a schema migration.
    */
  case class Person(str: String, i: Int)

  /** Builds a small DataFrame of [[Person]] rows, prints it, and appends it
    * to the MySQL table `personZ` over JDBC.
    *
    * @param args unused command-line arguments
    */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("SparkSqlTest").setMaster("local[*]")
    val sc = new SparkContext(conf)

    try {
      val people = Array(Person("zhangsan", 18), Person("lisi", 20), Person("wangwu", 22), Person("zhaoliu", 24))
      val personRDD = sc.parallelize(people)

      // NOTE(review): SQLContext is deprecated since Spark 2.0 — prefer
      // SparkSession.builder when the project can take the import change.
      val sqlCon = new SQLContext(sc)
      import sqlCon.implicits._

      // Build the DataFrame once and reuse it for both show() and the JDBC
      // write (the original converted the RDD twice).
      val df = personRDD.toDF()
      df.show()

      // SECURITY(review): credentials and endpoint are hard-coded; load them
      // from configuration or environment variables in real deployments.
      val prop = new Properties()
      prop.setProperty("user", "niit")
      prop.setProperty("password", "123456")
      prop.setProperty("driver", "com.mysql.cj.jdbc.Driver")
      df.write.mode("append").jdbc("jdbc:mysql://123.56.187.176:1101/huel", "personZ", prop)
    } finally {
      // Always release the SparkContext so the JVM can exit cleanly
      // (the original leaked it).
      sc.stop()
    }
  }
}
