package com.wzz.sparkSql

import org.apache.spark.rdd.RDD
import org.apache.spark.sql.functions.expr
import org.apache.spark.sql.{DataFrame, Dataset, Row, SparkSession}

import java.util.Properties

/**
 * Demo: builds a small in-memory DataFrame and appends it to the
 * MySQL table `test.df_user` over JDBC.
 *
 * Side effects: creates a local SparkSession, writes rows to MySQL,
 * prints to stdout. The session is always stopped on exit.
 */
object Demo1 {
  def main(args: Array[String]): Unit = {

    // Local-mode session for the demo; appName added so the job is
    // identifiable in the Spark UI / logs instead of an auto-generated name.
    val spark: SparkSession = SparkSession
      .builder()
      .master("local")
      .appName("Demo1")
      .getOrCreate()

    try {
      val properties = new Properties()
      // NOTE(review): "com.mysql.jdbc.Driver" is the deprecated Connector/J 5.x
      // class; Connector/J 8.x expects "com.mysql.cj.jdbc.Driver". Left as-is
      // because the driver version on the classpath is not visible here — confirm.
      properties.put("driver", "com.mysql.jdbc.Driver")
      properties.put("user", "root")
      // SECURITY(review): plaintext credentials hard-coded in source; move to
      // configuration / environment for anything beyond a local demo.
      properties.put("password", "123456")

      import spark.implicits._
      // Three demo rows (id, name, age) converted to a DataFrame via implicits.
      val df: DataFrame = List((1, "zhangsan", 20), (2, "lisi", 23), (3, "wangwu", 23))
        .toDF("id", "name", "age")

      // "append" keeps any existing rows in df_user and adds these three.
      df.write.mode("append").jdbc("jdbc:mysql://hadoop10:3306/test", "df_user", properties)

      println(" ")
    } finally {
      // Fix: the original leaked the SparkSession; always release its resources.
      spark.stop()
    }
  }
}
