package com.atbeijing.bigdata.spark.sql

import org.apache.spark.SparkConf
import org.apache.spark.sql.SparkSession

object SparkSQL08_MySQL {

    /**
     * Demonstrates the Spark JDBC data source: reads every row of the MySQL
     * table `user` and appends it into the table `user1` in the same database.
     *
     * NOTE(review): host, user and password are hard-coded sample values for a
     * teaching example — move them to external configuration before real use.
     * The legacy driver class `com.mysql.jdbc.Driver` matches Connector/J 5.x;
     * Connector/J 8+ renamed it to `com.mysql.cj.jdbc.Driver` — confirm which
     * connector version is on the classpath.
     */
    def main(args: Array[String]): Unit = {

        // TODO Create the environment objects
        val sparkConf = new SparkConf().setMaster("local[*]").setAppName("SparkSQL")
        val spark: SparkSession = SparkSession.builder().config(sparkConf).getOrCreate()

        // Connection options shared by the read and the write, so the two
        // sides can never drift apart.
        val jdbcOptions = Map(
            "url" -> "jdbc:mysql://linux1:3306/spark-sql",
            "driver" -> "com.mysql.jdbc.Driver",
            "user" -> "root",
            "password" -> "123123"
        )

        try {
            // Load the source table `user` over JDBC.
            val df = spark.read.format("jdbc")
                .options(jdbcOptions)
                .option("dbtable", "user")
                .load()

            // Append the rows into the target table `user1`.
            df.write.format("jdbc")
                .mode("append")
                .options(jdbcOptions)
                .option("dbtable", "user1")
                .save()
        } finally {
            // Always release the SparkSession, even when the JDBC I/O fails;
            // the original version leaked it on any exception above.
            spark.stop()
        }

    }
}
