package jupitermouse.site.sql

import org.apache.spark.sql.SparkSession

/**
  * Demonstrates reading a MySQL table into a DataFrame via Spark SQL's JDBC
  * data source, then writing the result back out as JSON and to MySQL.
  *
  * Requires a MySQL server on localhost:3306 reachable with the credentials
  * below — this is a local demo, not production code.
  */
object MysqlContextApp {
  def main(args: Array[String]): Unit = {
    // appName fixed to match the object (was a copy-paste of "DataFrameRDDApp").
    val spark = SparkSession.builder()
      .appName("MysqlContextApp")
      .master("local[4]")
      .config("spark.driver.host", "localhost")
      .getOrCreate()

    try {
      // Load a MySQL table as a DataFrame through the JDBC data source.
      val mysqlDF = spark.read.format("jdbc")
        .option("url", "jdbc:mysql://localhost:3306")
        // NOTE(review): com.mysql.jdbc.Driver is the legacy Connector/J class;
        // newer connectors use com.mysql.cj.jdbc.Driver — confirm which jar
        // is on the classpath before changing.
        .option("driver", "com.mysql.jdbc.Driver")
        .option("user", "root")
        .option("password", "root")
        .option("dbtable", "database.table") // fully-qualified source table
        .load()

      // BUG FIX: select() with no arguments produces an empty-schema
      // DataFrame, so show() printed nothing useful. Show the table directly.
      mysqlDF.show()

      // BUG FIX: select("") referenced a non-existent empty column name and
      // would throw AnalysisException at runtime; select all columns instead.
      val mysqlWrite = mysqlDF.select("*")

      // Write out as JSON.
      // BUG FIX: save("") fails with an invalid path — use a concrete one.
      mysqlWrite.write.format("json").mode("overwrite").save("/tmp/mysql_export_json")

      // Write back to MySQL.
      // BUG FIX: "mysql" is not a registered data source, and saveAsTable()
      // targets the Hive metastore rather than JDBC. The JDBC sink is
      // format("jdbc") + save(), with the target table given via "dbtable".
      mysqlWrite.write
        .format("jdbc")
        .mode("append")
        .option("url", "jdbc:mysql://localhost:3306")
        .option("driver", "com.mysql.jdbc.Driver")
        .option("user", "root")
        .option("password", "root")
        .option("dbtable", "schema.tablename") // target table
        .save()

      // TODO: write to Kafka (placeholder kept from the original).
    } finally {
      // BUG FIX: the session was never stopped; release local Spark resources
      // even if one of the reads/writes above throws.
      spark.stop()
    }
  }

}
