package com.gy.spark.sparksql.dataframe

import java.util.HashMap
import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.apache.spark.sql.SQLContext
import org.apache.spark.sql.SaveMode
import java.util.Properties

object CreateDFFromMysql {
  /**
   * Demonstrates two ways of loading a MySQL table into a DataFrame via the
   * JDBC data source, joining the results with Spark SQL, and writing the
   * joined result back to MySQL.
   */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
    conf.setMaster("local").setAppName(this.getClass.getSimpleName)
    val sc = new SparkContext(conf)
    val sqlContext = new SQLContext(sc)

    // Shared JDBC connection settings, defined once so the three usages below
    // (options map, chained reader, write-back Properties) cannot drift apart.
    // NOTE(review): credentials are hard-coded for this demo; load them from
    // configuration or a secret store before any real deployment.
    val jdbcUrl = "jdbc:mysql://mainframe59:7306/db1"
    val jdbcDriver = "com.mysql.jdbc.Driver"
    val jdbcUser = "root"
    val jdbcPassword = "123456"

    /**
     * Approach 1: read a MySQL table into a DataFrame by passing an
     * immutable options map to the JDBC data source.
     */
    val options = Map(
      "url" -> jdbcUrl,
      "driver" -> jdbcDriver,
      "user" -> jdbcUser,
      "password" -> jdbcPassword,
      "dbtable" -> "student"
    )
    val person = sqlContext.read.format("jdbc").options(options).load()
    person.show()
    person.registerTempTable("student")

    /**
     * Approach 2: read a MySQL table into a DataFrame by chaining
     * option(...) calls on the reader (each call returns the reader).
     */
    val score = sqlContext.read.format("jdbc")
      .option("url", jdbcUrl)
      .option("driver", jdbcDriver)
      .option("user", jdbcUser)
      .option("password", jdbcPassword)
      .option("dbtable", "sc")
      .load()
    score.show()

    score.registerTempTable("score")
    // Join the two temp tables on student id and keep the columns of interest.
    val result = sqlContext.sql("select s.SID,s.Sname,s.Ssex,e.CID,e.score from student s inner join score e on s.SID = e.SID")
    result.printSchema()
    result.show()

    /**
     * Write the joined result back into MySQL. SaveMode.Overwrite drops and
     * recreates the target table if it already exists.
     */
    val properties = new Properties()
    properties.setProperty("user", jdbcUser)
    properties.setProperty("password", jdbcPassword)
    result.write.mode(SaveMode.Overwrite).jdbc(jdbcUrl, "spark_result1", properties)

    sc.stop()
  }
}