package main.scala.demo

import org.apache.spark.sql.SparkSession

/**
  * SparkMemoryDemo — demonstrates Spark SQL in-memory table caching
  * over a JDBC (MySQL) source.
  *
  * @author zhangyimin
  * @date 2018-10-11 2:03 PM
  * @version 1.0
  */
object SparkMemoryDemo {

  /**
    * Entry point: loads the `emp` table from MySQL over JDBC, registers it
    * as a temporary view, then queries it before and after caching to
    * demonstrate Spark SQL's in-memory table cache.
    *
    * Requires a reachable MySQL instance at localhost:3306 with database
    * `hive_etl` and table `emp`; this is demo code, not production code.
    *
    * @param args unused command-line arguments
    */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession
      .builder()
      .appName("memory")
      // .master("spark://10.16.7.36:7077") // use when submitting to a standalone cluster
      .master("local")
      .getOrCreate()

    // NOTE(review): credentials are hard-coded for the demo; externalize
    // (config file / environment) before real use.
    val mysqlDF = spark.read
      .format("jdbc")
      .option("url", "jdbc:mysql://localhost:3306/hive_etl?characterEncoding=utf-8&useSSL=false")
      .option("user", "root")
      .option("password", "123456")
      .option("dbtable", "emp")
      .load()

    // registerTempTable is deprecated since Spark 2.0; createOrReplaceTempView
    // is the supported replacement with identical semantics here.
    mysqlDF.createOrReplaceTempView("emp")
    spark.sql("select * from emp").show()

    // Mark the view for caching. spark.catalog is the Spark 2.x API that
    // supersedes the legacy sqlContext.cacheTable.
    spark.catalog.cacheTable("emp")
    // This scan materializes the cache (caching is lazy) ...
    spark.sql("select * from emp").show()
    // ... and this one is served from the in-memory cache.
    spark.sql("select * from emp").show()

    // SparkSession.stop() also stops the underlying SparkContext,
    // so a separate sc.stop() is redundant.
    spark.stop()
  }

}
