package cn.aijson.demo.sparksql

object SparksqlMysql {

  /** Demo entry point: currently only prints a marker line to stdout.
    *
    * The intended pipeline (kept below as disabled reference code) was:
    * read MySQL's `dept` table via JDBC and Hive's `emp` table with
    * Spark SQL, join them by `deptno`, and write the result as JSON.
    */
  def main(args: Array[String]): Unit = {
    // Runtime output is intentionally preserved byte-for-byte.
    val marker = "------读取mysql关联文件rdd"
    println(marker)

    // --- Disabled reference sketch -----------------------------------------
    // NOTE(review): `spark` and `sql` are not defined anywhere in this file;
    // a SparkSession (e.g. `val spark = SparkSession.builder()...getOrCreate()`
    // plus `import spark.sql`) must be created before re-enabling this.
    // NOTE(review): `.show` returns Unit, so `sqlDF.write` below would not
    // compile as written — drop `.show` (or call it separately) first.
    //
    //   val jdbcDF = spark.read.format("jdbc")
    //     .option("url", "jdbc:mysql://localhost:3306")
    //     .option("dbtable", "hive.dept")
    //     .option("user", "root")
    //     .option("password", "root")
    //     .load()
    //   jdbcDF.createOrReplaceTempView("dept")
    //   val hiveDF = sql("SELECT * FROM emp")
    //   val sqlDF = sql("SELECT * FROM emp e JOIN dept d ON e.deptno = d.deptno").show
    //
    //   sqlDF.write.format("json").save("file:///empJoinDept.json")
  }
}
