package com.dtkavin.sparkSQL

import java.util.Properties

import org.apache.spark.sql.SQLContext
import org.apache.spark.{SparkConf, SparkContext}

/**
  * Created by IntelliJ IDEA.
  * Programmer : John Zn
  * Date : 2016/4/23 0023
  * Time : 22:54
  * Description : Untested — works through the Spark SQL shell, but packaging it as a jar has not yet passed testing
  */
// Empty placeholder class paired with the companion object below; it carries no
// state or behavior and exists only so the file has a class/object pair.
// NOTE(review): appears unused — confirm no external reflection/instantiation
// before removing.
class Hive2SparkSql {

}

object Hive2SparkSql {

  /**
    * Entry point: loads the `t_person1` table from MySQL over JDBC and prints it.
    *
    * Why the rewrite: the original code pushed the JDBC settings ("url",
    * "driver", "user", "password") into the SQLContext configuration via
    * `setConf(prop)` and then ran `sql("select * from t_person1")`. SQL-conf
    * keys do not register a JDBC data source, so the query could never resolve
    * the MySQL table — matching the file-header note that the packaged jar
    * never worked. The supported way to read an external relational table in
    * Spark 1.x is `sqlContext.read.jdbc(url, table, props)`, used below.
    *
    * @param args command-line arguments (currently unused)
    */
  def main(args: Array[String]): Unit = {
    // local[3] keeps the original single-machine behavior; override with
    // spark-submit --master when running on a cluster.
    val conf = new SparkConf().setAppName("Hive2SparkSql").setMaster("local[3]")
    val sc = new SparkContext(conf)
    // Ship the MySQL driver jar to executors. The absolute Windows path is
    // machine-specific; in practice prefer bundling the driver into the
    // application jar or passing it via spark-submit --jars.
    sc.addJar("G:\\BigData\\lessions\\spark\\spark-day7\\lecture\\mysql-connector-java-5.1.35-bin.jar")
    val sqlc = new SQLContext(sc)

    // JDBC connection settings: the URL goes to read.jdbc directly; the
    // remaining keys travel in the Properties object.
    val url = "jdbc:mysql://mysql01:3306/hivedb"
    val prop = new Properties()
    prop.put("driver", "com.mysql.jdbc.Driver")
    prop.put("user", "root")
    prop.put("password", "123456")

    try {
      // read.jdbc materializes the remote table as a DataFrame we can show().
      sqlc.read.jdbc(url, "t_person1", prop).show()
    } finally {
      // Always release the SparkContext, even if the JDBC read throws.
      sc.stop()
    }
  }
}