package cn.lecosa.spark.mysql

import java.sql.DriverManager
import org.apache.spark.rdd.JdbcRDD
import org.apache.spark.{ SparkConf, SparkContext }
import org.apache.log4j.{ Level, Logger }
import org.apache.spark.sql.SQLContext
import java.util.Properties
import java.text.DecimalFormat
import java.text.SimpleDateFormat
import java.util.Date

/**
 * Created by LLS on 2017/03/14.
 */
object Oracle2Df {
  /**
   * Loads the Oracle table `TYID` through the Spark SQL JDBC data source,
   * registers it as the temporary table `patient`, then prints its rows
   * and schema to stdout.
   */
  def main(args: Array[String]) {
    // Silence Spark's verbose INFO logging; only errors are shown.
    Logger.getLogger("org").setLevel(Level.ERROR)

    val conf = new SparkConf().setAppName("Simple Application") // name the application
    conf.setMaster("local")
    val sc = new SparkContext(conf)
    try {
      val sqlContext = new SQLContext(sc)

      // JDBC options for the Oracle data source.
      // SECURITY NOTE(review): credentials are hard-coded in source; move them
      // to configuration (program args, a properties file, or environment
      // variables) before this leaves a test environment.
      val jdbcMap = Map(
        "url" -> "jdbc:oracle:thin:@10.20.8.94:1521/orcl",
        "user" -> "QZ",
        "password" -> "skycomm123",
        "dbtable" -> "TYID",
        "driver" -> "oracle.jdbc.driver.OracleDriver")

      val jdbcDF = sqlContext.read.options(jdbcMap).format("jdbc").load()
      jdbcDF.registerTempTable("patient")

      val res = sqlContext.sql("SELECT * from patient")
      res.show()
      res.printSchema()
    } finally {
      // Always release the SparkContext, even if the JDBC load or the
      // query above throws — otherwise the context (and its resources) leak.
      sc.stop()
    }
  }
}