package com.comtop.db.oracle

import java.sql.DriverManager
import org.apache.spark.rdd.JdbcRDD
import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import java.io.File

/**
 * Small Spark smoke test: reads rows from the Oracle table ODC_PUB_USER
 * through a [[org.apache.spark.rdd.JdbcRDD]] and prints selected columns.
 */
object OracleTest {

  /**
   * Entry point. Prepares the Windows Hadoop shim, builds a local
   * SparkContext, pulls up to 9 rows (rownum < 10) from Oracle and
   * prints columns 1, 3 and 8 of each row.
   */
  def main(args: Array[String]): Unit = {

    // Must run before the SparkContext is created so Hadoop's shell
    // utilities can locate ./bin/winutils.exe on Windows.
    mkdirFile()

    val conf = new SparkConf()
      .setAppName("FastarData")
      .setMaster("local[*]")
      .set("spark.driver.allowMultipleContexts", "true")
    val sc = new SparkContext(conf)

    try {
      // NOTE(review): connection URL and credentials are hard-coded;
      // move them to configuration or a secret store before real use.
      val rdd = new JdbcRDD(
        sc,
        () => {
          // Loading the class is enough to register the driver with
          // DriverManager; the deprecated Class.newInstance() call the
          // original made was unnecessary.
          Class.forName("oracle.jdbc.driver.OracleDriver")
          DriverManager.getConnection("jdbc:oracle:thin:@10.10.61.60:1521:ORCL", "odcdev_sz", "odcdev_sz")
        },
        // JdbcRDD requires exactly two '?' placeholders, bound per
        // partition to the (lower, upper) range below. "1 = ?" absorbs
        // the lower bound; rownum < ? applies the upper bound.
        "SELECT * from ODC_PUB_USER WHERE 1 = ? AND rownum < ?",
        1, 10, 1, // lowerBound = 1, upperBound = 10, numPartitions = 1
        r => (r.getString(1), r.getString(3), r.getString(8)))

      rdd.collect().foreach(println)
    } finally {
      // Always release the SparkContext, even when the job fails.
      sc.stop()
    }
  }

  /**
   * Creates a stub `./bin/winutils.exe` and points `hadoop.home.dir` at the
   * working directory so Spark's Hadoop layer starts on Windows without a
   * real Hadoop installation.
   */
  def mkdirFile(): Unit = {
    val workDir = new File(".")
    // Bug fix: the original put a java.io.File OBJECT into the system
    // properties; Properties.getProperty returns null for non-String
    // values, so Hadoop never saw the setting. Store the path String.
    System.setProperty("hadoop.home.dir", workDir.getAbsolutePath)
    // Best-effort: mkdir/createNewFile return false if already present,
    // which is fine for this shim.
    new File("./bin").mkdir()
    new File("./bin/winutils.exe").createNewFile()
  }
}