package com.comtop.db.dm

import java.sql.DriverManager
import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.apache.spark.rdd.JdbcRDD

object JdbcConnction {

  // NOTE(review): allowMultipleContexts is a legacy workaround flag; prefer a
  // single shared SparkContext per JVM.
  val conf = new SparkConf().setAppName("DmData")
                            .setMaster("local[1]")
                            .set("spark.driver.allowMultipleContexts", "true")
  val sc = new SparkContext(conf)

  /**
   * Opens a JDBC connection to the DM (Dameng) database.
   *
   * NOTE(review): host, user and password are hard-coded here; move them to
   * external configuration before production use.
   */
  def getConnection() = {
    // Class.forName alone registers the JDBC driver; the deprecated
    // newInstance() call the original made is unnecessary.
    Class.forName("dm.jdbc.driver.DmDriver")
    DriverManager.getConnection("jdbc:dm://10.10.61.90:5236?autoReconnect=true", "JSHL", "123456789")
  }

  /**
   * Loads CUSTOMER_MRID values from RDP_MD_CUSTOMER through a JdbcRDD and
   * prints the row count plus elapsed time.
   */
  def getDataFromDm() = {
    println("***************************************start***************************************")
    val startTime = System.currentTimeMillis()

    // BUG FIX: the original opened an extra connection here that was never
    // used or closed (a connection leak). JdbcRDD opens its own connection
    // for each partition via the supplied factory, so no eager connection
    // is needed.

    // BUG FIX: JdbcRDD requires the SQL to contain exactly two '?'
    // placeholders, which it binds with setLong to the lower/upper bound of
    // each partition; without them statement preparation fails at runtime.
    // With a single partition the bounds are (1, 10), so "? <= ?" is a
    // no-op predicate that satisfies the placeholder contract without
    // filtering any rows.
    val rdd = new JdbcRDD(
      sc,
      getConnection,
      "SELECT * FROM RDP_MD_CUSTOMER T WHERE T.KIND=20 AND ? <= ?",
      1, 10, 1,
      r => r.getString("CUSTOMER_MRID"))

    println("*********************共查询到" + rdd.count() + "条数据...")

    sc.stop()
    println("***************************************end***************************************")
    val endTime = System.currentTimeMillis()
    println("[共耗时]: " + (endTime - startTime) + "毫秒")
  }

  def main(args: Array[String]): Unit = {
    getDataFromDm()
  }
}