package com.cn.spark

import java.sql.DriverManager

import org.apache.spark.rdd.JdbcRDD
import org.apache.spark.{SparkContext, SparkConf}
import org.apache.spark.sql.{SQLContext, SparkSession}

/**
  * Created by Administrator on 2017/2/10.
  * Demo: reading from an Oracle database through Spark SQL's JDBC data source.
  */
object DBOracleConDemo {
  // `: Unit =` is explicit — procedure syntax (`def main(...) { ... }`) is
  // deprecated since Scala 2.11 and removed in Scala 3.
  def main(args: Array[String]): Unit = {
    val sparkSession: SparkSession =
      SparkSession.builder.master("local").appName("DBOracleConDemo").getOrCreate()
//    val sqlContext = new SQLContext(sc)
    val sqlContext: SQLContext = sparkSession.sqlContext

    // NOTE(review): connection details and credentials are hard-coded for this
    // demo only — move them to external configuration before any real use.
    val url = "jdbc:oracle:thin:@172.16.8.116:1521:hfwx"
    val prop = new java.util.Properties
    prop.put("user", "mydba2")
    prop.put("password", "mydbahfwx098")

//    val conf = new SparkConf()
//    val sc = new SparkContext(conf)

   /* Alternative: the lower-level JdbcRDD API on a raw SparkContext.
    val rdd = new JdbcRDD(
      sc,
      () => {
        Class.forName("oracle.jdbc.driver.OracleDriver").newInstance()
        DriverManager.getConnection("jdbc:oracle:thin:@172.16.222.112:1521:pms", "scyw", "scyw")
      },
      "SELECT * FROM MW_APP.CMST_AIRPRESSURE WHERE 1 = ? AND rownum < ?",
      1, 10, 1,
      r => (r.getString(1),r.getString(2),r.getString(5)))
    rdd.collect().foreach(println)
    sc.stop()*/

    // Read with pushed-down predicates: each element of Array("id='244'")
    // becomes the WHERE condition of one partition of the resulting DataFrame.
    val cnFlight = sqlContext.read.jdbc(url, "users", Array("id='244'"), prop)

//    sqlContext.read
//    println(cnFlight.count())

    cnFlight.show()

//    // Group to get per-(gps_city, user_mail) data sets:
//    val emailList = cnFlight.groupBy("gps_city", "user_mail")

//    // Count each group and sort descending by count:
//    val sorted = emailList.count().orderBy( desc("count") )

//    // Show the top 10 rows:
//    sorted.show(10)

//    // Save to a file (this produces many partition files):
//    sorted.rdd.saveAsTextFile("/home/qingpingzhang/data/flight_top")

//    // Write back to a database table:
//    sorted.write.jdbc(url,"table_name",prop)

    // Release the Spark context/resources before the JVM exits — the original
    // demo leaked the session.
    sparkSession.stop()
  }
}
