package Chapter7

import org.apache.spark.rdd.JdbcRDD
import org.apache.spark.{SparkConf, SparkContext}

import java.sql.DriverManager

object SortBY {

  /**
   * Demo Spark job:
   *  1. Word-counts a small in-memory dataset and prints it sorted by count,
   *     once with explicit lambdas (descending) and once with placeholder
   *     syntax (ascending) to illustrate the `sortBy` transformation.
   *  2. Reads rows from a MySQL table via `JdbcRDD` and prints them.
   *
   * Program argument `args(0)` ("true"/"false") selects whether to run with a
   * local master; when absent we default to local mode instead of crashing.
   */
  def main(args: Array[String]): Unit = {

    // sortBy is a transformation (lazy) operator.
    val conf = new SparkConf().setAppName("sort")
    // Guard against missing program args: the original args(0) threw
    // ArrayIndexOutOfBoundsException. headOption.forall defaults to local
    // mode when no argument is given, and preserves behavior when one is.
    val isLocal = args.headOption.forall(_.toBoolean)
    if (isLocal) {
      conf.setMaster("local[*]")
    }

    val sc = new SparkContext(conf)
    try {
      val arr = Array("hello java", "hello scala")
      val arrRdd = sc.parallelize(arr)

      // Word count with explicit lambdas, sorted by count descending.
      arrRdd.flatMap(x => x.split(" "))
        .map(x => (x, 1))
        .reduceByKey((x, y) => x + y)
        .sortBy(x => x._2, ascending = false)
        .foreach(println(_))

      // Same word count with placeholder syntax, default ascending sort.
      arrRdd.flatMap(_.split(" "))
        .map((_, 1))
        .reduceByKey(_ + _)
        .sortBy(_._2) // sort key: the second tuple element (the count)
        .foreach(println(_))

      // SECURITY NOTE(review): the JDBC URL, user and password are hard-coded
      // in source. Move them to configuration / environment variables before
      // any real deployment.
      // JdbcRDD invokes this factory on each executor and closes the
      // connection itself when a partition is fully consumed.
      val conn = () =>
        DriverManager.getConnection("jdbc:mysql://123.56.187.176:1101/huelkq11", "niit", "123456")

      // NOTE(review): with lowerBound=1, upperBound=500 and strict > / <,
      // rows with id 1 and id 500 are excluded. Spark's JdbcRDD examples use
      // ">= ? AND <= ?" — confirm which boundary semantics are intended.
      val result = new JdbcRDD(
        sc,
        conn,
        "select * from que where id> ? and id< ?",
        1,   // lower bound, substituted for the first '?'
        500, // upper bound, substituted for the second '?'
        1,   // number of partitions (the id range is split across these)
        rs => {
          val id = rs.getInt(1)      // first column
          val name = rs.getString(2) // second column
          (id, name)
        }
      )
      println(result.collect().toBuffer)
    } finally {
      // Release cluster resources even when the job fails partway.
      sc.stop()
    }
  }
}
