package main.scala.demo

import java.sql.DriverManager

import org.apache.log4j.{Level, Logger}
import org.apache.spark.rdd.JdbcRDD
import org.apache.spark.{SparkConf, SparkContext}

/**
  * JDBCRDDDemo
  *
  * @author zhangyimin
  * @date 2018-10-09 上午11:12
  * @version 1.0
  */
object JDBCRDDDemo {

  /**
    * Connection factory invoked once per JdbcRDD partition.
    *
    * Registers the MySQL driver and opens a connection to the `hive_etl`
    * database. JdbcRDD closes the connection itself when a partition
    * finishes, so no explicit close is needed here.
    */
  val conn = () => {
    // Class.forName alone triggers the driver's static registration block;
    // the previously used newInstance() call is deprecated and unnecessary.
    // (JDBC 4+ drivers are even auto-loaded via ServiceLoader, but the
    // explicit call keeps the demo working with older driver jars.)
    Class.forName("com.mysql.jdbc.Driver")
    DriverManager.getConnection("jdbc:mysql://localhost:3306/hive_etl", "root", "123456")
  }

  /**
    * Demo entry point: reads (name, salary) pairs from the `emp` table via
    * JdbcRDD, collects them to the driver, and prints the result.
    */
  def main(args: Array[String]): Unit = {
    // Silence noisy Spark / Jetty logging so the demo output stays readable.
    Logger.getLogger("org.apache.spark").setLevel(Level.ERROR)
    Logger.getLogger("org.eclipse.jetty.server").setLevel(Level.OFF)

    // Local-mode Spark configuration for the demo.
    val conf = new SparkConf()
    conf.setAppName("JDBCRDD")
    conf.setMaster("local")
    val sc = new SparkContext(conf)

    try {
      // NOTE(review): JdbcRDD binds the two '?' placeholders to the
      // lower/upper bound of a numeric partition range and splits
      // [2000, 10] across numPartitions. With numPartitions = 1 this
      // degenerates to the single query "sal > 2000 and deptno = 10",
      // which matches the author's intent — but raising numPartitions
      // would generate wrong queries. Confirm before reusing this pattern.
      val mysqlRDD = new JdbcRDD(
        sc,
        conn,
        "select * from emp where sal>? and deptno=?",
        2000, // lowerBound  -> bound to the first '?'
        10,   // upperBound  -> bound to the second '?'
        1,    // numPartitions: must stay 1 for this query (see note above)
        r => {
          // 1-based JDBC indexing: column 2 = employee name, column 6 = salary.
          val empName = r.getString(2)
          val sal = r.getInt(6)
          (empName, sal)
        })

      // Trigger the job and pull results to the driver (fine for a demo;
      // collect() would be unsafe on large tables).
      val result = mysqlRDD.collect()
      println(result.toBuffer)
    } finally {
      // Always release the SparkContext, even if the job fails.
      sc.stop()
    }
  }

  // Unused placeholder; kept only for source compatibility.
  class JDBCRDDDemo {

  }
}
