package org.zjt.spark.sql

import java.sql.{Connection, DriverManager, ResultSet}

import com.redis.RedisClient
import org.apache.log4j.Logger
import org.apache.spark.sql.SparkSession
import org.apache.spark.{SparkConf, SparkContext}

import scala.collection.mutable.ArrayBuffer
import scala.util.control.NonFatal


/**
  * DESC    Demonstrates three ways of reading a MySQL data source with Spark:
  *         the Spark JDBC data source, plain driver-side JDBC, and one JDBC
  *         connection per RDD partition — with the trade-offs of each noted inline.
  *
  * @author
  * @create 2017-06-28 11:01 AM
  **/
object ReadMysqlTest {

  // Plain `main` entry point instead of `extends App` (App has
  // initialization-order pitfalls for non-trivial programs).
  def main(args: Array[String]): Unit = {
    // `val`, not `var` — the conf is never reassigned.
    val sparkConf = new SparkConf().setMaster("local").setAppName("ReadMysqlTest")
    val spark = new SparkSession.Builder().config(sparkConf).getOrCreate()
    val url: String = "jdbc:mysql://172.19.110.250:3307/bigdata_etl?" + "useUnicode=true&characterEncoding=UTF-8&useJDBCCompliantTimezoneShift=true&useLegacyDatetimeCode=false&serverTimezone=UTC"
    val table = "(select id , username from etl_user where id > 17) as tmp" // user table
    val driver = "com.mysql.jdbc.Driver"

    // 1. Query MySQL through the Spark JDBC data source.
    //    Drawback: with many distinct SQL statements, the number of queries is too high.
    val jdbcDF = spark.read
      .format("jdbc")
      .option("driver", driver)
      .option("url", url)
      .option("dbtable", table)
      .option("user", "root")
      .option("password", "123456")
      .load()
    jdbcDF.createOrReplaceTempView("tmp")
    spark.sql("select id , username from tmp ").show()

    // 2. Plain JDBC on the driver.
    //    Drawback: fetches the whole result set at once, which can OOM a single node.
    val connection = MySQLUtil.createConnection("root", "123456", url)
    try {
      val statement = connection.createStatement()
      try {
        val resultSet = statement.executeQuery("select id , username from etl_user where id > 17")
        while (resultSet.next()) {
          val name = resultSet.getString("id")
          val password = resultSet.getString("username")
          // BUGFIX: the original printed `password` twice and never used `name`.
          println("name, password = " + name + ", " + password)
        }
      } finally {
        // BUGFIX: the original never closed the Statement; closing it
        // also closes its open ResultSet.
        statement.close()
      }
    } catch {
      // NonFatal so OutOfMemoryError/InterruptedException etc. still propagate.
      case NonFatal(e) => e.printStackTrace()
    } finally {
      MySQLUtil.closeConnection(connection)
    }

    // 3. One connection per partition: each task looks up its own ids,
    //    so no single node has to hold the full result set.
    val rdd = spark.sparkContext
      .parallelize(Array("16", "17", "21", "22", "23", "25", "100"))
      .coalesce(4)
      .mapPartitions { ids: Iterator[String] =>
        val rows = ArrayBuffer[(String, String, String)]()
        val conn = MySQLUtil.createConnection("root", "123456", url)
        try {
          val stmt = conn.createStatement()
          for (id <- ids) {
            // NOTE(review): SQL built by string concatenation — the ids here are
            // hard-coded literals, but a PreparedStatement should be used if the
            // ids ever come from untrusted input.
            val rs = stmt.executeQuery("select id , username , password from etl_user where id = " + id)
            val columnCount = rs.getMetaData().getColumnCount()
            val primaryKeyColName = EtlHandler.getPrimaryKeyColName()
            // Emit one (primaryKey, columnName, columnValue) triple per cell.
            while (rs.next()) {
              for (i <- 1 to columnCount) {
                val colName = rs.getMetaData().getColumnLabel(i).toUpperCase()
                val primaryValue = rs.getObject(primaryKeyColName).toString
                val colValue = rs.getObject(i).toString
                rows += ((primaryValue, colName, colValue)) // tuple literal, not `new Tuple3`
              }
            }
          }
        } catch {
          case NonFatal(e) => e.printStackTrace()
        } finally {
          MySQLUtil.closeConnection(conn)
        }
        rows.toIterator
      }

    println(rdd.collect().mkString("\n"))
    //jdbcDF.show()
    spark.stop()
  }
}


/** JDBC connection helpers for MySQL (open / close a [[java.sql.Connection]]). */
object MySQLUtil {

  val log = Logger.getLogger(MySQLUtil.getClass)

  /**
    * Opens a JDBC connection.
    *
    * @param user        database user
    * @param password    database password
    * @param url         full JDBC url
    * @param driverClass JDBC driver class to load; defaults to the MySQL driver
    *                    (new parameter — existing 3-arg callers are unaffected)
    * @return an open connection; the caller is responsible for closing it
    */
  def createConnection(user: String,
                       password: String,
                       url: String,
                       driverClass: String = "com.mysql.jdbc.Driver"): Connection = {
    log.info("************开启数据库链接*************")
    // Class.forName alone registers a JDBC driver; the deprecated
    // Class.newInstance() call served no purpose and was removed.
    Class.forName(driverClass)
    DriverManager.getConnection(url, user, password)
  }

  /** Closes `connection`; safe to call with null (no-op). */
  def closeConnection(connection: Connection): Unit = {
    log.info("************关闭数据库链接*************")
    if (connection != null)
      connection.close()
  }

}


/** Minimal helper for reading a value from a local Redis instance. */
object RedisUtil {

  /**
    * Looks up `key` on the Redis instance at localhost:6379.
    *
    * NOTE(review): the method name contains a typo ("cteate" for "create") but
    * is kept unchanged for source compatibility with existing callers.
    *
    * @param key the Redis key to read (new defaulted parameter — the original
    *            called `get()` with no key, which could not work)
    * @return the value if the key exists, None otherwise — instead of the
    *         original's `null`, which was computed and then discarded anyway
    */
  def cteateConnection(key: String = "test"): Option[String] = {
    val client = new RedisClient("localhost", 6379)
    // BUGFIX: the original never closed the client connection.
    try client.get(key)
    finally client.disconnect
  }
}


/** ETL metadata lookups used by the partition-level JDBC reader. */
object EtlHandler {

  /** Name of the primary-key column in the ETL source table. */
  def getPrimaryKeyColName(): String = "id"

}
