package org.zjt.spark.etl

import java.io.UnsupportedEncodingException
import java.sql.{Connection, DriverManager}

import com.redis.RedisClient
import org.apache.log4j.Logger
import org.apache.spark.SparkConf
import org.apache.spark.sql.SparkSession
import org.json4s.NoTypeHints
import org.json4s.jackson.Serialization
import org.json4s.jackson.Serialization.{read, write}

import scala.collection.mutable.ArrayBuffer

/**
  * DESC    
  *
  * @author
  * @create 2017-07-04 下午5:58
  **/
/**
  * ETL driver: reads job configuration from Redis (key "etl_conf"), then for
  * each configured MySQL source executes its SQL and prints every result cell
  * as a (primaryKeyValue, COLUMN_LABEL, columnValue) triple, followed by the
  * configuration echoed back as JSON.
  *
  * NOTE(review): `extends App` was replaced with an explicit `main` to avoid
  * the App trait's delayed-initialization pitfalls; `ETL.main(args)` remains
  * the entry point, so external callers are unaffected.
  */
object ETL {

  def main(args: Array[String]): Unit = {
    val sparkConf = new SparkConf().setMaster("local").setAppName("ETL")
    val sqlContext = new SparkSession.Builder().config(sparkConf).getOrCreate()
    // Broadcast the Redis endpoint so each executor opens its own client.
    // SECURITY(review): host/port are hard-coded — move to configuration.
    val redisConf = sqlContext.sparkContext.broadcast(("172.19.110.223", 6379))
    try {
      val etlconfRDD = sqlContext.sparkContext.parallelize(Seq("etl_conf")).mapPartitions {
        partition => {
          val data = ArrayBuffer[ETLConfiguration]()
          val redisClient = new RedisClient(redisConf.value._1, redisConf.value._2)
          implicit val format = Serialization.formats(NoTypeHints)
          for (line <- partition) {
            // redisClient.get returns Option[String]; skip missing keys
            // instead of round-tripping through a null sentinel.
            redisClient.get(line).foreach { value =>
              data += read[ETLConfiguration](value)
            }
          }
          data.toIterator
        }
      }.persist()

      println(etlconfRDD.mapPartitions(etlConfs => {
        etlConfs.map { etlConf =>
          etlConf.sourceConfs.map { sourceConf =>
            // SECURITY(review): hard-coded AES key in source — externalize it.
            val mysqlPW =
              if (sourceConf.encrypted) AES.decryptFromBase64(sourceConf.password, "I am a fool, OK?")
              else sourceConf.password
            val connection = MySQLUtil.createConnection(sourceConf.username, mysqlPW, sourceConf.url)
            val rows = ArrayBuffer[(String, String, String)]()
            try {
              val statement = connection.createStatement()
              val resultSet = statement.executeQuery(sourceConf.sql)
              // Hoist metadata lookups out of the loops — the original
              // re-fetched ResultSetMetaData and the column label per cell.
              val metaData = resultSet.getMetaData()
              val columnCount = metaData.getColumnCount()
              val columnLabels =
                (1 to columnCount).map(i => String.valueOf(metaData.getColumnLabel(i)).toUpperCase())
              val primaryKeyColName = EtlHandler.getPrimaryKeyColName()
              while (resultSet.next()) {
                // The primary-key value is constant within a row; read it once
                // per row instead of once per cell.
                val primaryValue = String.valueOf(resultSet.getObject(primaryKeyColName))
                for (i <- 1 to columnCount) {
                  rows += ((primaryValue, columnLabels(i - 1), String.valueOf(resultSet.getObject(i))))
                }
              }
            } catch {
              case e: Exception => e.printStackTrace()
            } finally {
              // Closing the connection also releases its statements/result sets.
              MySQLUtil.closeConnection(connection)
            }
            rows
          }
        }
      }).collect().mkString("\n"))

      implicit val format = Serialization.formats(NoTypeHints)
      println(write(etlconfRDD.collect()))
    }
    catch {
      case e: Exception => e.printStackTrace()
    }
    finally {
      sqlContext.stop()
    }
  }
}


/** JDBC helper for opening and closing MySQL connections. */
object MySQLUtil {

  val log = Logger.getLogger(MySQLUtil.getClass)

  /**
    * Opens a MySQL connection for the given credentials and JDBC URL.
    *
    * `Class.forName` alone registers the JDBC driver; the original
    * `.newInstance()` call was redundant (and is deprecated), so it
    * was removed.
    *
    * @param user     database user name
    * @param password database password (already decrypted by the caller)
    * @param url      JDBC connection URL
    * @return an open [[java.sql.Connection]]; the caller must close it
    */
  def createConnection(user: String, password: String, url: String): Connection = {
    log.info("************开启数据库链接*************")
    Class.forName("com.mysql.jdbc.Driver")
    DriverManager.getConnection(url, user, password)
  }

  /** Closes the given connection; a null argument is tolerated and ignored. */
  def closeConnection(connection: Connection): Unit = {
    log.info("************关闭数据库链接*************")
    if (connection != null)
      connection.close()
  }

}


/**
  * Redis helper (apparently unused scaffolding).
  *
  * NOTE(review): the method name contains a typo ("cteate" → "create");
  * renaming would break any existing callers, so it is only flagged here.
  * NOTE(review): `r.get()` is invoked without a key — confirm this compiles
  * against the RedisClient API in use. The fetched value is discarded and
  * the client connection is never closed.
  */
object RedisUtil {

  def cteateConnection(): Unit = {
    val r = new RedisClient("localhost", 6379)
    // Unwraps the Option to a nullable value; result is unused.
    val value = r.get() match {
      case Some(a) => a
      case None => null
    }
  }
}


/** Supplies ETL metadata shared across source extractions. */
object EtlHandler {

  /** Name of the column treated as the primary key when tagging extracted cells. */
  def getPrimaryKeyColName(): String = "orderNo"

}


