package org.zjt.spark.etl

import java.sql.{Connection, DriverManager}
import java.util.Properties

import scala.collection.mutable.ArrayBuffer
import scala.util.control.NonFatal

import com.redis.RedisClient
import org.apache.log4j.Logger
import org.apache.spark.SparkConf
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.types._
import org.json4s.NoTypeHints
import org.json4s.jackson.Serialization
import org.json4s.jackson.Serialization.{read, write}

/**
  * ETL driver: reads a JSON-serialized [[ETLConfiguration]] from Redis,
  * extracts rows from each configured relational source over JDBC, and
  * groups the extracted (primaryKey, column, value) triples by task id.
  *
  * @author
  * @create 2017-07-04 17:58
  **/
object ETL2 extends App {

  // Redis endpoint that stores the serialized ETL job configuration.
  val redisConf = ("172.19.110.223", 6379)
  val redisKey = "etl_conf"
  // val (was var): the Spark configuration is never reassigned.
  val sparkConf = new SparkConf().setMaster("local").setAppName("ETL")
  val sqlSession = new SparkSession.Builder().config(sparkConf).getOrCreate()
  val driver = "com.mysql.jdbc.Driver"
  // Shared secret used to decrypt passwords that are stored encrypted in the config.
  val salt = "I am a fool, OK?"
  try {
    // json4s formats required by read[ETLConfiguration] below.
    implicit val format = Serialization.formats(NoTypeHints)
    val redisClient = new RedisClient(redisConf._1, redisConf._2)
    // Option-based handling instead of a null sentinel: do nothing when the key is absent.
    redisClient.get(redisKey).foreach { value =>
      val etlConf: ETLConfiguration = read[ETLConfiguration](value)
      // Pair each extraction conf with its source conf by position (replaces the
      // manual index loop; assumes the two lists are parallel — zip truncates
      // safely instead of throwing if they ever differ in length).
      for ((eConf, sourceConf) <- etlConf.eConfs.zip(etlConf.sourceConfs)) {
        // Source passwords may be stored AES-encrypted; decrypt on demand.
        val password =
          if (sourceConf.encrypted) AES.decryptFromBase64(sourceConf.password, salt)
          else sourceConf.password
        val pros = new Properties()
        pros.setProperty("driver", driver)
        pros.setProperty("user", sourceConf.username)
        pros.setProperty("password", password)
        // Wrap the configured SQL as a derived table so Spark can query it over JDBC.
        val jdbcDF = sqlSession.read.jdbc(sourceConf.url, s"(${sourceConf.sql} ) as tmp", pros)
        val eTaskId = eConf.sourceId
        import sqlSession.implicits._
        // For every row: the first column is treated as the primary key; every
        // other column becomes a (primaryKeyValue, columnName, columnValue) triple.
        val eData = jdbcDF.map { row =>
          val triples = ArrayBuffer[(String, String, String)]()
          val cols = row.schema.map { case StructField(field, fieldType, _, _) => (field, fieldType) }
          var primaryKeyValue: String = null
          for (i <- 0 until cols.size) {
            if (i == 0)
              primaryKeyValue = String.valueOf(row.get(i)) // String.valueOf handles null cells
            else
              triples += ((primaryKeyValue, cols(i)._1, String.valueOf(row.get(i))))
          }
          (eTaskId, triples)
        }.groupByKey(_._1)
          // BUG FIX: the original called agg() with no aggregator, which does not
          // compile (KeyValueGroupedDataset.agg requires at least one TypedColumn).
          // Restore the intended reduction (left as a comment in the original):
          // concatenate the per-row triples for each task id.
          .reduceGroups((v1, v2) => (v1._1, v1._2 ++ v2._2))

        eData.show()
      }
    }
  } catch {
    // NonFatal: let fatal errors (OOM, InterruptedException, LinkageError) propagate.
    case NonFatal(e) => e.printStackTrace()
  } finally {
    // Always release the Spark session, even on failure.
    sqlSession.stop()
  }
}





