package com.lvmama.rhino.common.utils.JDBCUtil

import java.sql.{Connection, DriverManager, Statement}

import org.apache.spark.sql.functions._
import org.apache.spark.sql._
import org.apache.spark.streaming.dstream.DStream

import scala.reflect.ClassTag

/**
  * Created by yuanxiaofeng on 2016/10/25.
  */
object Conversions {
  /**
    * Adds MySQL append-insert helpers to `DataFrame` via implicit conversion.
    * Connection URL/driver come from `JDBCTemplate.JDBCDefaultSet`; connection
    * properties from `JDBCTemplate.getConProperties`.
    */
  implicit class ImplicitInsert(df: DataFrame) extends Serializable {
    val JDBCDefault = JDBCTemplate.JDBCDefaultSet
    val connP = JDBCTemplate.getConProperties

    /**
      * Appends `df` to `tableName` on the default connection. If the frame has
      * a `category_id` column, rows are first narrowed by `filterUsefulData`
      * (only whitelisted categories are persisted for now).
      */
    def insertDF2Mysql(tableName: String): Unit = {
      // `if` is an expression in Scala — no need for a null-initialized var.
      val filterDF =
        if (df.columns.contains("category_id")) df.transform(filterUsefulData)
        else df
      writeTo(filterDF, JDBCDefault("conn"), tableName)
    }

    /** Appends `df` to `tableName` on the default connection, no filtering. */
    def insertDF2MysqlDirect(tableName: String): Unit =
      writeTo(df, JDBCDefault("conn"), tableName)

    /** Appends `df` to `tableName` on the "wolverine" connection. */
    def insertDF2MysqlToWolverine(tableName: String): Unit =
      writeTo(df, JDBCDefault("wolverineConn"), tableName)

    // Shared append-mode JDBC write; single place for driver/property wiring.
    private def writeTo(data: DataFrame, url: String, tableName: String): Unit =
      data.write.mode(SaveMode.Append)
        .option("driver", JDBCDefault("driver"))
        .jdbc(url, tableName, connP)
  }

  /**
    * Adds MySQL table-loading helpers to `SQLContext` via implicit conversion.
    * Each public overload targets a named connection ("conn", "hippoConn",
    * "wolverineConn") with or without partitioning predicates.
    */
  implicit class ImplicitLoad(sqlCtx: SQLContext) extends Serializable {
    val JDBCDefault = JDBCTemplate.JDBCDefaultSet
    val connP = JDBCTemplate.getConProperties

    /** Loads `tableName` from the default connection. */
    def loadFromMysql(tableName: String): DataFrame =
      load(JDBCDefault("conn"), tableName)

    /** Loads `tableName` from the default connection, one partition per predicate. */
    def loadFromMysql(tableName: String, predicates: Array[String]): DataFrame =
      load(JDBCDefault("conn"), tableName, predicates)

    /** Loads `tableName` from the "hippo" connection, one partition per predicate. */
    def loadFromMysqlHippo(tableName: String, predicates: Array[String]): DataFrame =
      load(JDBCDefault("hippoConn"), tableName, predicates)

    /** Loads `tableName` from the "hippo" connection. */
    def loadFromMysqlHippo(tableName: String): DataFrame =
      load(JDBCDefault("hippoConn"), tableName)

    /** Loads `tableName` from the "wolverine" connection, one partition per predicate. */
    def loadFromMysqlWolverine(tableName: String, predicates: Array[String]): DataFrame =
      load(JDBCDefault("wolverineConn"), tableName, predicates)

    /** Loads `tableName` from the "wolverine" connection. */
    def loadFromMysqlWolverine(tableName: String): DataFrame =
      load(JDBCDefault("wolverineConn"), tableName)

    // Reader pre-configured with the JDBC driver; built fresh per call.
    private def reader = sqlCtx.read.option("driver", JDBCDefault("driver"))

    private def load(url: String, tableName: String): DataFrame =
      reader.jdbc(url, tableName, connP)

    private def load(url: String, tableName: String, predicates: Array[String]): DataFrame =
      reader.jdbc(url, tableName, predicates, connP)
  }

  /**
    * SparkSession-based JDBC loading helpers (newer API than `ImplicitLoad`).
    * Reads tables from the default connection configured in `JDBCTemplate`.
    */
  implicit class JDBCLoadV2(spark: SparkSession) extends Serializable {
    val JDBCDefault = JDBCTemplate.JDBCDefaultSet
    val connP = JDBCTemplate.getConProperties

    /** Reads the whole of `tableName` as a DataFrame. */
    def jdbc(tableName: String): DataFrame = {
      val reader = spark.read.option("driver", JDBCDefault("driver"))
      reader.jdbc(JDBCDefault.get("conn").get, tableName, connP)
    }

    /** Reads `tableName`, creating one partition per predicate in `predicates`. */
    def jdbc(tableName: String, predicates: Array[String]): DataFrame = {
      val reader = spark.read.option("driver", JDBCDefault("driver"))
      reader.jdbc(JDBCDefault.get("conn").get, tableName, predicates, connP)
    }
  }

  /**
    * Adds a MySQL sink to keyed-count DStreams. Credentials and URL come from
    * `JDBCTemplate.JDBCDefaultSet` (empty string when a key is absent).
    */
  implicit class DStream2Mysql[T: ClassTag](dstream: DStream[((String, String), Int)]) extends Serializable {
    val JDBCDefault = JDBCTemplate.JDBCDefaultSet
    val driver = "com.mysql.jdbc.Driver"
    val url = JDBCDefault.getOrElse("url", "")
    val user = JDBCDefault.getOrElse("user", "")
    val pwd = JDBCDefault.getOrElse("password", "")
    // Retained for source compatibility; no longer used — JDBC resources are
    // created per partition below because they are not serializable and must
    // never be shared class-level state captured by Spark closures.
    var connection: Connection = _
    var statement: Statement = _

    /**
      * Executes `sql` once per record of every RDD in the stream. A fresh
      * connection/statement pair is opened per partition and always closed.
      *
      * NOTE(review): the record itself is ignored — the same static `sql`
      * string runs for each element; confirm this is the intended behavior.
      */
    def insert2Mysql(sql: String): Unit = {
      dstream.foreachRDD(rdd => {
        rdd.foreachPartition(records => {
          // Partition-local resources: safe on executors, no cross-partition
          // sharing, no serialization of Connection/Statement.
          var conn: Connection = null
          var stmt: Statement = null
          try {
            Class.forName(driver)
            conn = DriverManager.getConnection(url, user, pwd)
            stmt = conn.createStatement()
            records.foreach { _ =>
              stmt.executeUpdate(sql)
            }
          } catch {
            case e: Exception => e.printStackTrace()
          } finally {
            if (stmt != null) {
              stmt.close()
            }
            if (conn != null) {
              conn.close()
            }
          }
        })
      })
    }
  }

  import java.sql._
  /**
    * Structured-streaming sink writing rows over JDBC. One connection per
    * partition/epoch: opened in `open`, released in `close`.
    *
    * NOTE(review): `process` is currently a no-op (the insert statement is
    * commented out), so this sink discards data — confirm before use.
    */
  class JDBCSink extends ForeachWriter[Row]{
    val JDBCDefault = JDBCTemplate.JDBCDefaultSet
    val connP = JDBCTemplate.getConProperties
    val driver = "com.mysql.jdbc.Driver"
    val url = JDBCDefault.getOrElse("url", "")
    val user = JDBCDefault.getOrElse("user", "")
    val pwd = JDBCDefault.getOrElse("password", "")
    var connection: Connection = _
    var statement: Statement = _

    /** Opens the JDBC connection for this partition; `true` accepts the data. */
    def open(partitionId: Long, version: Long): Boolean = {
      Class.forName(driver)
      connection = DriverManager.getConnection(url, user, pwd)
      statement = connection.createStatement()
      true
    }

    /** Per-row callback — intentionally inert until the INSERT is enabled. */
    def process(value: Row): Unit = {
      value.toSeq
      //statement.executeUpdate(s"INSERT INTO ${value._1} VALUES (${value._2.mkString("\"", "\",\"", "\"")})")
    }

    /**
      * Releases JDBC resources. Null-guarded: `close` is invoked even when
      * `open` threw before the connection was established, and the statement
      * must be closed as well as the connection (fixes a leak/NPE).
      */
    def close(errorOrNull: Throwable): Unit = {
      if (statement != null) {
        statement.close()
      }
      if (connection != null) {
        connection.close()
      }
    }
  }



  /**
    * For now only rows with a whitelisted `category_id` are inserted into the
    * database; the restriction will be fully lifted later.
    * Group tour 15, local tour 16, hotel package 17, resort hotel 1, other null.
    * NOTE(review): 11/12/13 are also accepted by the filter although the
    * original comment did not list them — confirm.
    */
  val filterUsefulData = (df: DataFrame) =>
    df.filter(col("category_id").isin(1, 11, 12, 13, 15, 16, 17)
      || col("category_id").isNull)
}
