package com.kingsoft.dc.khaos.dsl.utils

import java.sql.{Connection, DriverManager, Statement}
import java.util.Properties
import java.util.regex.Pattern

import com.kingsoft.dc.khaos.dsl.spark.udf.constants.UdfInfo
import com.kingsoft.dc.khaos.dsl.spark.udf.model.{Ks3Udf, UdfDefine}
import org.apache.commons.lang3.StringUtils
import org.apache.spark.internal.Logging
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions.udf

import scala.collection.mutable
import scala.collection.mutable.ArrayBuffer
import scala.util.control.NonFatal

/**
  * @author gaosong3@kingsoft.com
  *         2019/3/14
  */
object UdfUtils extends Logging {

  // NOTE(review): this field appears unused — every method that mentions
  // `udfList` shadows it with a parameter of the same name; presumably legacy
  // state, confirm no external reflection/serialization use before removing.
  private var udfList: List[(String, List[String])] = List[(String, List[String])]()

  /**
    * Reflectively instantiates `className` via its public no-arg constructor
    * and invokes the zero-argument method `methodName` on the new instance.
    *
    * On any non-fatal failure (class not found, no such method, constructor
    * failure, ...) the error is logged and the method falls through returning
    * Unit — the original best-effort contract is preserved, so callers must be
    * prepared for a non-T result in the failure case (inferred result is Any).
    *
    * @param className  fully qualified class name with a public no-arg constructor
    * @param methodName name of a public zero-argument method on that class
    * @tparam T expected type of the method's return value (unchecked cast)
    */
  def callMethod[T](className: String, methodName: String) = {
    try {
      val clazz = Class.forName(className)
      // getDeclaredConstructor().newInstance() replaces the deprecated
      // Class#newInstance, which silently propagated checked exceptions
      // thrown by the constructor.
      val instance = clazz.getDeclaredConstructor().newInstance()
      clazz.getMethod(methodName).invoke(instance).asInstanceOf[T]
    } catch {
      // NonFatal (not a bare catch-all) so OutOfMemoryError, fatal VM errors
      // and InterruptedException keep propagating; log via Logging instead of
      // printStackTrace.
      case NonFatal(e) =>
        logError(s"callMethod failed: className=${className} methodName=${methodName}", e)
    }
  }

  /**
    * 注册hive udf
    *
    * @param sparkSession
    * @param udfList
    */
  /**
    * Registers Hive UDFs on the session by issuing `add jar` plus
    * `create temporary function` statements through the SQL context.
    *
    * @param sparkSession session whose SQL context receives the DDL
    * @param udfList      UDF descriptors (jar path, function name, implementing class)
    */
  def registerHiveUdf(sparkSession: SparkSession, udfList: List[UdfInfo]): Unit = {
    // Loop variable renamed from `udf` to `udfInfo`: the original shadowed the
    // imported org.apache.spark.sql.functions.udf.
    for (udfInfo <- udfList) {
      sparkSession.sqlContext.sql(s"add jar ${udfInfo.getJarAbsolutePath}")
      sparkSession.sqlContext.sql(s"create temporary function ${udfInfo.getUdfName} as '${udfInfo.getClassName}'")
    }
    logInfo(s"===>register hive udf succeeded! udfList=${udfList} ")
  }

  /**
    * JDBC方式注册hive udf
    *
    * @param sparkSession
    * @param udfList
    */
  /**
    * Registers Hive UDFs through a direct Hive JDBC connection.
    *
    * Fixes a resource leak in the original code: the Statement was created as
    * a local `val stmt` that shadowed the outer `var stmt`, so the `finally`
    * block always saw null and never closed it.
    *
    * @param sparkSession unused here; kept for signature compatibility with callers
    * @param jdbcprops    must provide "url", "user" and "password" properties
    * @param udfList      UDF descriptors (jar path, function name, implementing class)
    */
  def registerHiveUdfByJDBC(sparkSession: SparkSession, jdbcprops: Properties, udfList: List[UdfInfo]): Unit = {
    val driver = "org.apache.hive.jdbc.HiveDriver"
    var conn: Connection = null
    var stmt: Statement = null
    try {
      Class.forName(driver)
      conn = DriverManager.getConnection(jdbcprops.getProperty("url"),
        jdbcprops.getProperty("user"),
        jdbcprops.getProperty("password"))
      // Assign to the OUTER var (do not re-declare with `val`) so the
      // finally block can actually close the statement.
      stmt = conn.createStatement()
      for (udfInfo <- udfList) {
        stmt.execute(s"add jar ${udfInfo.getJarAbsolutePath}")
        stmt.execute(s"create temporary function ${udfInfo.getUdfName} as '${udfInfo.getClassName}'")
      }
    } catch {
      // NonFatal instead of a bare catch-all: fatal VM errors must propagate.
      // Log at error level with the cause attached instead of printStackTrace.
      case NonFatal(e) =>
        logError(s"hive connect failed! url=[${jdbcprops.getProperty("url", "")}] " +
          s"user=[${jdbcprops.getProperty("user", "")}]", e)
    } finally {
      if (stmt != null) stmt.close()
      if (conn != null) conn.close()
    }
  }

  /**
    * 注册默认内置函数
    *
    * @param sparkSession
    */
  /**
    * Registers the built-in UDF set on the session, grouped by category.
    *
    * Each function is declared exactly once as a (sqlName, implementation)
    * pair and registered in a single pass, removing the original's
    * define-then-register duplication (where a UDF could be defined but
    * accidentally never registered, or vice versa).
    *
    * Registered SQL names — including the historical "uft8_to_gbk" spelling
    * and the mixed-case "float2Int"/"long2Int"/"str2Int" — are kept
    * byte-identical for backwards compatibility with existing queries.
    *
    * @param sparkSession session whose UDF registry is populated
    */
  def registerUDF(sparkSession: SparkSession): Unit = {

    // 功能性函数 — utility / crypto / masking functions
    val utilityUdfs = Seq(
      "md5" -> udf(UdfDefine.md5EncryptUDF _),
      "base64encrypt" -> udf(UdfDefine.base64EncryptUDF _),
      "base64decrypt" -> udf(UdfDefine.base64DecryptUDF _),
      "aes_encrypt" -> udf(UdfDefine.aesEncryptUDF _),
      "aes_decrypt" -> udf(UdfDefine.aesDecryptUDF _),
      "data_hide" -> udf(UdfDefine.dataHideUDF _),
      "half_data_hide" -> udf(UdfDefine.halfDataHideUDF _)
    )

    // 日期函数 — date/time functions
    val dateUdfs = Seq(
      "from_unixtime" -> udf(UdfDefine.fromUnixTimeUDF _),
      "current_timestamp" -> udf(UdfDefine.currentTimestampUDF _),
      "current_timestampms" -> udf(UdfDefine.currentTimestampMsUDF _),
      "unix_timestamp" -> udf(UdfDefine.unixTimestampUDF _),
      "unix_timestampms" -> udf(UdfDefine.unixTimestampMsUDF _),
      "unix_timestamp_pattern" -> udf(UdfDefine.unixTimestampPatternUDF _),
      "unix_timestampms_pattern" -> udf(UdfDefine.unixTimestampMsPatternUDF _),
      "unix_timestampmsplus_pattern" -> udf(UdfDefine.unixTimestampMsPlusPatternUDF _),
      "to_date" -> udf(UdfDefine.toDateUDF _),
      "to_date2" -> udf(UdfDefine.toDate2UDF _),
      "year" -> udf(UdfDefine.yearUDF _),
      "quarter" -> udf(UdfDefine.quarterUDF _),
      "month" -> udf(UdfDefine.monthUDF _)
    )

    // 字符函数 — string functions
    val stringUdfs = Seq(
      "ascii" -> udf(UdfDefine.asciiUDF _),
      "length" -> udf(UdfDefine.lengthUDF _),
      "trim_all" -> udf(UdfDefine.trimAllUDF _),
      "trim" -> udf(UdfDefine.trimUDF _),
      "reverse" -> udf(UdfDefine.reverseUDF _),
      "toupper" -> udf(UdfDefine.toUpperUDF _),
      "tolower" -> udf(UdfDefine.toLowerUDF _),
      "substr" -> udf(UdfDefine.subStrUDF _),
      "substr_later" -> udf(UdfDefine.subStrLaterUDF _),
      "substr_before" -> udf(UdfDefine.subStrBeforeUDF _),
      "substr_mid" -> udf(UdfDefine.subStrMidUDF _),
      "substr_mid2" -> udf(UdfDefine.subStrMid2UDF _),
      "concat" -> udf(UdfDefine.concatUDF _),
      "concat_separator" -> udf(UdfDefine.concatSeparatorUDF _)
    )

    // 数学函数 — math functions
    // NOTE: greatest/least were commented out in the original and remain
    // unregistered here:
    //   "greatest" -> udf(UdfDefine.greatestUDF _),
    //   "least" -> udf(UdfDefine.leastUDF _),
    val mathUdfs = Seq(
      "abs" -> udf(UdfDefine.absUDF _),
      "add" -> udf(UdfDefine.addUDF _),
      "bround" -> udf(UdfDefine.broundUDF _),
      "bround_bits" -> udf(UdfDefine.broundBitsUDF _),
      "div" -> udf(UdfDefine.divUDF _),
      "mul" -> udf(UdfDefine.mulUDF _),
      "negative_double" -> udf(UdfDefine.negativeDoubleUDF _),
      "negative_int" -> udf(UdfDefine.negativeIntUDF _),
      "pmod_double" -> udf(UdfDefine.pmodDoubleUDF _),
      "pmod_int" -> udf(UdfDefine.pmodIntUDF _),
      "round" -> udf(UdfDefine.roundUDF _),
      "round_bits" -> udf(UdfDefine.roundBitsUDF _),
      "sub" -> udf(UdfDefine.subUDF _)
    )

    // 条件函数 — conditional functions
    val conditionalUdfs = Seq(
      "isnotnull" -> udf(UdfDefine.isNotNullUDF _),
      "isnull" -> udf(UdfDefine.isNullUDF _)
    )

    // 类型转换函数 — type conversion functions
    val castUdfs = Seq(
      "any2str" -> udf(UdfDefine.any2StrUDF _),
      "double2float" -> udf(UdfDefine.double2FloatUDF _),
      "double2long" -> udf(UdfDefine.double2LongUDF _),
      "float2double" -> udf(UdfDefine.float2DoubleUDF _),
      "float2Int" -> udf(UdfDefine.float2IntUDF _),
      "float2long" -> udf(UdfDefine.float2LongUDF _),
      "int2double" -> udf(UdfDefine.int2DoubleUDF _),
      "int2float" -> udf(UdfDefine.int2FloatUDF _),
      "int2long" -> udf(UdfDefine.int2LongUDF _),
      "long2double" -> udf(UdfDefine.long2DoubleUDF _),
      "long2Int" -> udf(UdfDefine.long2IntUDF _),
      "str2boolean" -> udf(UdfDefine.str2BooleanUDF _),
      "str2char" -> udf(UdfDefine.str2CharUDF _),
      "str2double" -> udf(UdfDefine.str2DoubleUDF _),
      "str2float" -> udf(UdfDefine.str2FloatUDF _),
      "str2Int" -> udf(UdfDefine.str2IntUDF _),
      "str2long" -> udf(UdfDefine.str2LongUDF _),
      "str2short" -> udf(UdfDefine.str2ShortUDF _)
    )

    // 字符集转换函数 — charset conversion functions
    // ("uft8_to_gbk" typo kept: existing SQL depends on the registered name)
    val charsetUdfs = Seq(
      "gbk_to_utf8" -> udf(UdfDefine.GBKToUTF8UDF _),
      "iso_to_gbk" -> udf(UdfDefine.ISOToGBKUDF _),
      "recode" -> udf(UdfDefine.recodeUDF _),
      "uft8_to_gbk" -> udf(UdfDefine.UTF8ToGBKUDF _),
      "unicode_to_utf8" -> udf(UdfDefine.UnicodeToUTF8UDF _),
      "utf8_to_unicode" -> udf(UdfDefine.UTF8ToUnicodeUDF _)
    )

    // ks3数据迁移新增udf — UDFs added for the KS3 data migration
    val ks3Udfs = Seq(
      "coverTimeFormatLong" -> udf(Ks3Udf.coverTimeFormatLong _),
      "coverTimeFormat" -> udf(Ks3Udf.coverTimeFormat _),
      "get_split_size" -> udf(Ks3Udf.get_split_size _),
      "get_split_value" -> udf(Ks3Udf.get_split_value _),
      "get_second" -> udf(Ks3Udf.get_second _),
      "get_minute" -> udf(Ks3Udf.get_minute _),
      "get_json" -> udf(Ks3Udf.get_json _),
      "get_code" -> udf(Ks3Udf.get_code _),
      "custom" -> udf(Ks3Udf.custom _),
      "concat_plus" -> udf(Ks3Udf.concat_plus _),
      "concat_custom" -> udf(Ks3Udf.concat_custom _),
      "getLogLength" -> udf(Ks3Udf.getLogLength _),
      "parseToMinute" -> udf(Ks3Udf.parseToMinute _),
      "getDefaultValue" -> udf(Ks3Udf.getDefaultValue _),
      "get5Min" -> udf(Ks3Udf.get5Min _),
      "getMin" -> udf(Ks3Udf.getMin _),
      "getSecond" -> udf(Ks3Udf.getSecond _)
    )

    // Single registration pass over every category.
    val allUdfs = utilityUdfs ++ dateUdfs ++ stringUdfs ++ mathUdfs ++
      conditionalUdfs ++ castUdfs ++ charsetUdfs ++ ks3Udfs
    allUdfs.foreach { case (name, fn) =>
      sparkSession.udf.register(name, fn)
    }
  }

  // 注册另一套处理逻辑的udf
  /**
    * Returns the dispatch table for the alternate ("custom") UDF processing
    * path, mapping UDF name to its processing-mode code.
    *
    * Built directly as an immutable Map literal instead of filling a mutable
    * HashMap and calling toMap — identical result, no mutable intermediary.
    *
    * @return map of UDF name -> mode code ("1".."4")
    */
  def registerUDFCustom() = {
    Map(
      "get_IpState" -> "1",
      "get_IpProvince" -> "2",
      "get_IpCity" -> "3",
      "get_IpOperators" -> "4"
    )
  }

  /**
    * 从表达式中提取 需要转换udf名称及参数
    *
    * @param expression
    * @return
    */
  /**
    * Parses an expression into a (udfName, arguments) pair.
    *
    * Supported forms:
    *  - UDF call:  `name(a, b, ...)` or `name()`        → (name, argument list)
    *  - constant:  `"text"`, `'text'` or a bare word    → (null, List(value))
    *
    * Commas inside single- or double-quoted arguments are not treated as
    * separators (20200526 fix, preserved). A null first element signals
    * "constant, not a UDF" to callers.
    *
    * The original mixed trimmed and untrimmed views of the input (it matched
    * against `expression.trim` but called `startsWith`/`substring` on the raw
    * string), so a quoted constant with leading whitespace kept its quotes;
    * this version works on the trimmed expression throughout.
    *
    * @param expression raw expression text
    * @return (udf name or null, argument/constant values)
    * @throws Exception when the expression matches neither supported form
    */
  def extractUDF(expression: String): (String, List[String]) = {
    val expr = expression.trim
    // UDF shape: name( ... ) with no nested parentheses
    val udfPattern = Pattern.compile("[\\w]+\\([^\\(\\)]*\\)")
    // constant shape: '...' or "..." or a bare word such as aa_12
    // (arbitrary characters inside quotes require front-end support)
    val constantPattern = Pattern.compile("('.+')?(\".+\")?([\\w]+)?")

    if (udfPattern.matcher(expr).matches()) {
      val udfName = StringUtils.substringBefore(expr, "(")
      val argStr = StringUtils.substringBetween(expr, "(", ")").trim
      // 20200526: split on commas, but never on a comma that sits inside a
      // quoted argument; double-quote variant wins when the args contain ".
      val splitRegex =
        if (argStr.contains("\"")) ",(?=([^\\\"]*\\\"[^\\\"]*\\\")*[^\\\"]*$)"
        else ",(?=([^\\\']*\\\'[^\\\']*\\\')*[^\\\']*$)"
      // trim each argument in case it carries surrounding whitespace
      (udfName, argStr.split(splitRegex, -1).map(_.trim).toList)
    } else if (constantPattern.matcher(expr).matches()) {
      // Strip the outer quotes of "abc" / 'abc'; bare constants pass through.
      val value =
        if (expr.startsWith("\"")) {
          StringUtils.substring(expr, expr.indexOf("\"") + 1, expr.lastIndexOf("\"")).trim
        } else if (expr.startsWith("\'")) {
          StringUtils.substring(expr, expr.indexOf("\'") + 1, expr.lastIndexOf("\'")).trim
        } else {
          expr
        }
      (null, List(value))
    } else {
      throw new Exception(s"不支持该表达式！expression=${expression}")
    }
  }
}
