package com.kingsoft.dc.khaos.module.spark.util

import com.kingsoft.dc.khaos.KhaosContext

import java.sql.{Date, Timestamp}
import java.util.regex.Pattern
import com.kingsoft.dc.khaos.module.spark.constants.ColumnType
import org.apache.spark.internal.Logging
import org.apache.spark.sql.Column
import org.apache.spark.sql.functions.lit
import org.apache.spark.sql.types._

import scala.collection.mutable.ArrayBuffer

/**
 *
 * @author gaosong3@kingsoft.com
 * */
object SparkJobHelper extends Logging {

  /**
   * Convert a constant expression from a sync task into a typed Spark literal column.
   *
   * The raw `expression` is validated against the declared column type; the actual
   * literal value is taken from `argNameList(0)` (presumably the pre-parsed constant
   * extracted from the expression — confirm against the caller).
   *
   * @param sColName    target column name the literal is aliased to
   * @param colType     data-management column type (see [[ColumnType]])
   * @param expression  raw constant expression, used for validation and error messages
   * @param argNameList parsed constant arguments; only the first element is used
   * @return a Spark [[Column]] literal cast to the Spark type matching `colType`
   * @throws Exception when the expression cannot be converted to the declared type
   */
  def expressionConstantConvert(sColName: String,
                                colType: String,
                                expression: String,
                                argNameList: List[String]): Column = {
    // "+" must stay quantified: requiring at least one digit keeps an empty/blank
    // expression from passing validation and blowing up later in .toLong with a
    // raw NumberFormatException instead of the descriptive error below.
    val intPattern = Pattern.compile("^[-\\+]?[\\d]+$") // e.g. 123, -45
    val floatPattern = Pattern.compile("([1-9]+[0-9]*|0)(\\.[\\d]+)?") // e.g. 123.23

    colType match {
      case ColumnType.NUMBER =>
        val isDigit = intPattern.matcher(expression.replaceAll(" ", "")).matches()
        if (isDigit) {
          lit(argNameList.head.replaceAll(" ", "").toLong).cast(LongType) as sColName
        } else {
          throw new Exception(s"常量转换为字段类型失败！ColumnType=$colType 表达式=$expression")
        }
      case ColumnType.DATE =>
        // Date parsing/validation is delegated entirely to DataTypeConvertUtils.
        lit(DataTypeConvertUtils.getDate(argNameList.head)).cast(DateType) as sColName
      case ColumnType.DECIMAL =>
        val isFloat = floatPattern.matcher(expression.replaceAll(" ", "")).matches()
        if (isFloat) {
          lit(argNameList.head.replaceAll(" ", "").toDouble).cast(DoubleType) as sColName
        } else {
          throw new Exception(s"常量转换为字段类型失败！ColumnType=$colType 表达式=$expression")
        }
      case ColumnType.TIME =>
        val isTime = DateUtils.isValidTime(expression.replaceAll(" ", ""))
        if (isTime) {
          // TIME is carried as a plain string (Spark has no standalone time type).
          lit(argNameList.head.replaceAll(" ", "")).cast(StringType) as sColName
        } else {
          throw new Exception(s"常量转换为字段类型失败！ColumnType=$colType 表达式=$expression")
        }
      case ColumnType.DATETIME =>
        // BUG FIX: previously cast to DateType, silently truncating the time-of-day.
        // DATETIME maps to TimestampType everywhere else in this object
        // (see dmDataType2SparkDataType) and in the original pre-refactor code.
        lit(DataTypeConvertUtils.getDateTime(argNameList.head)).cast(TimestampType) as sColName
      case _ =>
        // Any other type degrades to a trimmed string literal.
        lit(argNameList.head.trim).cast(StringType) as sColName
    }
  }

  /**
   * Map a data-management column type to the corresponding Spark SQL data type.
   *
   * Matching is case-insensitive and ignores surrounding whitespace.
   * NUMBER → LongType, DECIMAL → DoubleType, DATETIME → TimestampType;
   * TIME is represented as a string (Spark has no standalone time type).
   *
   * @param dmColType 数据管理数据类型 (data-management column type)
   * @return the Spark [[DataType]]; [[NullType]] for unrecognized inputs
   */
  def dmDataType2SparkDataType(dmColType: String): DataType = {
    // Explicit return type added: public API members should not rely on inference.
    val colType = dmColType.trim.toUpperCase
    colType match {
      case ColumnType.STRING   => StringType
      case ColumnType.NUMBER   => LongType
      case ColumnType.DATE     => DateType
      case ColumnType.DECIMAL  => DoubleType
      case ColumnType.TIME     => StringType
      case ColumnType.DATETIME => TimestampType
      case _                   => NullType
    }
  }

  /**
   * Build a Spark schema from (column name, data-management type) pairs.
   *
   * Each input column becomes one nullable field whose Spark type follows the
   * standard mapping implemented by [[dmDataType2SparkDataType]].
   *
   * @param columnArr <字段名称，字段类型> (column name, column type) pairs
   * @return the resulting [[org.apache.spark.sql.types.StructType]]
   */
  def dynamicBuildDFSchema(columnArr: ArrayBuffer[(String, String)]): org.apache.spark.sql.types.StructType = {
    // Delegate the per-type mapping — dmDataType2SparkDataType already trims,
    // upper-cases and resolves unknown types to NullType, exactly as needed here.
    val fields = columnArr.map { case (colName, dmType) =>
      StructField(colName, dmDataType2SparkDataType(dmType), nullable = true)
    }
    org.apache.spark.sql.types.StructType(fields.toArray)
  }

  /**
   * Build a Spark schema for the cos→es path.
   *
   * Identical to the standard mapping except DECIMAL, which is forced to
   * StringType to avoid floating-point precision loss when writing to ES
   * (TODO 2020/12/10 gaosong: this leaks an ES concern into the khaos framework).
   *
   * @param columnArr <字段名称，字段类型> (column name, column type) pairs
   * @return the resulting [[org.apache.spark.sql.types.StructType]]
   */
  def dynamicBuildDFSchemaForES(columnArr: ArrayBuffer[(String, String)]): org.apache.spark.sql.types.StructType = {
    val fields = columnArr.map { case (colName, dmType) =>
      val sparkType = dmType.trim.toUpperCase match {
        case ColumnType.STRING   => StringType
        case ColumnType.NUMBER   => LongType
        case ColumnType.DATE     => DateType
        case ColumnType.DECIMAL  => StringType // kept as string: ES precision safety
        case ColumnType.TIME     => StringType
        case ColumnType.DATETIME => TimestampType
        case _                   => NullType
      }
      StructField(colName, sparkType, nullable = true)
    }
    org.apache.spark.sql.types.StructType(fields.toArray)
  }

  /**
   * Build a Spark schema from column types plus explicit column sizes, so DECIMAL
   * columns get a precise DecimalType(precision, scale) instead of DoubleType.
   *
   * @param columnArr     <字段名称，字段类型> (column name, column type) pairs
   * @param columnSizeArr <字段名称，数据长度> (column name, "precision,scale") pairs;
   *                      only consulted for DECIMAL columns
   * @return the resulting [[org.apache.spark.sql.types.StructType]]
   * @throws Exception when a DECIMAL column has no size entry, a malformed size
   *                   spec, or precision/scale beyond (38,23)
   */
  def dynamicBuildDFSchema(columnArr: ArrayBuffer[(String, String)], columnSizeArr: ArrayBuffer[(String, String)]): org.apache.spark.sql.types.StructType = {
    val columnSizeMap = columnSizeArr.toMap
    logInfo(s"=>>> field size mapping:$columnSizeMap")
    val fields = columnArr.map(
      col => {
        val colType = col._2.trim.toUpperCase
        colType match {
          case ColumnType.STRING => StructField(col._1, StringType, nullable = true)
          case ColumnType.NUMBER => StructField(col._1, LongType, nullable = true)
          case ColumnType.DATE => StructField(col._1, DateType, nullable = true)
          case ColumnType.DECIMAL => {
            // Was columnSizeMap.get(col._1).get — a missing entry produced a bare
            // NoSuchElementException with no hint of which column was at fault.
            val sizeSpec = columnSizeMap.getOrElse(col._1,
              throw new Exception(s"=> Missing column size for decimal column '${col._1}'"))
            val parts = sizeSpec.split(",")
            // Guard the blind parts(1) access: malformed specs used to surface as
            // ArrayIndexOutOfBoundsException / NumberFormatException.
            if (parts.length != 2) {
              throw new Exception(s"=> Invalid decimal size '$sizeSpec' for column '${col._1}', expected 'precision,scale'")
            }
            val precision = parts(0).trim.toInt
            val scale = parts(1).trim.toInt
            // Spark caps DecimalType precision at 38; a scale above 23 makes the
            // conversion fail silently, producing null values downstream.
            if (precision > 38 || scale > 23) {
              throw new Exception(s"=> Decimal datatype can only support precision up to (38,23) ")
            }
            StructField(col._1, DecimalType(precision, scale), nullable = true)
          }
          case ColumnType.TIME => StructField(col._1, StringType, nullable = true)
          case ColumnType.DATETIME => StructField(col._1, TimestampType, nullable = true)
          case _ => StructField(col._1, NullType, nullable = true)
        }
      }).toArray
    org.apache.spark.sql.types.StructType(fields)
  }

  /**
   * Build a Spark schema for data quality checking (数据检核).
   *
   * Every recognized data-management type is read as a plain string so raw values
   * can be inspected without type-coercion failures; unrecognized types map to
   * NullType, matching the other schema builders in this object.
   *
   * @param columnArr <字段名称，字段类型> (column name, column type) pairs
   * @return the resulting [[org.apache.spark.sql.types.StructType]]
   */
  def dynamicBuildDFSchemaForTechcheck(columnArr: ArrayBuffer[(String, String)]): org.apache.spark.sql.types.StructType = {
    // All six known types collapse to StringType, so a membership test is enough.
    val knownTypes = Set(
      ColumnType.STRING, ColumnType.NUMBER, ColumnType.DECIMAL,
      ColumnType.DATE, ColumnType.TIME, ColumnType.DATETIME)
    val fields = columnArr.map { case (colName, dmType) =>
      val sparkType = if (knownTypes.contains(dmType.trim.toUpperCase)) StringType else NullType
      StructField(colName, sparkType, nullable = true)
    }
    org.apache.spark.sql.types.StructType(fields.toArray)
  }

  /*  /**
      * 统一将接到的df数据类型转换为String
      * @param columnArr
      * @return
      */
    def buildDFStringSchema(columnArr: ArrayBuffer[(String, String)]): org.apache.spark.sql.types.StructType = {
      val fields = columnArr.map(
        col => {
          val colType = col._2.trim.toUpperCase
          colType match {
            case ColumnType.STRING => StructField(col._1, StringType, nullable = true)
            case ColumnType.NUMBER => StructField(col._1, StringType, nullable = true)
            case ColumnType.DATE => StructField(col._1, StringType, nullable = true)
            case ColumnType.DECIMAL => StructField(col._1, StringType, nullable = true)
            case ColumnType.TIME => StructField(col._1, StringType, nullable = true)
            case ColumnType.DATETIME => StructField(col._1, StringType, nullable = true)
            case _ => StructField(col._1, NullType, nullable = true)
          }
        }).toArray
      val schema = org.apache.spark.sql.types.StructType(fields)
      schema
    }*/

}