package com.dmall.scf.utils

import java.util.Date

import org.apache.spark.sql.types._

import scala.collection.JavaConverters._

/**
 * @description Utility for building a Spark StructType from a case class.
 * scf-spark
 * @author wangxuexing
 * @date 2020/1/20
 */
object StructTypeUtil {

  /**
   * Mapping from JVM runtime type names (as reported by `Class#getName`)
   * to the corresponding Spark SQL DataType.
   *
   * NOTE(review): `CharType` is largely internal to Spark and unsupported in
   * many operations — confirm `CharType(10)` is intended here rather than
   * `StringType`. Kept as-is to preserve the emitted schema.
   */
  private val typeMapping: Map[String, DataType] = Map(
    "int" -> IntegerType,
    "long" -> LongType,
    "float" -> FloatType,
    "double" -> DoubleType,
    "boolean" -> BooleanType,
    "short" -> ShortType,
    "char" -> CharType(10),
    "byte" -> ByteType,
    "java.lang.String" -> StringType,
    "scala.math.BigDecimal" -> DecimalType(30, 15),
    "java.util.Date" -> DateType
  )

  /**
   * Builds a Spark [[StructType]] from the declared fields of a case class.
   *
   * Field names are converted from camelCase to under_line form via
   * `StringUtils.camelToUnderLine`; field types are mapped per [[typeMapping]].
   *
   * NOTE(review): `getDeclaredFields` order is not guaranteed by the JVM
   * spec (in practice it follows declaration order on HotSpot) — confirm
   * downstream code does not rely on a specific column order.
   *
   * NOTE(review): every field is declared `nullable = false`, including
   * reference types (String, BigDecimal, Date) that can legally hold null —
   * kept to preserve the existing schema, but verify against the data.
   *
   * @param clazz runtime class of the case class to inspect
   * @tparam T type of the case class
   * @return StructType with one StructField per declared field
   * @throws IllegalArgumentException if a field's type has no mapping
   */
  def getStructType[T](clazz: Class[T]): StructType = {
    val sparkFields = clazz.getDeclaredFields.toList.map { field =>
      val typeName = field.getType.getName
      val dataType = typeMapping.getOrElse(
        typeName,
        // Include the offending type name so failures are diagnosable.
        throw new IllegalArgumentException(s"未找到对应类型: $typeName")
      )
      StructField(StringUtils.camelToUnderLine(field.getName), dataType, nullable = false)
    }
    StructType(sparkFields)
  }

  /** Ad-hoc smoke test: builds the schema for sample case class [[A]]. */
  def main(args: Array[String]): Unit = {
    getStructType(classOf[A])
  }

  /** Sample case class covering every supported field type. */
  case class A(aB:Int, bs3_D:String, csD: BigDecimal, d2W:Long, esSc:Float, f:Double, g:Boolean, h:Short, i:Date, j:Char, k:Byte)
}
