package com.kingsoft.dc.khaos.module.spark.util

import java.sql.Types

import org.apache.spark.sql.jdbc.{JdbcDialect, JdbcType}
import org.apache.spark.sql.types._

/**
  * Hive JDBC dialect for Spark.
  *
  * Created by gs on 2020-03-12.
  */
object HiveDialect extends JdbcDialect {

  /** Claims URLs of the form `jdbc:hive2://...` (HiveServer2 connections). */
  override def canHandle(url: String): Boolean = url.startsWith("jdbc:hive2")

  /**
    * Quotes an identifier with backticks, Hive's quote character.
    *
    * Qualified names (`table.col`, `db.table.col`) are reduced to their
    * final component before quoting, because Hive rejects a quoted
    * qualified name such as `` `table.col` `` — the backticks must wrap
    * only the column name.
    *
    * @param fieldName plain or dot-qualified identifier
    * @return the last name component wrapped in backticks
    */
  override def quoteIdentifier(fieldName: String): String = {
    if (fieldName.contains(".")) {
      // Take the LAST component so fully-qualified names (db.table.col)
      // are handled too; the previous col(1) picked the middle part of a
      // three-part name. For two-part names the result is unchanged.
      val parts: Array[String] = fieldName.split("\\.", -1)
      s"`${parts.last}`"
    } else {
      s"`$fieldName`"
    }
  }

  // NOTE(review): custom Catalyst->Hive type mapping intentionally left
  // disabled — Spark's default JDBC type mapping is in effect. Re-enable
  // and adjust if Hive-specific type names (STRING, TINYINT, ...) are
  // required on write.
/*    override def getJDBCType(dt: DataType): Option[JdbcType] = dt match {
      case StringType => Option(JdbcType("STRING", Types.VARCHAR))
      case TimestampType => Option(JdbcType("TIMESTAMP", Types.DATE))
      case DateType => Option(JdbcType("DATE", Types.DATE))
      case DoubleType => Option(JdbcType("DOUBLE", Types.DOUBLE))
      case LongType => Option(JdbcType("BIGINT", Types.BIGINT))
      case IntegerType => Option(JdbcType("INT", Types.INTEGER))

      case ByteType => Option(JdbcType("TINYINT", Types.TINYINT))
      case ShortType => Option(JdbcType("SMALLINT", Types.SMALLINT))
      case FloatType => Option(JdbcType("FLOAT", Types.FLOAT))
      case BooleanType => Option(JdbcType("BOOLEAN", Types.BOOLEAN))
      case _ => None
    }*/
}
