package com.kingsoft.dc.khaos.module.spark.enhance

import java.sql.Connection

import scala.util.control.NonFatal

import com.kingsoft.dc.khaos.module.spark.util.OracleUtils
import org.apache.spark.internal.Logging
import org.apache.spark.sql.execution.datasources.jdbc.JdbcUtils._
import org.apache.spark.sql.execution.datasources.jdbc.{JdbcOptionsInWrite, JdbcRelationProvider, JdbcUtils}
import org.apache.spark.sql.sources.BaseRelation
import org.apache.spark.sql.{DataFrame, SQLContext, SaveMode}

/**
 * JDBC relation provider specialised for Oracle writes.
 *
 * Overrides the write-path `createRelation` so that connection handling goes
 * through [[OracleUtils.close]], then delegates to the parent provider's
 * read-path `createRelation(sqlContext, parameters)` to build the returned
 * [[BaseRelation]].
 */
class OracleRelationProvider extends JdbcRelationProvider with Logging {

  /**
   * Writes `df` to the Oracle table described by `parameters` according to
   * `mode`, then returns the relation over the written table.
   *
   * @param sqlContext active SQL context (used for case-sensitivity config and
   *                   for building the resulting relation)
   * @param mode       save mode; `Ignore` performs no write at all
   * @param parameters JDBC options (url, table, truncate flag, ...)
   * @param df         data to persist
   * @return the relation produced by the parent read-path `createRelation`
   * @throws Exception wrapping any non-fatal failure during the save
   */
  override def createRelation(
                               sqlContext: SQLContext,
                               mode: SaveMode,
                               parameters: Map[String, String],
                               df: DataFrame): BaseRelation = {
    val options = new JdbcOptionsInWrite(parameters)
    val isCaseSensitive = sqlContext.sparkSession.sessionState.conf.caseSensitiveAnalysis

    // Runs `f` with a fresh JDBC connection and ALWAYS closes it, even when
    // `f` throws. The original code only closed the connection on the success
    // path, leaking it whenever truncate/drop/create/schema-lookup failed.
    def withConnection[T](f: java.sql.Connection => T): T = {
      val conn = JdbcUtils.createConnectionFactory(options)()
      try f(conn) finally OracleUtils.close(conn)
    }

    try {
      mode match {
        case SaveMode.Overwrite =>
          // `.contains(false)` is the idiomatic form of `== Some(false)`.
          if (options.isTruncate && isCascadingTruncateTable(options.url).contains(false)) {
            // Truncate keeps the existing table definition, so reuse its
            // schema for the subsequent write.
            val tableSchema = withConnection { conn =>
              truncateTable(conn, options)
              JdbcUtils.getSchemaOption(conn, options)
            }
            saveTable(df, tableSchema, isCaseSensitive, options)
          } else {
            // Otherwise drop and recreate the table from the DataFrame schema.
            withConnection { conn =>
              dropTable(conn, options.table, options)
              createTable(conn, df, options)
            }
            saveTable(df, Some(df.schema), isCaseSensitive, options)
          }
        case SaveMode.Append =>
          val tableSchema = withConnection(JdbcUtils.getSchemaOption(_, options))
          saveTable(df, tableSchema, isCaseSensitive, options)
        case SaveMode.ErrorIfExists =>
          throw new Exception(
            s"Table or view '${options.table}' already exists. " +
              s"SaveMode: ErrorIfExists.")
        case SaveMode.Ignore =>
        // With `SaveMode.Ignore` mode, if table already exists, the save operation is expected
        // to not save the contents of the DataFrame and to not change the existing data.
        // Therefore, it is okay to do nothing here and then just return the relation below.
      }
    } catch {
      // NonFatal (instead of Throwable) lets fatal errors such as
      // OutOfMemoryError and InterruptedException propagate unwrapped.
      case NonFatal(e) =>
        throw new Exception("save data by reflect function failed！", e)
    }
    createRelation(sqlContext, parameters)
  }
}
