/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 * 
 *   http://www.apache.org/licenses/LICENSE-2.0
 * 
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
 
package org.apache.linkis.engineplugin.spark.cs

import org.apache.linkis.common.utils.Logging
import org.apache.linkis.cs.client.utils.ContextServiceUtils
import org.apache.linkis.engineconn.computation.executor.execute.EngineExecutionContext
import org.apache.linkis.engineplugin.spark.extension.SparkPreExecutionHook
import org.apache.linkis.hadoop.common.conf.HadoopConf.KEYTAB_PROXYUSER_SUPERUSER
import org.apache.linkis.manager.label.entity.engine.UserCreatorLabel

import org.springframework.stereotype.Component

import javax.annotation.PostConstruct

import scala.util.control.NonFatal


@Component
class CSSparkPreExecutionHook extends SparkPreExecutionHook with Logging {

  /** Registers this hook so the Spark engine invokes it before executing submitted code. */
  @PostConstruct
  def init(): Unit = {
    SparkPreExecutionHook.register(this)
  }

  override def hookName: String = "CSSparkPreExecutionHook"

  /**
   * Rewrites the submitted code before execution:
   *
   *  1. Resolves context-service (CS) table references via [[CSTableParser]].
   *     Parsing is best-effort: on a non-fatal failure the original code is kept
   *     and the error is logged; fatal errors (OOM, interruption) still propagate.
   *  2. For the Qualitis cross-table write pattern (detected by the presence of
   *     `spark.catalog.tableExists` together with `withColumn("qualitis_partition_key"`),
   *     replaces the submitting user's database prefix (`user.`) with the keytab
   *     proxy superuser's prefix (`principal.`), so the write targets the
   *     superuser's schema. Example of the matched pattern:
   *
   *     if (spark.catalog.tableExists("101ppt.BDC_cross_table")) {
   *       tmp4.withColumn("qualitis_partition_key", lit("20220930")).write.mode("overwrite").insertInto("101ppt.BDC_cross_table");
   *     } else {
   *       tmp4.withColumn("qualitis_partition_key", lit("20220930")).write.mode("append").partitionBy("qualitis_partition_key").format("hive").saveAsTable("101ppt.BDC_cross_table");
   *     }
   *
   * @param engineExecutionContext execution context carrying CS properties and user labels
   * @param code the code submitted for execution
   * @return the (possibly rewritten) code to execute
   */
  override def callPreExecutionHook(engineExecutionContext: EngineExecutionContext, code: String): String = {
    val contextIDValueStr = ContextServiceUtils.getContextIDStrByMap(engineExecutionContext.getProperties)
    val nodeNameStr = ContextServiceUtils.getNodeNameStrByMap(engineExecutionContext.getProperties)
    logger.info(s"Start to call CSSparkPreExecutionHook,contextID is $contextIDValueStr, nodeNameStr is $nodeNameStr")

    // Best-effort CS table resolution: keep the original code on non-fatal failure.
    // NonFatal (instead of Throwable) lets OutOfMemoryError/InterruptedException propagate.
    val parsedCode = try {
      CSTableParser.parse(engineExecutionContext, code, contextIDValueStr, nodeNameStr)
    } catch {
      case NonFatal(t) =>
        logger.warn("Failed to parse cs table", t)
        code
    }

    val resultCode =
      if (parsedCode.contains("spark.catalog.tableExists") &&
          parsedCode.contains("withColumn(\"qualitis_partition_key\"")) {
        val principal = KEYTAB_PROXYUSER_SUPERUSER.getValue
        // Take the submitting user from the UserCreatorLabel; fall back to the
        // principal itself (which makes the replace below a no-op).
        val user = engineExecutionContext.getLabels
          .collectFirst { case label: UserCreatorLabel => label.getUser }
          .getOrElse(principal)
        logger.info(s"before replace code: $parsedCode")
        val replaced = parsedCode.replace(user + ".", principal + ".")
        logger.info(s"after replace code: $replaced")
        logger.info(s"Finished to replace code: $user -> $principal ")
        replaced
      } else {
        parsedCode
      }

    logger.info(s"Finished to call CSSparkPreExecutionHook,contextID is $contextIDValueStr, nodeNameStr is $nodeNameStr")
    resultCode
  }
}
