package com.kingsoft.dc.khaos.module.spark.util

import com.kingsoft.dc.khaos.module.spark.constants.Ks3Constants
import com.kingsoft.dc.khaos.module.spark.model.ks3.Ks3AccessConfig
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.Path

/**
  * Created by haorenhui on 2019/12/04.
  */
/**
  * Utilities for configuring Hadoop to access KS3 (Kingsoft Standard Storage Service).
  */
object HadoopKs3Utils {

    /** Path separator used when normalizing object keys into filesystem paths. */
    val PATH_DELIMITER: String = Path.SEPARATOR

    /**
      * Normalizes an object-store key into an absolute path by ensuring it
      * starts with the path delimiter.
      *
      * @param key object key, possibly without a leading separator
      * @return the key, guaranteed to start with [[PATH_DELIMITER]]
      */
    def keyToPath(key: String): String = {
        // Use PATH_DELIMITER consistently instead of a hard-coded "/".
        if (key.startsWith(PATH_DELIMITER)) key else PATH_DELIMITER + key
    }

    /**
      * Loads the default KS3 Hadoop configuration resource into the given configuration.
      *
      * @param hadoopConf Hadoop configuration to augment (mutated in place)
      * @return the same configuration instance, for chaining
      * @throws IllegalStateException if the bundled config resource cannot be found
      */
    def loadDefault(hadoopConf: Configuration): Configuration = {
        // Fail fast with a clear message if the bundled resource is missing;
        // passing a null stream to addResource would otherwise surface as an
        // obscure error later, when the configuration is first read.
        val stream = this.getClass.getClassLoader.getResourceAsStream(Ks3Constants.KS3_HADOOP_CONFIG_FILE)
        if (stream == null) {
            throw new IllegalStateException(
                s"KS3 Hadoop config resource not found on classpath: ${Ks3Constants.KS3_HADOOP_CONFIG_FILE}")
        }
        hadoopConf.addResource(stream)
        hadoopConf
    }

    /**
      * Appends KS3 access settings (default FS, access key, secret, endpoint)
      * to the given Hadoop configuration, on top of the bundled defaults.
      *
      * @param hadoopConf      Hadoop configuration to augment (mutated in place)
      * @param ks3AccessConfig bucket and credential settings for KS3
      * @return the same configuration instance, for chaining
      */
    def appendKs3HadoopConfigs(hadoopConf: Configuration, ks3AccessConfig: Ks3AccessConfig): Configuration = {
        val hadoopConfNew: Configuration = loadDefault(hadoopConf)
        // The default filesystem is the KS3 bucket itself.
        val fsName: String = s"ks3://${ks3AccessConfig.getBucket}"
        hadoopConfNew.set(Ks3Constants.DEFAULT_FS, fsName)

        hadoopConfNew.set(Ks3Constants.FS_KS3_ACCESSKEY, ks3AccessConfig.getAccessKey)
        hadoopConfNew.set(Ks3Constants.FS_KS3_ACCESSSECRET, ks3AccessConfig.getSecretKey)
        hadoopConfNew.set(Ks3Constants.FS_KS3_ENDPOINT, ks3AccessConfig.getEndPoint)

        hadoopConfNew
    }

}
