package com.carol.bigdata

import com.carol.bigdata.utils.Flag
import com.typesafe.config.ConfigFactory
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.hbase.HBaseConfiguration
import org.apache.hadoop.hbase.mapred.TableOutputFormat
import org.apache.hadoop.mapred.JobConf
import org.apache.spark.SparkConf

import java.lang.reflect.Field
//import org.apache.log4j.Logger


/*
 * Project configuration loader.
 * Centralizes all configuration for the application: command-line flags
 * (via Flag) override values from application.conf (via Typesafe Config).
 */
object Config {

    // Typesafe Config loaded from application.conf on the classpath.
    // Lazy so classpath resources are only resolved on first access.
    private lazy val config = ConfigFactory.load()

    // Business parameters. Convention throughout this object:
    // a command-line flag (Flag.GetString) overrides the application.conf value.
    val games: String = Flag.GetString("games", config.getString("games"))
    val gameList: Array[String] = games.split(",")
    val retentionLabels: String = Flag.GetString("retentionLabels", config.getString("retentionLabels"))
    // NOTE(review): toInt throws NumberFormatException on malformed entries —
    // assumes the configured list is always well-formed; confirm upstream validation.
    val retentionLabelList: Array[Int] = retentionLabels.split(",").map(_.toInt)

    // Spark configuration
    val logLevel: String = Flag.GetString("spark.log.level", config.getString("spark.log.level"))
    // Expected values: local, yarn, yarn-cluster
    val sparkMaster: String = Flag.GetString("spark.master", "local[*]")
    val sparkAppName: String = Flag.GetString("spark.app.name", config.getString("spark.app.name"))
    val sparkSerializer: String = Flag.GetString("spark.serializer", config.getString("spark.serializer"))
    val HiveMetaStoreUris: String = Flag.GetString("hive.metastore.uris", config.getString("hive.metastore.uris"))
    val sparkWarehouseDir: String = Flag.GetString("spark.sql.warehouse.dir", config.getString("spark.sql.warehouse.dir"))
    val sparkConf: SparkConf = new SparkConf()
        .set("spark.serializer", sparkSerializer)
        .set("dfs.client.use.datanode.hostname", "true")
        .set("fs.hdfs.impl", classOf[org.apache.hadoop.hdfs.DistributedFileSystem].getName)
        .set("fs.file.impl", classOf[org.apache.hadoop.fs.LocalFileSystem].getName)
        .setAppName(sparkAppName)
    // If the serializer is Kryo and custom classes are serialized, also call
    // .registerKryoClasses(Array(classOf[ProductClass])).
    // Master is only set explicitly for local runs; on yarn it comes from spark-submit.
    if (sparkMaster.contains("local"))
        sparkConf.setMaster(sparkMaster)


    // HBase configuration
    val hbaseRootDir: String = Flag.GetString("hbase.rootdir", config.getString("hbase.rootdir"))
    val hbaseZkQuorum: String = Flag.GetString("hbase.zookeeper.quorum", config.getString("hbase.zookeeper.quorum"))
    val hbaseZkZnodeParent: String = Flag.GetString("hbase.zookeeper.znode.parent", config.getString("hbase.zookeeper.znode.parent"))
    val hbaseParams: Map[String, String] = Map[String, String](
        "hbase.rootdir" -> hbaseRootDir,
        "hbase.zookeeper.quorum" -> hbaseZkQuorum,
        "zookeeper.znode.parent" -> hbaseZkZnodeParent
    )

    // NOTE(review): hbaseParams is never applied to this Configuration, so `conf`
    // and `jobConf` rely solely on hbase-site.xml from the classpath. If callers
    // expect the flag/config overrides above to reach the JobConf, each entry of
    // hbaseParams should be conf.set(...) here — confirm against call sites.
    val conf: Configuration = HBaseConfiguration.create()
    val jobConf = new JobConf(conf)
    jobConf.setOutputFormat(classOf[TableOutputFormat])



    // Dump the resolved configuration once at object initialization.
    print()

    /** Prints every field of this object (except the raw `config`) via reflection,
      * so the effective configuration is visible in the logs at startup. */
    def print(): Unit = {
        println("==> 当前Config配置")
        val fields: Array[Field] = Config.getClass.getDeclaredFields
        for (field <- fields) {
            field.setAccessible(true)
            try {
                if (field.getName != "config")
                    println(s"${field.getName}:${field.get(Config)}")
            } catch {
                case e: Exception => e.printStackTrace()
            }
        }
    }


    /** Smoke test: verifies that flag overrides and config defaults resolve. */
    def main(args: Array[String]): Unit = {
        Flag.Parse(args)
        // Check that flag overrides and the defaults above took effect.
        println(Config.logLevel)
        println(Config.sparkConf)
        val test_log = 1
        // NOTE(review): Int + String is string concatenation ("1100") and never
        // throws, so this catch block is dead code — left as-is pending intent.
        try println(test_log + "100")
        catch {
            case e: Exception => e.printStackTrace()
            //errorLog.error(e.getMessage)

        }
    }
}
