package com.xyz.scalamodule
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.FileSystem
import org.apache.hadoop.hdfs.HAUtil
import org.apache.spark.sql.{DataFrame, SparkSession}
/**
 * Demo job: runs two window-function (row_number) queries against Hive table
 * `t2` and prints the results.
 *
 * NOTE(review): the metastore URI and warehouse path are hard-coded; consider
 * externalising them to configuration before production use.
 */
class HiveTest {

  /**
   * Builds a local SparkSession pointed at the active HDFS NameNode, runs two
   * row_number()-based queries against table `t2`, and shows the results.
   *
   * @param args command-line arguments (currently unused)
   */
  def doTask(args: Array[String]): Unit = {
    val address = getHdfsActiveNode()
    val sparksession = SparkSession
      .builder()
      // .appName(SparkConfig.appName) // intentionally unset while running locally
      .master("local[*]")
      .config("hive.metastore.uris", "thrift://10.195.185.31:9083")
      .config("spark.sql.warehouse.dir", s"""hdfs://$address/user/hive/warehouse""")
      .config("spark.sql.broadcastTimeout", "36000")
      .enableHiveSupport()
      .getOrCreate()

    try {
      // Rows carrying the two smallest and two largest `b` values per group `a`.
      val sql =
        s"""select * from (select *,row_number() over(partition by a order by b asc) rn_min,
           |row_number() over(partition by a order by b desc) rn_max from t2 ) tmp
           |where rn_min<=2 or rn_max <=2""".stripMargin

      // fixed: was `var` but never reassigned — use `val`
      val sql1 =
        s"""select tmp.a,min(if(rn_min = 2, c, null)) as min_c, max(if(rn_max = 2, c, null)) as max_c
           |from (select *,row_number() over(partition by a order by b asc) rn_min,
           |row_number() over(partition by a order by b desc) rn_max from t2 ) tmp
           |where tmp.rn_min = 2 or tmp.rn_max = 2  group by tmp.a""".stripMargin

      val sourceDf: DataFrame = sparksession.sql(sql)
      sourceDf.show()

      import sparksession.implicits._
      // Map the first three columns into the Tmp case class.
      // NOTE(review): assumes columns 0-2 of the result are strings named
      // (a, b, c) — confirm against t2's actual schema.
      // fixed: was `var` but never reassigned — use `val`
      val resultDf: DataFrame = sourceDf.map { row =>
        Tmp(
          row.getString(0),
          row.getString(1),
          row.getString(2)
        )
      }.toDF()

      resultDf.show()

      val sourceDf2: DataFrame = sparksession.sql(sql1)
      sourceDf2.show()
    } finally {
      // fixed: the session was previously never stopped (resource leak)
      sparksession.stop()
    }
  }

  /**
   * Resolves the RPC address of the active HDFS NameNode from the HA
   * configuration (`core-site.xml` must be on the classpath).
   *
   * @return the active NameNode address formatted as "host:port"
   */
  def getHdfsActiveNode(): String = {
    // Load the Hadoop client configuration.
    val configuration: Configuration = new Configuration()
    configuration.addResource("core-site.xml")
    // fixed: FileSystem.get was called twice (plus a leftover debug println);
    // obtain the FileSystem once and reuse it
    val fs = FileSystem.get(configuration)
    val info = HAUtil.getAddressOfActive(fs)
    info.getAddress.getHostAddress + ":" + info.getPort
  }
}

/** Immutable holder for one (a, b, c) string row mapped out of the query result. */
case class Tmp(a: String, b: String, c: String)

