package com.leal.elasticsearch

import org.apache.spark.sql.{DataFrame, Row, SparkSession}
import org.elasticsearch.spark.rdd.EsSpark

import scala.collection.mutable

object HiveToEs {

  /**
   * Entry point: reads rows from a Hive table and indexes them into Elasticsearch.
   *
   * If a mapping-id field is configured, documents are written with that column's
   * value as the ES document id via `saveToEsWithMeta`; otherwise ES auto-generates
   * ids via `saveToEs`.
   *
   * @param args unused command-line arguments
   */
  def main(args: Array[String]): Unit = {

    val spark: SparkSession = SparkSession.builder()
      .master("local[*]")
      .appName("HiveToEs")
      .enableHiveSupport()
      .getOrCreate()

    val frame: DataFrame = spark.sql("select * from test.test_hive")

    val indexName: String = "test_hive"
    // Column to use as the ES document id; None means let ES auto-generate ids.
    // (The original compared a non-null `val` against null, leaving one branch dead.)
    val mappingId: Option[String] = Some("id")
    val esResource: String = s"$indexName/doc"

    mappingId match {
      case Some(idField) =>
        // BUG FIX: the original called spark.sparkContext.broadcast(...) *inside*
        // the map closure. SparkContext is not serializable, so that fails at
        // runtime with "Task not serializable". Broadcast once on the driver and
        // capture only the (serializable) Broadcast handle in the closure.
        val idFieldBc = spark.sparkContext.broadcast(idField)
        EsSpark.saveToEsWithMeta(
          frame.rdd.map((row: Row) => (row.getAs[String](idFieldBc.value), getResultMap(row))),
          esResource
        )
      case None =>
        EsSpark.saveToEs(frame.rdd.map(getResultMap), esResource)
    }

    // Release the session's resources once the write completes.
    spark.stop()
  }

  /**
   * Converts a [[Row]] into a mutable Map keyed by column name, suitable as an
   * Elasticsearch document body.
   *
   * Columns are skipped when the value is null, blank after trimming, or equals
   * the sentinel string "已下线" ("offline") that marks decommissioned records.
   *
   * @param row source row; column names are taken from its schema
   * @return map of column name -> value for every retained column
   */
  private def getResultMap(row: Row): mutable.Map[String, Any] = {
    // Hoisted: fieldNames was recomputed on every loop iteration before.
    val fieldNames: Array[String] = row.schema.fieldNames
    val map: mutable.Map[String, Any] = mutable.Map[String, Any]()

    for (i <- 0 until row.size) {
      val value = row.get(i)
      if (value != null) {
        // ROBUSTNESS: the original used row.getAs[String](i) for the sentinel
        // comparison, which can ClassCastException on non-string columns; the
        // value's toString is equivalent for strings and safe for everything else.
        val text = value.toString
        if (text.trim.nonEmpty && text != "已下线") {
          map.put(fieldNames(i), value)
        }
      }
    }
    map
  }
}