import org.apache.spark.rdd.RDD
import org.apache.spark.sql.functions.{get_json_object, sum}
import org.apache.spark.sql.types.{DataTypes, IntegerType}
import org.apache.spark.sql.{DataFrame, SparkSession}
import org.apache.spark.{SparkConf, SparkContext}
import org.elasticsearch.spark._
import org.json4s.jackson.Json


object SparkReadES {

  /**
   * Reads documents from the Elasticsearch index "myrow", extracts the
   * JSON payload stored in each document's "_row" field, parses out the
   * `name` and `age` attributes, and prints the total age per name.
   *
   * Requires a local Elasticsearch node on localhost:9200.
   */
  def main(args: Array[String]): Unit = {
    // Spark configuration, including the Elasticsearch connection settings
    // consumed by the elasticsearch-spark connector.
    val conf = new SparkConf()
      .setAppName("esrdd")
      .setMaster("local[*]")
      .set("es.nodes", "localhost")
      .set("es.port", "9200")
      .set("es.index.auto.create", "true")

    // SparkSession is the single modern entry point; creating a separate
    // SparkContext beforehand was redundant — reuse spark.sparkContext.
    val spark = SparkSession.builder().config(conf).getOrCreate()
    import spark.implicits._

    try {
      // Each record from esRDD is (documentId, fieldMap).
      val esRdd: RDD[(String, collection.Map[String, AnyRef])] =
        spark.sparkContext.esRDD("myrow")

      // Pull the "_row" field (expected to hold a JSON string) out of every
      // document, defaulting to "" when the field is absent.
      val rows: RDD[String] = esRdd.map(_._2.getOrElse("_row", "").toString)
      val df: DataFrame = rows.toDF("_row")
      df.show(10, false)

      // Parse name (string) and age (int) out of the JSON payload.
      val parsed = df.select(
        get_json_object($"_row", "$.name").as("name"),
        get_json_object($"_row", "$.age").cast(IntegerType).as("age")
      )
      parsed.show(10, false)

      // Aggregate: total age per name.
      val summed = parsed
        .groupBy($"name")
        .agg(sum($"age").as("sumAge"))
        .select($"name", $"sumAge")
      summed.show(10, false)
    } finally {
      // Always release the session, even when the job fails mid-way.
      spark.stop()
    }
  }

}