import com.alibaba.fastjson.JSON
import org.apache.spark.SparkContext
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.SparkSession
import org.elasticsearch.spark._

/**
 * One-off migration job: re-labels Taobao documents in Elasticsearch.
 *
 * Reads every document in `2020_taobao/taobao_2020_7` whose
 * `fourthCategoryId` is "10021020709" and whose `title`/`BaseInfo` do NOT
 * contain "罐", rewrites `fourthCategoryId` to "10021020799", and writes the
 * documents back to the same index keyed by `good_id` (upsert via
 * `es.mapping.id`).
 */
object Test7 {

  def main(args: Array[String]): Unit = {
    // SECURITY NOTE(review): ES credentials and S3 access keys are hard-coded
    // below; they should be moved to configuration / environment variables
    // and the exposed keys rotated.
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("test_es")
      .config("es.nodes", "192.168.1.29")
      // .config("es.nodes", "192.168.2.247")
      .config("es.port", "9200")
      .config("es.net.http.auth.user", "elastic")
      .config("es.net.http.auth.pass", "changeme")
      .config("cluster.name", "O2OElastic")
      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .getOrCreate()

    try {
      // `val` — the context and RDD are never reassigned.
      val sc: SparkContext = spark.sparkContext
      sc.hadoopConfiguration.set("fs.s3a.access.key", "GAO7EO9FWKPJ8WFCQDME")
      sc.hadoopConfiguration.set("fs.s3a.secret.key", "LZ0xaHBSYKHaJ9ECDbX9f7zin79UZkXfGoNapRPL")
      sc.hadoopConfiguration.set("fs.s3a.endpoint", "https://obs.cn-north-1.myhuaweicloud.com")
      sc.setLogLevel("ERROR")

      val index = "2020_taobao/taobao_2020_7"

      // Select category 10021020709, excluding docs mentioning "罐" in
      // title or BaseInfo. esJsonRDD yields (docId, jsonString) pairs;
      // .values keeps only the raw JSON strings.
      val matchedJson: RDD[String] = sc.esJsonRDD(index,
        """
          |{
          |  "query": {
          |    "bool": {
          |      "must": [
          |        {"match_phrase": {
          |          "fourthCategoryId": "10021020709"
          |        }}
          |      ],"must_not": [
          |        {"match_phrase": {
          |          "title": "罐"
          |        }},{"match_phrase": {
          |          "BaseInfo": "罐"
          |        }}
          |      ]
          |    }
          |  }
          |}
        """.stripMargin).values

      // Round-trip through a DataFrame to normalize the documents, then
      // patch the category id and upsert back (keyed by good_id so the
      // existing documents are overwritten rather than duplicated).
      val frame = spark.read.json(matchedJson)
      frame.toJSON.rdd.map { line =>
        val doc = JSON.parseObject(line)
        doc.put("fourthCategoryId", "10021020799")
        doc
      }.saveToEs(index, Map("es.mapping.id" -> "good_id"))
    } finally {
      // Release the SparkContext even if the job fails (was missing before).
      spark.stop()
    }
  }
}




