package com.kay.es

import com.sksamuel.elastic4s.ElasticsearchClientUri
import com.sksamuel.elastic4s.http._
import org.slf4j.LoggerFactory
import com.alibaba.fastjson.JSON
import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}
/**
  * Created by Administrator on 2018/6/12.
  */
/**
  * Demo: bulk-upserting documents into Elasticsearch (index `sparkes`, type `info`)
  * from a Spark RDD, using the elastic4s HTTP client.
  *
  * Pipeline: build a tiny RDD of "khh|name" strings, split each record,
  * collect the rows to the driver, build one `update ... docAsUpsert` request
  * per row, and send them in a single bulk call.
  */
object elastic4s2es {
  val logger: org.slf4j.Logger = LoggerFactory.getLogger(this.getClass)

  def main(args: Array[String]): Unit = {
    import com.sksamuel.elastic4s.http.ElasticDsl._

    // Elasticsearch HTTP endpoint — assumes a local node; adjust for other clusters.
    val url = "localhost"
    val port = 9200
    val client: HttpClient = HttpClient(ElasticsearchClientUri(url, port))

    // --- earlier experiments kept for reference -------------------------------
//    val result =  client.execute{
//      bulk(
//        indexInto("sparkes"/"info")
//      ).refresh(RefreshPolicy.WAIT_UNTIL)
//    }.await
//    println(result)

    // 查询所有 (match-all query)
//    val resp = client.execute {
//      search("sparkes").matchAllQuery()
//    }.await
//    println(resp.hits.hits.length)
//    for ( i <- 0 until resp.hits.hits.length){
//       println(resp.hits.hits(i).sourceAsString)
//    }

    // 指定查询 (match query on a field)
//    val searchOne = client.execute{
//      search("sparkes").matchQuery("khh","1001001")
//    }.await
//    println("指定查询:" + searchOne.hits.hits.head.sourceAsString)

    // 字符串查询 (query-string query)
//    val q = client.execute{
//      search("sparkes"/"info").query("20")
//    }.await
//    println("q:" + q.hits.hits.length)

    // create index   TimeoutException
//    client.execute {
//      createIndex("places") mappings (
//        mapping("city") as (
//           textField("cityname")
//          )
//        )
//    }.await

    // 按 id get (get by id, optionally with source filtering)
//    val q = client.execute{
//      get(1001003).from("sparkes","info").fetchSourceInclude("khh","name")
//    }.await
//    logger.info("按id get")
//    println("q:" + q.sourceAsString)
//    val json = JSON.parseObject(q.sourceAsString)
//    val khh = json.get("khh")
//    val name = json.get("name")
//    println(s"khh:${khh}  name:" + name)
    // --------------------------------------------------------------------------

    // RDD 批量更新 (bulk upsert from an RDD)
    val conf: SparkConf = new SparkConf().setMaster("local[2]").setAppName("rddupdatees")
    val sc: SparkContext = new SparkContext(conf)

    try {
      val rdd: RDD[String] = sc.makeRDD(List("1001001|abc1001", "1001003|abc1003"))

      // Split each "khh|name" record exactly once (limit 2 keeps any further
      // '|' characters inside the name field instead of silently dropping them).
      val trdd: RDD[(String, String)] = rdd.map { record =>
        val fields = record.split("\\|", 2)
        (fields(0), fields(1))
      }

      // Collect the (small) dataset to the driver BEFORE building elastic4s
      // requests: the DSL request objects and the implicit ElasticDsl scope are
      // not Spark-serializable, so they must not be created inside executor
      // closures. Building them driver-side also keeps `client` off the wire.
      val rows: Array[(String, String)] = trdd.collect()
      rows.foreach(println)

      val ops = rows.toSeq.map { case (khh, name) =>
        update(khh).in("sparkes/info").docAsUpsert("name" -> name)
      }

      client.execute(bulk(ops)).await
    } finally {
      // Always release the Spark context and the HTTP client, even on failure.
      sc.stop()
      client.close()
    }
  }
}
