import cn.tecnova.bean.BaAnalysisBean
import cn.tecnova.utils.{ConfigHandler, JedisPools}
import com.google.gson.Gson
import org.apache.commons.pool2.impl.GenericObjectPoolConfig
import org.apache.log4j.{Level, Logger}
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.SQLContext
import org.apache.spark.{SparkConf, SparkContext}
import org.elasticsearch.spark.rdd.EsSpark
import redis.clients.jedis.JedisPool

/**
  * description: ES test — reads documents from an Elasticsearch index and
  * mirrors each raw JSON document into a Redis list.
  **/
object TestES {

  // Silence Spark's verbose internal logging; keep only errors.
  Logger.getLogger("org").setLevel(Level.ERROR)

  import org.elasticsearch.spark._

  /**
    * Entry point. Queries the `fin_insur_claims_threecar` index for documents
    * whose `threeDriverName.keyword` matches the hard-coded term, prints each
    * hit's JSON body, and RPUSHes it onto the Redis list "base".
    *
    * NOTE(review): ES node (192.168.100.3:9200) and Redis node ("node-1":6379)
    * are hard-coded test endpoints; the empty-string Redis password presumably
    * means "no auth" — confirm against the Jedis constructor overload used.
    */
  def main(args: Array[String]): Unit = {

    val conf = new SparkConf().setAppName("estest").setMaster("local[*]")

    conf.set("es.nodes", "192.168.100.3")
    conf.set("es.port", "9200")
    //    conf.set("es.query", " {  \"query\": {    \"match_all\": {    }  }}") // filter the result set with a query string
    val sc = new SparkContext(conf)

    // Each element is (document id, raw JSON source) for one ES hit.
    val rdd: RDD[(String, String)] = EsSpark.esJsonRDD(sc, "fin_insur_claims_threecar/fin_insur_claims_threecar","{\"query\":{\"bool\":{\"must\":[{\"term\":{\"threeDriverName.keyword\":\"武艾颖\"}}],\"must_not\":[],\"should\":[]}},\"from\":0,\"size\":10,\"sort\":[],\"aggs\":{}}")

    rdd.foreachPartition(iter => {
      // One pool + connection per partition; runs on the executor, so the
      // Jedis objects are created there and must be released there too.
      val pool = new JedisPool(new GenericObjectPoolConfig, "node-1", 6379, 1000, "", 2)
      val jedis = pool.getResource
      try {
        iter.foreach(tp => {
          println(tp._2)
          jedis.rpush("base", tp._2)
        })
      } finally {
        // Always release the connection and shut the pool down, even when the
        // iteration fails — otherwise sockets and pool threads leak per task.
        jedis.close()
        pool.close()
      }
    })

    sc.stop()

  }

}
