package cn.lecosa.spark.es

import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.apache.spark.sql.SQLContext
import org.apache.spark.sql.SQLContext
import org.apache.spark.sql.SparkSession
import org.elasticsearch.spark
import org.elasticsearch.spark._
import org.elasticsearch.spark.sql._
object App {

  /** Minimal row model for trip documents read from Elasticsearch. */
  case class Trip(content: String)

  /**
   * Reads the `trails/person-data` index from Elasticsearch as a DataFrame,
   * flattens each row into a comma-separated line, and writes the result as
   * plain-text files in 5 partitions.
   *
   * @param args optional — `args(0)` overrides the output directory
   *             (defaults to the original hard-coded local path, so existing
   *             invocations with no arguments behave exactly as before).
   */
  def main(args: Array[String]): Unit = {
    // Backward-compatible generalization: allow the output path to be passed in.
    val outputPath =
      if (args.nonEmpty) args(0)
      else "F:/spark/workspace2/bigdata/data/trail-person/clear"

    val sparkConf = new SparkConf()
      .setAppName("tyid app")
      .setMaster("local[2]")
    sparkConf.set("es.index.auto.create", "true")
    sparkConf.set("es.nodes", "10.20.8.54,10.20.8.55,10.20.8.58")
    sparkConf.set("es.port", "9200")

    val sc = new SparkContext(sparkConf)
    try {
      val sqlContext = new SQLContext(sc)

      // Full-scan read of the index. A JSON query body could be passed as a
      // second argument to esDF to filter server-side, e.g.:
      //   sqlContext.esDF("trails/person-data", """{"query":{"term":{"imsi":"310410277098480"}}}""")
      // (schema is inferred from the Elasticsearch mapping).
      val personDF = sqlContext.esDF("trails/person-data")
      personDF.show()

      // Flatten every row to one CSV-style line and persist as text files.
      personDF.rdd
        .map(_.mkString(","))
        .repartition(5)
        .saveAsTextFile(outputPath)
    } finally {
      // Always release the SparkContext, even if the ES read or the
      // filesystem write fails — the original code leaked it on error.
      sc.stop()
    }
  }
}
   
