package cn.cihon.stream.elasticsearch

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.SQLContext
import org.elasticsearch.spark.sql._

/**
  * Created by eeexiu on 17-2-9.
  */
object ElasticsearchDemo01 {

  /**
    * Demo ETL job: reads parquet files of daily results, extracts the set of
    * distinct vehicle identification numbers (VINs) via Spark SQL, and writes
    * them to the Elasticsearch index/type "obd/vin".
    *
    * `saveToEs` is an implicit enrichment provided by the
    * `org.elasticsearch.spark.sql._` import; ES connection settings are
    * expected to come from the Spark configuration (e.g. `es.nodes`).
    */
  def main(args: Array[String]): Unit = {
    // For local debugging, add: .setMaster("local[2]")
    val sparkConf = new SparkConf().setAppName("ETL-Step1-")
    val sc = new SparkContext(sparkConf)
    val sqlc = new SQLContext(sc)

    try {
      // Parquet output of the daily aggregation job.
      val sourceData = sqlc.read.parquet("/mnt/lkx_test_elasticsearch/dayResult/*")
      sourceData.registerTempTable("table01")

      // One row per distinct VIN.
      val resultData = sqlc.sql("select distinct vin from table01")
      resultData.saveToEs("obd/vin")
    } finally {
      // Always release cluster resources, even when the job fails.
      sc.stop()
    }
  }
}
