package cn.wangjie.spark.store.es

import org.apache.spark.SparkConf
import org.apache.spark.sql.{DataFrame, SparkSession}

object Test {

  /** Entry point: reads the "myrow" index from a local Elasticsearch node
    * into a DataFrame and prints the first rows, then shuts the session down.
    */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("app1").setMaster("local[2]")
    conf.set("es.index.auto.create", "true") // auto-create the ES index from Spark
    conf.set("es.nodes", "localhost")        // ES host to connect to
    conf.set("es.port", "9200")              // ES REST port
    conf.set("es.nodes.wan.only", "true")    // talk only to the declared node; skip cluster-node discovery

    val spark = SparkSession.builder().config(conf).getOrCreate()

    // Schema is inferred automatically by the connector. "es" is the short
    // format alias registered by elasticsearch-spark for
    // "org.elasticsearch.spark.sql".
    val df = spark.read.format("es").load("myrow")
    df.show(10, truncate = true) // named boolean arg for readability

    //import spark.implicits._

    // Alternative: read via the fully-qualified format name and expose the
    // result as a temp view for Spark SQL queries.
    /*
        val essessionDataFrame: DataFrame = spark.sqlContext.read
          .format("org.elasticsearch.spark.sql")
          .option("inferSchema", "true")
          .load("myrow")
        essessionDataFrame.createOrReplaceTempView("sessionTable")
        essessionDataFrame.show()*/

    spark.stop()
    println("over....")
  }
}
