package com.atguigu.bigdata.spark.sql;

import org.apache.spark.SparkConf;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;

import java.util.*;

public class SparkSQLES {
    /**
     * Reads the {@code bank} index from a local Elasticsearch node via the
     * elasticsearch-spark connector and prints the distinct values of the
     * {@code city} field.
     *
     * <p>Fixes over the original: the session is closed (try-with-resources —
     * {@code SparkSession} implements {@code Closeable}), and an action
     * ({@code show()}) is invoked; Spark is lazy, so without an action the
     * {@code distinct()} pipeline was never actually executed.
     */
    public static void main(String[] args) {
        SparkConf conf = new SparkConf().setMaster("local[*]").setAppName("sparkSQL");
        // try-with-resources guarantees spark.stop() even if the job throws.
        try (SparkSession spark = SparkSession.builder().config(conf).getOrCreate()) {
            // Connector options: where the Elasticsearch node is listening.
            Map<String, String> esOptions = new HashMap<>();
            esOptions.put("es.nodes", "127.0.0.1");
            esOptions.put("es.port", "9200");

            // "es" is the short data-source name registered by elasticsearch-spark-sql;
            // "bank/_doc" addresses the index/type to read.
            Dataset<Row> bank = spark.read().format("es").options(esOptions).load("bank/_doc");

            // Action is required — without it the query is only a lazy plan.
            Dataset<Row> distinctCities = bank.select("city").distinct();
            distinctCities.show();
        }
    }
}
