package com.tl.spark.scala
// NOTE(review): everything below is dead code — the whole Spark–Solr example is
// commented out. Either restore it (and remove the unused SparkSqlAstBuilder
// import) or delete this file; an all-comment source file should not linger.
//import org.apache.spark.sql.SQLContext
//import org.apache.spark.sql.execution.SparkSqlAstBuilder
//import org.apache.spark.{SparkConf, SparkContext}
//
///**
//  * @program: spark-test
//  * @description: reads the Solr collection "VDB" via the spark-solr connector into a DataFrame and prints its row count
//  * @author: dong.tl
//  * @create: 2018-11-07 17:30
//  **/
//object Spark_solr {
//  def main(args: Array[String]): Unit = {
//    val conf = new SparkConf().setAppName("Spark Solr").setMaster("local[2]")
//    //      .setJars(Seq("F:\\IdeaProjects\\spark-test\\target\\spark-test-1.0-SNAPSHOT.jar"))
//    val sc = new SparkContext(conf)
//    val sqlContext = new SQLContext(sc)
//
//    val options = Map("collection" -> "VDB", "zkhost" -> "192.168.173.231:8983/solr")
//
//    val df = sqlContext.read.format("solr").options(options).load
//
//
//    println(df.count())
//
//
//  }
//}
