package com.spark

import org.apache.spark.sql.SparkSession
import org.apache.spark.{SparkConf, SparkContext}

import scala.collection.mutable.ArrayBuffer


/**
  * Created by Administrator on 2017/8/9.
  */
/**
  * Minimal Spark SQL example: reads a JSON file of news articles, registers it
  * as a temporary view, prints the inferred schema, and prints the `newsid`
  * of (at most) the first ten rows of a simple projection query.
  */
object Sort {

  // Default input location, used when no path is passed on the command line.
  private val DefaultPath = "hdfs://10.200.73.90:9000/zjol/21550000.json"

  /**
    * Entry point.
    *
    * @param args optional; `args(0)` may override the default HDFS input path
    *             (backward compatible: with no args the original path is used)
    */
  def main(args: Array[String]): Unit = {

    val path = if (args.nonEmpty) args(0) else DefaultPath

    // SparkSession is the single entry point since Spark 2.x. The separate
    // SparkConf/SparkContext previously constructed here was never used and
    // has been removed as dead code.
    val sparkSession = SparkSession.builder
      .config("spark.sql.warehouse.dir", "D:\\WorkSpace\\spark\\spark-learning\\spark-warehouse")
      .master("local")
      .appName("spark session example")
      .getOrCreate()

    try {
      // Schema is inferred by scanning the JSON input.
      val json = sparkSession.read.json(path)
      json.createOrReplaceTempView("news")

      json.printSchema()

      val sql = "select newsid,title from news "

      // take(10) collects at most ten rows to the driver — safe for a demo,
      // but do not replace with collect() on large inputs.
      sparkSession.sql(sql).take(10).foreach(row => println(row.getString(0)))
    } finally {
      // Always release the session (and its underlying SparkContext), even
      // when reading or querying fails.
      sparkSession.stop()
    }
  }

}
