package com.shujia.sql

import org.apache.spark.sql.{SQLContext, SaveMode}
import org.apache.spark.{SparkConf, SparkContext}

object Demo3Student {

  /**
    * Entry point: reads student records from `/data/student.json`, counts
    * students per class with Spark SQL, and writes the aggregation back out
    * as JSON to `/data/sqlout` (overwriting any previous run).
    *
    * Submit with e.g.:
    *   spark-submit --class com.shujia.sql.Demo3Student --master yarn-client \
    *     --executor-memory 512m --num-executors 1 ./bigdata_class-1.0-SNAPSHOT.jar
    *
    * Inspect logs (look for the first ERROR):
    *   yarn logs -applicationId application_1565331499198_0006
    */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
      //.setMaster("local") // uncomment to run locally
      .setAppName("Demo3Student") // fixed: was "map", a leftover from a copied template

    // Default parallelism for Spark SQL shuffles; keep small for this demo dataset.
    conf.set("spark.sql.shuffle.partitions", "4")

    val sc = new SparkContext(conf)

    try {
      // Spark SQL entry point (pre-2.0 API; SparkSession supersedes this in Spark 2.x).
      val sqlContext = new SQLContext(sc)

      // Read the JSON data into a DataFrame; the schema is inferred from the file.
      val df = sqlContext.read.json("/data/student.json")

      // Register as a temp table so it can be queried with plain SQL.
      df.registerTempTable("student")

      // Number of students per class.
      val countDF = sqlContext.sql("select clazz ,count(1) from student group by clazz")

      countDF
        .write
        .mode(SaveMode.Overwrite) // replace output from any earlier run
        .json("/data/sqlout")
    } finally {
      // Always release the SparkContext so the YARN application terminates cleanly,
      // even if the job above throws.
      sc.stop()
    }
  }

}
