import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.{DataFrame, SparkSession}

object job3 {

  /**
   * Entry point. Two demos:
   *   1. RDD word count — prints the 3 most frequent words in `a.text`.
   *   2. Spark SQL — top 3 rows per subject from `C://employee.json`,
   *      via a `row_number()` window function over a temp view.
   */
  def main(args: Array[String]): Unit = {
    // Single SparkSession for the whole job; reuse its SparkContext below —
    // constructing a second SparkContext in the same JVM fails at runtime.
    val spark = SparkSession
      .builder()
      .appName(job3.getClass.getSimpleName)
      .master("local[*]")
      .enableHiveSupport()
      .getOrCreate()

    try {
      // --- Demo 1: word-count top 3 (RDD API) ---
      val sc = spark.sparkContext
      val lines = sc.textFile("a.text")
      val top3 = lines
        .flatMap(_.split("\\s+"))          // split on runs of whitespace
        .filter(_.nonEmpty)                // drop empty tokens
        .map(word => (word, 1))
        .reduceByKey(_ + _)                // count occurrences per word
        .sortBy(_._2, ascending = false)   // most frequent first
        .take(3)
      // Arrays have no useful toString; join elements for readable output.
      println(top3.mkString(", "))

      // --- Demo 2: top 3 per subject (Spark SQL) ---
      val employees: DataFrame = spark.read.json("C://employee.json")
      print("要求基于sparkcore查询出每个科目前三的信息")
      employees.createOrReplaceTempView("topn")
      employees.distinct().show()
      // Rank rows within each subject and keep the top 3 by score.
      // NOTE(review): assumes the JSON exposes `subject` and `score`
      // columns — confirm against the actual employee.json schema.
      spark.sql(
        """
          |SELECT *
          |FROM (
          |  SELECT t.*,
          |         row_number() OVER (PARTITION BY subject ORDER BY score DESC) AS rn
          |  FROM topn t
          |) ranked
          |WHERE rn <= 3
          |""".stripMargin).show() // stripMargin builds the multi-line SQL string
    } finally {
      spark.stop() // release local executor/UI resources even on failure
    }
  }

}
