package NBA

import org.apache.spark.sql.SparkSession

// One row of the cleaned NBA finals file: year, date, winner ("first"), series
// score, runner-up ("second") and a trailing flag column (empty before 1970).
// NOTE(review): `sorce` is presumably a typo for "score", but the field name
// defines the DataFrame column name, so it is kept as-is — confirm before renaming.
case class Champion(year: String, date: String, first: String, sorce: String, second: String, flag: String)
/**
 * Spark SQL job that counts NBA championships per winning team.
 *
 * NOTE(review): "ChamptionCountSQL" looks like a typo for "ChampionCountSQL",
 * but the object name is the job's entry point, so it is kept unchanged.
 */
object ChamptionCountSQL {
  def main(args: Array[String]): Unit = {
    getChampionCount()
  }

  /**
   * Reads the pre-cleaned finals file, registers it as the `champion` temp
   * view, and prints the championship count per team ("first" column) in
   * descending order.
   */
  def getChampionCount(): Unit = {
    val sess = SparkSession.builder()
      .appName("ChamptionCountSQL")
      .master("local")
      .getOrCreate()

    try {
      import sess.implicits._

      // Read the cleaned comma-separated file. Rows before 1970 apparently
      // have no flag column (5 fields), so the flag is filled with "".
      // The fields are already Strings (result of split), so the original's
      // redundant `.toString()` calls were dropped.
      val df = sess.read.textFile("A:/output3/nba_clear/part-00000")
        .map(_.split(","))
        .map { arr =>
          if (arr(0).toInt < 1970)
            Champion(arr(0), arr(1), arr(2), arr(3), arr(4), "")
          else
            Champion(arr(0), arr(1), arr(2), arr(3), arr(4), arr(5))
        }
        .toDF()

      // Register a temp view so the aggregation can be expressed in SQL.
      df.createOrReplaceTempView("champion")

      // Count titles per team, most titles first.
      sess.sql("select first,count(1) from champion group by first order by count(1) desc")
        .show(1000, false)
    } finally {
      // Bug fix: the original never stopped the SparkSession, leaking the
      // Spark context when the method returns or an exception is thrown.
      sess.stop()
    }
  }
}