package me.chendan.sql

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.SQLContext

/**
  * Created by chendan on 16-4-6.
  */
/**
  * Empty companion class for the [[ScoreCalculator]] object below.
  * NOTE(review): nothing in this file instantiates or references it —
  * it appears to be IDE-generated boilerplate; confirm before removing.
  */
class ScoreCalculator {

}

/**
  * Spark job: reads comma-separated student score records ("uid,chinese,math,english")
  * from `student.txt`, registers them as a temp table, and prints:
  *   - the top 3 students by total score (sum of the last three columns),
  *   - the highest math score, the lowest chinese score, the average english score.
  */
object ScoreCalculator {

  /**
    * Explicit entry point. Using `def main` instead of the `App` trait avoids
    * the delayed-initialization pitfalls of `App` (fields that are silently
    * initialized lazily and may be observed as null/0 from other threads).
    */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("ScoreCalculator")
    val sc = new SparkContext(conf)
    try {
      val sqlContext = new SQLContext(sc)
      val dataRDD = sc.textFile("student.txt")
      // Skip blank lines so the `.toInt` parses below cannot fail on empty input;
      // cache because this RDD is reused for both the DataFrame and the ranking.
      val scoreRDD = dataRDD.filter(_.trim.nonEmpty).map(line => line.split(",")).cache()
      import sqlContext.implicits._
      val student = scoreRDD.map(s => Student(s(0), s(1).toInt, s(2).toInt, s(3).toInt)).toDF()
      student.registerTempTable("student")
      // SQL queries return DataFrames, which support all the usual RDD operations.
      val mathTop = sqlContext.sql("SELECT math FROM student ORDER BY math DESC LIMIT 1")
      val chineseLow = sqlContext.sql("SELECT chinese FROM student ORDER BY chinese ASC LIMIT 1")
      val englishAvg = sqlContext.sql("SELECT AVG(english) FROM student")
      // Rank students by total score: (uid, total) -> (total, uid) so sortByKey
      // orders by the numeric total, descending.
      val rank = scoreRDD
        .map(s => (s(0), s.takeRight(3).map(_.toInt).sum))
        .map { case (uid, total) => (total, uid) }
        .sortByKey(ascending = false)
      // Swap back to (uid, total) for display.
      val top3 = rank.take(3).map { case (total, uid) => (uid, total) }
      println("Top 3: ")
      top3.foreach(println)
      println("math最高分")
      mathTop.show()
      println("chinese最低分")
      chineseLow.show()
      println("english平均分")
      englishAvg.show()
    } finally {
      // Always release cluster resources, even if the job throws.
      sc.stop()
    }
  }
}

/**
  * One row of the student score file.
  *
  * @param uid     student identifier (first CSV column)
  * @param chinese chinese score
  * @param math    math score
  * @param english english score
  */
case class Student(uid: String, chinese: Int, math: Int, english: Int)
