package sql

import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.{Column, DataFrame, Row, SparkSession}
import org.apache.spark.rdd.RDD

// TODO: convert the RDD to a DataFrame via the reflection mechanism
case class Info(uid:Int,articleId:Int,date:String,province:String)

object Count_Province_Top {
  /**
   * Reads space-separated access-log lines ("uid articleId date province")
   * from data/access.txt, converts them to a DataFrame via reflection on the
   * [[Info]] case class, and prints the number of distinct users per
   * (date, province, articleId) group.
   *
   * @param args unused command-line arguments
   */
  def main(args: Array[String]): Unit = {
    val sparkSession = SparkSession.builder()
      .appName("Count_Province_Top") // name the job so it is identifiable in the Spark UI
      .master("local[2]")
      .getOrCreate()

    try {
      val sc = sparkSession.sparkContext

      val data = sc.textFile("data/access.txt")

      // Parse each space-separated line into an Info record.
      // NOTE(review): assumes every line has at least 4 fields and that the
      // first two are valid integers — malformed lines will throw; confirm
      // the input format against the actual data file.
      val personRDD = data.map(_.split(" ")).map(x => Info(x(0).toInt, x(1).toInt, x(2), x(3)))

      // Convert the RDD to a DataFrame using reflection on the case class;
      // the session's implicit conversions must be imported for .toDF.
      import sparkSession.implicits._

      val personDF: DataFrame = personRDD.toDF

      personDF.show()

      // ---------------- SQL style -------------
      personDF.createTempView("person")
      val sql = "select date,province,articleId,count(distinct uid) as total from person group by date,province,articleId"
      sparkSession.sql(sql).show()
    } finally {
      // Fix: the original never released the session; always stop the
      // SparkSession (and its underlying SparkContext) to free resources.
      sparkSession.stop()
    }
  }
}
