package com.hdaccp.ch08

import org.apache.spark.sql.SparkSession

import scala.collection.mutable.ListBuffer

object Demo4 {
  /**
   * Entry point: loads the cleaned music dataset from Parquet, registers it as a
   * temporary SQL view, and prints the 3 most frequently played songs.
   *
   * @param args command-line arguments (unused)
   */
  def main(args: Array[String]): Unit = {
    // 1. Build a local SparkSession with 2 worker threads.
    val spark = SparkSession.builder()
      .master("local[2]")
      .appName("ch08Demo1App")
      .getOrCreate()

    /* Earlier text-file-based loading path, kept for reference:
    val rdd = spark.sparkContext.textFile("F:\\accp教学\\sparkresources\\music1.txt")

    // rdd => DataFrame
    val df = rdd.map(x=>x.split("\t")).map(y=>Music(y(0),y(1),y(2),y(3),y(4),y(5))).toDF()
    */

    // Load the pre-cleaned dataset from Parquet.
    val df = spark.read.format("parquet").load("F:\\accp教学\\sparkresources\\clean12")

    // Register a temporary view so the data can be queried with SQL.
    df.createOrReplaceTempView("music")

    // BUG FIX: the original code assigned the result of .show() — which returns
    // Unit — to `dfs`, so `dfs` was never a DataFrame even though the commented-out
    // MySQL export below calls dfs.foreachPartition. Keep the DataFrame and invoke
    // show() as a separate side-effecting statement. Also use spark.sql (the
    // SparkSession entry point) instead of the legacy df.sqlContext.sql.
    val dfs = spark.sql("select song,count(1) as times from music group by song order by times desc limit(3)")
    dfs.show()

    /* // dfs -> mysql (pending: requires MusicDao / Mu1 from elsewhere in the project)
    dfs.foreachPartition(a=>{
      val list = new ListBuffer[Mu1]
      a.foreach(b=>{
        var gender = b.getAs[String]("gender")
        var times = b.getAs[Long]("times")
        list.append(Mu1(gender,times))
      })
      MusicDao.insertMu1(list)
    })*/

    // df.show()

    // Release Spark resources before exiting.
    spark.stop()
  }
}
