package main.java

import org.apache.spark.sql.SparkSession

/**
  * SalesScala
  *
  * @author zhangyimin
  * @date 2018-11-07 11:03 AM
  * @version 1.0
  */
object SalesScala {

  /**
    * Entry point: loads the sales file, aggregates total amount and order
    * count per year using the RDD API, then repeats the aggregation with
    * Spark SQL over a DataFrame, printing all results to stdout.
    *
    * Expected input line format (CSV), e.g.:
    *   13,987,1998-01-10,3,999,1,1232.16
    * field 2 is the order date (yyyy-MM-dd), field 6 is the order amount.
    *
    * @param args optional; args(0) overrides the default HDFS input path
    */
  def main(args: Array[String]): Unit = {
    // Allow the input location to be overridden from the command line;
    // default preserves the original hard-coded HDFS path.
    val inputPath =
      if (args.nonEmpty) args(0)
      else "hdfs://10.16.7.36:9000/data/input/sales/sales"

    val sparkSession = SparkSession.builder()
      .appName("sales")
      .master("local")
      .getOrCreate()

    // Standard way to get toDF()/toDS() implicits (replaces the legacy
    // sqlContext.implicits._ route).
    import sparkSession.implicits._

    val sc = sparkSession.sparkContext
    val rdd = sc.textFile(inputPath)

    // Parse each line once into (year, amount); cached because three
    // independent aggregations below reuse it.
    val yearAmount = rdd.map { line =>
      val words = line.split(",")
      // words(2) is "yyyy-MM-dd"; keep only the 4-digit year.
      val year = words(2).substring(0, 4)
      val amount = words(6).toDouble
      (year, amount)
    }.cache()

    // Total sales amount per year (RDD API).
    val totalByYear = yearAmount.reduceByKey(_ + _)
    totalByYear.foreach { case (year, total) =>
      println(year + "年的总金额为:" + total)
    }

    // Order count per year (RDD API).
    val countByYear = yearAmount.mapValues(_ => 1).reduceByKey(_ + _)
    countByYear.foreach { case (year, count) =>
      println(year + "订单总数为:" + count)
    }

    // Same aggregation expressed in Spark SQL over a DataFrame view.
    val salesDF = yearAmount
      .map { case (year, amount) => OrderInfo(year, amount) }
      .toDF()
    salesDF.createOrReplaceTempView("sales")

    val res = sparkSession.sql(
      "select year,sum(amount) as totalAmount ,count(*) as countOrder  from sales group by year order by year")
    res.show()

    sparkSession.stop()
  }

  /** One parsed sales record: order year and order amount. */
  final case class OrderInfo(year: String, amount: Double)

}
