package chapter04

import org.apache.log4j.{Level, Logger}
import org.apache.spark.api.java.function
import org.apache.spark.sql.{SparkSession, functions}

object Test01_sparkSession {
  /**
   * Demonstrates three ways to build and query tabular data with Spark SQL:
   *   1. reading JSON directly into a DataFrame,
   *   2. converting an RDD of tuples to a DataFrame via `toDF`,
   *   3. reading text into a typed Dataset of `houseData` rows.
   * Each stage prints/shows its result; the session is stopped at the end.
   */
  def main(args: Array[String]): Unit = {
    Logger.getLogger("org.apache.spark").setLevel(Level.WARN)
    // Create a SparkSession — the entry point to the DataFrame/Dataset API.
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("session")
      .getOrCreate()
    println(spark)
    val df = spark.read.json("input/user.json")
    df.show() // side-effecting 0-arity call: keep explicit parentheses

    // Read a tab-separated text file as an RDD and convert it to a DataFrame.
    val sc = spark.sparkContext
    val value = sc.textFile("input/house.txt")
    // NOTE(review): assumes every line has at least 8 tab-separated fields
    // and that field 7 parses as Int — malformed input will throw here.
    val value1 = value.map(e => e.split("\t").toList)
      .map(e => (e(0), e(1), e(2), e(3), e(4), e(5), e(6).toInt, e(7)))
    println(value1.take(1).toList)
    import spark.implicits._
    val df1 = value1.toDF("id", "title", "link", "area", "loc", "station", "num", "daytime")
    df1.show()
    // Keep only rows with a non-empty station, then sum `num` per area.
    val value2 = df1.filter($"station".notEqual(""))
    value2.groupBy("area").sum("num").show()

    // Same pipeline using the typed Dataset API and the houseData case class.
    val value3 = spark.read.textFile("input/house.txt")
    val value4 = value3.map(e => e.split("\t"))
    val value5 = value4.map(e => houseData(e(0), e(1), e(2), e(3), e(4), e(5), e(6).toInt, e(7)))
    val value6 = value5.filter(e => !e.station.equals(""))
    value6.show()
    value6.groupBy("area").sum("num").show()
    spark.stop()
  }
}
/**
 * One house listing parsed from a tab-separated line of input/house.txt
 * (8 fields; `num` is the only numeric column).
 */
case class houseData(
    id: String,
    title: String,
    link: String,
    area: String,
    loc: String,
    station: String,
    num: Int,
    daytime: String
)
