package chapter04

import org.apache.log4j.{Level, Logger}
import org.apache.spark.sql.SparkSession

/**
 * Loads tab-separated house-listing records from `input/house.txt` into a
 * DataFrame, filters out rows with an empty substation, and prints the total
 * number of rentable units (`num`) per area.
 *
 * Expected columns per line (tab-separated):
 *   id, title, link, area, loc, substation, num (Int), daytime
 */
object Test04_houseDF {
  def main(args: Array[String]): Unit = {
    Logger.getLogger("org.apache.spark").setLevel(Level.WARN)
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("house")
      .getOrCreate()
    import spark.implicits._
    try {
      val sc = spark.sparkContext
      val lines = sc.textFile("input/house.txt")
      // Split each record into its tab-separated fields.
      val fields = lines.map(_.split("\t"))
      // Drop malformed rows (too few fields, or a non-integer `num`) instead of
      // letting toInt / indexing crash the whole job on one bad line.
      val df = fields
        .filter(f => f.length >= 8 && f(6).matches("-?\\d+"))
        .map(f => (f(0), f(1), f(2), f(3), f(4), f(5), f(6).toInt, f(7)))
        .toDF("id", "title", "link", "area", "loc", "substation", "num", "daytime")
      df.show()
      // Keep only listings whose substation field is non-empty.
      val df1 = df.filter("substation != ''")
      df1.show()
      // Count of rentable units per area.
      df1.groupBy("area").sum("num").show()
    } finally {
      // Release the local Spark resources even if the job fails.
      spark.stop()
    }
  }
}
