package day3

import org.apache.log4j.{Level, Logger}
import org.apache.spark.sql.SparkSession

/**
 * Loads a tab-separated time-consumption log into a temp view and, per user,
 * aggregates the distinct timestamps and the rounded total cost via Spark SQL.
 *
 * Expected line format: log_id \t user \t time \t cost
 */
object time_consume_sql {
  def main(args: Array[String]): Unit = {
    System.setProperty("hadoop.home.dir", "D:\\devtools\\hadoop")
    Logger.getLogger("org").setLevel(Level.OFF)

    // Input path may be overridden from the command line; defaults to the
    // original hard-coded location for backward compatibility.
    val inputPath =
      if (args.nonEmpty) args(0)
      else "D:\\data\\HCIP\\files\\time_consume.log"

    val session = SparkSession.builder()
      .appName("SparkSQL-test")
      .master("local")
      .getOrCreate()

    try {
      import session.implicits._

      session.sparkContext.textFile(inputPath)
        .map(_.split("\t"))
        // Guard against malformed/short lines that would otherwise throw
        // ArrayIndexOutOfBoundsException inside the tuple construction.
        .filter(_.length >= 4)
        .map(fields => (fields(0), fields(1), fields(2), fields(3)))
        .toDF("log_id", "user", "time", "cost")
        .createOrReplaceTempView("time_consume")

      // collect_set de-duplicates timestamps per user; sum(cost) relies on
      // Spark's implicit string-to-double cast, rounded to 2 decimals.
      val sql =
        "select user,collect_set(time) as time_all,round(sum(cost),2) as cost_all " +
          "from time_consume group by user"

      session.sql(sql).show(false)
    } finally {
      // Original code never stopped the session, leaking the local Spark
      // context and potentially keeping the JVM alive.
      session.stop()
    }
  }
}
