package soft863

import org.apache.log4j.{Level, Logger}
import org.apache.spark.SparkConf
import org.apache.spark.sql.SparkSession


//import java.util.logging.{Level, Logger}
//import org.slf4j.Logger
//import org.slf4j.LoggerFactory



object App {
  /**
   * Entry point: reads a startup log (newline-delimited JSON) from HDFS,
   * prints a few group-by summaries via Spark SQL, and dumps the raw
   * DataFrame to a local CSV directory.
   */
  def main(args: Array[String]): Unit = {
    // Silence Spark's verbose INFO logging; surface only errors.
    Logger.getLogger("org").setLevel(Level.ERROR)

    // Spark run configuration: local mode using all available cores.
    val sparkConf = new SparkConf().setMaster("local[*]").setAppName("WordCount")

    // Use the idiomatic factory entry point rather than `new SparkSession.Builder()`.
    val spark = SparkSession.builder().config(sparkConf).getOrCreate()

    try {
      // Load the startup log (one JSON object per line) from HDFS.
      val df_app = spark.read.json("hdfs://hadoop100:9000/data/startup.log")
      df_app.show()

      // Register a temp view so the frame can be queried with SQL.
      df_app.createOrReplaceTempView("logtable")

      // Distinct platforms (operating systems) present in the log.
      spark.sql("select platform from logtable group by platform").show()
      // Distinct cities (geographic distribution).
      spark.sql("select city from logtable group by city").show()
      // Distinct application IDs.
      spark.sql("select appID from logtable group by appID").show()

      println(df_app.isEmpty)

      // Persist the raw frame as CSV. Default SaveMode.ErrorIfExists means this
      // fails if "input/applog.csv" already exists — clear it between runs.
      df_app.write.csv("input/applog.csv")
    } finally {
      // Always release the session's resources, even if a stage above throws.
      spark.stop()
    }
  }
}
