package com.zyh.oa

import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{DataFrame, SparkSession}

/**
 * Batch job: counts logins per (app, city) within a fixed time window.
 *
 * Reads whitespace-separated login-report lines from HDFS, registers them as a
 * temp view, and aggregates with Spark SQL. Intended to be run standalone
 * (local[*] master is hard-coded for this course project).
 */
object LoginCityApplication {

  /**
   * One parsed login-report record. Field names become the DataFrame column
   * names via `toDF()`. The six Boolean fields are per-dimension risk flags
   * parsed from columns 6..11 of the input line.
   *
   * NOTE: renamed `orderness_passowrd` -> `orderliness_password` (typo fix);
   * the column is defined and consumed only inside this job, so no external
   * reader is affected.
   */
  final case class LoginRecord(
      time: Long,
      app_name: String,
      username: String,
      uuid: String,
      login_city: String,
      geo_point: String,
      city: Boolean,
      device: Boolean,
      habit: Boolean,
      input_features: Boolean,
      orderliness_password: Boolean,
      speed: Boolean
  )

  def main(args: Array[String]): Unit = {
    val spark: SparkSession = SparkSession.builder()
      .appName("evaluateRiskRate")
      .master("local[*]")
      .getOrCreate()
    import spark.implicits._

    // Parse each whitespace-separated line into a typed record.
    // Assumes every line has at least 12 fields — TODO confirm input is clean,
    // otherwise a malformed line will fail the job with an exception.
    val records: RDD[LoginRecord] = spark.sparkContext
      .textFile("hdfs://hadoop10:9000/final-project/2022-07")
      .map(_.split("\\s+"))
      .map(a =>
        LoginRecord(
          a(0).toLong, a(1), a(2), a(3), a(4), a(5),
          a(6).toBoolean, a(7).toBoolean, a(8).toBoolean,
          a(9).toBoolean, a(10).toBoolean, a(11).toBoolean
        ))

    // Column names are derived from the case-class field names.
    val df: DataFrame = records.toDF()
    df.createOrReplaceTempView("t_report")

    // Count city logins within a time window (epoch milliseconds).
    // NOTE(review): start == end, so only records stamped at this exact
    // millisecond match — confirm the intended window bounds.
    val start: Long = 1592533457000L
    val end: Long = 1592533457000L

    // BUG FIX: the original projected only count(*) while grouping by
    // app_name/login_city, so result rows could not be attributed to any
    // group. The grouping columns are now included in the projection.
    val sql =
      s"""
         |select app_name, login_city, count(*) as login_count
         |from t_report
         |where time between ${start} and ${end}
         |group by app_name, login_city
         |""".stripMargin

    val result: DataFrame = spark.sql(sql)
    result.show()
    spark.stop()
  }
}
