package com.lianzt

import net.minidev.json.{JSONObject, JSONValue}
import org.apache.spark.sql.types.{StringType, StructField, StructType}
import org.apache.spark.sql.{Row, SparkSession}

object Main {

  /** Fields extracted from each log line's JSON "head" object.
    * (Currently unused by the job itself — kept for binary/source compatibility.)
    */
  case class Request(ip: String, userAgent: String, sendTime: String, serviceCode: String)

  /** Spark batch job: parse JSON request logs from HDFS and write two aggregates —
    * request counts per (ip, user_agent), and per-time-bucket access counts per ip.
    *
    * Usage: Main [hdfsBase]
    * @param args optional first argument overrides the HDFS base directory
    *             (defaults to "hdfs://FatServer1:9000/jtgzfw"; the production
    *             cluster uses e.g. "hdfs://hacluster/user/sjtp/jtgzfw").
    */
  def main(args: Array[String]): Unit = {

    val spark = SparkSession
      .builder()
      .appName("count log file")
      .getOrCreate()

    // Base HDFS directory, overridable from the command line instead of
    // editing the source when switching clusters.
    val hdfs = args.headOption.getOrElse("hdfs://FatServer1:9000/jtgzfw")

    val sc = spark.sparkContext
    // Keep only lines tagged with the request-time marker, then strip the
    // fixed-width prefix before the JSON payload.
    // NOTE(review): assumes the payload always starts at offset 21
    // (the 20-char "['log-request-time']" marker plus one separator char) —
    // TODO confirm against the actual log format.
    val jsons = sc.textFile(s"${hdfs}/xaa") // production input: s"${hdfs}/*.log"
      .filter(x => x.indexOf("['log-request-time']") != -1)
      .mapPartitions[JSONObject](list =>
        list.map(x => JSONValue.parse(x.substring(21)).asInstanceOf[JSONObject]))

    // Project the aggregation columns out of each record's "head" object.
    // Missing keys yield null, which is acceptable: every column below is nullable.
    val requests = jsons.mapPartitions(list =>
      list.map { x =>
        val head = x.get("head").asInstanceOf[JSONObject]
        Row(
          head.get("_ip").asInstanceOf[String],
          head.get("_user_agent").asInstanceOf[String],
          head.get("send_time").asInstanceOf[String],
          head.get("service_code").asInstanceOf[String])
      })

    val schema =
      StructType(
        StructField("ip", StringType, nullable = true) ::
          StructField("user_agent", StringType, nullable = true) ::
          StructField("send_time", StringType, nullable = true) ::
          StructField("service_code", StringType, nullable = true) :: Nil)

    spark.createDataFrame(requests, schema).createOrReplaceTempView("jtgzfw_request")

    // Distinct clients: request count per (ip, user_agent) pair.
    spark.sql("select ip, user_agent, count(*) from jtgzfw_request group by ip, user_agent")
      .rdd.saveAsTextFile(s"${hdfs}/res/ip")
    // Access volume per ip, bucketed by the first 15 chars of send_time.
    spark.sql("select substring(send_time, 1, 15), ip, count(*) from jtgzfw_request group by substring(send_time, 1, 15), ip order by substring(send_time, 1, 15)")
      .rdd.saveAsTextFile(s"${hdfs}/res/ip-access")

    spark.stop()
  }
}
