package com.jinghang.nginxProject.APP

import com.jinghang.nginxProject.utils.ip.IPParser
import com.jinghang.nginxProject.utils.ip.IPParser.RegionInfo
import com.kumkee.userAgent.{UserAgent, UserAgentParser}
import org.apache.spark.sql.types.{IntegerType, StringType, StructField, StructType}
import org.apache.spark.sql.{Row, SparkSession}

/**
  * Parsed input log record layout (tab-separated):
  * ip time url method protocol http_status bytes_sent referer user_agent
  *
  * Target enriched layout (IP region + user-agent fields):
  * ip country province city time method url protocol http_status bytes_sent referer
  * user_agent browser_name browser_version engine_name engine_version os_name platform_name is_mobile
  */
object _020_LogFormatApp {

  /**
    * Enriches pre-formatted access-log records with region information
    * resolved from the client IP, builds a DataFrame, and prints it.
    *
    * Input record layout (tab-separated, one record per line):
    *   ip  time  url  method  protocol  http_status  bytes_sent  referer  user_agent
    *
    * Output DataFrame columns:
    *   ip  country  province  city  time  url  method
    */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession
      .builder()
      .appName("SparkStatFormatJob")
      .master("local[4]")
      .getOrCreate()

    try {
      // Load the pre-formatted log files produced by the previous job.
      val path = "data/output/LogFormatApp1/*"
      // `val`, not `var`: the RDD reference is never reassigned.
      val sourceRDD = spark.sparkContext.textFile(path, 2)
      // Debug: sourceRDD.take(10).foreach(println)

      val rdd_row = sourceRDD.map { line =>
        val splits = line.split("\t")
        val ip = splits(0)

        // Resolve country / province / city from the client IP.
        // NOTE(review): getInstance() is presumably a cached singleton — confirm,
        // otherwise hoist it out of the lambda via mapPartitions.
        val regionInfo: RegionInfo = IPParser.getInstance().analyseIp(ip)
        val country = regionInfo.getCountry
        val province = regionInfo.getProvince
        val city = regionInfo.getCity

        val time = splits(1)
        val url = splits(2)
        val method = splits(3)

        // NOTE(review): the previous version also parsed the user-agent string
        // (browser, engine, OS, platform, isMobile) and built a tab-joined
        // log_format string, but none of it reached the emitted Row — that
        // dead work (including allocating a new UserAgentParser per record)
        // has been removed. Re-add those fields here AND in the schema below
        // when the extra columns are actually needed.
        Row(ip, country, province, city, time, url, method)
      }

      // Schema matching the Row built above, field-for-field.
      val schema = StructType(Array(
        StructField("ip", StringType),
        StructField("country", StringType),
        StructField("province", StringType),
        StructField("city", StringType),
        StructField("time", StringType),
        StructField("url", StringType),
        StructField("method", StringType)
      ))

      val dataFrame = spark.createDataFrame(rdd_row, schema)

      dataFrame.show()
      dataFrame.printSchema()
    } finally {
      // Always release the local Spark context, even if the job fails.
      spark.stop()
    }
  }

}
