package com.jianghang.class_three.log_format.APP

import com.jianghang.class_three.log_format.utils.{LogConvertUtil, StringUtil}
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.types.StructType
import org.apache.spark.sql.{Row, SaveMode, SparkSession}

/**
  * Parsed log format (input, one line per record):
  * ip time url method  protocol http_status bytes_sent referer user_agent
  *
  * Enriched output format:
  * ip country province city time method url protocol http_status bytes_sent referer
  * user_agent browser_name browser_version engine_name engine_version os_name platform_name  is_mobile
  */
object _020_LogFormatApp {

  /**
    * Entry point: reads the pre-formatted log lines produced by the previous
    * stage, parses each line into a [[Row]] via `LogConvertUtil.parseLog`,
    * builds a DataFrame with the schema from `LogConvertUtil.struct`, and
    * writes the result out as JSON.
    *
    * @param args command-line arguments (unused)
    */
  def main(args: Array[String]): Unit = {

    // local[4] is hard-coded for development runs; override with
    // spark-submit --master for cluster execution.
    val spark = SparkSession
      .builder()
      .appName("SparkStatFormatJob")
      .master("local[4]")
      .getOrCreate()

    // Ensure the SparkSession is released even if the pipeline fails.
    try {
      // Read the files emitted by the previous stage (LogFormatApp1).
      val path = "data/output/LogFormatApp1/*"
      // val, not var: the reference is never reassigned.
      val sourceRDD = spark.sparkContext.textFile(path, 2)
      // Print the first ten records as a quick sanity check.
      sourceRDD.take(10).foreach(println)

      // Parse each raw line into a Row matching LogConvertUtil.struct.
      val rddRow: RDD[Row] = sourceRDD.map(LogConvertUtil.parseLog)
      val schema: StructType = LogConvertUtil.struct

      val dataFrame = spark.createDataFrame(rddRow, schema)
      dataFrame.printSchema()
      dataFrame.show()

      // Overwrite any previous output so the job is safely re-runnable.
      dataFrame
        .write
        .mode(SaveMode.Overwrite)
        .format("json")
        .save("data/output/LogFormatApp2")
    } finally {
      spark.stop()
    }
  }
}


