package com.hdaccp.ch10

import org.apache.spark.sql.{SaveMode, SparkSession}
import com.ggstar.util.ip.IpHelper
object MyLogCleanJob {

  /**
   * Entry point: reads raw tab-separated access logs, cleans each record
   * (resolves the client IP to a region, normalises the date, splits the
   * request URL), and writes the result as a single Parquet file.
   *
   * Expected input columns (tab-separated):
   *   0: client IP
   *   1: timestamp, "yyyy-MM-dd ..." prefix
   *   2: request line, e.g. "GET learn/821 HTTP/1.0"
   *   3, 4: extra fields passed through unchanged
   */
  def main(args: Array[String]): Unit = {
    // 1. Obtain a SparkSession (local mode, 2 threads).
    val spark = SparkSession.builder()
      .appName("ch01MyLogCleanJobApp")
      .master("local[2]")
      .getOrCreate()
    // Bring in implicit conversions such as rdd.toDF().
    import spark.implicits._

    val rdd = spark.sparkContext.textFile("F:\\accp教学\\sparkresources\\log4")

    // RDD[String] => DataFrame. Malformed lines (too few columns, or a
    // request field without the expected "METHOD path/segment" shape) are
    // dropped via flatMap instead of crashing the whole job with an
    // ArrayIndexOutOfBoundsException.
    val df = rdd
      .map(_.split("\t"))
      .flatMap(parseRecord)
      .toDF()

    df.coalesce(1)
      .write
      .format("parquet")
      .mode(SaveMode.Overwrite)
      .save("F:\\accp教学\\sparkresources\\cleanlog")
    spark.stop()
  }

  /**
   * Converts one tab-split log line into a cleaned MyLog record.
   *
   * Returns None when the line does not have the expected shape (any
   * field or URL segment missing), so a single bad record cannot abort
   * the job. For well-formed lines the output is identical to the
   * original inline parsing logic.
   */
  private def parseRecord(fields: Array[String]): Option[MyLog] =
    scala.util.Try {
      // Resolve the raw client IP to a region name.
      val region = IpHelper.findRegionByIp(fields(0))
      // "2017-05-11 14:09:14" -> "20170511"
      val day = fields(1).substring(0, 10).replaceAll("-", "")
      // e.g. "GET learn/821 HTTP/1.0" -> path "learn/821"
      val url = fields(2)
      val path = url.split(" ")(1)
      val segments = path.split("/")
      val cmsType = segments(0)
      // The second segment may carry an extension (e.g. "821.html"):
      // keep only the part before the first dot.
      val rawId = segments(1)
      val dot = rawId.indexOf(".")
      val cmsId = if (dot == -1) rawId else rawId.substring(0, dot)
      MyLog(region, day, url, cmsType, cmsId, fields(3), fields(4))
    }.toOption
}
