package pk

import org.apache.spark.sql.SparkSession
/** One parsed record of an Apache-style access log (10 whitespace-separated fields). */
final case class aaa(
        ip: String,             // client IP address
        mid: String,            // device ID
        userid: String,         // user ID
        time: String,           // access date/time (leading '[' stripped by the parser)
        request_method: String, // HTTP request method, e.g. GET/POST
        request_url: String,    // requested URL path
        http_info: String,      // HTTP protocol version, e.g. HTTP/1.1
        status: Int,            // HTTP response status code
        size: Long              // response size in bytes
)
object demo3 {
  /**
   * Entry point: reads an Apache-style access log from HDFS, parses each line
   * into an [[aaa]] record, and shows the first rows of the resulting DataFrame.
   */
  def main(args: Array[String]): Unit = {
    import scala.util.Try

    val spark = SparkSession
      .builder()
      .master("local[2]")
      .appName("demo3")
      .config("spark.sql.shuffle.partitions", "2")
      .getOrCreate()
    import spark.implicits._
    val sc = spark.sparkContext

    try {
      val logrdd = sc.textFile(
        "hdfs://node101:8020/rk/lx/access_2013-05-30.log", minPartitions = 2
      )

      // Parse each line exactly once (the original split every line twice:
      // once in filter, once in map). Records that are null, have the wrong
      // field count, or carry non-numeric status/size (e.g. "-" for size in
      // real access logs) are skipped instead of failing the whole job.
      val logdf = logrdd
        .flatMap { line =>
          Option(line)
            .map(_.split("\\s"))
            .filter(_.length == 10)
            .flatMap { f =>
              Try(
                aaa(
                  f(0),
                  f(1),
                  f(2),
                  f(3).replace("[", ""),  // strip leading '[' from the timestamp
                  f(5).replace("\"", ""), // strip quotes around the method
                  f(6),
                  f(7).replace("\"", ""), // strip quotes around the protocol
                  f(8).toInt,
                  f(9).toLong
                )
              ).toOption // drop records with unparseable status/size
            }
        }
        .toDF()

      logdf.show(10, truncate = false)
    } finally {
      // Release Spark resources even if parsing/show fails.
      spark.stop()
    }
  }
}
