package com.sisyphus.spark.batch

import java.util.{Date, Locale}

import org.apache.commons.lang3.time.FastDateFormat
import org.apache.spark.sql.SparkSession

/**
 * Tests compatibility between Spark and HBase.
 */
object TestApp {

  /**
   * Entry point: loads a sample access log through the custom data source and
   * verifies a timestamp-reformatting UDF end-to-end on a local Spark session.
   *
   * @param args unused command-line arguments
   */
  def main(args: Array[String]): Unit = {
    val sparkSession = SparkSession.builder().appName("TestApp").master("local[2]").getOrCreate()

    try {
      // getResource returns null when the file is absent from the classpath;
      // fail fast with a descriptive error instead of an opaque NullPointerException.
      val path = Option(this.getClass.getClassLoader.getResource("test-access.log"))
        .map(_.getPath)
        .getOrElse(throw new IllegalArgumentException(
          "resource 'test-access.log' not found on classpath"))

      // Load the access log via the custom data source
      // (compatibility check for the com.imooc.log.type reader).
      val rawDF = sparkSession.read.format("com.imooc.log.type")
        .load(path)

      /**
       * Custom Spark SQL UDF: extracts the bracketed timestamp from the raw
       * "time" field (e.g. "[10/Nov/2016:00:01:02 +0800]") and reformats it
       * as yyyyMMddHHmm.
       */
      import org.apache.spark.sql.functions._
      // Declared as a val so the UDF is constructed once, not on every use
      // (the original `def formatTime() = udf(...)` built a new UDF per call).
      val formatTime = udf((time: String) => {
        val bracketed = time.substring(time.indexOf("[") + 1, time.lastIndexOf("]"))
        val parsed = FastDateFormat
          .getInstance("dd/MMM/yyyy:HH:mm:ss Z", Locale.ENGLISH)
          .parse(bracketed)
        FastDateFormat.getInstance("yyyyMMddHHmm").format(new Date(parsed.getTime))
      })

      // Derive a new DataFrame with the reformatted timestamp column
      // (immutable val instead of reassigning a var).
      val logDF = rawDF.withColumn("formattime", formatTime(rawDF("time")))

      logDF.show(false)
    } finally {
      // Release the SparkSession even when loading or parsing fails;
      // the original only stopped it on the success path.
      sparkSession.stop()
    }
  }
}
