package com.om.etl

import org.apache.spark.sql.{DataFrame, SparkSession}
import org.apache.spark.sql.functions._
import org.apache.spark.sql.types.{StringType, StructField, StructType}
import org.apache.spark.sql.functions.udf


/**
 * Minimal ETL pipeline over an nginx/logstash access-log JSON export.
 *
 * Flow: [[extract]] reads the raw JSON dump, [[transform]] flattens and
 * enriches the records for the `repo.openeuler.org` vhost, and [[load]]
 * persists the result. Two mock UDFs (IP lookup, user-agent parsing) must be
 * registered on the session before `transform` runs — see [[registerUDF]].
 */
object ScalaETL {

  // Default file locations; overridable via the defaulted method parameters.
  private val DefaultInputPath = "./data/logstash.100.json"
  private val DefaultOutputPath = "./data/out"

  /**
   * Reads the raw logstash JSON dump into a DataFrame.
   *
   * @param spark active session used for reading
   * @param path  input location (defaults to the bundled sample file)
   */
  def extract(spark: SparkSession, path: String = DefaultInputPath): DataFrame =
    spark.read.json(path)

  /**
   * Flattens `_source`, keeps only `repo.openeuler.org` traffic, and enriches
   * each record with mock IP/agent info plus download-type flags.
   *
   * NOTE(review): assumes the input carries a `_source` struct containing the
   * nginx fields referenced below — confirm against the actual dump schema.
   * `path.alias("is_internal_path")` copies the raw path verbatim; presumably
   * a later step derives a boolean from it — verify downstream expectations.
   *
   * Requires `mockIPInfoUDF` and `mockAgentInfoUDF` to be registered first
   * (see [[registerUDF]]); `callUDF` fails at analysis time otherwise.
   */
  def transform(df: DataFrame): DataFrame = {
    df.select("_source.*")
      .filter(col("vhost") === "repo.openeuler.org")
      // Struct-typed enrichment columns, expanded with `.*` in the projection.
      .withColumn("agent_info", callUDF("mockAgentInfoUDF", col("http_user_agent")))
      .withColumn("ip_info", callUDF("mockIPInfoUDF", col("remote_addr")))
      // 1/0 flags marking ISO image and RPM package downloads.
      .withColumn("is_iso_download", when(col("path").contains(".iso"), 1).otherwise(0))
      .withColumn("is_rpm_download", when(col("path").contains(".rpm"), 1).otherwise(0))
      .select(
        // Fix: the original projection listed `path` and `remote_addr` twice,
        // producing duplicate column names that break most downstream sinks.
        col("path"),
        col("bytes_sent"),
        col("vhost"),
        col("proxy_remote_addr"),
        col("remote_addr"),
        col("request_query"),
        col("agent_info.*"),
        col("ip_info.*"),
        col("is_iso_download"),
        col("is_rpm_download"),
        col("@timestamp").alias("updated_at"),
        col("request_query").alias("link"),
        col("@timestamp").alias("created_at"),
        col("remote_addr").alias("location_ip"),
        col("vhost").alias("hostname"),
        col("path").alias("is_internal_path")
      )
  }

  /**
   * Persists the transformed records as JSON.
   *
   * Fix: the original `df.rdd.saveAsTextFile` wrote `Row.toString` output,
   * discarding the schema, and failed when the output directory already
   * existed; the DataFrame writer keeps column names and overwrites cleanly.
   *
   * @param df   records to persist
   * @param path output directory (defaults to `./data/out`)
   */
  def load(df: DataFrame, path: String = DefaultOutputPath): Unit =
    df.write.mode("overwrite").json(path)

  /** Result schema of the mock IP lookup; every field echoes the input String. */
  val ipSchema: StructType = StructType(Seq(
    StructField("location", StringType, nullable = false),
    StructField("country", StringType, nullable = false),
    StructField("city", StringType, nullable = false),
    StructField("region_name", StringType, nullable = false),
    StructField("continent_name", StringType, nullable = false),
    StructField("region_iso_code", StringType, nullable = false)
  ))

  /** Result schema of the mock user-agent parser. */
  val agentSchema: StructType = StructType(Seq(
    StructField("os", StringType, nullable = false),
    StructField("equipment", StringType, nullable = false),
    StructField("browser", StringType, nullable = false)
  ))

  /**
   * Registers the two mock enrichment UDFs on the session. Both simply copy
   * their input into every field of the declared struct schema — stand-ins
   * for real geo-IP and user-agent services.
   */
  def registerUDF(spark: SparkSession): Unit = {
    val ipUDF = udf((s: String) => (s, s, s, s, s, s), ipSchema)
    val agentUDF = udf((s: String) => (s, s, s), agentSchema)
    spark.udf.register("mockIPInfoUDF", ipUDF)
    spark.udf.register("mockAgentInfoUDF", agentUDF)
  }

  /** Entry point: wires extract → transform → load on a local session. */
  def main(args: Array[String]): Unit = {
    val spark: SparkSession = SparkSession
      .builder()
      .appName("Demo")
      .master("local")
      .getOrCreate()
    try {
      registerUDF(spark)
      val transformed = transform(extract(spark))
      load(transformed)
    } finally {
      // Fix: the original never stopped the session, leaking the local context.
      spark.stop()
    }
  }
}
