package com.lqfan.bigdata.Project.business

import com.lqfan.bigdata.Project.utils.{IPUtils, KuduUtils, SQLUtils, SchemaUtils}
import com.lqfan.bigdata.Project.utils.{IPUtils, KuduUtils, SQLUtils, SchemaUtils}
import org.apache.spark.sql.{DataFrame, SparkSession}

/**
 * ETL job: reads raw JSON access logs, enriches each record with
 * province/city/ISP resolved from a pipe-delimited IP rule file, and
 * writes the regularized result into a Kudu ODS table.
 *
 * Runs locally (`local[2]`) with hard-coded input paths — dev/demo setup.
 */
object LogETLApp {
  def main(args: Array[String]): Unit = {

    val spark: SparkSession = SparkSession.builder()
      .master("local[2]")
      .appName("LogETLApp")
      .getOrCreate()

    // Fix: ensure the SparkSession is stopped even when the ETL throws,
    // so failed local runs do not leak the driver JVM.
    try {
      import spark.implicits._
      import org.apache.spark.sql.functions._

      // Raw access logs. NOTE(review): path is hard-coded for local dev;
      // consider taking it from args for other environments.
      val rawLogDF: DataFrame =
        spark.read.json("file:///D:\\coding\\bigData\\sparksql-train\\data\\data-test.json")
//      rawLogDF.printSchema()
//      rawLogDF.show(false)

      // IP rule file, pipe-delimited. Fields 2/3 are the numeric IP range
      // bounds; 6/7/9 are province/city/ISP. Assumes well-formed lines —
      // a short or non-numeric line will fail the job (TODO confirm input
      // is pre-validated upstream).
      val ipRuleDF: DataFrame = spark.sparkContext
        .textFile("file:///D:\\coding\\bigData\\sparksql-train\\data\\ip.txt")
        .map { line =>
          val fields: Array[String] = line.split("\\|")
          (fields(2).toLong, fields(3).toLong, fields(6), fields(7), fields(9))
        }
        .toDF("start_ip", "end_ip", "province", "city", "isp")

//      ipRuleDF.printSchema()
//      ipRuleDF.show(false)

      // Fix: register the UDF once as a val. The original 0-arity
      // `def getLongIp() = udf(...)` constructed a fresh UDF object on
      // every call site.
      val ipToLong = udf((ip: String) => IPUtils.ip2Long(ip))

      // Enrich each log row with the numeric form of its client IP so the
      // range join against [start_ip, end_ip] is possible.
      // (Replaces the original `var jsonDF` reassignment with an immutable val.)
      val logDF: DataFrame = rawLogDF.withColumn("ip_long", ipToLong($"ip"))

//      logDF.join(ipRuleDF, logDF("ip_long")
//        .between(ipRuleDF("start_ip"), ipRuleDF("end_ip"))
//      ).show()

      // Spark SQL 方式的实现 — the range-join SQL text lives in SQLUtils.SQL
      // and reads from these two temp views.
      logDF.createOrReplaceTempView("logs")
      ipRuleDF.createOrReplaceTempView("ips")
      val result: DataFrame = spark.sql(SQLUtils.SQL)

      // 将规整后的数据放入kudu — table definition only; create/drop table
      // bookkeeping is encapsulated inside KuduUtils.sink.
      val kuduMasters = "hadoop000"
      val kuduTables  = "ods"
      val partitonKey = "ip"
      KuduUtils.sink(kuduMasters, kuduTables, partitonKey, SchemaUtils.ODSSchema, result)
    } finally {
      spark.stop()
    }
  }
}
