package org.qnit.dw

import org.apache.commons.lang3.StringUtils
import org.apache.spark.sql.{SaveMode, SparkSession}
import org.qnit.util.SparkUtil

object DwdApp {

  /**
   * Strips markup and whitespace from raw logbook content: removes `&nbsp;`
   * entities, single quotes, HTML tags and all whitespace characters.
   * Returns "" for null or blank input.
   *
   * Kept as a named (non-lambda) function so the cleaning rules can be
   * unit-tested independently of Spark.
   *
   * Note: `\s+` replaces the original `\n|\t|\s*` — `\s` already covers
   * `\n`/`\t`, and `\s*` could match the empty string at every position
   * (same output, needless zero-width matches).
   */
  def cleanContent(content: String): String =
    if (content != null && content.trim.nonEmpty)
      content.replaceAll("&nbsp;|'|<[^>]+>|\\s+", "")
    else
      ""

  /**
   * True when the input contains at least one CJK character in the basic
   * range U+4E00..U+9FA5.
   *
   * A direct character scan is null-safe (returns false for null) and,
   * unlike `".*[\u4e00-\u9fa5].*".matches(...)`, also works when the input
   * still contains line breaks (regex `.` does not match `\n` by default).
   */
  def containsChinese(input: String): Boolean =
    input != null && input.exists(c => c >= '\u4e00' && c <= '\u9fa5')

  /**
   * Builds `dwd.dwd_expert_logbook_info`: cleans logbook content, keeps only
   * rows whose cleaned content contains Chinese text and whose location
   * columns are non-empty, then overwrites the target Hive table as a single
   * parquet file.
   *
   * @param spark active session with access to the `ods` and `dwd` databases
   */
  def writeToHive(spark: SparkSession): Unit = {
    // dwd.dwd_expert_logbook_info
    spark.udf.register("clean_content", (content: String) => cleanContent(content))
    spark.udf.register("contains_chinese", (input: String) => containsChinese(input))
    spark.sql(
      """
         |select id, clean_content(content) content
         |from ods.ods_expert_logbook_content
         |""".stripMargin)
      .where("contains_chinese(content) = true")
      .createOrReplaceTempView("v1")
    // `col > ''` filters out both NULL and empty-string location fields.
    val expertLogbookDetailDF = spark.sql(
      """
         |select v2.*, v1.content from v1
         |join ods.ods_expert_logbook v2 on v1.id = v2.id
         |where province_name > '' and city_name > '' and county_name > ''
         |""".stripMargin)
    // repartition(1): the result is small; write a single parquet file.
    expertLogbookDetailDF.repartition(1)
      .write.mode(SaveMode.Overwrite)
      .format("parquet")
      .saveAsTable("dwd.dwd_expert_logbook_info")
  }

  /**
   * Entry point. Usage: DwdApp &lt;env&gt;
   *
   * Exits with status 1 when the env argument is missing or invalid.
   */
  def main(args: Array[String]): Unit = {
    // Fail fast with a usage message instead of an ArrayIndexOutOfBoundsException.
    if (args.isEmpty) {
      System.err.println("usage: DwdApp <env>")
      System.exit(1)
    }
    val env = args(0)
    println(s"env=$env")
    if (!SparkUtil.verifyEnv(env))
      System.exit(1)
    val spark: SparkSession = SparkUtil.initSparkSession(env, "DwdApp")

    // Ensure the session is stopped even if the job fails.
    try writeToHive(spark)
    finally spark.stop()
  }

}
