package com.tech

import com.tech.common.KafkaUtil
import com.tech.config.ApplicationConfig
import com.tech.java4ParseSql.process.CustomerGroupCalculationServiceImpl
import org.apache.log4j.{Level, Logger}
import org.apache.spark.sql.SparkSession

import java.time.LocalDate
import java.util

/**
 * Polling monitor that recomputes "dynamic" customer groups for active D365
 * marketing activities and pushes newly matched customers to Kafka.
 *
 * Each cycle: build a local SparkSession, load the relevant Kudu tables,
 * find the dynamic groups referenced by currently active activities,
 * evaluate each group's stored calculation rule (compiled to SQL by the
 * Java-side [[CustomerGroupCalculationServiceImpl]]), anti-join against
 * `customer_activity` to keep only customers not yet processed, send each
 * remaining (customer, activity, process, member) tuple to Kafka, then
 * sleep 120 seconds and tear the SparkSession down.
 */
object DynamicGroupMonitor4D365 {

  /**
   * Entry point: runs [[foreachDynamicGroup]] in an infinite loop.
   *
   * NOTE(review): there is no try/catch around the loop body — any exception
   * from a cycle (Kudu/Kafka outage, malformed rule SQL) terminates the whole
   * process. Also note a fresh SparkSession is created and closed on every
   * iteration inside foreachDynamicGroup; consider reusing one session.
   */
  def main(args: Array[String]): Unit = {
    println(s"创建 DynamicGroupMonitor4D365 spark ${ApplicationConfig.PROJECT}")

    while (true) {
      foreachDynamicGroup()
    }

  }

  /**
   * Runs one full monitoring cycle (see object-level doc), then sleeps
   * 120 seconds and closes the SparkSession it created.
   *
   * Side effects: reads 8 Kudu tables, registers temp views, prints
   * diagnostics to stdout, and sends one Kafka message per newly matched
   * customer via [[KafkaUtil.send2Kafka]].
   */
  def foreachDynamicGroup(): Unit = {

    // Silence Spark's internal logging; only WARN and above from org.*.
    Logger.getLogger("org").setLevel(Level.WARN)

    println("创建spark")
    // NOTE(review): master is hard-coded to local[*]; verify this is intended
    // for the deployment target rather than being externally configured.
    val spark = SparkSession
      .builder
      .appName("DynamicGroupMonitor")
      .config("spark.debug.maxToStringFields", "5000")
      .config("spark.sql.session.timeZone", ApplicationConfig.TIMEZONE)
      .master("local[*]")
      .getOrCreate
    println("读取sql")


    // Marketing activities for this project, loaded from Kudu.
    val df_tsr_marketing_activity = spark.read
      .format("org.apache.kudu.spark.kudu")
      .option("kudu.master", ApplicationConfig.KUDU_MASTER)
      .option("kudu.table", "tsr_marketing_activity")
      .load()
      .filter(s"project = '${ApplicationConfig.PROJECT}'")

    df_tsr_marketing_activity.createOrReplaceTempView("tsr_marketing_activity")

    // Activity -> process -> customer-group link table, loaded from Kudu.
    val df_tsr_marketing_process_customer = spark.read
      .format("org.apache.kudu.spark.kudu")
      .option("kudu.master", ApplicationConfig.KUDU_MASTER)
      .option("kudu.table", "tsr_marketing_process_customer")
      .load()
      .filter(s"project = '${ApplicationConfig.PROJECT}'")

    df_tsr_marketing_process_customer.createOrReplaceTempView("tsr_marketing_process_customer")

    // Snapshot of "today" used to match periodic activities (yearly/monthly/
    // weekly period_value checks below). Taken once per cycle.
    val now = LocalDate.now()

    // Dynamic group ids referenced by currently active activities.
    // Active means: status 100000002, current time within [begin_time,
    // end_time], group_type 100000001 (dynamic), and either a one-shot
    // activity (activity_type = 0) or a periodic one (activity_type = 1)
    // whose period_unit/period_value matches today:
    //   100000000 = day of year, 100000001 = day of month,
    //   100000002 = day of week, 100000003 = (presumably daily — no value
    //   check; TODO confirm the enum semantics against the D365 schema).
    val df_group = spark.sql(
      s"""
        |select
        |	distinct mpc.customer_group_id group_id
        |from
        |	tsr_marketing_activity ma
        |left join
        |	tsr_marketing_process_customer mpc
        |on
        |	ma.id = mpc.activity_id
        |where
        |	ma.status = '100000002'
        |and
        |	current_timestamp() between ma.begin_time and ma.end_time
        |and
        |	mpc.customer_group_id is not null
        |and
        |	mpc.group_type = 100000001
        |and
        | (ma.activity_type = 0
        |   or
        |  (ma.activity_type = 1 and ma.period_unit = 100000000 and ma.period_value = ${now.getDayOfYear})
        |   or
        |  (ma.activity_type = 1 and ma.period_unit = 100000001 and ma.period_value = ${now.getDayOfMonth})
        |   or
        |  (ma.activity_type = 1 and ma.period_unit = 100000002 and ma.period_value = ${now.getDayOfWeek.getValue})
        |   or
        |  (ma.activity_type = 1 and ma.period_unit = 100000003)
        |  )
        |""".stripMargin)
    df_group.show(100)

    // (group_id, activity_id, process_id) triples for the same active
    // activities. NOTE(review): the WHERE clause is duplicated verbatim from
    // df_group above — if the filter ever changes, both queries must be kept
    // in sync (df_group is just the distinct group_ids of this result).
    val df_group_activity_process = spark.sql(
      s"""
        |select
        |	mpc.customer_group_id group_id,
        |	ma.id activity_id,
        |	mpc.process_id process_id
        |from
        |	tsr_marketing_activity ma
        |left join
        |	tsr_marketing_process_customer mpc
        |on
        |	ma.id = mpc.activity_id
        |where
        |	ma.status = '100000002'
        |and
        |	current_timestamp() between ma.begin_time and ma.end_time
        |and
        |	mpc.customer_group_id is not null
        |and
        |	mpc.group_type = 100000001
        |and
        | (ma.activity_type = 0
        |   or
        |  (ma.activity_type = 1 and ma.period_unit = 100000000 and ma.period_value = ${now.getDayOfYear})
        |   or
        |  (ma.activity_type = 1 and ma.period_unit = 100000001 and ma.period_value = ${now.getDayOfMonth})
        |   or
        |  (ma.activity_type = 1 and ma.period_unit = 100000002 and ma.period_value = ${now.getDayOfWeek.getValue})
        |   or
        |  (ma.activity_type = 1 and ma.period_unit = 100000003)
        |  )
        |""".stripMargin)
    df_group_activity_process.show(100, false)

    // Customer -> member id mapping, used to enrich the final Kafka payload.
    val df_customer = spark.read
      .format("org.apache.kudu.spark.kudu")
      .option("kudu.master", ApplicationConfig.KUDU_MASTER)
      .option("kudu.table", "customer")
      .load()
      .filter(s"project = '${ApplicationConfig.PROJECT}'")
      .selectExpr("customer_id", "member_id")

    // Cached because it is joined once per group rule in the loop below.
    df_customer.cache()

    println("df_customer:" + df_customer.count())
    df_customer.show(100, false)

    // Materialize the group ids on the driver; the per-group work below is
    // driven from the driver, not distributed.
    val rows = df_group.collect()

    val iterator = rows.iterator

    // The following temp views are registered because the rule-generated SQL
    // (built by CustomerGroupCalculationServiceImpl) may reference them.
    val df_customer_all = spark.read
      .format("org.apache.kudu.spark.kudu")
      .option("kudu.master", ApplicationConfig.KUDU_MASTER)
      .option("kudu.table", "customer")
      .load()
      .filter(s"project = '${ApplicationConfig.PROJECT}'")

    df_customer_all.createOrReplaceTempView("customer")

    val df_dimdatagroupdetail = spark.read
      .format("org.apache.kudu.spark.kudu")
      .option("kudu.master", ApplicationConfig.KUDU_MASTER)
      .option("kudu.table", "dimdatagroupdetail")
      .load()
      .filter(s"project = '${ApplicationConfig.PROJECT}'")

    df_dimdatagroupdetail.createOrReplaceTempView("dimdatagroupdetail")

    val df_customer_tag_result = spark.read
      .format("org.apache.kudu.spark.kudu")
      .option("kudu.master", ApplicationConfig.KUDU_MASTER)
      .option("kudu.table", "customer_tag_result")
      .load()
      .filter(s"project = '${ApplicationConfig.PROJECT}'")

    df_customer_tag_result.createOrReplaceTempView("customer_tag_result")

    val df_customer_group_detail = spark.read
      .format("org.apache.kudu.spark.kudu")
      .option("kudu.master", ApplicationConfig.KUDU_MASTER)
      .option("kudu.table", "customer_group_detail")
      .load()
      .filter(s"project = '${ApplicationConfig.PROJECT}'")

    df_customer_group_detail.createOrReplaceTempView("customer_group_detail")

    val df_retailevent = spark.read
      .format("org.apache.kudu.spark.kudu")
      .option("kudu.master", ApplicationConfig.KUDU_MASTER)
      .option("kudu.table", "retailevent")
      .load()
      .filter(s"project = '${ApplicationConfig.PROJECT}'")

    df_retailevent.createOrReplaceTempView("retailevent")

    // Customers already linked to an activity; used as the anti-join side to
    // avoid re-sending customers that were already processed.
    val df_customer_activity = spark.read
      .format("org.apache.kudu.spark.kudu")
      .option("kudu.master", ApplicationConfig.KUDU_MASTER)
      .option("kudu.table", "customer_activity")
      .load()
      .filter(s"project = '${ApplicationConfig.PROJECT}'")
      .selectExpr("customer_id", "activity_id")

    //    val df_customer_activity_dynamic = spark.read
    //      .format("org.apache.kudu.spark.kudu")
    //      .option("kudu.master", ApplicationConfig.KUDU_MASTER)
    //      .option("kudu.table", "customer_activity_dynamic")
    //      .load()
    //      .selectExpr("customer_id", "activity_id")


    // For each dynamic group: compile its stored rule to SQL, evaluate it,
    // drop already-processed customers, and send the rest to Kafka.
    while (iterator.hasNext) {
      val group_id = iterator.next().getString(0)
      println(group_id)

      // One calculation rule per group (limit 1 picks an arbitrary row if
      // several processes share the group — presumably they carry the same
      // group_calculation; TODO confirm).
      val df_group_rule = spark.sql(s"select group_calculation from tsr_marketing_process_customer where customer_group_id = '$group_id' limit 1")
      val iterator_rule = df_group_rule.collect().iterator
      val impl: CustomerGroupCalculationServiceImpl = new CustomerGroupCalculationServiceImpl
      // Parameter map passed to the Java SQL builder (empty here).
      val map: util.Map[Integer, AnyRef] = new util.HashMap[Integer, AnyRef]()
      while (iterator_rule.hasNext) {
        val row = iterator_rule.next()
        val rule = row.getString(0)
        println(rule)
        // Compile the rule definition into executable Spark SQL; the builder
        // emits a "myProject" placeholder that is substituted here, and CRLFs
        // are stripped so the statement is a single line.
        val groupCalculation = impl.getGroupCalculation(rule)
        val sql = impl.getSql(groupCalculation, map).replace("myProject", ApplicationConfig.PROJECT).replace("\r\n", "")
        println(sql)
        val dataFrame = spark.sql(sql)
        // Rule output exposes cust_id; normalize to (group_id, customer_id).
        val df_group_customer = dataFrame.selectExpr(s"'$group_id' as group_id", "cust_id as customer_id")
        df_group_customer.show(100, false)


        // Enrich with member_id and (activity_id, process_id), then anti-join
        // to keep only customers not yet present in customer_activity for
        // that activity.
        val df_final = df_group_customer
          .join(df_customer, Seq("customer_id"), "inner")
          .join(df_group_activity_process, Seq("group_id"), "inner")
//          .join(df_customer_activity_dynamic, Seq("customer_id", "activity_id"), "left_anti")
          .join(df_customer_activity, Seq("customer_id", "activity_id"), "left_anti")
          .selectExpr("customer_id", "activity_id", "process_id", "member_id")
        df_final.show(100, false)
        println("df_final.count:" + df_final.count())

        // Driver-side iteration: one Kafka message per matched customer.
        val array = df_final.collect()

        val finalIterator = array.iterator
        while (finalIterator.hasNext) {

          val finalRow = finalIterator.next()
          val finalCustomer = finalRow.getString(0)
          val finalActivity = finalRow.getString(1)
          val finalProcess = finalRow.getString(2)
          val finalMember = finalRow.getString(3)
          KafkaUtil.send2Kafka(ApplicationConfig.DYNAMIC_CUSTOMER_TOPIC, ApplicationConfig.PROJECT, finalActivity, finalProcess, finalCustomer, finalMember)
        }

        //        println(System.currentTimeMillis())
        //        df_final
        //          .selectExpr("customer_id", "activity_id", s"${System.currentTimeMillis()*1000} as create_time")
        //          .write
        //          .format("org.apache.kudu.spark.kudu")
        //          .option("kudu.master", ApplicationConfig.KUDU_MASTER)
        //          .option("kudu.table", "customer_activity_dynamic")
        //          .mode("append")
        //          .save()
        //        println(System.currentTimeMillis())
      }


    }

    println("执行完毕，休眠120秒")

    // Throttle the polling loop: 120s pause before the caller starts the
    // next cycle (which rebuilds the SparkSession from scratch).
    Thread.sleep(120000)

    spark.close()

  }


}
