package com.tech

import com.tech.common.KafkaUtil
import com.tech.config.ApplicationConfig
import com.tech.java4ParseSql.process.CustomerGroupCalculationServiceImpl
import org.apache.spark.sql.SparkSession

import java.util
import java.util.Properties

object DynamicGroupMonitor {

  /**
   * Entry point. Polls forever: each pass recomputes the dynamic customer
   * groups of all currently active marketing activities and pushes newly
   * matched customers downstream. The timestamp returned by
   * [[foreachDynamicGroup]] is fed back in as the lower bound of the next
   * pass's time window.
   */
  def main(args: Array[String]): Unit = {

    var time = 0L

    while (true) {
      time = foreachDynamicGroup(time)
    }

  }

  /**
   * Runs a single monitoring pass:
   *  1. reads from MySQL the dynamic groups referenced by currently active activities;
   *  2. for each group, resolves its stored calculation rule into a Spark SQL
   *     query and evaluates it against the Kudu tables registered below;
   *  3. appends newly matched (customer, activity) pairs to the
   *     `customer_activity_dynamic` Kudu table and notifies downstream via Kafka;
   *  4. sleeps 10 seconds, closes the Spark session and returns.
   *
   * @param lastTime epoch-second lower bound of this pass's window; 0 means
   *                 "first run" and defaults the window to the last 24 hours.
   * @return the epoch-second time at which this pass started, to be supplied
   *         as `lastTime` on the next pass.
   */
  def foreachDynamicGroup(lastTime: Long = 0): Long = {

    // On the first run look back 24 hours; afterwards resume from the previous pass.
    // NOTE: start_time is only consumed by the (currently commented-out)
    // incremental filters below; it is kept for when they are re-enabled.
    val start_time = if (lastTime == 0) {
      System.currentTimeMillis() / 1000 - 60 * 60 * 24
    } else {
      lastTime
    }

    // BUG FIX: capture the moment this pass starts and return it, so the
    // caller's window actually advances. The original returned start_time
    // unchanged, which froze the window's lower bound after the first pass.
    val passStart = System.currentTimeMillis() / 1000

    // NOTE(security): JDBC endpoint and credentials are hard-coded in source;
    // they belong in ApplicationConfig or a secrets store.
    val url_shuat_socialhub = "jdbc:mysql://52.130.94.89:63306/shuat_socialhub?useSSL=false"
    val properties = new Properties
    properties.setProperty("user", "shuat")
    properties.setProperty("password", "P@ssw0rd`468")

    println("创建spark")
    // NOTE: a fresh local SparkSession is built (and closed at the end) on
    // every pass; reusing one session across passes would be cheaper.
    val spark = SparkSession
      .builder
      .appName("DynamicGroupMonitor")
      .config("spark.debug.maxToStringFields", "5000")
      .config("spark.sql.session.timeZone", ApplicationConfig.TIMEZONE)
      .master("local[*]")
      .getOrCreate
    println("读取sql")

    /** Loads a Kudu table as a DataFrame. */
    def readKudu(table: String) =
      spark.read
        .format("org.apache.kudu.spark.kudu")
        .option("kudu.master", ApplicationConfig.KUDU_MASTER)
        .option("kudu.table", table)
        .load()

    /** Loads a Kudu table and registers it as a temp view of the same name. */
    def registerKuduView(table: String): Unit =
      readKudu(table).createOrReplaceTempView(table)

    // Dynamic groups (group_type = 100000001) referenced by activities that are
    // currently active (status 100000002, now within [begin_time, end_time]).
    val group =
      """
        |(select
        |	distinct mpc.customer_group_id group_id
        |from
        |	tsr_marketing_activity ma
        |left join
        |	tsr_marketing_process_customer mpc
        |on
        |	ma.id = mpc.activity_id
        |left join
        |	shuat_datanow.customergroups cg
        |on
        |	mpc.customer_group_id = cg.id
        |where
        |	ma.status = '100000002'
        |and
        |	current_timestamp() between ma.begin_time and ma.end_time
        |and
        |	mpc.customer_group_id is not null
        |and
        |	cg.group_type = 100000001)t
        |""".stripMargin

    // (group_id, activity_id, process_id) triples for the same active activities.
    val group_activity_process =
      """
        |(select
        |	mpc.customer_group_id group_id,
        |	ma.id activity_id,
        |	mpc.process_id process_id
        |from
        |	tsr_marketing_activity ma
        |left join
        |	tsr_marketing_process_customer mpc
        |on
        |	ma.id = mpc.activity_id
        |left join
        |	shuat_datanow.customergroups cg
        |on
        |	mpc.customer_group_id = cg.id
        |where
        |	ma.status = '100000002'
        |and
        |	current_timestamp() between ma.begin_time and ma.end_time
        |and
        |	mpc.customer_group_id is not null
        |and
        |	cg.group_type = 100000001)t
        |""".stripMargin

    val df_group = spark.read.jdbc(url_shuat_socialhub, group, properties)
    df_group.show(100)
    val df_group_activity_process = spark.read.jdbc(url_shuat_socialhub, group_activity_process, properties)
    df_group_activity_process.show(100, false)

    // Customers of this project; cached because it is re-joined once per group rule.
    val df_customer = readKudu("customer")
      .filter(s"project = '${ApplicationConfig.PROJECT}'")
      .selectExpr("customer_id", "member_id")

    df_customer.cache()

    println("df_customer:" + df_customer.count())
    df_customer.show(100, false)

    // Register the Kudu tables that the generated group-calculation SQL may reference.
    registerKuduView("customer")
    registerKuduView("dimdatagroupdetail")
    registerKuduView("customer_tag_result")
    registerKuduView("customer_group_detail")
    registerKuduView("retailevent")

    // Pairs already recorded in customer_activity; anti-joined below so a
    // customer is pushed at most once per activity.
    val df_customer_activity = readKudu("customer_activity")
      .selectExpr("customer_id", "activity_id")

    val df_customer_activity_dynamic = readKudu("customer_activity_dynamic")
      .selectExpr("customer_id", "activity_id")

    df_customer_activity_dynamic.show(1, false)

    for (groupRow <- df_group.collect()) {
      val group_id = groupRow.getString(0)
      println(group_id)

      // NOTE(security): group_id is interpolated directly into SQL. It comes
      // from our own database, but a parameterized query would still be safer.
      val group_rule = s"(select group_calculation from shuat_datanow.customergroups where id = $group_id)t"
      val df_group_rule = spark.read.jdbc(url_shuat_socialhub, group_rule, properties)
      val impl: CustomerGroupCalculationServiceImpl = new CustomerGroupCalculationServiceImpl
      val map: util.Map[Integer, AnyRef] = new util.HashMap[Integer, AnyRef]()

      for (ruleRow <- df_group_rule.collect()) {
        val rule = ruleRow.getString(0)
        println(rule)
        // Translate the stored rule definition into executable Spark SQL.
        val groupCalculation = impl.getGroupCalculation(rule)
        val sql = impl.getSql(groupCalculation, map).replace("myProject", ApplicationConfig.PROJECT).replace("\r\n", "")
        println(sql)
        val dataFrame = spark.sql(sql)
        val df_group_customer = dataFrame.selectExpr(s"$group_id as group_id", "cust_id as customer_id")
        df_group_customer.show(100, false)

        // Keep only project customers, attach activity/process ids, and drop
        // pairs already present in customer_activity (left_anti).
        val df_final = df_group_customer
          .join(df_customer, Seq("customer_id"), "inner")
          .join(df_group_activity_process, Seq("group_id"), "inner")
          //          .join(df_customer_activity_dynamic, Seq("customer_id", "activity_id"), "left_anti")
          .join(df_customer_activity, Seq("customer_id", "activity_id"), "left_anti")
          .selectExpr("customer_id", "activity_id", "process_id", "member_id")
        println("df_final:")
        df_final.show(100, false)

        // Persist the matched pairs. NOTE(review): dedup above anti-joins
        // customer_activity, not this table — the dynamic-table anti-join is
        // commented out, so this write alone does not prevent re-sends; confirm
        // something else promotes rows into customer_activity.
        df_final
          .selectExpr("customer_id", "activity_id", s"${System.currentTimeMillis()} as create_time")
          .write
          .format("org.apache.kudu.spark.kudu")
          .option("kudu.master", ApplicationConfig.KUDU_MASTER)
          .option("kudu.table", "customer_activity_dynamic")
          .mode("append")
          .save()

        // Notify downstream consumers of every new (customer, activity) match.
        for (finalRow <- df_final.collect()) {
          val finalCustomer = finalRow.getString(0)
          val finalActivity = finalRow.getString(1)
          val finalProcess = finalRow.getString(2)
          val finalMember = finalRow.getString(3)
          KafkaUtil.send2Kafka(ApplicationConfig.DYNAMIC_CUSTOMER_TOPIC, ApplicationConfig.PROJECT, finalActivity, finalProcess, finalCustomer, finalMember)
        }
      }

    }

    println("执行完毕，休眠10秒")

    Thread.sleep(10000)

    spark.close()

    passStart

  }


}
