import java.text.SimpleDateFormat
import java.util.{Date, Properties}

import com.google.gson.JsonObject
import org.apache.kafka.clients.producer.{KafkaProducer, ProducerConfig, ProducerRecord}
import org.apache.spark.sql.{DataFrame, SparkSession}

/**
 * Spark driver that selects (user_id, college_code) pairs from the warehouse
 * and publishes each pair as a JSON message to the Kafka topic "jn_zs".
 *
 * NOTE: because this is a Scala `object`, its fields (including `producer`)
 * are initialized lazily once per JVM. The send closure below runs on
 * executors, so each executor JVM gets its own KafkaProducer instance.
 */
object kafkaProducerBJ {
  // Kafka producer configuration.
  val props = new Properties()
  // Jinan clusters (kept for reference)
  //props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "172.17.104.230:9092")
  //props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "172.17.104.226:9092,172.17.104.225:9092,172.17.104.227:9092")
  // Beijing cluster
  props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "dpcdh106:9092,dpcdh107:9092,dpcdh108:9092")
  props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer")
  props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer")
  // acks=-1 ("all"): wait for the full in-sync replica set to acknowledge each record.
  props.put(ProducerConfig.ACKS_CONFIG, "-1")
  // Producer instance (one per JVM, see the object-level note above).
  val producer = new KafkaProducer[String, String](props)

  /**
   * Sends one JSON payload to the target topic with a null key.
   * Null payloads are skipped. The send is asynchronous; callers that need
   * delivery guarantees must call `producer.flush()` afterwards.
   */
  def kafkaProducerSend(args: String): Unit = {
    if (args != null) {
      // Jinan test topic: "dp_dw_info_test"
      val topic = "jn_zs"
      val message: ProducerRecord[String, String] = new ProducerRecord[String, String](topic, null, args)
      producer.send(message)
    }
  }

  def main(args: Array[String]): Unit = {

    val spark: SparkSession = SparkSession
      .builder()
      .appName("sink")
      .config("spark.sql.parquet.writeLegacyFormat", true)
      .config("spark.sql.hive.convertMetastoreParquet", "false")
      .getOrCreate()

    // Active users (from matomo visit log and the daily-active table) joined
    // to their affiliated college; rows without a college are dropped.
    val querySql =
      """
            select
                a.user_id
                ,b.college_code
            from
                (
                    select
                        user_id
                    from (
                        SELECT
                          user_id
                        from  ods.ods_matomo_log_visit_fun
                        where   visit_last_action_time>='2021-01-01 00:00:00'
                            and user_id is not null
                            and user_id <>'未登录用户'
                        union all
                        select
                            user_id
                        from
                            dws.dws_event_user_active
                        where dt BETWEEN date_add(current_date,-365) and '2021-01-01' ) a
                    group by user_id
                ) a
            left join
                (
                    select
                        user_id
                        ,college_code
                    from
                        dim.dim_user_affiliated_info
                    where college_code != ''
                    group by user_id,college_code
                ) b on a.user_id = b.user_id
            where b.college_code is not null

        """

    val resultDF: DataFrame = spark.sql(querySql).repartition(8)

    resultDF.show(100)

    // Send per partition so each executor's producer buffer can be flushed
    // before the task completes; `send` is asynchronous, and records still
    // buffered when the executor JVM exits would otherwise be lost.
    resultDF.rdd.foreachPartition { rows =>
      rows.foreach { row =>
        val json = new JsonObject()
        json.addProperty("user_id", row.getAs[String]("user_id"))
        json.addProperty("college_code", row.getAs[String]("college_code"))
        kafkaProducerSend(json.toString)
      }
      // Block until every record queued by this partition is acknowledged.
      producer.flush()
    }

    spark.stop()
  }
}
