package qiche.mock

import org.apache.kafka.clients.producer.{KafkaProducer, ProducerConfig, ProducerRecord}
import java.util.Properties
import com.qiche.tools.DateUtils
import org.apache.spark.sql.SparkSession
import qiche.mock.OdsCurriculumInfoMock.{getContextLocal}
import scala.util.Random

/** Mock producer that publishes visitor-log events to Kafka.
  *
  * Each record sent to the `visitor_log` topic is a JSON object of the form:
  * {"student_id":"","teacher_id":"","task_id":"","access_time":""}
  */
object WriteToKafka {
  val topic = "visitor_log"
  private val random = new Random()

  /** Samples ids from the `pingtai.ods_*` Hive tables and publishes 50,000
    * randomly combined visitor-log events to Kafka as JSON strings.
    *
    * @param spark  active session used to read the id pools from Hive
    * @param broker Kafka bootstrap server(s); defaults to the dev broker
    *               so existing callers are unaffected
    */
  def execute(spark: SparkSession, broker: String = "192.168.10.102:9092"): Unit = {
    import spark.implicits._
    // Materialize the id pools once; these are small dimension tables.
    val studentIds = spark.sql("select student_id from pingtai.ods_student_info").map(_.getString(0)).collect()
    val teacherIds = spark.sql("select teacher_id from pingtai.ods_teacher_info").map(_.getString(0)).collect()
    val taskIds    = spark.sql("select task_id from pingtai.ods_task_info").map(_.getString(0)).collect()

    // Guard against empty tables: random.nextInt(0) would otherwise fail opaquely.
    require(studentIds.nonEmpty && teacherIds.nonEmpty && taskIds.nonEmpty,
      "id pools must not be empty — check the pingtai.ods_* source tables")

    val producer = createKafkaProducer(broker)
    try {
      for (_ <- 1 to 50000) {
        val studentId = studentIds(random.nextInt(studentIds.length))
        val teacherId = teacherIds(random.nextInt(teacherIds.length))
        val taskId    = taskIds(random.nextInt(taskIds.length))
        // Random timestamp within the last 7 days (hour 1-23, minute 0-59);
        // use the object's single Random instance instead of ad-hoc calls.
        val accessTime = DateUtils.getYesterdayHour(random.nextInt(7), random.nextInt(23) + 1, random.nextInt(60))
        // BUG FIX: the original built this JSON payload but then sent an unused
        // CSV string instead. The topic contract (see object Scaladoc) is JSON,
        // so send the JSON form and drop the dead CSV variable.
        val dataJson =
          s"""{"student_id":"$studentId","teacher_id":"$teacherId","task_id":"$taskId","access_time":"$accessTime"}"""
        producer.send(new ProducerRecord[String, String](topic, dataJson))
      }
      producer.flush()
    } finally {
      // Always release the producer, even if send/flush throws.
      producer.close()
    }
  }

  /** Builds a String/String Kafka producer bound to the given bootstrap servers. */
  def createKafkaProducer(broker: String): KafkaProducer[String, String] = {
    val prop = new Properties()
    prop.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, broker)
    prop.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer")
    prop.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer")
    new KafkaProducer[String, String](prop)
  }

  def main(args: Array[String]): Unit = {
    System.setProperty("HADOOP_USER_NAME", "hadoop")
    System.setProperty("HIVE_USER_NAME", "hadoop")
    // Local Spark session named after this object's simple class name.
    val spark = getContextLocal(this.getClass.getName.split('.').last)
    execute(spark)
    spark.stop()
  }
}