package org.niit.mock

import org.apache.kafka.clients.consumer.{ConsumerConfig, ConsumerRecord}
import org.apache.kafka.common.serialization.StringDeserializer
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.streaming.dstream.{DStream, InputDStream}
import org.apache.spark.streaming.kafka010.{ConsumerStrategies, KafkaUtils, LocationStrategies}
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.niit.bean.AdClickData
import org.niit.util.MyKafkaUtil

object TestData {

    /** Milliseconds in a 10-minute window (60 * 10000 = 600,000 ms). */
    private val TenMinutesMs: Long = 60L * 10000L

    /**
      * Floors a millisecond timestamp down to the start of its containing window.
      *
      * Integer division truncates toward zero, so `ts / windowMs * windowMs`
      * yields the largest multiple of `windowMs` that is <= `ts` (for non-negative input).
      *
      * @param timestampMs epoch timestamp in milliseconds
      * @param windowMs    window size in milliseconds; defaults to 10 minutes
      * @return the window-start timestamp in milliseconds
      */
    def floorToWindow(timestampMs: Long, windowMs: Long = TenMinutesMs): Long =
        timestampMs / windowMs * windowMs

    def main(args: Array[String]): Unit = {

        // Kafka consumption path kept for reference; disabled while testing the
        // window-floor computation below.
//        val sparkConf = new SparkConf().setMaster("local[*]").setAppName("SparkStreaming")
//        val ssc = new StreamingContext(sparkConf, Seconds(3))
//
//        val kfDataDS: InputDStream[ConsumerRecord[String, String]] = MyKafkaUtil.getKafkaStream("Test","AD1", ssc)
//        val adClickData: DStream[AdClickData] = kfDataDS.map(kafkaData => {
//            val data = kafkaData.value()
//            val datas = data.split(" ")
//            // NOTE(review): original indexed `data(1)`..`data(4)` (chars of the raw
//            // string) instead of `datas(1)`..`datas(4)` (split fields) — fixed here.
//            println(s"Kafka数据：${datas(0)},${datas(1)},${datas(2)},${datas(3)},${datas(4)}")
//            AdClickData(datas(0), datas(1), datas(2), datas(3), datas(4))
//        })
//
//        adClickData.print()
//        ssc.start()
//        ssc.awaitTermination()

        // Demonstrate flooring a sample timestamp to its 10-minute window start.
        val windowStart = floorToWindow(1680793352123L)
        println(windowStart)

    }

}
