package com.zhaosc.spark.stream

import org.apache.spark.streaming.StreamingContext
import org.apache.spark.SparkConf
import org.apache.spark.storage.StorageLevel
import org.apache.spark.streaming.kafka.KafkaUtils
import org.apache.spark.streaming.Durations

/**
 * Windowed word-count over a Kafka topic using the Spark Streaming receiver API.
 *
 * Consumes messages from the "test" topic (expected log format: "date username",
 * whitespace-separated), and every 10 seconds prints the per-token counts seen in
 * the last 15 seconds.
 */
object WindowOperator {

  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
      .setMaster("local[2]") // at least 2 threads: one for the receiver, one for processing
      .setAppName("WindowOperator")
      .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
    // Process micro-batches every 5 seconds.
    val ssc = new StreamingContext(conf, Durations.seconds(5))

    // Checkpointing is mandatory when using reduceByKeyAndWindow with an inverse
    // function, because incremental window state must be recoverable.
    ssc.checkpoint("D:\\tmp\\kafka")

    // Topic name -> number of receiver threads.
    val topic = Map("test" -> 1)
    // Receiver-based stream: (ZooKeeper quorum, consumer group, topics, storage level).
    val kafkaStream = KafkaUtils.createStream(
      ssc, "localhost:2181", "MyFirstConsumerGroup", topic, StorageLevel.MEMORY_AND_DISK)

    val result = kafkaStream.flatMap { case (key, message) =>
      println("********v.1*******" + key)
      println("********v._2*******" + message)
      // BUG FIX: the original returned the message String directly, which Scala
      // implicitly flattens into characters — counting chars, not words. Split
      // on whitespace so we count the "date username" tokens instead.
      message.split("\\s+")
    }.map(word => (word, 1))
      // Incremental windowed count: window = 15s, slide = 10s. The second function
      // subtracts counts of the batches that fell out of the window, which is
      // cheaper than recomputing the whole window each slide.
      .reduceByKeyAndWindow(
        (v1: Int, v2: Int) => v1 + v2,
        (v1: Int, v2: Int) => v1 - v2,
        Durations.seconds(15),
        Durations.seconds(10))

    result.print()
    ssc.start()
    ssc.awaitTermination()
  }
}