/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.zt.bigdata.flink.stream

import java.util
import java.util.concurrent.TimeUnit.MILLISECONDS

import com.zt.bigdata.flink.stream.ApplyDemo.MyWindowFunction3
import com.zt.bigdata.template.spark.redis.RedisTemplate
import org.apache.flink.api.scala._
import org.apache.flink.configuration.Configuration
import org.apache.flink.streaming.api.functions.sink.{RichSinkFunction, SinkFunction}
import org.apache.flink.streaming.api.functions.source.RichParallelSourceFunction
import org.apache.flink.streaming.api.functions.source.SourceFunction.SourceContext
import org.apache.flink.streaming.api.scala.{DataStream, StreamExecutionEnvironment}
import org.apache.flink.streaming.api.windowing.assigners.TumblingProcessingTimeWindows
import org.apache.flink.streaming.api.windowing.time.Time
import org.apache.flink.streaming.api.windowing.windows.TimeWindow
import org.apache.flink.util.Collector
import redis.clients.jedis.Jedis

/**
  * An example of grouped stream windowing into tumbling processing-time windows,
  * with a user-defined source and a Redis sink.
  * This example uses [[RichParallelSourceFunction]] to generate a stream of key-value pairs.
  */
object DefineSourceAndSink {

  /**
    * Sink that writes each (key, count) pair into the Redis hash "adb".
    *
    * @param address Redis address string handed to [[RedisTemplate.init]]
    *                (e.g. "localhost:6379").
    */
  private class MyRedisSink(val address: String) extends RichSinkFunction[(Long, Long)] {
    // Assigned in open(); stays null if open() was never called or failed
    // before the connection was obtained.
    private var jedis: Jedis = null

    override def open(parameters: Configuration): Unit = {
      RedisTemplate.init(address)
      jedis = RedisTemplate.get(0)
    }

    // Stores the pair as a hash field: HSET adb <key> <count>.
    override def invoke(value: (Long, Long), context: SinkFunction.Context[_]): Unit = {
      jedis.hset("adb", value._1.toString, value._2.toString)
    }

    override def close(): Unit = {
      // Guard against an NPE when open() failed before jedis was assigned;
      // otherwise the teardown error would mask the original failure.
      if (jedis != null) {
        jedis.close()
        jedis = null
      }
    }
  }

  /**
    * Parallel data source that serves a stream of key-value pairs.
    *
    * Each subtask emits 2000 elements of the form (key, 1L), one every
    * 500 ms, with the key cycling through 1..100. The source stops early
    * when [[cancel]] flips the running flag.
    */
  private class DataSource extends RichParallelSourceFunction[(Long, Long)] {
    // Volatile so a cancel() from another thread is seen by the emit loop.
    @volatile private var running = true

    override def run(ctx: SourceContext[(Long, Long)]): Unit = {
      val startTime = System.currentTimeMillis()

      val numElements = 2000
      val numKeys = 100
      var value = 1L
      var count = 0L

      while (running && count < numElements) {

        ctx.collect((value, 1L))
        // Throttle emission so windows downstream see a slow, steady stream.
        Thread.sleep(500)

        count += 1
        value += 1

        // Cycle the key space: keys run 1..numKeys, then wrap around.
        if (value > numKeys) {
          value = 1L
        }
      }

      val endTime = System.currentTimeMillis()
      println(s"Took ${endTime - startTime} msecs for ${numElements} values")
    }

    override def cancel(): Unit = running = false
  }

  /**
    * Builds and runs the pipeline: custom source -> key by value ->
    * 5-second tumbling processing-time window -> write results to Redis.
    */
  def main(args: Array[String]): Unit = {

    val env = StreamExecutionEnvironment.getExecutionEnvironment
    env.setParallelism(4)

    val stream: DataStream[(Long, Long)] = env.addSource(new DataSource)


    // Alternative pipeline using the MyRedisSink defined above with a
    // sliding window; kept for reference.
    //    stream
    //      .keyBy(0)
    //      .timeWindow(Time.of(2500, MILLISECONDS), Time.of(500, MILLISECONDS))
    //      .reduce((value1, value2) => (value1._1, value1._2 + value2._2))
    //      .addSink(new MyRedisSink("localhost:6379"))


    // Initialized here on the client; NOTE(review): with parallelism 4 this
    // may not run on the task managers that execute the window function —
    // confirm RedisTemplate.init is also reachable from open() below.
    RedisTemplate.init("localhost:6379")

    // NOTE(review): ProcessWindowFunction is not among the visible imports and
    // its open() here takes no Configuration parameter, unlike Flink's rich
    // functions — presumably a project-local base class; verify its contract.
    stream.map(x => (x._1.toString, x._2.intValue())).keyBy(_._1)
      .window(TumblingProcessingTimeWindows.of(Time.seconds(5)))
      .apply(new ProcessWindowFunction[(String, Int), Unit, String, TimeWindow] {
        var jedis: Jedis = null

        override def open(): Unit = {
          jedis = RedisTemplate.get(0)
        }

        // Writes every element of the window into the Redis hash "adb"
        // (HMSET adb <key> <count>); emits nothing downstream (Unit output).
        override def process(key: String, window: TimeWindow, input: Iterable[(String, Int)], out: Collector[Unit]): Unit = {
          val map = new util.HashMap[String, String]()
          input.foreach {
            x => {
              map.put(x._1, x._2.toString)
            }
          }
          jedis.hmset("adb", map)
        }

        override def close(): Unit = {
          jedis.close()
        }
      })


    env.execute()
  }


}
