package com.zt.bigdata.flink.stream

import java.lang

import com.zt.bigdata.template.spark.redis.RedisTemplate
import org.apache.flink.configuration.Configuration
import org.apache.flink.streaming.api.datastream.DataStreamUtils
import org.apache.flink.streaming.api.functions.ProcessFunction
import org.apache.flink.streaming.api.functions.sink.SinkFunction
import org.apache.flink.streaming.api.scala._
import org.apache.flink.streaming.api.scala.function.WindowFunction
import org.apache.flink.streaming.api.windowing.assigners.{GlobalWindows, TumblingProcessingTimeWindows}
import org.apache.flink.streaming.api.windowing.time.Time
import org.apache.flink.streaming.api.windowing.windows.{GlobalWindow, TimeWindow}
import org.apache.flink.util.Collector
import redis.clients.jedis.Jedis

import scala.util.Try

/**
  * @ClassName Test
  * @Description
  * @Author zhangtonghy
  * @Date 2019-07-10 16:51
  * @Copyright: 版权所有 (C) zt zt.
  * @注意 ：本内容仅限于zt内部传阅，禁止外泄以及用于其他的商业目的
  **/
/** Immutable pair of a word and an associated count. */
final case class Word(word: String, count: Int)


object Test {

  /**
    * Streaming demo job: reads lines from a local socket, parses them as
    * integers, pushes every even value onto the Redis list "adb" and prints
    * each parsed value to stdout.
    *
    * Defensive changes vs. the original:
    *  - non-numeric socket lines are silently dropped instead of failing the
    *    whole job with a NumberFormatException;
    *  - the Jedis handle is opened/closed per task instance and close() is
    *    null-guarded in case open() never succeeded;
    *  - every element is forwarded to the collector so the downstream
    *    print() sink actually receives data.
    */
  def main(args: Array[String]): Unit = {
    val env = StreamExecutionEnvironment.getExecutionEnvironment
    env.setParallelism(1)

    val strings: DataStream[String] = env.socketTextStream("localhost", 999)

    // Parse leniently: a malformed line yields an empty list and is dropped,
    // so stray input cannot kill the job.
    val someIntegers: DataStream[Int] =
      strings.flatMap(line => Try(line.trim.toInt).toOption.toList)

    // Initialise the shared Redis connection pool before the job graph runs.
    RedisTemplate.init("localhost:6379")

    someIntegers.process(new ProcessFunction[Int, String] {
      // Jedis is not serializable; acquire the connection lazily in open()
      // on each task instance rather than capturing it in the closure.
      @transient private var jedis: Jedis = _

      override def open(parameters: Configuration): Unit = {
        jedis = RedisTemplate.get(0)
      }

      override def processElement(value: Int,
                                  ctx: ProcessFunction[Int, String]#Context,
                                  out: Collector[String]): Unit = {
        // Side effect: even values are queued in the Redis list "adb".
        if (value % 2 == 0) {
          jedis.lpush("adb", value.toString)
        }
        // Forward every element so the chained print() is not a dead sink
        // (the original never emitted anything downstream).
        out.collect(value.toString)
      }

      override def close(): Unit = {
        // open() may have failed before jedis was assigned; guard the close.
        if (jedis != null) {
          jedis.close()
        }
      }
    }).print()

    env.execute()
  }

}
