package com.udf.flink.scala.examples

import java.sql.{Date, Timestamp}
import java.time.Duration

import com.udf.flink.udsf.TsToLong
//import org.apache.flink.table.api.bridge.scala._
import org.apache.flink.table.api._
import org.apache.flink.api.common.eventtime.{SerializableTimestampAssigner, WatermarkStrategy}
import org.apache.flink.api.common.functions.ReduceFunction
import org.apache.flink.streaming.api.windowing.time.Time
import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment
import org.apache.flink.streaming.api.scala.function.ProcessWindowFunction
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows
import org.apache.flink.streaming.api.windowing.windows.TimeWindow
import org.apache.flink.table.api.bridge.scala.StreamTableEnvironment
import org.apache.flink.util.Collector
import com.udf.flink.udsf.TsToLong
import org.apache.flink.api.scala._
import org.apache.flink.table.expressions.{ApiExpressionUtils, Expression}


object reduces {

  /** Row produced by the projection query. `rowtime` is already converted to
    * epoch milliseconds (Long) by the `TsToLong` UDF so that the DataStream
    * timestamp assigner can consume it directly.
    */
  case class User(user_id: Int, count: Int, a: String,  rowtime: Long)

  /** Builds a datagen-backed table, converts it to a DataStream, and counts
    * elements per key in 60-second tumbling event-time windows.
    */
  def main(args: Array[String]): Unit = {

    // Scala DataStream API environment.
    val env = StreamExecutionEnvironment.getExecutionEnvironment

    // Table API bridge sharing the same execution environment.
    val tableEnv = StreamTableEnvironment.create(env)

    // Datagen source: 10 rows/s, user_id as a bounded sequence 1..200,
    // cnt in [1,2], a single random character in `a`, and a computed
    // rowtime column (localtimestamp) carrying the table's watermark.
    val sql= """CREATE TABLE ordr (
              user_id INT,
              cnt INT,
              a STRING,
              rowtime AS localtimestamp,
              WATERMARK FOR rowtime AS rowtime
              ) WITH (
                'connector' = 'datagen',
              'rows-per-second'='10',
              'fields.user_id.kind'='sequence',
              'fields.user_id.start'='1',
              'fields.user_id.end'='200',
              'fields.cnt.min'='1',
              'fields.cnt.max'='2',
              'fields.a.length'='1'
              )"""
    tableEnv.executeSql(sql)

    // Register the UDF that maps the TIMESTAMP column to epoch millis (Long).
    tableEnv.createTemporarySystemFunction("TsToLong", classOf[TsToLong])
    val tb = tableEnv.sqlQuery("""select user_id,cnt,a,TsToLong(rowtime) rowtime from ordr""")

    // Event-time timestamps/watermarks are re-assigned on the DataStream side,
    // tolerating up to 60 seconds of out-of-order records. The explicit [User]
    // type argument is required: forBoundedOutOfOrderness is a Java generic
    // static method, so Scala cannot infer the type from a bare val.
    val watermarks = WatermarkStrategy
      .forBoundedOutOfOrderness[User](Duration.ofSeconds(60))
      .withTimestampAssigner(new SerializableTimestampAssigner[User] {
        override def extractTimestamp(element: User, recordTimestamp: Long): Long = element.rowtime
      })

    tableEnv.toDataStream(tb, classOf[User])
      .assignTimestampsAndWatermarks(watermarks)
      .keyBy(_.a)
      .window(TumblingEventTimeWindows.of(Time.seconds(60)))
      // Alternative: pre-aggregate with
      //   .reduce(new MyReduceFunction, new MyProcessFunction)
      // to keep only the min-by-count element per window.
      .process(new MyProcessFunction2)
      .print()

    env.execute("rds")
  }

  /** Incremental pre-aggregation: keeps the element with the smaller `count`. */
  class MyReduceFunction extends ReduceFunction[User] {
    override def reduce(value1: User, value2: User): User =
      if (value1.count > value2.count) value2 else value1
  }

  /** Emits (windowStart, element). Intended to run after a ReduceFunction,
    * in which case `elements` contains exactly the one pre-aggregated value.
    */
  class MyProcessFunction extends ProcessWindowFunction[User, (Long, User), String, TimeWindow] {
    override def process(key: String, context: Context, elements: Iterable[User], out: Collector[(Long, User)]): Unit = {
      // Safe only because the windowing runtime never fires an empty window.
      val min = elements.iterator.next
      out.collect((context.window.getStart, min))
    }
  }

  /** Prints every element in the window and emits a summary line with the
    * window description and the element count.
    */
  class MyProcessFunction2 extends ProcessWindowFunction[User, String, String, TimeWindow] {
    override def process(key: String, context: Context, elements: Iterable[User], out: Collector[String]): Unit = {
      // Single pass: print each element and count it, without mutable state.
      val count = elements.foldLeft(0) { (n, user) =>
        println(user)
        n + 1
      }
      out.collect(s"Window ${context.window} , count : ${count}")
    }
  }

}


