package com.zt.bigdata.flink.stream

import org.apache.flink.api.common.functions.AggregateFunction
import org.apache.flink.streaming.api.scala._
import org.apache.flink.streaming.api.windowing.time.Time
import org.apache.flink.streaming.connectors.wikiedits.{WikipediaEditEvent, WikipediaEditsSource}


object WikipediaAnalysis {

  /** Streams live Wikipedia edit events, keys them by editor user name, and
    * sums each user's byte diff over tumbling 5-second processing-time windows.
    * Emits `(user, totalByteDiff)` pairs to stdout.
    *
    * @param args command-line arguments (unused)
    */
  def main(args: Array[String]): Unit = {
    val see = StreamExecutionEnvironment.getExecutionEnvironment
    val edits = see.addSource(new WikipediaEditsSource())

    // Partition the stream so each user's edits are windowed independently.
    val keyedEdits = edits.keyBy(_.getUser)

    val result = keyedEdits
      .timeWindow(Time.seconds(5))
      .aggregate(new AggregateFunction[WikipediaEditEvent, (String, Long), (String, Long)] {
        // Fresh window state: no user seen yet, zero bytes accumulated.
        // NOTE: `0L` (uppercase) — the lowercase `l` suffix is deprecated
        // because it is easily mistaken for the digit `1`.
        override def createAccumulator(): (String, Long) = ("", 0L)

        // Fold one event into the accumulator: remember the (constant per key)
        // user name and add the event's byte diff to the running total.
        override def add(in: WikipediaEditEvent, acc: (String, Long)): (String, Long) =
          (in.getUser, acc._2 + in.getByteDiff)

        // The accumulator already has the output shape; return it unchanged.
        override def getResult(acc: (String, Long)): (String, Long) = acc

        // Combine two partial accumulators (needed for merging windows,
        // e.g. session windows); byte counts are additive.
        override def merge(acc: (String, Long), acc1: (String, Long)): (String, Long) =
          (acc._1, acc._2 + acc1._2)
      })

    // FIX: the original job never attached a sink, so the aggregation was
    // computed but its output was unobservable. Print each window result.
    result.print()

    see.execute("Wikipedia user byte-diff analysis")
  }
}


//def aggregate[ACC: TypeInformation, R: TypeInformation](
//      aggregateFunction: AggregateFunction[T, ACC, R]): DataStream[R] = {
//
//    val accumulatorType: TypeInformation[ACC] = implicitly[TypeInformation[ACC]]
//    val resultType: TypeInformation[R] = implicitly[TypeInformation[R]]
//
//    asScalaStream(javaStream.aggregate(
//      clean(aggregateFunction), accumulatorType, resultType))
//  }
