package com.ddxz.flink.dataset

import org.apache.flink.api.common.functions.MapFunction
import org.apache.flink.api.common.operators.Order
import org.apache.flink.api.scala.ExecutionEnvironment
import org.apache.flink.api.scala._

/**
 * Demo of Flink DataSet aggregation operators on a small in-memory dataset:
 * `sum` / `min` / `max` (field-position based), `minBy` / `maxBy`,
 * `groupBy` + per-group aggregation, `sortGroup`, and `first(n)`.
 *
 * Records are 3-tuples of (String key, Int, Double); keys "b" and "c" repeat
 * so grouped aggregations produce multiple result rows.
 *
 * NOTE: chained aggregations such as `.sum(2).max(1)` are combined by Flink
 * into a single aggregation pass, yielding one row that carries both the sum
 * of field 2 and the max of field 1 — they are not applied sequentially.
 */
object MyAgg {
  // Explicit main instead of `extends App`: the App trait's DelayedInit-based
  // body has initialization-order pitfalls and is discouraged for entry points.
  def main(args: Array[String]): Unit = {
    val env = ExecutionEnvironment.getExecutionEnvironment

    val dataset1 = env.fromCollection(
      List(("a", 1, 1.1), ("b", 2, 2.2), ("c", 3, 3.3), ("b", 4, 4.4), ("c", 5, 5.5))
    )

    // Combined aggregation: sum of field 2 together with max of field 1 (one result row).
    dataset1.sum(2).max(1).print()
    // Per-key aggregation: group on field 0, then sum field 2 and take min of field 1.
    dataset1.groupBy(0).sum(2).min(1).print()
    // minBy/maxBy select whole tuples (here on field 0) after the global sum.
    dataset1.sum(2).maxBy(0).minBy(0).print()
    // Global sum of the Int field.
    dataset1.sum(1).print()
    // Global max of the Int field (other fields of the result row are arbitrary).
    dataset1.max(1).print()
    // Sort each group descending on field 2, take the per-group max, then first 2 rows.
    dataset1.groupBy(0).sortGroup(2, Order.DESCENDING).max(2).first(2).print()

    // Scala-API lambdas replace the anonymous Java MapFunction classes; the
    // implicit TypeInformation comes from `org.apache.flink.api.scala._`.
    // Even-valued records, rendered as a "<->"-joined string paired with a count of 1.
    dataset1
      .filter(item => item._2 % 2 == 0)
      .map(value => (value._1 + "<->" + value._2 + "<->" + value._3, 1))
      .first(2)
      .print()

    // Odd-valued records, rendered with "<=>" and summed on the count field.
    dataset1
      .filter(item => item._2 % 2 != 0)
      .map(value => (value._1 + "<=>" + value._2 + "<=>" + value._3, 1))
      .sum(1)
      .print()
  }
}
