package com.zt.bigdata.flink.batch


import org.apache.flink.api.scala._

/**
  * Flink's native `map`/`filter` operators do not accept pattern-matching (`case`)
  * function literals; the `mapWith`/`filterWith` operators from
  * `org.apache.flink.api.scala.extensions` do.
  */
object Main {

  import org.apache.flink.api.scala.extensions._

  /** Simple 2-D point used to demonstrate the pattern-matching extension operators. */
  final case class Point(x: Double, y: Double)

  /**
    * Builds a small in-memory dataset of points, keeps those with `x > 1`,
    * shifts each surviving point's `y` up by 1, collects the result locally,
    * and prints it.
    *
    * @param args unused command-line arguments
    */
  def main(args: Array[String]): Unit = {
    val env = ExecutionEnvironment.getExecutionEnvironment
    val ds = env.fromCollection(Seq(Point(1, 2), Point(3, 4), Point(5, 6)))

    val shifted = ds
      // filterWith accepts a partial-function literal, unlike plain filter
      .filterWith {
        case Point(x, _) => x > 1
      }
      // mapWith likewise allows `case` deconstruction of the Point.
      // NOTE: a former `case xx if xx.x < 0` branch was removed — it was
      // unreachable after the x > 1 filter and produced the same tuple anyway.
      .mapWith {
        case Point(x, y) => (x, y + 1)
      }
      .collect()

    println(shifted)
  }
}
