package com.mjf.transformation

import org.apache.flink.api.common.functions.FilterFunction
import org.apache.flink.streaming.api.scala._

/**
 * Demonstrates two ways to apply a filter on a Flink DataStream:
 * an inline lambda and a custom [[FilterFunction]] class.
 */
object FilterDemo {
  def main(args: Array[String]): Unit = {

    val env: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment
    // Single parallelism so printed output order is easy to follow in a demo.
    env.setParallelism(1)

    // Each record is "action,name[,age]" — note the second record has only two fields.
    val input: DataStream[String] = env.fromCollection(List(
      "insert,paul,22",
      "delete,tina",
      "update,bill,23"
    ))

    input.print("source")

    // Variant 1: filter via an inline lambda — keep only "delete" records.
    val deletes: DataStream[String] = input.filter(_.split(",")(0) == "delete")

    // Variant 2: filter via a custom FilterFunction class (keeps 3-field records),
    // then project each record to a (name, age) pair.
    val nameAgePairs: DataStream[(String, String)] = input
      .filter(new MyFilterFunction)
      .map { record =>
        val fields = record.split(",")
        (fields(1), fields(2))
      }

    deletes.print("函数")
    nameAgePairs.print("自定义函数类")

    env.execute(FilterDemo.getClass.getName)

  }
}

/**
 * Keeps only records that contain exactly three comma-separated fields
 * (e.g. "insert,paul,22"); records with fewer or more fields are dropped.
 */
class MyFilterFunction extends FilterFunction[String] {
  // The comparison already yields the Boolean result; no `if`/`return` needed.
  override def filter(value: String): Boolean =
    value.split(",").length == 3
}
