/**
 * Table function example: splitting strings into (word, length) rows
 * via both the Table API (joinLateral) and SQL (LATERAL TABLE).
 */
package com.atguigu.day8
import com.atguigu.source.{SensorReading, SensorSource}
import org.apache.flink.streaming.api.scala.{DataStream, StreamExecutionEnvironment}
import org.apache.flink.table.api.EnvironmentSettings
import org.apache.flink.table.api.scala._
import org.apache.flink.api.scala._
import org.apache.flink.table.api._
import org.apache.flink.table.api._
import org.apache.flink.table.functions.{ScalarFunction, TableFunction}
import org.apache.flink.types.Row


object TableFunctionExample {

  def main(args: Array[String]): Unit = {
    val env = StreamExecutionEnvironment.getExecutionEnvironment
    env.setParallelism(1)

    // Blink planner in streaming mode; required for the LATERAL TABLE SQL below.
    val settings = EnvironmentSettings
      .newInstance()
      .useBlinkPlanner()
      .inStreamingMode()
      .build()

    val tEnv = StreamTableEnvironment.create(env, settings)

    val stream = env.fromElements(
      "hello#world",
      "atguigu#bigdata"
    )

    val split = new Split("#")

    // Table API style: joinLateral applies the table function to each row,
    // joining the row with every (word, length) record the function emits.
    val table = tEnv.fromDataStream(stream, 's)
    table
      .joinLateral(split('s) as ('word, 'length))
      .select('s, 'word, 'length)
      .toAppendStream[Row]
//      .print()

    // SQL style: register the function by name, expose the table as a view,
    // then cross-join with LATERAL TABLE.
    tEnv.registerFunction("split", split)
    tEnv.createTemporaryView("t", table)
    tEnv
      // T(word, length) assigns column names to the rows the function emits.
      .sqlQuery("SELECT s, word, length FROM t ,LATERAL TABLE(split(s)) as T(word,length)")
      .toAppendStream[Row]
      .print()

    env.execute()
  }

  /**
   * Table function that splits its input string on the separator `seq`
   * and emits one (fragment, fragment-length) row per piece.
   *
   * @param seq separator passed verbatim to `String.split` (a regex).
   */
  class Split(seq: String) extends TableFunction[(String, Int)] {
    def eval(s: String): Unit = {
      // Emit each row downstream via collect. Explicit tuple literal:
      // collect(x, x.length) compiled only through deprecated auto-tupling.
      s.split(seq).foreach(x => collect((x, x.length)))
    }
  }

}
