import org.apache.flink.api.common.typeinfo.{TypeInformation, Types}
import org.apache.flink.streaming.api.scala.{DataStream, StreamExecutionEnvironment}
import org.apache.flink.table.api.scala.StreamTableEnvironment
import org.apache.flink.table.sinks.CsvTableSink
import org.apache.flink.api.scala._
import org.apache.flink.table.functions.TableFunction
import org.apache.flink.table.sources.CsvTableSource
import org.apache.flink.types.Row

/** UDTF: splits each input string on `separator` and emits one row per token
  * as (token, token length, 1-based index of the source argument).
  */
class Split(separator: String) extends TableFunction[(String, Int, Int)] {
  @scala.annotation.varargs
  def eval(fileds: String*): Unit = {
    // zipWithIndex is 0-based; the emitted field index is 1-based, as before.
    for ((field, idx) <- fileds.zipWithIndex) {
      field.split(separator).foreach { token =>
        // collect(...) emits one row per token.
        collect((token, token.length, idx + 1))
      }
    }
  }
}

/** UDTF: pivots a row of numeric strings into columns, emitting one row per
  * argument as ("<position>时", numeric value). Position is 0-based.
  * NOTE: `toInt` throws NumberFormatException on non-numeric input (unchanged
  * from the original behavior).
  */
class HangToLie extends TableFunction[(String, Int)] {
  @scala.annotation.varargs
  def eval(fileds: String*): Unit = {
    for ((value, hour) <- fileds.zipWithIndex) {
      println(value) // debug output, kept to match original behavior
      collect((s"${hour}时", value.toInt))
    }
  }
}

/** UDTF: pivots a row into columns, emitting one STRING row per argument in
  * the form "<position>时#<length of the argument>". Position is 0-based.
  */
class HangToLie2 extends TableFunction[String] {
  @scala.annotation.varargs
  def eval(fileds: String*): Unit = {
    for ((value, idx) <- fileds.zipWithIndex) {
      println(value) // debug output, kept to match original behavior
      collect(s"${idx}时#${value.length}")
    }
  }

  /** Declare the output type explicitly: a single STRING column per row. */
  override def getResultType: TypeInformation[String] = Types.STRING
}

/** Demo job: reads a CSV source, applies the HangToLie2 UDTF via a lateral
  * join, splits the emitted "<idx>时#<len>" string back into two fields in
  * SQL, and writes the result to a CSV sink.
  */
object testUDTF {
  def main(args: Array[String]): Unit = {

    val env = StreamExecutionEnvironment.getExecutionEnvironment
    val tabEnv = StreamTableEnvironment.create(env)

    // Source schema: three STRING columns.
    val fieldNames: Array[String] = Array("field1", "field2", "field3")
    val fieldTypes: Array[TypeInformation[_]] = Array(Types.STRING, Types.STRING, Types.STRING)

    // Register the CSV file as a table source.
    val csvSource = new CsvTableSource("data/testUDTF", fieldNames, fieldTypes)
    tabEnv.registerTableSource("CSVTableSource", csvSource)

    // Sink that writes the query result back out as CSV.
    val csvSink = new CsvTableSink("data/output5")

    // Register the UDTF used in the query below.
    // Alternative (3-column splitter, query alias would be T(word, length, i)):
    //   tabEnv.registerFunction("myUDTF", new Split("#"))
    tabEnv.registerFunction("myUDTF", new HangToLie2())

    // HangToLie2 emits a single STRING column "<idx>时#<len>"; split it on
    // the '#' marker back into two fields. LEFT JOIN ... ON TRUE keeps source
    // rows even when the UDTF emits nothing.
    val sql =
      """
        |select newWord
        |       ,SUBSTRING(newWord,1,POSITION('#' IN newWord)-1) as field1
        |       ,SUBSTRING(newWord,POSITION('#' IN newWord)+1) as field2
        |from CSVTableSource LEFT JOIN LATERAL TABLE(myUDTF(field1,field2)) AS T(newWord) ON TRUE
        |""".stripMargin

    val result = tabEnv.sqlQuery(sql)

    val resultFieldNames = result.getSchema.getFieldNames
    val resultFieldTypes = result.getSchema.getFieldTypes
    println(result.getSchema)
    // Arrays do not override toString (println would show an identity hash);
    // use mkString so the schema contents are actually readable.
    println(resultFieldNames.mkString(", "))
    println(resultFieldTypes.mkString(", "))

    tabEnv.registerTableSink("CSVTableSink", resultFieldNames, resultFieldTypes, csvSink)
    result.insertInto("CSVTableSink")

    env.execute()
  }
}
