package com.hngy.scala.tablesql

import org.apache.flink.api.scala.ExecutionEnvironment
import org.apache.flink.table.api.bridge.scala.BatchTableEnvironment
import org.apache.flink.types.Row


/**
 * Demonstrates converting a Flink Table into a `DataSet[Row]`.
 *
 * Registers a CSV-backed input table via SQL DDL, reads it as a Table,
 * converts the Table to a DataSet with the batch table environment, and
 * prints each row as an `(id, name)` tuple.
 */
object TableToDataSetScala {

  def main(args: Array[String]): Unit = {
    // Batch execution environment plus the table environment bridged to it.
    val env = ExecutionEnvironment.getExecutionEnvironment
    val tableEnv = BatchTableEnvironment.create(env)

    // Register the input table over a local CSV directory.
    // NOTE(review): 'connector.type' / 'format.type' are the legacy (pre-FLIP-122)
    // connector option keys; newer Flink versions use 'connector' = 'filesystem',
    // 'path', and 'format' — confirm against the Flink version in use.
    tableEnv.executeSql(
      """create table myTable(
        |id int,
        |name string
        |) with (
        |'connector.type' = 'filesystem',
        |'connector.path' = 'D:\data\source',
        |'format.type' = 'csv'
        |)""".stripMargin)

    // Look up the registered table.
    val table = tableEnv.from("myTable")

    // The scala-api import supplies the implicit TypeInformation
    // required by toDataSet.
    import org.apache.flink.api.scala._
    val dataSet = tableEnv.toDataSet[Row](table)

    // Row fields are untyped (AnyRef); a null field would NPE in toString —
    // acceptable for a demo, but worth guarding in production code.
    dataSet
      .map(row => (row.getField(0).toString.toInt, row.getField(1).toString))
      .print()
  }
}
