package com.sunzm.flink.sql

import org.apache.flink.streaming.api.scala._
import org.apache.flink.table.api.Table
import org.slf4j.{Logger, LoggerFactory}

object ScalaFlinkSQLDemo {
  // Logger named after the object, with the companion-object '$' suffix stripped.
  private val logger: Logger = LoggerFactory.getLogger(this.getClass.getName.stripSuffix("$"))
  private val isLocal = true

  /**
   * Demo entry point: builds a stream execution environment. The Table API
   * pipeline below is currently commented out.
   *
   * NOTE(review): with every source/sink commented out, `env.execute()` is
   * expected to fail at runtime ("No operators defined in streaming topology")
   * — re-enable the pipeline (and its StreamTableEnvironment import) before
   * running; confirm against the Flink version in use.
   */
  def main(args: Array[String]): Unit = {
    // 1. Create the stream execution environment: a local environment with the
    // web UI when running locally, otherwise the cluster-provided one.
    val env: StreamExecutionEnvironment =
      if (isLocal) StreamExecutionEnvironment.createLocalEnvironmentWithWebUI()
      else StreamExecutionEnvironment.getExecutionEnvironment
/*
    // Create the table execution environment on top of the stream environment.
    val tableEnv: StreamTableEnvironment = StreamTableEnvironment.create(env)

    // Read comma-separated lines from a socket and map them into User records.
    val dataStream: DataStream[User] = env.socketTextStream("82.156.210.70", 9999)
      .map(line => {
        val fields = line.split(",")

        User(fields(0), fields(1).toInt)
      })

    // Register the stream as a temporary table named t_user.
    tableEnv.createTemporaryView("t_user", dataStream)

    // Run the query and obtain its result as a Table.
    val resultTable: Table = tableEnv.sqlQuery(
      """
        |SELECT userId, age
        | FROM t_user WHERE age >= 18
        |""".stripMargin)

    // Print the table query result directly.
    //resultTable.execute().print()

    val resultDS: DataStream[User] = tableEnv.toDataStream(resultTable, classOf[User])

    resultDS.print()*/

    // Convert the Table to an append-only DataStream[User].
    //val appendStream: DataStream[User] = tableEnv.toAppendStream[User](resultTable)
    // Convert the Table to a DataStream of (flag, User) pairs: flag=true marks
    // an inserted row, flag=false marks a retracted row.
    //val retractStream: DataStream[(Boolean, User)] = tableEnv.toRetractStream[User](resultTable)

    env.execute()
  }

  /** Record type used by the (currently commented-out) demo pipeline. */
  private case class User(userId: String, age: Int)
}
