package com.atguigu.example.TableSql

import org.apache.flink.streaming.api.scala._
import org.apache.flink.table.api.DataTypes
import org.apache.flink.table.api.bridge.scala._
import org.apache.flink.table.descriptors.{Csv, FileSystem, Schema}
import org.apache.flink.types.Row

/**
  * Copyright (c) 2018-2028 尚硅谷 All Rights Reserved 
  *
  * Project: FlinkTutorial
  * Package: com.atguigu.example.TableSql
  * Version: 1.0
  *
  * Created by wushengran on 2020/8/10 16:30
  */
/**
  * Reads sensor records from a CSV file, registers them as a table, and
  * computes per-sensor row counts twice — once via the Table API and once
  * via SQL — printing both results as retract streams.
  *
  * NOTE(review): the source path below is a hard-coded Windows location and
  * must exist for the job to run.
  */
object MyFileOutputTest {
  def main(args: Array[String]): Unit = {
    // 1. Set up the streaming environment; parallelism 1 keeps the printed
    // output from being interleaved across parallel subtasks.
    val streamEnv = StreamExecutionEnvironment.getExecutionEnvironment
    streamEnv.setParallelism(1)
    val tblEnv = StreamTableEnvironment.create(streamEnv)

    // 2. Connect to the external file-system source and register it as a
    // temporary table named "inputTable".
    val sourcePath = "E:\\workspace\\flink-example\\src\\main\\resources\\sensor.txt"
    val sensorSchema = new Schema()
      .field("id", DataTypes.STRING())
      .field("timestamp", DataTypes.BIGINT())
      .field("temp", DataTypes.DOUBLE())
    tblEnv
      .connect(new FileSystem().path(sourcePath))
      .withFormat(new Csv())
      .withSchema(sensorSchema)
      .createTemporaryTable("inputTable")

    // 3. Obtain a Table handle for the registered source.
    val sensor = tblEnv.from("inputTable")

    // Table API variant: group by sensor id and count rows per group.
    // (The symbol-based expression syntax, e.g. .groupBy('id), is an
    // equivalent alternative to these string expressions.)
    val apiAgg = sensor
      .groupBy("id")
      .select("id,id.count as count")

    // SQL variant of the same aggregation over the registered table.
    val sqlAgg = tblEnv.sqlQuery(
      """
        |select id, count(*)
        |from inputTable
        |group by id
      """.stripMargin)

    // Grouped aggregations emit updates, so a retract stream (add/retract
    // flag + row) is required rather than an append stream.
    apiAgg.toRetractStream[Row].print("agg")
    sqlAgg.toRetractStream[Row].print("aggsql")

    streamEnv.execute()
  }
}
