
package com.feiwei

import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment
import org.apache.flink.table.api.DataTypes
import org.apache.flink.table.api.scala.StreamTableEnvironment
import org.apache.flink.table.descriptors.{Csv, FileSystem, Kafka, Schema}
import org.apache.flink.table.api.scala._
object day8_mysql {

  /** Demo Flink job (legacy Table API descriptor/DDL style):
    * reads CSV sensor readings from a local file, counts readings per
    * sensor id, and writes the per-id counts into a MySQL table through
    * the JDBC connector.
    *
    * Usage: an optional first program argument overrides the input file
    * path; with no arguments the original sample path is used.
    */
  def main(args: Array[String]): Unit = {

    // 1. Create the streaming environment and its Table API wrapper.
    val env = StreamExecutionEnvironment.getExecutionEnvironment
    val tableEnv = StreamTableEnvironment.create(env)

    // Input path is configurable; falls back to the original sample file.
    val defaultPath =
      "E:\\repository\\company\\myself\\flink-learning\\flink-learning-demo\\src\\main\\resources\\sensor.txt"
    val filePath = args.headOption.getOrElse(defaultPath)

    // 2. Register the CSV file as the temporary source table "inputTable".
    tableEnv.connect(new FileSystem().path(filePath))
      .withFormat(new Csv())
      .withSchema(new Schema()
        .field("id", DataTypes.STRING())
        .field("timestamp", DataTypes.BIGINT())
        .field("temp", DataTypes.DOUBLE())
      )
      .createTemporaryTable("inputTable")

    // 3. Aggregate: count rows per sensor id.
    //    Alias 'cnt matches the sink column name (and avoids the SQL
    //    reserved word "count"); the legacy insertInto maps by position,
    //    so this is purely a readability/consistency change.
    val source = tableEnv.from("inputTable")
    val counts = source
      .groupBy('id)
      .select('id, 'id.count as 'cnt)

    // 4. Register the JDBC sink table via DDL.
    // NOTE(review): DB credentials are hard-coded in source — move them to
    // external configuration / a secrets store before any real deployment.
    val sinkDDL =
      """
        |create table jdbcTable(
        |
        |  id varchar(100) not null,
        |  cnt bigint not null
        |) with (
        | 'connector.type'='jdbc',
        | 'connector.url'='jdbc:mysql://47.96.89.11:3306/flink',
        | 'connector.table'='test2',
        | 'connector.driver'='com.mysql.cj.jdbc.Driver',
        | 'connector.username'='root',
        | 'connector.password'='cht@cloud6510'
        |
        |)
      """.stripMargin

    // sqlUpdate is the legacy DDL entry point (replaced by executeSql in
    // Flink >= 1.11); kept here to match the API generation this file uses.
    tableEnv.sqlUpdate(sinkDDL)

    // 5. Emit the aggregate into the JDBC sink and launch the job.
    counts.insertInto("jdbcTable")
    env.execute()
  }

}
