package com.shujia.stream

import org.apache.flink.table.api.{EnvironmentSettings, TableEnvironment}

/**
 * Streaming job: continuously counts vehicles per toll-gate (`card`) per day
 * from a Kafka topic and upserts the running totals into MySQL.
 */
object CardFlowDay {

  def main(args: Array[String]): Unit = {

    // 1. Environment settings — unbounded (streaming) execution mode.
    val envSettings: EnvironmentSettings = EnvironmentSettings
      .newInstance()
      .inStreamingMode()
      .build()

    // Flink SQL table environment built from those settings.
    val tEnv: TableEnvironment = TableEnvironment.create(envSettings)

    // 2. Kafka source table (a dynamic table over topic `car`, JSON-encoded).
    // NOTE(review): `time` is a BIGINT epoch value and FROM_UNIXTIME below
    // treats it as SECONDS — confirm the producer does not emit milliseconds.
    val createSourceDdl =
      """
        |
        |CREATE TABLE cars (
        |car STRING,
        |city_code STRING,
        |county_code STRING,
        |card BIGINT,
        |camera_id STRING,
        |orientation STRING,
        |road_id BIGINT,
        |`time` BIGINT,
        |speed DOUBLE
        |) WITH (
        |  'connector' = 'kafka',
        |  'topic' = 'car',
        |  'properties.bootstrap.servers' = 'master:9092',
        |  'properties.group.id' = 'testGroup',
        |  'scan.startup.mode' = 'earliest-offset',
        |  'format' = 'json'
        |)
        |
        |""".stripMargin

    // 3. MySQL (JDBC) sink table. The PRIMARY KEY (card, day) makes the JDBC
    // connector write in upsert mode, which the retracting aggregation below
    // requires.
    val createSinkDdl =
      """
        |
        |CREATE TABLE card_flow_day (
        |  `card` BIGINT,
        |  `day` STRING ,
        |  `flow` BIGINT,
        |  PRIMARY KEY (card,`day`) NOT ENFORCED
        |) WITH (
        |   'connector' = 'jdbc',
        |   'url' = 'jdbc:mysql://master:3306/car',
        |   'table-name' = 'card_flow_day',
        |    'username' ='root',
        |    'password'='123456'
        |)
        |
        |""".stripMargin

    // 4. Continuous query: per-(card, day) vehicle counts, upserted into the
    // sink as new events arrive.
    val insertFlowDml =
      """
        |
        |insert into card_flow_day
        |select
        | card,
        | FROM_UNIXTIME(`time`,'yyyy-MM-dd') as `day`,
        | count(1) as flow
        | from cars
        |group by card,FROM_UNIXTIME(`time`,'yyyy-MM-dd')
        |
        |
        |""".stripMargin

    // Register source and sink, then submit the continuous INSERT job.
    tEnv.executeSql(createSourceDdl)
    tEnv.executeSql(createSinkDdl)
    tEnv.executeSql(insertFlowDml)
  }

}
