package com.shujia.stream

import org.apache.flink.table.api.{EnvironmentSettings, TableEnvironment}

/**
 * Streaming job: continuously counts vehicle records per toll-gate (`card`)
 * per calendar day from a Kafka topic and upserts the running totals into MySQL.
 */
object CardFlowDay {

  def main(args: Array[String]): Unit = {
    // Set up the Flink SQL environment in streaming mode
    // (swap in .inBatchMode() for a bounded, batch-style run).
    val envSettings: EnvironmentSettings = EnvironmentSettings
      .newInstance()
      .inStreamingMode()
      .build()

    val tableEnv: TableEnvironment = TableEnvironment.create(envSettings)

    // DDL: Kafka source table over topic `cars`, JSON-encoded records,
    // read from the earliest offset.
    // NOTE(review): `time` is fed to FROM_UNIXTIME below, which interprets it
    // as epoch *seconds* — confirm the producer does not emit milliseconds.
    val carsSourceDdl =
      """
        |CREATE TABLE cars (
        |    car STRING,
        |    city_code STRING,
        |    county_code STRING,
        |    card BIGINT,
        |    camera_id STRING,
        |    orientation STRING,
        |    road_id BIGINT,
        |    `time` BIGINT,
        |    speed DOUBLE
        |) WITH (
        |  'connector' = 'kafka',
        |  'topic' = 'cars',
        |  'properties.bootstrap.servers' = 'master:9092',
        |  'properties.group.id' = 'testGroup',
        |  'scan.startup.mode' = 'earliest-offset',
        |  'format' = 'json'
        |)
        |
        |""".stripMargin

    // DDL: MySQL (JDBC) sink table. The primary key (card, day_id) makes the
    // connector upsert, so each retraction/update of the running count
    // overwrites the previous row instead of appending.
    val cardFlowSinkDdl =
      """
        |CREATE TABLE card_flow_day (
        |  `card` BIGINT,
        |  `day_id` STRING ,
        |  `flow` BIGINT,
        |  PRIMARY KEY (card,`day_id`) NOT ENFORCED
        |) WITH (
        |    'connector' = 'jdbc',
        |    'url' = 'jdbc:mysql://master:3306/car',
        |    'table-name' = 'card_flow_day',
        |    'username' ='root',
        |    'password'='123456'
        |)
        |""".stripMargin

    // Continuous aggregation: per-gate, per-day record counts streamed into
    // the MySQL sink. Grouping on the same FROM_UNIXTIME expression used in
    // the projection keys the count by calendar day.
    val dailyFlowInsert =
      """
        |insert into card_flow_day
        |select
        | card,
        | FROM_UNIXTIME(`time`,'yyyy-MM-dd') as day_id,
        | count(1) as flow
        | from
        |cars
        |group by
        |card,
        |FROM_UNIXTIME(`time`,'yyyy-MM-dd')
        |
        |""".stripMargin

    // Register source, register sink, then submit the continuous INSERT.
    tableEnv.executeSql(carsSourceDdl)
    tableEnv.executeSql(cardFlowSinkDdl)
    tableEnv.executeSql(dailyFlowInsert)
  }

}
