package com.shujia.init

import org.apache.flink.streaming.api.scala._
import org.apache.flink.table.api.EnvironmentSettings
import org.apache.flink.table.api.bridge.scala.StreamTableEnvironment
import org.apache.flink.table.catalog.hive.HiveCatalog

object InitFLinkOdsTable {

  /** Kafka broker list shared by every ODS source table. */
  private val KafkaBootstrapServers = "node1:9092,node2:9092,master:9092"

  /** Default directory holding hive-site.xml for the HiveCatalog. */
  private val DefaultHiveConfDir = "sentcompute/src/main/resources"

  /**
    * Drops (if present) and recreates one Kafka-backed ODS table.
    *
    * Convention grounded in the original DDL: the Kafka topic name equals the
    * table name. The consumer group is derived from the table name so that no
    * two tables share a group id (the original code reused the placeholder
    * group 'asdasd' for both ods_kafka_weibo and ods_kafka_comment, which makes
    * their committed offsets collide in Kafka).
    *
    * @param tableEnv table environment already pointed at the target catalog/db
    * @param tableName table to (re)create; also used as the Kafka topic name
    * @param schema    column definitions, one `name type` pair per line
    */
  private def recreateKafkaTable(tableEnv: StreamTableEnvironment,
                                 tableName: String,
                                 schema: String): Unit = {
    tableEnv.executeSql(s"DROP TABLE IF EXISTS $tableName")
    tableEnv.executeSql(
      s"""
         |CREATE TABLE $tableName (
         |$schema
         |) WITH (
         | 'connector' = 'kafka',
         | 'topic' = '$tableName',
         | 'properties.bootstrap.servers' = '$KafkaBootstrapServers',
         | 'properties.group.id' = '${tableName}_group',
         | 'format' = 'json',
         | 'scan.startup.mode' = 'earliest-offset',
         | 'json.ignore-parse-errors' = 'true'
         |)
       """.stripMargin)
  }

  /**
    * Registers the Hive catalog and (re)creates the three Kafka-backed ODS
    * tables (weibo posts, users, comments) in the `ods` database.
    *
    * @param args optional: args(0) overrides the Hive conf directory
    *             (defaults to [[DefaultHiveConfDir]])
    */
  def main(args: Array[String]): Unit = {

    val bsEnv: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment

    val bsSettings: EnvironmentSettings = EnvironmentSettings
      .newInstance()
      .useBlinkPlanner() // use the Blink planner
      .inStreamingMode() // streaming execution mode
      .build()

    // Create the table environment.
    val bsTableEnv: StreamTableEnvironment = StreamTableEnvironment.create(bsEnv, bsSettings)

    // Hive conf dir may be supplied as the first CLI argument.
    val hiveConfDir = if (args.nonEmpty) args(0) else DefaultHiveConfDir
    val hiveCatalog: HiveCatalog = new HiveCatalog("myHive", "sent", hiveConfDir)

    // Register the Hive metastore as a catalog ...
    bsTableEnv.registerCatalog("myHive", hiveCatalog)

    // ... and make it the current catalog.
    bsTableEnv.useCatalog("myHive")

    // All ODS tables live in the `ods` database.
    bsTableEnv.executeSql("use ods")

    recreateKafkaTable(bsTableEnv, "ods_kafka_weibo",
      """  id String,
        |  comments_count bigint,
        |  created_at String,
        |  source String,
        |  reposts_count bigint,
        |  attitudes_count bigint,
        |  text String,
        |  user_id bigint""".stripMargin)

    recreateKafkaTable(bsTableEnv, "ods_kafka_user",
      """  id bigint,
        |  gender String,
        |  screen_name String,
        |  followers_count bigint,
        |  follow_count bigint""".stripMargin)

    recreateKafkaTable(bsTableEnv, "ods_kafka_comment",
      """  comment_id String,
        |  comment_time String,
        |  like_count bigint,
        |  comment_text String,
        |  user_id bigint,
        |  weibo_id String""".stripMargin)

  }
}
