//package scala.hdfs
//
//import cn.getech.data.development.utils.FlinkUtils
//import org.apache.flink.streaming.api.scala.DataStream
//import org.apache.flink.table.api.scala.StreamTableEnvironment
//import org.apache.flink.table.catalog.hive.HiveCatalog
//import org.apache.flink.types.Row
//
//Flink 1.10 demo (entire file disabled): Kafka source -> Hive via HiveCatalog, using the legacy 'connector.*' DDL options
//object FlinkDStreamSyncHiveMain {
//
//  def main(args: Array[String]): Unit = {
//
//    val sql =
//      """
//        |CREATE TABLE user_log(
//        |      userid STRING,
//        |      action STRING,
//        |      duration BIGINT,
//        |      dt BIGINT
//        |    ) WITH (
//        |      'connector.type' = 'kafka',
//        |    'connector.version' = 'universal',
//        |    'connector.topic' = 'test118',
//        |    'connector.properties.zookeeper.connect' = 'bigdata-test-1:2181,bigdata-test-3:2181,bigdata-test-5:2181',
//        |    'connector.properties.bootstrap.servers' = 'bigdata-test-4:9092,bigdata-test-5:9092,bigdata-test-6:9092',
//        |    'connector.startup-mode' = 'earliest-offset',
//        |    'update-mode' = 'append',
//        |    'format.derive-schema' = 'true',
//        |    'format.type' = 'json')
//        |""".stripMargin
//
//    val tEnv: StreamTableEnvironment = FlinkUtils.createStreamTableEnv(true)
//
//    val name = "myhive" // catalog name: a unique identifier for this catalog registration
//    val defaultDatabase = "test" // default database name
//    val hiveConfDir = "D:\\softwore\\conf" // directory containing hive-site.xml
//    val version = "2.1.1"
//    val catalog = new HiveCatalog(name, defaultDatabase, hiveConfDir, version)
//    tEnv.registerCatalog(name, catalog)
//    tEnv.useCatalog(name)
//    //    tEnv.sqlUpdate(sql)
//
//    //        val table = tEnv.sqlQuery("show databases")
//    //        table.printSchema()
//    val table = tEnv.sqlQuery("select * from `test`.`test_118_k` where dt='120'")
////    import org.apache.flink.api.scala._
////    val value: DataStream[Row] = tEnv.toAppendStream[Row](table)
////    value.print()
//
//    //    val test_table = tEnv.from("test.test_118_k")
//    //    val test_table1 = tEnv.from("user_log").select("userid, action,duration, action as dt")
////        test_table1.insertInto("test.test_118_k")
//        tEnv.sqlUpdate("insert into test_119_k select userid, action, duration from test_118_k where dt='120'")
//
//    tEnv.execute("test001")
//  }
//
//
//}
//
//
