package com.xc.spark_dw.KuduSource

import org.apache.kudu.spark.kudu.KuduContext
import org.apache.spark.SparkConf
import org.apache.spark.sql.SparkSession

object KuduReader {

  /** Entry point: reads the `impala::testbinlog.crm_result` Kudu table into a
    * DataFrame and prints up to 100 rows to stdout.
    *
    * @param args optional; args(0) may supply a comma-separated list of Kudu
    *             master addresses, overriding the built-in default quorum.
    */
  def main(args: Array[String]): Unit = {
    // Kudu master quorum — overridable from the command line so the job can
    // point at other clusters without a rebuild.
    val kuduMaster = args.headOption.getOrElse(
      "192.168.83.244:7051,192.168.83.82:7051,192.168.83.48:7051")

    // Only the crm_result table is read here. Other tables in the
    // testbinlog schema (business, base_user, base_job, base_user_job,
    // base_department, business_supporter, business_online_info,
    // crm_depart_result) were previously listed but unused.
    val onlineBusinessStatTableName = "impala::testbinlog.crm_result"

    val conf = new SparkConf()
      .setAppName("SparkKudu ")
      .setMaster("local")

    val readOptions = Map(
      "kudu.master" -> kuduMaster,
      "kudu.table" -> onlineBusinessStatTableName)

    val sparkSession = SparkSession.builder().config(conf).getOrCreate()
    try {
      // KuduContext is only required for write/update operations; it is kept
      // for the commented-out updateRows call below.
      val kc = new KuduContext(kuduMaster, sparkSession.sparkContext)

      // Read the Kudu table into a DataFrame and show up to 100 rows.
      val resultDf = sparkSession.read
        .format("org.apache.kudu.spark.kudu")
        .options(readOptions)
        .load()
      resultDf.show(100)
      //  kc.updateRows(resultDf, onlineBusinessStatTableName)
    } finally {
      // Release the SparkSession even if the read fails.
      sparkSession.close()
    }
  }

}
