package com.shujia.flink.sql

import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.hbase.TableName
import org.apache.hadoop.hbase.client.{Connection, ConnectionFactory, Put, Table}
import org.apache.hadoop.hbase.util.Bytes

import scala.io.Source

/**
 * One-shot loader: reads student rows from a local CSV file and inserts
 * them into the HBase table "student", one Put per row.
 *
 * Expected row format (comma-separated): id,name,age,gender,clazz
 * Row key = id; all fields land in column family "info".
 *
 * Fixes over the previous version:
 *  - the file handle, Table and Connection are now closed via try/finally,
 *    so nothing leaks if a row is malformed or HBase is unreachable;
 *  - all byte conversions go through Bytes.toBytes (UTF-8) instead of the
 *    platform-charset-dependent String.getBytes().
 */
object Demo10Puthbase {
  def main(args: Array[String]): Unit = {

    val configuration = new Configuration()

    // ZooKeeper quorum the HBase client uses to locate the cluster
    configuration.set("hbase.zookeeper.quorum", "master")

    val con: Connection = ConnectionFactory.createConnection(configuration)
    try {
      val table: Table = con.getTable(TableName.valueOf("student"))
      // NOTE(review): path is relative to the working directory — TODO confirm
      val source = Source.fromFile("flink/data/students.txt")
      try {
        source
          .getLines()
          .foreach { line =>
            // id,name,age,gender,clazz — a malformed row still fails fast,
            // but the finally blocks below now release all resources.
            val split: Array[String] = line.split(",")

            val id: String = split(0)
            val name: String = split(1)
            val age: Int = split(2).toInt
            val gender: String = split(3)
            val clazz: String = split(4)

            // Row key is the student id (UTF-8 encoded)
            val put = new Put(Bytes.toBytes(id))

            put.addColumn(Bytes.toBytes("info"), Bytes.toBytes("name"), Bytes.toBytes(name))
            put.addColumn(Bytes.toBytes("info"), Bytes.toBytes("age"), Bytes.toBytes(age))
            put.addColumn(Bytes.toBytes("info"), Bytes.toBytes("gender"), Bytes.toBytes(gender))
            put.addColumn(Bytes.toBytes("info"), Bytes.toBytes("clazz"), Bytes.toBytes(clazz))

            // Single-row insert; acceptable for this small one-off load
            table.put(put)
          }
      } finally {
        // Close the file handle and the table even when a row fails
        source.close()
        table.close()
      }
    } finally {
      // Always release the cluster connection
      con.close()
    }
  }

}
