#!/usr/bin/env bash
#
# Bulk-load TSV data from HDFS into HBase:
#   1. authenticate via Kerberos (kinit with a keytab),
#   2. generate HFiles with ImportTsv into a temporary HDFS directory,
#   3. bulk-load the HFiles with LoadIncrementalHFiles,
#   4. remove the temporary HFile directory.
#
# Required environment variables (expected to be exported by the caller):
#   tmp          - HDFS path for temporary HFile output — presumably per-run; confirm with caller
#   columnValues - comma-separated column mappings appended after HBASE_ROW_KEY for ImportTsv
#   hbaseTable   - target HBase table; HFiles are generated against "${hbaseTable}_tmp"
#   hdfsPath     - HDFS path of the source TSV data

set -euo pipefail

. /etc/profile
source /opt/hadoopclient/bigdata_env

# Fail fast if any required variable is missing — protects the 'hdfs dfs -rm -r'
# calls below from running with an empty path.
: "${tmp:?tmp (temporary HDFS HFile output dir) must be set}"
: "${columnValues:?columnValues (ImportTsv column mappings) must be set}"
: "${hbaseTable:?hbaseTable (target HBase table) must be set}"
: "${hdfsPath:?hdfsPath (source TSV data path) must be set}"

kinit -k -t /home/bdbp/dataex/kerberos/admin/user.keytab asiainfo

echo "删除临时文件夹${tmp}"
# -f: don't fail (under set -e) when the directory doesn't exist yet.
hdfs dfs -rm -r -f "${tmp}"

echo "生成hfile"
# NOTE: the java.opts values are quoted so that -Djava.net.preferIPv4Stack=true
# is passed INSIDE the map/reduce child JVM options (the original's unquoted
# form made it a separate argument, and the reduce line had a broken "-D-D").
hbase org.apache.hadoop.hbase.mapreduce.ImportTsv \
  -Dmapreduce.map.memory.mb=5120 \
  "-Dmapreduce.map.java.opts=-Xmx4096M -Djava.net.preferIPv4Stack=true" \
  "-Dmapreduce.reduce.java.opts=-Xmx4096M -Djava.net.preferIPv4Stack=true" \
  -Dimporttsv.bulk.output="${tmp}" \
  -Dimporttsv.columns=HBASE_ROW_KEY,"${columnValues}" \
  "${hbaseTable}_tmp" \
  "${hdfsPath}"

echo "load hbase"

hbase org.apache.hadoop.hbase.mapreduce.LoadIncrementalHFiles \
  "${tmp}" \
  "${hbaseTable}"

echo "删除临时文件夹"
hdfs dfs -rm -r -f "${tmp}"