#!/bin/bash
#
# Bulk-load a CSV file from HDFS into a Phoenix/HBase table using the
# MapReduce-based CsvBulkLoadTool, authenticating with the caller's keytab.
#
# Usage: $0 <schema> <table> <input> <index_table> <zk> <shouldProxy> <user> <queue>
#   schema       Phoenix schema name
#   table        target Phoenix table
#   input        HDFS path of the CSV data
#   index_table  secondary index table (currently unused by the final command;
#                kept for interface compatibility with existing callers)
#   zk           ZooKeeper quorum for Phoenix
#   shouldProxy  value forwarded to the tool's -sp flag
#   user         principal short name; a "<user>.keytab*" file must exist in CWD
#   queue        YARN queue for the MR job
set -euo pipefail

if [[ $# -ne 8 ]]; then
  printf 'Usage: %s <schema> <table> <input> <index_table> <zk> <shouldProxy> <user> <queue>\n' \
    "${0##*/}" >&2
  exit 2
fi

schema=$1
table=$2
input=$3
index_table=$4
zk=$5
shouldProxy=$6
user=$7
queue=$8

echo "#####"
# Locate the keytab with a glob instead of parsing `ls -al` output (which
# breaks on filenames with spaces and returns multiple names when several
# keytabs match). Take the first match, matching the old behavior.
keytab=
for f in "$user".keytab*; do
  if [[ -e "$f" ]]; then
    keytab=$f
    break
  fi
done
if [[ -z "$keytab" ]]; then
  printf 'error: no keytab file matching %s.keytab* in %s\n' "$user" "$PWD" >&2
  exit 1
fi
printf '%s\n' "$schema,$table,$input,$index_table,$zk,$shouldProxy,$user,$queue"
echo "#####"

# Submit the bulk-load MapReduce job. All expansions are quoted so values
# containing spaces or glob characters cannot corrupt the command line.
/usr/hdp/current/hadoop-client/bin/hadoop jar phoenix-5.0.0-HBase-2.0-client.jar \
  org.apache.phoenix.mapreduce.CsvBulkLoadTool \
  -Dmapred.job.queue.name="$queue" \
  -Dmapreduce.job.running.map.limit=30 \
  -Dmapred.reduce.tasks=10 \
  --schema "$schema" \
  --table "$table" \
  --input "$input" \
  --zookeeper "$zk" \
  -sp "$shouldProxy" \
  -keytab "$keytab" \
  -krb5 /etc/krb5.conf \
  -pu "$user"