#!/bin/bash

# Build the Hadoop classpath for the Flink submitter.
# The '*' entries are intentionally LITERAL: the JVM expands a trailing '*'
# in a classpath entry to all .jar files in that directory itself. They are
# quoted so the shell can never glob-expand them (the original relied on
# bash's special no-glob handling of `export VAR=...`, which does not hold
# under plain sh/dash or outside an assignment context).
hadoop_cp='/usr/lib/hadoop/*'
hadoop_cp+=':/usr/lib/hadoop/client/*'
hadoop_cp+=':/usr/lib/hadoop/lib/*'
hadoop_cp+=':/usr/lib/hadoop-hdfs/*'
hadoop_cp+=':/usr/lib/hadoop-hdfs/lib/*'
hadoop_cp+=':/usr/lib/hadoop-mapreduce/*'
hadoop_cp+=':/usr/lib/hadoop-yarn/*'
hadoop_cp+=':/usr/lib/hadoop-yarn/lib/*'
export HADOOP_CLASSPATH="$hadoop_cp"

# Demo: submit the Flink job com.nd.bd.HbaseDeleteJob2 to YARN via the
# flink-yarn-submiter helper jar, with Kerberos security enabled.
#
# $1 - forwarded as the first element of -execArgs to the Flink job.
#      NOTE(review): "$1" is not validated; if the script is run with no
#      argument an empty string '' is forwarded — confirm that is intended.
# NOTE(review): the keytab path ends in ".keytab.jar" — looks like an odd
# upload artifact name; verify it is the actual keytab file.
#
# Fix: the original command ended with a dangling trailing '\' (line
# continuation at end of file), which would silently swallow any line
# appended after it; removed here.
java -jar /usr/local/data/user_data/nd_hailalu/resource/8a3b9ab882ab11a00183403acf460714/flink-yarn-submiter-1.0.jar \
  -hadoopConfDir /usr/lib/hadoop/etc/hadoop \
  -flinkVersion "1.13.6" \
  -flinkJarPath /usr/lib/flink/lib \
  -flinkConfDir /usr/lib/flink/conf \
  -runJarPath /usr/local/data/user_data/nd_hailalu/resource/8a3b91cc87381beb018805d0a45707c4/bd-data-persona-flink-scala_2.11-1.0-SNAPSHOT.jar \
  -entryPointClassName com.nd.bd.HbaseDeleteJob2 \
  -execArgs "['$1', 'env_dp_prod_new']" \
  -openSecurity true \
  -krb5Path /usr/local/data/user_data/nd_hailalu/resource/8a3b9b18804549bd01813714c7ad1290/krb5.conf \
  -principal hailalu \
  -keytabPath /usr/local/data/user_data/nd_hailalu/resource/8a3b9b18804549bd0181370feb27128e/hailalu.PROD.BDI.keytab.jar \
  -confProperties "{'taskmanager.numberOfTaskSlots': '3'}"