#!/usr/bin/env bash

# Strict mode: abort on command failure or use of an unset variable —
# the submit command below relies on every variable defined here.
set -euo pipefail

# Installation layout.
readonly BASE_PATH=/app/inc_bdp_sch
readonly APP_PATH=${BASE_PATH}/blackhole-0.1

# Build a colon-separated classpath from every jar shipped with the app.
# Guard the no-match case: without the -e check the literal pattern
# "<dir>/lib/*.jar" would end up on the classpath.
CLASS_PATH=
for jar in "${APP_PATH}"/lib/*.jar
do
	[ -e "$jar" ] || continue
	# ${CLASS_PATH:+:...} avoids a dangling ':' when the list is empty.
	CLASS_PATH=${jar}${CLASS_PATH:+:${CLASS_PATH}}
done

# Job parameters, forwarded as positional arguments to the Spark app.
isLocal=true
kafkaBrokers=10.25.76.173:9092,10.25.76.174:9092,10.25.76.175:9092
kafkaTopic=ecommrece   # NOTE(review): looks like a misspelling of "ecommerce" — confirm the real topic name before "fixing"
zkUrl=10.25.76.173:2181,10.25.76.174:2181,10.25.76.175:2181
batchDuration=60       # presumably the streaming batch interval in seconds — confirm in SparkEsSink
esNodes=10.25.76.163:9200,10.25.76.164:9200,10.25.76.175:9200
esPort=9200
groupId=mmtest         # Kafka consumer group id
# Submit the streaming job to YARN in cluster mode.
#
# Fixes over the previous version:
#   - the main class must be passed via --class, not placed after the
#     application jar (there it would be read as an app argument)
#   - the --files and --jars lines were missing trailing backslashes,
#     which split the invocation into three separate commands
#   - --files and --jars take COMMA-separated lists, not colons
#   - "--total-excutor-cores" was misspelled, and --total-executor-cores
#     is a standalone/Mesos option anyway; on YARN use --executor-cores
#     (40 executors x 1 core preserves the intended 40 total cores)
#   - spark.yarn.max.executor.failures received the literal text
#     "{8 * num_executors}"; it is now computed arithmetically
num_executors=40
# Convert the colon-separated classpath into the comma list --jars wants,
# stripping any trailing ':' the build loop may have left behind.
jar_list=${CLASS_PATH%:}
spark-submit --master yarn --deploy-mode cluster \
	--class com.paic.SparkEsSink \
	--conf spark.yarn.maxAppAttempts=4 \
	--conf spark.yarn.am.attemptFailuresValidityInterval=1h \
	--conf spark.yarn.max.executor.failures=$((8 * num_executors)) \
	--conf spark.yarn.executor.failuresValidityInterval=1h \
	--conf spark.task.maxFailures=8 \
	--conf spark.yarn.executor.memoryOverhead=2048 \
	--conf spark.core.connection.ack.wait.timeout=300 \
	--queue realtime_queue \
	--executor-memory 4g \
	--num-executors "${num_executors}" \
	--executor-cores 1 \
	--conf spark.speculation=true \
	--conf spark.hadoop.fs.hdfs.impl.disable.cache=true \
	--conf spark.driver.extraJavaOptions=-Dlog4j.configuration=file:log4j.properties \
	--conf spark.executor.extraJavaOptions=-Dlog4j.configuration=file:log4j.properties \
	--files /path/to/log4j.properties,/path/to/metrics.properties \
	--jars "${jar_list//:/,}" \
	ecommerce-sink.jar \
	"${isLocal}" "${kafkaBrokers}" "${kafkaTopic}" "${zkUrl}" "${batchDuration}" "${esNodes}" "${esPort}" "${groupId}"



