#!/usr/bin/env bash
# Environment setup for a TensorFlowOnSpark MNIST inference job on YARN.
# Required ambient env: SPARK_HOME, HADOOP_HOME, JAVA_HOME, TFoS_HOME.
set -eo pipefail  # -u intentionally omitted: HADOOP_HOME/JAVA_HOME come from the ambient env

# Python runtime used by both the driver and the executors.
export PYTHON_ROOT=/usr/local/python2.7
export PYSPARK_DRIVER_PYTHON="${PYTHON_ROOT}/bin/python2.7"
export PYSPARK_PYTHON="${PYTHON_ROOT}/bin/python2.7"
# Propagate the executor Python to YARN containers.
export SPARK_YARN_USER_ENV="PYSPARK_PYTHON=${PYTHON_ROOT}/bin/python2.7"
export PATH="${PYTHON_ROOT}/bin/:${PATH}"

# YARN queue to submit to.
export QUEUE=default

# Native libraries TensorFlow needs to read from HDFS:
# libhdfs from Hadoop and libjvm from the JRE.
export LIB_HDFS="${HADOOP_HOME}/lib/native"
export LIB_JVM="${JAVA_HOME}/jre/lib/amd64/server"
export HADOOP_PREFIX="${HADOOP_HOME}"

# BUG FIX: the original exported LD_LIBRARY_PATH=${PATH}, pointing the dynamic
# linker at bin/ directories. Point it at the native library dirs instead,
# preserving any pre-existing LD_LIBRARY_PATH.
export LD_LIBRARY_PATH="${LIB_JVM}:${LIB_HDFS}${LD_LIBRARY_PATH:+:${LD_LIBRARY_PATH}}"

# Submit the MNIST inference job to YARN in cluster mode.
# All expansions are quoted (the originals were not) so paths containing
# spaces cannot break the command line; flags and values are otherwise
# unchanged.
"${SPARK_HOME}/bin/spark-submit" \
--master yarn \
--deploy-mode cluster \
--queue "${QUEUE}" \
--num-executors 2 \
--executor-memory 2G \
--py-files "hdfs:///user/${USER}/tf/tfspark.zip#tfspark,${TFoS_HOME}/examples/mnist/spark/mnist_dist.py" \
--conf spark.dynamicAllocation.enabled=false \
--conf spark.yarn.maxAppAttempts=1 \
--conf "spark.executorEnv.LD_LIBRARY_PATH=${LIB_JVM}:${LIB_HDFS}" \
--driver-library-path="${LIB_JVM}:${LIB_HDFS}" \
--archives "hdfs:///user/${USER}/tf/python.zip#python" \
"${TFoS_HOME}/examples/mnist/spark/mnist_spark.py" \
--images /tmp/test/images \
--labels /tmp/test/labels \
--mode inference \
--model mnist_model_1 \
--output predictions_1
