#!/usr/bin/env bash

#spark-submit --jars /opt/jobs/foreign-recommend/jar/hbase-client-1.4.9.jar,/opt/jobs/foreign-recommend/jar/hbase-common-1.4.9.jar,/opt/jobs/foreign-recommend/jar/hbase-protocol-1.4.9.jar,/opt/jobs/foreign-recommend/jar/hbase-server-1.4.9.jar,/opt/jobs/foreign-recommend/jar/metrics-core-2.2.0.jar,/opt/jobs/foreign-recommend/jar/spark-examples_2.11-1.6.0-typesafe-001.jar /opt/jobs/foreign-recommend/foreign_als.py

# Submit the ALS recommendation job to the Spark standalone cluster,
# shipping every jar found in ${PROJECT_JAR_PATH} via --jars.
set -euo pipefail

readonly PROJECT_PATH="/home/yehun/projects/my/recommend-foreign-news"
readonly PROJECT_JAR_PATH="${PROJECT_PATH}/jar"

# Collect dependency jars by globbing — never parse `ls` output (SC2045);
# filenames containing spaces or glob characters would be split/expanded.
shopt -s nullglob
jar_files=("${PROJECT_JAR_PATH}"/*)
shopt -u nullglob

# Fail fast instead of submitting the job with an empty --jars list.
if (( ${#jar_files[@]} == 0 )); then
    printf 'error: no jars found in %s\n' "${PROJECT_JAR_PATH}" >&2
    exit 1
fi

# Join the array with commas (spark-submit expects a comma-separated list).
# IFS is changed only inside the subshell, so the script's IFS is untouched.
JARS=$(IFS=,; printf '%s' "${jar_files[*]}")

printf '%s\n' "${JARS}"
/usr/local/bin/spark-submit \
    --jars "${JARS}" \
    --master spark://10.0.3.93:7077 \
    --executor-memory 1g \
    --num-executors 2 \
    --executor-cores 2 \
    --conf spark.default.parallelism=200 \
    "${PROJECT_PATH}/als.py"
#
#spark-submit --jars /opt/jobs/foreign-recommend/jar/hbase-client-1.4.9.jar,/opt/jobs/foreign-recommend/jar/hbase-common-1.4.9.jar,/opt/jobs/foreign-recommend/jar/hbase-protocol-1.4.9.jar,/opt/jobs/foreign-recommend/jar/hbase-server-1.4.9.jar,/opt/jobs/foreign-recommend/jar/metrics-core-2.2.0.jar,/opt/jobs/foreign-recommend/jar/spark-examples_2.11-1.6.0-typesafe-001.jar --master yarn --executor-memory 1g --num-executors 2 --executor-cores 2 main.py
