#!/bin/bash
# Submit the insurance main job to YARN via spark-submit.
#
# Deploy mode is 'client': the driver runs on this machine and logs stream to
# this terminal; executors run on the cluster (2 executors x 2 cores x 512m).
#
# Requires: a configured Hadoop/YARN client environment and the Spark install
# at /export/server/spark.
set -euo pipefail

# NOTE(review): the original wrote --conf 'spark.sql.shuffle.partitions'=8,
# which only worked via shell quote-concatenation; the plain key=value form
# below is the conventional, equivalent spelling.
/export/server/spark/bin/spark-submit \
  --master yarn \
  --deploy-mode client \
  --conf spark.sql.shuffle.partitions=8 \
  --driver-memory 512m \
  --driver-cores 1 \
  --executor-memory 512m \
  --executor-cores 2 \
  --num-executors 2 \
  --queue default \
  /export/data/workspace/itcast_insurance/main/_insurance_main.py