# -*- coding: utf-8 -*-
"""
execute a shell script in a single node

Usually we specify following spark configurations:
spark.master=yarn-cluster
spark.driver.memory=10g
spark.driver.cores=8
spark.yarn.maxAppAttempts=1
spark.yarn.appMasterEnv.PYSPARK_PYTHON=/home/haizhi/miniconda2/bin/python
spark.yarn.appMasterEnv.PYSPARK_DRIVER_PYTHON=/home/haizhi/miniconda2/bin/python
spark.yarn.appMasterEnv.PYTHONIOENCODING=utf_8
"""

from __future__ import unicode_literals
from __future__ import absolute_import
from __future__ import division

import sys

from subprocess import call
from kgpipeline.sparkutil import init_spark


def main():
    """Initialize Spark, then run the command given on the CLI.

    The first positional argument and everything after it are executed
    as a subprocess (argument-list form, no shell); the script exits
    with the subprocess's return code.

    Raises:
        SystemExit: with a usage message when no command is given,
            otherwise with the subprocess's exit status.
    """
    # Fail fast with a usage message before touching Spark.
    if len(sys.argv) < 2:
        sys.exit("usage: %s COMMAND [ARG ...]" % sys.argv[0])
    init_spark()
    # sys.exit (not the interactive `exit` builtin) propagates the
    # child's return code as this script's exit status.
    sys.exit(call(sys.argv[1:]))


if __name__ == "__main__":
    main()