import os
import subprocess

from airflow.patsnap.util import ldap


def demote(uid, gid):
    """Return a zero-argument callable that drops privileges to *uid*/*gid*.

    Intended as the ``preexec_fn`` of ``subprocess.Popen`` so the child
    process (e.g. spark-submit) runs as the given user rather than the
    parent's user.

    :param uid: numeric user id the child should run as
    :param gid: numeric group id the child should run as
    :return: function performing the privilege drop, for use in the child
    """

    def exe_cmd():
        # Bug fix: the original passed uid to setgid and gid to setuid
        # (swapped). Also, setgid must run BEFORE setuid — once setuid
        # drops root privileges the process may no longer be permitted
        # to change its group.
        os.setgid(gid)
        os.setuid(uid)

    return exe_cmd


def process_spark_submit_log(itr):
    """Echo every line of a spark-submit log stream to stdout.

    :param itr: iterable of log lines (e.g. a subprocess stdout pipe)
    """
    for raw_line in itr:
        print(raw_line.strip())


# Resolve the numeric uid of the 'test' account from LDAP; the spark-submit
# child process is demoted to this account via preexec_fn below.
uid = ldap.get_user_id('test')
print(uid)

spark_submit_cmd = [
    '/usr/hdp/3.1.5.0-152/spark3/bin/spark-submit',
    '--master', 'yarn',
    '--deploy-mode', 'cluster',
    '--num-executors', '2',
    '--executor-cores', '2',
    '--executor-memory', '2G',
    '--driver-memory', '1g',
    '--name', 'spark-example-pi',
    '--class', 'org.apache.spark.examples.SparkPi',
    '--queue', 'spark3',
    'spark-examples_2.11-2.3.2.3.1.5.0-152.jar',
    '1000',
]

# stderr is merged into stdout so the full log arrives on a single pipe;
# universal_newlines yields text lines rather than raw bytes.
submit_sp = subprocess.Popen(
    spark_submit_cmd,
    stdout=subprocess.PIPE,
    stderr=subprocess.STDOUT,
    bufsize=-1,
    preexec_fn=demote(uid, uid),
    universal_newlines=True,
)

# Stream the child's combined output to our stdout as it is produced.
process_spark_submit_log(submit_sp.stdout)

returncode = submit_sp.wait()
print(returncode)
