#!/usr/bin/python
# -*- coding: UTF-8 -*-

#Run via shell command: su - hdfs -s /bin/bash -c  "spark-submit --master yarn --deploy-mode cluster --executor-cores 1 --executor-memory 1g    hdfs:/user/hdfs/sparktest.py"
#su - hdfs -s /bin/bash -c  "spark-submit --master yarn --deploy-mode cluster --executor-cores 1 --executor-memory 1g --py-files /opt/cloudera/pyspark/sql_conf.py   /opt/cloudera/pyspark/sparktest.py "
#spark2-submit --master yarn --deploy-mode cluster --executor-cores 5 --num-executors 5 --driver-memory 2g --executor-memory 4g --conf spark.default.parallelism=20   --py-files /mnt/disk4/zjs/xy/prod/gsxx/sql_conf.py,/mnt/disk4/zjs/xy/prod/SparkUtil.py /mnt/disk4/zjs/xy/prod/gsxx/gsxx.py
#spark2-submit --master yarn --deploy-mode cluster --executor-cores 5 --num-executors 5 --driver-memory 2g --executor-memory 4g --conf spark.default.parallelism=10   --py-files /mnt/disk4/zjs/xy/prod/conf_gsxx.py,/mnt/disk4/zjs/xy/prod/conf_flss.py,/mnt/disk4/zjs/xy/prod/conf_jyfx.py,/mnt/disk4/zjs/xy/prod/conf_jyzk.py,/mnt/disk4/zjs/xy/prod/conf_jzzz.py,/mnt/disk4/zjs/xy/prod/conf_qynb.py,/mnt/disk4/zjs/xy/prod/conf_ssxx.py,/mnt/disk4/zjs/xy/prod/conf_zscq.py,/mnt/disk4/zjs/xy/prod/conf_hhmd.py,/mnt/disk4/zjs/xy/prod/SparkUtil.py /mnt/disk4/zjs/xy/prod/whole_script_ods_dwd.py
#spark2-submit --master yarn --deploy-mode cluster --executor-cores 5 --num-executors 5 --driver-memory 2g --executor-memory 4g --conf spark.default.parallelism=10   --py-files /mnt/disk4/zjs/xy/prod/conf_gsxx.py,/mnt/disk4/zjs/xy/prod/conf_flss.py,/mnt/disk4/zjs/xy/prod/conf_jyfx.py,/mnt/disk4/zjs/xy/prod/conf_jyzk.py,/mnt/disk4/zjs/xy/prod/conf_jzzz.py,/mnt/disk4/zjs/xy/prod/conf_qynb.py,/mnt/disk4/zjs/xy/prod/conf_ssxx.py,/mnt/disk4/zjs/xy/prod/conf_zscq.py,/mnt/disk4/zjs/xy/prod/conf_hhmd.py,/mnt/disk4/zjs/xy/prod/SparkUtil.py /mnt/disk4/zjs/xy/prod/whole_script_ods_dwd.py

import conf_flss as flss
from SparkUtil import SparkUtil
import time

# Wall-clock start of the whole job; passed into SparkUtil so the helper can
# report elapsed time too.
startTime = time.time()
if __name__ == "__main__":
    # Driver entry point: clean the source (dwd) database, then load the
    # configured "flss" source table into the target database.
    sparkUtil = SparkUtil(startTime)
    # NOTE(review): presumably prepares/selects the source database
    # 'cdm_dwdjq' — confirm against SparkUtil.execDatabaseName.
    sparkUtil.execDatabaseName('cdm_dwdjq')
    databaseName = 'cdm_dwsjq'  # target database for the loaded table

    # "flss" component: load the zhixing dataset defined in conf_flss.
    # Fixed: original used the unbound-style call
    # SparkUtil.loadData(sparkUtil, ...); the normal bound-method call is
    # equivalent and consistent with the other sparkUtil.* calls above.
    sparkUtil.loadData(flss.t_zhixing, databaseName + '.dwd_flss_zhixing')
    # NOTE(review): "distory" looks like a typo for "destroy" in SparkUtil's
    # API (likely stops the Spark session); keep the name as-is — renaming
    # would break the helper's interface.
    sparkUtil.distory()

    # Runtime messages below are user-facing output; kept byte-identical.
    print("--------------xy_ods清洗到xy_dwd完成--------------")
    endTime = time.time()
    print("startTime", startTime, "--->endTime:", endTime)
    print('总共花费时间: {} hours'.format((endTime - startTime) / 3600))