#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import uuid
import warnings

from pack.packing import Packing
from logger_util import get_logger

# Module-level logger shared by this module (project-local logging helper).
logger = get_logger()


class SparkSubmitExecutor:
    """Builds spark-submit shell scripts for local or YARN execution.

    Configured fluently::

        path = (SparkSubmitExecutor()
                .env_conf(spark_home="/opt/spark", yarn_conf_dir="/etc/hadoop/conf")
                .yarn_conf(deploy_mode="cluster")
                .run_on_yarn("deps.py", "main.py"))

    Each ``run_on_*`` call writes a bash script into the local ``tmp``
    directory and returns its path; nothing is executed directly.
    """

    # Directory (relative to the CWD) that receives the generated scripts.
    _SHELL_DIR = "tmp"

    def __init__(self):
        # exist_ok avoids the check-then-create race of the original code.
        os.makedirs(self._SHELL_DIR, exist_ok=True)
        # Submission parameters; populated by env_conf()/yarn_conf().
        self.master = None
        self.deploy_mode = None
        self.name = None
        # The options below are accepted for completeness but are not yet
        # rendered into the generated command line.
        self.jars = None
        self.packages = None
        self.exclude_packages = None
        self.repositories = None
        self.conf = None
        self.properties_file = None
        self.driver_memory = None
        self.driver_java_options = None
        self.driver_library_path = None
        self.driver_cores = None
        self.executor_memory = None
        self.total_executor_cores = None
        self.num_executors = None
        self.executor_cores = None
        self.spark_home = None
        self.yarn_conf_dir = None

    def env_conf(self, spark_home=None, yarn_conf_dir=None):
        """Set SPARK_HOME / YARN_CONF_DIR overrides; returns self (fluent)."""
        self.spark_home = spark_home
        self.yarn_conf_dir = yarn_conf_dir
        return self

    def yarn_conf(self, deploy_mode=None, driver_memory=None, driver_cores=None,
                  executor_memory=None, num_executors=None, executor_cores=None):
        """Configure YARN submission resources; returns self (fluent).

        Any argument left as ``None`` falls back to a modest default:
        cluster deploy mode, 1G/1-core driver, two 1G/2-core executors.
        """
        self.master = "yarn"
        self.deploy_mode = "cluster" if deploy_mode is None else deploy_mode
        self.driver_memory = "1G" if driver_memory is None else driver_memory
        self.driver_cores = "1" if driver_cores is None else driver_cores
        self.executor_memory = "1G" if executor_memory is None else executor_memory
        self.num_executors = "2" if num_executors is None else num_executors
        self.executor_cores = "2" if executor_cores is None else executor_cores
        return self

    @staticmethod
    def __join_py_files(py_files_path):
        """Normalize a str or list of paths to one comma-separated string.

        Raises TypeError (a subclass of Exception, so existing callers that
        caught the original bare Exception still work) for any other type.
        """
        if isinstance(py_files_path, str):
            return py_files_path
        if isinstance(py_files_path, list):
            return ",".join(py_files_path)
        raise TypeError(" py_files_path class type error")

    def __get_run_shell(self, py_files_path, main_py_path, local):
        """Render the spark-submit command line.

        py_files_path may be None/empty, in which case --py-files is omitted.
        Replaces the three near-duplicate builders of the original.
        """
        # BUGFIX: the local variant previously used "{spark_home}/spark-submit",
        # missing the /bin segment used by every other variant.
        parts = ["{0}/bin/spark-submit".format(self.spark_home),
                 "--name", str(self.name)]
        if local:
            parts += ["--master", "local"]
        else:
            parts += ["--master", str(self.master),
                      "--deploy-mode", str(self.deploy_mode)]
        parts += ["--driver-memory", str(self.driver_memory),
                  "--driver-cores", str(self.driver_cores),
                  "--executor-memory", str(self.executor_memory),
                  "--num-executors", str(self.num_executors),
                  "--executor-cores", str(self.executor_cores)]
        if py_files_path:
            parts += ["--py-files", py_files_path]
        parts.append(main_py_path)
        return " ".join(parts)

    def __resolve_spark_home(self):
        """Fall back to $SPARK_HOME when env_conf() was not given one."""
        if self.spark_home is None:
            self.spark_home = os.environ['SPARK_HOME']
            logger.warning(" Warning: spark_home is empty, use default SPARK_HOME ")

    def __resolve_yarn_env(self):
        """Fall back to $YARN_CONF_DIR / $SPARK_HOME for unset settings."""
        if self.yarn_conf_dir is None:
            self.yarn_conf_dir = os.environ['YARN_CONF_DIR']
            logger.warning(" Warning: yarn_conf_dir is empty, use default YARN_CONF_DIR ")
        self.__resolve_spark_home()

    def run_on_local(self, py_files_path, main_py_path):
        """Write a local-master submit script and return its path.

        py_files_path: a comma-separated string or a list of paths shipped
        via --py-files.  Raises TypeError for any other type.
        """
        self.__resolve_spark_home()
        self.name = str(uuid.uuid4())
        py_files = self.__join_py_files(py_files_path)
        return self.__build_shell_file(
            self.__get_run_shell(py_files, main_py_path, local=True))

    def run_on_yarn(self, py_files_path, main_py_path):
        """Write a YARN submit script and return its path.

        py_files_path: None (no --py-files), a comma-separated string, or —
        generalized from the original str-only behavior — a list of paths.
        Each path is packed via Packing.packing_path before submission.
        """
        if py_files_path is not None:
            joined = self.__join_py_files(py_files_path)
            py_files = ",".join(Packing.packing_path(p) for p in joined.split(","))
        else:
            py_files = None
        # BUGFIX: the original returned early for py_files_path=None without
        # setting self.name or resolving env defaults, producing a script
        # named ..._None.sh containing "--name None".
        self.__resolve_yarn_env()
        self.name = str(uuid.uuid4())
        return self.__build_shell_file(
            self.__get_run_shell(py_files, main_py_path, local=False))

    def __build_shell_file(self, shell):
        """Write the submit command into a bash script and return its path."""
        # NOTE(review): the original wrote to absolute "/tmp/..." even though
        # __init__ only ever created the local "tmp" directory; the local
        # directory is assumed to be the intended destination — confirm.
        shell_file = os.path.join(self._SHELL_DIR,
                                  "spark_submit_executor_" + self.name + ".sh")
        with open(shell_file, "w") as f:
            f.write("#!/bin/bash\n")
            f.write("\n")
            f.write("export SPARK_HOME={spark_home}\n".format(spark_home=self.spark_home))
            f.write("export YARN_CONF_DIR={yarn_conf_dir}\n".format(yarn_conf_dir=self.yarn_conf_dir))
            f.write("\n")
            f.write(shell)
        return shell_file
