import sys, os, pwd, grp, signal, time, glob
from resource_management import *
from subprocess import call





class Master(Script):
  """Ambari control script for the ALARM-FLINK service master component.

  The Ambari agent instantiates this class and dispatches one public method
  per command: the standard lifecycle hooks (install / configure / start /
  stop / status) plus the custom commands below that submit Flink jobs into
  the running YARN session.

  NOTE: the "pid" file referenced throughout does not hold an OS process id;
  it stores the YARN *application id* of the detached Flink session (see
  start(), which fills it from `yarn application -list`).
  """

  def install(self, env):
    """Download, unpack and configure the alarm-flink distribution.

    Wipes any previous installation, creates the run/log/install/jar
    directories, fetches the tarball (unless a cached copy already exists
    on this node), extracts it and flattens the archive's versioned
    top-level directory, then renders the configuration.
    """
    import params
    import status_params

    # e.g. /var/lib/ambari-agent/cache/stacks/HDP/2.3/services/ALARM-FLINK/package
    service_packagedir = os.path.realpath(__file__).split('/scripts')[0]

    # Start from a clean slate; ignore failure if the dir never existed.
    Execute('rm -rf ' + params.alarm_flink_install_dir, ignore_failures=True)

    Directory([status_params.alarm_flink_pid_dir, params.alarm_flink_log_dir,
               params.alarm_flink_install_dir, params.alarm_flink_jar_dir],
              create_parents=True,
              owner=params.alarm_flink_user,
              group=params.alarm_flink_group
    )

    # Pre-create the log file so the wget/tar commands below can append to it.
    File(params.alarm_flink_log_file,
         mode=0o644,  # 0o form is valid on Python 2.6+ and 3 (was py2-only 0644)
         owner=params.alarm_flink_user,
         group=params.alarm_flink_group,
         content=''
    )

    # User selected option to use prebuilt alarm-flink package.
    Execute('echo Installing packages')

    # Fetch the snapshot build only if no cached tarball exists on this node.
    if not os.path.exists(params.temp_file):
      Execute('wget '+params.alarm_flink_download_url+' -O '+params.temp_file+' -a '  + params.alarm_flink_log_file, user=params.alarm_flink_user)
    # NOTE(review): hadoop > 2.8 used to require copying jersey-core-1.9.jar
    # into flink/lib; re-add a download/copy step here if that applies.
    Execute('tar -zxvf '+params.temp_file+' -C ' + params.alarm_flink_install_dir + ' >> ' + params.alarm_flink_log_file, user=params.alarm_flink_user)
    # The tarball contains a single versioned top-level dir; flatten it.
    Execute('mv '+params.alarm_flink_install_dir+'/*/* ' + params.alarm_flink_install_dir, user=params.alarm_flink_user)

    # Write out the configs selected by the user in Ambari.
    self.configure(env, True)


  def configure(self, env, isInstall=False):
    """Render alarm-flink-conf.yaml into the conf dir from the Ambari template."""
    import params
    import status_params
    env.set_params(params)
    env.set_params(status_params)

    self.set_conf_bin(env)

    properties_content = InlineTemplate(params.alarm_flink_yaml_content)
    File(format("{conf_dir}/alarm-flink-conf.yaml"), content=properties_content, owner=params.alarm_flink_user)


  def stop(self, env):
    """Kill the YARN session recorded in the pid file, then remove the file.

    Idempotent: if the pid file is absent (never started, or already
    stopped) this returns without error instead of crashing on the read,
    which is what the previous unconditional read_file() did.
    """
    import params
    import status_params
    from resource_management.core import sudo
    if os.path.exists(status_params.alarm_flink_pid_file):
      # strip() so a trailing newline from `head -n1 >` never reaches the shell
      app_id = str(sudo.read_file(status_params.alarm_flink_pid_file)).strip()
      if app_id:
        Execute('yarn application -kill ' + app_id, user=params.alarm_flink_user)
      Execute('rm ' + status_params.alarm_flink_pid_file, ignore_failures=True)


  def start(self, env):
    """Launch a detached Flink YARN session and record its application id."""
    import params
    import status_params
    self.set_conf_bin(env)
    self.configure(env)

    self.create_hdfs_user(params.alarm_flink_user)

    Execute('echo bin dir ' + params.bin_dir)
    Execute('echo pid file ' + status_params.alarm_flink_pid_file)
    cmd = format("export HADOOP_CONF_DIR={hadoop_conf_dir};export HADOOP_CLASSPATH=`hadoop classpath`; {bin_dir}/yarn-session.sh -n {alarm_flink_container} -s {alarm_flink_task_slots} -jm {alarm_flink_job_container_memory} -tm {alarm_flink_task_container_Memory} -qu {alarm_flink_queue} -nm {alarm_flink_appname} -d")
    Execute (cmd + format(" >> {alarm_flink_log_file}"), user=params.alarm_flink_user)
    # Persist the YARN application id so status()/stop() can find the session.
    Execute("yarn application -list 2>/dev/null | awk '/" + params.alarm_flink_appname + "/ {print $1}' | head -n1 > " + status_params.alarm_flink_pid_file, user=params.alarm_flink_user)


  def check_alarm_flink_status(self, pid_file):
    """Raise ComponentIsNotRunning unless the recorded YARN app is listed.

    Reads the application id from pid_file and scans the output of
    `yarn application -list` for it. Any verification failure (unreadable
    file, yarn CLI error, ...) is reported as "not running", which is the
    Ambari convention for status probes.
    """
    from resource_management.core.exceptions import ComponentIsNotRunning
    from resource_management.core import sudo
    from subprocess import PIPE, Popen
    import shlex
    if not pid_file or not os.path.isfile(pid_file):
      raise ComponentIsNotRunning()
    try:
      app_id = str(sudo.read_file(pid_file)).strip()
      proc = Popen(shlex.split("/usr/bin/yarn application -list"), stdout=PIPE)
      output = str(proc.communicate()[0])
      # An empty pid file must count as "not running" — the old
      # output.find("") == 0 check silently reported it as running.
      if not app_id or output.find(app_id) < 0:
        raise ComponentIsNotRunning()
    except Exception:
      raise ComponentIsNotRunning()

  def status(self, env):
    """Ambari status probe; raises if the YARN session is not running."""
    import status_params
    self.check_alarm_flink_status(status_params.alarm_flink_pid_file)

  def set_conf_bin(self, env):
    """Derive conf/ and bin/ paths from the install dir and stash in params."""
    import params
    params.conf_dir = params.alarm_flink_install_dir + '/conf'
    params.bin_dir = params.alarm_flink_install_dir + '/bin'

  def install_mvn_repo(self):
    """Install the EPEL Apache Maven yum repo on centos/RHEL 6/7 if missing."""
    # BUG FIX: 'platform' was used without ever being imported, so this
    # helper always raised NameError when called.
    import platform
    distribution = platform.linux_distribution()[0].lower()
    if distribution in ['centos', 'redhat'] and not os.path.exists('/etc/yum.repos.d/epel-apache-maven.repo'):
      Execute('curl -o /etc/yum.repos.d/epel-apache-maven.repo https://repos.fedorapeople.org/repos/dchen/apache-maven/epel-apache-maven.repo')

  def create_hdfs_user(self, user):
    """Create /user/<user> in HDFS and hand ownership to that user."""
    Execute('hadoop fs -mkdir -p /user/'+user, user='hdfs', ignore_failures=True)
    Execute('hadoop fs -chown ' + user + ' /user/'+user, user='hdfs')
    Execute('hadoop fs -chgrp ' + user + ' /user/'+user, user='hdfs')

  def _submit_flink_job(self, env, main_class):
    """Submit the service jar to the running YARN session.

    Shared implementation for the five custom job commands below.
    main_class is the fully-qualified Flink job entry point.
    """
    from resource_management.core import sudo
    import params
    import status_params
    env.set_params(params)
    env.set_params(status_params)
    # resource_management's format() interpolates from the caller's locals,
    # so 'pid' must be a local variable here for {pid} to resolve.
    pid = str(sudo.read_file(status_params.alarm_flink_pid_file)).strip()
    cmd = format("export HADOOP_CONF_DIR={hadoop_conf_dir};export HADOOP_CLASSPATH=`hadoop classpath`; {alarm_flink_install_dir}/bin/flink run -yid {pid} -d -c " + main_class + " {alarm_flink_jar_dir}/{alarm_flink_jar_name}")
    Execute(cmd, user=params.alarm_flink_user)

  def monitorjob(self, env):
    """Custom command: start the Lens monitor job in the YARN session."""
    self._submit_flink_job(env, "com.ds.lens.flink.monitor.monitorjob.LensMonitorJob")

  def notifyjob(self, env):
    """Custom command: start the monitor event-sink (notification) job."""
    self._submit_flink_job(env, "com.ds.lens.flink.monitor.eventsinkjob.MonitorEventSinkJob")

  def metricsjob(self, env):
    """Custom command: start the Lens metrics job."""
    self._submit_flink_job(env, "com.ds.lens.flink.monitor.metricsjob.LensMetricsJob")

  def binlogjob(self, env):
    """Custom command: start the Lens data-source (binlog) job."""
    self._submit_flink_job(env, "com.ds.lens.flink.monitor.datasource.LensDataSourceJob")

  def external_event_job(self, env):
    """Custom command: start the Lens external-event job."""
    self._submit_flink_job(env, "com.ds.lens.flink.monitor.external.event.LensExternalEventJob")

  def updatejar(self, env):
    """Custom command: delete the deployed jar, then run the refresh script."""
    import params
    env.set_params(params)
    Execute("rm -f " + params.alarm_flink_jar_dir + "/" + params.alarm_flink_jar_name, ignore_failures=True)
    Execute(params.alarm_flink_jar_file)

if __name__ == "__main__":
  # Ambari invokes this script directly; dispatch the requested command.
  master = Master()
  master.execute()
