#!/usr/local/bin/python
# -*- coding: UTF-8 -*-



import datetime
import json
import os
import random
import shlex
import subprocess
import sys
import time
# Python 2-only hack: force the process-wide default encoding to UTF-8 so
# implicit str<->unicode conversions don't raise UnicodeDecodeError.
# reload() re-exposes sys.setdefaultencoding, which site.py deletes at
# startup; neither call exists in Python 3.
reload(sys)
sys.setdefaultencoding('utf-8')

# fill hql template with params
def fill_hql_template(hqlfile, paramsStr):
    """Read an HQL template file and substitute ${key} placeholders.

    hqlfile   -- path of the template file.
    paramsStr -- "key1=value1;key2=value2" pairs; each value is passed
                 through etl_date_adaptor() so the D|-1 / D|1 date macros
                 are expanded before substitution.
    Returns the filled-in HQL text (also echoed to stdout for the logs).
    """
    # context manager closes the handle even on error (the original
    # open().read() leaked the file descriptor)
    with open(hqlfile, "r") as f:
        hql_source = f.read()

    paramsDic = {}
    for param in paramsStr.split(";"):
        # split on the FIRST '=' only, so '=' inside a value survives
        pair = param.split("=", 1)
        if len(pair) != 2:
            # skip empty/malformed fragments instead of raising IndexError
            continue
        paramsDic[pair[0]] = etl_date_adaptor(pair[1])

    hql_target = hql_source
    for key, value in paramsDic.items():
        hql_target = hql_target.replace('${%s}' % (key), value)

    print("hql_template :\r\n %s" % (hql_source))
    print("hql_target : \r\n %s" % (hql_target))
    return hql_target



def etl_date_adaptor(etl_date_str):
    """Expand ETL date macros into concrete dates.

    'D|-1' -> yesterday's date as 'YYYY-MM-DD'
    'D|1'  -> today's date as 'YYYY-MM-DD'
    Any other value is returned unchanged.
    """
    now_time = datetime.datetime.now()
    # The original used `not cmp(a, b)` to test equality; cmp() is a
    # Python 2-only builtin, so use plain == (identical behavior).
    if etl_date_str == 'D|-1':
        yesterday = now_time - datetime.timedelta(days=1)
        return yesterday.strftime('%Y-%m-%d')
    if etl_date_str == 'D|1':
        return now_time.strftime('%Y-%m-%d')
    return etl_date_str

def paraseAZPropertyFile():
    """Parse the Azkaban job property file into a dict.

    Reads the file named by the JOB_PROP_FILE environment variable.
    Lines starting with '#' are comments; other non-blank lines are
    treated as key=value.  Returns {key: value} with both sides stripped.
    """
    filepath = os.environ["JOB_PROP_FILE"]
    resultDic = {}
    print("JOB_PROP_FILE  is :", filepath)

    with open(filepath, 'r') as f:
        for line in f:
            stripped = line.strip()
            # blank lines previously raised IndexError on kv[1]; skip them
            if not stripped or stripped.startswith("#"):
                continue
            # partition on the FIRST '=' so values containing '=' are
            # kept whole (split("=") truncated them)
            key, sep, value = stripped.partition("=")
            if not sep:
                # line has no '=' at all: skip instead of crashing
                continue
            resultDic[key.strip()] = value.strip()

    print(resultDic)
    return resultDic


def generateAzSparkConfStr():
    """Build the --conf spark.driver.extraJavaOptions argument that links
    a Spark job back to its Azkaban workflow/execution pages.

    Reads the Azkaban flow properties via paraseAZPropertyFile() and
    returns the quoted --conf string ready to append to spark-submit.

    NOTE(review): azkaban.flow.execid is used both as the `job` name in
    job_url and as the execid in execution_url -- looks suspicious;
    confirm against the Azkaban property naming before changing.
    """
    azDic = paraseAZPropertyFile()

    # .split("\n")[0] defensively drops anything after an embedded newline
    project = azDic.get("azkaban.flow.projectid", "").split("\n")[0]
    flow = azDic.get("azkaban.flow.flowid", "").split("\n")[0]
    job = azDic.get("azkaban.flow.execid", "").split("\n")[0]
    workflow_url = "http://azkaban.10101111.com/manager?project=%s&flow=%s"%(project,flow)
    job_url = "http://azkaban.10101111.com/manager?project=%s&flow=%s&job=%s"%(project,flow,job)
    execution_url = "http://azkaban.10101111.com/executor?execid=%s"%(job)
    attempt_url = execution_url
    job_id = job

    confstr = '--conf "spark.driver.extraJavaOptions=-Dazkaban.link.workflow.url=%s  -Dazkaban.link.job.url=%s  \
                       -Dazkaban.link.execution.url=%s  -Dazkaban.link.attempt.url=%s  -Dazkaban.job.id=%s"'\
                        %(workflow_url,job_url,execution_url,attempt_url,job_id)

    # log label fixed: the original printed "confstr%s" with no separator
    print("confstr: %s" % (confstr))

    return confstr

def passParams2AZjobs(dic):
    """Write `dic` as JSON to the Azkaban output property file so its
    values are handed to downstream jobs.

    The destination path comes from the JOB_OUTPUT_PROP_FILE environment
    variable.  The original built the JSON by hand and left string
    values unquoted (dic={"bb":"22"} produced {"bb":22}), which is not
    valid JSON; json.dumps emits correct output for every value type.
    """
    para_file = os.environ["JOB_OUTPUT_PROP_FILE"]
    print("JOB_OUTPUT_PROP_FILE is :", para_file)

    with open(para_file, 'w+') as f:
        f.write(json.dumps(dic))

def execcmd(user, cmd):
    """Run `cmd` through the shell and return its exit code.

    `user` is used only for the log line; the command itself executes as
    the current process user.
    """
    # NOTE(review): shell=True with a caller-supplied command string is a
    # shell-injection risk if `cmd` ever comes from untrusted input.
    exit_code = subprocess.call(cmd.encode('utf-8'), shell=True)
    print("user: %s ,exec cmd is: %s,\n return code :%s " % (user, cmd, exit_code))
    return exit_code

def exec_cmd_main(user, cmd):
    """Run `cmd` through the shell, writing its combined stdout/stderr to
    a per-invocation log file, and return the exit code.

    A file named execmd_logfile_<user>_<millis>_<rand>.txt is created in
    the current directory and receives the child's output.
    """
    print("exec cmd is: %s" % (cmd))
    rint = random.randint(1001, 9999)
    # int(time.time()) * 1000 replaces strftime("%s"), a non-portable
    # (glibc-only) format code, as the millisecond-ish timestamp.
    starttime = int(time.time()) * 1000
    requestid = "%s_%s_%s" % (user, starttime, rint)
    print(requestid)

    # context manager closes the log file (the original leaked the handle)
    with open('execmd_logfile_%s.txt' % (requestid), 'w') as log_file:
        p = subprocess.Popen(cmd.encode('utf-8'), shell=True,
                             stdout=log_file, stderr=subprocess.STDOUT)
        # stdout/stderr are redirected to the file, so communicate()
        # returns (None, None); the original then crashed with
        # AttributeError calling .decode() on None.  We only need
        # communicate() to wait for the child to finish.
        p.communicate()

    return p.returncode

def exec_cmd(cmd):
    """Run `cmd` through the shell, echo its stdout and stderr line by
    line to our own stdout, and return the exit code.

    Unlike exec_cmd_main(), the child's output is captured via pipes and
    re-printed by this process instead of being written to a log file.
    """
    print("exec cmd is: %s" % (cmd))

    p = subprocess.Popen(cmd.encode('utf-8'), shell=True,
                         stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    (stdoutdata, stderrdata) = p.communicate()

    # echo the child's output; split("\n") deliberately mirrors the
    # original behavior, including the trailing empty line that follows
    # a final newline in the output
    for stream in (stdoutdata, stderrdata):
        for line in stream.decode("utf-8").split("\n"):
            print(line)

    # the original also accumulated the lines into locals (stdoutList /
    # resultList) that were never used -- dead code, dropped
    return p.returncode

if __name__ == "__main__":
    # Ad-hoc smoke test: point the parser at local files and run it.
    # NOTE(review): assumes test.txt exists in the working directory --
    # otherwise paraseAZPropertyFile() raises IOError opening it.
    os.environ["JOB_PROP_FILE"] = "test.txt"
    paraseAZPropertyFile()
    os.environ["JOB_OUTPUT_PROP_FILE"] = "test1.txt"
    # NOTE(review): `dic` is built but never passed to passParams2AZjobs()
    # -- looks like an unfinished test step; confirm intent.
    dic = {"aa":111,"bb":"22"}
    generateAzSparkConfStr()
