#!/home/hadoop/anaconda2/bin/python
# -*- coding: utf-8 -*-
"""Shared constants for the Gavial job framework.

Defines the Gavial service API endpoint paths, the client-side Hadoop/HDFS
high-availability configuration, and path templates for Kerberos ticket
caches and the Hive warehouse.
"""
import os

# Absolute path of the directory containing this module; lets other modules
# resolve files shipped alongside it regardless of the current working dir.
BASE_DIR = os.path.dirname(os.path.abspath(__file__))

# GAVIAL API CONSTANTS
# Relative URL paths on the Gavial service (joined with a host elsewhere).
URL_GET_JOB_CONF = '/api/job/config/get'
URL_GET_JOB_RUNTIME_CONF = '/api/job/config/runtime/get'
URL_SAVE_JOB_RUNTIME_CONF = '/api/job/config/runtime/save'
URL_SAVE_JOB_STATE = '/api/job/state/save'
URL_GET_JOB_STATE = '/api/job/state/get'
URL_SAVE_JOB_OUTPUT_PARAM = '/api/job/output/save'
URL_GET_JOB_OUTPUT_PARAM = '/api/job/output/get'
URL_GET_TASK_OUTPUT_PARAM = '/api/task/output/get'
URL_SEND_MESSAGE = '/api/job/message/send'
URL_SAVE_TABLE_INFO = '/api/table/save'
URL_GET_APP_VARIABLE = '/api/app/variable/get'

# HADOOP CONF
# Client-side HDFS settings for a Kerberos-secured HA NameNode pair behind
# the nameservice id 'ns' (nn1/nn2 RPC on :9000, HTTP on :50070).
HADOOP_CONF = {
    'dfs.nameservices': 'ns',
    'hadoop.security.authentication': 'kerberos',
    'dfs.ha.namenodes.ns': 'nn1,nn2',
    'dfs.namenode.rpc-address.ns.nn2': 'namenode-2:9000',
    'dfs.namenode.rpc-address.ns.nn1': 'namenode-1:9000',
    'dfs.namenode.http-address.ns.nn2': 'namenode-2:50070',
    'dfs.namenode.http-address.ns.nn1': 'namenode-1:50070',
}

# TICKET CACHE FORMAT
# Kerberos ticket-cache path template; takes three positional .format() args.
# NOTE(review): the semantics of the three slots (e.g. user/realm/job id) are
# not visible here — confirm against the code that formats this.
TICKET_CACHE_FORMAT = '/tmp/krb5cc__{0}__{1}__{2}__gavial'

# Hive warehouse location template on the 'ns' nameservice; the single
# .format() argument is the table/database path under the warehouse root.
# "DATAWOUSE" is a long-standing typo for "DATA_WAREHOUSE"; the misspelled
# name is kept so existing callers keep working.
GAVIAL_HIVE_DATAWOUSE = "hdfs://ns/hive/warehouse/{0}"
# Correctly-spelled, backward-compatible alias — prefer this in new code.
GAVIAL_HIVE_DATA_WAREHOUSE = GAVIAL_HIVE_DATAWOUSE