#!/bin/bash
# Installer for an AGG Oozie workflow: derives the workflow name from the
# current directory, builds wf.properties, and uploads job artifacts to HDFS.

# The workflow name is the basename of the directory this script is run from.
here=$(pwd)
AGG_WORKFLOW=$(basename "$here")

if [ -z "$AGG_WORKFLOW" ]; then
	echo "Please set the AGG_WORKFLOW variable in $0."
	exit 1
fi

# Resolve the installation layout relative to this script's location.
# `cd ... && pwd` leaves AGG_WORKING_DIR empty (instead of silently using the
# current directory) if the parent directory cannot be entered.
bin=$(dirname "$0")
AGG_WORKING_DIR=$(cd "$bin/.." && pwd)
AGG_CONF_DIR=$AGG_WORKING_DIR/conf
AGG_WORKFLOW_CONF_DIR=$AGG_WORKING_DIR/$AGG_WORKFLOW/conf
AGG_WF_PROP=$AGG_WORKING_DIR/$AGG_WORKFLOW/wf.properties
AGG_HIVE_CONF=$AGG_CONF_DIR/hive.conf

# Top level configurations to add to workflow.
# NOTE(review): conf_switch.sh presumably defines SERVICE_NAME,
# APPLICATION_NAME, DATASET, appLevelConf, topLevelConf and hiveConf -- confirm.
if ! source "$AGG_CONF_DIR/conf_switch.sh"; then
	echo "Error: install.sh could not source $AGG_CONF_DIR/conf_switch.sh" >&2
	exit 1
fi

# Check service constants.
# Abort the install if the variable named by $1 is unset or empty.
require_setting() {
	local name=$1
	if [ -z "${!name}" ]; then
		echo "Please set the $name variable in ${AGG_WORKFLOW_CONF_DIR}/${appLevelConf}."
		exit 1
	fi
}
require_setting SERVICE_NAME
require_setting APPLICATION_NAME
require_setting DATASET

# Get AGG specific installation settings.
if ! source "$AGG_WORKFLOW_CONF_DIR/install.conf"; then
	echo "Error: install.sh could not source $AGG_WORKFLOW_CONF_DIR/install.conf" >&2
	exit 1
fi

# Verify settings with user: show everything picked up from the conf files
# before asking for confirmation below.
cat <<EOF
Service/Workflow settings
--------------------------
SERVICE_NAME: $SERVICE_NAME
APPLICATION_NAME: $APPLICATION_NAME
DATASET: $DATASET
WORKFLOW: $AGG_WORKFLOW
HIVE_DATABASE: $HIVE_DATABASE

Workflow Application settings
------------------------------
AGG_RELEASE: $AGG_RELEASE
TYPES_RELEASE: $TYPES_RELEASE
EB_RELEASE: $EB_RELEASE
PROTOBUF_RELEASE: $PROTOBUF_RELEASE
GUAVA_RELEASE: $GUAVA_RELEASE
EOF

# Tell the user where settings can be changed, then stop the install.
# Invoked when the xverify confirmation prompt below is declined.
function echoSettings {
	echo "Settings can be adjusted in:"
	echo "  1. ${AGG_WORKFLOW_CONF_DIR}/${appLevelConf}"
	echo "  2. ${AGG_CONF_DIR}/${topLevelConf}"
	echo "  3. ${AGG_CONF_DIR}/${hiveConf}"
	echo "  4. ${AGG_WORKFLOW_CONF_DIR}/install.conf"
	exit 0
}

# Confirm with the operator before touching HDFS; on decline, echoSettings
# prints the editable config locations and exits 0.
# NOTE(review): xverify is presumably a yes/no prompt helper supplied by the
# surrounding environment (prompt text plus default/answer flags) -- confirm.
xverify "Proceed with install?, continue" y N N || echoSettings

# Where we are going.
# Target HDFS directory that receives this workflow's Oozie job artifacts.
jobsDir=/services/$SERVICE_NAME/jobs/oozie/$AGG_WORKFLOW

# Build workflow properties file.
# The \${var} sequences below are escaped on purpose: they are written
# literally into wf.properties and resolved later by Oozie's property/EL
# expansion, not by this shell script.
cat > "${AGG_WF_PROP}" << EOF
# job location
WORKFLOW=$AGG_WORKFLOW
pathOnHdfs=\${nameNode}/services/\${SERVICE_NAME}/jobs/oozie/\${WORKFLOW}
# oozie settings
oozie.wf.application.path=\${pathOnHdfs}
oozie.libpath=/sharedlib/
debugEnabled=false
# workflow variables
inputDir_service_usage=\${nameNode}/services/\${SERVICE_NAME}/\${APPLICATION_NAME}/\${DATASET}/data/\${partition}
outputDir_service_usage=\${nameNode}/services/\${SERVICE_NAME}/\${APPLICATION_NAME}/\${DATASET}/derived/\${WORKFLOW}/\${partition}
outputDir_service_usageUSA2=\${nameNode}/services/\${SERVICE_NAME}/\${APPLICATION_NAME}/\${DATASET}/derived/\${WORKFLOW}USA2/\${partition}
outputDir_service_usageUSA3=\${nameNode}/services/\${SERVICE_NAME}/\${APPLICATION_NAME}/\${DATASET}/derived/\${WORKFLOW}USA3/\${partition}
position_network_type=\${nameNode}/services/\${SERVICE_NAME}/jobs/oozie/\${WORKFLOW}/map_tables/dim_position_network_type.tsv
position_platform_type=\${nameNode}/services/\${SERVICE_NAME}/jobs/oozie/\${WORKFLOW}/map_tables/dim_position_platform_type.tsv
position_protocol_type=\${nameNode}/services/\${SERVICE_NAME}/jobs/oozie/\${WORKFLOW}/map_tables/dim_position_protocol_type.tsv
operatorCountry=\${nameNode}/services/\${SERVICE_NAME}/jobs/oozie/\${WORKFLOW}/map_tables/mcc2country.tsv
operatorName=\${nameNode}/services/\${SERVICE_NAME}/jobs/oozie/\${WORKFLOW}/map_tables/MccMncOperatorCountry.tsv
EOF

# Add stuff to be sourced by hive tools.
# NOTE(review): hive_aux_jars_path is presumably set by install.conf -- confirm.
echo "HIVE_DATABASE=$HIVE_DATABASE" >> "$AGG_HIVE_CONF"
echo "export HIVE_AUX_JARS_PATH=${hive_aux_jars_path}" >> "$AGG_HIVE_CONF"

# Create log dir.
# NOTE(review): 777 is presumably needed because jobs run as other users and
# must write here -- consider a tighter mode if that is not the case.
if [ ! -d "$AGG_LOG_DIR" ]; then
	mkdir -p "$AGG_LOG_DIR"
	chmod 777 "$AGG_LOG_DIR"
fi

# Abort the install unless every listed file exists locally.
# $1 is the label used in the error message; remaining args are file paths.
check_artifacts() {
	local label=$1
	shift
	local f
	for f in "$@"; do
		if [ ! -f "$f" ]; then
			echo "Error: install.sh Could not find $label $f"
			exit 1
		fi
	done
}

# Make sure all artifact files are present.
check_artifacts "AGG artifact" "${files[@]}"
# make sure all libraries are present.
check_artifacts "lib artifact" "${libfiles[@]}"
# make sure all map_table (tsv files) are present.
check_artifacts "map_table (tsv file) artifact" "${map_tablefiles[@]}"

# Create the directory structure for jobs: remove any previous install of
# this workflow, then recreate the job directory on HDFS.
echo "hdfs dfs -rm -r ${jobsDir}"
rmro=$(hdfs dfs -rm -r "${jobsDir}" 2>&1)
# A missing directory is fine on a first install -- just note it and move on.
if printf '%s\n' "$rmro" | grep -q "No such file or directory"; then
	echo "${jobsDir} does not exist, continuing."
fi
echo "hdfs dfs -mkdir ${jobsDir}"
hdfs dfs -mkdir "${jobsDir}"

# Echo and run `hdfs dfs -put` for each file.
# $1 is the destination directory on HDFS; remaining args are local files.
put_artifacts() {
	local dest=$1
	shift
	local f
	for f in "$@"; do
		echo "hdfs dfs -put $f ${dest}"
		hdfs dfs -put "$f" "${dest}"
	done
}

put_artifacts "${jobsDir}/" "${files[@]}"

echo "hdfs dfs -mkdir ${jobsDir}/lib"
hdfs dfs -mkdir "${jobsDir}/lib"
put_artifacts "${jobsDir}/lib" "${libfiles[@]}"

echo "hdfs dfs -mkdir ${jobsDir}/map_tables"
hdfs dfs -mkdir "${jobsDir}/map_tables"
put_artifacts "${jobsDir}/map_tables" "${map_tablefiles[@]}"

# Final step: print the cron entries the operator must add to the scheduler
# script by hand (run aggregation, then export each derived dataset to the DB).
cat <<EOF
=================================================
Add the following entries to the scheduler script
=================================================
${AGG_BIN_DIR}/cron-run.sh /services/${SERVICE_NAME}/${APPLICATION_NAME}/${DATASET}/data 0 0 0 3 ${AGG_WORKFLOW} wf.properties
${AGG_BIN_DIR}/cron-hdfs2db.sh 0 0 0 3 ${AGG_WORKFLOW} /services/${SERVICE_NAME}/${APPLICATION_NAME}/${DATASET}/derived/${AGG_WORKFLOW}
${AGG_BIN_DIR}/cron-hdfs2db.sh 0 0 0 3 ${AGG_WORKFLOW} /services/${SERVICE_NAME}/${APPLICATION_NAME}/${DATASET}/derived/${AGG_WORKFLOW}USA2
${AGG_BIN_DIR}/cron-hdfs2db.sh 0 0 0 3 ${AGG_WORKFLOW} /services/${SERVICE_NAME}/${APPLICATION_NAME}/${DATASET}/derived/${AGG_WORKFLOW}USA3
EOF
