#!/bin/bash
#
# Pull yesterday's Elasticsearch index dump from the production rsync
# server and restore it into the local Elasticsearch instance.
# Requires: a reachable rsync daemon exposing module "elastic", and
# elasticdump installed at /usr/local/bin/elasticdump.

PATH=/usr/local/sbin:/usr/local/bin:/sbin:/bin:/usr/sbin:/usr/bin

esUrl=http://localhost:9200
saveDir=/data/backup/es
logSync=$saveDir/rsync.log           # persistent run log
logTmp=$saveDir/syncTmp.log          # per-command output, appended to logSync on failure
# Yesterday's daily index, e.g. logstash_api-20240101.
# Fix: the original used -d "$now -1 days" with $now never defined; the
# intended relative date is simply "-1 days" (GNU date).
index=logstash_api-$(date -d "-1 days" +"%Y%m%d")
url=$esUrl/$index
input=$saveDir/$index.json

# Ensure the backup directory and the persistent log exist.
# Expansions are quoted so empty/space-containing paths fail loudly
# instead of silently mis-parsing the [ ] test.
if [ ! -d "$saveDir" ]; then
	mkdir -p "$saveDir"
fi
if [ ! -f "$logSync" ]; then
	# '>>' creates the file itself, so no separate touch is needed.
	echo 'rsync data then restore' >> "$logSync"
fi

# ---- sync phase ---------------------------------------------------------
echo '--------------------------' >> "$logSync"
echo "date    = $(date +%Y-%m-%d)" >> "$logSync"
echo 'action  = rsync' >> "$logSync"
echo "start   = $(date +%H:%M:%S)" >> "$logSync"
# Pull the dump from the production rsync daemon (module "elastic").
# The daemon must be running on the remote host.
if ! rsync -rIv --password-file=/root/rsync.password \
		rsyncd@14.18.206.136::elastic "$saveDir" > "$logTmp"; then
	echo 'result  = fail' >> "$logSync"
	cat "$logTmp" >> "$logSync"
else
	echo "result  = success $(date +%H:%M:%S)" >> "$logSync"

	# ---- restore phase --------------------------------------------------
	# Only restore when the expected dump file actually arrived.
	if [ -f "$input" ]; then
		echo "index   = $index" >> "$logSync"
		echo "url     = $url" >> "$logSync"
		echo 'action  = remove old index' >> "$logSync"
		# Drop any stale copy of the index first.  Check the HTTP
		# status explicitly: a bare curl exits 0 even on a 4xx/5xx
		# response, so the old `$?` check logged failures as success.
		# 200 = deleted; 404 = index absent, safe to proceed.
		httpCode=$(curl -s -o "$logTmp" -w '%{http_code}' -X DELETE "$url")
		if [ "$httpCode" != "200" ] && [ "$httpCode" != "404" ]; then
			echo 'result  = fail' >> "$logSync"
			cat "$logTmp" >> "$logSync"
		else
			echo "result  = success $(date +%H:%M:%S)" >> "$logSync"
			echo 'action  = restore production data' >> "$logSync"
			# Load the dump into the local Elasticsearch.
			if ! /usr/local/bin/elasticdump --input="$input" \
					--output="$url" --type=data > "$logTmp"; then
				echo 'result  = fail' >> "$logSync"
				cat "$logTmp" >> "$logSync"
			else
				echo "result  = success $(date +%H:%M:%S)" >> "$logSync"
			fi
		fi
	fi
fi



# crontab example — invoke with bash (the script's shebang), not sh:
# 01 03 * * * /bin/bash /path/to/p2t_prod.sh
# runs daily at 03:01; the restore may be slow or fail if the index is
# very large.