#!/bin/bash
#
# Daily Elasticsearch index backup.
# Dumps yesterday's logstash_api-YYYYMMDD index from the local ES node to a
# JSON file under $saveDir via elasticdump, appending a run record to $logDump.
# On failure, elasticdump's own output (stdout+stderr) is appended to the log.

set -u  # fail fast on any other undefined variable

PATH=/usr/local/sbin:/usr/local/bin:/sbin:/bin:/usr/sbin:/usr/bin

saveDir=/data/backup/es
logDump=$saveDir/dump.log
logTmp=$saveDir/tmp.log
esUrl=http://localhost:9200
# Yesterday's index name, e.g. logstash_api-20240101.
# NOTE: the original used an undefined $now in the -d expression; plain
# "-1 days" is the intended (and equivalent) relative date.
index=logstash_api-$(date +"%Y%m%d" -d "-1 days")

url=$esUrl/$index
output=$saveDir/$index.json

# All artifacts (dump file + logs) live under $saveDir.
if [ ! -d "$saveDir" ]; then
	echo "create saveDir $saveDir"
	mkdir -p "$saveDir" || exit 1
fi

# One grouped redirect instead of six appends to the same log file.
{
	echo '--------------------------'
	echo "date    = $(date +"%Y-%m-%d")"
	echo "start   = $(date +"%H:%M:%S")"
	echo "index   = $index"
	echo "url     = $url"
	echo "output  = $output"
} >> "$logDump"

# 2>&1 so elasticdump's error text actually lands in $logTmp and therefore
# in $logDump on failure (previously stderr escaped capture entirely).
if /usr/local/bin/elasticdump --input="$url" --output="$output" --type=data > "$logTmp" 2>&1; then
	# success
	echo "success = $(date +"%H:%M:%S")" >> "$logDump"
else
	# err — keep elasticdump's output for diagnosis
	echo 'fail: dump' >> "$logDump"
	cat "$logTmp" >> "$logDump"
fi

# in crontab
# every day 01:01 begin backup
# may have bugs if this index too large
# (use bash, to match the shebang — the script is not POSIX-sh clean)
# 01 01 * * * /bin/bash /path/to/p2t_prod.sh