#!/bin/sh
#
# Create a big Volume (> 5 GB) and back up to it to test disk
#   seeking on big volumes.  We cheat and artificially grow the
#   Volume instead of writing that much real data.
#
TestName="big-vol-test"
JobName=bigvol
. scripts/functions
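# scripts/functions is part of the regress framework; it is expected to
# provide the helpers used below (start_test, run_bacula, run_bconsole,
# check_two_logs, end_test, ...) and variables such as ${cwd} and ${tmp}.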

scripts/cleanup
scripts/copy-test-confs

# The copied confs define a job named CompressedTest; rename it to $JobName.
change_jobname CompressedTest $JobName

# Directory to backup.
# This directory will be created by setup_data().
BackupDirectory="${tmp}/data"

# Use a tgz to setup data to be backed up.
# Data will be placed at "${tmp}/data/".
setup_data data/small.tgz

start_test

cat <<END_OF_DATA >${cwd}/tmp/bconcmds
@$out /dev/null
messages
@$out ${cwd}/tmp/log1.out
@#setdebug level=100 storage=File
label storage=File volume=TestVolume001
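@# The labeled Volume should appear as the plain file ${tmp}/TestVolume001,
@# the file we grow with truncate below.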
run job=$JobName yes
wait
messages
quit
END_OF_DATA

# Start the Bacula daemons and feed them the commands above: label a
# File Volume and run the first backup.
run_bacula
check_for_zombie_jobs storage=File

#
# Now increase the size of the Volume
#
# Grow the Volume file to about 5.2 GB
size=5200000000
truncate -s ${size} ${tmp}/TestVolume001
if [ $? -ne 0 ]; then
   echo "truncate of ${tmp}/TestVolume001 failed."
   exit 1
fi
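
# truncate extends the file sparsely, so on filesystems with sparse-file
# support this does not consume 5 GB of real disk.  If truncate(1) is
# missing, the same sparse grow should work with the GNU dd idiom below
# (an untested alternative, not used by this test):
#   dd if=/dev/zero of=${tmp}/TestVolume001 bs=1 count=0 seek=${size}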

cat <<END_OF_DATA >${cwd}/tmp/bconcmds
@$out /dev/null
messages
@$out ${cwd}/tmp/log1.out
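@# The Volume file was grown on disk with truncate; update the catalog
@# so its idea of the Volume size (VolBytes, the byte count recorded
@# for the Volume in the Media table) matches.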
sql
UPDATE Media SET VolBytes=${size} WHERE VolumeName='TestVolume001';

llist volume=TestVolume001
@# Now do another full save with big Volume
run level=Full job=$JobName yes
wait
messages
@# Dump JobMedia: the job written after the grow should show large
@# file/block addresses on the big Volume.
sql
SELECT * FROM JobMedia;

@#
@# now do a restore
@#
@$out ${cwd}/tmp/log2.out
restore where=${cwd}/tmp/bacula-restores storage=File select all done
yes
wait
messages
quit
END_OF_DATA
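
# Feed the second set of commands to the still-running daemons: fake
# the Volume size in the catalog, run a Full backup that lands past
# the 5 GB mark, then restore everything.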

run_bconsole
check_for_zombie_jobs storage=File
stop_bacula

# check_two_logs scans log1.out and log2.out for job errors;
# check_restore_diff compares the restored tree against the original
# ${BackupDirectory}.
check_two_logs
check_restore_diff
end_test
# Get rid of big files
rm -f ${cwd}/tmp/TestVolume001
