#!/bin/sh
#
# Run a simple backup using the compressed option,
#   then back up four times, each with an incremental, then
#   do a bscan and restore.
#   It should require at least 4 different bsrs.
#
TestName="bscan-fast-tape"
JobName=bscanfasttape
. scripts/functions

require_tape_drive

copy_tape_confs

# Keep a pristine copy of the SD config before experimenting with it.
# Quoted so paths containing spaces do not word-split (SC2086).
cp "${cwd}/bin/susan-sd.conf" "${cwd}/tmp/1"
# sed "s%# Maximum File Size%  Maximum File Size%" ${cwd}/tmp/1 >${cwd}/bin/susan-sd.conf

change_jobname NightlySave "$JobName"


# Directory to backup.
# This directory will be created by setup_data().
BackupDirectory="${tmp}/data"

# Use a tgz to setup data to be backed up.
# Data will be placed at "${tmp}/data/".
setup_data data/small.tgz

start_test

# Console script for the initial full backup job.
# "tape" below is the Storage resource name from the SD config, not a device.
# $out and $JobName are expanded here (unquoted heredoc delimiter).
cat <<END_OF_DATA >tmp/bconcmds
@$out /dev/null
messages
@$out tmp/log1.out
setdebug level=2 storage=tape
label storage=tape volume=TestVolume001 slot=0 pool=Default
run job=$JobName yes
wait
messages
quit
END_OF_DATA

echo "Starting Susan tape writing"
# NOTE(review): the btape session below AND run_susan/check/stop are all
# commented out, so nothing is actually written to the tape in this phase —
# tmp/bconcmds is prepared but never fed to a running director. Presumably
# this script is being used to debug a tape written by an earlier run;
# confirm before treating this as a self-contained test.
#$bin/btape -c bin/susan-sd.conf /dev/nst0 <<END_OF_DATA
#rewind
#label
#Test001
#wr
#wr
#wr
#wr
#wr
#wr
#weof
#quit
#END_OF_DATA
#run_susan
#check_for_zombie_jobs storage=tape
#stop_susan
echo "End writing tape"

#
# now drop and recreate the database so bscan can repopulate the catalog
#
# Run in a subshell so the working directory is restored automatically and
# a failed cd cannot execute the scripts from the wrong directory.
(
  cd bin || exit 1
  ./drop_susan_tables >/dev/null 2>&1
  ./make_susan_tables >/dev/null 2>&1
  # Fixed redirection order: '2>&1 >/dev/null' duplicated stderr onto the
  # terminal before stdout was redirected, leaking noise that the two lines
  # above already suppress.
  ./grant_susan_privileges >/dev/null 2>&1
)

echo "Begin attempt to read tape that crashes the system"
# Minimal bootstrap record: scan everything on TestVolume001.
echo "volume=TestVolume001" >tmp/bscan.bsr

# Presumably sets $BSCANLIBDBI for libdbi catalog builds (defined in
# scripts/functions) — verify against the framework.
bscan_libdbi

# NOTE(review): 'strace -o strace.new' is a debugging leftover — it requires
# strace to be installed and slows the scan considerably; drop it before
# committing this as a regular regression test.
strace -o strace.new $bin/bscan -d200 -w working $BSCANLIBDBI -u regress -n regress -m -s -v -b tmp/bscan.bsr -c bin/susan-sd.conf tape
# NOTE(review): this unconditional 'exit' makes everything below dead code —
# the restore and the check_two_logs/check_restore_diff verification never
# run, so the test can never report a real pass/fail. Remove once the crash
# under investigation is resolved.
exit
# NOTE(review): this whole section is currently unreachable — the bare
# 'exit' above terminates the script first.
# Console script: restore everything that bscan fed back into the catalog.
cat <<END_OF_DATA >tmp/bconcmds
@$out /dev/null
messages
@$out tmp/log2.out
@#
@# now do a restore
@#
restore where=${cwd}/tmp/susan-restores select all storage=tape done
yes
wait
messages
quit
END_OF_DATA

run_susan
check_for_zombie_jobs storage=tape
stop_susan
#rm -f  ${BackupDirectory}/src/lib/dummy

# Verify both job logs report success and that the restored tree matches
# the original backed-up data.
check_two_logs
check_restore_diff
end_test
