#!/bin/sh
#
#  Test if Susan can handle big fileset
#
#  Runs one backup of the (renamed) MonsterFileSet job and then a full
#  restore, driving the console through a generated bconcmds script.
#
# Test name reported by start_test/end_test.
TestName="big-fileset-test"
# Job name this test runs; the stock MonsterFileSet job is renamed to it below.
JobName=BigFileSet
# Pull in the regression-test framework helpers (copy_test_confs,
# change_jobname, setup_data, start_test, run_susan, end_test, ...).
# Also defines variables used later, e.g. ${cwd}, ${tmp} and $out.
. scripts/functions

# Install the standard test configuration files for this run.
copy_test_confs

# Enable automatic volume labeling: uncomment the "Label Format" line in
# the Director config.  Edit via a temp copy because sed reads and writes
# different files (no portable in-place edit under /bin/sh).
# Expansions are quoted to survive paths with spaces (SC2086).
cp "${cwd}/bin/susan-dir.conf" "${cwd}/tmp/1"
sed "s%# Label Format%  Label Format%" "${cwd}/tmp/1" >"${cwd}/bin/susan-dir.conf"

# Rename the stock MonsterFileSet job to this test's job name.
change_jobname MonsterFileSet "$JobName"


# Directory to backup.
# This directory will be created by setup_data().
BackupDirectory="${tmp}/data"

# Use a tgz to setup data to be backed up.
# Data will be placed at "${tmp}/data/".
# TODO: use a larger data directory.
setup_data data/small.tgz

start_test

# Generate the console command file consumed by run_susan.  The here-doc
# delimiter is deliberately UNQUOTED so that ${cwd}, $JobName and $out
# (all set by scripts/functions) are expanded now, at generation time.
# Lines beginning with @# are bconsole comments; the body below is data
# for bconsole and must not be reformatted.  Only the redirection target
# is quoted here (SC2086).
cat <<END_OF_SCRIPT >"${cwd}/tmp/bconcmds"
@$out /dev/null
messages
@$out ${cwd}/tmp/log1.out
status all
status all
list pools
messages
@#setdebug level=110 storage=File
run job=$JobName storage=File yes
list pools
list volumes
wait
sql
SELECT StartTime, JobFiles, JobId, Level, count(*) AS nb_jobmedia
FROM JobMedia join Job using (JobId) join Client using (ClientId)
where Client.Name = 'localhost-fd' group by JobId, Level, StartTime,
JobFiles order by JobId desc ;

messages
@#
@# now do a restore
@#
@$out ${cwd}/tmp/log2.out
@#setdebug level=400 storage=File
restore where=${cwd}/tmp/susan-restores select storage=File
unmark *
mark *
count
ls *
dir *
find Makefile
pwd
lsmark
estimate
?
help
done
yes
wait
messages
quit
END_OF_SCRIPT

# Start the daemons and feed ${cwd}/tmp/bconcmds to the console.
run_susan
# Fail fast if any job is left hanging in the storage daemon.
check_for_zombie_jobs storage=File || exit 1
stop_susan

# Scans log1.out/log2.out for backup/restore success (presumably sets
# bstat/rstat — defined in scripts/functions, not visible here).
check_two_logs

# we are really more interested to know if backup and restore
# worked, but checking the files restored is non-trivial due
# to the big fileset exclusions
#  check_restore_diff

# NOTE(review): forcing all four status variables to 0 right before
# end_test appears to make the test pass unconditionally.  dstat=0 is
# justified by the comment above (restore diff is skipped), but
# bstat=0/rstat=0 discard whatever check_two_logs just determined,
# which contradicts "we are really more interested to know if backup
# and restore worked" — confirm this is intentional.
zstat=0
dstat=0
bstat=0
rstat=0
end_test

