ADD_TEST(disk:backup-bacula-test "@regressdir@/tests/backup-bacula-test")
ADD_TEST(disk:backup-to-null "@regressdir@/tests/backup-to-null")
ADD_TEST(disk:bextract-test "@regressdir@/tests/bextract-test")
+# Register the new big-fileset regression test (script added in this patch).
+ADD_TEST(disk:big-fileset-test "@regressdir@/tests/big-fileset-test")
ADD_TEST(disk:big-vol-test "@regressdir@/tests/big-vol-test")
ADD_TEST(disk:bscan-test "@regressdir@/tests/bscan-test")
ADD_TEST(disk:bsr-opt-test "@regressdir@/tests/bsr-opt-test")
--- /dev/null
+#!/bin/sh
+#
+# Test whether Bacula can handle a big fileset: back up the build
+# tree, inspect JobMedia via SQL, then restore everything while
+# exercising a range of restore-console commands.
+#
+
+TestName="big-fileset-test"
+JobName=BigFileSet
+. scripts/functions
+
+copy_test_confs
+
+# Back up the whole build directory.
+echo "${cwd}/build" >${cwd}/tmp/file-list
+
+# Uncomment the "# Label Format" directive in the Director config so
+# volumes are labeled automatically during the run.
+cp ${cwd}/bin/bacula-dir.conf ${cwd}/tmp/1
+sed "s%# Label Format% Label Format%" ${cwd}/tmp/1 >${cwd}/bin/bacula-dir.conf
+
+change_jobname MonsterFileSet $JobName
+start_test
+
+# Console script.  NOTE: the here-document delimiter is unquoted, so
+# ${cwd}, $out and $JobName are expanded by the shell before the file
+# is written; lines starting with @# are bconsole comments.
+cat <<END_OF_SCRIPT >${cwd}/tmp/bconcmds
+@output /dev/null
+messages
+@$out ${cwd}/tmp/log1.out
+status all
+status all
+list pools
+messages
+@#setdebug level=110 storage=File
+run job=$JobName storage=File yes
+list pools
+list volumes
+wait
+sql
+SELECT StartTime, JobFiles, JobId, Level, count(*) AS nb_jobmedia
+FROM JobMedia join Job using (JobId) join Client using (ClientId)
+where Client.Name = 'localhost-fd' group by JobId, Level, StartTime,
+JobFiles order by JobId desc ;
+
+messages
+@#
+@# now do a restore
+@#
+@$out ${cwd}/tmp/log2.out
+@#setdebug level=400 storage=File
+restore where=${cwd}/tmp/bacula-restores select storage=File
+unmark *
+mark *
+count
+ls *
+dir *
+find Makefile
+pwd
+lsmark
+estimate
+?
+help
+done
+yes
+wait
+messages
+@$out
+quit
+END_OF_SCRIPT
+
+run_bacula
+check_for_zombie_jobs storage=File || exit 1
+stop_bacula
+
+check_two_logs
+
+# We are really more interested in whether the backup and restore
+# worked; verifying the restored files is non-trivial because of the
+# big fileset's exclusions, so check_restore_diff is deliberately
+# skipped here.
+# check_restore_diff
+# Status flags consumed by end_test (see scripts/functions):
+#   zstat - zombie-job check, dstat - restore diff,
+#   bstat/rstat - backup/restore log checks.
+# check_restore_diff is skipped above, so dstat must be cleared
+# explicitly; likewise zstat, since no zombie check runs after
+# stop_bacula.
+zstat=0
+dstat=0
+# Do NOT reset bstat/rstat to 0: check_two_logs already derived them
+# from the job logs, and overwriting them would mask real backup or
+# restore failures.  Only default them if they are still unset.
+bstat=${bstat:-0}
+rstat=${rstat:-0}
+end_test