--- /dev/null
+#!/bin/sh
+#
+# Test whether Bacula can handle a big FileSet.
+# This test creates 2M files in a directory and
+# backs it up twice in accurate mode.
+#
+# Creating 2M files takes a long time, so the "many-files"
+# directory isn't cleaned up at the end.
+#
+
+TestName="big-files-test"
+JobName=BigFiles
+. scripts/functions
+
+copy_test_confs
+
+echo "${cwd}/many-files" >${cwd}/tmp/file-list
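+# Files are named 0file0, 1file1, ...; if 100000file100000 already exists,
+# the file set was created by a previous run and the slow creation step is skipped.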
+if [ ! -f ${cwd}/many-files/100000file100000 ]; then
+ mkdir -p ${cwd}/many-files
+ cd ${cwd}/many-files
+ print_debug "Creating files..."
+ time perl -e 'for($i=0; $i < 2000000; $i++) {open(FP, ">${i}file${i}") or die "$!"; print FP "$i\n"; close(FP); print "." if ($i % 10000 == 0);}'
+ cd ${cwd}
+fi
+
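+# Uncomment "Label Format" (automatic volume labeling) and enable
+# Accurate mode on the Backup job definitions.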
+sed "s%# Label Format% Label Format%" ${cwd}/bin/bacula-dir.conf > ${cwd}/tmp/1
+sed "s%Type = Backup%Type = Backup; Accurate = yes%" ${cwd}/tmp/1 > ${cwd}/bin/bacula-dir.conf
+
+change_jobname MonsterFileSet $JobName
+start_test
+
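+# The bconsole session below runs a first backup, checks the JobMedia records
+# via SQL, runs a second accurate backup, then restores everything.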
+cat <<END_OF_SCRIPT >${cwd}/tmp/bconcmds
+@output /dev/null
+messages
+@$out ${cwd}/tmp/log1.out
+status all
+status all
+list pools
+messages
+@#setdebug level=110 storage=File
+run job=$JobName storage=File yes
+list pools
+list volumes
+wait
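+@# list the number of JobMedia records created per job for localhost-fd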
+sql
+SELECT StartTime, JobFiles, JobId, Level, count(*) AS nb_jobmedia
+FROM JobMedia join Job using (JobId) join Client using (ClientId)
+where Client.Name = 'localhost-fd' group by JobId, Level, StartTime,
+JobFiles order by JobId desc ;
+
+messages
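+@# run the same job a second time (second accurate backup of the 2M files)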
+run job=$JobName storage=File yes
+wait
+messages
+@#
+@# now do a restore
+@#
+@$out ${cwd}/tmp/log2.out
+@#setdebug level=400 storage=File
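+@# restore everything; mark, count, find and estimate exercise the 2M-entry tree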
+restore where=${cwd}/tmp/bacula-restores select storage=File
+unmark *
+mark *
+count
+find 10file10
+pwd
+estimate
+done
+yes
+wait
+messages
+@$out
+quit
+END_OF_SCRIPT
+
+run_bacula
+check_for_zombie_jobs storage=File || exit 1
+stop_bacula
+
+check_two_logs
+
+# We are really more interested in whether the backup and restore
+# worked; diffing the restored files is non-trivial because of the
+# big FileSet and its exclusions, so check_restore_diff is skipped.
+# check_restore_diff
+
+# The restore diff is skipped, so force zstat/dstat to success;
+# bstat and rstat are set by check_two_logs above.
+zstat=0
+dstat=0
+end_test