# Test if Bacula can handle a big fileset.
# This test creates 2M files in a directory and
# backs it up twice in accurate mode.
# Creating 2M files takes a very long time, so the "many-files"
# directory isn't cleaned up at the end.
TestName="big-files-test"

# The FileSet backs up the huge "many-files" directory.
echo "${cwd}/many-files" >${cwd}/tmp/file-list

# Populate the 2M-file tree only once: the sentinel file
# 100000file100000 marks a previously completed population.
if [ ! -f ${cwd}/many-files/100000file100000 ]; then
   mkdir -p ${cwd}/many-files
   # NOTE(review): the perl loop creates files in the *current* working
   # directory; a "cd ${cwd}/many-files" appears to have been lost from
   # this excerpt -- confirm against the original test before running.
   print_debug "Creating 2000000 files..."
   time perl -e 'for($i=0; $i < 2000000; $i++) {open(FP, ">${i}file${i}") or die "$!"; print FP "$i\n"; close(FP); print "\r$i " if ($i%10000);}'
fi

# Enable automatic volume labeling by uncommenting "Label Format" in the
# Director config.  The Accurate-mode variant of the Job edit is left
# disabled (commented) below; the active sed line is a no-op rewrite.
sed 's%# Label Format% Label Format%' ${cwd}/bin/bacula-dir.conf > ${cwd}/tmp/1
# sed 's%Type = Backup%Type = Backup; Accurate = yes%' ${cwd}/tmp/1 > ${cwd}/bin/bacula-dir.conf
sed 's%Type = Backup%Type = Backup%' ${cwd}/tmp/1 > ${cwd}/bin/bacula-dir.conf

change_jobname MonsterFileSet $JobName
# Build the bconsole command file: two full runs of the job (the second
# exercises the already-populated catalog), a JobMedia summary query,
# and a restore.  Variables ($cwd, $JobName, $out) are expanded by the
# shell because the here-doc delimiter is unquoted.
# NOTE(review): several bconsole lines (label/wait/messages, the restore
# file selection and final quit) are missing from this excerpt -- restore
# them from the original test before running.
cat <<END_OF_SCRIPT >${cwd}/tmp/bconcmds
@$out ${cwd}/tmp/log1.out
@#setdebug level=110 storage=File
run job=$JobName storage=File yes
SELECT StartTime, JobFiles, JobId, Level, count(*) AS nb_jobmedia
FROM JobMedia join Job using (JobId) join Client using (ClientId)
where Client.Name = 'localhost-fd' group by JobId, Level, StartTime,
JobFiles order by JobId desc ;
run job=$JobName storage=File yes
@$out ${cwd}/tmp/log2.out
@#setdebug level=400 storage=File
restore where=${cwd}/tmp/bacula-restores select storage=File
END_OF_SCRIPT
# Fail the test outright if any daemon left a zombie job behind.
check_for_zombie_jobs storage=File || exit 1

# We are really more interested in knowing whether backup and restore
# worked; checking the restored files is non-trivial due
# to the big fileset exclusions.