# Test if Bacula can handle a big fileset.
# This test creates 2M files in a directory and
# backs it up twice in accurate mode.
# Creating 2M files takes a very long time, so the "many-files"
# directory isn't cleaned up at the end.
TestName="big-files-test"
# The fileset backs up only the generated tree.
echo "${cwd}/many-files" >${cwd}/tmp/file-list
# "100000file100000" is one of the files generated below (i=100000):
# if it already exists, a previous run built the dataset, so skip the
# very slow generation step.
if [ ! -f ${cwd}/many-files/100000file100000 ]; then
mkdir -p ${cwd}/many-files
# NOTE(review): the perl loop below creates files in the current
# directory — presumably a cd into ${cwd}/many-files happens in lines
# not shown here; verify against the full script.
print_debug "Creating 2000000 files..."
# Populate the dataset: one tiny file per index, named "<i>file<i>",
# each containing its own index.
# Fix: the original progress print used `if ($i%10000)`, which fires on
# every index NOT divisible by 10000 (flooding the output ~2M times);
# the intent is one progress report every 10000 files.
time perl -e 'for($i=0; $i < 2000000; $i++) {open(FP, ">${i}file${i}") or die "$!"; print FP "$i\n"; close(FP); print "\r$i " if ($i % 10000 == 0);}'
27 sed 's%# Label Format% Label Format%' ${cwd}/bin/bacula-dir.conf > ${cwd}/tmp/1
28 # sed 's%Type = Backup%Type = Backup; Accurate = yes%' ${cwd}/tmp/1 > ${cwd}/bin/bacula-dir.conf
29 sed 's%Type = Backup%Type = Backup%' ${cwd}/tmp/1 > ${cwd}/bin/bacula-dir.conf
31 $bperl -e "add_attribute('$conf/bacula-dir.conf', 'Max Run Time', '90min', 'Job')"
32 $bperl -e "add_attribute('$conf/bacula-dir.conf', 'SpoolData', 'no', 'Job')"
34 change_jobname MonsterFileSet $JobName
37 cat <<END_OF_SCRIPT >${cwd}/tmp/bconcmds
40 @$out ${cwd}/tmp/log1.out
45 @#setdebug level=110 storage=File
46 run job=$JobName spooldata=no storage=File yes
51 SELECT StartTime, JobFiles, JobId, Level, count(*) AS nb_jobmedia
52 FROM JobMedia join Job using (JobId) join Client using (ClientId)
53 where Client.Name = 'localhost-fd' group by JobId, Level, StartTime,
54 JobFiles order by JobId desc ;
57 run job=$JobName spooldata=no storage=File yes
63 @$out ${cwd}/tmp/log2.out
64 @#setdebug level=400 storage=File
65 restore where=${cwd}/many-files/bacula-restores select storage=File
80 check_for_zombie_jobs storage=File || exit 1
# We are really more interested in whether the backup and restore
# worked; checking the restored files is non-trivial due
# to the big-fileset exclusions.