4 TestName="duplicate-job-test"
8 scripts/copy-test-confs
9 echo "${cwd}/build/technotes" >${cwd}/tmp/file-list
11 # increase the maximum concurrent jobs for FD, SD and DIR
12 perl -Mscripts::functions -e 'set_maximum_concurrent_jobs("$conf/bacula-dir.conf",100)'
13 perl -Mscripts::functions -e 'set_maximum_concurrent_jobs("$conf/bacula-sd.conf",100)'
14 perl -Mscripts::functions -e 'set_maximum_concurrent_jobs("$conf/bacula-fd.conf",100)'
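# A hedged reading of the scripts::functions helper, based on its name and
# arguments: each call above rewrites the named daemon configuration so that
# it carries roughly
#
#   Maximum Concurrent Jobs = 100
#
# which keeps the duplicate jobs submitted below from being serialized.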
16 # extract a Job and add a runscript on it
17 perl -Mscripts::functions \
18 -e "extract_resource('$conf/bacula-dir.conf', 'Job', 'CompressedTest')" \
19 | sed 's%Standard%Standard; ClientRunBeforeJob="sleep 6"%' > $tmp/1
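# $tmp/1 now holds a copy of the CompressedTest Job with a
# ClientRunBeforeJob="sleep 6" added, which keeps each job busy for a few
# seconds while its duplicate is submitted. The sed scripts below reuse this
# template to append one Job per duplicate-control setting to bacula-dir.conf.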
23 echo 's%CompressedTest%AllowDuplicateYes%' > $outf
24 echo 's%Backup%Backup; AllowDuplicateJobs = yes%' >> $outf
25 sed -f $outf $tmp/1 >> $conf/bacula-dir.conf
27 echo 's%CompressedTest%CancelLowerLevelDuplicatesYes%' > $outf
28 echo 's%Backup%Backup; AllowDuplicateJobs = no; CancelLowerLevelDuplicates=yes%' >> $outf
29 sed -f $outf $tmp/1 >> $conf/bacula-dir.conf
31 echo 's%CompressedTest%CancelQueueDuplicatesYes%' > $outf
32 echo 's%Backup%Backup; AllowDuplicateJobs = no;CancelLowerLevelDuplicates=no;CancelQueuedDuplicates=yes%' >> $outf
33 sed -f $outf $tmp/1 >> $conf/bacula-dir.conf
35 echo 's%CompressedTest%CancelRunningDuplicatesYes%' > $outf
36 echo 's%Backup%Backup; AllowDuplicateJobs = no;CancelLowerLevelDuplicates=no;CancelQueuedDuplicates=no; CancelRunningDuplicates=yes%' >> $outf
37 sed -f $outf $tmp/1 >> $conf/bacula-dir.conf
39 echo 's%CompressedTest%CancelRunningDuplicatesNo%' > $outf
40 echo 's%Backup%Backup; AllowDuplicateJobs = no;CancelLowerLevelDuplicates=no;CancelQueuedDuplicates=no; CancelRunningDuplicates=no%' >> $outf
41 sed -f $outf $tmp/1 >> $conf/bacula-dir.conf
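# A rough sketch of what one of the appended resources should look like,
# assuming the stock CompressedTest Job contains "Type = Backup" and
# "Messages = Standard" lines (the lines the Backup and Standard substitutions
# key on); every other directive is carried over unchanged from the template:
#
#   Job {
#     Name = "CancelLowerLevelDuplicatesYes"
#     Type = Backup; AllowDuplicateJobs = no; CancelLowerLevelDuplicates=yes
#     Messages = Standard; ClientRunBeforeJob="sleep 6"
#     ...
#   }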
43 when=`perl -Mscripts::functions -e "get_time(300)"`
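# get_time(300) presumably returns a date/time string 300 seconds in the
# future; it is used below as when="$when" to keep a job queued instead of
# letting it start immediately.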
47 cat <<END_OF_DATA >${cwd}/tmp/bconcmds
50 @$out ${cwd}/tmp/log1.out
52 label storage=File volume=TestVolume001
53 @#setdebug level=100 storage=File
54 @##############################################
55 @# AllowDuplicateJobs = yes, both jobs should work
56 @##############################################
57 run comment="Should work" level=Full job=AllowDuplicateYes yes
59 run comment="Should work" level=Full job=AllowDuplicateYes yes
63 @###############################################################
64 @# Run two jobs with the same level and see which one is canceled
65 @###############################################################
66 run comment="Should work" level=Full job=CancelLowerLevelDuplicatesYes yes
68 run comment="Should fail" level=Full job=CancelLowerLevelDuplicatesYes yes
71 @####################################################################
72 @# Run two jobs with different levels and see which one is canceled
73 @####################################################################
74 run comment="Should fail" level=Incremental job=CancelLowerLevelDuplicatesYes yes
76 run comment="Should work" level=Full job=CancelLowerLevelDuplicatesYes yes
79 run comment="Should fail" level=Differential job=CancelLowerLevelDuplicatesYes yes
81 run comment="Should work" level=Full job=CancelLowerLevelDuplicatesYes yes
84 run comment="Should work" level=Differential job=CancelLowerLevelDuplicatesYes yes
86 run comment="Should fail" level=Incremental job=CancelLowerLevelDuplicatesYes yes
89 @#####################################################################################
90 @# Run two jobs with different levels and see which one is canceled (reversed order)
91 @#####################################################################################
92 run comment="Should work" level=Full job=CancelLowerLevelDuplicatesYes yes
94 run comment="Should fail" level=Incremental job=CancelLowerLevelDuplicatesYes yes
98 @####################################################################
99 @# Run two jobs; the second one can't cancel the first one and should fail
100 @####################################################################
101 run comment="Should work" level=Full job=CancelQueueDuplicatesYes yes
103 run comment="Should fail" level=Full job=CancelQueueDuplicatesYes yes
106 @#################################################################
107 @# The first job should stay queued; the second one will cancel it
108 @#################################################################
109 run comment="Should fail" level=Full job=CancelQueueDuplicatesYes when="$when" yes
111 run comment="Should work" level=Full job=CancelQueueDuplicatesYes yes
115 @########################################
116 @# The second job will kill the first one
117 @########################################
118 run comment="Should fail" level=Full job=CancelRunningDuplicatesYes yes
120 run comment="Should work" level=Full job=CancelRunningDuplicatesYes yes
124 @##########################
125 @# The second job won't run
126 @##########################
127 run comment="Should work" level=Full job=CancelRunningDuplicatesNo yes
129 run comment="Should fail" level=Full job=CancelRunningDuplicatesNo yes
134 SELECT JobId, Name, Level, Comment, JobStatus from Job
135 WHERE Comment='Should fail' and JobStatus='T' ORDER By JobId;
139 SELECT JobId, Name, Level, Comment, JobStatus from Job
140 WHERE Comment='Should work' and JobStatus != 'T' ORDER By JobId;
144 SELECT JobId, Name, Level, Comment, JobStatus from Job order by JobId;
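@# The elided @$out lines presumably send these three queries to $tmp/log7.out,
@# $tmp/log8.out and $tmp/log9.out: the first two must return no rows (a
@# "Should fail" job that terminated OK, or a "Should work" job that did not,
@# is an error), while the third dumps every job and must hold at least ten
@# rows; the shell checks after the command file grep the logs for exactly that.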
150 check_for_zombie_jobs storage=File
156 grep '^| *[0-9]' $tmp/log7.out > /dev/null
158 print_debug "ERROR: Found errors in $tmp/log7.out"
159 print_debug `cat $tmp/log7.out`
163 grep '^| *[0-9]' $tmp/log8.out > /dev/null
165 print_debug "ERROR: Found errors in $tmp/log8.out"
166 print_debug `cat $tmp/log8.out`
170 nb=`grep '^| *[0-9]' $tmp/log9.out | wc -l`
171 if [ $nb -lt 10 ]; then
172 print_debug "ERROR: no enough results in $tmp/log9.out"
173 print_debug `cat $tmp/log9.out`