WorkingDirectory = "@working_dir@"
PidDirectory = "@piddir@"
SubSysDirectory = "@subsysdir@"
- Maximum Concurrent Jobs = 4
+ Maximum Concurrent Jobs = 10
Password = "pNvX1WiXnwv2C/F7E52LGvw6rKjbbPvu2kyuPa9pVaL3" # Console password
Messages = Standard
}
Messages = Standard
Pool = Default
Write Bootstrap = "@working_dir@/NightlySave.bsr"
- Maximum Concurrent Jobs = 4
+ Maximum Concurrent Jobs = 10
SpoolData=yes
}
Messages = Standard
Pool = Default
Write Bootstrap = "@working_dir@/NightlySave.bsr"
+ Max Run Time = 30min
+ Maximum Concurrent Jobs = 10
+ SpoolData=yes
}
Messages = Standard
Pool = Default
Write Bootstrap = "@working_dir@/NightlySave.bsr"
+ Max Run Time = 30min
+ Maximum Concurrent Jobs = 10
+ SpoolData=yes
}
Job {
Storage = File
Messages = Standard
Pool = Default
- Maximum Concurrent Jobs = 4
+ Max Run Time = 30min
+ Maximum Concurrent Jobs = 10
+ SpoolData=yes
Write Bootstrap = "@working_dir@/NightlySave.bsr"
}
Messages = Standard
Pool = Default
Write Bootstrap = "@working_dir@/NightlySave.bsr"
+ Max Run Time = 30min
+ Maximum Concurrent Jobs = 10
+ SpoolData=yes
}
Job {
File Retention = 30d # 30 days
Job Retention = 180d # six months
AutoPrune = yes # Prune expired Jobs/Files
- Maximum Concurrent Jobs = 4
+ Maximum Concurrent Jobs = 10
TLS Require = yes
TLS Certificate = "@scriptdir@/tls-cert.pem"
TLS Key = "@scriptdir@/tls-cert.pem"
Password = "ccV3lVTsQRsdIUGyab0N4sMDavui2hOBkmpBU0aQKOr9"
Device = FileStorage
Media Type = File
- Maximum Concurrent Jobs = 4
+ Maximum Concurrent Jobs = 10
TLS Require = yes
TLS Certificate = "@scriptdir@/tls-cert.pem"
TLS Key = "@scriptdir@/tls-cert.pem"
Password = "ccV3lVTsQRsdIUGyab0N4sMDavui2hOBkmpBU0aQKOr9"
Device = FileStorage1
Media Type = File1
- Maximum Concurrent Jobs = 4
+ Maximum Concurrent Jobs = 10
TLS Require = yes
TLS Certificate = "@scriptdir@/tls-cert.pem"
TLS Key = "@scriptdir@/tls-cert.pem"
--- /dev/null
+#!/bin/sh
+#
+# Run backup jobs with the various duplicate-job control directives
+# (AllowDuplicateJobs, CancelLowerLevelDuplicates, CancelQueuedDuplicates,
+# CancelRunningDuplicates) and verify which duplicates run and which are canceled.
+#
+TestName="duplicate-job-test"
+. scripts/functions
+
+scripts/cleanup
+scripts/copy-tls-confs
+echo "${cwd}/build/technotes" >${cwd}/tmp/file-list
+
+# increase the maximum concurrent jobs for FD, SD and DIR
+perl -Mscripts::functions -e 'set_maximum_concurrent_jobs("$conf/bacula-dir.conf",100)'
+perl -Mscripts::functions -e 'set_maximum_concurrent_jobs("$conf/bacula-sd.conf",100)'
+perl -Mscripts::functions -e 'set_maximum_concurrent_jobs("$conf/bacula-fd.conf",100)'
+
+# extract the CompressedTest Job and add a ClientRunBeforeJob sleep so that duplicate jobs overlap
+perl -Mscripts::functions \
+ -e "extract_resource('$conf/bacula-dir.conf', 'Job', 'CompressedTest')" \
+ | sed 's%Standard%Standard; ClientRunBeforeJob="sleep 6"%' > $tmp/1
+
+
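+# generate several copies of the extracted Job, one per duplicate-control setting,
+# and append them to the Director configuration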
+outf="$tmp/sed_tmp"
+echo 's%CompressedTest%AllowDuplicateYes%' > $outf
+echo 's%Backup%Backup; AllowDuplicateJobs = yes%' >> $outf
+sed -f $outf $tmp/1 >> $conf/bacula-dir.conf
+
+echo 's%CompressedTest%CancelLowerLevelDuplicatesYes%' > $outf
+echo 's%Backup%Backup; AllowDuplicateJobs = no; CancelLowerLevelDuplicates=yes%' >> $outf
+sed -f $outf $tmp/1 >> $conf/bacula-dir.conf
+
+echo 's%CompressedTest%CancelQueueDuplicatesYes%' > $outf
+echo 's%Backup%Backup; AllowDuplicateJobs = no;CancelLowerLevelDuplicates=no;CancelQueuedDuplicates=yes %' >> $outf
+sed -f $outf $tmp/1 >> $conf/bacula-dir.conf
+
+echo 's%CompressedTest%CancelRunningDuplicatesYes%' > $outf
+echo 's%Backup%Backup; AllowDuplicateJobs = no;CancelLowerLevelDuplicates=no;CancelQueuedDuplicates=no; CancelRunningDuplicates=yes%' >> $outf
+sed -f $outf $tmp/1 >> $conf/bacula-dir.conf
+
+echo 's%CompressedTest%CancelRunningDuplicatesNo%' > $outf
+echo 's%Backup%Backup; AllowDuplicateJobs = no;CancelLowerLevelDuplicates=no;CancelQueuedDuplicates=no; CancelRunningDuplicates=no%' >> $outf
+sed -f $outf $tmp/1 >> $conf/bacula-dir.conf
+
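+# start time 5 minutes in the future, used below to keep a job queued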
+when=`perl -Mscripts::functions -e "get_time(300)"`
+
+start_test
+
+cat <<END_OF_DATA >${cwd}/tmp/bconcmds
+@$out /dev/null
+messages
+@$out ${cwd}/tmp/log1.out
+messages
+label storage=File volume=TestVolume001
+@#setdebug level=100 storage=File
+@##############################################
+@# AllowDuplicateJobs = Yes, both jobs should work
+@##############################################
+run comment="Should work" level=Full job=AllowDuplicateYes yes
+@sleep 2
+run comment="Should work" level=Full job=AllowDuplicateYes yes
+wait
+messages
+@$out $tmp/log3.out
+@###############################################################
+@# Run two jobs with the same level and see which one is canceled
+@###############################################################
+run comment="Should work" level=Full job=CancelLowerLevelDuplicatesYes yes
+@sleep 2
+run comment="Should fail" level=Full job=CancelLowerLevelDuplicatesYes yes
+wait
+messages
+@####################################################################
+@# Run two jobs with different levels and see which one is canceled
+@####################################################################
+run comment="Should fail" level=Incremental job=CancelLowerLevelDuplicatesYes yes
+@sleep 2
+run comment="Should work" level=Full job=CancelLowerLevelDuplicatesYes yes
+wait
+messages
+run comment="Should fail" level=Differential job=CancelLowerLevelDuplicatesYes yes
+@sleep 2
+run comment="Should work" level=Full job=CancelLowerLevelDuplicatesYes yes
+wait
+messages
+run comment="Should work" level=Differential job=CancelLowerLevelDuplicatesYes yes
+@sleep 2
+run comment="Should fail" level=Incremental job=CancelLowerLevelDuplicatesYes yes
+wait
+messages
+@#####################################################################################
+@# Run two jobs with different levels and see which one is canceled (reversed order)
+@#####################################################################################
+run comment="Should work" level=Full job=CancelLowerLevelDuplicatesYes yes
+@sleep 2
+run comment="Should fail" level=Incremental job=CancelLowerLevelDuplicatesYes yes
+wait
+messages
+@$out $tmp/log4.out
+@####################################################################
+@# Run two jobs, the second one can't cancel the 1st, and should fail
+@####################################################################
+run comment="Should work" level=Full job=CancelQueueDuplicatesYes yes
+@sleep 2
+run comment="Should fail" level=Full job=CancelQueueDuplicatesYes yes
+wait
+messages
+@#################################################################
+@# The first job should stay queued, the second one will cancel it
+@#################################################################
+run comment="Should fail" level=Full job=CancelQueueDuplicatesYes when="$when" yes
+@sleep 2
+run comment="Should work" level=Full job=CancelQueueDuplicatesYes yes
+wait
+messages
+@$out $tmp/log5.out
+@########################################
+@# The second job will kill the first one
+@########################################
+run comment="Should fail" level=Full job=CancelRunningDuplicatesYes yes
+@sleep 2
+run comment="Should work" level=Full job=CancelRunningDuplicatesYes yes
+wait
+messages
+@$out $tmp/log6.out
+@##########################
+@# The second job won't run
+@##########################
+run comment="Should work" level=Full job=CancelRunningDuplicatesNo yes
+@sleep 2
+run comment="Should fail" level=Full job=CancelRunningDuplicatesNo yes
+wait
+messages
+@$out $tmp/log7.out
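+@# "Should fail" jobs that terminated OK; this query must return no rows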
+sql
+SELECT JobId, Name, Level, Comment, JobStatus from Job
+WHERE Comment='Should fail' and JobStatus='T' ORDER By JobId;
+
+@$out $tmp/log8.out
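+@# "Should work" jobs that did not terminate OK; this query must return no rows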
+sql
+SELECT JobId, Name, Level, Comment, JobStatus from Job
+WHERE Comment='Should work' and JobStatus != 'T' ORDER By JobId;
+
+@$out $tmp/log9.out
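+@# full job listing, used for the final row-count sanity check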
+sql
+SELECT JobId, Name, Level, Comment, JobStatus from Job order by JobId;
+
+quit
+END_OF_DATA
+
+run_bacula
+check_for_zombie_jobs storage=File
+stop_bacula
+
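+# no restore is run in this test, so provide an empty log2.out for check_two_logs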
+touch $tmp/log2.out
+check_two_logs
+
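+# any data row (a line starting with "|") in log7 or log8 indicates a failure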
+grep '^| *[0-9]' $tmp/log7.out > /dev/null
+if [ $? = 0 ]; then
+    print_debug "ERROR: Found 'Should fail' jobs that terminated OK in $tmp/log7.out"
+ print_debug `cat $tmp/log7.out`
+ estat=1
+fi
+
+grep '^| *[0-9]' $tmp/log8.out > /dev/null
+if [ $? = 0 ]; then
+    print_debug "ERROR: Found 'Should work' jobs that did not terminate OK in $tmp/log8.out"
+ print_debug `cat $tmp/log8.out`
+ estat=1
+fi
+
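+# make sure the jobs were actually run and recorded in the catalog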
+nb=`grep '^| *[0-9]' $tmp/log9.out | wc -l`
+if [ $nb -lt 10 ]; then
+    print_debug "ERROR: not enough results in $tmp/log9.out"
+ print_debug `cat $tmp/log9.out`
+ estat=1
+fi
+
+end_test