From f1da47fd3346a339c432126ae5b5293b06136171 Mon Sep 17 00:00:00 2001
From: Kern Sibbald <kern@sibbald.com>
Date: Sun, 4 Jul 2010 22:12:51 +0200
Subject: [PATCH] Add new tls-duplicate-job test

---
 regress/DartTestfile.txt.in            |   1 +
 regress/all-disk-tests                 |   1 +
 regress/scripts/tls-bacula-dir.conf.in |  23 +++-
 regress/scripts/tls-bacula-fd.conf.in  |   2 +
 regress/tests/tls-duplicate-job-test   | 177 +++++++++++++++++++++++++
 5 files changed, 198 insertions(+), 6 deletions(-)
 create mode 100755 regress/tests/tls-duplicate-job-test

diff --git a/regress/DartTestfile.txt.in b/regress/DartTestfile.txt.in
index b3e71c84e3..eda24e10f7 100644
--- a/regress/DartTestfile.txt.in
+++ b/regress/DartTestfile.txt.in
@@ -74,6 +74,7 @@ ADD_TEST(disk:2drive-2job-test "@regressdir@/tests/2drive-2job-test")
 ADD_TEST(disk:source-addr-test "@regressdir@/tests/source-addr-test")
 ADD_TEST(disk:stats-test "@regressdir@/tests/stats-test")
 ADD_TEST(disk:three-pool-test "@regressdir@/tests/three-pool-test")
+ADD_TEST(disk:tls-duplicate-job-test "@regressdir@/tests/tls-duplicate-job-test")
 ADD_TEST(disk:tls-test "@regressdir@/tests/tls-test")
 ADD_TEST(disk:two-jobs-test "@regressdir@/tests/two-jobs-test")
 ADD_TEST(disk:two-pool-test "@regressdir@/tests/two-pool-test")
diff --git a/regress/all-disk-tests b/regress/all-disk-tests
index 7fdb3b6948..240cc12d5e 100755
--- a/regress/all-disk-tests
+++ b/regress/all-disk-tests
@@ -80,6 +80,7 @@ nice tests/prune-migration-test
 nice tests/prune-copy-test
 nice tests/prune-base-job-test
 nice tests/hardlink-test
+nice tests/tls-duplicate-job-test
 nice tests/tls-test
 nice tests/virtual-changer-test
 nice tests/virtual-backup-test
diff --git a/regress/scripts/tls-bacula-dir.conf.in b/regress/scripts/tls-bacula-dir.conf.in
index 5133db1547..129f832384 100644
--- a/regress/scripts/tls-bacula-dir.conf.in
+++ b/regress/scripts/tls-bacula-dir.conf.in
@@ -19,7 +19,7 @@ Director {                            # define myself
   WorkingDirectory = "@working_dir@"
   PidDirectory = "@piddir@"
   SubSysDirectory = "@subsysdir@"
-  Maximum Concurrent Jobs = 4
+  Maximum Concurrent Jobs = 10
   Password = "pNvX1WiXnwv2C/F7E52LGvw6rKjbbPvu2kyuPa9pVaL3"         # Console password
   Messages = Standard
 }
@@ -36,7 +36,7 @@ Job {
   Messages = Standard
   Pool = Default
   Write Bootstrap = "@working_dir@/NightlySave.bsr"
-  Maximum Concurrent Jobs = 4
+  Maximum Concurrent Jobs = 10
   SpoolData=yes
 }

@@ -49,6 +49,9 @@ Job {
   Messages = Standard
   Pool = Default
   Write Bootstrap = "@working_dir@/NightlySave.bsr"
+  Max Run Time = 30min
+  Maximum Concurrent Jobs = 10
+  SpoolData=yes
 }


@@ -74,6 +77,9 @@ Job {
   Messages = Standard
   Pool = Default
   Write Bootstrap = "@working_dir@/NightlySave.bsr"
+  Max Run Time = 30min
+  Maximum Concurrent Jobs = 10
+  SpoolData=yes
 }

 Job {
@@ -84,7 +90,9 @@ Job {
   Storage = File
   Messages = Standard
   Pool = Default
-  Maximum Concurrent Jobs = 4
+  Max Run Time = 30min
+  Maximum Concurrent Jobs = 10
+  SpoolData=yes
   Write Bootstrap = "@working_dir@/NightlySave.bsr"
 }

@@ -97,6 +105,9 @@ Job {
   Messages = Standard
   Pool = Default
   Write Bootstrap = "@working_dir@/NightlySave.bsr"
+  Max Run Time = 30min
+  Maximum Concurrent Jobs = 10
+  SpoolData=yes
 }

 Job {
@@ -264,7 +275,7 @@ Client {
   File Retention = 30d                # 30 days
   Job Retention = 180d                # six months
   AutoPrune = yes                     # Prune expired Jobs/Files
-  Maximum Concurrent Jobs = 4
+  Maximum Concurrent Jobs = 10
   TLS Require = yes
   TLS Certificate = "@scriptdir@/tls-cert.pem"
   TLS Key = "@scriptdir@/tls-cert.pem"
@@ -279,7 +290,7 @@ Storage {
   Password = "ccV3lVTsQRsdIUGyab0N4sMDavui2hOBkmpBU0aQKOr9"
   Device = FileStorage
   Media Type = File
-  Maximum Concurrent Jobs = 4
+  Maximum Concurrent Jobs = 10
   TLS Require = yes
   TLS Certificate = "@scriptdir@/tls-cert.pem"
   TLS Key = "@scriptdir@/tls-cert.pem"
@@ -293,7 +304,7 @@ Storage {
   Password = "ccV3lVTsQRsdIUGyab0N4sMDavui2hOBkmpBU0aQKOr9"
   Device = FileStorage1
   Media Type = File1
-  Maximum Concurrent Jobs = 4
+  Maximum Concurrent Jobs = 10
   TLS Require = yes
   TLS Certificate = "@scriptdir@/tls-cert.pem"
   TLS Key = "@scriptdir@/tls-cert.pem"
diff --git a/regress/scripts/tls-bacula-fd.conf.in b/regress/scripts/tls-bacula-fd.conf.in
index 2921cd16e1..40dadf6815 100644
--- a/regress/scripts/tls-bacula-fd.conf.in
+++ b/regress/scripts/tls-bacula-fd.conf.in
@@ -28,6 +28,8 @@ FileDaemon {                          # this is me
   WorkingDirectory = "@working_dir@"
   Pid Directory = "@piddir@"
   SubSys Directory = "@subsysdir@"
+  Plugin Directory = "@sbindir@/plugins"
+  Maximum Concurrent Jobs = 100
   TLS Require = yes
   TLS Certificate = "@scriptdir@/tls-cert.pem"
   TLS Key = "@scriptdir@/tls-cert.pem"
diff --git a/regress/tests/tls-duplicate-job-test b/regress/tests/tls-duplicate-job-test
new file mode 100755
index 0000000000..4161f8f18c
--- /dev/null
+++ b/regress/tests/tls-duplicate-job-test
@@ -0,0 +1,177 @@
+#!/bin/sh
+#
+#
+TestName="tls-duplicate-job-test"
+. scripts/functions
+
+scripts/cleanup
+scripts/copy-tls-confs
+echo "${cwd}/build/technotes" >${cwd}/tmp/file-list
+
+# increase the maximum concurrent jobs for FD, SD and DIR
+perl -Mscripts::functions -e 'set_maximum_concurrent_jobs("$conf/bacula-dir.conf",100)'
+perl -Mscripts::functions -e 'set_maximum_concurrent_jobs("$conf/bacula-sd.conf",100)'
+perl -Mscripts::functions -e 'set_maximum_concurrent_jobs("$conf/bacula-fd.conf",100)'
+
+# extract a Job and add a runscript on it
+perl -Mscripts::functions \
+    -e "extract_resource('$conf/bacula-dir.conf', 'Job', 'CompressedTest')" \
+    | sed 's%Standard%Standard; ClientRunBeforeJob="sleep 6"%' > $tmp/1
+
+
+outf="$tmp/sed_tmp"
+echo 's%CompressedTest%AllowDuplicateYes%' > $outf
+echo 's%Backup%Backup; AllowDuplicateJobs = yes%' >> $outf
+sed -f $outf $tmp/1 >> $conf/bacula-dir.conf
+
+echo 's%CompressedTest%CancelLowerLevelDuplicatesYes%' > $outf
+echo 's%Backup%Backup; AllowDuplicateJobs = no; CancelLowerLevelDuplicates=yes%' >> $outf
+sed -f $outf $tmp/1 >> $conf/bacula-dir.conf
+
+echo 's%CompressedTest%CancelQueueDuplicatesYes%' > $outf
+echo 's%Backup%Backup; AllowDuplicateJobs = no;CancelLowerLevelDuplicates=no;CancelQueuedDuplicates=yes %' >> $outf
+sed -f $outf $tmp/1 >> $conf/bacula-dir.conf
+
+echo 's%CompressedTest%CancelRunningDuplicatesYes%' > $outf
+echo 's%Backup%Backup; AllowDuplicateJobs = no;CancelLowerLevelDuplicates=no;CancelQueuedDuplicates=no; CancelRunningDuplicates=yes%' >> $outf
+sed -f $outf $tmp/1 >> $conf/bacula-dir.conf
+
+echo 's%CompressedTest%CancelRunningDuplicatesNo%' > $outf
+echo 's%Backup%Backup; AllowDuplicateJobs = no;CancelLowerLevelDuplicates=no;CancelQueuedDuplicates=no; CancelRunningDuplicates=no%' >> $outf
+sed -f $outf $tmp/1 >> $conf/bacula-dir.conf
+
+when=`perl -Mscripts::functions -e "get_time(300)"`
+
+start_test
+
+cat <<END_OF_DATA >${cwd}/tmp/bconcmds
+@$out /dev/null
+messages
+@$out ${cwd}/tmp/log1.out
+messages
+label storage=File volume=TestVolume001
+@#setdebug level=100 storage=File
+@##############################################
+@# AllowDuplicates = Yes, both jobs should work
+@##############################################
+run comment="Should work" level=Full job=AllowDuplicateYes yes
+@sleep 2
+run comment="Should work" level=Full job=AllowDuplicateYes yes
+wait
+messages
+@$out $tmp/log3.out
+@###############################################################
+@# Run two jobs with the same level and see which one is canceled
+@###############################################################
+run comment="Should work" level=Full job=CancelLowerLevelDuplicatesYes yes
+@sleep 2
+run comment="Should fail" level=Full job=CancelLowerLevelDuplicatesYes yes
+wait
+messages
+@####################################################################
+@# Run two jobs with the different level and see which one is canceled
+@####################################################################
+run comment="Should fail" level=Incremental job=CancelLowerLevelDuplicatesYes yes
+@sleep 2
+run comment="Should work" level=Full job=CancelLowerLevelDuplicatesYes yes
+wait
+messages
+run comment="Should fail" level=Differential job=CancelLowerLevelDuplicatesYes yes
+@sleep 2
+run comment="Should work" level=Full job=CancelLowerLevelDuplicatesYes yes
+wait
+messages
+run comment="Should work" level=Differential job=CancelLowerLevelDuplicatesYes yes
+@sleep 2
+run comment="Should fail" level=Incremental job=CancelLowerLevelDuplicatesYes yes
+wait
+messages
+@#####################################################################################
+@# Run two jobs with the different level and see which one is canceled (reversed order)
+@#####################################################################################
+run comment="Should work" level=Full job=CancelLowerLevelDuplicatesYes yes
+@sleep 2
+run comment="Should fail" level=Incremental job=CancelLowerLevelDuplicatesYes yes
+wait
+messages
+@$out $tmp/log4.out
+@####################################################################
+@# Run two jobs, the second one can't cancel the 1st, and should fail
+@####################################################################
+run comment="Should work" level=Full job=CancelQueueDuplicatesYes yes
+@sleep 2
+run comment="Should fail" level=Full job=CancelQueueDuplicatesYes yes
+wait
+messages
+@#################################################################
+@# The first job should stay queued, the second one will cancel it
+@#################################################################
+run comment="Should fail" level=Full job=CancelQueueDuplicatesYes when="$when" yes
+@sleep 2
+run comment="Should work" level=Full job=CancelQueueDuplicatesYes yes
+wait
+messages
+@$out $tmp/log5.out
+@########################################
+@# The second job will kill the first one
+@########################################
+run comment="Should fail" level=Full job=CancelRunningDuplicatesYes yes
+@sleep 2
+run comment="Should work" level=Full job=CancelRunningDuplicatesYes yes
+wait
+messages
+@$out $tmp/log6.out
+@##########################
+@# The second job won't run
+@##########################
+run comment="Should work" level=Full job=CancelRunningDuplicatesNo yes
+@sleep 2
+run comment="Should fail" level=Full job=CancelRunningDuplicatesNo yes
+wait
+messages
+@$out $tmp/log7.out
+sql
+SELECT JobId, Name, Level, Comment, JobStatus from Job
+WHERE Comment='Should fail' and JobStatus='T' ORDER By JobId;

+
+@$out $tmp/log8.out
+sql
+SELECT JobId, Name, Level, Comment, JobStatus from Job
+WHERE Comment='Should work' and JobStatus != 'T' ORDER By JobId;

+
+@$out $tmp/log9.out
+sql
+SELECT JobId, Name, Level, Comment, JobStatus from Job order by JobId;

+
+quit
+END_OF_DATA
+
+run_bacula
+check_for_zombie_jobs storage=File
+stop_bacula
+
+touch $tmp/log2.out
+check_two_logs
+
+grep '^| *[0-9]' $tmp/log7.out > /dev/null
+if [ $? = 0 ]; then
+    print_debug "ERROR: Found errors in $tmp/log7.out"
+    print_debug `cat $tmp/log7.out`
+    estat=1
+fi
+
+grep '^| *[0-9]' $tmp/log8.out > /dev/null
+if [ $? = 0 ]; then
+    print_debug "ERROR: Found errors in $tmp/log8.out"
+    print_debug `cat $tmp/log8.out`
+    estat=1
+fi
+
+nb=`grep '^| *[0-9]' $tmp/log9.out | wc -l`
+if [ $nb -lt 10 ]; then
+    print_debug "ERROR: not enough results in $tmp/log9.out"
+    print_debug `cat $tmp/log9.out`
+    estat=1
+fi
+
+end_test
-- 
2.39.5