# process .in files with sed script
sed -f ${out} ${cwd}/scripts/bacula-dir.conf.errors.in >${cwd}/scripts/bacula-dir.conf.errors
sed -f ${out} ${cwd}/scripts/test-bacula-dir.conf.in >${cwd}/scripts/test-bacula-dir.conf
-sed -f ${out} ${cwd}/scripts/multihost-bacula-dir.conf.in >${cwd}/scripts/multihost-bacula-dir.conf
+sed -f ${out} ${cwd}/scripts/multi-client-bacula-dir.conf.in >${cwd}/scripts/multi-client-bacula-dir.conf
sed -f ${out} ${cwd}/scripts/bacula-dir.conf.regexwhere.in >${cwd}/scripts/bacula-dir.conf.regexwhere
sed -f ${out} ${cwd}/scripts/bacula-dir.conf.maxtime.in >${cwd}/scripts/bacula-dir.conf.maxtime
sed -f ${out} ${cwd}/scripts/new-test-bacula-dir.conf.in >${cwd}/scripts/new-test-bacula-dir.conf
--- /dev/null
+#
+# Default Bacula Director Configuration file
+#
+# The only thing that MUST be changed is to add one or more
+# file or directory names in the Include directive of the
+# FileSet resource.
+#
+# For Bacula release 2.2.2
+#
+# You might also want to change the default email address
+# from root to your address. See the "mail" and "operator"
+# directives in the Messages resource.
+#
+
+Director { # define myself
+ Name = @hostname@-dir
+ DIRport = 8101 # where we listen for UA connections
+ QueryFile = "@scriptdir@/query.sql"
+ WorkingDirectory = "@working_dir@"
+ PidDirectory = "@piddir@"
+ SubSysDirectory = "@subsysdir@"
+ Maximum Concurrent Jobs = 4
+ Password = "pNvX1WiXnwv2C/F7E52LGvw6rKjbbPvu2kyuPa9pVaL3" # Console password
+ Messages = Daemon
+}
+
+#
+# Define the main nightly save backup job
+# By default, this job will back up to disk in /tmp
+Job {
+ Name = "NightlySave"
+ Type = Backup
+ Client=@hostname@-fd
+ FileSet="Full Set"
+ Storage = File
+ Messages = Standard
+ Pool = Default
+ Write Bootstrap = "@working_dir@/NightlySave.bsr"
+ Maximum Concurrent Jobs = 4
+}
+
+Job {
+ Name = @hostname1@
+ Type = Backup
+ Client = @hostname1@-fd
+ FileSet = "@hostname1@-FileSet"
+ Storage = File
+ Messages = Standard
+ Pool = Default
+ Write Bootstrap = "@working_dir@/NightlySave.bsr"
+ Maximum Concurrent Jobs = 4
+}
+
+Job {
+ Name = @hostname2@
+ Type = Backup
+ Client = @hostname2@-fd
+ FileSet = "@hostname2@-FileSet"
+ Storage = File
+ Messages = Standard
+ Pool = Default
+ Write Bootstrap = "@working_dir@/NightlySave.bsr"
+ Maximum Concurrent Jobs = 4
+}
+
+Job {
+ Name = @hostname3@
+ Type = Backup
+ Client = @hostname3@-fd
+ FileSet = "@hostname3@-FileSet"
+ Storage = File
+ Messages = Standard
+ Pool = Default
+ Write Bootstrap = "@working_dir@/NightlySave.bsr"
+ Maximum Concurrent Jobs = 4
+ SpoolData=yes
+}
+
+
+Job {
+ Name = "NightlySave"
+ Type = Backup
+ Client=@hostname@-fd
+ FileSet="Full Set"
+ Storage = File
+ Messages = Standard
+ Pool = Default
+ Write Bootstrap = "@working_dir@/NightlySave.bsr"
+ Maximum Concurrent Jobs = 4
+ SpoolData=yes
+}
+
+
+Job {
+ Name = "MonsterSave"
+ Type = Backup
+ Client=@hostname@-fd
+ FileSet="Full Set"
+ Storage = File1
+ Messages = Standard
+ Pool = Default
+ Write Bootstrap = "@working_dir@/NightlySave.bsr"
+}
+
+
+Job {
+ Name = "VerifyVolume"
+ Type = Verify
+ Level = VolumeToCatalog
+ Client=@hostname@-fd
+ FileSet="Full Set"
+ Storage = File
+ Messages = Standard
+ Pool = Default
+ Write Bootstrap = "@working_dir@/NightlySave.bsr"
+}
+
+
+Job {
+ Name = "SparseTest"
+ Type = Backup
+ Client=@hostname@-fd
+ FileSet="SparseSet"
+ Storage = File
+ Messages = Standard
+ Pool = Default
+ Write Bootstrap = "@working_dir@/NightlySave.bsr"
+}
+
+Job {
+ Name = "CompressedTest"
+ Type = Backup
+ Client=@hostname@-fd
+ FileSet="CompressedSet"
+ Storage = File
+ Messages = Standard
+ Pool = Default
+ Maximum Concurrent Jobs = 4
+ Write Bootstrap = "@working_dir@/NightlySave.bsr"
+# Max Run Time = 15 seconds
+}
+
+Job {
+ Name = "SparseCompressedTest"
+ Type = Backup
+ Client=@hostname@-fd
+ FileSet="SparseCompressedSet"
+ Storage = File
+ Messages = Standard
+ Pool = Default
+ Write Bootstrap = "@working_dir@/NightlySave.bsr"
+}
+
+
+# Backup the catalog database (after the nightly save)
+Job {
+ Name = "BackupCatalog"
+ Type = Backup
+ Client=@hostname@-fd
+ FileSet="Catalog"
+# Schedule = "WeeklyCycleAfterBackup"
+ Storage = File
+ Messages = Standard
+ Pool = Default
+ # This creates an ASCII copy of the catalog
+ RunBeforeJob = "@sbindir@/make_catalog_backup -u regress"
+ # This deletes the copy of the catalog
+ RunAfterJob = "@sbindir@/delete_catalog_backup"
+ Write Bootstrap = "@working_dir@/BackupCatalog.bsr"
+}
+
+# Standard Restore template, to be changed by Console program
+Job {
+ Name = "RestoreFiles"
+ Type = Restore
+ Client=@hostname@-fd
+ FileSet="Full Set"
+ Storage = File
+ Messages = Standard
+ Pool = Default
+ Where = /tmp/bacula-restores
+}
+
+
+# List of files to be backed up
+FileSet {
+ Name = "Full Set"
+ Include { Options { signature=MD5 }
+ File = </tmp/file-list
+ }
+}
+
+FileSet {
+ Name = "@hostname1@-FileSet"
+ Include { Options { signature=MD5 }
+ File = @hostname1_files@
+ }
+}
+
+FileSet {
+ Name = "@hostname2@-FileSet"
+ Include { Options { signature=MD5 }
+ File = @hostname2_files@
+ }
+}
+
+FileSet {
+ Name = "@hostname3@-FileSet"
+ Include { Options { signature=MD5 }
+ File = @hostname3_files@
+ }
+}
+
+FileSet {
+ Name = "SparseSet"
+ Include { Options { signature=MD5; sparse=yes }
+ File=</tmp/file-list
+ }
+}
+
+FileSet {
+ Name = "CompressedSet"
+ Include {
+ Options { signature=MD5; compression=GZIP }
+ File =</tmp/file-list
+ }
+}
+
+FileSet {
+ Name = "SparseCompressedSet"
+ Include {
+ Options {
+ signature=MD5; compression=GZIP
+ }
+ File= </tmp/file-list
+ }
+}
+
+
+
+#
+# When to do the backups, full backup on first sunday of the month,
+# differential (i.e. incremental since full) every other sunday,
+# and incremental backups other days
+Schedule {
+ Name = "WeeklyCycle"
+ Run = Level=Full 1st sun at 1:05
+ Run = Level=Differential 2nd-5th sun at 1:05
+ Run = Level=Incremental mon-sat at 1:05
+}
+
+# This schedule does the catalog. It starts after the WeeklyCycle
+Schedule {
+ Name = "WeeklyCycleAfterBackup"
+ Run = Level=Full sun-sat at 1:10
+}
+
+# This is the backup of the catalog
+FileSet {
+ Name = "Catalog"
+ Include { Options { signature=MD5 }
+ File=/home/kern/bacula/regress/bin/working/bacula.sql
+ }
+}
+
+# Client (File Services) to backup
+Client {
+ Name = @hostname@-fd
+ Address = @hostname@
+ FDPort = 8102
+ Catalog = MyCatalog
+ Password = "xevrjURYoCHhn26RaJoWbeWXEY/a3VqGKp/37tgWiuHc" # password for FileDaemon
+ File Retention = 30d # 30 days
+ Job Retention = 180d # six months
+ AutoPrune = yes # Prune expired Jobs/Files
+ Maximum Concurrent Jobs = 4
+}
+
+Client {
+ Name = @hostname1@-fd
+ Address = @hostname1@
+ FDPort = 9102
+ Catalog = MyCatalog
+ Password = "@hostname1_password@" # password for FileDaemon
+ File Retention = 30d # 30 days
+ Job Retention = 180d # six months
+ AutoPrune = yes # Prune expired Jobs/Files
+ Maximum Concurrent Jobs = 4
+}
+
+Client {
+ Name = @hostname2@-fd
+ Address = @hostname2@
+ FDPort = 9102
+ Catalog = MyCatalog
+  Password = "@hostname2_password@"          # password for FileDaemon
+ File Retention = 30d # 30 days
+ Job Retention = 180d # six months
+ AutoPrune = yes # Prune expired Jobs/Files
+ Maximum Concurrent Jobs = 4
+}
+
+Client {
+ Name = @hostname3@-fd
+ Address = @hostname3@
+ FDPort = 9102
+ Catalog = MyCatalog
+ Password = "@hostname3_password@" # password for FileDaemon
+ File Retention = 30d # 30 days
+ Job Retention = 180d # six months
+ AutoPrune = yes # Prune expired Jobs/Files
+ Maximum Concurrent Jobs = 4
+}
+
+
+# Definition of file storage device
+Storage {
+ Name = File
+ Address = @hostname@ # N.B. Use a fully qualified name here
+ SDPort = 8103
+ Password = "ccV3lVTsQRsdIUGyab0N4sMDavui2hOBkmpBU0aQKOr9"
+ Device = FileStorage
+ Media Type = File
+ Maximum Concurrent Jobs = 4
+}
+
+Storage {
+ Name = File1
+ Address = @hostname@ # N.B. Use a fully qualified name here
+ SDPort = 8103
+ Password = "ccV3lVTsQRsdIUGyab0N4sMDavui2hOBkmpBU0aQKOr9"
+ Device = FileStorage1
+ Media Type = File1
+ Maximum Concurrent Jobs = 4
+}
+
+
+# Generic catalog service
+Catalog {
+ Name = MyCatalog
+ dbname = regress; user = regress; password = ""
+}
+
+# Reasonable message delivery -- send most everything to email address
+# and to the console
+Messages {
+ Name = Standard
+ mailcommand = "@sbindir@/bsmtp -h localhost -f \"\(Bacula regression\) %r\" -s \"Regression: %t %e of %c %l\" %r"
+ operatorcommand = "@sbindir@/bsmtp -h localhost -f \"\(Bacula regression\) %r\" -s \"Regression: Intervention needed for %j\" %r"
+ MailOnError = @job_email@ = all
+ operator = @job_email@ = mount
+ console = all, !skipped, !terminate, !restored
+#
+# WARNING! the following will create a file that you must cycle from
+# time to time as it will grow indefinitely. However, it will
+# also keep all your messages if they scroll off the console.
+#
+ append = "@working_dir@/log" = all, !skipped
+}
+
+Messages {
+ Name = NoEmail
+ mailcommand = "@sbindir@/bsmtp -h localhost -f \"\(Bacula regression\) %r\" -s \"Regression: %t %e of %c %l\" %r"
+ console = all, !skipped, !terminate
+#
+# WARNING! the following will create a file that you must cycle from
+# time to time as it will grow indefinitely. However, it will
+# also keep all your messages if the scroll off the console.
+#
+ append = "@working_dir@/log" = all, !skipped
+}
+
+#
+# Message delivery for daemon messages (no job).
+Messages {
+ Name = Daemon
+ mailcommand = "@sbindir@/bsmtp -h @smtp_host@ -f \"\(Bacula regression\) %r\" -s \"Regression daemon message\" %r"
+ mail = @job_email@ = all, !skipped
+ console = all, !skipped, !saved
+ append = "@working_dir@/log" = all, !skipped
+}
+
+# Default pool definition
+Pool {
+ Name = Default
+ Pool Type = Backup
+ Recycle = yes # Bacula can automatically recycle Volumes
+ AutoPrune = yes # Prune expired volumes
+ Volume Retention = 365d # one year
+}
+++ /dev/null
-#
-# Default Bacula Director Configuration file
-#
-# The only thing that MUST be changed is to add one or more
-# file or directory names in the Include directive of the
-# FileSet resource.
-#
-# For Bacula release 2.2.2
-#
-# You might also want to change the default email address
-# from root to your address. See the "mail" and "operator"
-# directives in the Messages resource.
-#
-
-Director { # define myself
- Name = @hostname@-dir
- DIRport = 8101 # where we listen for UA connections
- QueryFile = "@scriptdir@/query.sql"
- WorkingDirectory = "@working_dir@"
- PidDirectory = "@piddir@"
- SubSysDirectory = "@subsysdir@"
- Maximum Concurrent Jobs = 4
- Password = "pNvX1WiXnwv2C/F7E52LGvw6rKjbbPvu2kyuPa9pVaL3" # Console password
- Messages = Daemon
-}
-
-#
-# Define the main nightly save backup job
-# By default, this job will back up to disk in /tmp
-Job {
- Name = "NightlySave"
- Type = Backup
- Client=@hostname@-fd
- FileSet="Full Set"
- Storage = File
- Messages = Standard
- Pool = Default
- Write Bootstrap = "@working_dir@/NightlySave.bsr"
- Maximum Concurrent Jobs = 4
-}
-
-Job {
- Name = @hostname1@
- Type = Backup
- Client = @hostname1@-fd
- FileSet = "@hostname1@-FileSet"
- Storage = File
- Messages = Standard
- Pool = Default
- Write Bootstrap = "@working_dir@/NightlySave.bsr"
- Maximum Concurrent Jobs = 4
-}
-
-Job {
- Name = @hostname2@
- Type = Backup
- Client = @hostname2@-fd
- FileSet = "@hostname2@-FileSet"
- Storage = File
- Messages = Standard
- Pool = Default
- Write Bootstrap = "@working_dir@/NightlySave.bsr"
- Maximum Concurrent Jobs = 4
-}
-
-Job {
- Name = @hostname3@
- Type = Backup
- Client = @hostname3@-fd
- FileSet = "@hostname3@-FileSet"
- Storage = File
- Messages = Standard
- Pool = Default
- Write Bootstrap = "@working_dir@/NightlySave.bsr"
- Maximum Concurrent Jobs = 4
- SpoolData=yes
-}
-
-
-Job {
- Name = "NightlySave"
- Type = Backup
- Client=@hostname@-fd
- FileSet="Full Set"
- Storage = File
- Messages = Standard
- Pool = Default
- Write Bootstrap = "@working_dir@/NightlySave.bsr"
- Maximum Concurrent Jobs = 4
- SpoolData=yes
-}
-
-
-Job {
- Name = "MonsterSave"
- Type = Backup
- Client=@hostname@-fd
- FileSet="Full Set"
- Storage = File1
- Messages = Standard
- Pool = Default
- Write Bootstrap = "@working_dir@/NightlySave.bsr"
-}
-
-
-Job {
- Name = "VerifyVolume"
- Type = Verify
- Level = VolumeToCatalog
- Client=@hostname@-fd
- FileSet="Full Set"
- Storage = File
- Messages = Standard
- Pool = Default
- Write Bootstrap = "@working_dir@/NightlySave.bsr"
-}
-
-
-Job {
- Name = "SparseTest"
- Type = Backup
- Client=@hostname@-fd
- FileSet="SparseSet"
- Storage = File
- Messages = Standard
- Pool = Default
- Write Bootstrap = "@working_dir@/NightlySave.bsr"
-}
-
-Job {
- Name = "CompressedTest"
- Type = Backup
- Client=@hostname@-fd
- FileSet="CompressedSet"
- Storage = File
- Messages = Standard
- Pool = Default
- Maximum Concurrent Jobs = 4
- Write Bootstrap = "@working_dir@/NightlySave.bsr"
-# Max Run Time = 15 seconds
-}
-
-Job {
- Name = "SparseCompressedTest"
- Type = Backup
- Client=@hostname@-fd
- FileSet="SparseCompressedSet"
- Storage = File
- Messages = Standard
- Pool = Default
- Write Bootstrap = "@working_dir@/NightlySave.bsr"
-}
-
-
-# Backup the catalog database (after the nightly save)
-Job {
- Name = "BackupCatalog"
- Type = Backup
- Client=@hostname@-fd
- FileSet="Catalog"
-# Schedule = "WeeklyCycleAfterBackup"
- Storage = File
- Messages = Standard
- Pool = Default
- # This creates an ASCII copy of the catalog
- RunBeforeJob = "@sbindir@/make_catalog_backup -u regress"
- # This deletes the copy of the catalog
- RunAfterJob = "@sbindir@/delete_catalog_backup"
- Write Bootstrap = "@working_dir@/BackupCatalog.bsr"
-}
-
-# Standard Restore template, to be changed by Console program
-Job {
- Name = "RestoreFiles"
- Type = Restore
- Client=@hostname@-fd
- FileSet="Full Set"
- Storage = File
- Messages = Standard
- Pool = Default
- Where = /tmp/bacula-restores
-}
-
-
-# List of files to be backed up
-FileSet {
- Name = "Full Set"
- Include { Options { signature=MD5 }
- File = </tmp/file-list
- }
-}
-
-FileSet {
- Name = "@hostname1@-FileSet"
- Include { Options { signature=MD5 }
- File = @hostname1_files@
- }
-}
-
-FileSet {
- Name = "@hostname2@-FileSet"
- Include { Options { signature=MD5 }
- File = @hostname2_files@
- }
-}
-
-FileSet {
- Name = "@hostname3@-FileSet"
- Include { Options { signature=MD5 }
- File = @hostname3_files@
- }
-}
-
-FileSet {
- Name = "SparseSet"
- Include { Options { signature=MD5; sparse=yes }
- File=</tmp/file-list
- }
-}
-
-FileSet {
- Name = "CompressedSet"
- Include {
- Options { signature=MD5; compression=GZIP }
- File =</tmp/file-list
- }
-}
-
-FileSet {
- Name = "SparseCompressedSet"
- Include {
- Options {
- signature=MD5; compression=GZIP
- }
- File= </tmp/file-list
- }
-}
-
-
-
-#
-# When to do the backups, full backup on first sunday of the month,
-# differential (i.e. incremental since full) every other sunday,
-# and incremental backups other days
-Schedule {
- Name = "WeeklyCycle"
- Run = Level=Full 1st sun at 1:05
- Run = Level=Differential 2nd-5th sun at 1:05
- Run = Level=Incremental mon-sat at 1:05
-}
-
-# This schedule does the catalog. It starts after the WeeklyCycle
-Schedule {
- Name = "WeeklyCycleAfterBackup"
- Run = Level=Full sun-sat at 1:10
-}
-
-# This is the backup of the catalog
-FileSet {
- Name = "Catalog"
- Include { Options { signature=MD5 }
- File=/home/kern/bacula/regress/bin/working/bacula.sql
- }
-}
-
-# Client (File Services) to backup
-Client {
- Name = @hostname@-fd
- Address = @hostname@
- FDPort = 8102
- Catalog = MyCatalog
- Password = "xevrjURYoCHhn26RaJoWbeWXEY/a3VqGKp/37tgWiuHc" # password for FileDaemon
- File Retention = 30d # 30 days
- Job Retention = 180d # six months
- AutoPrune = yes # Prune expired Jobs/Files
- Maximum Concurrent Jobs = 4
-}
-
-Client {
- Name = @hostname1@-fd
- Address = @hostname1@
- FDPort = 8102
- Catalog = MyCatalog
- Password = "@hostname1_password@" # password for FileDaemon
- File Retention = 30d # 30 days
- Job Retention = 180d # six months
- AutoPrune = yes # Prune expired Jobs/Files
- Maximum Concurrent Jobs = 4
-}
-
-Client {
- Name = @hostname2@-fd
- Address = @hostname2@
- FDPort = 8102
- Catalog = MyCatalog
- Password = "@hostname2_password@" # password for FileDaemon
- Password = "xevrjURYoCHhn26RaJoWbeWXEY/a3VqGKp/37tgWiuHc" # password for FileDaemon
- File Retention = 30d # 30 days
- Job Retention = 180d # six months
- AutoPrune = yes # Prune expired Jobs/Files
- Maximum Concurrent Jobs = 4
-}
-
-Client {
- Name = @hostname3@-fd
- Address = @hostname3@
- FDPort = 8102
- Catalog = MyCatalog
- Password = "@hostname3_password@" # password for FileDaemon
- File Retention = 30d # 30 days
- Job Retention = 180d # six months
- AutoPrune = yes # Prune expired Jobs/Files
- Maximum Concurrent Jobs = 4
-}
-
-
-# Definiton of file storage device
-Storage {
- Name = File
- Address = @hostname@ # N.B. Use a fully qualified name here
- SDPort = 8103
- Password = "ccV3lVTsQRsdIUGyab0N4sMDavui2hOBkmpBU0aQKOr9"
- Device = FileStorage
- Media Type = File
- Maximum Concurrent Jobs = 4
-}
-
-Storage {
- Name = File1
- Address = @hostname@ # N.B. Use a fully qualified name here
- SDPort = 8103
- Password = "ccV3lVTsQRsdIUGyab0N4sMDavui2hOBkmpBU0aQKOr9"
- Device = FileStorage1
- Media Type = File1
- Maximum Concurrent Jobs = 4
-}
-
-
-# Generic catalog service
-Catalog {
- Name = MyCatalog
- dbname = regress; user = regress; password = ""
-}
-
-# Reasonable message delivery -- send most everything to email address
-# and to the console
-Messages {
- Name = Standard
- mailcommand = "@sbindir@/bsmtp -h localhost -f \"\(Bacula regression\) %r\" -s \"Regression: %t %e of %c %l\" %r"
- operatorcommand = "@sbindir@/bsmtp -h localhost -f \"\(Bacula regression\) %r\" -s \"Regression: Intervention needed for %j\" %r"
- MailOnError = @job_email@ = all
- operator = @job_email@ = mount
- console = all, !skipped, !terminate, !restored
-#
-# WARNING! the following will create a file that you must cycle from
-# time to time as it will grow indefinitely. However, it will
-# also keep all your messages if the scroll off the console.
-#
- append = "@working_dir@/log" = all, !skipped
-}
-
-Messages {
- Name = NoEmail
- mailcommand = "@sbindir@/bsmtp -h localhost -f \"\(Bacula regression\) %r\" -s \"Regression: %t %e of %c %l\" %r"
- console = all, !skipped, !terminate
-#
-# WARNING! the following will create a file that you must cycle from
-# time to time as it will grow indefinitely. However, it will
-# also keep all your messages if the scroll off the console.
-#
- append = "@working_dir@/log" = all, !skipped
-}
-
-#
-# Message delivery for daemon messages (no job).
-Messages {
- Name = Daemon
- mailcommand = "@sbindir@/bsmtp -h @smtp_host@ -f \"\(Bacula regression\) %r\" -s \"Regression daemon message\" %r"
- mail = @job_email@ = all, !skipped
- console = all, !skipped, !saved
- append = "@working_dir@/log" = all, !skipped
-}
-
-# Default pool definition
-Pool {
- Name = Default
- Pool Type = Backup
- Recycle = yes # Bacula can automatically recycle Volumes
- AutoPrune = yes # Prune expired volumes
- Volume Retention = 365d # one year
-}
--- /dev/null
+#!/bin/sh
+#
+# Run four jobs at the same time, with two Volumes.
+# Set max Vol bytes of first volume
+# to less than total backup to force Bacula to use the second
+# Volume.
+#
+TestName="multi-client-test"
+JobName=Multi-client
+. scripts/functions
+
+scripts/cleanup
+scripts/copy-test-confs
+/bin/cp -f scripts/multi-client-bacula-dir.conf bin/bacula-dir.conf
+
+change_jobname NightlySave $JobName
+start_test
+
+cat <<END_OF_DATA >tmp/bconcmds
+@output /dev/null
+messages
+@$out tmp/log1.out
+label storage=File1
+TestVolume001
+label storage=File1
+TestVolume002
+update Volume=TestVolume001 MaxVolBytes=200000000
+@#50000000
+@#12
+@#setdebug level=51 Storage=File1
+status storage=File1
+llist volume=TestVolume001
+llist volume=TestVolume002
+run job=${hostname1} level=Full Storage=File1
+yes
+llist volume=TestVolume001
+llist volume=TestVolume002
+run job=${hostname2} level=Full Storage=File1
+yes
+reload
+llist volume=TestVolume001
+llist volume=TestVolume002
+run job=${hostname3} level=Full Storage=File1
+yes
+llist volume=TestVolume001
+llist volume=TestVolume002
+status storage=File1
+run job=$JobName level=Full Storage=File1
+yes
+status storage=File1
+@sleep 2
+status dir
+status storage=File1
+llist volume=TestVolume001
+llist volume=TestVolume002
+@sleep 5
+messages
+wait
+status storage=File1
+messages
+@#
+@# now do a restore
+@#
+@$out tmp/log2.out
+restore where=${cwd}/tmp/bacula-restores client=${hostname} select storage=File1
+unmark *
+mark *
+done
+yes
+wait
+reload
+reload
+messages
+@output
+quit
+END_OF_DATA
+
+run_bacula
+check_for_zombie_jobs storage=File1
+stop_bacula
+
+check_two_logs
+check_restore_diff
+end_test