# file or directory names in the Include directive of the
# FileSet resource.
#
-# For Bacula release 1.30 (12 April 2003) -- redhat 7.3
+# For Bacula release 2.2.2
#
# You might also want to change the default email address from root
# to your address; see the "mail" and "operator" directives below.
#
Director { # define myself
- Name = rufus-dir
- DIRport = 8101 # where we listen for UA connections
+ Name = @hostname@-dir
+ DIRPort = @dirport@ # where we listen for UA connections
QueryFile = "@scriptdir@/query.sql"
WorkingDirectory = "@working_dir@"
PidDirectory = "@piddir@"
SubSysDirectory = "@subsysdir@"
- Maximum Concurrent Jobs = 1
+ Maximum Concurrent Jobs = 4
Password = "pNvX1WiXnwv2C/F7E52LGvw6rKjbbPvu2kyuPa9pVaL3" # Console password
- Messages = Standard
+ Messages = Daemon
}
#
# Define the main nightly save backup job
-# By default, this job will back up to disk in /tmp
+# By default, this job will back up to disk in @tmpdir@
Job {
Name = "NightlySave"
Type = Backup
- Client=rufus-fd
+ Client=@hostname@-fd
FileSet="Full Set"
Storage = File
Messages = Standard
Pool = Default
Write Bootstrap = "@working_dir@/NightlySave.bsr"
+ Maximum Concurrent Jobs = 4
+ SpoolData=yes
+ Max Run Time = 30min
}
+Job {
+ Name = "MonsterSave"
+ Type = Backup
+ Client=@hostname@-fd
+ FileSet="Full Set"
+ Storage = File1
+ Messages = Standard
+ Pool = Default
+ Write Bootstrap = "@working_dir@/NightlySave.bsr"
+ Max Run Time = 30min
+}
+
+
Job {
Name = "VerifyVolume"
Type = Verify
Level = VolumeToCatalog
- Client=rufus-fd
+ Client=@hostname@-fd
FileSet="Full Set"
Storage = File
Messages = Standard
Pool = Default
Write Bootstrap = "@working_dir@/NightlySave.bsr"
+ Max Run Time = 30min
}
Job {
Name = "SparseTest"
Type = Backup
- Client=rufus-fd
+ Client=@hostname@-fd
FileSet="SparseSet"
Storage = File
Messages = Standard
Pool = Default
Write Bootstrap = "@working_dir@/NightlySave.bsr"
+ Max Run Time = 30min
}
Job {
Name = "CompressedTest"
Type = Backup
- Client=rufus-fd
+ Client=@hostname@-fd
FileSet="CompressedSet"
Storage = File
Messages = Standard
Pool = Default
+ Maximum Concurrent Jobs = 4
Write Bootstrap = "@working_dir@/NightlySave.bsr"
+ Max Run Time = 30min
+# Max Run Time = 15 seconds
}
Job {
Name = "SparseCompressedTest"
Type = Backup
- Client=rufus-fd
+ Client=@hostname@-fd
FileSet="SparseCompressedSet"
Storage = File
Messages = Standard
Pool = Default
Write Bootstrap = "@working_dir@/NightlySave.bsr"
+ Max Run Time = 30min
}
Job {
Name = "BackupCatalog"
Type = Backup
- Client=rufus-fd
+ Client=@hostname@-fd
FileSet="Catalog"
# Schedule = "WeeklyCycleAfterBackup"
Storage = File
Messages = Standard
Pool = Default
# This creates an ASCII copy of the catalog
- RunBeforeJob = "@sbindir@/make_catalog_backup -u bacula"
+ RunBeforeJob = "@sbindir@/make_catalog_backup -u regress"
# This deletes the copy of the catalog
RunAfterJob = "@sbindir@/delete_catalog_backup"
Write Bootstrap = "@working_dir@/BackupCatalog.bsr"
+ Max Run Time = 30min
}
# Standard Restore template, to be changed by Console program
Job {
Name = "RestoreFiles"
Type = Restore
- Client=rufus-fd
+ Client=@hostname@-fd
FileSet="Full Set"
Storage = File
Messages = Standard
Pool = Default
- Where = /tmp/bacula-restores
+ Where = @tmpdir@/bacula-restores
+ Max Run Time = 30min
}
# List of files to be backed up
FileSet {
Name = "Full Set"
- Include = signature=MD5 {
- </tmp/file-list
+ Include { Options { signature=MD5 }
+ File = <@tmpdir@/file-list
}
}
FileSet {
Name = "SparseSet"
- Include = signature=MD5 sparse=yes {
- </tmp/file-list
+ Include { Options { signature=MD5; sparse=yes }
+ File=<@tmpdir@/file-list
}
}
FileSet {
Name = "CompressedSet"
- Include = signature=MD5 compression=GZIP {
- </tmp/file-list
+ Include {
+ Options { signature=MD5; compression=GZIP }
+ File =<@tmpdir@/file-list
}
}
FileSet {
Name = "SparseCompressedSet"
- Include = signature=MD5 compression=GZIP {
- </tmp/file-list
+ Include {
+ Options {
+ signature=MD5; compression=GZIP
+ }
+ File= <@tmpdir@/file-list
}
}
# and incremental backups other days
Schedule {
Name = "WeeklyCycle"
- Run = Full 1st sun at 1:05
- Run = Differential 2nd-5th sun at 1:05
- Run = Incremental mon-sat at 1:05
+ Run = Level=Full 1st sun at 1:05
+ Run = Level=Differential 2nd-5th sun at 1:05
+ Run = Level=Incremental mon-sat at 1:05
}
# This schedule does the catalog. It starts after the WeeklyCycle
Schedule {
Name = "WeeklyCycleAfterBackup"
- Run = Full sun-sat at 1:10
+ Run = Level=Full sun-sat at 1:10
}
# This is the backup of the catalog
FileSet {
Name = "Catalog"
- Include = signature=MD5 {
- /home/kern/bacula/regress/bin/working/bacula.sql
+ Include { Options { signature=MD5 }
+ File=/home/kern/bacula/regress/bin/working/bacula.sql
}
}
# Client (File Services) to backup
Client {
- Name = rufus-fd
- Address = rufus
- FDPort = 8102
+ Name = @hostname@-fd
+ Address = @hostname@
+ FDPort = @fdport@
Catalog = MyCatalog
Password = "xevrjURYoCHhn26RaJoWbeWXEY/a3VqGKp/37tgWiuHc" # password for FileDaemon
File Retention = 30d # 30 days
Job Retention = 180d # six months
AutoPrune = yes # Prune expired Jobs/Files
+ Maximum Concurrent Jobs = 4
}
# Definition of file storage device
Storage {
Name = File
- Address = rufus # N.B. Use a fully qualified name here
- SDPort = 8103
+ Address = @hostname@ # N.B. Use a fully qualified name here
+ SDPort = @sdport@
Password = "ccV3lVTsQRsdIUGyab0N4sMDavui2hOBkmpBU0aQKOr9"
Device = FileStorage
Media Type = File
+ Maximum Concurrent Jobs = 4
}
+Storage {
+ Name = File1
+ Address = @hostname@ # N.B. Use a fully qualified name here
+ SDPort = @sdport@
+ Password = "ccV3lVTsQRsdIUGyab0N4sMDavui2hOBkmpBU0aQKOr9"
+ Device = FileStorage1
+ Media Type = File1
+ Maximum Concurrent Jobs = 4
+}
+
+
# Definition of DLT tape storage device
#Storage {
# Name = DLTDrive
-# Address = rufus # N.B. Use a fully qualified name here
-# SDPort = 8103
+# Address = @hostname@ # N.B. Use a fully qualified name here
+# SDPort = @sdport@
# Password = "ccV3lVTsQRsdIUGyab0N4sMDavui2hOBkmpBU0aQKOr9" # password for Storage daemon
# Device = "HP DLT 80" # must be same as Device in Storage daemon
# Media Type = DLT8000 # must be same as MediaType in Storage daemon
# Definition of DDS tape storage device
#Storage {
# Name = SDT-10000
-# Address = rufus # N.B. Use a fully qualified name here
-# SDPort = 8103
+# Address = @hostname@ # N.B. Use a fully qualified name here
+# SDPort = @sdport@
# Password = "ccV3lVTsQRsdIUGyab0N4sMDavui2hOBkmpBU0aQKOr9" # password for Storage daemon
# Device = SDT-10000 # must be same as Device in Storage daemon
-# Media Type = DDS-4 # must be same as MediaType in Storage daemon
+# Media Type = tape # must be same as MediaType in Storage daemon
#}
# Definition of 8mm tape storage device
#Storage {
# Name = "8mmDrive"
-# Address = rufus # N.B. Use a fully qualified name here
-# SDPort = 8103
+# Address = @hostname@ # N.B. Use a fully qualified name here
+# SDPort = @sdport@
# Password = "ccV3lVTsQRsdIUGyab0N4sMDavui2hOBkmpBU0aQKOr9"
# Device = "Exabyte 8mm"
# MediaType = "8mm"
# Generic catalog service
Catalog {
Name = MyCatalog
- dbname = bacula; user = bacula; password = ""
+ @libdbi@
+ dbname = @db_name@; user = @db_user@; password = "@db_password@"
}
# Reasonable message delivery -- send most everything to email address
# and to the console
Messages {
Name = Standard
- mailcommand = "@sbindir@/smtp -h localhost -f \"\(Bacula\) %r\" -s \"Bacula: %t %e of %c %l\" %r"
- operatorcommand = "@sbindir@/smtp -h localhost -f \"\(Bacula\) %r\" -s \"Bacula: Intervention needed for %j\" %r"
- MailOnError = kern+ok@sibbald.com = all
- operator = kern+ok@sibbald.com = mount
- console = all, !skipped
+ mailcommand = "@sbindir@/bsmtp -h localhost -f \"\(Bacula regression\) %r\" -s \"Regression: %t %e of %c %l\" %r"
+ operatorcommand = "@sbindir@/bsmtp -h localhost -f \"\(Bacula regression\) %r\" -s \"Regression: Intervention needed for %j\" %r"
+# MailOnError = @job_email@ = all
+# operator = @job_email@ = mount
+ console = all, !skipped, !terminate, !restored
#
# WARNING! the following will create a file that you must cycle from
# time to time as it will grow indefinitely. However, it will
# also keep all your messages if they scroll off the console.
#
append = "@working_dir@/log" = all, !skipped
+ catalog = all, !skipped
+}
+
+Messages {
+ Name = NoEmail
+ mailcommand = "@sbindir@/bsmtp -h localhost -f \"\(Bacula regression\) %r\" -s \"Regression: %t %e of %c %l\" %r"
+ console = all, !skipped, !terminate
+#
+# WARNING! the following will create a file that you must cycle from
+# time to time as it will grow indefinitely. However, it will
+# also keep all your messages if they scroll off the console.
+#
+ append = "@working_dir@/log" = all, !skipped
+ catalog = all, !skipped
+}
+
+#
+# Message delivery for daemon messages (no job).
+Messages {
+ Name = Daemon
+ mailcommand = "@sbindir@/bsmtp -h @smtp_host@ -f \"\(Bacula regression\) %r\" -s \"Regression daemon message\" %r"
+# mail = @job_email@ = all, !skipped
+ console = all, !skipped, !saved
+ append = "@working_dir@/log" = all, !skipped
}
# Default pool definition
Recycle = yes # Bacula can automatically recycle Volumes
AutoPrune = yes # Prune expired volumes
Volume Retention = 365d # one year
- Accept Any Volume = yes # write on any volume in the pool
}