nice tests/six-vol-test
nice tests/span-vol-test
nice tests/maxvol-test
+nice tests/maxvol2-test
nice tests/sparse-compressed-test
nice tests/sparse-test
nice tests/two-jobs-test
--- /dev/null
+#!/bin/sh
+
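+# Install the TLS-authentication test daemon configs, the test console
+# config, and the TLS CA/certificate files into bin/.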
+/bin/cp -f scripts/tls-auth-bacula-dir.conf bin/bacula-dir.conf
+/bin/cp -f scripts/tls-auth-bacula-sd.conf bin/bacula-sd.conf
+/bin/cp -f scripts/tls-auth-bacula-fd.conf bin/bacula-fd.conf
+/bin/cp -f scripts/test-console.conf bin/bconsole.conf
+/bin/cp -f scripts/tls-CA.pem bin/tls-CA.pem
+/bin/cp -f scripts/tls-cert.pem bin/tls-cert.pem
out="${cwd}/tmp/sed_tmp"
# set the ports used by these tests
+BASEPORT=${BASEPORT:-8101}
dirport=${BASEPORT}
fdport=`expr ${BASEPORT} '+' 1`
sdport=`expr ${BASEPORT} '+' 2`
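# For reference, with the default BASEPORT of 8101 the arithmetic above
# gives dirport=8101, fdport=8102, sdport=8103. A quick sanity check
# (a sketch only, not part of the regression scripts):
#   BASEPORT=8101; echo ${BASEPORT} `expr ${BASEPORT} + 1` `expr ${BASEPORT} + 2`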
sed -f ${out} ${cwd}/scripts/tls-bacula-sd.conf.in >${cwd}/scripts/tls-bacula-sd.conf
sed -f ${out} ${cwd}/scripts/tls-bacula-dir.conf.in >${cwd}/scripts/tls-bacula-dir.conf
+sed -f ${out} ${cwd}/scripts/tls-auth-bacula-fd.conf.in >${cwd}/scripts/tls-auth-bacula-fd.conf
+sed -f ${out} ${cwd}/scripts/tls-auth-bacula-sd.conf.in >${cwd}/scripts/tls-auth-bacula-sd.conf
+sed -f ${out} ${cwd}/scripts/tls-auth-bacula-dir.conf.in >${cwd}/scripts/tls-auth-bacula-dir.conf
+
+
sed -f ${out} ${cwd}/scripts/bacula-fd-2d.conf.in >${cwd}/scripts/bacula-fd-2d.conf
sed -f ${out} ${cwd}/scripts/bacula-sd-2d.conf.in >${cwd}/scripts/bacula-sd-2d.conf
sed -f ${out} ${cwd}/scripts/bacula-dir-2d.conf.in >${cwd}/scripts/bacula-dir-2d.conf
CFLAGS="-g -O2 -Wall" \
./configure \
--sbindir=${1}/bin \
- --archivedir=${1}/tmp \
--sysconfdir=${1}/bin \
--mandir=${1}/bin \
--with-pid-dir=${1}/working \
--enable-smartalloc \
--disable-readline \
--with-working-dir=${1}/working \
+ --with-archivedir=${1}/tmp \
--with-dump-email=${EMAIL} \
--with-job-email=${EMAIL} \
--with-smtp-host=${SMTP_HOST} \
--- /dev/null
+#
+# Default Bacula Director Configuration file
+#
+# The only thing that MUST be changed is to add one or more
+# file or directory names in the Include directive of the
+# FileSet resource.
+#
+# For Bacula release 1.39 or later
+#
+# You might also want to change the default email address
+# from root to your address. See the "mail" and "operator"
+# directives in the Messages resource.
+#
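+# As an illustration only (the regression FileSets below read their file
+# list from @tmpdir@/file-list instead), an Include can also name paths
+# directly:
+#
+#   FileSet {
+#     Name = "Example Set"
+#     Include {
+#       Options { signature = MD5 }
+#       File = /etc
+#       File = /usr/local/etc
+#     }
+#   }
+#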
+
+Director { # define myself
+ Name = @hostname@-dir
+ DIRPort = @dirport@ # where we listen for UA connections
+ QueryFile = "@scriptdir@/query.sql"
+ WorkingDirectory = "@working_dir@"
+ PidDirectory = "@piddir@"
+ SubSysDirectory = "@subsysdir@"
+ Maximum Concurrent Jobs = 4
+ Password = "pNvX1WiXnwv2C/F7E52LGvw6rKjbbPvu2kyuPa9pVaL3" # Console password
+ Messages = Standard
+}
+
+#
+# Define the main nightly save backup job
+# By default, this job will back up to disk in @tmpdir@
+Job {
+ Name = "NightlySave"
+ Type = Backup
+ Client=@hostname@-fd
+ FileSet="Full Set"
+ Storage = File
+ Messages = Standard
+ Pool = Default
+ Write Bootstrap = "@working_dir@/NightlySave.bsr"
+ Maximum Concurrent Jobs = 4
+ SpoolData=yes
+}
+
+Job {
+ Name = "MonsterSave"
+ Type = Backup
+ Client=@hostname@-fd
+ FileSet="Full Set"
+ Storage = File1
+ Messages = Standard
+ Pool = Default
+ Write Bootstrap = "@working_dir@/NightlySave.bsr"
+}
+
+
+Job {
+ Name = "VerifyVolume"
+ Type = Verify
+ Level = VolumeToCatalog
+ Client=@hostname@-fd
+ FileSet="Full Set"
+ Storage = File
+ Messages = Standard
+ Pool = Default
+ Write Bootstrap = "@working_dir@/NightlySave.bsr"
+}
+
+
+Job {
+ Name = "SparseTest"
+ Type = Backup
+ Client=@hostname@-fd
+ FileSet="SparseSet"
+ Storage = File
+ Messages = Standard
+ Pool = Default
+ Write Bootstrap = "@working_dir@/NightlySave.bsr"
+}
+
+Job {
+ Name = "CompressedTest"
+ Type = Backup
+ Client=@hostname@-fd
+ FileSet="CompressedSet"
+ Storage = File
+ Messages = Standard
+ Pool = Default
+ Maximum Concurrent Jobs = 4
+ Write Bootstrap = "@working_dir@/NightlySave.bsr"
+}
+
+Job {
+ Name = "SparseCompressedTest"
+ Type = Backup
+ Client=@hostname@-fd
+ FileSet="SparseCompressedSet"
+ Storage = File
+ Messages = Standard
+ Pool = Default
+ Write Bootstrap = "@working_dir@/NightlySave.bsr"
+}
+
+Job {
+ Name = "FIFOTest"
+ Type = Backup
+ Client=@hostname@-fd
+ FileSet="FIFOSet"
+ Storage = File
+ Messages = Standard
+ Pool = Default
+ Write Bootstrap = "@working_dir@/NightlySave.bsr"
+ ClientRunBeforeJob = "/bin/sleep 60"
+}
+
+
+
+# Backup the catalog database (after the nightly save)
+Job {
+ Name = "BackupCatalog"
+ Type = Backup
+ Client=@hostname@-fd
+ FileSet="Catalog"
+# Schedule = "WeeklyCycleAfterBackup"
+ Storage = File
+ Messages = Standard
+ Pool = Default
+ # This creates an ASCII copy of the catalog
+ RunBeforeJob = "@sbindir@/make_catalog_backup -u regress"
+ # This deletes the copy of the catalog
+ RunAfterJob = "@sbindir@/delete_catalog_backup"
+ Write Bootstrap = "@working_dir@/BackupCatalog.bsr"
+}
+
+JobDefs {
+ Name = "BackupJob"
+ Type = Backup
+ Pool = Default
+ Storage = File
+ Messages = Standard
+ Priority = 10
+}
+
+Job {
+ JobDefs = "BackupJob"
+ Name = "bug621-job-1"
+ Client = @hostname@-fd
+ FileSet="Full Set"
+ ClientRunBeforeJob = "/bin/sleep 60"
+}
+
+Job {
+ JobDefs = "BackupJob"
+ Name = "bug621-job-2"
+ Client = @hostname@-fd
+ FileSet = "Full Set"
+ Max Run Time = 30
+ Priority = 15
+}
+
+
+# Standard Restore template, to be changed by Console program
+Job {
+ Name = "RestoreFiles"
+ Type = Restore
+ Client=@hostname@-fd
+ FileSet="Full Set"
+ Storage = File
+ Messages = Standard
+ Pool = Default
+ Where = @tmpdir@/bacula-restores
+}
+
+
+# List of files to be backed up
+FileSet {
+ Name = "Full Set"
+ Include { Options { signature=MD5 }
+ File = <@tmpdir@/file-list
+ }
+}
+
+FileSet {
+ Name = "SparseSet"
+ Include {
+ Options {
+ signature=MD5
+ sparse=yes
+ }
+ File = <@tmpdir@/file-list
+ }
+}
+
+FileSet {
+ Name = "CompressedSet"
+ Include {
+ Options {
+ signature=MD5
+ compression=GZIP
+ }
+ File = <@tmpdir@/file-list
+ }
+}
+
+FileSet {
+ Name = "FIFOSet"
+ Include {
+ Options {
+ readfifo = yes
+ signature=MD5
+ }
+ File = <@tmpdir@/file-list
+ }
+}
+
+
+FileSet {
+ Name = "SparseCompressedSet"
+ Include {
+ Options {
+ signature=MD5
+ compression=GZIP
+ sparse=yes
+ }
+ File = <@tmpdir@/file-list
+ }
+}
+
+
+
+#
+# When to do the backups, full backup on first sunday of the month,
+# differential (i.e. incremental since full) every other sunday,
+# and incremental backups other days
+Schedule {
+ Name = "WeeklyCycle"
+ Run = Level=Full 1st sun at 1:05
+ Run = Level=Differential 2nd-5th sun at 1:05
+ Run = Level=Incremental mon-sat at 1:05
+}
+
+# This schedule does the catalog. It starts after the WeeklyCycle
+Schedule {
+ Name = "WeeklyCycleAfterBackup"
+ Run = Level=Full sun-sat at 1:10
+}
+
+# This is the backup of the catalog
+FileSet {
+ Name = "Catalog"
+ Include {
+ Options {
+ signature=MD5
+ }
+ File = /home/kern/bacula/regress/bin/working/bacula.sql
+ }
+}
+
+# Client (File Services) to backup
+Client {
+ Name = @hostname@-fd
+ Address = @hostname@
+ FDPort = @fdport@
+ Catalog = MyCatalog
+ Password = "xevrjURYoCHhn26RaJoWbeWXEY/a3VqGKp/37tgWiuHc" # password for FileDaemon
+ File Retention = 30d # 30 days
+ Job Retention = 180d # six months
+ AutoPrune = yes # Prune expired Jobs/Files
+ Maximum Concurrent Jobs = 4
+ TLS Authenticate = yes
+ TLS Require = no
+ TLS Certificate = "@scriptdir@/tls-cert.pem"
+ TLS Key = "@scriptdir@/tls-cert.pem"
+ TLS CA Certificate File = "@scriptdir@/tls-CA.pem"
+}
+
+# Definition of file storage device
+Storage {
+ Name = File
+ Address = @hostname@ # N.B. Use a fully qualified name here
+ SDPort = @sdport@
+ Password = "ccV3lVTsQRsdIUGyab0N4sMDavui2hOBkmpBU0aQKOr9"
+ Device = FileStorage
+ Media Type = File
+ Maximum Concurrent Jobs = 4
+ TLS Authenticate = yes
+ TLS Require = no
+ TLS Certificate = "@scriptdir@/tls-cert.pem"
+ TLS Key = "@scriptdir@/tls-cert.pem"
+ TLS CA Certificate File = "@scriptdir@/tls-CA.pem"
+}
+
+Storage {
+ Name = File1
+ Address = @hostname@ # N.B. Use a fully qualified name here
+ SDPort = @sdport@
+ Password = "ccV3lVTsQRsdIUGyab0N4sMDavui2hOBkmpBU0aQKOr9"
+ Device = FileStorage1
+ Media Type = File1
+ Maximum Concurrent Jobs = 4
+ TLS Authenticate = yes
+ TLS Require = no
+ TLS Certificate = "@scriptdir@/tls-cert.pem"
+ TLS Key = "@scriptdir@/tls-cert.pem"
+ TLS CA Certificate File = "@scriptdir@/tls-CA.pem"
+}
+
+
+# Definition of DLT tape storage device
+#Storage {
+# Name = DLTDrive
+# Address = @hostname@ # N.B. Use a fully qualified name here
+# SDPort = @sdport@
+# Password = "ccV3lVTsQRsdIUGyab0N4sMDavui2hOBkmpBU0aQKOr9" # password for Storage daemon
+# Device = "HP DLT 80" # must be same as Device in Storage daemon
+# Media Type = DLT8000 # must be same as MediaType in Storage daemon
+#}
+
+# Definition of DDS tape storage device
+#Storage {
+# Name = SDT-10000
+# Address = @hostname@ # N.B. Use a fully qualified name here
+# SDPort = @sdport@
+# Password = "ccV3lVTsQRsdIUGyab0N4sMDavui2hOBkmpBU0aQKOr9" # password for Storage daemon
+# Device = SDT-10000 # must be same as Device in Storage daemon
+# Media Type = DDS-4 # must be same as MediaType in Storage daemon
+#}
+
+# Definition of 8mm tape storage device
+#Storage {
+# Name = "8mmDrive"
+# Address = @hostname@ # N.B. Use a fully qualified name here
+# SDPort = @sdport@
+# Password = "ccV3lVTsQRsdIUGyab0N4sMDavui2hOBkmpBU0aQKOr9"
+# Device = "Exabyte 8mm"
+# MediaType = "8mm"
+#}
+
+
+# Generic catalog service
+Catalog {
+ Name = MyCatalog
+ dbname = regress; user = regress; password = ""
+}
+
+# Reasonable message delivery -- send most everything to email address
+# and to the console
+Messages {
+ Name = Standard
+ mailcommand = "@sbindir@/bsmtp -h localhost -f \"\(Bacula regression\) %r\" -s \"Regression: %t %e of %c %l\" %r"
+ operatorcommand = "@sbindir@/bsmtp -h localhost -f \"\(Bacula regression\) %r\" -s \"Regression: Intervention needed for %j\" %r"
+ MailOnError = @job_email@ = all
+ operator = @job_email@ = mount
+ console = all, !skipped, !terminate, !restored
+#
+# WARNING! the following will create a file that you must cycle from
+# time to time as it will grow indefinitely. However, it will
+# also keep all your messages if they scroll off the console.
+# (One way to cycle the file is sketched after this resource.)
+#
+ append = "@working_dir@/log" = all, !skipped
+}
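+# A minimal sketch of cycling the append file named above (an assumption:
+# the daemons are stopped, or at least idle, while the file is moved):
+#
+#   mv "@working_dir@/log" "@working_dir@/log.old"
+#   touch "@working_dir@/log"
+#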
+
+Messages {
+ Name = NoEmail
+ mailcommand = "@sbindir@/bsmtp -h localhost -f \"\(Bacula regression\) %r\" -s \"Regression: %t %e of %c %l\" %r"
+ console = all, !skipped, !terminate
+#
+# WARNING! the following will create a file that you must cycle from
+# time to time as it will grow indefinitely. However, it will
+# also keep all your messages if they scroll off the console.
+#
+ append = "@working_dir@/log" = all, !skipped
+}
+
+
+# Default pool definition
+Pool {
+ Name = Default
+ Pool Type = Backup
+ Recycle = yes # Bacula can automatically recycle Volumes
+ AutoPrune = yes # Prune expired volumes
+ Volume Retention = 365d # one year
+# Label Format = "TEST-${Year}-${Month:p/2/0/r}-${Day:p/2/0/r}:${NumVols}"
+}
--- /dev/null
+#
+# Default Bacula File Daemon Configuration file
+#
+# For Bacula release 2.0
+#
+# There is not much to change here except perhaps the
+# File daemon Name.
+#
+
+#
+# List Directors who are permitted to contact this File daemon
+#
+Director {
+ Name = @hostname@-dir
+ Password = "xevrjURYoCHhn26RaJoWbeWXEY/a3VqGKp/37tgWiuHc"
+ TLS Authenticate = yes
+ TLS Require = no
+ TLS Certificate = "@scriptdir@/tls-cert.pem"
+ TLS Key = "@scriptdir@/tls-cert.pem"
+ TLS CA Certificate File = "@scriptdir@/tls-CA.pem"
+}
+
+#
+# "Global" File daemon configuration specifications
+#
+FileDaemon { # this is me
+ Name = @hostname@-fd
+ FDPort = @fdport@ # where we listen for the director
+ WorkingDirectory = "@working_dir@"
+ Pid Directory = "@piddir@"
+ SubSys Directory = "@subsysdir@"
+ TLS Authenticate = yes
+ TLS Require = no
+ TLS Certificate = "@scriptdir@/tls-cert.pem"
+ TLS Key = "@scriptdir@/tls-cert.pem"
+ TLS CA Certificate File = "@scriptdir@/tls-CA.pem"
+}
+
+# Send all messages except skipped files back to Director
+Messages {
+ Name = Standard
+ director = @hostname@-dir = all, !terminate
+}
--- /dev/null
+#
+# Default Bacula Storage Daemon Configuration file
+#
+# For Bacula release 1.33
+#
+# You may need to change the name of your tape drive
+# on the "Archive Device" directive in the Device
+# resource. If you change the Name and/or the
+# "Media Type" in the Device resource, please ensure
+# that dird.conf has corresponding changes.
+#
+
+Storage { # definition of myself
+ Name = @hostname@-sd
+ SDPort = @sdport@ # Director's port
+ WorkingDirectory = "@working_dir@"
+ Pid Directory = "@piddir@"
+ Subsys Directory = "@subsysdir@"
+ TLS Authenticate = yes
+ TLS Require = no
+ TLS Certificate = "@scriptdir@/tls-cert.pem"
+ TLS Key = "@scriptdir@/tls-cert.pem"
+ TLS CA Certificate File = "@scriptdir@/tls-CA.pem"
+}
+
+#
+# List Directors who are permitted to contact Storage daemon
+#
+Director {
+ Name = @hostname@-dir
+ Password = "ccV3lVTsQRsdIUGyab0N4sMDavui2hOBkmpBU0aQKOr9"
+ TLS Authenticate = yes
+ TLS Require = no
+ TLS Certificate = "@scriptdir@/tls-cert.pem"
+ TLS Key = "@scriptdir@/tls-cert.pem"
+ TLS CA Certificate File = "@scriptdir@/tls-CA.pem"
+}
+
+#
+# Devices supported by this Storage daemon
+# To connect, the Director's bacula-dir.conf must have the
+# same Name and MediaType.
+#
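+# For example, the FileStorage device below pairs with this Storage
+# resource on the Director side (shown here only as a reminder of the
+# correspondence; see bacula-dir.conf for the full resource):
+#
+#   Storage {
+#     Name = File
+#     Device = FileStorage      # must match the Device Name below
+#     Media Type = File         # must match the Media Type below
+#   }
+#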
+
+Device {
+ Name = FileStorage
+ Media Type = File
+ Archive Device = @tmpdir@
+ LabelMedia = yes; # lets Bacula label unlabelled media
+ Random Access = Yes;
+ AutomaticMount = yes; # when device opened, read it
+ RemovableMedia = no;
+ AlwaysOpen = no;
+# Maximum File Size = 10KB
+}
+
+Device {
+ Name = FileStorage1
+ Media Type = File1
+ Archive Device = @tmpdir@
+ LabelMedia = yes; # lets Bacula label unlabelled media
+ Random Access = Yes;
+ AutomaticMount = yes; # when device opened, read it
+ RemovableMedia = no;
+ AlwaysOpen = no;
+}
+
+
+#Device {
+# Name = "HP DLT 80"
+# Media Type = DLT8000
+# Archive Device = /dev/nst0
+# AutomaticMount = yes; # when device opened, read it
+# AlwaysOpen = yes;
+# RemovableMedia = yes;
+#}
+
+#Device {
+# Name = SDT-7000 #
+# Media Type = DDS-2
+# Archive Device = /dev/nst0
+# AutomaticMount = yes; # when device opened, read it
+# AlwaysOpen = yes;
+# RemovableMedia = yes;
+#}
+
+#Device {
+# Name = Floppy
+# Media Type = Floppy
+# Archive Device = /mnt/floppy
+# RemovableMedia = yes;
+# Random Access = Yes;
+# AutomaticMount = yes; # when device opened, read it
+# AlwaysOpen = no;
+#}
+
+#
+# A very old Exabyte with no end of media detection
+#
+#Device {
+# Name = "Exabyte 8mm"
+# Media Type = "8mm"
+# Archive Device = /dev/nst0
+# Hardware end of medium = No;
+# AutomaticMount = yes; # when device opened, read it
+# AlwaysOpen = Yes;
+# RemovableMedia = yes;
+#}
+
+#
+# Send all messages to the Director,
+# mount messages also are sent to the email address
+#
+Messages {
+ Name = Standard
+ director = @hostname@-dir = all, !terminate
+}
--- /dev/null
+#!/bin/sh
+#
+# Run a simple backup of the Bacula build directory but
+# create three volumes and run repeated backups causing the
+# volumes to be recycled, cycling through the volumes
+# several times. Tests maxvoljobs and volretention.
+#
+TestName="maxvol2-test"
+JobName=Maxvol2
+. scripts/functions
+
+scripts/cleanup
+scripts/copy-test-confs
+echo "${cwd}/build" >${cwd}/tmp/file-list
+
+change_jobname NightlySave $JobName
+start_test
+
+cat <<END_OF_DATA >${cwd}/tmp/bconcmds
+@output /dev/null
+messages
+@$out ${cwd}/tmp/log1.out
+label storage=File1 volume=TestVolume001
+label storage=File1 volume=TestVolume002
+label storage=File1 volume=TestVolume003
+update Volume=TestVolume001 volretention=10s
+update Volume=TestVolume001 maxvoljobs=1
+update Volume=TestVolume002 volretention=10s
+update Volume=TestVolume002 maxvoljobs=1
+update Volume=TestVolume003 volretention=10s
+update Volume=TestVolume003 maxvoljobs=1
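+@# each volume now accepts a single job and becomes prunable 10 seconds
+@# after its last write, so the repeated runs below force recycling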
+list volumes
+setdebug level=50 storage=File1
+run job=$JobName storage=File1 level=full yes
+run job=$JobName storage=File1 level=full yes
+run job=$JobName storage=File1 level=full yes
+wait
+messages
+list volumes
+@sleep 10
+run job=$JobName storage=File1 level=full yes
+run job=$JobName storage=File1 level=full yes
+wait
+messages
+list volumes
+@sleep 10
+run job=$JobName storage=File1 level=full yes
+run job=$JobName storage=File1 level=full yes
+wait
+messages
+list volumes
+@sleep 10
+run job=$JobName storage=File1 level=full yes
+run job=$JobName storage=File1 level=full yes
+wait
+messages
+list volumes
+@sleep 10
+run job=$JobName storage=File1 level=full yes
+run job=$JobName storage=File1 level=full yes
+wait
+messages
+list volumes
+@sleep 10
+run job=$JobName storage=File1 level=full yes
+run job=$JobName storage=File1 level=full yes
+run job=$JobName storage=File1 level=full yes
+wait
+messages
+list volumes
+@#
+@# now do a restore
+@#
+@$out ${cwd}/tmp/log2.out
+restore where=${cwd}/tmp/bacula-restores select storage=File1
+unmark *
+mark *
+done
+yes
+wait
+messages
+@output
+quit
+END_OF_DATA
+
+run_bacula
+check_for_zombie_jobs storage=File1
+stop_bacula
+
+check_two_logs
+check_restore_diff
+end_test
label storage=File1 volume=TestVolume002
label storage=File1 volume=TestVolume003
update Volume=TestVolume001 volretention=10s
-update Volume=TestVolume001 maxvoljobs=1
+update Volume=TestVolume001 maxvolbytes=200000000
update Volume=TestVolume002 volretention=10s
-update Volume=TestVolume002 maxvoljobs=1
+update Volume=TestVolume002 maxvolbytes=200000000
update Volume=TestVolume003 volretention=10s
-update Volume=TestVolume003 maxvoljobs=1
+update Volume=TestVolume003 maxvolbytes=200000000
list volumes
+@#setdebug level=100 storage=File1
+run job=$JobName storage=File1 level=full yes
run job=$JobName storage=File1 level=full yes
-wait
-messages
-list volumes
run job=$JobName storage=File1 level=full yes
wait
messages
list volumes
run job=$JobName storage=File1 level=full yes
+run job=$JobName storage=File1 level=full yes
+run job=$JobName storage=File1 level=full yes
wait
messages
list volumes
@sleep 10
run job=$JobName storage=File1 level=full yes
+run job=$JobName storage=File1 level=full yes
+run job=$JobName storage=File1 level=full yes
wait
messages
list volumes
+@sleep 10
+run job=$JobName storage=File1 level=full yes
+run job=$JobName storage=File1 level=full yes
run job=$JobName storage=File1 level=full yes
wait
messages
list volumes
+@sleep 10
+run job=$JobName storage=File1 level=full yes
+run job=$JobName storage=File1 level=full yes
run job=$JobName storage=File1 level=full yes
wait
messages
wait
messages
@#
-@# now do a restore
+@# now run 11 restores
@#
@$out ${cwd}/tmp/log2.out
restore where=${cwd}/tmp/bacula-restores storage=File
7
<${cwd}/tmp/restore-list
+yes
+wait
+restore where=${cwd}/tmp/bacula-restores storage=File
+7
+<${cwd}/tmp/restore-list
+
+yes
+wait
+restore where=${cwd}/tmp/bacula-restores storage=File
+7
+<${cwd}/tmp/restore-list
+
+yes
+wait
+restore where=${cwd}/tmp/bacula-restores storage=File
+7
+<${cwd}/tmp/restore-list
+
+yes
+wait
+restore where=${cwd}/tmp/bacula-restores storage=File
+7
+<${cwd}/tmp/restore-list
+
+yes
+wait
+restore where=${cwd}/tmp/bacula-restores storage=File
+7
+<${cwd}/tmp/restore-list
+
+yes
+wait
+restore where=${cwd}/tmp/bacula-restores storage=File
+7
+<${cwd}/tmp/restore-list
+
+yes
+wait
+restore where=${cwd}/tmp/bacula-restores storage=File
+7
+<${cwd}/tmp/restore-list
+
+yes
+wait
+restore where=${cwd}/tmp/bacula-restores storage=File
+7
+<${cwd}/tmp/restore-list
+
+yes
+wait
+restore where=${cwd}/tmp/bacula-restores storage=File
+7
+<${cwd}/tmp/restore-list
+
+yes
+wait
+restore where=${cwd}/tmp/bacula-restores storage=File
+7
+<${cwd}/tmp/restore-list
+
yes
wait
messages
export bstat
export rstat
-grep 'dir: BeforeJob: run command "/bin/echo RunBeforeJob"' ${cwd}/tmp/RUN_ALL_OK.log >/dev/null 2>&1
+grep 'BeforeJob: run command "/bin/echo RunBeforeJob"' ${cwd}/tmp/RUN_ALL_OK.log >/dev/null 2>&1
a=$?
-grep 'fd: ClientRunBeforeJob: ClientRunBeforeJob' ${cwd}/tmp/RUN_ALL_OK.log >/dev/null 2>&1
+grep 'ClientRunBeforeJob: ClientRunBeforeJob' ${cwd}/tmp/RUN_ALL_OK.log >/dev/null 2>&1
b=$?
-grep 'fd: ClientAfterJob: run command "/bin/echo ClientRunAfterJob' ${cwd}/tmp/RUN_ALL_OK.log >/dev/null 2>&1
+grep 'ClientAfterJob: run command "/bin/echo ClientRunAfterJob' ${cwd}/tmp/RUN_ALL_OK.log >/dev/null 2>&1
c=$?
-grep 'dir: AfterJob: run command "/bin/echo RunAfterJob' ${cwd}/tmp/RUN_ALL_OK.log >/dev/null 2>&1
+grep 'AfterJob: run command "/bin/echo RunAfterJob' ${cwd}/tmp/RUN_ALL_OK.log >/dev/null 2>&1
d=$?
if [ $a = 0 -a $b = 0 -a $c = 0 -a $d = 0 ]
then
rstat=1
fi
-grep 'dir: BeforeJob: run command "/bin/false RUN_DIR_FAILED"' ${cwd}/tmp/RUN_DIR_FAILED.log >/dev/null 2>&1
+grep 'BeforeJob: run command "/bin/false RUN_DIR_FAILED"' ${cwd}/tmp/RUN_DIR_FAILED.log >/dev/null 2>&1
a=$?
-grep 'dir: AfterJob: RunAfterFailedJob' ${cwd}/tmp/RUN_DIR_FAILED.log >/dev/null 2>&1
+grep 'AfterJob: RunAfterFailedJob' ${cwd}/tmp/RUN_DIR_FAILED.log >/dev/null 2>&1
b=$?
if [ $a = 0 -a $b = 0 ]
then
rstat=1
fi
-grep 'fd: ClientBeforeJob: run command "/bin/false RUN_FD_FAILED1"' ${cwd}/tmp/RUN_FD_FAILED.log >/dev/null 2>&1
+grep 'ClientBeforeJob: run command "/bin/false RUN_FD_FAILED1"' ${cwd}/tmp/RUN_FD_FAILED.log >/dev/null 2>&1
a=$?
-grep 'fd: ClientBeforeJob: run command "/bin/false RUN_FD_FAILED2"' ${cwd}/tmp/RUN_FD_FAILED.log >/dev/null 2>&1
+grep 'ClientBeforeJob: run command "/bin/false RUN_FD_FAILED2"' ${cwd}/tmp/RUN_FD_FAILED.log >/dev/null 2>&1
b=$?
-grep 'dir: AfterJob: run command "/bin/echo RunAfterFailedJob"' ${cwd}/tmp/RUN_FD_FAILED.log >/dev/null 2>&1
+grep 'AfterJob: run command "/bin/echo RunAfterFailedJob"' ${cwd}/tmp/RUN_FD_FAILED.log >/dev/null 2>&1
c=$?
#grep 'touching' ${cwd}/tmp/RUN_FD_FAILED.log >/dev/null 2>&1
test -f ${cwd}/tmp/RUN_FD_FAILED
rstat=1
fi
-grep 'fd: ClientBeforeJob: run command "/bin/false RUN_FD_FAILED1"' ${cwd}/tmp/RUN_FD_FAILED2.log >/dev/null 2>&1
+grep 'ClientBeforeJob: run command "/bin/false RUN_FD_FAILED1"' ${cwd}/tmp/RUN_FD_FAILED2.log >/dev/null 2>&1
a=$?
-grep 'fd: ClientBeforeJob: run command "/bin/false RUN_FD_FAILED2"' ${cwd}/tmp/RUN_FD_FAILED2.log >/dev/null 2>&1
+grep 'ClientBeforeJob: run command "/bin/false RUN_FD_FAILED2"' ${cwd}/tmp/RUN_FD_FAILED2.log >/dev/null 2>&1
b=$?
-grep 'fd: ClientBeforeJob: run command "/bin/false RUN_FD_FAILED3"' ${cwd}/tmp/RUN_FD_FAILED2.log >/dev/null 2>&1
+grep 'ClientBeforeJob: run command "/bin/false RUN_FD_FAILED3"' ${cwd}/tmp/RUN_FD_FAILED2.log >/dev/null 2>&1
c=$?
-grep 'dir: AfterJob: run command "/bin/echo RunAfterFailedJob"' ${cwd}/tmp/RUN_FD_FAILED2.log >/dev/null 2>&1
+grep 'AfterJob: run command "/bin/echo RunAfterFailedJob"' ${cwd}/tmp/RUN_FD_FAILED2.log >/dev/null 2>&1
d=$?
grep '*** Backup Error ***' ${cwd}/tmp/RUN_FD_FAILED2.log >/dev/null 2>&1
e=$?
rstat=1
fi
-grep 'fd: ClientBeforeJob: run command "/bin/false RUN_FD_WARNING"' ${cwd}/tmp/RUN_FD_WARNING.log >/dev/null 2>&1
+grep 'ClientBeforeJob: run command "/bin/false RUN_FD_WARNING"' ${cwd}/tmp/RUN_FD_WARNING.log >/dev/null 2>&1
a=$?
grep 'RunAfterFailedJob' ${cwd}/tmp/RUN_FD_WARNING.log >/dev/null 2>&1
b=$?
--- /dev/null
+#!/bin/sh
+#
+# Run a simple backup of the Bacula build directory with TLS
+# authentication enabled, then restore it.
+#
+TestName="tls-authenticate-test"
+JobName=tls
+. scripts/functions
+
+scripts/cleanup
+scripts/copy-tls-confs
+echo "${cwd}/build" >${cwd}/tmp/file-list
+
+change_jobname CompressedTest $JobName
+start_test
+
+cat <<END_OF_DATA >tmp/bconcmds
+@output
+messages
+@$out tmp/log1.out
+setdebug level=100 dir
+setdebug level=100 storage=File
+setdebug level=100 client
+status all
+status all
+messages
+label storage=File volume=TestVolume001 pool=Default
+run job=$JobName storage=File yes
+wait
+messages
+@#
+@# now do a restore
+@#
+@$out tmp/log2.out
+restore where=${cwd}/tmp/bacula-restores select storage=File
+unmark *
+mark *
+done
+yes
+wait
+messages
+@output
+quit
+END_OF_DATA
+
+run_bacula
+check_for_zombie_jobs storage=File
+stop_bacula
+
+check_two_logs
+check_restore_diff
+end_test
cp -Rp weird-files weird-files2
echo "${cwd}/weird-files2" >${cwd}/tmp/file-list
cd weird-files2
-perl socket.pl
+#perl socket.pl # we no longer restore sockets
cd ..
change_jobname NightlySave $JobName
bin/testls weird-files2 >${cwd}/tmp/restored
check_two_logs
-#diff ${cwd}/tmp/original ${cwd}/tmp/restored 2>&1 >/dev/null
-diff ${cwd}/tmp/original ${cwd}/tmp/restored
+diff ${cwd}/tmp/original ${cwd}/tmp/restored 2>&1 >/dev/null
+#diff -u ${cwd}/tmp/original ${cwd}/tmp/restored
dstat=$?
end_test