3 # Run two jobs at the same time
6 TestName="concurrent-jobs-test"
7 JobName=concurrent-jobs
# The file-list names what the FileSet backs up: a single large test file.
12 echo "${cwd}/tmp/largefile" >${cwd}/tmp/file-list
# Prefer random data when /dev/urandom exists as a character device;
# otherwise (branch below) synthesize the file from bacula-dir binary data.
13 if test -c /dev/urandom ; then
14 # Create 56MB file with random data
15 echo "Creating a 56MB file with random data ..."
16 dd if=/dev/urandom of=${cwd}/tmp/largefile bs=1024 count=55000
# Fallback path: build ~56MB by repeatedly concatenating a 1MB slice
# of the bacula-dir binary (1MB -> 5MB -> 25MB-ish -> large file).
18 echo "Creating a 56MB file with bacula-dir data ..."
19 dd if=$bin/bacula-dir of=${cwd}/tmp/1 bs=1024 count=1000
20 cat ${cwd}/tmp/1 ${cwd}/tmp/1 ${cwd}/tmp/1 ${cwd}/tmp/1 ${cwd}/tmp/1 >${cwd}/tmp/2
# NOTE(review): '>>' appends — a stale tmp/3 left over from a previous run
# would inflate largefile; the sibling redirections use '>'. Confirm intent.
22 cat ${cwd}/tmp/2 ${cwd}/tmp/2 ${cwd}/tmp/2 ${cwd}/tmp/2 ${cwd}/tmp/2 >>${cwd}/tmp/3
24 cat ${cwd}/tmp/3 ${cwd}/tmp/3 ${cwd}/tmp/3 ${cwd}/tmp/3 ${cwd}/tmp/3 >${cwd}/tmp/largefile
28 echo "largefile created"
# Rename the stock CompressedTest job to this test's job name.
30 change_jobname CompressedTest $JobName
# Build the bconsole command script: label one File volume, queue four
# Full backups of the same job to run concurrently, then restore them
# for verification. (Heredoc body is bconsole input — do not edit inline.)
33 cat <<END_OF_DATA >${cwd}/tmp/bconcmds
36 @$out ${cwd}/tmp/log1.out
37 label storage=File volume=TestVolume001
38 run job=$JobName level=Full yes
39 run job=$JobName level=Full yes
40 run job=$JobName level=Full yes
41 run job=$JobName level=Full yes
47 @$out ${cwd}/tmp/log2.out
48 restore where=${cwd}/tmp/bacula-restores select storage=File
# Ensure no jobs are still running/stuck on the File storage daemon.
60 check_for_zombie_jobs storage=File
# Compare the restored copy against the original; only the exit status
# matters, so discard all diff output. Redirection order fixed:
# '>/dev/null 2>&1' discards both streams, whereas the previous
# '2>&1 >/dev/null' duplicated stderr to the terminal before stdout
# was redirected, leaving diff's error messages visible.
64 diff ${cwd}/tmp/largefile ${cwd}/tmp/bacula-restores${cwd}/tmp/largefile >/dev/null 2>&1