#!/bin/sh
#
# Create a big Volume > 5 GB and back up to it to test disk
#   seeking on big volumes.  We cheat and artificially grow
#   the volume.
#
TestName="big-vol-test"
JobName=bigvol
. scripts/functions

scripts/cleanup
scripts/copy-test-confs
echo "${cwd}/build" >/tmp/file-list

change_jobname CompressedTest $JobName
start_test

cat <<END_OF_DATA >tmp/bconcmds
@output /dev/null
messages
@$out tmp/log1.out
@#setdebug level=100 storage=File
label storage=File volume=TestVolume001
run job=$JobName yes
wait
messages
quit
END_OF_DATA

run_bacula
check_for_zombie_jobs storage=File

#
# Now increase the size of the Volume using gigaslam
#
cd ${cwd}/tmp
# make 1 GB file
${cwd}/build/src/tools/gigaslam
if [ $? != 0 ]; then
   echo "Execute of ${cwd}/build/src/tools/gigaslam failed."
   rm -f gigaslam.gif
   exit 1
fi
# Append five 1 GB copies to push the Volume past 5 GB
cat gigaslam.gif >>TestVolume001
cat gigaslam.gif >>TestVolume001
cat gigaslam.gif >>TestVolume001
cat gigaslam.gif >>TestVolume001
cat gigaslam.gif >>TestVolume001
size=`ls -l TestVolume001 | cut -f 5 -d ' '`
ls -l TestVolume001
cd ${cwd}

# Record the inflated size in the catalog, run a second full backup,
# then restore everything from the big Volume.
cat <<END_OF_DATA >tmp/bconcmds
@output /dev/null
messages
@$out tmp/log1.out
sql
UPDATE Media SET VolBytes=${size} WHERE VolumeName='TestVolume001';

llist volume=TestVolume001
@# Now do another full save with big Volume
run level=Full job=$JobName yes
wait
messages
@#
@# now do a restore
@#
sql
SELECT * FROM JobMedia;

@output tmp/log2.out
restore where=${cwd}/tmp/bacula-restores storage=File select all done
yes
wait
messages
@$out
quit
END_OF_DATA

run_bconsole
check_for_zombie_jobs storage=File
stop_bacula

check_two_logs
check_restore_diff
end_test

# Get rid of big files
rm -f ${cwd}/tmp/gigaslam.gif
rm -f ${cwd}/tmp/TestVolume001
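
# Note on the size extraction above: parsing `ls -l` with
# `cut -f 5 -d ' '` assumes the byte count is exactly the fifth
# space-separated token, which breaks if ls pads its columns with
# extra spaces.  A minimal, more portable sketch (an alternative,
# not part of the original test) counts the bytes directly:
#
#   size=`wc -c <TestVolume001`
#
# Redirecting into wc avoids a filename in its output; any leading
# whitespace some wc implementations print is harmless in the
# catalog UPDATE statement.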