3 # Run a big test: a full backup of millions of files, then 100 incrementals,
4 # modifying 10% of the files before each one.
6 # The following environment variables can be used:
# Install the standard test configuration files (helper from the regression
# framework) and point the backup FileSet's file list at the data directory.
16 ${rscripts}/copy-test-confs
17 echo "${cwd}/files" >${tmp}/file-list
# Start from a clean restore area.
18 rm -rf ${tmp}/bacula-restores
# Rename the stock "CompressedTest" job to $JobName (framework helper).
21 change_jobname CompressedTest $JobName
# If ACCURATE is set (non-empty), turn on Accurate mode for the "speed" job
# by rewriting the Director config through a temp file (sed -i is avoided
# for portability). NOTE(review): the closing 'fi' is outside this excerpt.
22 if [ a$ACCURATE != a ]; then
23 sed 's/Name = "speed"/Name = "speed"; accurate=yes/' $conf/bacula-dir.conf > $tmp/1
24 mv $tmp/1 $conf/bacula-dir.conf
# If BASEJOB is set (non-empty), make "speed" an Accurate job based on the
# "speed" base job, using the same temp-file config rewrite as above.
# NOTE(review): the closing 'fi' is outside this excerpt.
27 if [ a$BASEJOB != a ]; then
28 sed 's/Name = "speed"/Name = "speed"; accurate=yes;basejob=speed/' $conf/bacula-dir.conf > $tmp/1
29 mv $tmp/1 $conf/bacula-dir.conf
# Number of jobs to run concurrently in each cycle; defaults to 1.
# NOTE(review): "CONCURENT" is misspelled, but it is the established
# environment-variable name — kept for compatibility.
33 export NB_CONCURENT=${NB_CONCURENT:-1}
37 # Create the large file tree (~1.5 million files) used as backup data.
38 echo "Creating 1.5M files"
39 perl -Mscripts::functions -e "create_many_files('$cwd/files');"
# Build the bconsole script for the initial Full backup: label a volume and
# run the job. $basejob holds extra run options (presumably set when BASEJOB
# mode is enabled — verify against the full script; not visible here).
41 cat <<END_OF_DATA >${tmp}/bconcmds
45 label storage=File volume=TestVolume001
46 run job=$JobName $basejob yes
53 check_for_zombie_jobs storage=File
# Rebuild the bconsole script, then append one Full run per concurrent job
# ($NB_CONCURENT iterations). The loop's do/done and the here-document
# terminators fall outside this excerpt.
55 cat <<END_OF_DATA >${tmp}/bconcmds
61 for j in $(seq 1 $NB_CONCURENT)
63 cat <<EOF >>${tmp}/bconcmds
65 run job=$JobName level=Full yes
69 cat <<EOF >>${tmp}/bconcmds
75 # Insert the X million file records into the catalog's File table
81 # Now run the many incremental jobs
# Reuse the same bconsole script for the incremental cycles by switching
# every Full level to Incremental (temp-file rewrite, as above).
82 sed 's/Full/Incremental/' ${tmp}/bconcmds > $tmp/1
83 mv $tmp/1 ${tmp}/bconcmds
# Modify ~10% of the files in subdirectory A, B, C, ... — chr($i+65) maps the
# loop index to a letter. $i comes from an enclosing loop outside this excerpt.
87 perl -Mscripts::functions -e "update_some_files('$cwd/files/'.chr($i+65));"
# Build the bconsole script that restores everything that was backed up
# into ${tmp}/bacula-restores (heredoc terminator outside this excerpt).
91 cat <<END_OF_DATA >${tmp}/bconcmds
99 restore where=${tmp}/bacula-restores storage=File select all done yes
# Make sure no job is left hanging on the storage daemon.
109 check_for_zombie_jobs storage=File
# Record the number of restored entries (logged via tee for later comparison),
# then clean up the restore area.
115 find ${tmp}/bacula-restores | wc -l | tee $tmp/bacula-restore-list
116 rm -rf ${tmp}/bacula-restores
# Archive the job log, tagged with the database backend and a timestamp.
118 cp working/log files/log${WHICHDB}-$(date +%F_%H-%M).log