our @ISA = qw(Exporter);
our @EXPORT = qw(update_some_files create_many_files check_multiple_copies
update_client $HOST $BASEPORT add_to_backup_list check_volume_size
- create_many_dirs
+ create_many_dirs cleanup start_bacula stop_bacula get_resource
+ set_maximum_concurent_jobs get_time
check_min_volume_size check_max_volume_size $estat $bstat $rstat $zstat
$cwd $bin $scripts $conf $rscripts $tmp $working extract_resource
$db_name $db_user $db_password $src $tmpsrc);
$estat = $rstat = $bstat = $zstat = 0;
}
-sub extract_resource
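+# run the $rscripts/cleanup script to reset the test environment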
+sub cleanup
{
- my ($file, $type, $name) = @_;
+ system("$rscripts/cleanup");
+ return $? == 0;
+}
+
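+# start the bacula daemons, then reset the client_group and location
+# catalog tables through bconsole; returns true if the start succeeded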
+sub start_bacula
+{
+ my $ret;
+ $ENV{LANG}='C';
+ system("$bin/bacula start");
+ $ret = $? == 0;
+    open(FP, ">$tmp/bcmd") or die "Can't write to $tmp/bcmd";
+ print FP "sql\ntruncate client_group;\ntruncate client_group_member;\nupdate Media set LocationId=0;\ntruncate location;\n\n";
+ close(FP);
+ system("cat $tmp/bcmd | $bin/bconsole >/dev/null");
+ return $ret;
+}
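+
+# stop the bacula daemons, return true on success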
+sub stop_bacula
+{
+ $ENV{LANG}='C';
+ system("$bin/bacula stop");
+ return $? == 0;
+}
+
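+# return the text of the resource of type $type named $name found in $file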
+sub get_resource
+{
+ my ($file, $type, $name) = @_;
+ my $ret;
open(FP, $file) or die "Can't open $file";
my $content = join("", <FP>);
if ($content =~ m/(^$type {[^}]+?Name\s*=\s*"?$name"?[^}]+?^})/ms) {
- print $1, "\n";
+ $ret = $1;
}
close(FP);
+ return $ret;
+}
+
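+# like get_resource(), but print the resource to stdout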
+sub extract_resource
+{
+ my $ret = get_resource(@_);
+ if ($ret) {
+ print $ret, "\n";
+ }
}
sub check_min_volume_size
}
}
+# You can change the maximum concurrent jobs in any config file.
+# If specified, the change can be restricted to one resource type, or to
+# a single named resource of that type (both arguments are optional)
+# set_maximum_concurent_jobs('$conf/bacula-dir.conf', 100);
+# set_maximum_concurent_jobs('$conf/bacula-dir.conf', 100, 'Director');
+# set_maximum_concurent_jobs('$conf/bacula-dir.conf', 100, 'Device', 'Drive-0');
+sub set_maximum_concurent_jobs
+{
+ my ($file, $nb, $obj, $name) = @_;
+ my ($cur_obj, $cur_name);
+
+    die "Can't get new Maximum Concurrent Jobs value"
+        unless ($nb);
+
+ open(FP, ">$tmp/1.$$") or die "Can't write to $tmp/1.$$";
+ open(SRC, $file) or die "Can't open $file";
+ while (my $l = <SRC>)
+ {
+ if ($l =~ /^(\w+) {/) {
+ $cur_obj = $1;
+ }
+
+ if ($l =~ /maximum\s*concurrent\s*jobs/i) {
+ if (!$obj || $cur_obj eq $obj) {
+            if (!$name || (defined($cur_name) && $cur_name eq $name)) {
+ $l =~ s/maximum\s*concurrent\s*jobs\s*=\s*\d+/Maximum Concurrent Jobs = $nb/ig;
+ }
+ }
+ }
+
+    if ($l =~ /Name\s*=\s*"?([\w\d\.-]+)"?/i) {
+ $cur_name = $1;
+ }
+
+ if ($l =~ /^}/) {
+ $cur_name = $cur_obj = undef;
+ }
+ print FP $l;
+ }
+ close(SRC);
+ close(FP);
+ copy("$tmp/1.$$", $file) or die "Can't copy $tmp/1.$$ to $file";
+}
+
# This test ensures that 'list copies' displays each copy only once
#
# Input: read stream from stdin or with file list argument
exit $ret;
}
+use POSIX qw/strftime/;
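+# print the local time $sec seconds from now, formatted as "YYYY-MM-DD HH:MM:SS"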
+sub get_time
+{
+ my ($sec) = @_;
+ print strftime('%F %T', localtime(time+$sec)), "\n";
+}
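+
+# Illustrative sketch of how a test might combine the helpers above
+# (the config path and the 'backup' job name are assumptions, not part
+# of this module):
+#   cleanup();
+#   set_maximum_concurent_jobs("$conf/bacula-dir.conf", 10, 'Director');
+#   start_bacula() or die "unable to start bacula";
+#   get_time(30);                # print a timestamp 30 seconds from now
+#   extract_resource("$conf/bacula-dir.conf", 'Job', 'backup');
+#   stop_bacula();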
+
1;
@##########################
END_OF_DATA
-for i in `seq 1 40`; do
+for i in `seq 1 30`; do
cat <<EOF >>${cwd}/tmp/bconcmds
run comment="Should work" level=Full job=CancelLowerLevelDuplicatesYes-$i yes
EOF
echo "wait" >> ${cwd}/tmp/bconcmds
echo "messages" >> ${cwd}/tmp/bconcmds
-for i in `seq 1 40`; do
+for i in `seq 1 30`; do
cat <<EOF >>${cwd}/tmp/bconcmds
run comment="Should fail" level=Incremental job=CancelLowerLevelDuplicatesYes-$i yes
EOF
echo "@sleep 5" >> ${cwd}/tmp/bconcmds
-for i in `seq 1 40`; do
+for i in `seq 1 30`; do
cat <<EOF >>${cwd}/tmp/bconcmds
run comment="Should work" level=Full job=CancelLowerLevelDuplicatesYes-$i yes
EOF