diff --git a/Check.pm b/Check.pm
index 0447ebd..f90bc45 100644
--- a/Check.pm
+++ b/Check.pm
@@ -135,46 +135,44 @@
 my $bpc = BackupPC::Lib->new();
 my $mainConf = $bpc->ConfigDataRead();
 my $json = {};
-if ( $host ) {
-    my $hostConf = $bpc->ConfigDataRead($host);
-    my $conf = { %$mainConf, %$hostConf };
-    my $age = -1;
-    $json = {
-        bkp => 0,
-        last_age => 0,
-        errors => 0,
-        new_size => 0,
-        new_size_avg => 0,
-        new_size_median => 0,
-        new_size_q1 => 0,
-        new_size_q3 => 0
-    };
-
-    my @bpc_info = $bpc->BackupInfoRead($host);
-    my $sizes = new Statistics::Descriptive::Full;
-
-    if ( scalar( @bpc_info ) ){
-        foreach my $backup ( @bpc_info ) {
-            # Skip partial or active backups
-            next if ( $backup->{type} !~ m/^full|incr$/ );
-            # Push all the sizes in our data set to compute avg sizes
-            # Exclude backup N°0 as it'll always have much more new data than normal backups
-            $sizes->add_data($backup->{sizeNew}) unless ( $backup->{num} == 0 );
-            $json->{bkp}++;
-        }
-
-        # Ignore the last backup if it's not full or incr (which means it's either partial or active)
-        my $i = ( $bpc_info[-1]->{type} =~ m/^full|incr$/ ) ? -1 : -2;
-
-        $json->{errors} = $bpc_info[$i]->{xferErrs};
-        $json->{new_size} = $bpc_info[$i]->{sizeNew};
-        $json->{new_size_avg} = int $sizes->mean;
-        $json->{new_size_median} = int $sizes->median;
-        $json->{new_size_q1} = eval { int $sizes->quantile(1) } || 0;
-        $json->{new_size_q3} = eval { int $sizes->quantile(3) } || 0;
-        $json->{age} = time - $bpc_info[$i]->{startTime};
-        $json->{last_age} = sprintf("%.1f", ($json->{age}) / 86400);
+my $hostConf = $bpc->ConfigDataRead($host);
+my $conf = { %$mainConf, %$hostConf };
+my $age = -1;
+$json = {
+    bkp => 0,
+    last_age => 0,
+    errors => 0,
+    new_size => 0,
+    new_size_avg => 0,
+    new_size_median => 0,
+    new_size_q1 => 0,
+    new_size_q3 => 0
+};
+
+my @bpc_info = $bpc->BackupInfoRead($host);
+my $sizes = new Statistics::Descriptive::Full;
+
+if ( scalar( @bpc_info ) ){
+    foreach my $backup ( @bpc_info ) {
+        # Skip partial or active backups
+        next if ( $backup->{type} !~ m/^full|incr$/ );
+        # Push all the sizes in our data set to compute avg sizes
+        # Exclude backup N°0 as it'll always have much more new data than normal backups
+        $sizes->add_data($backup->{sizeNew}) unless ( $backup->{num} == 0 );
+        $json->{bkp}++;
     }
+
+    # Ignore the last backup if it's not full or incr (which means it's either partial or active)
+    my $i = ( $bpc_info[-1]->{type} =~ m/^full|incr$/ ) ? -1 : -2;
+
+    $json->{errors} = $bpc_info[$i]->{xferErrs};
+    $json->{new_size} = $bpc_info[$i]->{sizeNew};
+    $json->{new_size_avg} = int $sizes->mean;
+    $json->{new_size_median} = int $sizes->median;
+    $json->{new_size_q1} = eval { int $sizes->quantile(1) } || 0;
+    $json->{new_size_q3} = eval { int $sizes->quantile(3) } || 0;
+    $json->{age} = time - $bpc_info[$i]->{startTime};
+    $json->{last_age} = sprintf("%.1f", ($json->{age}) / 86400);
 }
 
 # TOO BIG ?
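
For reference, here is a minimal standalone sketch (not part of Check.pm; the sizeNew sample values, variable names and printed layout are invented for illustration) of how the Statistics::Descriptive::Full calls and the age-in-days conversion used in the patched block behave:

use strict;
use warnings;
use Statistics::Descriptive;

# Made-up sizeNew values standing in for the per-backup sizes read via BackupInfoRead()
my @size_new = ( 1_200_000, 950_000, 1_500_000, 800_000, 1_100_000 );

my $sizes = Statistics::Descriptive::Full->new();
$sizes->add_data(@size_new);

# The patch wraps quantile() in eval with a "|| 0" fallback so a failure
# (or an undef result) degrades to 0 instead of killing the check.
my %stats = (
    new_size_avg    => int $sizes->mean,
    new_size_median => int $sizes->median,
    new_size_q1     => eval { int $sizes->quantile(1) } || 0,
    new_size_q3     => eval { int $sizes->quantile(3) } || 0,
);

# last_age converts an age in seconds to days (86400 seconds per day)
my $age      = 3 * 86400 + 7200;                 # example: 3 days and 2 hours
my $last_age = sprintf( "%.1f", $age / 86400 );  # "3.1"

printf "avg=%d median=%d q1=%d q3=%d last_age=%s\n",
    @stats{qw(new_size_avg new_size_median new_size_q1 new_size_q3)}, $last_age;

The quartiles (quantile(1) and quantile(3)) give the spread of normal backup sizes, which is what lets the check flag an unusually large new_size relative to the host's history.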