From 6a721b0442d49a6b2c1b10bfdbd0df6e60a56e28 Mon Sep 17 00:00:00 2001
From: Heuzef
Date: Fri, 27 Sep 2019 13:21:21 +0200
Subject: [PATCH] Include Size Consistency check

---
 Check.pm | 80 +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++-
 1 file changed, 79 insertions(+), 1 deletion(-)

diff --git a/Check.pm b/Check.pm
index 6271171..4387d65 100644
--- a/Check.pm
+++ b/Check.pm
@@ -141,12 +141,90 @@ if ( $Conf{BackupsDisable} == 1 ) {
     $shortErr = " ($shortErr)";
 }
 
+# Size Consistency: compare the newest backup's new-data size against the
+# distribution (mean / quartiles) of previous backups for this host.
+my $bpc = BackupPC::Lib->new();
+my $mainConf = $bpc->ConfigDataRead();
+my $json = {};
+
+if ( $host ) {
+    my $hostConf = $bpc->ConfigDataRead($host);
+    my $conf     = { %$mainConf, %$hostConf };
+    my $age      = -1;
+    $json = {
+        bkp             => 0,
+        last_age        => 0,
+        errors          => 0,
+        new_size        => 0,
+        new_size_avg    => 0,
+        new_size_median => 0,
+        new_size_q1     => 0,
+        new_size_q3     => 0
+    };
+
+    my @bpc_info = $bpc->BackupInfoRead($host);
+    # Direct method call (was indirect object syntax "new Statistics::...")
+    my $sizes    = Statistics::Descriptive::Full->new();
+
+    if ( scalar( @bpc_info ) ) {
+        foreach my $backup ( @bpc_info ) {
+            # Skip partial or active backups. Non-capturing group and anchors
+            # on the whole alternation (was m/^full|incr$/, which also matched
+            # e.g. "fullX" or "Xincr" due to alternation precedence).
+            next if ( $backup->{type} !~ m/^(?:full|incr)$/ );
+            if ( $backup->{type} eq "full" ) {
+                $last_full_num = $backup->{num};
+            }
+            # Push all the sizes in our data set to compute avg sizes.
+            # Exclude backup 0 as it'll always have much more new data than normal backups.
+            $sizes->add_data($backup->{sizeNew}) unless ( $backup->{num} == 0 );
+            $json->{bkp}++;
+        }
+
+        # Ignore the last backup if it's not full or incr (i.e. partial or active)
+        my $i = ( $bpc_info[-1]->{type} =~ m/^(?:full|incr)$/ ) ?
+            -1 : -2;
+
+        $json->{errors}          = $bpc_info[$i]->{xferErrs};
+        $json->{new_size}        = $bpc_info[$i]->{sizeNew};
+        $json->{new_size_avg}    = int $sizes->mean;
+        $json->{new_size_median} = int $sizes->median;
+        $json->{new_size_q1}     = eval { int $sizes->quantile(1) } || 0;
+        $json->{new_size_q3}     = eval { int $sizes->quantile(3) } || 0;
+        $json->{age}             = time - $bpc_info[$i]->{startTime};
+        # Age in days: 86400 seconds per day (was 84600 - transposed digits)
+        $json->{last_age}        = sprintf("%.1f", ($json->{age}) / 86400);
+    }
+}
+
+# TOO BIG ? Tukey upper fence Q3 + 1.5*IQR (was (Q3+Q3-Q1)*1.5), or > 6x average
+my $toobig = "1";
+if ( $json->{new_size} > $json->{new_size_q3} + ($json->{new_size_q3} - $json->{new_size_q1}) * 1.5 or $json->{new_size} > $json->{new_size_avg} * 6 ) {
+    $toobig = "1";
+}
+else {
+    $toobig = "0";
+}
+
+# TOO SMALL ? Tukey lower fence Q1 - 1.5*IQR (was (Q1-Q3-Q1)*1.5 = -1.5*Q3, always negative), or < a third of average
+my $toosmall = "1";
+if ( $json->{new_size} < $json->{new_size_q1} - ($json->{new_size_q3} - $json->{new_size_q1}) * 1.5 or $json->{new_size} < $json->{new_size_avg} / 3 ) {
+    $toosmall = "1";
+}
+else {
+    $toosmall = "0";
+}
+
+my $sizeConsistency = "ANOMALOUS";
+
+# Get result
+if ( $toobig or $toosmall ) {
+    $sizeConsistency = "ANOMALOUS";
+}
+else {
+    $sizeConsistency = "Normal";
+}
+
 # Show summary
 $str = <<EOF;
 ${HostLink($host)}
 $lastAge
 $lastXferErrors
-sizeConsistency
+$sizeConsistency
 random-file.txt
 EOF