#!/usr/bin/perl

use lib "/usr/share/BackupPC/lib";
use BackupPC::Lib;
use POSIX;
use JSON;
use Getopt::Long;
use Statistics::Descriptive;
use Data::Dumper;

my $host = undef;
GetOptions( "host=s" => \$host );

# We need to switch to the backuppc UID/GID.
# Drop the group first: once we've given up root's UID we can no longer change the GID.
my $uid = getuid();
my $gid = getgid();
my (undef, undef, $bkpuid, $bkpgid) = getpwnam('backuppc');
setgid($bkpgid) if ($gid != $bkpgid);
setuid($bkpuid) if ($uid != $bkpuid);

my $bpc      = BackupPC::Lib->new();
my $mainConf = $bpc->ConfigDataRead();
my $json     = {};

if ( $host ) {
    my $hostConf = $bpc->ConfigDataRead($host);
    my $conf     = { %$mainConf, %$hostConf };
    my $age      = -1;

    $json = {
        bkp             => 0,
        last_age        => 0,
        errors          => 0,
        new_size        => 0,
        new_size_avg    => 0,
        new_size_median => 0,
        new_size_q1     => 0,
        new_size_q3     => 0
    };

    my $lastXferErrors        = 0;
    my $maxErrors             = 0;
    my $new_size_of_last_full = 0;

    my @bpc_info = $bpc->BackupInfoRead($host);
    my $sizes    = Statistics::Descriptive::Full->new();

    if ( scalar(@bpc_info) ) {
        foreach my $backup ( @bpc_info ) {
            # Skip partial or active backups
            next if ( $backup->{type} !~ m/^(?:full|incr)$/ );
            if ( $backup->{type} eq "full" ) {
                $json->{full_size}      = $backup->{size};
                $new_size_of_last_full  = $backup->{sizeNew};
            }
            # Push all the sizes into our data set to compute average sizes.
            # Exclude backup N°0 as it'll always have much more new data than normal backups
            $sizes->add_data($backup->{sizeNew}) unless ( $backup->{num} == 0 );
            $json->{bkp}++;
        }

        # Ignore the last backup if it's not full or incr (which means it's either partial or active)
        my $i = ( $bpc_info[-1]->{type} =~ m/^(?:full|incr)$/ ) ? -1 : -2;

        $json->{errors}          = $bpc_info[$i]->{xferErrs};
        $json->{new_size}        = $bpc_info[$i]->{sizeNew};
        $json->{new_size_avg}    = int $sizes->mean;
        $json->{new_size_median} = int $sizes->median;
        # Some old versions of Statistics::Descriptive (eg, on el5) do not support quantile
        $json->{new_size_q1}     = eval { int $sizes->quantile(1) } || 0;
        $json->{new_size_q3}     = eval { int $sizes->quantile(3) } || 0;
        $json->{age}             = time - $bpc_info[$i]->{startTime};
        # Age in days (86400 seconds per day)
        $json->{last_age}        = sprintf("%.1f", $json->{age} / 86400);
        $json->{max_errors}      = $conf->{MaxXferError} || 0;
    }
}
else {
    print <<"EOF";
Usage: $0 --host=<hostname>
EOF
    exit 1;
}

# Print results
print("\n----------------\n");
print("Last Backup : $json->{last_age}");
print("\n");
print("Errors : $json->{errors}");
print("\n");
print("Size Consistency : ");

# TOO BIG ?
# The latest backup is considered too big if its amount of new data is above
# Q3 + 1.5 * IQR (the usual Tukey outlier threshold) and also more than
# 6 times the average
my $toobig = 0;
if ( $json->{new_size} > $json->{new_size_q3} + ( $json->{new_size_q3} - $json->{new_size_q1} ) * 1.5
     && $json->{new_size} > $json->{new_size_avg} * 6 ) {
    $toobig = 1;
}

# TOO SMALL ?
# Conversely, it's considered too small if it's below Q1 - 1.5 * IQR and also
# less than a third of the average
my $toosmall = 0;
if ( $json->{new_size} < $json->{new_size_q1} - ( $json->{new_size_q3} - $json->{new_size_q1} ) * 1.5
     && $json->{new_size} < $json->{new_size_avg} / 3 ) {
    $toosmall = 1;
}

# Print result
if ( !$toobig && !$toosmall ) {
    print("Normal");
}
else {
    print("ANOMALOUS");
}
print("\n");

print("Random file : ..........");
print("\n----------------\n");

exit(0);
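
# ---------------------------------------------------------------------------
# Example invocation (the host name "web01" and the script file name are
# hypothetical; adjust them to your setup). The script must be run as root or
# as the backuppc user so that the setuid/setgid calls and the BackupPC config
# reads succeed.
#
#   ./backuppc_check.pl --host=web01
#
# Based on the print statements above, the output has the following shape
# (actual values depend on the host's backup history):
#
#   ----------------
#   Last Backup : <age of last backup, in days>
#   Errors : <xfer error count of last backup>
#   Size Consistency : Normal|ANOMALOUS
#   Random file : ..........
#   ----------------
# ---------------------------------------------------------------------------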