Diffstat (limited to 'files')
-rwxr-xr-x  files/checkbackups.pl                                              | 194
-rw-r--r--  files/nagios_plugins/duplicity/README.md                           |  24
-rw-r--r--  files/nagios_plugins/duplicity/backupninja_duplicity_freshness.sh  | 268
-rw-r--r--  files/nagios_plugins/duplicity/check_backupninja_duplicity.py      | 123
4 files changed, 0 insertions, 609 deletions
diff --git a/files/checkbackups.pl b/files/checkbackups.pl
deleted file mode 100755
index 39914469..00000000
--- a/files/checkbackups.pl
+++ /dev/null
@@ -1,194 +0,0 @@
-#!/usr/bin/perl -w
-
-# This script is designed to check a backup directory populated with
-# subdirectories named after hosts, within which there are backups of various
-# types.
-#
-# Example:
-# /home/backup:
-# foo.example.com
-#
-# foo.example.com:
-# rdiff-backup .ssh
-#
-# rdiff-backup:
-# root home rdiff-backup-data usr var
-#
-# There are heuristics to determine the backup type. Currently, the following
-# types are supported:
-#
-# rdiff-backup: assumes there is a rdiff-backup/rdiff-backup-data/backup.log file
-# duplicity: assumes there is a dup subdirectory, checks the latest file
-# dump files: assumes there is a dump subdirectory, checks the latest file
-#
-# This script returns output suitable for send_nsca to send the results to
-# nagios and should therefore be used like this:
-#
-# checkbackups.pl | send_nsca -H nagios.example.com
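-#
-# Each result line is tab-separated, in the format built by print_status()
-# below (values here are illustrative):
-#
-#   foo.example.com<TAB>backups<TAB>0<TAB>OK 12.50 hours old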
-
-use Getopt::Std;
-
-# XXX: taken from utils.sh from nagios-plugins-basic
-my $STATE_OK=0;
-my $STATE_WARNING=1;
-my $STATE_CRITICAL=2;
-my $STATE_UNKNOWN=3;
-my $STATE_DEPENDENT=4;
-my %ERRORS=(0=>'OK',1=>'WARNING',2=>'CRITICAL',3=>'UNKNOWN',4=>'DEPENDENT');
-
-# gross hack: we look into subdirs to find vservers
-my @vserver_dirs = qw{/var/lib/vservers /vservers};
-
-our $opt_d = "/backup";
-our $opt_c = 48 * 60 * 60;
-our $opt_w = 24 * 60 * 60;
-our $opt_v = 0;
-our $opt_o;
-our $opt_s;
-
-if (!getopts('d:c:w:s:vo')) {
- print <<EOF
-Usage: $0 [ -d <backupdir> ] [ -c <threshold> ] [ -w <threshold> ] [ -o ] [ -s <host> ] [ -v ]
-EOF
- ;
- exit();
-}
-
-sub check_rdiff {
- my ($host, $dir, $optv) = @_;
- my $flag="$dir/rdiff-backup-data/backup.log";
- my $extra_msg = '';
- my @vservers;
- my $last_bak;    # reset for each host so a stale timestamp is never reused
- if (open(FLAG, $flag)) {
- while (<FLAG>) {
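- # rdiff-backup session logs contain lines like the following (illustrative),
- # which is what the regexp below extracts:
- #   EndTime 1288486804.00 (Sun Oct 31 01:00:04 2010)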
- if (/EndTime ([0-9]*).[0-9]* \((.*)\)/) {
- $last_bak = $1;
- $extra_msg = ' [backup.log]';
- $opt_v && print STDERR "found timestamp $1 ($2) in $flag\n";
- }
- }
- if (!$last_bak) {
- print_status($host, $STATE_UNKNOWN, "cannot parse $flag for a valid timestamp");
- return;
- }
- } else {
- $opt_v && print STDERR "cannot open $flag\n";
- }
- close(FLAG);
- ($state, $delta) = check_age($last_bak);
- $dir =~ /([^\/]+)\/?$/;
- $service = "backups-$1";
- print_status($host, $state, "$delta hours old$extra_msg", $service);
- foreach my $vserver_dir (@vserver_dirs) {
- $vsdir = "$dir/$vserver_dir";
- if (opendir(DIR, $vsdir)) {
- @vservers = grep { /^[^\.]/ && -d "$vsdir/$_" } readdir(DIR);
- $opt_v && print STDERR "found vservers $vsdir: @vservers\n";
- closedir DIR;
- } else {
- $opt_v && print STDERR "no vserver in $vsdir\n";
- }
- }
- my @dom_sufx = split(/\./, $host);
- my $dom_sufx = join('.', @dom_sufx[1..$#dom_sufx]);
- foreach my $vserver (@vservers) {
- print_status("$vserver.$dom_sufx", $state, "$delta hours old$extra_msg, same as parent: $host");
- }
-}
-
-sub check_age {
- my ($last_bak) = @_;
- my $t = time();
- my $delta = $t - $last_bak;
- if ($delta > $opt_c) {
- $state = $STATE_CRITICAL;
- } elsif ($delta > $opt_w) {
- $state = $STATE_WARNING;
- } elsif ($delta >= 0) {
- $state = $STATE_OK;
- }
- $delta = sprintf '%.2f', $delta/3600.0;
- return ($state, $delta);
-}
-
-sub print_status {
- my ($host, $state, $message, $service) = @_;
- my $state_msg = $ERRORS{$state};
- if (!$service) {
- $service = 'backups';
- }
- $line = "$host\t$service\t$state\t$state_msg $message\n";
- if ($opt_s) {
- $opt_v && print STDERR "sending results to nagios...\n";
- open(NSCA, "|/usr/sbin/send_nsca -H $opt_s") or die("cannot start send_nsca: $!\n");
- print NSCA $line;
- close(NSCA) or warn("could not close send_nsca pipe correctly: $!\n");
- }
- if (!$opt_s || $opt_v) {
- print $line;
- }
-}
-
-sub check_flag {
- my ($host, $flag) = @_;
- my @stats = stat($flag);
- if (not @stats) {
- print_status($host, $STATE_UNKNOWN, "cannot stat flag $flag");
- }
- else {
- ($state, $delta) = check_age($stats[9]);
- print_status($host, $state, "$delta hours old");
- }
-}
-
-my $backupdir= $opt_d;
-
-my @hosts;
-if (defined($opt_o)) {
- @hosts=qx{hostname -f};
-} else {
- # XXX: this should be a complete backup registry instead
- @hosts=qx{ls $backupdir | grep -v lost+found};
-}
-
-chdir($backupdir);
-my ($delta, $state, $host);
-foreach $host (@hosts) {
- chomp($host);
- if ($opt_o) {
- $dir = $backupdir;
- } else {
- $dir = $host;
- }
- my $flag;
- if (-d $dir) {
- # guess the backup type and find a proper stamp file to compare
- @rdiffs = glob("$dir/*/rdiff-backup-data");
- foreach $subdir (@rdiffs) {
- $subdir =~ s/rdiff-backup-data$//;
- $opt_v && print STDERR "inspecting dir $subdir\n";
- check_rdiff($host, $subdir, $opt_v);
- $flag = 1;
- }
- if (-d "$dir/dump") {
- # XXX: this doesn't check backup consistency
- $flag="$dir/dump/" . `ls -tr $dir/dump | tail -1`;
- chomp($flag);
- check_flag($host, $flag);
- } elsif (-d "$dir/dup") {
- # XXX: this doesn't check backup consistency
- $flag="$dir/dup/" . `ls -tr $dir/dup | tail -1`;
- chomp($flag);
- check_flag($host, $flag);
- } elsif (-r "$dir/rsync.log") {
- # XXX: this doesn't check backup consistency
- $flag="$dir/rsync.log";
- check_flag($host, $flag);
- }
- if (!$flag) {
- print_status($host, $STATE_UNKNOWN, 'unknown system');
- }
- } else {
- print_status($host, $STATE_UNKNOWN, 'no directory');
- }
-}
diff --git a/files/nagios_plugins/duplicity/README.md b/files/nagios_plugins/duplicity/README.md
deleted file mode 100644
index 1cd349af..00000000
--- a/files/nagios_plugins/duplicity/README.md
+++ /dev/null
@@ -1,24 +0,0 @@
-duplicity-backup-status
-=======================
-
-Backupninja generates duplicity config files; this Nagios plugin checks their freshness. Currently only the config files generated by backupninja (the .dup action files, by default under /etc/backup.d) can be parsed, and the plugin depends on that.
-
-## Prerequisites
-
-Make sure you have python-argparse installed (yes, an extra dependency; getopt doubles the amount of code, so I gave up on that). The Python script looks for the backupninja_duplicity_freshness.sh shell script in /usr/local/lib/nagios/plugins/ or /usr/lib/nagios/plugins/; make sure you copy it there and make it executable.
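-
-A minimal install sketch (the destination is one of the two directories the Python script searches):
-
-    cp backupninja_duplicity_freshness.sh /usr/lib/nagios/plugins/
-    chmod +x /usr/lib/nagios/plugins/backupninja_duplicity_freshness.sh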
-
-## Getting started
-
-Run the Python script from your Nagios setup. Don't forget to specify the thresholds that determine when warnings or criticals should be raised; see the example after the option list.
-
-- -w WARNINC  Number of hours allowed since the last incremental backup before a warning, default 28
-- -W WARNFULL Number of days allowed since the last full backup before a warning, default 31
-- -c CRITINC  Number of hours allowed since the last incremental backup before a critical, default 52
-- -C CRITFULL Number of days allowed since the last full backup before a critical, default 33
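-
-For example, an illustrative invocation with the default thresholds (adjust the path to wherever you installed check_backupninja_duplicity.py):
-
-    /usr/lib/nagios/plugins/check_backupninja_duplicity.py -w 28 -W 31 -c 52 -C 33
-
-The check reads the backupninja configuration and runs duplicity collection-status, so it normally needs to run as root (for example via NRPE with sudo).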
-
-
-## TODO:
-
-- make it cuter, tidy up
-- make it more robust
-- support config backends other than backupninja - this can be done by writing more scripts like backupninja_duplicity_freshness.sh and parsing an extra parameter
diff --git a/files/nagios_plugins/duplicity/backupninja_duplicity_freshness.sh b/files/nagios_plugins/duplicity/backupninja_duplicity_freshness.sh
deleted file mode 100644
index 7af2bf7f..00000000
--- a/files/nagios_plugins/duplicity/backupninja_duplicity_freshness.sh
+++ /dev/null
@@ -1,268 +0,0 @@
-#!/bin/bash
-# -*- mode: sh; sh-basic-offset: 3; indent-tabs-mode: nil; -*-
-# vim: set filetype=sh sw=3 sts=3 expandtab autoindent:
-
-# Load the backupninja library/helpers, because why reinvent the wheel? [Because my wheels weren't round]
-# some duplication is to be expected
-# this is only supposed to work with duplicity
-
-## Functions
-# simple lowercase function
-function tolower() {
- echo "$1" | tr '[:upper:]' '[:lower:]'
-}
-
-# we grab the current time once, since processing
-# all the configs might take more than an hour.
-nowtime=`LC_ALL=C date +%H`
-nowday=`LC_ALL=C date +%d`
-nowdayofweek=`LC_ALL=C date +%A`
-nowdayofweek=`tolower "$nowdayofweek"`
-
-conffile="/etc/backupninja.conf"
-
-# find $libdirectory
-libdirectory=`grep '^libdirectory' $conffile | /usr/bin/awk '{print $3}'`
-if [ -z "$libdirectory" ]; then
- if [ -d "/usr/lib/backupninja" ]; then
- libdirectory="/usr/lib/backupninja"
- else
- echo "Could not find entry 'libdirectory' in $conffile."
- exit 3 # 'fatal' comes from the backupninja tools library, which is not sourced yet at this point
- fi
-else
- if [ ! -d "$libdirectory" ]; then
- echo "Lib directory $libdirectory not found."
- exit 3 # 'fatal' is not available before the tools library is sourced
- fi
-fi
-
-. $libdirectory/tools
-
-setfile $conffile
-
-# get global config options (second param is the default)
-getconf configdirectory /etc/backup.d
-getconf scriptdirectory /usr/share/backupninja
-getconf reportdirectory
-getconf reportemail
-getconf reporthost
-getconf reportspace
-getconf reportsuccess yes
-getconf reportinfo no
-getconf reportuser
-getconf reportwarning yes
-getconf loglevel 3
-getconf when "Everyday at 01:00"
-defaultwhen=$when
-getconf logfile /var/log/backupninja.log
-getconf usecolors "yes"
-getconf SLAPCAT /usr/sbin/slapcat
-getconf LDAPSEARCH /usr/bin/ldapsearch
-getconf RDIFFBACKUP /usr/bin/rdiff-backup
-getconf CSTREAM /usr/bin/cstream
-getconf MYSQLADMIN /usr/bin/mysqladmin
-getconf MYSQL /usr/bin/mysql
-getconf MYSQLHOTCOPY /usr/bin/mysqlhotcopy
-getconf MYSQLDUMP /usr/bin/mysqldump
-getconf PGSQLDUMP /usr/bin/pg_dump
-getconf PGSQLDUMPALL /usr/bin/pg_dumpall
-getconf PGSQLUSER postgres
-getconf GZIP /bin/gzip
-getconf GZIP_OPTS --rsyncable
-getconf RSYNC /usr/bin/rsync
-getconf admingroup root
-
-if [ ! -d "$configdirectory" ]; then
- echo "Configuration directory '$configdirectory' not found."
- fatal "Configuration directory '$configdirectory' not found."
-fi
-
-# get the duplicity configuration
-function get_dupconf(){
- setfile $1
- getconf options
- getconf testconnect yes
- getconf nicelevel 0
- getconf tmpdir
-
- setsection gpg
- getconf password
- getconf sign no
- getconf encryptkey
- getconf signkey
-
- setsection source
- getconf include
- getconf vsnames all
- getconf vsinclude
- getconf exclude
-
- setsection dest
- getconf incremental yes
- getconf increments 30
- getconf keep 60
- getconf keepincroffulls all
- getconf desturl
- getconf awsaccesskeyid
- getconf awssecretaccesskey
- getconf cfusername
- getconf cfapikey
- getconf cfauthurl
- getconf ftp_password
- getconf sshoptions
- getconf bandwidthlimit 0
- getconf desthost
- getconf destdir
- getconf destuser
- destdir=${destdir%/}
-}
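-
-# For reference, an illustrative (not authoritative) backupninja .dup action file
-# using the keys read above could look like:
-#
-#   [gpg]
-#   password = sooper-secret
-#
-#   [source]
-#   include = /etc
-#   include = /home
-#   exclude = /home/*/.cache
-#
-#   [dest]
-#   incremental = yes
-#   desthost = backup.example.org
-#   destdir = /backups/client1
-#   destuser = backup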
-
-### some voodoo to mangle the correct commands
-
-function mangle_cli(){
-
- execstr_options="$options "
- execstr_source=
- if [ -n "$desturl" ]; then
- [ -z "$destuser" ] || warning 'the configured destuser is ignored since desturl is set'
- [ -z "$desthost" ] || warning 'the configured desthost is ignored since desturl is set'
- [ -z "$destdir" ] || warning 'the configured destdir is ignored since desturl is set'
- execstr_serverpart="$desturl"
- else
- execstr_serverpart="scp://$destuser@$desthost/$destdir"
- fi
-
-
- ### Symmetric or asymmetric (public/private key pair) encryption
- if [ -n "$encryptkey" ]; then
- execstr_options="${execstr_options} --encrypt-key $encryptkey"
- fi
-
- ### Data signing (or not)
- if [ "$sign" == yes ]; then
- # duplicity is not able to sign data when using symmetric encryption
- [ -n "$encryptkey" ] || fatal "The encryptkey option must be set when signing."
- # if needed, initialize signkey to a value that is not empty (checked above)
- [ -n "$signkey" ] || signkey="$encryptkey"
- execstr_options="${execstr_options} --sign-key $signkey"
- fi
-
- ### Temporary directory
- precmd=
- if [ -n "$tmpdir" ]; then
- if [ ! -d "$tmpdir" ]; then
- #info "Temporary directory ($tmpdir) does not exist, creating it."
- mkdir -p "$tmpdir"
- [ $? -eq 0 ] || fatal "Could not create temporary directory ($tmpdir)."
- chmod 0700 "$tmpdir"
- fi
- #info "Using $tmpdir as TMPDIR"
- precmd="${precmd}TMPDIR=$tmpdir "
- fi
-
- ### Source
-
- set -o noglob
-
- # excludes
- SAVEIFS=$IFS
- IFS=$(echo -en "\n\b")
- for i in $exclude; do
- str="${i//__star__/*}"
- execstr_source="${execstr_source} --exclude '$str'"
- done
- IFS=$SAVEIFS
-
- # includes
- SAVEIFS=$IFS
- IFS=$(echo -en "\n\b")
- for i in $include; do
- [ "$i" != "/" ] || fatal "Sorry, you cannot use 'include = /'"
- str="${i//__star__/*}"
- execstr_source="${execstr_source} --include '$str'"
- done
- IFS=$SAVEIFS
-
- set +o noglob
-
- execstr_options="${execstr_options} --ssh-options '$sshoptions'"
- if [ "$bandwidthlimit" != 0 ]; then
- [ -z "$desturl" ] || warning 'The bandwidthlimit option is not used when desturl is set.'
- execstr_precmd="trickle -s -d $bandwidthlimit -u $bandwidthlimit"
- fi
-}
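-
-# For reference, with illustrative values the pieces assembled above are later
-# combined (in process_action) into roughly:
-#   duplicity --encrypt-key 0xDEADBEEF --ssh-options '-i /root/.ssh/id_dup' \
-#     collection-status scp://backup@backup.example.org//backups/client1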
-
-#function findlastdates(){
-# outputfile=$1
-# lastfull=0
-# lastinc=0
-# backuptime=0
-#
-# while read line; do
-# atime=0
-# arr=()
-# sort=''
-# test=$(echo $line|awk '{if (NF == 7); if ($1 == "Full" || $1 == "Incremental") {print $4, $3, $6, $5}}' )
-#
-# if [ -n "$test" ]; then
-# backuptime=$(date -u -d "$test" +%s)
-#
-# arr=($(echo $line|awk '{print $1, $2, $3, $4, $5, $6}'))
-# if [ ${arr[0]} == "Incremental" ] && [ "$lastinc" -lt "$backuptime" ] ; then
-# lastinc=$backuptime
-# elif [ ${arr[0]} == "Full" ] && [ "$lastfull" -lt "$backuptime" ] ; then
-# lastfull=$backuptime
-# fi
-#
-# fi
-#
-# done < $outputfile
-# # a full backup can be seen as incremental too
-# lastinc=$(echo $lastinc | awk 'max=="" || $1 > max {max=$1} END{ print max}')
-#}
-
-function check_status() {
- grep -q 'No orphaned or incomplete backup sets found.' $1
- if [ $? -ne 0 ] ; then
- exit 2
- fi
-}
-
-##
-## this function handles the freshness check of a backup action
-##
-
-function process_action() {
- local file="$1"
- local suffix="$2"
- setfile $file
- get_dupconf $1
- mangle_cli
-
- outputfile=`maketemp backupout`
- export PASSPHRASE=$password
- export FTP_PASSWORD=$ftp_password
- output=` su -c \
- "$execstr_precmd duplicity $execstr_options collection-status $execstr_serverpart >$outputfile 2>&1"`
- exit_code=$?
- echo -n $outputfile
-
- #check_status
- #findlastdates
-}
-
-files=`find $configdirectory -follow -mindepth 1 -maxdepth 1 -type f ! -name '.*.swp' | sort -n`
-
-for file in $files; do
- [ -f "$file" ] || continue
- suffix="${file##*.}"
- base=`basename $file`
- if [ "${base:0:1}" == "0" -o "$suffix" == "disabled" ]; then
- continue
- fi
- if [ -e "$scriptdirectory/$suffix" -a "$suffix" == "dup" ]; then
- process_action $file $suffix
- fi
-done
-
diff --git a/files/nagios_plugins/duplicity/check_backupninja_duplicity.py b/files/nagios_plugins/duplicity/check_backupninja_duplicity.py
deleted file mode 100644
index 8ed9ce68..00000000
--- a/files/nagios_plugins/duplicity/check_backupninja_duplicity.py
+++ /dev/null
@@ -1,123 +0,0 @@
-#!/usr/bin/env python
-
-# Inspired by Arne Schwabe <arne-nagios@rfc2549.org> [with BSD license]
-# Inspired by backupninja [that's gpl some version]
-# minor changes by someone who doesn't understand all the license quirks
-
-from subprocess import Popen,PIPE
-import sys
-import time
-import os
-import argparse
-
-def main():
- # getopt = much more writing
- parser = argparse.ArgumentParser(description='Nagios Duplicity status checker')
-
- parser.add_argument("-w", dest="warninc", default=28, type=int,
- help="Number of hours allowed for incremental backup warning level, default 28")
- parser.add_argument("-W", dest="warnfull", default=31, type=int,
- help="Number of days allowed for full backup warning level, default 31")
- parser.add_argument("-c", dest="critinc", default=52, type=int,
- help="Number of hours allowed for incremental backup critical level, default 52")
- parser.add_argument("-C", dest="critfull", default=33, type=int,
- help="Number of days allowed for full backup critical level, default 33")
- args = parser.parse_args()
-
- okay = 0
-
- # *sigh* check_output is from python 2.7 and onwards. Debian, upgrade yourself.
- #output , err = check_output(['/root/freshness.sh'])
-
- if os.path.isfile("/usr/lib/nagios/plugins/backupninja_duplicity_freshness.sh") and os.access("/usr/lib/nagios/plugins/backupninja_duplicity_freshness.sh", os.X_OK):
- checkstatus, err = Popen(['/bin/bash', '/usr/lib/nagios/plugins/backupninja_duplicity_freshness.sh'], stdout=PIPE, stderr=PIPE, env={'HOME': '/root', 'PATH': os.environ['PATH']}).communicate()
- elif os.path.isfile("/usr/local/lib/nagios/plugins/backupninja_duplicity_freshness.sh") and os.access("/usr/local/lib/nagios/plugins/backupninja_duplicity_freshness.sh", os.X_OK):
- checkstatus, err = Popen(['/bin/bash', '/usr/local/lib/nagios/plugins/backupninja_duplicity_freshness.sh'], stdout=PIPE, stderr=PIPE, env={'HOME': '/root', 'PATH': os.environ['PATH']}).communicate()
- else:
-     # neither candidate helper script exists and is executable; bail out as UNKNOWN
-     print "UNKNOWN: backupninja_duplicity_freshness.sh not found in /usr/lib/nagios/plugins or /usr/local/lib/nagios/plugins"
-     sys.exit(3)
-
- # Don't use exec(), popen(), etc. to execute external commands without explicitly using the full path of the external program. A hijacked search path could be problematic.
- #checkstatus, err = Popen(['/bin/bash', './freshness.sh'], stdout=PIPE, stderr=PIPE, env={'HOME': '/root', 'PATH': os.environ['PATH']}).communicate()
-
- #another sigh: Debian testing, upgrade yourself, this is only needed because Debian testing uses duplicity 0.6.18-3
- # open file read/write
- f = open (checkstatus,"r")
- checklines = f.readlines()
- f.close()
-
- # remove the line that says Import of duplicity.backends.giobackend Failed: No module named gio
- f = open(checkstatus,"w")
- for line in checklines:
- if not 'Import of duplicity.backends.giobackend Failed: No module named gio' in line:
- f.write(line)
- f.close()
-
- output = open(checkstatus).read()
-
- lastfull, lastinc = findlastdates(output)
-
- sincelastfull = time.time() - lastfull
- sincelastinc = time.time() - lastinc
-
- msg = "OK: "
-
- if sincelastfull > (args.warnfull * 24 * 3600) or sincelastinc > (args.warninc * 3600):
- okay = 1
- msg = "WARNING: "
- if sincelastfull > (args.critfull * 24 * 3600) or sincelastinc > (args.critinc * 3600):
- okay = 2
- msg = "CRITICAL: "
- if not checkoutput(output):
- okay = max(okay,1)
- msg = "WARNING: duplicity output: %s " % repr(output)
- if err:
- okay=2
- msg = "Unexpected output: %s, " % repr(err)
-
- print msg, "last full %s ago, last incremental %s ago|lastfull=%d, lastinc=%d" % ( formattime(sincelastfull), formattime(sincelastinc), sincelastfull, sincelastinc)
-
- #clean up cruft
- os.remove(checkstatus)
- sys.exit(okay)
-
-def checkoutput(output):
- if not 'No orphaned or incomplete backup sets found.' in output:
- return False
-
- return True
-
-def formattime(seconds):
- days = int(seconds) / (3600 * 24)
- hours = int(seconds) / 3600 % 24
-
- if days:
- return "%d days %d hours" % (days,hours)
- else:
- return "%d hours" % hours
-
-
-def findlastdates(output):
- lastfull = 0
- lastinc = 0
-
- for line in output.split("\n"):
- parts = line.split()
-
- # ['Incremental', 'Sun', 'Oct', '31', '03:00:04', '2010', '1']
- if len (parts) == 7 and parts[0] in ["Full","Incremental"]:
- foo = time.strptime(" ".join(parts[1:6]),"%a %b %d %H:%M:%S %Y")
-
- backuptime = time.mktime(foo)
-
- if parts[0] == "Incremental" and lastinc < backuptime:
- lastinc = backuptime
- elif parts[0] == "Full" and lastfull < backuptime:
- lastfull = backuptime
-
-
- # Count a full backup as incremental backup
- lastinc = max(lastfull,lastinc)
- return (lastfull, lastinc)
-
-
-if __name__=='__main__':
- main()