diff --git a/.gitignore b/.gitignore
index 4e9c74d..e5c18f6 100644
--- a/.gitignore
+++ b/.gitignore
@@ -8,7 +8,6 @@ doc/man/*.html
doc/man/*.htm
doc/man/*.texi
doc/man/*.man
-test/*
.*.swp
doc/man/*.[0-9]
doc/*.xml
diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
new file mode 100644
index 0000000..5391f4d
--- /dev/null
+++ b/.gitlab-ci.yml
@@ -0,0 +1,12 @@
+stages:
+ - test
+
+unit_tests:
+ stage: test
+ script:
+ - make test
+
+shellcheck:
+ stage: test
+ script:
+ - make shellcheck
diff --git a/Makefile b/Makefile
index 82bab0e..789e099 100644
--- a/Makefile
+++ b/Makefile
@@ -22,14 +22,14 @@
#
INSTALL=install
-CCOLLECT_SOURCE=ccollect.sh
-CCOLLECT_DEST=ccollect.sh
+CCOLLECT_SOURCE=ccollect
+CCOLLECT_DEST=ccollect
LN=ln -sf
ASCIIDOC=asciidoc
DOCBOOKTOTEXI=docbook2x-texi
DOCBOOKTOMAN=docbook2x-man
XSLTPROC=xsltproc
-XSL=/usr/share/xml/docbook/stylesheet/nwalsh/html/docbook.xsl
+XSL=/usr/local/share/xsl/docbook/html/docbook.xsl
A2X=a2x
prefix=/usr/packages/ccollect-git
@@ -41,11 +41,7 @@ manlink=/usr/local/man/man1
path_dir=/usr/local/bin
path_destination=${path_dir}/${CCOLLECT_DEST}
-
-# where to publish
-host=localhost
-dir=/home/users/nico/privat/computer/net/netzseiten/www.nico.schottelius.org/src/software/ccollect
-docdir=${dir}/doc
+docs_archive_name=docs.tar
#
# Asciidoc will be used to generate other formats later
@@ -79,12 +75,7 @@ DOCBDOCS = ${DOCS:.text=.docbook}
DOC_ALL = ${HTMLDOCS} ${DBHTMLDOCS} ${TEXIDOCS} ${MANPDOCS} ${PDFDOCS}
-html: ${HTMLDOCS}
-htm: ${DBHTMLDOCS}
-info: ${TEXIDOCS}
-man: ${MANPDOCS}
-pdf: ${PDFDOCS}
-documentation: ${DOC_ALL}
+TEST_LOG_FILE = /tmp/ccollect/ccollect.log
#
# End user targets
@@ -96,6 +87,15 @@ all:
@echo "info: only generate Texinfo"
@echo "man: only generate manpage{s}"
@echo "install: install ccollect to ${prefix}"
+ @echo "shellcheck: shellcheck ccollect script"
+ @echo "test: run unit tests"
+
+html: ${HTMLDOCS}
+htm: ${DBHTMLDOCS}
+info: ${TEXIDOCS}
+man: ${MANPDOCS}
+pdf: ${PDFDOCS}
+documentation: ${DOC_ALL}
install: install-link install-manlink
@@ -116,21 +116,28 @@ install-manlink: install-man
#
# Tools
#
-TOOLS=ccollect_add_source.sh \
- ccollect_analyse_logs.sh \
- ccollect_delete_source.sh \
- ccollect_list_intervals.sh \
- ccollect_logwrapper.sh \
- ccollect_list_intervals.sh
+TOOLS2=ccollect_add_source
+TOOLS2 += ccollect_analyse_logs
-TOOLSMAN1 = $(subst ccollect,doc/man/ccollect,$(TOOLS))
-TOOLSMAN = $(subst .sh,.text,$(TOOLSMAN1))
+TOOLS=ccollect_add_source \
+ ccollect_analyse_logs \
+ ccollect_delete_source \
+ ccollect_list_intervals \
+ ccollect_logwrapper \
+ ccollect_list_intervals
+
+# Stick to posix (NOTE: POSIX ':old=new' substitution matches suffixes only,
+TOOLSMAN1 = $(TOOLS:ccollect=doc/man/ccollect)
+TOOLSMAN = $(TOOLSMAN1:=.text)
TOOLSFP = $(subst ccollect,tools/ccollect,$(TOOLS))
-#t2: $(TOOLSMAN)
+## FIXME: posix make: shell? =>
+
t2:
- echo $(TOOLS) - $(TOOLSMAN) - $(TOOLSFP)
+ echo $(TOOLS) - $(TOOLSFP)
+ echo $(TOOLSMAN)
+ echo $(TOOLSFP)
# docbook gets .htm, asciidoc directly .html
@@ -166,13 +173,13 @@ t2:
#
# Developer targets
#
-update:
- @git push
+pub:
+ git push
publish-doc: documentation
- @echo "Transferring files to ${host}"
@chmod a+r ${DOCS} ${DOC_ALL}
- @tar c ${DOCS} ${DOC_ALL} | ssh ${host} "cd ${dir}; tar xv"
+ @tar cf ${docs_archive_name} ${DOCS} ${DOC_ALL}
+ @echo "Documentation files are in ${docs_archive_name}"
#
# Distribution
@@ -189,19 +196,55 @@ distclean: clean
#
dist: distclean documentation
-#test: ccollect.sh documentation
-test: ccollect.sh
+/tmp/ccollect:
mkdir -p /tmp/ccollect
- CCOLLECT_CONF=./conf ./ccollect.sh daily from-remote
- CCOLLECT_CONF=./conf ./ccollect.sh daily local
- CCOLLECT_CONF=./conf ./ccollect.sh daily "local-with&ersand"
- CCOLLECT_CONF=./conf ./ccollect.sh daily source-without-destination
- CCOLLECT_CONF=./conf ./ccollect.sh daily "source with spaces and interval"
- CCOLLECT_CONF=./conf ./ccollect.sh daily to-remote
- CCOLLECT_CONF=./conf ./ccollect.sh daily with_exec
- CCOLLECT_CONF=./conf ./ccollect.sh daily very_verbose
+
+shellcheck: ./ccollect
+ shellcheck -s sh -f gcc -x ./ccollect
+
+test-nico: $(CCOLLECT_SOURCE) /tmp/ccollect
+ cd ./conf/sources/; for s in *; do CCOLLECT_CONF=../ ../../ccollect daily "$$s"; done
touch /tmp/ccollect/$$(ls /tmp/ccollect | head -n1).ccollect-marker
- CCOLLECT_CONF=./conf ./ccollect.sh daily delete_incomplete
- CCOLLECT_CONF=./conf ./ccollect.sh daily no-source-must-fail
-# for s in $$(ls ./conf/sources); do CCOLLECT_CONF=./conf echo ./ccollect.sh daily $$s; done
-# CCOLLECT_CONF=./conf ./ccollect.sh -a daily
+ CCOLLECT_CONF=./conf ./ccollect -a daily
+ touch /tmp/ccollect/$$(ls /tmp/ccollect | head -n1).ccollect-marker
+ CCOLLECT_CONF=./conf ./ccollect -a -p daily
+
+test-dir-source:
+ mkdir -p /tmp/ccollect/source
+ cp -R -f ./* /tmp/ccollect/source
+
+test-dir-destination:
+ mkdir -p /tmp/ccollect/backup
+
+test-dir-destination-chint:
+ mkdir -p /tmp/ccollect/backup-chint
+
+test-fixed-intervals: $(CCOLLECT_SOURCE) test-dir-source test-dir-destination test-dir-destination-chint
+ for s in ./test/conf/sources/*; do \
+ CCOLLECT_CONF=./test/conf ./ccollect -l ${TEST_LOG_FILE} daily "$$(basename "$$s")"; \
+ test "$$(ls -1 /tmp/ccollect/backup | wc -l)" -gt "0" || { cat ${TEST_LOG_FILE}; exit 1; }; \
+ done
+ CCOLLECT_CONF=./test/conf ./ccollect -l ${TEST_LOG_FILE} -a -v daily
+ test "$$(ls -1 /tmp/ccollect/backup | wc -l)" -gt "0" || { cat ${TEST_LOG_FILE}; exit 1; }
+ CCOLLECT_CONF=./test/conf ./ccollect -l ${TEST_LOG_FILE} -a -p daily
+ test "$$(ls -1 /tmp/ccollect/backup | wc -l)" -gt "0" || { cat ${TEST_LOG_FILE}; exit 1; }
+ @printf "\nFixed intervals test ended successfully\n"
+
+test-interval-changing: $(CCOLLECT_SOURCE) test-dir-source test-dir-destination-chint
+ rm -rf /tmp/ccollect/backup-chint/*
+ test "$$(ls -1 /tmp/ccollect/backup-chint | wc -l)" -eq "0" || { cat ${TEST_LOG_FILE}; exit 1; }
+ printf "3" > ./test/conf/sources/local-with-interval/intervals/daily
+ for x in 1 2 3 4 5; do CCOLLECT_CONF=./test/conf ./ccollect -l ${TEST_LOG_FILE} daily local-with-interval; done
+ test "$$(ls -1 /tmp/ccollect/backup-chint | wc -l)" -eq "3" || { cat ${TEST_LOG_FILE}; exit 1; }
+ printf "5" > ./test/conf/sources/local-with-interval/intervals/daily
+ for x in 1 2 3 4 5 6 7; do CCOLLECT_CONF=./test/conf ./ccollect -l ${TEST_LOG_FILE} daily local-with-interval; done
+ test "$$(ls -1 /tmp/ccollect/backup-chint | wc -l)" -eq "5" || { cat ${TEST_LOG_FILE}; exit 1; }
+ printf "4" > ./test/conf/sources/local-with-interval/intervals/daily
+ for x in 1 2 3 4 5 6; do CCOLLECT_CONF=./test/conf ./ccollect -l ${TEST_LOG_FILE} daily local-with-interval; done
+ test "$$(ls -1 /tmp/ccollect/backup-chint | wc -l)" -eq "4" || { cat ${TEST_LOG_FILE}; exit 1; }
+ printf "3" > ./test/conf/sources/local-with-interval/intervals/daily
+ @printf "\nInterval changing test ended successfully\n"
+
+test: test-fixed-intervals test-interval-changing
+ test -f "${TEST_LOG_FILE}"
+ @printf "\nTests ended successfully\n"
diff --git a/README b/README
index 558594d..1b5341d 100644
--- a/README
+++ b/README
@@ -7,7 +7,7 @@ ccollect backups (local or remote) data to local or remote destinations.
You can retrieve the latest version of ccollect at [0].
ccollect was inspired by rsnapshot [1], which has some problems:
- - configuration parameters has to be TAB seperated
+ - configuration parameters have to be TAB separated
- you can not specify per source exclude lists
- no per source pre/post execution support
- no parallel execution
diff --git a/TODO b/TODO
new file mode 100644
index 0000000..ce63297
--- /dev/null
+++ b/TODO
@@ -0,0 +1,26 @@
+Hinweis:
+ Zwei Quellen auf ein Ziel funktioniert nicht, da die beiden
+ Quellen von den unterschiedlichen Verzeichnissen linken.
+
+Listing:
+ .../* funktioniert nicht teilweise (abhängig von der Shell?)
+
+Isup-check: optional!
+
+line 318/check no_xxx => correct test?
+
+REMOVE ALL PCMD code
+
+Backup to remote can be done via ssh tunnel!
+
+
+% remote host: Allow backup host to access our sshd
+ ssh -R4242:localhost:22 backupserver "ccollect interval backupremotehost"
+
+ remove $destination (==ddir)
+
+--------------------------------------------------------------------------------
+
+Remote backups:
+
+ccollect_backup_to $host $remote_port $source $interval
diff --git a/ccollect b/ccollect
new file mode 100755
index 0000000..2deaa2d
--- /dev/null
+++ b/ccollect
@@ -0,0 +1,929 @@
+#!/bin/sh
+#
+# 2005-2013 Nico Schottelius (nico-ccollect at schottelius.org)
+# 2016-2019 Darko Poljak (darko.poljak at gmail.com)
+#
+# This file is part of ccollect.
+#
+# ccollect is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# ccollect is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with ccollect. If not, see <http://www.gnu.org/licenses/>.
+#
+# Initially written for SyGroup (www.sygroup.ch)
+# Date: Mon Nov 14 11:45:11 CET 2005
+
+# Error upon expanding unset variables:
+set -u
+
+#
+# Standard variables (stolen from cconf)
+#
+__mydir="${0%/*}"
+__abs_mydir="$(cd "$__mydir" && pwd -P)"
+__myname=${0##*/}
+
+#
+# where to find our configuration and temporary file
+#
+CCOLLECT_CONF="${CCOLLECT_CONF:-/etc/ccollect}"
+CSOURCES="${CCOLLECT_CONF}/sources"
+CDEFAULTS="${CCOLLECT_CONF}/defaults"
+CPREEXEC="${CDEFAULTS}/pre_exec"
+CPOSTEXEC="${CDEFAULTS}/post_exec"
+CMARKER=".ccollect-marker"
+
+TMP="$(mktemp "/tmp/${__myname}.XXXXXX")"
+export TMP
+CONTROL_PIPE="/tmp/${__myname}-control-pipe"
+
+VERSION="2.10"
+RELEASE="2020-08-26"
+HALF_VERSION="ccollect ${VERSION}"
+FULL_VERSION="ccollect ${VERSION} (${RELEASE})"
+
+#
+# CDATE: how we use it for naming of the archives
+# DDATE: how the user should see it in our output (DISPLAY)
+#
+CDATE="date +%Y%m%d-%H%M"
+DDATE="date +%Y-%m-%d-%H:%M:%S"
+SDATE="date +%s"
+
+#
+# LOCKING: use flock if available, otherwise mkdir
+# Locking is done for each source so that only one instance per source
+# can run.
+#
+# Use CCOLLECT_CONF directory for lock files.
+# This directory can be set arbitrary so it is writable for user
+# executing ccollect.
+LOCKDIR="${CCOLLECT_CONF}"
+# printf pattern yields: ccollect_<source>.lock
+LOCKFILE_PATTERN="ccollect_%s.lock"
+LOCKFD=4
+
+#
+# locking functions using flock
+#
+lock_flock()
+{
+ # $1 = source to backup
+ # shellcheck disable=SC2059
+ lockfile="${LOCKDIR}/$(printf "${LOCKFILE_PATTERN}" "$1")"
+ eval "exec ${LOCKFD}> '${lockfile}'"
+
+ flock -n ${LOCKFD} && return 0 || return 1
+}
+
+unlock_flock()
+{
+ # $1 = source to backup
+ # shellcheck disable=SC2059
+ lockfile="${LOCKDIR}/$(printf "${LOCKFILE_PATTERN}" "$1")"
+ eval "exec ${LOCKFD}>&-"
+ rm -f "${lockfile}"
+}
+
+#
+# locking functions using mkdir (mkdir is atomic)
+#
+lock_mkdir()
+{
+ # $1 = source to backup
+ # shellcheck disable=SC2059
+ lockfile="${LOCKDIR}/$(printf "${LOCKFILE_PATTERN}" "$1")"
+
+ mkdir "${lockfile}" && return 0 || return 1
+}
+
+unlock_mkdir()
+{
+ # $1 = source to backup
+ # shellcheck disable=SC2059
+ lockfile="${LOCKDIR}/$(printf "${LOCKFILE_PATTERN}" "$1")"
+
+ rmdir "${lockfile}"
+}
+
+#
+# determine locking tool: flock or mkdir
+#
+if command -v flock > /dev/null 2>&1
+then
+ lockf="lock_flock"
+ unlockf="unlock_flock"
+else
+ lockf="lock_mkdir"
+ unlockf="unlock_mkdir"
+fi
+
+#
+# unset values
+#
+PARALLEL=""
+MAX_JOBS=""
+USE_ALL=""
+LOGFILE=""
+SYSLOG=""
+# e - only errors, a - all output
+LOGLEVEL="a"
+LOGONLYERRORS=""
+
+#
+# catch signals
+#
+TRAPFUNC="rm -f \"${TMP}\""
+# shellcheck disable=SC2064
+trap "${TRAPFUNC}" 1 2 15
+
+#
+# Functions
+#
+
+# check if we are running interactive or non-interactive
+# see: http://www.tldp.org/LDP/abs/html/intandnonint.html
+_is_interactive()
+{
+ [ -t 0 ] || [ -p /dev/stdin ]
+}
+
+#
+# ssh-"feature": we cannot do '... read ...; ssh ...; < file',
+# because ssh reads stdin! -n does not work -> does not ask for password
+# Also allow deletion for files without the given suffix
+#
+delete_from_file()
+{
+ file="$1"; shift
+ suffix="" # It will be set, if deleting incomplete backups.
+ [ $# -eq 1 ] && suffix="$1" && shift
+ # dirs for deletion will be moved to this trash dir inside destination dir
+ # - for fast mv operation
+ trash="$(mktemp -d ".trash.XXXXXX")"
+ while read -r to_remove; do
+ mv "${to_remove}" "${trash}" ||
+ _exit_err "Moving ${to_remove} to ${trash} failed."
+ set -- "$@" "${to_remove}"
+ if [ "${suffix}" ]; then
+ to_remove_no_suffix="$(echo "${to_remove}" | sed "s/$suffix\$//")"
+ mv "${to_remove_no_suffix}" "${trash}" ||
+ _exit_err "Moving ${to_remove_no_suffix} to ${trash} failed."
+ set -- "$@" "${to_remove_no_suffix}"
+ fi
+ done < "${file}"
+ _techo "Removing $* in ${trash}..."
+ empty_dir=".empty-dir"
+ mkdir "${empty_dir}" || _exit_err "Empty directory ${empty_dir} cannot be created."
+ [ "${VVERBOSE}" ] && echo "Starting: rsync -a --delete ${empty_dir} ${trash}"
+ # rsync needs ending slash for directory content
+ rsync -a --delete "${empty_dir}/" "${trash}/" || _exit_err "Removing $* failed."
+ rmdir "${trash}" || _exit_err "Removing ${trash} directory failed"
+ rmdir "${empty_dir}" || _exit_err "Removing ${empty_dir} directory failed"
+ _techo "Removing $* in ${trash} finished."
+}
+
+display_version()
+{
+ echo "${FULL_VERSION}"
+ exit 0
+}
+
+usage()
+{
+ cat << eof
+${__myname}: [args]
+
+ ccollect creates (pseudo) incremental backups
+
+ -h, --help: Show this help screen
+ -a, --all: Backup all sources specified in ${CSOURCES}
+ -e, --errors: Log only errors
+ -j [max], --jobs [max] Specifies the number of jobs to run simultaneously.
+ If max is not specified then parallelise all jobs.
+ -l FILE, --logfile FILE Log to specified file
+ -p, --parallel: Parallelise backup processes (deprecated from 2.0)
+ -s, --syslog: Log to syslog with tag ccollect
+ -v, --verbose: Be very verbose (uses set -x)
+ -V, --version: Print version information
+
+ This is version ${VERSION} released on ${RELEASE}.
+
+ Retrieve latest ccollect at http://www.nico.schottelius.org/software/ccollect/
+eof
+ exit 0
+}
+
+# locking functions
+lock()
+{
+ "${lockf}" "$@" || _exit_err \
+ "Only one instance of ${__myname} for source \"$1\" can run at one time."
+}
+
+unlock()
+{
+ "${unlockf}" "$@"
+}
+
+# time displaying echo
+# stdout version
+_techo_stdout()
+{
+ echo "$(${DDATE}): $*"
+}
+
+# syslog version
+_techo_syslog()
+{
+ logger -t ccollect "$@"
+}
+
+# specified file version
+_techo_file()
+{
+ _techo_stdout "$@" >> "${LOGFILE}"
+}
+
+# determine _techo version before parsing options
+if _is_interactive
+then
+ _techof="_techo_stdout"
+else
+ _techof="_techo_syslog"
+fi
+
+# _techo with determined _techo version
+_techo()
+{
+ if [ "${LOGLEVEL}" = "a" ]
+ then
+ # name is exported before calling this function
+ # shellcheck disable=SC2154
+ set -- ${name:+"[${name}]"} "$@"
+ "${_techof}" "$@"
+ fi
+}
+
+_techo_err()
+{
+ _techo "Error: $*"
+}
+
+_exit_err()
+{
+ _techo_err "$@"
+ rm -f "${TMP}"
+ exit 1
+}
+
+#
+# Parse options
+#
+while [ "$#" -ge 1 ]; do
+ case "$1" in
+ -a|--all)
+ USE_ALL=1
+ ;;
+ -p|--parallel)
+ _techo "Warning: -p, --parallel option is deprecated," \
+ "use -j, --jobs instead."
+ PARALLEL=1
+ MAX_JOBS=""
+ ;;
+ -j|--jobs)
+ PARALLEL=1
+ if [ "$#" -ge 2 ]
+ then
+ case "$2" in
+ -*)
+ ;;
+ *)
+ MAX_JOBS=$2
+ shift
+ ;;
+ esac
+ fi
+ ;;
+ -e|--errors)
+ LOGONLYERRORS="1"
+ ;;
+ -l|--logfile)
+ if [ "$#" -ge 2 ]
+ then
+ case "$2" in
+ -*)
+ _exit_err "Missing log file"
+ ;;
+ *)
+ LOGFILE="$2"
+ shift
+ ;;
+ esac
+ else
+ _exit_err "Missing log file"
+ fi
+ ;;
+ -s|--syslog)
+ SYSLOG="1"
+ ;;
+ -v|--verbose)
+ set -x
+ ;;
+ -V|--version)
+ display_version
+ ;;
+ --)
+ # ignore the -- itself
+ shift
+ break
+ ;;
+ -h|--help|-*)
+ usage
+ ;;
+ *)
+ break
+ ;;
+ esac
+ shift
+done
+
+# determine _techo version and logging level after parsing options
+if [ "${LOGFILE}" ]
+then
+ _techof="_techo_file"
+ LOGLEVEL="a"
+elif _is_interactive
+then
+ if [ "${SYSLOG}" ]
+ then
+ _techof="_techo_syslog"
+ LOGLEVEL="a"
+ else
+ _techof="_techo_stdout"
+ LOGLEVEL="e"
+ fi
+else
+ _techof="_techo_syslog"
+ LOGLEVEL="a"
+fi
+
+if [ "${LOGFILE}" ] || [ "${SYSLOG}" ]
+then
+ if [ "${LOGONLYERRORS}" ]
+ then
+ LOGLEVEL="e"
+ fi
+fi
+
+# check that MAX_JOBS is natural number > 0
+# empty string means run all in parallel
+if ! echo "${MAX_JOBS}" | grep -q -E '^[1-9][0-9]*$|^$'
+then
+ _exit_err "Invalid max jobs value \"${MAX_JOBS}\""
+fi
+
+#
+# Setup interval
+#
+if [ $# -ge 1 ]; then
+ export INTERVAL="$1"
+ shift
+else
+ usage
+fi
+
+#
+# Check for configuraton directory
+#
+[ -d "${CCOLLECT_CONF}" ] || _exit_err "No configuration found in " \
+ "\"${CCOLLECT_CONF}\" (is \$CCOLLECT_CONF properly set?)"
+
+#
+# Create (portable!) source "array"
+#
+export no_sources=0
+
+if [ "${USE_ALL}" = 1 ]; then
+ #
+ # Get sources from source configuration
+ #
+ ( cd "${CSOURCES}" && ls -1 > "${TMP}" ) || \
+ _exit_err "Listing of sources failed. Aborting."
+
+ while read -r tmp; do
+ eval export "source_${no_sources}=\"${tmp}\""
+ no_sources=$((no_sources + 1))
+ done < "${TMP}"
+else
+ #
+ # Get sources from command line
+ #
+ while [ "$#" -ge 1 ]; do
+ eval "arg=\"\$1\""
+ shift
+
+ # arg is assigned in the eval above
+ # shellcheck disable=SC2154
+ eval export "source_${no_sources}=\"${arg}\""
+ no_sources="$((no_sources + 1))"
+ done
+fi
+
+#
+# Need at least ONE source to backup
+#
+if [ "${no_sources}" -lt 1 ]; then
+ usage
+else
+ _techo "${HALF_VERSION}: Beginning backup using interval ${INTERVAL}"
+fi
+
+#
+# Look for pre-exec command (general)
+#
+if [ -x "${CPREEXEC}" ]; then
+ _techo "Executing ${CPREEXEC} ..."
+ "${CPREEXEC}"; ret=$?
+ _techo "Finished ${CPREEXEC} (return code: ${ret})."
+
+ [ "${ret}" -eq 0 ] || _exit_err "${CPREEXEC} failed. Aborting"
+fi
+
+################################################################################
+#
+# Let's do the backup - here begins the real stuff
+#
+
+# in PARALLEL mode:
+# * create control pipe
+# * determine number of jobs to start at once
+if [ "${PARALLEL}" ]; then
+ mkfifo "${CONTROL_PIPE}"
+ # fd 5 is tied to control pipe
+ eval "exec 5<>'${CONTROL_PIPE}'"
+ TRAPFUNC="${TRAPFUNC}; rm -f \"${CONTROL_PIPE}\""
+ # shellcheck disable=SC2064
+ trap "${TRAPFUNC}" 0 1 2 15
+
+ # determine how much parallel jobs to prestart
+ if [ "${MAX_JOBS}" ]
+ then
+ if [ "${MAX_JOBS}" -le "${no_sources}" ]
+ then
+ prestart="${MAX_JOBS}"
+ else
+ prestart="${no_sources}"
+ fi
+ else
+ prestart=0
+ fi
+fi
+
+source_no=0
+while [ "${source_no}" -lt "${no_sources}" ]; do
+ #
+ # Get current source
+ #
+ eval export name=\"\$source_${source_no}\"
+ source_no=$((source_no + 1))
+
+ #
+ # Start ourself, if we want parallel execution
+ #
+ if [ "${PARALLEL}" ]; then
+ if [ ! "${MAX_JOBS}" ]
+ then
+ # run all in parallel
+ "$0" "${INTERVAL}" "${name}" &
+ continue
+ elif [ "${prestart}" -gt 0 ]
+ then
+ # run prestart child if pending
+ { "$0" "${INTERVAL}" "${name}"; printf '\n' >&5; } &
+ prestart=$((prestart - 1))
+ continue
+ else
+ # each time a child finishes we get a line from the pipe
+ # and then launch another child
+ while read -r line
+ do
+ { "$0" "${INTERVAL}" "${name}"; printf '\n' >&5; } &
+ # get out of loop so we can contnue with main loop
+ # for next source
+ break
+ done <&5
+ continue
+ fi
+ fi
+
+#
+# Start subshell for easy log editing
+#
+(
+ backup="${CSOURCES}/${name}"
+ c_source="${backup}/source"
+ c_dest="${backup}/destination"
+ c_pre_exec="${backup}/pre_exec"
+ c_post_exec="${backup}/post_exec"
+
+ #
+ # Stderr to stdout, so we can produce nice logs
+ #
+ exec 2>&1
+
+ #
+ # Record start of backup: internal and for the user
+ #
+ begin_s="$(${SDATE})"
+ _techo "Beginning to backup"
+
+ #
+ # Standard configuration checks
+ #
+ if [ ! -e "${backup}" ]; then
+ _exit_err "Source \"${backup}\" does not exist."
+ fi
+
+ #
+ # Configuration _must_ be a directory (cconfig style)
+ #
+ if [ ! -d "${backup}" ]; then
+ _exit_err "\"${backup}\" is not a cconfig-directory. Skipping."
+ fi
+
+ #
+ # Acquire lock for source. If lock cannot be acquired, lock will exit
+ # with error message.
+ #
+ lock "${name}"
+
+ # redefine trap to also unlock (rm lockfile)
+ TRAPFUNC="${TRAPFUNC}; unlock \"${name}\""
+ # shellcheck disable=SC2064
+ trap "${TRAPFUNC}" 1 2 15
+
+ #
+ # First execute pre_exec, which may generate destination or other parameters
+ #
+ if [ -x "${c_pre_exec}" ]; then
+ _techo "Executing ${c_pre_exec} ..."
+ "${c_pre_exec}"; ret="$?"
+ _techo "Finished ${c_pre_exec} (return code ${ret})."
+
+ [ "${ret}" -eq 0 ] || _exit_err "${c_pre_exec} failed. Skipping."
+ fi
+
+ #
+ # Read source configuration
+ #
+ for opt in verbose very_verbose summary exclude rsync_options \
+ delete_incomplete rsync_failure_codes \
+ mtime quiet_if_down ; do
+ if [ -f "${backup}/${opt}" ] || [ -f "${backup}/no_${opt}" ]; then
+ eval "c_$opt=\"${backup}/$opt\""
+ else
+ eval "c_$opt=\"${CDEFAULTS}/$opt\""
+ fi
+ done
+
+ #
+ # Interval definition: First try source specific, fallback to default
+ #
+ c_interval="$(cat "${backup}/intervals/${INTERVAL}" 2>/dev/null)"
+
+ if [ -z "${c_interval}" ]; then
+ c_interval="$(cat "${CDEFAULTS}/intervals/${INTERVAL}" 2>/dev/null)"
+
+ if [ -z "${c_interval}" ]; then
+ _exit_err "No definition for interval \"${INTERVAL}\" found. Skipping."
+ fi
+ fi
+
+ #
+ # Sort by ctime (default) or mtime (configuration option)
+ #
+ # variable is assigned using eval
+ # shellcheck disable=SC2154
+ if [ -f "${c_mtime}" ] ; then
+ TSORT="t"
+ else
+ TSORT="tc"
+ fi
+
+ #
+ # Source configuration checks
+ #
+ if [ ! -f "${c_source}" ]; then
+ _exit_err "Source description \"${c_source}\" is not a file. Skipping."
+ else
+ source=$(cat "${c_source}"); ret="$?"
+ if [ "${ret}" -ne 0 ]; then
+ _exit_err "Source ${c_source} is not readable. Skipping."
+ fi
+ fi
+
+ #
+ # Destination is a path
+ #
+ if [ ! -f "${c_dest}" ]; then
+ _exit_err "Destination ${c_dest} is not a file. Skipping."
+ else
+ ddir="$(cat "${c_dest}")"; ret="$?"
+ if [ "${ret}" -ne 0 ]; then
+ _exit_err "Destination ${c_dest} is not readable. Skipping."
+ fi
+ fi
+
+ #
+ # Parameters: ccollect defaults, configuration options, user options
+ #
+
+ #
+ # Rsync standard options (archive will be added after is-up-check)
+ #
+ set -- "$@" "--delete" "--numeric-ids" "--relative" \
+ "--delete-excluded" "--sparse"
+
+ #
+ # Exclude list
+ #
+ # variable is assigned using eval
+ # shellcheck disable=SC2154
+ if [ -f "${c_exclude}" ]; then
+ set -- "$@" "--exclude-from=${c_exclude}"
+ fi
+
+ #
+ # Output a summary
+ #
+ # variable is assigned using eval
+ # shellcheck disable=SC2154
+ if [ -f "${c_summary}" ]; then
+ set -- "$@" "--stats"
+ fi
+
+ #
+ # Verbosity for rsync, rm, and mkdir
+ #
+ VVERBOSE=""
+ # variable is assigned using eval
+ # shellcheck disable=SC2154
+ if [ -f "${c_very_verbose}" ]; then
+ set -- "$@" "-vv"
+ VVERBOSE="-v"
+ elif [ -f "${c_verbose}" ]; then
+ set -- "$@" "-v"
+ fi
+
+ #
+ # Extra options for rsync provided by the user
+ #
+ # variable is assigned using eval
+ # shellcheck disable=SC2154
+ if [ -f "${c_rsync_options}" ]; then
+ while read -r line; do
+ # Trim line.
+ ln=$(echo "${line}" | awk '{$1=$1;print;}')
+ # Only if ln is non zero length string.
+ #
+ # If ln is empty then rsync '' DEST evaluates
+ # to transfer current directory to DEST which would
+ # with specific options destroy DEST content.
+ if [ -n "${ln}" ]
+ then
+ set -- "$@" "${ln}"
+ fi
+ done < "${c_rsync_options}"
+ fi
+
+ #
+ # Check: source is up and accepting connections (before deleting old backups!)
+ #
+ if ! rsync "$@" "${source}" >/dev/null 2>"${TMP}" ; then
+ # variable is assigned using eval
+ # shellcheck disable=SC2154
+ if [ ! -f "${c_quiet_if_down}" ]; then
+ cat "${TMP}"
+ fi
+ _exit_err "Source ${source} is not readable. Skipping."
+ fi
+
+ #
+ # Add --archive for real backup (looks nice in front)
+ #
+ set -- "--archive" "$@"
+
+ #
+ # Check: destination exists?
+ #
+ cd "${ddir}" || _exit_err "Cannot change to ${ddir}. Skipping."
+
+ #
+ # Check incomplete backups (needs echo to remove newlines)
+ #
+ # shellcheck disable=SC2010
+ ls -1 | grep "${CMARKER}\$" > "${TMP}"; ret=$?
+
+ if [ "$ret" -eq 0 ]; then
+ _techo "Incomplete backups: $(cat "${TMP}")"
+ # variable is assigned using eval
+ # shellcheck disable=SC2154
+ if [ -f "${c_delete_incomplete}" ]; then
+ delete_from_file "${TMP}" "${CMARKER}" &
+ fi
+ fi
+
+ #
+ # Include current time in name, not the time when we began to remove above
+ #
+ destination_name="${INTERVAL}.$(${CDATE}).$$-${source_no}"
+ export destination_name
+ destination_dir="${ddir}/${destination_name}"
+ export destination_dir
+
+ #
+ # Check: maximum number of backups is reached?
+ #
+ # shellcheck disable=SC2010
+ count="$(ls -1 | grep -c "^${INTERVAL}\\.")"
+
+ _techo "Existing backups: ${count} Total keeping backups: ${c_interval}"
+
+ if [ "${count}" -ge "${c_interval}" ]; then
+ # Use oldest directory as new backup destination directory.
+ # It need not to be deleted, rsync will sync its content.
+ # shellcheck disable=SC2010
+ oldest_bak=$(ls -${TSORT}1r | grep "^${INTERVAL}\\." | head -n 1 || \
+ _exit_err "Listing oldest backup failed")
+ _techo "Using ${oldest_bak} for destination dir ${destination_dir}"
+ if mv "${oldest_bak}" "${destination_dir}"; then
+ # Touch dest dir so it is not sorted wrong in listings below.
+ ls_rm_exclude=$(basename "${destination_dir}")
+
+ # We have something to remove only if count > interval.
+ remove="$((count - c_interval))"
+ else
+ _techo_err "Renaming oldest backup ${oldest_bak} to ${destination_dir} failed, removing it."
+ remove="$((count - c_interval + 1))"
+ ls_rm_exclude=""
+ fi
+ if [ "${remove}" -gt 0 ]; then
+ _techo "Removing ${remove} backup(s)..."
+
+ if [ -z "${ls_rm_exclude}" ]; then
+ # shellcheck disable=SC2010
+ ls -${TSORT}1r | grep "^${INTERVAL}\\." | head -n "${remove}" > "${TMP}" || \
+ _exit_err "Listing old backups failed"
+ else
+ # shellcheck disable=SC2010
+ ls -${TSORT}1r | grep -v "${ls_rm_exclude}" | grep "^${INTERVAL}\\." | head -n "${remove}" > "${TMP}" || \
+ _exit_err "Listing old backups failed"
+ fi
+
+ delete_from_file "${TMP}" &
+ fi
+ fi
+
+ #
+ # Check for backup directory to clone from: Always clone from the latest one!
+ # Exclude destination_dir from listing, it can be touched reused and renamed
+ # oldest existing destination directory.
+ #
+ dest_dir_name=$(basename "${destination_dir}")
+ # shellcheck disable=SC2010
+ last_dir="$(ls -${TSORT}p1 | grep '/$' | grep -v "${dest_dir_name}" | head -n 1)" || \
+ _exit_err "Failed to list contents of ${ddir}."
+
+ #
+ # Clone from old backup, if existing
+ #
+ if [ "${last_dir}" ]; then
+ set -- "$@" "--link-dest=${ddir}/${last_dir}"
+ _techo "Hard linking from ${last_dir}"
+ fi
+
+ #
+ # Mark backup running and go back to original directory
+ #
+ touch "${destination_dir}${CMARKER}"
+ cd "${__abs_mydir}" || _exit_err "Cannot go back to ${__abs_mydir}."
+
+ #
+ # the rsync part
+ #
+ _techo "Transferring files..."
+ rsync "$@" "${source}" "${destination_dir}"; ret=$?
+ _techo "Finished backup (rsync return code: $ret)."
+
+ #
+ # export rsync return code, might be useful in post_exec
+ #
+ export rsync_return_code=$ret
+
+ #
+ # Set modification time (mtime) to current time, if sorting by mtime is enabled
+ #
+ [ -f "$c_mtime" ] && touch "${destination_dir}"
+
+ #
+ # Check if rsync exit code indicates failure.
+ #
+ fail=""
+ # variable is assigned using eval
+ # shellcheck disable=SC2154
+ if [ -f "$c_rsync_failure_codes" ]; then
+ while read -r code ; do
+ if [ "$ret" = "$code" ]; then
+ fail=1
+ fi
+ done <"${c_rsync_failure_codes}"
+ fi
+
+ #
+ # Remove marking here unless rsync failed.
+ #
+ if [ -z "$fail" ]; then
+ rm "${destination_dir}${CMARKER}" || \
+ _exit_err "Removing ${destination_dir}${CMARKER} failed."
+ if [ "${ret}" -ne 0 ]; then
+ _techo "Warning: rsync exited non-zero, the backup may be broken (see rsync errors)."
+ fi
+ else
+ _techo "Warning: rsync failed with return code $ret."
+ fi
+
+ #
+ # Create symlink to newest backup. NOTE(review): "${ddir}${latest_dir}" and "${ddir}current" below look like they lack a "/" separator unless ddir ends in "/" (compare "${ddir}/${destination_name}" above) — verify.
+ #
+ # shellcheck disable=SC2010
+ latest_dir="$(ls -${TSORT}p1 "${ddir}" | grep '/$' | head -n 1)" || \
+ _exit_err "Failed to list content of ${ddir}."
+
+ ln -snf "${ddir}${latest_dir}" "${ddir}current" || \
+ _exit_err "Failed to create 'current' symlink."
+
+ #
+ # post_exec
+ #
+ if [ -x "${c_post_exec}" ]; then
+ _techo "Executing ${c_post_exec} ..."
+ "${c_post_exec}"; ret=$?
+ _techo "Finished ${c_post_exec}."
+
+ if [ "${ret}" -ne 0 ]; then
+ _exit_err "${c_post_exec} failed."
+ fi
+ fi
+
+ #
+ # Time calculation
+ #
+ end_s="$(${SDATE})"
+ full_seconds="$((end_s - begin_s))"
+ hours="$((full_seconds / 3600))"
+ minutes="$(((full_seconds % 3600) / 60))"
+ seconds="$((full_seconds % 60))"
+
+ _techo "Backup lasted: ${hours}:${minutes}:${seconds} (h:m:s)"
+
+ unlock "${name}"
+
+ # wait for children (doing delete_from_file) if any still running
+ wait
+) || exit
+done
+
+#
+# Be a good parent and wait for our children, if they are running wild parallel
+# After all children are finished then remove control pipe.
+#
+if [ "${PARALLEL}" ]; then
+ _techo "Waiting for children to complete..."
+ wait
+ rm -f "${CONTROL_PIPE}"
+fi
+
+#
+# Look for post-exec command (general)
+#
+if [ -x "${CPOSTEXEC}" ]; then
+ _techo "Executing ${CPOSTEXEC} ..."
+ "${CPOSTEXEC}"; ret=$?
+ _techo "Finished ${CPOSTEXEC} (return code: ${ret})."
+
+ if [ "${ret}" -ne 0 ]; then
+ _techo "${CPOSTEXEC} failed."
+ fi
+fi
+
+rm -f "${TMP}"
+_techo "Finished"
diff --git a/ccollect.sh b/ccollect.sh
deleted file mode 100755
index 13ceb33..0000000
--- a/ccollect.sh
+++ /dev/null
@@ -1,596 +0,0 @@
-#!/bin/sh
-#
-# 2005-2009 Nico Schottelius (nico-ccollect at schottelius.org)
-#
-# This file is part of ccollect.
-#
-# ccollect is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# ccollect is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with ccollect. If not, see .
-#
-# Initially written for SyGroup (www.sygroup.ch)
-# Date: Mon Nov 14 11:45:11 CET 2005
-
-# Error upon expanding unset variables:
-set -u
-
-#
-# Standard variables (stolen from cconf)
-#
-__pwd="$(pwd -P)"
-__mydir="${0%/*}"; __abs_mydir="$(cd "$__mydir" && pwd -P)"
-__myname=${0##*/}; __abs_myname="$__abs_mydir/$__myname"
-
-#
-# where to find our configuration and temporary file
-#
-CCOLLECT_CONF="${CCOLLECT_CONF:-/etc/ccollect}"
-CSOURCES="${CCOLLECT_CONF}/sources"
-CDEFAULTS="${CCOLLECT_CONF}/defaults"
-CPREEXEC="${CDEFAULTS}/pre_exec"
-CPOSTEXEC="${CDEFAULTS}/post_exec"
-
-export TMP=$(mktemp "/tmp/${__myname}.XXXXXX")
-VERSION="0.8"
-RELEASE="2009-08-20"
-HALF_VERSION="ccollect ${VERSION}"
-FULL_VERSION="ccollect ${VERSION} (${RELEASE})"
-
-#
-# CDATE: how we use it for naming of the archives
-# DDATE: how the user should see it in our output (DISPLAY)
-#
-CDATE="date +%Y%m%d-%H%M"
-DDATE="date +%Y-%m-%d-%H:%M:%S"
-SDATE="date +%s"
-
-#
-# unset values
-#
-PARALLEL=""
-USE_ALL=""
-
-#
-# catch signals
-#
-trap "rm -f \"${TMP}\"" 1 2 15
-
-#
-# Functions
-#
-
-# time displaying echo
-_techo()
-{
- echo "$(${DDATE}): $@"
-}
-
-# exit on error
-_exit_err()
-{
- _techo "$@"
- rm -f "${TMP}"
- exit 1
-}
-
-add_name()
-{
- awk "{ print \"[${name}] \" \$0 }"
-}
-
-#
-# Execute on remote host, if backing up to a remote host
-#
-pcmd()
-{
- if [ "${remote_host}" ]; then
- ssh "${remote_host}" "$@"
- else
- "$@"
- fi
-}
-
-delete_from_file()
-{
- #
- # ssh-"feature": we cannot do '... read ...; ssh ...; < file',
- # because ssh reads stdin! -n does not work -> does not ask for password
- #
- file="$1"; shift
- while read to_remove; do set -- "$@" "${ddir}/${to_remove}"; done < "${file}"
- _techo "Removing $@ ..."
- pcmd rm ${VVERBOSE} -rf "$@" || _exit_err "Removing $@ failed."
-}
-
-display_version()
-{
- echo "${FULL_VERSION}"
- exit 0
-}
-
-usage()
-{
- cat << eof
-${__myname}: [args]
-
- ccollect creates (pseudo) incremental backups
-
- -h, --help: Show this help screen
- -p, --parallel: Parallelise backup processes
- -a, --all: Backup all sources specified in ${CSOURCES}
- -v, --verbose: Be very verbose (uses set -x)
- -V, --version: Print version information
-
- This is version ${VERSION}, released on ${RELEASE}
- (the first version was written on 2005-12-05 by Nico Schottelius).
-
- Retrieve latest ccollect at http://www.nico.schottelius.org/software/ccollect/
-eof
- exit 0
-}
-
-#
-# Parse options
-#
-while [ "$#" -ge 1 ]; do
- case "$1" in
- -a|--all)
- USE_ALL=1
- ;;
- -v|--verbose)
- set -x
- ;;
- -p|--parallel)
- PARALLEL=1
- ;;
- -h|--help)
- usage
- ;;
- -V|--version)
- display_version
- ;;
- -h|--help|-*)
- usage
- ;;
- --)
- # ignore the -- itself
- shift
- break
- ;;
- *)
- break
- ;;
- esac
- shift
-done
-
-#
-# Setup interval
-#
-if [ $# -ge 1 ]; then
- export INTERVAL="$1"
- shift
-else
- usage
-fi
-
-#
-# Check for configuraton directory
-#
-[ -d "${CCOLLECT_CONF}" ] || _exit_err "No configuration found in " \
- "\"${CCOLLECT_CONF}\" (is \$CCOLLECT_CONF properly set?)"
-
-#
-# Create (portable!) source "array"
-#
-export no_sources=0
-
-if [ "${USE_ALL}" = 1 ]; then
- #
- # Get sources from source configuration
- #
- ( cd "${CSOURCES}" && ls -1 > "${TMP}" ); ret=$?
-
- [ "${ret}" -eq 0 ] || _exit_err "Listing of sources failed. Aborting."
-
- while read tmp; do
- eval export source_${no_sources}=\"${tmp}\"
- no_sources=$((${no_sources}+1))
- done < "${TMP}"
-else
- #
- # Get sources from command line
- #
- while [ "$#" -ge 1 ]; do
- eval arg=\"\$1\"; shift
-
- eval export source_${no_sources}=\"${arg}\"
- no_sources="$((${no_sources}+1))"
- done
-fi
-
-#
-# Need at least ONE source to backup
-#
-if [ "${no_sources}" -lt 1 ]; then
- usage
-else
- _techo "${HALF_VERSION}: Beginning backup using interval ${INTERVAL}"
-fi
-
-#
-# Look for pre-exec command (general)
-#
-if [ -x "${CPREEXEC}" ]; then
- _techo "Executing ${CPREEXEC} ..."
- "${CPREEXEC}"; ret=$?
- _techo "Finished ${CPREEXEC} (return code: ${ret})."
-
- [ "${ret}" -eq 0 ] || _exit_err "${CPREEXEC} failed. Aborting"
-fi
-
-#
-# Let's do the backup
-#
-i=0
-while [ "${i}" -lt "${no_sources}" ]; do
- #
- # Get current source
- #
- eval name=\"\$source_${i}\"
- i=$((${i}+1))
-
- export name
-
- #
- # start ourself, if we want parallel execution
- #
- if [ "${PARALLEL}" ]; then
- "$0" "${INTERVAL}" "${name}" &
- continue
- fi
-
-#
-# Start subshell for easy log editing
-#
-(
- backup="${CSOURCES}/${name}"
- #
- # Stderr to stdout, so we can produce nice logs
- #
- exec 2>&1
-
- #
- # Record start of backup: internal and for the user
- #
- begin_s="$(${SDATE})"
- _techo "Beginning to backup"
-
- #
- # Standard configuration checks
- #
- if [ ! -e "${backup}" ]; then
- _exit_err "Source does not exist."
- fi
-
- #
- # Configuration _must_ be a directory (cconfig style)
- #
- if [ ! -d "${backup}" ]; then
- _exit_err "\"${name}\" is not a cconfig-directory. Skipping."
- fi
-
- #
- # Read / create configuration
- #
- c_source="${backup}/source"
- c_dest="${backup}/destination"
- c_pre_exec="${backup}/pre_exec"
- c_post_exec="${backup}/post_exec"
- c_marker="ccollect-marker"
- for opt in verbose very_verbose summary exclude rsync_options \
- delete_incomplete remote_host rsync_failure_codes \
- mtime quiet_if_down ; do
- if [ -f "${backup}/${opt}" -o -f "${backup}/no_${opt}" ]; then
- eval c_$opt=\"${backup}/$opt\"
- else
- eval c_$opt=\"${CDEFAULTS}/$opt\"
- fi
- done
-
- #
- # Sort by ctime (default) or mtime (configuration option)
- #
- if [ -f "$c_mtime" ] ; then
- TSORT="t"
- else
- TSORT="tc"
- fi
-
- #
- # First execute pre_exec, which may generate destination or other parameters
- #
- if [ -x "${c_pre_exec}" ]; then
- _techo "Executing ${c_pre_exec} ..."
- "${c_pre_exec}"; ret="$?"
- _techo "Finished ${c_pre_exec} (return code ${ret})."
-
- if [ "${ret}" -ne 0 ]; then
- _exit_err "${c_pre_exec} failed. Skipping."
- fi
- fi
-
- #
- # Source configuration checks
- #
- if [ ! -f "${c_source}" ]; then
- _exit_err "Source description \"${c_source}\" is not a file. Skipping."
- else
- source=$(cat "${c_source}"); ret="$?"
- if [ "${ret}" -ne 0 ]; then
- _exit_err "Source ${c_source} is not readable. Skipping."
- fi
- fi
-
- #
- # Destination is a path
- #
- if [ ! -f "${c_dest}" ]; then
- _exit_err "Destination ${c_dest} is not a file. Skipping."
- else
- ddir="$(cat "${c_dest}")"; ret="$?"
- if [ "${ret}" -ne 0 ]; then
- _exit_err "Destination ${c_dest} is not readable. Skipping."
- fi
- fi
-
- #
- # Set pre-cmd, if we backup to a remote host.
- #
- if [ -f "${c_remote_host}" ]; then
- remote_host="$(cat "${c_remote_host}")"; ret="$?"
- if [ "${ret}" -ne 0 ]; then
- _exit_err "Remote host file ${c_remote_host} exists, but is not readable. Skipping."
- fi
- destination="${remote_host}:${ddir}"
- else
- remote_host=""
- destination="${ddir}"
- fi
- export remote_host
-
- #
- # Parameters: ccollect defaults, configuration options, user options
- #
-
- #
- # Rsync standard options
- #
- set -- "$@" "--archive" "--delete" "--numeric-ids" "--relative" \
- "--delete-excluded" "--sparse"
-
- #
- # Exclude list
- #
- if [ -f "${c_exclude}" ]; then
- set -- "$@" "--exclude-from=${c_exclude}"
- fi
-
- #
- # Output a summary
- #
- if [ -f "${c_summary}" ]; then
- set -- "$@" "--stats"
- fi
-
- #
- # Verbosity for rsync, rm, and mkdir
- #
- VVERBOSE=""
- if [ -f "${c_very_verbose}" ]; then
- set -- "$@" "-vv"
- VVERBOSE="-v"
- elif [ -f "${c_verbose}" ]; then
- set -- "$@" "-v"
- fi
-
- #
- # Extra options for rsync provided by the user
- #
- if [ -f "${c_rsync_options}" ]; then
- while read line; do
- set -- "$@" "$line"
- done < "${c_rsync_options}"
- fi
-
- #
- # Check: source is up and accepting connections (before deleting old backups!)
- #
- if ! rsync "${source}" >/dev/null 2>"${TMP}" ; then
- if [ ! -f "${c_quiet_if_down}" ]; then
- cat "${TMP}"
- fi
- _exit_err "Source ${source} is not readable. Skipping."
- fi
-
- #
- # Check: destination exists?
- #
- ( pcmd cd "${ddir}" ) || _exit_err "Cannot change to ${ddir}. Skipping."
-
- #
- # Check: incomplete backups? (needs echo to remove newlines)
- #
- incomplete="$(echo \
- $(pcmd ls -1 "${ddir}/" | \
- awk "/\.${c_marker}\$/ { print \$0; gsub(\"\.${c_marker}\$\",\"\",\$0); print \$0 }" | \
- tee "${TMP}"))"
-
- if [ "${incomplete}" ]; then
- _techo "Incomplete backups: ${incomplete}"
- if [ -f "${c_delete_incomplete}" ]; then
- delete_from_file "${TMP}"
- fi
- fi
-
- #
- # Interval definition: First try source specific, fallback to default
- #
- c_interval="$(cat "${backup}/intervals/${INTERVAL}" 2>/dev/null)"
-
- if [ -z "${c_interval}" ]; then
- c_interval="$(cat "${CDEFAULTS}/intervals/${INTERVAL}" 2>/dev/null)"
-
- if [ -z "${c_interval}" ]; then
- _exit_err "No definition for interval \"${INTERVAL}\" found. Skipping."
- fi
- fi
-
- #
- # Check: maximum number of backups is reached?
- # If so remove. Use grep and ls -p so we only look at directories
- #
- count="$(pcmd ls -p1 "${ddir}" | grep "^${INTERVAL}\..*/\$" | wc -l \
- | sed 's/^ *//g')" || _exit_err "Counting backups failed"
-
- _techo "Existing backups: ${count} Total keeping backups: ${c_interval}"
-
- if [ "${count}" -ge "${c_interval}" ]; then
- substract="$((${c_interval} - 1))"
- remove="$((${count} - ${substract}))"
- _techo "Removing ${remove} backup(s)..."
-
- pcmd ls -${TSORT}p1r "${ddir}" | grep "^${INTERVAL}\..*/\$" | \
- head -n "${remove}" > "${TMP}" || \
- _exit_err "Listing old backups failed"
-
- delete_from_file "${TMP}"
- fi
-
- #
- # Check for backup directory to clone from: Always clone from the latest one!
- #
- last_dir="$(pcmd ls -${TSORT}p1 "${ddir}" | grep '/$' | head -n 1)" || \
- _exit_err "Failed to list contents of ${ddir}."
-
- #
- # clone from old backup, if existing
- #
- if [ "${last_dir}" ]; then
- set -- "$@" "--link-dest=${ddir}/${last_dir}"
- _techo "Hard linking from ${last_dir}"
- fi
-
- # set time when we really begin to backup, not when we began to remove above
- destination_date="$(${CDATE})"
- destination_dir="${ddir}/${INTERVAL}.${destination_date}.$$"
- destination_full="${destination}/${INTERVAL}.${destination_date}.$$"
-
- # give some info
- _techo "Beginning to backup, this may take some time..."
-
- _techo "Creating ${destination_dir} ..."
- pcmd mkdir ${VVERBOSE} "${destination_dir}" || \
- _exit_err "Creating ${destination_dir} failed. Skipping."
-
- #
- # added marking in 0.6 (and remove it, if successful later)
- #
- pcmd touch "${destination_dir}.${c_marker}"
-
- #
- # the rsync part
- #
- _techo "Transferring files..."
- rsync "$@" "${source}" "${destination_full}"; ret=$?
- _techo "Finished backup (rsync return code: $ret)."
-
- #
- # Set modification time (mtime) to current time, if sorting by mtime is enabled
- #
- [ -f "$c_mtime" ] && pcmd touch "${destination_dir}"
-
- #
- # Check if rsync exit code indicates failure.
- #
- fail=""
- if [ -f "$c_rsync_failure_codes" ]; then
- while read code ; do
- if [ "$ret" = "$code" ]; then
- fail=1
- fi
- done <"$c_rsync_failure_codes"
- fi
-
- #
- # Remove marking here unless rsync failed.
- #
- if [ -z "$fail" ]; then
- pcmd rm "${destination_dir}.${c_marker}" || \
- _exit_err "Removing ${destination_dir}.${c_marker} failed."
- if [ "${ret}" -ne 0 ]; then
- _techo "Warning: rsync exited non-zero, the backup may be broken (see rsync errors)."
- fi
- else
- _techo "Warning: rsync failed with return code $ret."
- fi
-
- #
- # post_exec
- #
- if [ -x "${c_post_exec}" ]; then
- _techo "Executing ${c_post_exec} ..."
- "${c_post_exec}"; ret=$?
- _techo "Finished ${c_post_exec}."
-
- if [ "${ret}" -ne 0 ]; then
- _exit_err "${c_post_exec} failed."
- fi
- fi
-
- # Calculation
- end_s="$(${SDATE})"
-
- full_seconds="$((${end_s} - ${begin_s}))"
- hours="$((${full_seconds} / 3600))"
- seconds="$((${full_seconds} - (${hours} * 3600)))"
- minutes="$((${seconds} / 60))"
- seconds="$((${seconds} - (${minutes} * 60)))"
-
- _techo "Backup lasted: ${hours}:${minutes}:${seconds} (h:m:s)"
-
-) | add_name
-done
-
-#
-# Be a good parent and wait for our children, if they are running wild parallel
-#
-if [ "${PARALLEL}" ]; then
- _techo "Waiting for children to complete..."
- wait
-fi
-
-#
-# Look for post-exec command (general)
-#
-if [ -x "${CPOSTEXEC}" ]; then
- _techo "Executing ${CPOSTEXEC} ..."
- "${CPOSTEXEC}"; ret=$?
- _techo "Finished ${CPOSTEXEC} (return code: ${ret})."
-
- if [ "${ret}" -ne 0 ]; then
- _techo "${CPOSTEXEC} failed."
- fi
-fi
-
-rm -f "${TMP}"
-_techo "Finished"
diff --git a/conf/sources/to-remote/destination b/conf/sources/to-remote/destination
deleted file mode 100644
index 8cac69d..0000000
--- a/conf/sources/to-remote/destination
+++ /dev/null
@@ -1 +0,0 @@
-/tmp/ccollect
diff --git a/conf/sources/to-remote/remote_host b/conf/sources/to-remote/remote_host
deleted file mode 100644
index 2fbb50c..0000000
--- a/conf/sources/to-remote/remote_host
+++ /dev/null
@@ -1 +0,0 @@
-localhost
diff --git a/conf/sources/to-remote/source b/conf/sources/to-remote/source
deleted file mode 100644
index e64611b..0000000
--- a/conf/sources/to-remote/source
+++ /dev/null
@@ -1 +0,0 @@
-/home/users/nico/bin
diff --git a/contrib/ccollect.spec b/contrib/ccollect.spec
new file mode 100644
index 0000000..8916b7e
--- /dev/null
+++ b/contrib/ccollect.spec
@@ -0,0 +1,79 @@
+Summary: (pseudo) incremental backup with different exclude lists using hardlinks and rsync
+Name: ccollect
+Version: 2.3
+Release: 0
+URL: http://www.nico.schottelius.org/software/ccollect
+Source0: http://www.nico.schottelius.org/software/ccollect/%{name}-%{version}.tar.bz2
+
+License: GPL-3
+Group: Applications/System
+Vendor: Nico Schottelius
+BuildRoot: %{_tmppath}/%{name}-%(id -un)
+BuildArch: noarch
+Requires: rsync
+
+%description
+Ccollect backs up data from local and remote hosts to your local harddisk.
+Although ccollect creates full backups, it requires very little space on the backup medium, because ccollect uses hardlinks to create an initial copy of the last backup.
+Only the inodes used by the hardlinks and the changed files need additional space.
+
+%prep
+%setup -q
+
+%install
+rm -rf $RPM_BUILD_ROOT
+
+#Installing main ccollect and /etc directory
+%__install -d 755 %buildroot%_bindir
+%__install -d 755 %buildroot%_sysconfdir/%name
+%__install -m 755 ccollect %buildroot%_bindir/
+
+#bin files from tools directory
+for t in $(ls tools/ccollect_*) ; do
+ %__install -m 755 ${t} %buildroot%_bindir/
+done
+
+#Configuration examples and docs
+%__install -d 755 %buildroot%_datadir/doc/%name-%version/examples
+
+%__install -m 644 README %buildroot%_datadir/doc/%name-%version
+%__install -m 644 COPYING %buildroot%_datadir/doc/%name-%version
+%__install -m 644 CREDITS %buildroot%_datadir/doc/%name-%version
+%__install -m 644 conf/README %buildroot%_datadir/doc/%name-%version/examples
+%__cp -pr conf/defaults %buildroot%_datadir/doc/%name-%version/examples/
+%__cp -pr conf/sources %buildroot%_datadir/doc/%name-%version/examples/
+
+#Addition documentation and some config tools
+%__install -d 755 %buildroot%_datadir/%name/tools
+%__install -m 755 tools/called_from_remote_pre_exec %buildroot%_datadir/%name/tools
+%__cp -pr tools/config-pre-* %buildroot%_datadir/%name/tools
+%__install -m 755 tools/report_success %buildroot%_datadir/%name/tools
+
+%clean
+rm -rf $RPM_BUILD_ROOT
+
+%files
+%defattr(-,root,root)
+%_bindir/ccollect*
+%_datadir/doc/%name-%version
+%_datadir/%name/tools
+%docdir %_datadir/doc/%name-%version
+%dir %_sysconfdir/%name
+
+%changelog
+* Thu Aug 20 2009 Nico Schottelius 0.8
+- Introduce consistent time sorting (John Lawless)
+- Check for source connectivity before trying backup (John Lawless)
+- Defensive programming patch (John Lawless)
+- Some code cleanups (argument parsing, usage) (Nico Schottelius)
+- Only consider directories as sources when using -a (Nico Schottelius)
+- Fix general parsing problem with -a (Nico Schottelius)
+- Fix potential bug when using remote_host, delete_incomplete and ssh (Nico Schottelius)
+- Improve removal performance: minimised number of 'rm' calls (Nico Schottelius)
+- Support sorting by mtime (John Lawless)
+- Improve option handling (John Lawless)
+- Add support for quiet operation for dead devices (quiet_if_down) (John Lawless)
+- Add smart option parsing, including support for default values (John Lawless)
+- Updated and cleaned up documentation (Nico Schottelius)
+- Fixed bug "removal of current directory" in ccollect_delete_source.sh (Found by Günter Stöhr, fixed by Nico Schottelius)
+
diff --git a/contrib/ccollect_mgr/README b/contrib/ccollect_mgr/README
new file mode 100644
index 0000000..63bba1a
--- /dev/null
+++ b/contrib/ccollect_mgr/README
@@ -0,0 +1,47 @@
+[Almost complete Copy of an e-mail from Patrick Drolet]
+
+Hello again,
+
+
+
+I have created a script to better manage the backups since my
+upload/download ratio and my bandwidth is limited by my ISP, and my hard
+disk space is also somewhat limited. The script is called
+"ccollect_mgr.sh".
+
+
+
+Provides the following features
+
+1) Determine the interval (daily/weekly/monthly)
+
+a. Define when you want weekly and monthly backups. It takes care of
+the rest
+
+2) Perform the backups using ccollect
+
+3) Copy the ccollect log output to the first backup of the set
+
+a. Keeping the detailed log of each backup is always handy!
+
+4) Build a periodic report and include the real amount of disk used
+
+a. Computes the real amount of disk used (eg: no double counting of
+hard links)
+
+b. Shows the actual amount of data transferred
+
+5) Send an email if there has been errors or warnings
+
+6) Send a periodic email to show transfer size, real backup size, etc
+
+a. Weekly reports are nice!
+
+[...]
+
+- rdu (real du), which computes the real amount of disk used (no
+double/triple counting hard links), same code as in ccollect_mgr.sh.
+
+- S60ccollect_example, an example script to put in etc/init.d to
+add ccollect_mgr to the crontab
+
diff --git a/contrib/ccollect_mgr/S60ccollect_example b/contrib/ccollect_mgr/S60ccollect_example
new file mode 100644
index 0000000..08e839e
--- /dev/null
+++ b/contrib/ccollect_mgr/S60ccollect_example
@@ -0,0 +1,21 @@
+#!/bin/sh
+
+# Standard Linux: put in /etc/init.d
+# Busybox: put in /opt/etc/init.d
+
+# Add ccollect_mgr job to crontab
+# Syntax reminder from crontab:
+# minute 0-59
+# hour 0-23
+# day of month 1-31
+# month 1-12 (or names, see below)
+# day of week 0-7 (0 or 7 is Sun, or use names)
+
+crontab -l | grep -v ccollect_mgr > /tmp/crontab.tmp
+
+# Backup every day at 1 am.
+echo "00 01 * * * /usr/local/sbin/ccollect_mgr.sh -from nas@myemail.net -to me@myemail.net -server relay_or_smtp_server NAS > /usr/local/var/log/ccollect.cron &" >> /tmp/crontab.tmp
+
+crontab /tmp/crontab.tmp
+rm /tmp/crontab.tmp
+
diff --git a/contrib/ccollect_mgr/ccollect_mgr.sh b/contrib/ccollect_mgr/ccollect_mgr.sh
new file mode 100644
index 0000000..fe69286
--- /dev/null
+++ b/contrib/ccollect_mgr/ccollect_mgr.sh
@@ -0,0 +1,542 @@
+#!/bin/sh
+#
+# ----------------------------------------------------------------------------
+# Last update: 2009-12-11
+# By : pdrolet (ccollect_mgr@drolet.name)
+# ----------------------------------------------------------------------------
+# Job manager to the ccollect utilities
+# (ccollect written by Nico Schottelius)
+#
+# Provides the following features
+# 1) Determine the interval (daily/weekly/monthly)
+# 2) Check the estimated file transfer size
+# 3) Perform the backups using ccollect
+# 4) Copy the ccollect log to the first backup of the set
+# 5) Build a periodic report and include the real amount of disk used
+# 6) Send an email if there has been errors or warnings
+# 7) Send a periodic email to show transfer size, real backup size, etc
+# ----------------------------------------------------------------------------
+#
+# This script was written primarily to gain better visibility of backups in
+# an environment where data transfer is limited and so is bandwidth
+# (eg: going through an ISP). The primary target of this script were a
+# DNS323 and a QNAP T209 (eg: Busybox devices and not standard Linux devices)
+# but it should run on any POSIX compliant device.
+#
+# Note: This is one of my first scripts in over a decade... don't use this as a
+# reference (but take a look at ccollect.sh... very well written!)
+# ----------------------------------------------------------------------------
+#
+# -------------------------------------------
+# TO MAKE THIS SCRIPT RUN ON A BUSYBOX DEVICE
+# -------------------------------------------
+# - You may need to install Optware and the following packages:
+# - findutils (to get a find utility which supports printf)
+# - procps (to get a ps utility that is standard)
+# - mini-sendmail (this is what I used to send emails... you could easily
+# modify this to use sendmail, mutt, putmail, etc...).
+# - On DNS323 only: Your Busybox is very limited. For details, see
+# http://wiki.dns323.info/howto:ffp#shells. You need to redirect /bin/sh
+# to the Busybox provided with ffp (Fun Plug). To do this, type:
+# ln -fs /ffp/bin/sh /bin/sh
+#
+# --------------------------------------------------
+# TO MAKE THIS SCRIPT RUN ON A STANDARD LINUX DEVICE
+# --------------------------------------------------
+# - You will need to install mini_sendmail or rewrite the send_email routine.
+#
+# ----------------------------------------------------------------------------
+
+# Send warning if the worst case data transfer will be larger than (in MB)...
+warning_transfer_size=1024
+abort_transfer_size=5120
+
+# Define paths and default file names
+ADD_TO_PATH="/opt/bin:/opt/sbin:/usr/local/bin:/usr/local/sbin"
+CCOLLECT="ccollect.sh"
+CCOLLECT_CONF="/usr/local/etc/ccollect"
+
+PS="/opt/bin/ps"
+FIND="/opt/bin/find"
+
+TEMP_LOG="${CCOLLECT_CONF}"/log.$$
+per_report="${CCOLLECT_CONF}/periodic_report.log"
+tmp_report="/tmp/ccollect.$$"
+tmp_mgr="/tmp/ccollect_mgr.$$"
+tmp_email="/tmp/email.$$"
+
+backups_not_found=""
+
+# Sub routines...
+
+send_email()
+{
+ # Send a simple email using mini-sendmail.
+
+ msg_body_file="$1"
+ shift
+
+ # ------------------------------
+ # Quit if we can't send an email
+ # ------------------------------
+ if [ "${to}" == "" ] || [ "${mail_server}" == "" ]; then
+ echo "Missing mail server or destination email. No email sent with subject: $@"
+ exit 1
+ fi
+
+ echo from: "${from}" > "${tmp_email}"
+ echo subject: "$@" >> "${tmp_email}"
+ echo to: "${to}" >> "${tmp_email}"
+ echo cc: >> "${tmp_email}"
+ echo bcc: >> "${tmp_email}"
+ echo "" >> "${tmp_email}"
+ echo "" >> "${tmp_email}"
+ cat "${msg_body_file}" >> "${tmp_email}"
+ echo "" >> "${tmp_email}"
+
+ echo ""
+ echo Sending email to ${to} to report the following:
+ echo -----------------------------------------------
+ cat "${tmp_email}"
+ cat "${tmp_email}" | mini_sendmail -f"${from}" -s"${mail_server}" "${to}"
+ rm "${tmp_email}"
+}
+
+remove_source()
+{
+ remove_no=$1
+ eval echo Removing backup \"\$source_$1\"
+
+ no_sources="$(( ${no_sources} - 1 ))"
+ while [ "${remove_no}" -lt "${no_sources}" ]; do
+ eval source_${remove_no}=\"\$source_$(( ${remove_no} + 1))\"
+ eval ddir_${remove_no}=\"\$ddir_$(( ${remove_no} + 1))\"
+ remove_no=$(( ${remove_no} + 1 ))
+ done
+}
+
+compute_rdu()
+{
+ kdivider=1
+ find_options=""
+
+ while [ "$#" -ge 1 ]; do
+ case "$1" in
+ -m)
+ kdivider=1024
+ ;;
+ -g)
+ kdivider=1048576
+ ;;
+ *)
+ break
+ ;;
+ esac
+ shift
+ done
+
+ if [ "$#" == 0 ]; then
+ rdu=0
+ return 1
+ fi
+
+ # ------------------------------------------------------------------------------------------------------
+ # Compute the real disk usage (eg: hard links do files outside the backup set don't count)
+ # ------------------------------------------------------------------------------------------------------
+ # 1) Find selected files and list link count, inodes, file type and size
+ # 2) Sort (sorts on inodes since link count is constant per inode)
+ # 3) Merge duplicates using uniq
+ # (result is occurence count, link count, inode, file type and size)
+ # 4) Use awk to sum up the file size of each inodes when the occurence count
+ # and link count are the same. Use %k for size since awk's printf is 32 bits
+ # 5) Present the result with additional dividers based on command line parameters
+ #
+
+ rdu=$(( ( `"${FIND}" "$@" -printf '%n %i %y %k \n' \
+ | sort -n \
+ | uniq -c \
+ | awk '{ if (( $1 == $2 ) || ($4 == "d")) { sum += $5; } } END { printf "%u\n",(sum); }'` \
+ + ${kdivider} - 1 ) / ${kdivider} ))
+}
+
+check_running_backups()
+{
+ # Check if a backup is already ongoing. If so, skip and send email
+ # Don't use the ccollect marker as this is no indication if it is still running
+
+ source_no=0
+ while [ "${source_no}" -lt "${no_sources}" ]; do
+ eval backup=\"\$source_${source_no}\"
+
+ PID=$$
+ "${PS}" -e -o pid,ppid,args 2> /dev/null \
+ | grep -v -e grep -e "${PID}.*ccollect.*${backup}" \
+ | grep "ccollect.*${backup}" > "${tmp_mgr}" 2> /dev/null
+ running_proc=`cat "${tmp_mgr}" | wc -l`
+
+ if [ ${running_proc} -gt 0 ]; then
+ # Remove backup from list
+ running_backups="${running_backups}${backup} "
+
+ echo "Process already running:"
+ cat "${tmp_mgr}"
+
+ remove_source ${source_no}
+ else
+ source_no=$(( ${source_no} + 1 ))
+ fi
+ rm "${tmp_mgr}"
+ done
+
+ if [ "${running_backups}" != "" ]; then
+ echo "skipping ccollect backups already running: ${running_backups}" | tee "${tmp_report}"
+ send_email "${tmp_report}" "WARNING - skipping ccollect backups already running: ${running_backups}"
+ rm "${tmp_report}"
+ fi
+}
+
+find_interval()
+{
+ # ----------------------------------------------------
+ # Find interval for ccollect backup.
+ # optional parameters:
+ # - Day of the week to do weekly backups
+ # - Do monthly instead of weekly on the Nth week
+ # ----------------------------------------------------
+
+ weekly_backup="$1"
+ monthly_backup="$2"
+
+ weekday=`date "+%w"`
+ if [ ${weekday} -eq ${weekly_backup} ]; then
+ dom=`date "+%e"`
+ weeknum=$(( ( ${dom} / 7 ) + 1 ))
+ if [ "${weeknum}" -eq "${monthly_backup}" ]; then
+ interval=monthly
+ else
+ interval=weekly
+ fi
+ else
+ interval=daily
+ fi
+}
+
+precheck_transfer_size()
+{
+ # Check the estimated (worst case) transfer size and send email if larger than certain size
+ # Abort backup if total transfer is larger than maximum limit (ex: an error somewhere
+ # requires to do full backup and not incremental, which could blow the quota with ISP)
+ #
+ # Be nice and add error checking one day...
+
+ source_no=0
+ while [ "${source_no}" -lt "${no_sources}" ]; do
+ eval backup=\"\$source_${source_no}\"
+ eval ddir=\"\$ddir_${source_no}\"
+
+ last_dir="$(ls -tcp1 "${ddir}" | grep '/$' | head -n 1)"
+ sdir="$(cat "${CCOLLECT_CONF}"/sources/"${backup}"/source)"; ret="$?"
+ if [ -f "${CCOLLECT_CONF}"/sources/"${backup}"/exclude ]; then
+ exclude="--exclude-from=${CCOLLECT_CONF}/sources/${backup}/exclude";
+ else
+ exclude=""
+ fi
+ rsync_options=""
+ if [ -f "${CCOLLECT_CONF}"/sources/"${backup}"/rsync_options ]; then
+ while read line; do
+ rsync_options="${rsync_options} ${line}"
+ done < ${CCOLLECT_CONF}/sources/${backup}/rsync_options
+ fi
+
+ rsync -n -a --delete --stats ${rsync_options} "${exclude}" "${sdir}" "${ddir}/${last_dir}" > "${tmp_report}"
+
+ tx_rx=`cat "${tmp_report}" | grep "Total transferred file size" | \
+ awk '{ { tx += $5 } } END { printf "%u",(((tx)+1024*1024-1)/1024/1024); }'`
+ total_xfer=$(( ${total_xfer} + ${tx_rx} ))
+
+ source_no=$(( ${source_no} + 1 ))
+ done
+
+ echo "Transfer estimation for${ccollect_backups}: ${total_xfer} MB"
+
+ if [ ${total_xfer} -gt ${abort_transfer_size} ]; then
+ # --------------------------------------------------
+ # Send an error if transfer is larger than max limit
+ # --------------------------------------------------
+ # Useful to detect potential issues when there is transfer quota (ex: with ISP)
+
+ echo "Data transfer larger than ${abort_transfer_size} MB is expected for${ccollect_backups}" >> "${tmp_report}"
+ echo "** BACKUP ABORTED **" >> "${tmp_report}"
+
+ send_email "${tmp_report}" "ERROR: aborted ccollect for${ccollect_backups} -- Estimated Tx+Rx: ${total_xfer} MB"
+ rm "${tmp_report}"
+ exit 1
+ elif [ ${total_xfer} -gt ${warning_transfer_size} ]; then
+ # --------------------------------------------------
+ # Send a warning if transfer is expected to be large
+ # --------------------------------------------------
+ # Useful to detect potential issues when there is transfer quota (ex: with ISP)
+
+ echo "Data transfer larger than ${warning_transfer_size} MB is expected for${ccollect_backups}" > "${tmp_report}"
+
+ send_email "${tmp_report}" "WARNING ccollect for${ccollect_backups} -- Estimated Tx+Rx: ${total_xfer} MB"
+ rm "${tmp_report}"
+ fi
+}
+
+build_backup_dir_list()
+{
+ source_no=0
+ while [ "${source_no}" -lt "${no_sources}" ]; do
+ eval backup=\"\$source_${source_no}\"
+ eval ddir=\"\$ddir_${source_no}\"
+
+ backup_dir="`cat "${TEMP_LOG}" \
+ | grep "\[${backup}\] .*: Creating.* ${ddir}" \
+ | head -n 1 \
+ | sed 's/[^\/]*\//\//; s/ \.\.\.//'`"
+
+ if [ ! -d "${backup_dir}" ]; then
+ backups_not_found="${backups_not_found}\"${backup}\" "
+ echo -n "Backup directory for \"${backup}\" not found. "
+ remove_source "${source_no}"
+ else
+ eval export backup_dir_list_${source_no}="${backup_dir}"
+# eval echo Backup Dir List: \"\$backup_dir_list_${source_no}\"
+ source_no=$(( ${source_no} + 1 ))
+ fi
+ done
+}
+
+move_log()
+{
+ if [ "${no_sources}" -gt 0 ]; then
+ eval log_file=\"\$backup_dir_list_1\"/ccollect.log
+ mv "${TEMP_LOG}" "${log_file}"
+ echo New Log Location: "${log_file}"
+ else
+ echo "WARNING: none of the backup set have been created"
+ log_file="${TEMP_LOG}"
+ fi
+}
+
+send_report()
+{
+ # Analyze log for periodic report and for error status report
+ cat "${log_file}" | ccollect_analyse_logs.sh iwe > "${tmp_report}"
+
+ # -------------------------
+ # Build the periodic report
+ # -------------------------
+ # Compute the total number of MB sent and received for all the backup sets
+ tx_rx=`cat "${tmp_report}" | \
+ grep 'sent [[:digit:]]* bytes received [0-9]* bytes' | \
+ awk '{ { tx += $3 } { rx += $6} } END \
+ { printf "%u",(((tx+rx)+(1024*1024)-1)/1024/1024); }'`
+ current_date=`date +'20%y/%m/%d %Hh%M -- '`
+
+ # ------------------------------------------
+ # Get the real disk usage for the backup set
+ # ------------------------------------------
+ total_rdu=0
+ source_no=0
+ while [ "${source_no}" -lt "${no_sources}" ]; do
+ eval backup_dir=\"\$backup_dir_list_${source_no}\"
+ compute_rdu -m "${backup_dir}"
+ total_rdu=$(( ${total_rdu} + ${rdu} ))
+ source_no=$(( ${source_no} + 1 ))
+ done
+
+ # ---------------------------------------------------------
+ # Get the disk usage for all backups of each backup sets...
+ # ** BE PATIENT!!! **
+ # ---------------------------------------------------------
+ historical_rdu=0
+ source_no=0
+ while [ "${source_no}" -lt "${no_sources}" ]; do
+ eval backup_dir=\"\$ddir_${source_no}\"
+ compute_rdu -m "${backup_dir}"
+ historical_rdu=$(( ${historical_rdu} + ${rdu} ))
+ source_no=$(( ${source_no} + 1 ))
+ done
+
+ historical_rdu=$(( (${historical_rdu}+1023) / 1024 ))
+
+ if [ "${no_sources}" -gt 0 ]; then
+ ccollect_backups=""
+ else
+ ccollect_backups="(none performed) "
+ fi
+
+ source_no=0
+ while [ "${source_no}" -lt "${no_sources}" ]; do
+ eval backup=\"\$source_${source_no}\"
+ ccollect_backups="${ccollect_backups}\"${backup}\" "
+ source_no=$(( ${source_no} + 1 ))
+ done
+
+ echo ${current_date} Tx+Rx: ${tx_rx} MB -- \
+ Disk Usage: ${total_rdu} MB -- \
+ Backup set \(${interval}\):${ccollect_backups} -- \
+ Historical backups usage: ${historical_rdu} GB >> "${per_report}"
+ echo "Total Data Transfer: ${tx_rx} MB -- Total Disk Usage: ${total_rdu} MB -- Total Historical backups usage: ${historical_rdu} GB"
+
+ # ----------------------------------------
+ # Send a status email if there is an error
+ # ----------------------------------------
+ ccollect_we=`cat "${log_file}" | ccollect_analyse_logs.sh we | wc -l`
+ if [ ${ccollect_we} -ge 1 ]; then
+ send_email "${tmp_report}" "ERROR ccollect for${ccollect_backups} -- Tx+Rx: ${tx_rx} MB"
+ fi
+
+ # --------------------
+ # Send periodic report
+ # --------------------
+ if [ ${report_interval} == ${interval} ] || [ ${interval} == "monthly" ]; then
+
+ # Make reporting atomic to handle concurrent ccollect_mgr instances
+ mv "${per_report}" "${per_report}".$$
+ cat "${per_report}".$$ >> "${per_report}".history
+
+ # Calculate total amount of bytes sent and received
+ tx_rx=`cat "${per_report}".$$ | \
+ awk '{ { transfer += $5 } } END \
+ { printf "%u",(transfer); }'`
+
+ # Send email
+ send_email "${per_report}.$$" "${report_interval} ccollect status for${ccollect_backups} -- Tx+Rx: ${tx_rx} MB"
+ rm "${per_report}.$$"
+ fi
+
+ rm "${tmp_report}"
+}
+
+# ------------------------------------------------
+# Add to PATH in case we're launching from crontab
+# ------------------------------------------------
+
+PATH="${ADD_TO_PATH}:${PATH}"
+
+# --------------
+# Default Values
+# --------------
+
+# Set on which interval status emails are sent (daily, weekly, monthly)
+report_interval=weekly
+
+# Set day of the week for weekly backups. Default is Monday
+# 0=Sun, 1=Mon, 2=Tue, 3=Wed, 4=Thu, 5=Fri, 6=Sat
+weekly_backup=1
+
+# Set the monthly backup interval. Default is 4th Monday of every month
+monthly_backup=4
+
+# ---------------------------------
+# Parse command line
+# ---------------------------------
+
+show_help=0
+export no_sources=0
+
+while [ "$#" -ge 1 ]; do
+ case "$1" in
+ -help)
+ show_help=1
+ ;;
+ -from)
+ from="$2"
+ shift
+ ;;
+ -to)
+ to="$2"
+ shift
+ ;;
+ -server|mail_server)
+ mail_server="$2"
+ shift
+ ;;
+ -weekly)
+ weekly_backup="$2"
+ shift
+ ;;
+ -monthly)
+ monthly_backup="$2"
+ shift
+ ;;
+ -warning_size)
+ warning_transfer_size="$2"
+ shift
+ ;;
+ -abort_size)
+ abort_transfer_size="$2"
+ shift
+ ;;
+ -report)
+ report_interval="$2"
+ shift
+ ;;
+ -*)
+ ccollect_options="${ccollect_options}$1 "
+ ;;
+ daily|weekly|monthly)
+ ;;
+ *)
+ eval backup=\"\$1\"
+ ddir="$(cat "${CCOLLECT_CONF}"/sources/"${backup}"/destination)"; ret="$?"
+ if [ "${ret}" -ne 0 ]; then
+ echo "Destination ${CCOLLECT_CONF}/sources/${backup}/destination is not readable... Skipping."
+ else
+ ccollect_backups="${ccollect_backups} \"$1\""
+ eval export source_${no_sources}=\"\$1\"
+ eval export ddir_${no_sources}="${ddir}"
+# eval echo Adding source \"\$source_${no_sources}\" -- \"\$ddir_${no_sources}\"
+ no_sources="$(( ${no_sources} + 1 ))"
+ fi
+ ;;
+ esac
+ shift
+done
+
+if [ "${no_sources}" -lt 1 ] || [ ${show_help} -eq 1 ]; then
+ echo ""
+ echo "$0: Syntax"
+ echo " -help This help"
+ echo " -from From email address (ex.: -from nas@home.com)"
+ echo " -to Send email to this address (ex.: -to me@home.com)"
+ echo " -server SMTP server used for sending emails"
+ echo " -weekly Define which day of the week is the weekly backup"
+ echo " Default is ${weekly_backup}. Sunday = 0, Saturday = 6"
+ echo " -monthly Define on which week # is the monthly backup"
+ echo " Default is ${monthly_backup}. Value = 1 to 5"
+ echo " -report Frequency of report email (daily, weekly or monthly)"
+ echo " Default is ${report_interval}"
+ echo " -warning_size Send a warning email if the transfer size exceeds this"
+ echo " Default is ${warning_transfer_size} MB"
+ echo " -abort_size Abort and send an error email if the transfer size exceeds this"
+ echo " Default is ${abort_transfer_size} MB"
+ echo ""
+ echo " other parameters are transferred to ccollect"
+ echo ""
+ exit 0
+fi
+
+#echo Backup sets:"${ccollect_backups}"
+check_running_backups
+
+if [ "${no_sources}" -lt 1 ]; then
+ echo "No backup sets are reachable"
+ exit 1
+fi
+
+find_interval ${weekly_backup} ${monthly_backup}
+echo Interval: ${interval}
+
+precheck_transfer_size
+
+"${CCOLLECT}" ${ccollect_options} ${interval} ${ccollect_backups} | tee "${TEMP_LOG}"
+
+build_backup_dir_list
+move_log
+
+send_report
+
diff --git a/contrib/ccollect_mgr/rdu b/contrib/ccollect_mgr/rdu
new file mode 100644
index 0000000..aa5ffe4
--- /dev/null
+++ b/contrib/ccollect_mgr/rdu
@@ -0,0 +1,65 @@
+#!/bin/sh
+#
+# -------------------------------------------------------------
+# Get the real disk usage for a group of selected files
+#
+# This script counts the size of the files and directories
+# listed, but excludes files that have hard links referenced outside
+# the list.
+#
+# The underlying objective of this script is to report the
+# real amount of disk used for backup solutions that are heavily
+# using hard links to save disk space on identical files (I use
+# ccollect, but this likely works with rsnapshot)
+# -------------------------------------------------------------
+# 20091002 - initial release - pdrolet (rdu@drolet.name)
+
+# --------------------
+# Parse options
+# --------------------
+# Known problem:
+# - Command line cannot get a directory with a space in it
+#
+kdivider=1
+find_options=""
+while [ "$#" -ge 1 ]; do
+ case "$1" in
+ -m)
+ kdivider=1024
+ ;;
+ -g)
+ kdivider=1048576
+ ;;
+ -h|--help)
+ echo
+ echo $0: \ \[options below and any \"find\" options\]
+ echo \ \ -m: result in mega bytes \(rounded up\)
+ echo \ \ -g: result in giga bytes \(rounded up\)
+ echo \ \ -h: this help
+ echo
+ exit 0
+ ;;
+ *)
+ find_options="${find_options} $1"
+ ;;
+ esac
+ shift
+done
+
+# ------------------------------------------------------------------------------------------------------
+# Compute the size
+# ------------------------------------------------------------------------------------------------------
+# 1) Find selected files and list link count, inodes, file type and size
+# 2) Sort (sorts on inodes since link count is constant per inode)
+# 3) Merge duplicates using uniq
+# (result is occurence count, link count, inode, file type and size)
+# 4) Use awk to sum up the file size of each inodes when the occurence count
+# and link count are the same. Use %k for size since awk's printf is 32 bits
+# 5) Present the result with additional dividers based on command line parameters
+#
+echo $((( `find ${find_options} -printf '%n %i %y %k \n' \
+ | sort -n \
+ | uniq -c \
+ | awk '{ if (( $1 == $2 ) || ($4 == "d")) { sum += $5; } } END { printf "%u\n",(sum); }'` \
+ + ${kdivider} -1 ) / ${kdivider} ))
+
diff --git a/contrib/exclude_lists/debian b/contrib/exclude_lists/debian
new file mode 100644
index 0000000..5f43d88
--- /dev/null
+++ b/contrib/exclude_lists/debian
@@ -0,0 +1 @@
+/var/cache/apt/archives/*
diff --git a/contrib/thorsten_start_ccollect/start_ccollect b/contrib/thorsten_start_ccollect/start_ccollect
new file mode 100644
index 0000000..5a774ed
--- /dev/null
+++ b/contrib/thorsten_start_ccollect/start_ccollect
@@ -0,0 +1,72 @@
+#!/bin/bash
+
+# Backup-Ordner
+BACKUP_DIR="/mnt"
+
+# ccollect_logwrapper-Skript
+CCOLLECT_LOGWRAPPER="./ccollect_logwrapper.sh"
+
+# letzte Sicherung für Gruppe daily, weekly und monthly in Backup-Ordner ermitteln
+DATE_DAILY=` ls $BACKUP_DIR | grep daily | sort -r | sed -e'2,$d' | cut -f 2 -d.`
+DATE_WEEKLY=` ls $BACKUP_DIR | grep weekly | sort -r | sed -e'2,$d' | cut -f 2 -d.`
+DATE_MONTHLY=`ls $BACKUP_DIR | grep monthly | sort -r | sed -e'2,$d' | cut -f 2 -d.`
+DATE_YEARLY=` ls $BACKUP_DIR | grep yearly | sort -r | sed -e'2,$d' | cut -f 2 -d.`
+
+# Falls Leerstring diesen mit "altem Datum" füllen
+if [ -z "$DATE_DAILY" ] ; then DATE_DAILY="20000101-0101" ; fi
+if [ -z "$DATE_WEEKLY" ] ; then DATE_WEEKLY="20000101-0101" ; fi
+if [ -z "$DATE_MONTHLY" ] ; then DATE_MONTHLY="20000101-0101" ; fi
+if [ -z "$DATE_YEARLY" ] ; then DATE_YEARLY="20000101-0101" ; fi
+
+echo current: $DATE_CUR
+echo last daily: $DATE_DAILY
+echo last weekly: $DATE_WEEKLY
+echo last monthly: $DATE_MONTHLY
+echo last yearly: $DATE_YEARLY
+
+# Datum date-konform wandeln
+# Achtung: mit bash - nicht mit sh möglich!
+# Alternativ mit expr... konvertieren
+
+DATE_DAILY=${DATE_DAILY:0:4}-${DATE_DAILY:4:2}-${DATE_DAILY:6:2}" "${DATE_DAILY:9:2}:${DATE_DAILY:11:2}:00
+DATE_WEEKLY=${DATE_WEEKLY:0:4}-${DATE_WEEKLY:4:2}-${DATE_WEEKLY:6:2}" "${DATE_WEEKLY:9:2}:${DATE_WEEKLY:11:2}:00
+DATE_MONTHLY=${DATE_MONTHLY:0:4}-${DATE_MONTHLY:4:2}-${DATE_MONTHLY:6:2}" "${DATE_MONTHLY:9:2}:${DATE_MONTHLY:11:2}:00
+DATE_YEARLY=${DATE_YEARLY:0:4}-${DATE_YEARLY:4:2}-${DATE_YEARLY:6:2}" "${DATE_YEARLY:9:2}:${DATE_YEARLY:11:2}:00
+DATE_CUR=`date "+%Y-%m-%d %T"`
+
+# Bei Bedarf Backups durchführen
+
+if [ `date --date "$DATE_YEARLY" +%Y` -ne `date --date "$DATE_CUR" +%Y` ]
+then
+
+ # Jahresbackup erzeugen
+ echo yearly backup started
+ source $CCOLLECT_LOGWRAPPER -a yearly
+
+elif [ `date --date "$DATE_MONTHLY" +%Y%m` -ne `date --date "$DATE_CUR" +%Y%m` ]
+then
+
+ # Monatsbackup erzeugen
+ echo monthly backup started
+ source $CCOLLECT_LOGWRAPPER -a monthly
+
+elif [ `date --date "$DATE_WEEKLY" +%Y%W` -ne `date --date "$DATE_CUR" +%Y%W` ]
+then
+
+ # Wochenbackup erzeugen
+ echo weekly backup started
+ source $CCOLLECT_LOGWRAPPER -a weekly
+
+elif [ `date --date "$DATE_DAILY" +%Y%j` -ne `date --date "$DATE_CUR" +%Y%j` ]
+then
+
+ # Tagesbackup erzeugen
+ echo daily backup started
+ source $CCOLLECT_LOGWRAPPER -a daily
+
+else
+
+ # nichts zu tun
+ echo nothing to do
+
+fi
diff --git a/doc/braindumps/LOCAL_vs._REMOTE b/doc/braindumps/LOCAL_vs._REMOTE
deleted file mode 100644
index f2a40b7..0000000
--- a/doc/braindumps/LOCAL_vs._REMOTE
+++ /dev/null
@@ -1,35 +0,0 @@
- to Local to Remote
- backup destination is exiting
- pre/postexec runs locally
- --link-dest?
- /delete_incomplete - can chech ddir
-
- can check destination dir
- -> dooooooo it before!
-
-
- remote_host!
- => rddir_ls:
- incomplete: ls -1 "${INTERVAL}"*".${c_marker}"
-
- host support?
- ssh-host-support?
-
- => ssh_host => save to host
- execute commands there!
-
- rm!
-
- --link-dest?
-
- --link-dest=DIR
- => remote dirs, rsync remote
- => works!!!!
-
- local_destination
- remote_destination
- => remote_*
-
- both
- configuration is local (what to where)
-
diff --git a/doc/braindumps/README b/doc/braindumps/README
deleted file mode 100644
index 973addc..0000000
--- a/doc/braindumps/README
+++ /dev/null
@@ -1 +0,0 @@
-Do not read the files in this directory
diff --git a/doc/ccollect-restoring.text b/doc/ccollect-restoring.text
index 7bb29ea..8a3e11b 100644
--- a/doc/ccollect-restoring.text
+++ b/doc/ccollect-restoring.text
@@ -30,7 +30,7 @@ Boot the system to be rescued from a media that contains low level tools
for your OS (like partitioning, formatting) and the necessary tools
(ssh, tar or rsync).
Use
-- create the necessary partition table (or however it is called
+- create the necessary volumes (like partitions, slices, ...)
Get a live-cd, that ships with
- rsync / tar
diff --git a/doc/ccollect.text b/doc/ccollect.text
index 720bcc8..f075b9a 100644
--- a/doc/ccollect.text
+++ b/doc/ccollect.text
@@ -1,7 +1,7 @@
ccollect - Installing, Configuring and Using
============================================
Nico Schottelius
-0.8, for ccollect 0.8, Initial Version from 2006-01-13
+2.10, for ccollect 2.10, Initial Version from 2006-01-13
:Author Initials: NS
@@ -21,11 +21,12 @@ Supported and tested operating systems and architectures
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
`ccollect` was successfully tested on the following platforms:
-- GNU/Linux on amd64/hppa/i386/ppc/ARM
- FreeBSD on amd64/i386
-- Mac OS X 10.5
+- GNU/Linux on amd64/arm/hppa/i386/ppc
+- Mac OS X 10.5
- NetBSD on alpha/amd64/i386/sparc/sparc64
- OpenBSD on amd64
+- Windows by installing Cygwin, OpenSSH and rsync
It *should* run on any Unix that supports `rsync` and has a POSIX-compatible
bourne shell. If your platform is not listed above and you have it successfully
@@ -72,8 +73,19 @@ Incompatibilities and changes
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+Versions 0.9 and 1.0
+^^^^^^^^^^^^^^^^^^^^
+- Added "Error: " prefix in _exit_err()
+
+Versions 0.8 and 0.9
+^^^^^^^^^^^^^^^^^^^^
+- Renamed script to ccollect (.sh is not needed)
+- Removed feature to backup to a host via ccollect, added new tool
+ (FIXME: insert name here) that takes care of this via tunnel
+- Perhaps creating subdirectory of source name (idea from Stefan Schlörholz)
+
Versions 0.7 and 0.8
-^^^^^^^^^^^^^^^^^^^^^
+^^^^^^^^^^^^^^^^^^^^
.The argument order changed:
- Old: " [args] "
@@ -91,7 +103,7 @@ change for you.
- New: Options in $CCOLLECT_CONF/defaults are used as defaults (see below)
Versions 0.6 and 0.7
-^^^^^^^^^^^^^^^^^^^^^
+^^^^^^^^^^^^^^^^^^^^
.The format of `destination` changed:
- Before 0.7 it was a (link to a) directory
- As of 0.7 it is a textfile containing the destination
@@ -103,7 +115,7 @@ You can update your configuration using `tools/config-pre-0.7-to-0.7.sh`.
Versions 0.5 and 0.6
-^^^^^^^^^^^^^^^^^^^^^
+^^^^^^^^^^^^^^^^^^^^
.The format of `rsync_options` changed:
- Before 0.6 it was whitespace delimeted
- As of 0.6 it is newline seperated (so you can pass whitespaces to `rsync`)
@@ -120,7 +132,7 @@ XXXXX (- comes before digit).
Versions 0.4 and 0.5
-^^^^^^^^^^^^^^^^^^^^^
+^^^^^^^^^^^^^^^^^^^^
Not a real incompatibilty, but seems to fit in this section:
.0.5 does *NOT* require
@@ -159,7 +171,7 @@ For those who do not want to read the whole long document:
--------------------------------------------------------------------------------
# get latest ccollect tarball from http://www.nico.schottelius.org/software/ccollect/
# replace value for CCV with the current version
-export CCV=0.8
+export CCV=0.8.1
#
# replace 'wget' with 'fetch' on bsd
@@ -264,8 +276,10 @@ After having installed and used ccollect, report success using
Configuring
-----------
For configuration aid have a look at the above mentioned tools, which can assist
-you quite well. When you are successfully using `ccollect`, report success using
-`tools/report_success.sh`.
+you quite well. When you are successfully using `ccollect`, I would be happy if
+you add a link to your website, stating "I backup with ccollect", which points
+to the ccollect homepage. So more people know about ccollect, use it and
+improve it. You can also report success using `tools/report_success.sh`.
Runtime options
@@ -333,6 +347,15 @@ If you add '$CCOLLECT_CONF/defaults/`pre_exec`' or
will start `pre_exec` before the whole backup process and
`post_exec` after backup of all sources is done.
+If `pre_exec` exits with a non-zero return code, the whole backup
+process will be aborted.
+
+The `pre_exec` and `post_exec` script can access the following exported variables:
+
+- 'INTERVAL': the interval selected (`daily`)
+- 'no_sources': number of sources to backup (`2`)
+- 'source_$no': name of the source, '$no' starts at 0 (`$source_0`)
+
The following example describes how to report free disk space in
human readable format before and after the whole backup process:
-------------------------------------------------------------------------
@@ -597,6 +620,16 @@ respectively after doing the backup for *this specific* source.
If you want to have pre-/post-exec before and after *all*
backups, see above for general configuration.
+If `pre_exec` exits with a non-zero return code, the backup
+process of `this source` will be aborted (i.e. backup skipped).
+
+The `post_exec` script can access the following exported variables from
+ccollect:
+
+- 'name': name of the source that is being backed up
+- 'destination_name': contains the base directory name (`daily.20091031-1013.24496`)
+- 'destination_dir': full path (`/tmp/ccollect/daily.20091031-1013.24496`)
+- 'destination_full': like 'destination_dir', but prepended with the remote_host, if set (`host:/tmp/ccollect/daily.20091031-1013.24496` or `/tmp/ccollect/daily.20091031-1013.24496`)
Example:
--------------------------------------------------------------------------------
@@ -620,20 +653,22 @@ If you create the file `delete_incomplete` in a source specification directory,
was interrupted) and remove them. Without this file `ccollect` will only warn
the user.
+
Detailed description of "rsync_failure_codes"
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
If you have the file `rsync_failure_codes` in your source configuration
directory, it should contain a newline-separated list of numbers representing
-rsync exit codes. If rsync exits with any code in this list, a marker will
+rsync exit codes. If rsync exits with any code in this list, a marker will
be left in the destination directory indicating failure of this backup. If
you have enabled delete_incomplete, then this backup will be deleted during
the next ccollect run on the same interval.
+
Detailed description of "mtime"
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
By default, ccollect.sh chooses the most recent backup directory for cloning or
the oldest for deletion based on the directory's last change time (ctime).
-With this option, the sorting is done based on modification time (mtime). With
+With this option, the sorting is done based on modification time (mtime). With
this version of ccollect, the ctime and mtime of your backups will normally
be the same and this option has no effect. However, if you, for example, move
your backups to another hard disk using cp -a or rsync -a, you should use this
@@ -642,6 +677,7 @@ option because the ctimes are not preserved during such operations.
If you have any backups in your repository made with ccollect version 0.7.1 or
earlier, do not use this option.
+
Detailed description of "quiet_if_down"
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
By default, ccollect.sh emits a series of error messages if a source is not
@@ -699,12 +735,16 @@ Using source names or interval in pre_/post_exec scripts
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The pre-/post_exec scripts can access some internal variables from `ccollect`:
-- INTERVAL: The interval specified on the command line
-- no_sources: number of sources
-- source_$NUM: the name of the source
-- name: the name of the currently being backuped source (not available for
+- 'INTERVAL': The interval specified on the command line
+- 'no_sources': number of sources
+- 'source_$NUM': the name of the source
+- 'name': the name of the currently being backuped source (not available for
generic pre_exec script)
+Only available for `post_exec`:
+
+- 'remote_host': name of host we backup to (empty if unused)
+
Using rsync protocol without ssh
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -825,7 +865,7 @@ If you want to see what changed between two backups, you can use
[12:00] u0255:ddba034.netstream.ch# rsync -n -a --delete --stats --progress daily.20080324-0313.17841/ daily.20080325-0313.31148/
--------------------------------------------------------------------------------
This results in a listing of changes. Because we pass -n to rsync no transfer
-is made (i.e. report only mode)"
+is made (i.e. report only mode).
This hint was reported by Daniel Aubry.
@@ -943,6 +983,31 @@ you can enter your password (have a look at screen(1), especially "C-a M"
and "C-a _", for more information).
+Backup fails, if autofs is running, but sources not reachable
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+If you are trying to backup a system containing paths that are managed
+by autofs, you may run into this error:
+
+-------------------------------------------------------------------------------
+2009-12-01-23:14:15: ccollect 0.8.1: Beginning backup using interval monatlich
+[ikn] 2009-12-01-23:14:15: Beginning to backup
+[ikn] 2009-12-01-23:14:15: Executing /home/users/nico/ethz/ccollect/sources/ikn/pre_exec ...
+Enter LUKS passphrase:
+[ikn] Command successful.
+[ikn] key slot 0 unlocked.
+[ikn] 2009-12-01-23:14:23: Finished /home/users/nico/ethz/ccollect/sources/ikn/pre_exec (return code 0). [ikn] directory has vanished: "/home/users/nico/privat/firmen/ethz/autofs/projects"
+[ikn] directory has vanished: "/home/users/nico/privat/firmen/ethz/autofs/scratch"
+[ikn] directory has vanished: "/home/users/nico/privat/firmen/ethz/autofs/sgscratch"
+[ikn] directory has vanished: "/home/users/nico/privat/firmen/ethz/autofs/supp"
+[ikn] directory has vanished: "/home/users/nico/privat/firmen/ethz/autofs/sysadmin"
+[ikn] rsync warning: some files vanished before they could be transferred (code 24) at main.c(1057) [sender=3.0.6]
+[ikn] 2009-12-01-23:44:23: Source / is not readable. Skipping.
+-------------------------------------------------------------------------------
+
+Thus, if you are unsure whether autofs paths can be mounted during backup,
+stop autofs in pre_exec and reenable it in post_exec.
+
+
Examples
--------
@@ -1125,12 +1190,12 @@ rsync -av -H --delete /mnt/archiv/ "$DDIR/archiv/"
-------------------------------------------------------------------------
-Processes running when doing ccollect -p
+Processes running when doing ccollect -j
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Truncated output from `ps axuwwwf`:
-------------------------------------------------------------------------
- S+ 11:40 0:00 | | | \_ /bin/sh /usr/local/bin/ccollect.sh daily -p ddba034 ddba045 ddba046 ddba047 ddba049 ddna010 ddna011
+ S+ 11:40 0:00 | | | \_ /bin/sh /usr/local/bin/ccollect.sh daily -j ddba034 ddba045 ddba046 ddba047 ddba049 ddna010 ddna011
S+ 11:40 0:00 | | | \_ /bin/sh /usr/local/bin/ccollect.sh daily ddba034
S+ 11:40 0:00 | | | | \_ /bin/sh /usr/local/bin/ccollect.sh daily ddba034
R+ 11:40 23:40 | | | | | \_ rsync -a --delete --numeric-ids --relative --delete-excluded --link-dest=/home/server/backup/ddba034
diff --git a/doc/changes/2.0 b/doc/changes/2.0
new file mode 100644
index 0000000..2fee139
--- /dev/null
+++ b/doc/changes/2.0
@@ -0,0 +1,16 @@
+ * Introduce -j option for max parallel jobs, deprecate -p (Darko Poljak)
+ * Add locking (Darko Poljak)
+ * Fix source-is-up check (Nikita Koshikov)
+ * Fix some minor command line parsing issues (Nico Schottelius)
+ * Correct output, if configuration is not in cconfig format (Nico Schottelius)
+ * Minor code cleanups and optimisations (Nico Schottelius)
+ * ccollect_analyse_logs.sh traps more errors and warnings (Patrick Drolet)
+ * Remove -v for mkdir and rm, as they are not POSIX (Patrick Drolet)
+ * Export destination_* to source specific post_exec (Nico Schottelius)
+ * Update documentation regarding exported variables (Nico Schottelius)
+ * Simplify time calculation (Nico Schottelius)
+ * Document pre_exec error handling (Nico Schottelius)
+ * Added start script (Thorsten Elle)
+ * Document autofs hint (Nico Schottelius)
+ * Speedup source-is-up check and remove --archive (Nico Schottelius)
+ * Removed support for remote backup (see doc) (Nico Schottelius)
diff --git a/doc/changes/2.1 b/doc/changes/2.1
new file mode 100644
index 0000000..ec42d4b
--- /dev/null
+++ b/doc/changes/2.1
@@ -0,0 +1 @@
+* Add options for stdout, file and syslog logging (Darko Poljak)
diff --git a/doc/changes/2.10 b/doc/changes/2.10
new file mode 100644
index 0000000..1d1e85d
--- /dev/null
+++ b/doc/changes/2.10
@@ -0,0 +1 @@
+* Add 'current' symlink to backup destinations (Steffen Zieger)
diff --git a/doc/changes/2.2 b/doc/changes/2.2
new file mode 100644
index 0000000..0ee09d6
--- /dev/null
+++ b/doc/changes/2.2
@@ -0,0 +1 @@
+* Bugfix: empty rsync_options line causes destroying source (Darko Poljak)
diff --git a/doc/changes/2.3 b/doc/changes/2.3
new file mode 100644
index 0000000..960110c
--- /dev/null
+++ b/doc/changes/2.3
@@ -0,0 +1 @@
+* Bugfix: Fix parallel mode deadlock when max jobs > number of sources (Darko Poljak)
diff --git a/doc/changes/2.4 b/doc/changes/2.4
new file mode 100644
index 0000000..07ffe9d
--- /dev/null
+++ b/doc/changes/2.4
@@ -0,0 +1,2 @@
+* Add Windows(Cygwin) as supported OS
+* Add source name tag in log line
diff --git a/doc/changes/2.5 b/doc/changes/2.5
new file mode 100644
index 0000000..a6b319c
--- /dev/null
+++ b/doc/changes/2.5
@@ -0,0 +1 @@
+* Bugfix: exit in case of subshell error
diff --git a/doc/changes/2.6 b/doc/changes/2.6
new file mode 100644
index 0000000..a68cbbc
--- /dev/null
+++ b/doc/changes/2.6
@@ -0,0 +1 @@
+* Improve performance, improve process of deletion of old backups
diff --git a/doc/changes/2.7 b/doc/changes/2.7
new file mode 100644
index 0000000..bafbb01
--- /dev/null
+++ b/doc/changes/2.7
@@ -0,0 +1 @@
+* Fix shellcheck reported issues
diff --git a/doc/changes/2.8 b/doc/changes/2.8
new file mode 100644
index 0000000..563b438
--- /dev/null
+++ b/doc/changes/2.8
@@ -0,0 +1 @@
+* Fix excluding destination dir from removal
diff --git a/doc/changes/2.9 b/doc/changes/2.9
new file mode 100644
index 0000000..45097c7
--- /dev/null
+++ b/doc/changes/2.9
@@ -0,0 +1 @@
+* Make rsync return code available in post_exec (Steffen Zieger)
diff --git a/doc/man/ccollect.text b/doc/man/ccollect.text
index 84e95bb..67f8f47 100644
--- a/doc/man/ccollect.text
+++ b/doc/man/ccollect.text
@@ -10,7 +10,7 @@ ccollect - (pseudo) incremental backup with different exclude lists using hardli
SYNOPSIS
--------
-'ccollect.sh' [args]
+'ccollect.sh' [args]
DESCRIPTION
@@ -26,17 +26,54 @@ texinfo or html).
OPTIONS
-------
--h, --help::
- Show the help screen
-
--p, --parallel::
- Parallelise backup processes
-
-a, --all::
Backup all sources specified in /etc/ccollect/sources
+-e, --errors::
+ Log only errors
+
+-h, --help::
+ Show the help screen
+
+-j [max], --jobs [max]::
+ Specifies the number of jobs to run simultaneously.
+ If max is not specified then parallelise all jobs.
+
+-l FILE, --logfile FILE::
+ Log to specified file
+
+-p, --parallel::
+ Parallelise backup processes (deprecated from 2.0)
+
+-s, --syslog::
+ Log to syslog with tag ccollect
+
+-V, --version::
+ Show version and exit
+
-v, --verbose::
- Be very verbose (uses set -x).
+ Be very verbose (uses set -x)
+
+
+LOGGING MECHANISM
+-----------------
+ccollect logging depends on running in non-interactive/interactive mode
+and on specified options. The mechanism behaves as follows:
+
+non-interactive mode::
+
+ * standard output goes to syslog
+ * optional: specify logging into file
+ * log all output by default
+ * optional: log only errors
+
+interactive mode::
+
+ * standard output goes to stdout
+ * log only errors
+ * optional: log into syslog or file
+ - log all output by default
+ - optional: log only errors
SEE ALSO
diff --git a/doc/release-checklist b/doc/release-checklist
index 7cba30c..71b40ee 100644
--- a/doc/release-checklist
+++ b/doc/release-checklist
@@ -1,8 +1,7 @@
* Change version and date in ccollect.sh
* Change version in documentation/ccollect.text
-* Regenerate documentation
* Create tarball
-* Transfer to home.schottelius.org
+* Transfer to website
* Extract files
* Update website
* Announce on freshmeat
diff --git a/doc/todo/0.5.2 b/doc/todo/0.5.2
deleted file mode 100644
index 0c1c225..0000000
--- a/doc/todo/0.5.2
+++ /dev/null
@@ -1,4 +0,0 @@
-x Fix $? problem
-x Check last dir searching
-x Check count generation
-x Check general functionality (remove testing)
diff --git a/doc/todo/0.5.3 b/doc/todo/0.5.3
deleted file mode 100644
index 0019581..0000000
--- a/doc/todo/0.5.3
+++ /dev/null
@@ -1,34 +0,0 @@
-Done:
-==> screenshot
-u0219 zrha166.netstream.ch # ~chdscni9/ccollect.sh weekly zrha166.netstream.ch
-2007-08-16-21:49:44: ccollect 0.6: Beginning backup using interval weekly
-[zrha166.netstream.ch] 2007-08-16-21:49:44: Beginning to backup
-[zrha166.netstream.ch] 2007-08-16-21:49:45: Existing backups: 0 Total keeping backups: 8
-[zrha166.netstream.ch] 2007-08-16-21:49:45: Did not find existing backups for interval weekly.
-[zrha166.netstream.ch] 2007-08-16-21:49:45: Using backup from daily.
-[zrha166.netstream.ch] 2007-08-16-21:49:45: Beginning to backup, this may take some time...
-[zrha166.netstream.ch] 2007-08-16-21:49:45: Creating /etc/ccollect/sources/zrha166.netstream.ch/destination/weekly.20070816-2149.22188 ...
-[zrha166.netstream.ch] 2007-08-16-21:49:45: Transferring files...
-
-- beep-after-delete-hack?
- -> tonnerre / #cLinux
-
-- replace nico-linux-ccollect with nico-ccollect
- * ccollect is not Linux specific
-
-- remove exit-calls
- * will leave behind unused temporary file!
- * use _exit_err
-
-X join todos
-
-- fix possible quoting problems
- * rsync_extra (redefine format)
- * exclude
- * other
- * create _rsync, filtering args, creating new $@
-
-- check and export variables for use in scripts!
-
-Contact Julian later:
- * integrate updated german documentation
diff --git a/doc/todo/0.5.3.tonnerre b/doc/todo/0.5.3.tonnerre
deleted file mode 100644
index 4e7b7db..0000000
--- a/doc/todo/0.5.3.tonnerre
+++ /dev/null
@@ -1,11 +0,0 @@
-NetBSD/i386,amd64,sparc,sparc64
-
-13:13 < Tonnerre> telmich, die kleine wä, 2 Variablen aus $(CCOLLECT) zu
- machen
-13:13 < Tonnerre> telmich, eine fü Sourcepfad und eine fü
-Destinationfpad
-13:13 < Tonnerre> pfa
-13:13 < Tonnerre> d
-13:14 < Tonnerre> telmich, die gröre wä die $() durch ${} zu ersetzen, so
-dass der Kram auch mit !GNU-Make geht
-
diff --git a/doc/todo/0.6 b/doc/todo/0.6
deleted file mode 100644
index ee8e97f..0000000
--- a/doc/todo/0.6
+++ /dev/null
@@ -1,63 +0,0 @@
-not did, did not remember why I wanted to do that:
-
-- do sed-time check:
-
-1.2. replace sed?
-compare timing:
-_echo () { echo "$name $msg" }
-and create
-_techo () { echo "$timestamp $name $msg" }
-perhaps create
-_eecho () { _techo "ERROR $msg" }
-?
-
-
-
-done:
-add note for
- 09:24 < telmich> Obri: ~/.ssh/config!
-
-- implement use of different intervals as source for cloning
- * use 'woechentlich' if available and no backup exists for 'daily'
- * add 'prefer_latest' to prefer different interval, that's newer than
- ours
- * or add 'prefer_same_interval' instead?
-- implement detection of partial backups
-3. detect unfinished backups
-sven / markierung
- - wie seht der Marker aus?
- -> .ccollect-YEAR-MM-DD.HHmm.pid (like the directory)
- --> assume incomplete, when we did not finish.
- --> means it's complete,when rsync was successful
----> partial implemented in 0.5.2 (commented out)
-- do not check by name, but by time
- * is ls -t posix?
- * also check: -p -1
-- ccollect Zeitausgabe verbessern
- - Wofuer wie lange gebraucht
- * rm
- * klonen (gibt es so ja nicht)
- Wenn Summary angegeben ist am Ende ausgeben
-- do we want rsync -H by default?
- * no: takes much more memory
-ssh / port change:
- change rsync_extra format
-- Variables:
- source_$n
- no_sources
- name
-- changed naming of sources:
- YYYYMMDD-HHMM.PID (better readable)
- => source / backup converter! => not needed!
-config:
- set destination-base
- /etc/ccollect/defaults/source_config
-
-Documentation:
-- hint: backuping backup
-- update documentation:
- - german doc?
- - exit pre/post exec -> error codes (after implementation!) (in 0.4)
- - write about fast changing fs
-- delete_incomplete
-
diff --git a/doc/todo/0.6.1 b/doc/todo/0.6.1
deleted file mode 100644
index 99a17eb..0000000
--- a/doc/todo/0.6.1
+++ /dev/null
@@ -1,23 +0,0 @@
-- Add filter support
- * filter
-- add source/ignore_failed_pre_exec
-- add source/ignore_failed_post_exec
-- do not delete_incomplete, when there's only one backup left!
-- .ccollect-marker is deleted by rsync at the beginning!
- - fix marking
-- add logwrapper
-- add loganalyser
- speedup is
- error codes
- vanished files (see netstream)!!!
-
-Done:
-- Improve finding backup from another interval:
- o strip of interval name
- o run sort -n
- o use the last entry
-- add --version, -V
-
-not needed:
-- think about 'prefer_same_interval' / 'prefer_latest'
-
diff --git a/doc/todo/0.6.2 b/doc/todo/0.6.2
deleted file mode 100644
index 05798ff..0000000
--- a/doc/todo/0.6.2
+++ /dev/null
@@ -1 +0,0 @@
-- fix delete_incomplete marker
diff --git a/doc/todo/0.7.1 b/doc/todo/0.7.1
deleted file mode 100644
index 49df154..0000000
--- a/doc/todo/0.7.1
+++ /dev/null
@@ -1,35 +0,0 @@
-
-
---------------------------------------------------------------------------------
-done:
-- tools in makefile
-14:15 also alle tools/ccollect_*
-14:15 mach mal n besseres Makefile :)
-14:15 hatte die extra deswegen umbenannt
-14:15 nehme ich mal als hinweis für 0.7.1
-
-
-- add global delete_incomplete (/etc/ccollect/defaults/delete_incomplete)
-
-09:31 < Obri> telmich: hab nen kleinen tipp für ccollect
-09:32 < Obri> telmich: ich habe hier hosts die nicht immer online sind, das ist dumm weil so das backup
- kaputtgeht...
-09:32 < Obri> telmich: ich habe jetzt das ein preexec script gebastelt:
-09:32 < Obri> ping -c1 -q `cat /etc/ccollect/sources/$name/source | cut -d"@" -f2 | cut -d":" -f1`
-09:33 < Obri> telmich: so bricht das backup ab wenn der host nicht erreichbar ist
-09:33 < Obri> ohne dass ein altes backup entsorgt wird
-
-
-- remove basename
- -> include config from cconf! -> standard!
-
-reject:
---------------------------------------------------------------------------------
-- fix german documentation!
- => I'm the coder, somebody else can fix it.
-- add wrapper, to do logging and analyse
- * output filename in the wrapper, save into variable
- => mktemp
- * call analyser
- => output stdout?
- => no use for that currently
diff --git a/doc/todo/0.7.2 b/doc/todo/0.7.2
deleted file mode 100644
index 79a50aa..0000000
--- a/doc/todo/0.7.2
+++ /dev/null
@@ -1,63 +0,0 @@
---------------------------------------------------------------------------------
-Stats version
---------------------------------------------------------------------------------
-
-Add tools/ccollect_stats.sh, clearify license
---------------------------------------------------------------------------------
-Preamble:
- Netstream (www.netstream.ch) may consider using ccollect for backing up many
- unix servers and needs some clean reporting. The following things
- need to be done, so ccollect will be useful for netstream:
-
-Logger:
- - Needs to write small mails (sysadmins don't have time to read mails)
- - Needs to be able to only write a mail on error
- * needs configuration option to also mail on warnings
- - Should be able to send one mail per backup source
- * or one for the whole backup job
-
-Messages: (to be used for filtering)
- Errors:
- Read from remote host .*: Connection timed out
-
- Warnings:
- rsync: mknod .* failed: Invalid argument (22)
- file has vanished: ".*"
-
---------------------------------------------------------------------------------
-
-Analyzer:
- - grosse Dateien
- - grosse Veraenderungen
---------------------------------------------------------------------------------
-exclude-lists-doku:
-freebsd:
- /usr/ports/*
- /proc/*
- /dev/*
- /tmp/*
- /var/tmp/*
-linux:
- /sys/*
- /proc/*
- /dev/*
- /tmp/*
-
---------------------------------------------------------------------------------
-done:
-
-rsync_extra global!
-- \n delimeted
---------------------------------------------------------------------------------
- -S, --sparse
- Try to handle sparse files efficiently so they take up less space on the des‐
- tination. Conflicts with --inplace because it’s not possible to overwrite
- data in a sparse fashion.
-
---------------------------------------------------------------------------------
- Always report return code!
-
-[12:00] u0255:ddba034.netstream.ch# rsync -n -a --delete --stats --progress daily.20080324-0313.17841/ daily.20080325-0313.31148/
-
-$tool
-
diff --git a/doc/todo/0.7.3 b/doc/todo/0.7.3
deleted file mode 100644
index 73e5ffc..0000000
--- a/doc/todo/0.7.3
+++ /dev/null
@@ -1,2 +0,0 @@
-- add -a (archive) to ccollect_delete_source.text
-- add listing of intervals to ccollect_list_intervals
diff --git a/doc/todo/0.7.4 b/doc/todo/0.7.4
deleted file mode 100644
index 70515e7..0000000
--- a/doc/todo/0.7.4
+++ /dev/null
@@ -1 +0,0 @@
-add support for wipe
diff --git a/doc/todo/0.8.0 b/doc/todo/0.8.0
deleted file mode 100644
index 2b5130d..0000000
--- a/doc/todo/0.8.0
+++ /dev/null
@@ -1,6 +0,0 @@
-- restructure code to easily allow global versus source options:
- f_name=....
- check_option $f_name
- => source
- => defaults
-- support all senseful options as default and source specific
diff --git a/doc/todo/extern b/doc/todos.text
similarity index 60%
rename from doc/todo/extern
rename to doc/todos.text
index cb42a07..cf687cb 100644
--- a/doc/todo/extern
+++ b/doc/todos.text
@@ -1,6 +1,12 @@
+ccollect todos
+==============
+Nico Schottelius
+0.1, for the current ccollect version
+:Author Initials: NS
+
+
These todos are things that would be senseful todo, but are just nice
to have. This means I won't code them, so somebody can code them.
---------------------------------------------------------------------------------
- Add ccollect-restore.sh
(new project, perhaps coordinated with jchome's ccollect-config)
@@ -24,3 +30,16 @@ to have. This means I won't code them, so somebody can code them.
- add option ("run_only_once") to prevent ccollect to run twice
(per source/general)
+ -> or create wrapper, that takes care of it
+
+- add support for wipe
+
+- cleanup doc/: remove old stuff, make clear what can be installeld,
+ cleanup ccollect-restoring.text
+
+- clean logwrapper.text and probably create logwrapper, that does
+ what is being stated in logwrapper.text
+
+- include pdf support again (Makefile:72)
+
+- integrate website into ccollect -> updates are done here
diff --git a/test/conf/ccollect_local-with b/test/conf/ccollect_local-with
new file mode 100644
index 0000000..e69de29
diff --git a/test/conf/ccollect_source b/test/conf/ccollect_source
new file mode 100644
index 0000000..e69de29
diff --git a/test/conf/defaults/intervals/daily b/test/conf/defaults/intervals/daily
new file mode 100644
index 0000000..7ed6ff8
--- /dev/null
+++ b/test/conf/defaults/intervals/daily
@@ -0,0 +1 @@
+5
diff --git a/test/conf/defaults/intervals/monthly b/test/conf/defaults/intervals/monthly
new file mode 100644
index 0000000..b8626c4
--- /dev/null
+++ b/test/conf/defaults/intervals/monthly
@@ -0,0 +1 @@
+4
diff --git a/test/conf/defaults/intervals/normal b/test/conf/defaults/intervals/normal
new file mode 100644
index 0000000..b8626c4
--- /dev/null
+++ b/test/conf/defaults/intervals/normal
@@ -0,0 +1 @@
+4
diff --git a/test/conf/defaults/intervals/weekly b/test/conf/defaults/intervals/weekly
new file mode 100644
index 0000000..0cfbf08
--- /dev/null
+++ b/test/conf/defaults/intervals/weekly
@@ -0,0 +1 @@
+2
diff --git a/test/conf/defaults/post_exec b/test/conf/defaults/post_exec
new file mode 100755
index 0000000..0dac0ed
--- /dev/null
+++ b/test/conf/defaults/post_exec
@@ -0,0 +1,3 @@
+#!/bin/sh
+
+echo 'General post_exec executed.'
diff --git a/test/conf/defaults/pre_exec b/test/conf/defaults/pre_exec
new file mode 100755
index 0000000..451fdad
--- /dev/null
+++ b/test/conf/defaults/pre_exec
@@ -0,0 +1,3 @@
+#!/bin/sh
+
+echo 'General pre__exec executed.'
diff --git a/conf/sources/to-remote/exclude b/test/conf/defaults/sources/exclude
similarity index 100%
rename from conf/sources/to-remote/exclude
rename to test/conf/defaults/sources/exclude
diff --git a/test/conf/defaults/sources/rsync_options b/test/conf/defaults/sources/rsync_options
new file mode 100644
index 0000000..e69de29
diff --git a/test/conf/defaults/sources/verbose b/test/conf/defaults/sources/verbose
new file mode 100644
index 0000000..e69de29
diff --git a/test/conf/defaults/verbose b/test/conf/defaults/verbose
new file mode 100644
index 0000000..e69de29
diff --git a/test/conf/sources/delete_incomplete/delete_incomplete b/test/conf/sources/delete_incomplete/delete_incomplete
new file mode 100644
index 0000000..e69de29
diff --git a/test/conf/sources/delete_incomplete/destination b/test/conf/sources/delete_incomplete/destination
new file mode 100644
index 0000000..c2a7c55
--- /dev/null
+++ b/test/conf/sources/delete_incomplete/destination
@@ -0,0 +1 @@
+/tmp/ccollect/backup
diff --git a/test/conf/sources/delete_incomplete/exclude b/test/conf/sources/delete_incomplete/exclude
new file mode 100644
index 0000000..6b8710a
--- /dev/null
+++ b/test/conf/sources/delete_incomplete/exclude
@@ -0,0 +1 @@
+.git
diff --git a/test/conf/sources/delete_incomplete/source b/test/conf/sources/delete_incomplete/source
new file mode 100644
index 0000000..9e90576
--- /dev/null
+++ b/test/conf/sources/delete_incomplete/source
@@ -0,0 +1 @@
+/tmp/ccollect/source
diff --git a/test/conf/sources/local-with&ersand/destination b/test/conf/sources/local-with&ersand/destination
new file mode 100644
index 0000000..c2a7c55
--- /dev/null
+++ b/test/conf/sources/local-with&ersand/destination
@@ -0,0 +1 @@
+/tmp/ccollect/backup
diff --git a/test/conf/sources/local-with&ersand/exclude b/test/conf/sources/local-with&ersand/exclude
new file mode 100644
index 0000000..6b8710a
--- /dev/null
+++ b/test/conf/sources/local-with&ersand/exclude
@@ -0,0 +1 @@
+.git
diff --git a/test/conf/sources/local-with&ersand/source b/test/conf/sources/local-with&ersand/source
new file mode 100644
index 0000000..9e90576
--- /dev/null
+++ b/test/conf/sources/local-with&ersand/source
@@ -0,0 +1 @@
+/tmp/ccollect/source
diff --git a/test/conf/sources/local-with-interval/delete_incomplete b/test/conf/sources/local-with-interval/delete_incomplete
new file mode 100644
index 0000000..e69de29
diff --git a/test/conf/sources/local-with-interval/destination b/test/conf/sources/local-with-interval/destination
new file mode 100644
index 0000000..4de7e06
--- /dev/null
+++ b/test/conf/sources/local-with-interval/destination
@@ -0,0 +1 @@
+/tmp/ccollect/backup-chint
diff --git a/test/conf/sources/local-with-interval/exclude b/test/conf/sources/local-with-interval/exclude
new file mode 100644
index 0000000..6b8710a
--- /dev/null
+++ b/test/conf/sources/local-with-interval/exclude
@@ -0,0 +1 @@
+.git
diff --git a/test/conf/sources/local-with-interval/intervals/daily b/test/conf/sources/local-with-interval/intervals/daily
new file mode 100644
index 0000000..e440e5c
--- /dev/null
+++ b/test/conf/sources/local-with-interval/intervals/daily
@@ -0,0 +1 @@
+3
\ No newline at end of file
diff --git a/test/conf/sources/local-with-interval/source b/test/conf/sources/local-with-interval/source
new file mode 100644
index 0000000..9e90576
--- /dev/null
+++ b/test/conf/sources/local-with-interval/source
@@ -0,0 +1 @@
+/tmp/ccollect/source
diff --git a/test/conf/sources/local-with-interval/verbose b/test/conf/sources/local-with-interval/verbose
new file mode 100644
index 0000000..e69de29
diff --git a/test/conf/sources/local/destination b/test/conf/sources/local/destination
new file mode 100644
index 0000000..c2a7c55
--- /dev/null
+++ b/test/conf/sources/local/destination
@@ -0,0 +1 @@
+/tmp/ccollect/backup
diff --git a/test/conf/sources/local/exclude b/test/conf/sources/local/exclude
new file mode 100644
index 0000000..6b8710a
--- /dev/null
+++ b/test/conf/sources/local/exclude
@@ -0,0 +1 @@
+.git
diff --git a/test/conf/sources/local/no_verbose b/test/conf/sources/local/no_verbose
new file mode 100644
index 0000000..e69de29
diff --git a/test/conf/sources/local/source b/test/conf/sources/local/source
new file mode 100644
index 0000000..9e90576
--- /dev/null
+++ b/test/conf/sources/local/source
@@ -0,0 +1 @@
+/tmp/ccollect/source
diff --git a/test/conf/sources/source with spaces and interval/delete_incomplete b/test/conf/sources/source with spaces and interval/delete_incomplete
new file mode 100644
index 0000000..e69de29
diff --git a/test/conf/sources/source with spaces and interval/destination b/test/conf/sources/source with spaces and interval/destination
new file mode 100644
index 0000000..c2a7c55
--- /dev/null
+++ b/test/conf/sources/source with spaces and interval/destination
@@ -0,0 +1 @@
+/tmp/ccollect/backup
diff --git a/test/conf/sources/source with spaces and interval/exclude b/test/conf/sources/source with spaces and interval/exclude
new file mode 100644
index 0000000..6b8710a
--- /dev/null
+++ b/test/conf/sources/source with spaces and interval/exclude
@@ -0,0 +1 @@
+.git
diff --git a/test/conf/sources/source with spaces and interval/source b/test/conf/sources/source with spaces and interval/source
new file mode 100644
index 0000000..9e90576
--- /dev/null
+++ b/test/conf/sources/source with spaces and interval/source
@@ -0,0 +1 @@
+/tmp/ccollect/source
diff --git a/test/conf/sources/source with spaces and interval/verbose b/test/conf/sources/source with spaces and interval/verbose
new file mode 100644
index 0000000..e69de29
diff --git a/test/conf/sources/very_verbose/destination b/test/conf/sources/very_verbose/destination
new file mode 100644
index 0000000..c2a7c55
--- /dev/null
+++ b/test/conf/sources/very_verbose/destination
@@ -0,0 +1 @@
+/tmp/ccollect/backup
diff --git a/test/conf/sources/very_verbose/exclude b/test/conf/sources/very_verbose/exclude
new file mode 100644
index 0000000..6b8710a
--- /dev/null
+++ b/test/conf/sources/very_verbose/exclude
@@ -0,0 +1 @@
+.git
diff --git a/test/conf/sources/very_verbose/source b/test/conf/sources/very_verbose/source
new file mode 100644
index 0000000..9e90576
--- /dev/null
+++ b/test/conf/sources/very_verbose/source
@@ -0,0 +1 @@
+/tmp/ccollect/source
diff --git a/test/conf/sources/very_verbose/summary b/test/conf/sources/very_verbose/summary
new file mode 100644
index 0000000..e69de29
diff --git a/test/conf/sources/very_verbose/verbose b/test/conf/sources/very_verbose/verbose
new file mode 100644
index 0000000..e69de29
diff --git a/test/conf/sources/very_verbose/very_verbose b/test/conf/sources/very_verbose/very_verbose
new file mode 100644
index 0000000..e69de29
diff --git a/test/conf/sources/with_exec/destination b/test/conf/sources/with_exec/destination
new file mode 100644
index 0000000..c2a7c55
--- /dev/null
+++ b/test/conf/sources/with_exec/destination
@@ -0,0 +1 @@
+/tmp/ccollect/backup
diff --git a/test/conf/sources/with_exec/post_exec b/test/conf/sources/with_exec/post_exec
new file mode 100755
index 0000000..abc0a40
--- /dev/null
+++ b/test/conf/sources/with_exec/post_exec
@@ -0,0 +1,5 @@
+#!/bin/cat
+
+######################################################################
+Source post_exec executed.
+######################################################################
diff --git a/test/conf/sources/with_exec/pre_exec b/test/conf/sources/with_exec/pre_exec
new file mode 100755
index 0000000..ba7b2af
--- /dev/null
+++ b/test/conf/sources/with_exec/pre_exec
@@ -0,0 +1,5 @@
+#!/bin/cat
+
+######################################################################
+Source pre_exec executed.
+######################################################################
diff --git a/test/conf/sources/with_exec/source b/test/conf/sources/with_exec/source
new file mode 100644
index 0000000..9e90576
--- /dev/null
+++ b/test/conf/sources/with_exec/source
@@ -0,0 +1 @@
+/tmp/ccollect/source
diff --git a/test/exec.sh b/test/exec.sh
deleted file mode 100755
index bdf601d..0000000
--- a/test/exec.sh
+++ /dev/null
@@ -1,18 +0,0 @@
-#!/bin/sh
-
-host="home.schottelius.org"
-host=""
-set -x
-pcmd()
-{
- echo "$#", "$@"
- if [ "$host" ]; then
- ssh "$host" "$@"
- else
- $@
- fi
-}
-
-#pcmd ls /
-#pcmd cd /; ls "/is not there"
-pcmd cd / && ls
diff --git a/test/local.sh b/test/local.sh
deleted file mode 100755
index c2430fd..0000000
--- a/test/local.sh
+++ /dev/null
@@ -1 +0,0 @@
-CCOLLECT_CONF=./conf ./ccollect.sh daily -v local1
diff --git a/test/remote.sh b/test/remote.sh
deleted file mode 100755
index 2af364e..0000000
--- a/test/remote.sh
+++ /dev/null
@@ -1 +0,0 @@
-CCOLLECT_CONF=./conf ./ccollect.sh daily -v remote1
diff --git a/test/return-value.sh b/test/return-value.sh
deleted file mode 100755
index 554def0..0000000
--- a/test/return-value.sh
+++ /dev/null
@@ -1,23 +0,0 @@
-#!/bin/sh
-
-ls /surely-not-existent$$ 2>/dev/null
-
-if [ "$?" -ne 0 ]; then
- echo "$?"
-fi
-
-ls /surely-not-existent$$ 2>/dev/null
-
-ret=$?
-
-if [ "$ret" -ne 0 ]; then
- echo "$ret"
-fi
-
-# if is true, ls is fales
-if [ "foo" = "foo" ]; then
- ls /surely-not-existent$$ 2>/dev/null
-fi
-
-# but that's still the return of ls and not of fi
-echo $?
diff --git a/test/test-ccollect1.sh b/test/test-ccollect1.sh
deleted file mode 100755
index c5acf54..0000000
--- a/test/test-ccollect1.sh
+++ /dev/null
@@ -1,44 +0,0 @@
-#!/bin/sh
-#
-# Nico Schottelius
-# Date: 27-Jan-2007
-# Last Modified: -
-# Description:
-#
-
-ccollect=../ccollect.sh
-testdir="$(dirname $0)/test-backups"
-confdir="$(dirname $0)/test-config"
-source="$(hostname)"
-source_source="/tmp"
-interval="taeglich"
-
-
-# backup destination
-mkdir -p "$testdir"
-source_dest="$(cd "$testdir"; pwd -P)"
-
-# configuration
-mkdir -p "${confdir}/sources/${source}"
-ln -s "$source_dest" "${confdir}/sources/${source}/destination"
-echo "$source_source" > "${confdir}/sources/${source}/source"
-touch "${confdir}/sources/${source}/summary"
-touch "${confdir}/sources/${source}/verbose"
-
-mkdir -p "${confdir}/defaults/intervals/"
-echo 3 > "${confdir}/defaults/intervals/$interval"
-
-# create backups
-
-CCOLLECT_CONF="$confdir" "$ccollect" "$interval" -p -a
-touch "${source_source}/$(date +%s)-$$.1982"
-
-CCOLLECT_CONF="$confdir" "$ccollect" "$interval" -p -a
-touch "${source_source}/$(date +%s)-$$.42"
-
-CCOLLECT_CONF="$confdir" "$ccollect" "$interval" -p -a
-
-du -sh "$testdir"
-du -shl "$testdir"
-
-echo "Delete $testdir and $confdir after test"
diff --git a/tools/README b/tools/README
index 7312018..bbc9706 100644
--- a/tools/README
+++ b/tools/README
@@ -1,19 +1,18 @@
Files and their tasks / destinations:
-ccollect_add_source.sh: bin
-ccollect_analyse_logs.sh: bin
-ccollect_create_source2.sh: ???
-ccollect_create_source.sh: ???
-ccollect_delete_source.sh: bin
-ccollect_list_intervals.sh: bin
-ccollect_logwrapper.sh: bin
-ccollect-stats.sh: ???
+ccollect_add_source: bin
+ccollect_analyse_logs: bin
+ccollect_create_source2: ???
+ccollect_create_source: ???
+ccollect_delete_source: bin
+ccollect_list_intervals: bin
+ccollect_logwrapper: bin
+ccollect-stats: ???
config-pre-0.4-to-0.4.BUGS: only to be used for converting
-config-pre-0.4-to-0.4.sh: only to be used for converting
-config-pre-0.4-to-0.4.sub.sh: only to be used for converting
-config-pre-0.6-to-0.6.sh: only to be used for converting
-config-pre-0.6-to-0.6.sub.sh: only to be used for converting
-config-pre-0.7-to-0.7.sh: only to be used for converting
-config-pre-0.7-to-0.7.sub.sh: only to be used for converting
-gnu-du-backup-size-compare.sh
+config-pre-0.4-to-0.4: only to be used for converting
+config-pre-0.4-to-0.4.sub: only to be used for converting
+config-pre-0.6-to-0.6: only to be used for converting
+config-pre-0.6-to-0.6.sub: only to be used for converting
+config-pre-0.7-to-0.7: only to be used for converting
+config-pre-0.7-to-0.7.sub: only to be used for converting
README
diff --git a/test/test-ccollect-tools.sh b/tools/called_from_remote_pre_exec
old mode 100644
new mode 100755
similarity index 66%
rename from test/test-ccollect-tools.sh
rename to tools/called_from_remote_pre_exec
index 5980d04..f458114
--- a/test/test-ccollect-tools.sh
+++ b/tools/called_from_remote_pre_exec
@@ -1,29 +1,30 @@
#!/bin/sh
-#
-# 2009 Nico Schottelius (nico-ccollect at schottelius.org)
-#
+#
+# 2011 Nico Schottelius (nico-ccollect at schottelius.org)
+#
# This file is part of ccollect.
#
# ccollect is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
-#
+#
# ccollect is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
-#
+#
# You should have received a copy of the GNU General Public License
# along with ccollect. If not, see .
#
+# Support backup triggering and actual backup through
+# ssh tunnel
#
-# Test the ccollect tools suite
+# Use the supplied called_from_remote script as pre…exec
+#
#
-set -x
+export ssh_remote_port="$1"; shift
+export backup_host="$1"; shift
-tmp="$(mktemp /tmp/ccollect-tools.XXXXXXXXXXX)"
-
-
-rm -rf "${tmp}"
+ssh -R$ssh_remote_port:127.0.0.1:22 "$backup_host" "ccollect" "$@"
diff --git a/tools/ccollect_add_source.sh b/tools/ccollect_add_source
similarity index 100%
rename from tools/ccollect_add_source.sh
rename to tools/ccollect_add_source
diff --git a/tools/ccollect_analyse_logs.sh b/tools/ccollect_analyse_logs
old mode 100755
new mode 100644
similarity index 90%
rename from tools/ccollect_analyse_logs.sh
rename to tools/ccollect_analyse_logs
index 7ff9916..7d48d54
--- a/tools/ccollect_analyse_logs.sh
+++ b/tools/ccollect_analyse_logs
@@ -72,6 +72,11 @@ if [ "$search_err" ]; then
set -- "$@" "-e" 'ssh: connect to host .*: Connection timed out'
set -- "$@" "-e" 'rsync error: unexplained error (code 255)'
set -- "$@" "-e" 'rsync: connection unexpectedly closed'
+ set -- "$@" "-e" 'rsync: send_files failed to open'
+ set -- "$@" "-e" 'rsync: readlink_stat .* failed: File name too long'
+ set -- "$@" "-e" 'IO error encountered .* skipping file deletion'
+ set -- "$@" "-e" 'rsync: read error: Connection reset by peer'
+ set -- "$@" "-e" 'rsync error: error in rsync protocol data stream'
fi
# known error strings:
@@ -103,6 +108,7 @@ fi
if [ "$search_warn" ]; then
# warn on non-zero exit code
set -- "$@" "-e" 'Finished backup (rsync return code: [^0]'
+ set -- "$@" "-e" 'rsync: file has vanished'
set -- "$@" "-e" 'WARNING: .* failed verification -- update discarded (will try again).'
fi
# known warnings:
@@ -113,9 +119,12 @@ fi
# Interesting strings in the logs: informational
# ----------------------------------------------
if [ "$search_info" ]; then
+ set -- "$@" "-e" 'Number of files: [[:digit:]]*'
+ set -- "$@" "-e" 'Number of files transferred: [[:digit:]]*'
+ set -- "$@" "-e" 'Total transferred file size: [[:digit:]]* bytes'
set -- "$@" "-e" 'total size is [[:digit:]]* speedup is'
set -- "$@" "-e" 'Backup lasted: [[:digit:]]*:[[:digit:]]\{1,2\}:[[:digit:]]* (h:m:s)$'
- set -- "$@" "-e" 'send [[:digit:]]* bytes received [0-9]* bytes [0-9]* bytes/sec$'
+ set -- "$@" "-e" 'sent [[:digit:]]* bytes received [0-9]* bytes'
fi
# info includes:
@@ -123,4 +132,6 @@ fi
# [u0160.nshq.ch.netstream.com] 2007-08-20-18:26:06: Backup lasted: 0:43:34 (h:m:s)
#[ddba012.netstream.ch] sent 3303866 bytes received 1624630525 bytes 122700.92 bytes/sec
+#echo Parameters: "$@"
+
grep "$@"
diff --git a/tools/ccollect_archive_config.sh b/tools/ccollect_archive_config
similarity index 100%
rename from tools/ccollect_archive_config.sh
rename to tools/ccollect_archive_config
diff --git a/tools/ccollect_check_config.sh b/tools/ccollect_check_config
similarity index 100%
rename from tools/ccollect_check_config.sh
rename to tools/ccollect_check_config
diff --git a/tools/ccollect_delete_source.sh b/tools/ccollect_delete_source
similarity index 100%
rename from tools/ccollect_delete_source.sh
rename to tools/ccollect_delete_source
diff --git a/tools/ccollect_list_intervals.sh b/tools/ccollect_list_intervals
similarity index 100%
rename from tools/ccollect_list_intervals.sh
rename to tools/ccollect_list_intervals
diff --git a/tools/ccollect_logwrapper.sh b/tools/ccollect_logwrapper
similarity index 94%
rename from tools/ccollect_logwrapper.sh
rename to tools/ccollect_logwrapper
index ac7f496..95e6fbb 100755
--- a/tools/ccollect_logwrapper.sh
+++ b/tools/ccollect_logwrapper
@@ -58,7 +58,7 @@ _echo "Starting with arguments: $@"
touch "${ccollect_logfile}" || _exit_err "Failed to create ${ccollect_logfile}"
# First line in the logfile is always the commandline
-echo ccollect.sh "$@" > "${ccollect_logfile}" 2>&1
-ccollect.sh "$@" >> "${ccollect_logfile}" 2>&1
+echo ccollect "$@" > "${ccollect_logfile}" 2>&1
+ccollect "$@" >> "${ccollect_logfile}" 2>&1
_echo "Finished."
diff --git a/tools/ccollect_push b/tools/ccollect_push
new file mode 100755
index 0000000..f458114
--- /dev/null
+++ b/tools/ccollect_push
@@ -0,0 +1,30 @@
+#!/bin/sh
+#
+# 2011 Nico Schottelius (nico-ccollect at schottelius.org)
+#
+# This file is part of ccollect.
+#
+# ccollect is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# ccollect is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with ccollect. If not, see .
+#
+# Support backup triggering and actual backup through
+# ssh tunnel
+#
+# Use the supplied called_from_remote script as pre…exec
+#
+#
+
+export ssh_remote_port="$1"; shift
+export backup_host="$1"; shift
+
+ssh -R$ssh_remote_port:127.0.0.1:22 "$backup_host" "ccollect" "$@"
diff --git a/tools/ccollect_stats.sh b/tools/ccollect_stats
similarity index 100%
rename from tools/ccollect_stats.sh
rename to tools/ccollect_stats
diff --git a/tools/config-pre-0.4-to-0.4.sh b/tools/config-pre-0.4-to-0.4
similarity index 100%
rename from tools/config-pre-0.4-to-0.4.sh
rename to tools/config-pre-0.4-to-0.4
diff --git a/tools/config-pre-0.4-to-0.4.sub.sh b/tools/config-pre-0.4-to-0.4.sub
similarity index 100%
rename from tools/config-pre-0.4-to-0.4.sub.sh
rename to tools/config-pre-0.4-to-0.4.sub
diff --git a/tools/config-pre-0.6-to-0.6.sh b/tools/config-pre-0.6-to-0.6
similarity index 96%
rename from tools/config-pre-0.6-to-0.6.sh
rename to tools/config-pre-0.6-to-0.6
index 58a538a..1a4bc2f 100755
--- a/tools/config-pre-0.6-to-0.6.sh
+++ b/tools/config-pre-0.6-to-0.6
@@ -30,7 +30,7 @@ if [ $# -ne 1 ]; then
fi
dir="$1"
-script=$(echo $0 | sed 's/\.sh$/.sub.sh/')
+script="${0}.sub"
find "${dir}/sources/" -type f -name rsync_options -exec "${script}" {} \;
diff --git a/tools/config-pre-0.6-to-0.6.sub.sh b/tools/config-pre-0.6-to-0.6.sub
similarity index 100%
rename from tools/config-pre-0.6-to-0.6.sub.sh
rename to tools/config-pre-0.6-to-0.6.sub
diff --git a/tools/config-pre-0.7-to-0.7.sh b/tools/config-pre-0.7-to-0.7
similarity index 96%
rename from tools/config-pre-0.7-to-0.7.sh
rename to tools/config-pre-0.7-to-0.7
index 6d540ab..3098728 100755
--- a/tools/config-pre-0.7-to-0.7.sh
+++ b/tools/config-pre-0.7-to-0.7
@@ -29,7 +29,7 @@ if [ $# -ne 1 ]; then
fi
dir="$1"
-script=$(echo $0 | sed 's/\.sh$/.sub.sh/')
+script="${0}.sub"
find "${dir}/sources/" -name destination -exec "${script}" {} \;
diff --git a/tools/config-pre-0.7-to-0.7.sub.sh b/tools/config-pre-0.7-to-0.7.sub
similarity index 100%
rename from tools/config-pre-0.7-to-0.7.sub.sh
rename to tools/config-pre-0.7-to-0.7.sub
diff --git a/tools/gnu-du-backup-size-compare.sh b/tools/old/gnu-du-backup-size-compare
similarity index 100%
rename from tools/gnu-du-backup-size-compare.sh
rename to tools/old/gnu-du-backup-size-compare
diff --git a/tools/report_success.sh b/tools/report_success
similarity index 100%
rename from tools/report_success.sh
rename to tools/report_success