add updated patches from john

Signed-off-by: Nico Schottelius <nico@ikn.schottelius.org>
Nico Schottelius 2009-06-16 10:32:05 +02:00
parent 02264020f5
commit 6595fe7b97
15 changed files with 2718 additions and 0 deletions

@@ -0,0 +1,15 @@
Hello Nico,
I have attached three more patches for ccollect. Each patch
has comments explaining its motivation.
All of these patches work for me (though I continue to test
them). I would be interested in your opinion on, for example, the
general approach used in i.patch, which changes the way options are
handled. I think it is a big improvement. If, however, you want
the code to go in a different direction, let me know before we
diverge too far.
Regards,
John

@@ -0,0 +1,663 @@
#!/bin/sh
#
# 2005-2009 Nico Schottelius (nico-ccollect at schottelius.org)
#
# This file is part of ccollect.
#
# ccollect is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ccollect is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with ccollect. If not, see <http://www.gnu.org/licenses/>.
#
# Initially written for SyGroup (www.sygroup.ch)
# Date: Mon Nov 14 11:45:11 CET 2005
#
# Standard variables (stolen from cconf)
#
__pwd="$(pwd -P)"
__mydir="${0%/*}"; __abs_mydir="$(cd "$__mydir" && pwd -P)"
__myname=${0##*/}; __abs_myname="$__abs_mydir/$__myname"
#
# where to find our configuration and temporary file
#
CCOLLECT_CONF=${CCOLLECT_CONF:-/etc/ccollect}
CSOURCES=${CCOLLECT_CONF}/sources
CDEFAULTS=${CCOLLECT_CONF}/defaults
CPREEXEC="${CDEFAULTS}/pre_exec"
CPOSTEXEC="${CDEFAULTS}/post_exec"
TMP=$(mktemp "/tmp/${__myname}.XXXXXX")
VERSION=0.7.1
RELEASE="2009-02-02"
HALF_VERSION="ccollect ${VERSION}"
FULL_VERSION="ccollect ${VERSION} (${RELEASE})"
#TSORT="tc" ; NEWER="cnewer"
TSORT="t" ; NEWER="newer"
#
# CDATE: how we use it for naming of the archives
# DDATE: how the user should see it in our output (DISPLAY)
#
CDATE="date +%Y%m%d-%H%M"
DDATE="date +%Y-%m-%d-%H:%M:%S"
#
# unset parallel execution
#
PARALLEL=""
#
# catch signals
#
trap "rm -f \"${TMP}\"" 1 2 15
#
# Functions
#
# time displaying echo
_techo()
{
echo "$(${DDATE}): $@"
}
# exit on error
_exit_err()
{
_techo "$@"
rm -f "${TMP}"
exit 1
}
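# prefix each output line with the current source name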
add_name()
{
awk "{ print \"[${name}] \" \$0 }"
}
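# run a command locally, or via ssh on the source's remote host if set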
pcmd()
{
if [ "$remote_host" ]; then
ssh "$remote_host" "$@"
else
"$@"
fi
}
#
# Version
#
display_version()
{
echo "${FULL_VERSION}"
exit 0
}
#
# Tell how to use us
#
usage()
{
echo "${__myname}: <interval name> [args] <sources to backup>"
echo ""
echo " ccollect creates (pseudo) incremental backups"
echo ""
echo " -h, --help: Show this help screen"
echo " -p, --parallel: Parallelise backup processes"
echo " -a, --all: Backup all sources specified in ${CSOURCES}"
echo " -v, --verbose: Be very verbose (uses set -x)"
echo " -V, --version: Print version information"
echo ""
echo " This is version ${VERSION}, released on ${RELEASE}"
echo " (the first version was written on 2005-12-05 by Nico Schottelius)."
echo ""
echo " Retrieve latest ccollect at http://unix.schottelius.org/ccollect/"
exit 0
}
#
# Select interval if AUTO
#
# For this to work nicely, you have to choose interval names that sort nicely
# such as int1, int2, int3 or a_daily, b_weekly, c_monthly, etc.
#
auto_interval()
{
if [ -d "${backup}/intervals" -a -n "$(ls "${backup}/intervals" 2>/dev/null)" ] ; then
intervals_dir="${backup}/intervals"
elif [ -d "${CDEFAULTS}/intervals" -a -n "$(ls "${CDEFAULTS}/intervals" 2>/dev/null)" ] ; then
intervals_dir="${CDEFAULTS}/intervals"
else
_exit_err "No intervals are defined. Skipping."
fi
echo intervals_dir=${intervals_dir}
trial_interval="$(ls -1r "${intervals_dir}/" | head -n 1)" || \
_exit_err "Failed to list contents of ${intervals_dir}/."
_techo "Considering interval ${trial_interval}"
most_recent="$(pcmd ls -${TSORT}p1 "${ddir}" | grep "^${trial_interval}.*/$" | head -n 1)" || \
_exit_err "Failed to list contents of ${ddir}/."
_techo " Most recent ${trial_interval}: '${most_recent}'"
if [ -n "${most_recent}" ]; then
no_intervals="$(ls -1 "${intervals_dir}/" | wc -l)"
n=1
while [ "${n}" -le "${no_intervals}" ]; do
trial_interval="$(ls -p1 "${intervals_dir}/" | tail -n+${n} | head -n 1)"
_techo "Considering interval '${trial_interval}'"
c_interval="$(cat "${intervals_dir}/${trial_interval}" 2>/dev/null)"
m=$((${n}+1))
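# build the argument list for find(1): count ${trial_interval} backups
# that are newer than the most recent backup of every less frequent interval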
set -- "${ddir}" -maxdepth 1
while [ "${m}" -le "${no_intervals}" ]; do
interval_m="$(ls -1 "${intervals_dir}/" | tail -n+${m} | head -n 1)"
most_recent="$(pcmd ls -${TSORT}p1 "${ddir}" | grep "^${interval_m}\..*/$" | head -n 1)"
_techo " Most recent ${interval_m}: '${most_recent}'"
if [ -n "${most_recent}" ] ; then
set -- "$@" -$NEWER "${ddir}/${most_recent}"
fi
m=$((${m}+1))
done
count=$(pcmd find "$@" -iname "${trial_interval}*" | wc -l)
_techo " Found $count more recent backups of ${trial_interval} (limit: ${c_interval})"
if [ "$count" -lt "${c_interval}" ] ; then
break
fi
n=$((${n}+1))
done
fi
export INTERVAL="${trial_interval}"
D_FILE_INTERVAL="${intervals_dir}/${INTERVAL}"
D_INTERVAL=$(cat "${D_FILE_INTERVAL}" 2>/dev/null)
}
#
# need at least interval and one source or --all
#
if [ $# -lt 2 ]; then
if [ "$1" = "-V" -o "$1" = "--version" ]; then
display_version
else
usage
fi
fi
#
# check for configuration directory
#
[ -d "${CCOLLECT_CONF}" ] || _exit_err "No configuration found in " \
"\"${CCOLLECT_CONF}\" (is \$CCOLLECT_CONF properly set?)"
#
# Filter arguments
#
export INTERVAL="$1"; shift
i=1
no_sources=0
#
# Create source "array"
#
while [ "$#" -ge 1 ]; do
eval arg=\"\$1\"; shift
if [ "${NO_MORE_ARGS}" = 1 ]; then
eval source_${no_sources}=\"${arg}\"
# make variable available for subscripts
eval export source_${no_sources}
no_sources=$((${no_sources}+1))
else
case "${arg}" in
-a|--all)
ALL=1
;;
-v|--verbose)
VERBOSE=1
;;
-p|--parallel)
PARALLEL=1
;;
-h|--help)
usage
;;
--)
NO_MORE_ARGS=1
;;
*)
eval source_${no_sources}=\"$arg\"
no_sources=$(($no_sources+1))
;;
esac
fi
i=$(($i+1))
done
# also export number of sources
export no_sources
#
# be really, really, really verbose
#
if [ "${VERBOSE}" = 1 ]; then
set -x
fi
#
# Look, if we should take ALL sources
#
if [ "${ALL}" = 1 ]; then
# reset everything specified before
no_sources=0
#
# get entries from sources
#
cwd=$(pwd -P)
( cd "${CSOURCES}" && ls > "${TMP}" ); ret=$?
[ "${ret}" -eq 0 ] || _exit_err "Listing of sources failed. Aborting."
while read tmp; do
eval source_${no_sources}=\"${tmp}\"
no_sources=$((${no_sources}+1))
done < "${TMP}"
fi
#
# Need at least ONE source to backup
#
if [ "${no_sources}" -lt 1 ]; then
usage
else
_techo "${HALF_VERSION}: Beginning backup using interval ${INTERVAL}"
fi
#
# Look for pre-exec command (general)
#
if [ -x "${CPREEXEC}" ]; then
_techo "Executing ${CPREEXEC} ..."
"${CPREEXEC}"; ret=$?
_techo "Finished ${CPREEXEC} (return code: ${ret})."
[ "${ret}" -eq 0 ] || _exit_err "${CPREEXEC} failed. Aborting"
fi
#
# check default configuration
#
D_FILE_INTERVAL="${CDEFAULTS}/intervals/${INTERVAL}"
D_INTERVAL=$(cat "${D_FILE_INTERVAL}" 2>/dev/null)
#
# Let's do the backup
#
i=0
while [ "${i}" -lt "${no_sources}" ]; do
#
# Get current source
#
eval name=\"\$source_${i}\"
i=$((${i}+1))
export name
#
# start ourself, if we want parallel execution
#
if [ "${PARALLEL}" ]; then
"$0" "${INTERVAL}" "${name}" &
continue
fi
#
# Start subshell for easy log editing
#
(
#
# Stderr to stdout, so we can produce nice logs
#
exec 2>&1
#
# Configuration
#
backup="${CSOURCES}/${name}"
c_source="${backup}/source"
c_dest="${backup}/destination"
c_pre_exec="${backup}/pre_exec"
c_post_exec="${backup}/post_exec"
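# option lookup: prefer the per-source file; a per-source no_<option>
# file overrides (disables) a default from ${CDEFAULTS}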
for opt in exclude verbose very_verbose rsync_options summary delete_incomplete remote_host ; do
if [ -f "${backup}/$opt" -o -f "${backup}/no_$opt" ]; then
eval c_$opt=\"${backup}/$opt\"
else
eval c_$opt=\"${CDEFAULTS}/$opt\"
fi
done
#
# Marking backups: If we abort it's not removed => Backup is broken
#
c_marker=".ccollect-marker"
#
# Times
#
begin_s=$(date +%s)
#
# unset possible options
#
VERBOSE=""
VVERBOSE=""
_techo "Beginning to backup"
#
# Standard configuration checks
#
if [ ! -e "${backup}" ]; then
_exit_err "Source does not exist."
fi
#
# configuration _must_ be a directory
#
if [ ! -d "${backup}" ]; then
_exit_err "\"${name}\" is not a cconfig-directory. Skipping."
fi
#
# first execute pre_exec, which may generate destination or other
# parameters
#
if [ -x "${c_pre_exec}" ]; then
_techo "Executing ${c_pre_exec} ..."
"${c_pre_exec}"; ret="$?"
_techo "Finished ${c_pre_exec} (return code ${ret})."
if [ "${ret}" -ne 0 ]; then
_exit_err "${c_pre_exec} failed. Skipping."
fi
fi
#
# Destination is a path
#
if [ ! -f "${c_dest}" ]; then
_exit_err "Destination ${c_dest} is not a file. Skipping."
else
ddir=$(cat "${c_dest}"); ret="$?"
if [ "${ret}" -ne 0 ]; then
_exit_err "Destination ${c_dest} is not readable. Skipping."
fi
fi
#
# interval definition: First try source specific, fallback to default
#
if [ "${INTERVAL}" = "AUTO" ] ; then
auto_interval
_techo "Selected interval: '$INTERVAL'"
fi
c_interval="$(cat "${backup}/intervals/${INTERVAL}" 2>/dev/null)"
if [ -z "${c_interval}" ]; then
c_interval="${D_INTERVAL}"
if [ -z "${c_interval}" ]; then
_exit_err "No definition for interval \"${INTERVAL}\" found. Skipping."
fi
fi
#
# Source checks
#
if [ ! -f "${c_source}" ]; then
_exit_err "Source description \"${c_source}\" is not a file. Skipping."
else
source=$(cat "${c_source}"); ret="$?"
if [ "${ret}" -ne 0 ]; then
_exit_err "Source ${c_source} is not readable. Skipping."
fi
fi
# Verify source is up and accepting connections before deleting any old backups
rsync "$source" >/dev/null || _exit_err "Source ${source} is not readable. Skipping."
#
# do we backup to a remote host? then set pre-cmd
#
if [ -f "${c_remote_host}" ]; then
# adjust ls and co
remote_host=$(cat "${c_remote_host}"); ret="$?"
if [ "${ret}" -ne 0 ]; then
_exit_err "Remote host file ${c_remote_host} exists, but is not readable. Skipping."
fi
destination="${remote_host}:${ddir}"
else
remote_host=""
destination="${ddir}"
fi
export remote_host
#
# check for existence / use real name
#
( pcmd cd "$ddir" ) || _exit_err "Cannot change to ${ddir}. Skipping."
# NEW method as of 0.6:
# - insert ccollect default parameters
# - insert options
# - insert user options
#
# rsync standard options
#
set -- "$@" "--archive" "--delete" "--numeric-ids" "--relative" \
"--delete-excluded" "--sparse"
#
# exclude list
#
if [ -f "${c_exclude}" ]; then
set -- "$@" "--exclude-from=${c_exclude}"
fi
#
# Output a summary
#
if [ -f "${c_summary}" ]; then
set -- "$@" "--stats"
fi
#
# Verbosity for rsync
#
if [ -f "${c_very_verbose}" ]; then
set -- "$@" "-vv"
elif [ -f "${c_verbose}" ]; then
set -- "$@" "-v"
fi
#
# extra options for rsync provided by the user
#
if [ -f "${c_rsync_options}" ]; then
while read line; do
set -- "$@" "$line"
done < "${c_rsync_options}"
fi
#
# Check for incomplete backups
#
pcmd ls -1 "$ddir/${INTERVAL}"*".${c_marker}" 2>/dev/null | while read marker; do
incomplete="$(echo ${marker} | sed "s/\\.${c_marker}\$//")"
_techo "Incomplete backup: ${incomplete}"
if [ -f "${c_delete_incomplete}" ]; then
_techo "Deleting ${incomplete} ..."
pcmd rm $VVERBOSE -rf "${incomplete}" || \
_exit_err "Removing ${incomplete} failed."
pcmd rm $VVERBOSE -f "${marker}" || \
_exit_err "Removing ${marker} failed."
fi
done
#
# check if maximum number of backups is reached, if so remove
# use grep and ls -p so we only look at directories
#
count="$(pcmd ls -p1 "${ddir}" | grep "^${INTERVAL}\..*/\$" | wc -l \
| sed 's/^ *//g')" || _exit_err "Counting backups failed"
_techo "Existing backups: ${count} Total keeping backups: ${c_interval}"
if [ "${count}" -ge "${c_interval}" ]; then
substract=$((${c_interval} - 1))
remove=$((${count} - ${substract}))
_techo "Removing ${remove} backup(s)..."
pcmd ls -${TSORT}p1r "$ddir" | grep "^${INTERVAL}\..*/\$" | \
head -n "${remove}" > "${TMP}" || \
_exit_err "Listing old backups failed"
i=0
while read to_remove; do
eval remove_$i=\"${to_remove}\"
i=$(($i+1))
done < "${TMP}"
j=0
while [ "$j" -lt "$i" ]; do
eval to_remove=\"\$remove_$j\"
_techo "Removing ${to_remove} ..."
pcmd rm ${VVERBOSE} -rf "${ddir}/${to_remove}" || \
_exit_err "Removing ${to_remove} failed."
j=$(($j+1))
done
fi
#
# Check for backup directory to clone from: Always clone from the latest one!
#
# Depending on your file system, you may want to sort on:
# 1. mtime (modification time) with TSORT=t, or
# 2. ctime (last change time, usually) with TSORT=tc
last_dir="$(pcmd ls -${TSORT}p1 "${ddir}" | grep '/$' | head -n 1)" || \
_exit_err "Failed to list contents of ${ddir}."
#
# clone from old backup, if existing
#
if [ "${last_dir}" ]; then
set -- "$@" "--link-dest=${ddir}/${last_dir}"
_techo "Hard linking from ${last_dir}"
fi
# set time when we really begin to backup, not when we began to remove above
destination_date=$(${CDATE})
destination_dir="${ddir}/${INTERVAL}.${destination_date}.$$"
destination_full="${destination}/${INTERVAL}.${destination_date}.$$"
# give some info
_techo "Beginning to backup, this may take some time..."
_techo "Creating ${destination_dir} ..."
pcmd mkdir ${VVERBOSE} "${destination_dir}" || \
_exit_err "Creating ${destination_dir} failed. Skipping."
#
# added marking in 0.6 (and remove it, if successful later)
#
pcmd touch "${destination_dir}.${c_marker}"
#
# the rsync part
#
_techo "Transferring files..."
rsync "$@" "${source}" "${destination_full}"; ret=$?
# Correct the modification time:
pcmd touch "${destination_dir}"
#
# remove marking here
#
if [ "$ret" -ne 12 ] ; then
pcmd rm "${destination_dir}.${c_marker}" || \
_exit_err "Removing ${destination_dir}/${c_marker} failed."
fi
_techo "Finished backup (rsync return code: $ret)."
if [ "${ret}" -ne 0 ]; then
_techo "Warning: rsync exited non-zero, the backup may be broken (see rsync errors)."
fi
#
# post_exec
#
if [ -x "${c_post_exec}" ]; then
_techo "Executing ${c_post_exec} ..."
"${c_post_exec}"; ret=$?
_techo "Finished ${c_post_exec}."
if [ ${ret} -ne 0 ]; then
_exit_err "${c_post_exec} failed."
fi
fi
# Calculation
end_s=$(date +%s)
full_seconds=$((${end_s} - ${begin_s}))
hours=$((${full_seconds} / 3600))
seconds=$((${full_seconds} - (${hours} * 3600)))
minutes=$((${seconds} / 60))
seconds=$((${seconds} - (${minutes} * 60)))
_techo "Backup lasted: ${hours}:${minutes}:${seconds} (h:m:s)"
) | add_name
done
#
# Be a good parent and wait for our children, if they are running wild parallel
#
if [ "${PARALLEL}" ]; then
_techo "Waiting for children to complete..."
wait
fi
#
# Look for post-exec command (general)
#
if [ -x "${CPOSTEXEC}" ]; then
_techo "Executing ${CPOSTEXEC} ..."
"${CPOSTEXEC}"; ret=$?
_techo "Finished ${CPOSTEXEC} (return code: ${ret})."
if [ ${ret} -ne 0 ]; then
_techo "${CPOSTEXEC} failed."
fi
fi
rm -f "${TMP}"
_techo "Finished ${WE}"
# vim: set shiftwidth=3 tabstop=3 expandtab :

@@ -0,0 +1,74 @@
# I found that ccollect was not deleting incomplete backups despite the
# delete_incomplete option being specified. I traced the problem to:
#
# < pcmd rm $VVERBOSE -rf "${ddir}/${realincomplete}" || \
#
# which, at least on all the systems I tested, should read:
#
# > pcmd rm $VVERBOSE -rf "${realincomplete}" || \
#
# Also, the marker file is not deleted. I didn't see any reason to keep
# those files around (what do you think?), so I deleted them also:
#
# > pcmd rm $VVERBOSE -rf "${ddir}/${realincomplete}" || \
# > _exit_err "Removing ${realincomplete} failed."
#
# As long as I was messing with the delete-incomplete code and therefore needed
# to test it, I took the liberty of simplifying it. The v0.7.1 code uses
# multiple loops with multiple loop counters and creates many variables. I
# simplified that to a single loop:
#
# > pcmd ls -1 "$ddir/${INTERVAL}"*".${c_marker}" 2>/dev/null | while read marker; do
# > incomplete="$(echo ${marker} | sed "s/\\.${c_marker}\$//")"
# > _techo "Incomplete backup: ${incomplete}"
# > if [ "${DELETE_INCOMPLETE}" = "yes" ]; then
# > _techo "Deleting ${incomplete} ..."
# > pcmd rm $VVERBOSE -rf "${incomplete}" || \
# > _exit_err "Removing ${incomplete} failed."
# > pcmd rm $VVERBOSE -f "${marker}" || \
# > _exit_err "Removing ${marker} failed."
# > fi
# > done
#
# The final code (a) fixes the delete bug, (b) also deletes the marker, and
# (c) is eight lines shorter than the original.
#
--- ccollect-f.sh 2009-05-12 12:49:28.000000000 -0700
+++ ccollect-g.sh 2009-06-03 14:32:03.000000000 -0700
@@ -516,28 +516,20 @@
fi
#
# Check for incomplete backups
#
- pcmd ls -1 "$ddir/${INTERVAL}"*".${c_marker}" > "${TMP}" 2>/dev/null
-
- i=0
- while read incomplete; do
- eval incomplete_$i=\"$(echo ${incomplete} | sed "s/\\.${c_marker}\$//")\"
- i=$(($i+1))
- done < "${TMP}"
-
- j=0
- while [ "$j" -lt "$i" ]; do
- eval realincomplete=\"\$incomplete_$j\"
- _techo "Incomplete backup: ${realincomplete}"
+ pcmd ls -1 "$ddir/${INTERVAL}"*".${c_marker}" 2>/dev/null | while read marker; do
+ incomplete="$(echo ${marker} | sed "s/\\.${c_marker}\$//")"
+ _techo "Incomplete backup: ${incomplete}"
if [ "${DELETE_INCOMPLETE}" = "yes" ]; then
- _techo "Deleting ${realincomplete} ..."
- pcmd rm $VVERBOSE -rf "${ddir}/${realincomplete}" || \
- _exit_err "Removing ${realincomplete} failed."
+ _techo "Deleting ${incomplete} ..."
+ pcmd rm $VVERBOSE -rf "${incomplete}" || \
+ _exit_err "Removing ${incomplete} failed."
+ pcmd rm $VVERBOSE -f "${marker}" || \
+ _exit_err "Removing ${marker} failed."
fi
- j=$(($j+1))
done
#
# check if maximum number of backups is reached, if so remove
# use grep and ls -p so we only look at directories

@@ -0,0 +1,18 @@
# A line in my f.patch was missing needed quotation marks.
# This fixes that.
#
--- ccollect-g.sh 2009-06-03 14:32:03.000000000 -0700
+++ ccollect-h.sh 2009-06-03 14:32:19.000000000 -0700
@@ -412,11 +412,11 @@
fi
#
# interval definition: First try source specific, fallback to default
#
- if [ ${INTERVAL} = "AUTO" ] ; then
+ if [ "${INTERVAL}" = "AUTO" ] ; then
auto_interval
_techo "Selected interval: '$INTERVAL'"
fi
c_interval="$(cat "${backup}/intervals/${INTERVAL}" 2>/dev/null)"

@@ -0,0 +1,134 @@
# I have many sources that use the same options so I put those
# options in the defaults directory. I found that ccollect was
# ignoring most of them. I thought that this was a bug so I wrote
# some code to correct this:
#
# > for opt in exclude verbose very_verbose rsync_options summary delete_incomplete remote_host ; do
# > if [ -f "${backup}/$opt" -o -f "${backup}/no_$opt" ]; then
# > eval c_$opt=\"${backup}/$opt\"
# > else
# > eval c_$opt=\"${CDEFAULTS}/$opt\"
# > fi
# > done
#
# This also adds a new feature: if some option, say verbose, is
# specified in the defaults directory, it can be turned off for
# particular sources by specifying no_verbose as a source option.
#
# A side effect of this approach is that it forces script variable
# names to be consistent with option file names. Thus, there are
# several changes such as:
#
# < if [ -f "${c_rsync_extra}" ]; then
# > if [ -f "${c_rsync_options}" ]; then
#
# and
#
# < if [ -f "${c_vverbose}" ]; then
# > if [ -f "${c_very_verbose}" ]; then
#
# After correcting the bug and adding the "no_" feature, the code is
# 12 lines shorter.
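#
# A quick illustration (the source name "quiethost" is made up): with
#
#   touch /etc/ccollect/defaults/verbose              # rsync -v for every source
#   touch /etc/ccollect/sources/quiethost/no_verbose  # ...except this one
#
# the loop above leaves c_verbose for quiethost pointing at the
# non-existent sources/quiethost/verbose, so the later -f test fails
# and -v is dropped for that source, while all other sources still
# inherit the default.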
#
--- ccollect-h.sh 2009-06-01 15:59:11.000000000 -0700
+++ ccollect-i.sh 2009-06-03 14:27:58.000000000 -0700
@@ -336,20 +336,19 @@
# Configuration
#
backup="${CSOURCES}/${name}"
c_source="${backup}/source"
c_dest="${backup}/destination"
- c_exclude="${backup}/exclude"
- c_verbose="${backup}/verbose"
- c_vverbose="${backup}/very_verbose"
- c_rsync_extra="${backup}/rsync_options"
- c_summary="${backup}/summary"
c_pre_exec="${backup}/pre_exec"
c_post_exec="${backup}/post_exec"
- f_incomplete="delete_incomplete"
- c_incomplete="${backup}/${f_incomplete}"
- c_remote_host="${backup}/remote_host"
+ for opt in exclude verbose very_verbose rsync_options summary delete_incomplete remote_host ; do
+ if [ -f "${backup}/$opt" -o -f "${backup}/no_$opt" ]; then
+ eval c_$opt=\"${backup}/$opt\"
+ else
+ eval c_$opt=\"${CDEFAULTS}/$opt\"
+ fi
+ done
#
# Marking backups: If we abort it's not removed => Backup is broken
#
c_marker=".ccollect-marker"
@@ -360,16 +359,12 @@
begin_s=$(date +%s)
#
# unset possible options
#
- EXCLUDE=""
- RSYNC_EXTRA=""
- SUMMARY=""
VERBOSE=""
VVERBOSE=""
- DELETE_INCOMPLETE=""
_techo "Beginning to backup"
#
# Standard configuration checks
@@ -462,17 +457,10 @@
# check for existence / use real name
#
( pcmd cd "$ddir" ) || _exit_err "Cannot change to ${ddir}. Skipping."
- #
- # Check whether to delete incomplete backups
- #
- if [ -f "${c_incomplete}" -o -f "${CDEFAULTS}/${f_incomplete}" ]; then
- DELETE_INCOMPLETE="yes"
- fi
-
# NEW method as of 0.6:
# - insert ccollect default parameters
# - insert options
# - insert user options
@@ -498,32 +486,32 @@
fi
#
# Verbosity for rsync
#
- if [ -f "${c_vverbose}" ]; then
+ if [ -f "${c_very_verbose}" ]; then
set -- "$@" "-vv"
elif [ -f "${c_verbose}" ]; then
set -- "$@" "-v"
fi
#
# extra options for rsync provided by the user
#
- if [ -f "${c_rsync_extra}" ]; then
+ if [ -f "${c_rsync_options}" ]; then
while read line; do
set -- "$@" "$line"
- done < "${c_rsync_extra}"
+ done < "${c_rsync_options}"
fi
#
# Check for incomplete backups
#
pcmd ls -1 "$ddir/${INTERVAL}"*".${c_marker}" 2>/dev/null | while read marker; do
incomplete="$(echo ${marker} | sed "s/\\.${c_marker}\$//")"
_techo "Incomplete backup: ${incomplete}"
- if [ "${DELETE_INCOMPLETE}" = "yes" ]; then
+ if [ -f "${c_delete_incomplete}" ]; then
_techo "Deleting ${incomplete} ..."
pcmd rm $VVERBOSE -rf "${incomplete}" || \
_exit_err "Removing ${incomplete} failed."
pcmd rm $VVERBOSE -f "${marker}" || \
_exit_err "Removing ${marker} failed."

@@ -0,0 +1,296 @@
Dear Nico Schottelius,
I have started using ccollect and I very much like its design:
it is elegant and effective.
In the process of getting ccollect set up and running, I made
five changes, including one major new feature, that I hope you will
find useful.
First, I added the following before any old backup gets deleted:
> # Verify source is up and accepting connections before deleting any old backups
> rsync "$source" >/dev/null || _exit_err "Source ${source} is not readable. Skipping."
I think that this quick test is much better than, say, pinging
the source in a pre-exec script: it tests not only that the
source is up and connected to the net, but also verifies (1) that
ssh is up and accepting our key (if we are using ssh), and (2) that
the source directory is mounted (if it needs to be mounted) and
readable.
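(As a hypothetical illustration, the same test can be run by hand:
$ rsync backupuser@host:/home/ >/dev/null || echo "source not reachable"
With a single argument, rsync only lists the source directory, so the
command exits non-zero if ssh refuses the connection or the directory
is missing, without transferring any data.)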
Second, I found ccollect's use of ctime problematic. After
copying an old backup over to my ccollect destination, I adjusted
mtime and atime where needed using touch, e.g.:
touch -d"28 Apr 2009 3:00" destination/daily.01
However, as far as I know, there is no way to correct a bad ctime.
I ran into this issue repeatedly while adjusting my backup
configuration. (For example, "cp -a" preserves mtime but not
ctime. Even worse, "cp -al old new" also changes ctime on old.)
Another potential problem with ctime is that it is file-system
dependent: I have read that Windows sets ctime to the creation time,
not the last-change time.
However, it is simple to give a new backup the correct mtime.
After the rsync step, I added the command:
553a616,617
> # Correct the modification time:
> pcmd touch "${destination_dir}"
Even if ccollect continues to use ctime for sorting, I see no
reason not to give the backup directory the correct mtime.
To allow the rest of the code to use either ctime or mtime, I
added definitions:
44a45,47
> #TSORT="tc" ; NEWER="cnewer"
> TSORT="t" ; NEWER="newer"
(It would be better if this choice were user-configurable, because
those with existing backup directories should continue to use ctime
until the mtimes of their directories are correct. The correction
would happen passively over time as new backups are created using the
above touch command and the old ones are deleted.)
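(A minimal sketch of what that could look like, assuming a new,
hypothetical defaults file named "time_sort" that contains either "t"
or "tc":
TSORT="$(cat "${CDEFAULTS}/time_sort" 2>/dev/null)"
TSORT="${TSORT:-t}"
if [ "${TSORT}" = "tc" ]; then NEWER="cnewer"; else NEWER="newer"; fi
This would keep the current behaviour as the default.)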
With these definitions, the proper link-dest directory can then be
found using this minor change (and comment update):
516,519c579,582
< # Use ls -1c instead of -1t, because last modification maybe the same on all
< # and metadate update (-c) is updated by rsync locally.
< #
< last_dir="$(pcmd ls -tcp1 "${ddir}" | grep '/$' | head -n 1)" || \
---
> # Depending on your file system, you may want to sort on:
> # 1. mtime (modification time) with TSORT=t, or
> # 2. ctime (last change time, usually) with TSORT=tc
> last_dir="$(pcmd ls -${TSORT}p1 "${ddir}" | grep '/$' | head -n 1)" || \
Thirdly, after I copied my old backups over to my ccollect
destination directory, I found that ccollect would delete a
recent backup, not an old one! My problem was that, unknown to
me, the algorithm used to find the oldest backup (for deletion) was
inconsistent with the one used to find the newest (for link-dest).
These two should be consistent, and because time-sorting seemed more
in keeping with the ccollect documentation, I suggest:
492,493c555,556
< pcmd ls -p1 "$ddir" | grep "^${INTERVAL}\..*/\$" | \
< sort -n | head -n "${remove}" > "${TMP}" || \
---
> pcmd ls -${TSORT}p1r "$ddir" | grep "^${INTERVAL}\..*/\$" | \
> head -n "${remove}" > "${TMP}" || \
Fourthly, in my experience, rsync error code 12 means complete
failure, usually because the source refuses the ssh connection.
So, I left the marker in that case:
558,559c622,625
< pcmd rm "${destination_dir}.${c_marker}" || \
< _exit_err "Removing ${destination_dir}/${c_marker} failed."
---
> if [ "$ret" -ne 12 ] ; then
> pcmd rm "${destination_dir}.${c_marker}" || \
> _exit_err "Removing ${destination_dir}/${c_marker} failed."
> fi
(A better solution might allow a user-configurable list of error
codes that are treated the same as a failure.)
Fifth, because I was frustrated by the problems of having a
cron job decide which interval to back up, I added a major new
feature: the modified ccollect can now automatically select an
interval to use for backup.
Cron-job-controlled backup works well if all machines are up and
running all the time and nothing ever goes wrong. I have, however,
some machines that are occasionally turned off, or that are mobile
and only sometimes connected to the local net. For these machines,
using cron jobs to select intervals can be a disaster.
There are several ways one could automatically choose an
appropriate interval. The method I show below has the advantage
that it works with existing ccollect configuration files. The only
requirement is that interval names be chosen to sort nicely (under
ls). For example, I currently use:
$ ls -1 intervals
a_daily
b_weekly
c_monthly
d_quarterly
e_yearly
$ cat intervals/*
6
3
2
3
30
A simpler example would be:
$ ls -1 intervals
int1
int2
int3
$ cat intervals/*
2
3
4
The algorithm works as follows:
If no backup exists for the least frequent interval (int3 in the
simpler example), then use that interval. Otherwise, use the
most frequent interval (int1) unless there are "$(cat
intervals/int1)" int1 backups more recent than any int2 or int3
backup, in which case select int2 unless there are "$(cat
intervals/int2)" int2 backups more recent than any int3 backup,
in which case choose int3.
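As a concrete sketch of the counting step (the directory names here
are invented), deciding whether int1 is still due reduces to a single
find call such as:
$ find /backup/host1 -maxdepth 1 -newer /backup/host1/int2.20090601-0300.1234 \
-newer /backup/host1/int3.20090401-0300.1234 -iname "int1*" | wc -l
If the count is below "$(cat intervals/int1)", int1 is selected;
otherwise the same test is repeated for int2, and so on.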
This algorithm does a good job of cycling through all the backups
for my always-connected machines as well as for my usually-connected
and rarely-connected machines. (For a rarely connected machine,
interval names like "b_weekly" lose their English meaning, but the
algorithm still does a reasonable job of rotating through the
intervals.)
In addition to being more robust, the automatic interval
selection means that crontab is greatly simplified: only one line
is needed. I use:
30 3 * * * ccollect.sh AUTO host1 host2 host3 | tee -a /var/log/ccollect-full.log | ccollect_analyse_logs.sh iwe
Some users might prefer a calendar-driven algorithm such as: do
a yearly backup the first time a machine is connected during a new
year; do a monthly backup the first time a machine is connected
during a month; etc. This, however, would require a change to the
ccollect configuration files, so I didn't pursue the idea any
further.
The code checks to see if the user specified the interval as
AUTO. If so, the auto_interval function is called to select the
interval:
347a417,420
> if [ ${INTERVAL} = "AUTO" ] ; then
> auto_interval
> _techo "Selected interval: '$INTERVAL'"
> fi
The code for auto_interval is as follows (note that it allows 'more
recent' to be defined by either ctime or mtime as per the TSORT
variable):
125a129,182
> # Select interval if AUTO
> #
> # For this to work nicely, you have to choose interval names that sort nicely
> # such as int1, int2, int3 or a_daily, b_weekly, c_monthly, etc.
> #
> auto_interval()
> {
> if [ -d "${backup}/intervals" -a -n "$(ls "${backup}/intervals" 2>/dev/null)" ] ; then
> intervals_dir="${backup}/intervals"
> elif [ -d "${CDEFAULTS}/intervals" -a -n "$(ls "${CDEFAULTS}/intervals" 2>/dev/null)" ] ; then
> intervals_dir="${CDEFAULTS}/intervals"
> else
> _exit_err "No intervals are defined. Skipping."
> fi
> echo intervals_dir=${intervals_dir}
>
> trial_interval="$(ls -1r "${intervals_dir}/" | head -n 1)" || \
> _exit_err "Failed to list contents of ${intervals_dir}/."
> _techo "Considering interval ${trial_interval}"
> most_recent="$(pcmd ls -${TSORT}p1 "${ddir}" | grep "^${trial_interval}.*/$" | head -n 1)" || \
> _exit_err "Failed to list contents of ${ddir}/."
> _techo " Most recent ${trial_interval}: '${most_recent}'"
> if [ -n "${most_recent}" ]; then
> no_intervals="$(ls -1 "${intervals_dir}/" | wc -l)"
> n=1
> while [ "${n}" -le "${no_intervals}" ]; do
> trial_interval="$(ls -p1 "${intervals_dir}/" | tail -n+${n} | head -n 1)"
> _techo "Considering interval '${trial_interval}'"
> c_interval="$(cat "${intervals_dir}/${trial_interval}" 2>/dev/null)"
> m=$((${n}+1))
> set -- "${ddir}" -maxdepth 1
> while [ "${m}" -le "${no_intervals}" ]; do
> interval_m="$(ls -1 "${intervals_dir}/" | tail -n+${m} | head -n 1)"
> most_recent="$(pcmd ls -${TSORT}p1 "${ddir}" | grep "^${interval_m}\..*/$" | head -n 1)"
> _techo " Most recent ${interval_m}: '${most_recent}'"
> if [ -n "${most_recent}" ] ; then
> set -- "$@" -$NEWER "${ddir}/${most_recent}"
> fi
> m=$((${m}+1))
> done
> count=$(pcmd find "$@" -iname "${trial_interval}*" | wc -l)
> _techo " Found $count more recent backups of ${trial_interval} (limit: ${c_interval})"
> if [ "$count" -lt "${c_interval}" ] ; then
> break
> fi
> n=$((${n}+1))
> done
> fi
> export INTERVAL="${trial_interval}"
> D_FILE_INTERVAL="${intervals_dir}/${INTERVAL}"
> D_INTERVAL=$(cat "${D_FILE_INTERVAL}" 2>/dev/null)
> }
>
> #
While I consider the auto_interval code to be developmental, I have
been using it for my nightly backups and it works for me.
One last change: For auto_interval to work, it needs "ddir" to
be defined first. Consequently, I had to move the following code
so it gets run before auto_interval is called:
369,380c442,443
<
< #
< # Destination is a path
< #
< if [ ! -f "${c_dest}" ]; then
< _exit_err "Destination ${c_dest} is not a file. Skipping."
< else
< ddir=$(cat "${c_dest}"); ret="$?"
< if [ "${ret}" -ne 0 ]; then
< _exit_err "Destination ${c_dest} is not readable. Skipping."
< fi
< fi
345a403,414
> # Destination is a path
> #
> if [ ! -f "${c_dest}" ]; then
> _exit_err "Destination ${c_dest} is not a file. Skipping."
> else
> ddir=$(cat "${c_dest}"); ret="$?"
> if [ "${ret}" -ne 0 ]; then
> _exit_err "Destination ${c_dest} is not readable. Skipping."
> fi
> fi
>
> #
I have some other ideas but this is all I have implemented at
the moment. Files are attached.
Thanks again for developing ccollect and let me know what you
think.
Regards,
John
--
John L. Lawless, Ph.D.
Redwood Scientific, Inc.
1005 Terra Nova Blvd
Pacifica, CA 94044-4300
1-650-738-8083

@@ -0,0 +1,15 @@
--- ccollect-0.7.1.sh 2009-02-02 03:39:42.000000000 -0800
+++ ccollect-0.7.1-a.sh 2009-05-24 21:30:38.000000000 -0700
@@ -364,10 +364,12 @@
source=$(cat "${c_source}"); ret="$?"
if [ "${ret}" -ne 0 ]; then
_exit_err "Source ${c_source} is not readable. Skipping."
fi
fi
+ # Verify source is up and accepting connections before deleting any old backups
+ rsync "$source" >/dev/null || _exit_err "Source ${source} is not readable. Skipping."
#
# Destination is a path
#
if [ ! -f "${c_dest}" ]; then

@@ -0,0 +1,15 @@
--- ccollect-0.7.1-a.sh 2009-05-24 21:30:38.000000000 -0700
+++ ccollect-0.7.1-b.sh 2009-05-24 21:32:00.000000000 -0700
@@ -551,10 +551,12 @@
# the rsync part
#
_techo "Transferring files..."
rsync "$@" "${source}" "${destination_full}"; ret=$?
+ # Correct the modification time:
+ pcmd touch "${destination_dir}"
#
# remove marking here
#
pcmd rm "${destination_dir}.${c_marker}" || \

@@ -0,0 +1,35 @@
--- ccollect-0.7.1-b.sh 2009-05-24 21:32:00.000000000 -0700
+++ ccollect-0.7.1-c.sh 2009-05-24 21:39:43.000000000 -0700
@@ -40,10 +40,13 @@
VERSION=0.7.1
RELEASE="2009-02-02"
HALF_VERSION="ccollect ${VERSION}"
FULL_VERSION="ccollect ${VERSION} (${RELEASE})"
+#TSORT="tc" ; NEWER="cnewer"
+TSORT="t" ; NEWER="newer"
+
#
# CDATE: how we use it for naming of the archives
# DDATE: how the user should see it in our output (DISPLAY)
#
CDATE="date +%Y%m%d-%H%M"
@@ -513,14 +516,14 @@
#
# Check for backup directory to clone from: Always clone from the latest one!
#
- # Use ls -1c instead of -1t, because last modification maybe the same on all
- # and metadate update (-c) is updated by rsync locally.
- #
- last_dir="$(pcmd ls -tcp1 "${ddir}" | grep '/$' | head -n 1)" || \
+ # Depending on your file system, you may want to sort on:
+ # 1. mtime (modification time) with TSORT=t, or
+ # 2. ctime (last change time, usually) with TSORT=tc
+ last_dir="$(pcmd ls -${TSORT}p1 "${ddir}" | grep '/$' | head -n 1)" || \
_exit_err "Failed to list contents of ${ddir}."
#
# clone from old backup, if existing
#

@@ -0,0 +1,615 @@
#!/bin/sh
#
# 2005-2009 Nico Schottelius (nico-ccollect at schottelius.org)
#
# This file is part of ccollect.
#
# ccollect is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ccollect is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with ccollect. If not, see <http://www.gnu.org/licenses/>.
#
# Initially written for SyGroup (www.sygroup.ch)
# Date: Mon Nov 14 11:45:11 CET 2005
#
# Standard variables (stolen from cconf)
#
__pwd="$(pwd -P)"
__mydir="${0%/*}"; __abs_mydir="$(cd "$__mydir" && pwd -P)"
__myname=${0##*/}; __abs_myname="$__abs_mydir/$__myname"
#
# where to find our configuration and temporary file
#
CCOLLECT_CONF=${CCOLLECT_CONF:-/etc/ccollect}
CSOURCES=${CCOLLECT_CONF}/sources
CDEFAULTS=${CCOLLECT_CONF}/defaults
CPREEXEC="${CDEFAULTS}/pre_exec"
CPOSTEXEC="${CDEFAULTS}/post_exec"
TMP=$(mktemp "/tmp/${__myname}.XXXXXX")
VERSION=0.7.1
RELEASE="2009-02-02"
HALF_VERSION="ccollect ${VERSION}"
FULL_VERSION="ccollect ${VERSION} (${RELEASE})"
#
# CDATE: how we use it for naming of the archives
# DDATE: how the user should see it in our output (DISPLAY)
#
CDATE="date +%Y%m%d-%H%M"
DDATE="date +%Y-%m-%d-%H:%M:%S"
#
# unset parallel execution
#
PARALLEL=""
#
# catch signals
#
trap "rm -f \"${TMP}\"" 1 2 15
#
# Functions
#
# time displaying echo
_techo()
{
echo "$(${DDATE}): $@"
}
# exit on error
_exit_err()
{
_techo "$@"
rm -f "${TMP}"
exit 1
}
add_name()
{
awk "{ print \"[${name}] \" \$0 }"
}
pcmd()
{
if [ "$remote_host" ]; then
ssh "$remote_host" "$@"
else
"$@"
fi
}
#
# Version
#
display_version()
{
echo "${FULL_VERSION}"
exit 0
}
#
# Tell how to use us
#
usage()
{
echo "${__myname}: <interval name> [args] <sources to backup>"
echo ""
echo " ccollect creates (pseudo) incremental backups"
echo ""
echo " -h, --help: Show this help screen"
echo " -p, --parallel: Parallelise backup processes"
echo " -a, --all: Backup all sources specified in ${CSOURCES}"
echo " -v, --verbose: Be very verbose (uses set -x)"
echo " -V, --version: Print version information"
echo ""
echo " This is version ${VERSION}, released on ${RELEASE}"
echo " (the first version was written on 2005-12-05 by Nico Schottelius)."
echo ""
echo " Retrieve latest ccollect at http://unix.schottelius.org/ccollect/"
exit 0
}
#
# need at least interval and one source or --all
#
if [ $# -lt 2 ]; then
if [ "$1" = "-V" -o "$1" = "--version" ]; then
display_version
else
usage
fi
fi
#
# check for configuration directory
#
[ -d "${CCOLLECT_CONF}" ] || _exit_err "No configuration found in " \
"\"${CCOLLECT_CONF}\" (is \$CCOLLECT_CONF properly set?)"
#
# Filter arguments
#
export INTERVAL="$1"; shift
i=1
no_sources=0
#
# Create source "array"
#
while [ "$#" -ge 1 ]; do
eval arg=\"\$1\"; shift
if [ "${NO_MORE_ARGS}" = 1 ]; then
eval source_${no_sources}=\"${arg}\"
no_sources=$((${no_sources}+1))
# make variable available for subscripts
eval export source_${no_sources}
else
case "${arg}" in
-a|--all)
ALL=1
;;
-v|--verbose)
VERBOSE=1
;;
-p|--parallel)
PARALLEL=1
;;
-h|--help)
usage
;;
--)
NO_MORE_ARGS=1
;;
*)
eval source_${no_sources}=\"$arg\"
no_sources=$(($no_sources+1))
;;
esac
fi
i=$(($i+1))
done
# also export number of sources
export no_sources
#
# be really, really, really verbose
#
if [ "${VERBOSE}" = 1 ]; then
set -x
fi
#
# Look, if we should take ALL sources
#
if [ "${ALL}" = 1 ]; then
# reset everything specified before
no_sources=0
#
# get entries from sources
#
cwd=$(pwd -P)
( cd "${CSOURCES}" && ls > "${TMP}" ); ret=$?
[ "${ret}" -eq 0 ] || _exit_err "Listing of sources failed. Aborting."
while read tmp; do
eval source_${no_sources}=\"${tmp}\"
no_sources=$((${no_sources}+1))
done < "${TMP}"
fi
#
# Need at least ONE source to backup
#
if [ "${no_sources}" -lt 1 ]; then
usage
else
_techo "${HALF_VERSION}: Beginning backup using interval ${INTERVAL}"
fi
#
# Look for pre-exec command (general)
#
if [ -x "${CPREEXEC}" ]; then
_techo "Executing ${CPREEXEC} ..."
"${CPREEXEC}"; ret=$?
_techo "Finished ${CPREEXEC} (return code: ${ret})."
[ "${ret}" -eq 0 ] || _exit_err "${CPREEXEC} failed. Aborting"
fi
#
# check default configuration
#
D_FILE_INTERVAL="${CDEFAULTS}/intervals/${INTERVAL}"
D_INTERVAL=$(cat "${D_FILE_INTERVAL}" 2>/dev/null)
#
# Let's do the backup
#
i=0
while [ "${i}" -lt "${no_sources}" ]; do
#
# Get current source
#
eval name=\"\$source_${i}\"
i=$((${i}+1))
export name
#
# start ourself, if we want parallel execution
#
if [ "${PARALLEL}" ]; then
"$0" "${INTERVAL}" "${name}" &
continue
fi
#
# Start subshell for easy log editing
#
(
#
# Stderr to stdout, so we can produce nice logs
#
exec 2>&1
#
# Configuration
#
backup="${CSOURCES}/${name}"
c_source="${backup}/source"
c_dest="${backup}/destination"
c_exclude="${backup}/exclude"
c_verbose="${backup}/verbose"
c_vverbose="${backup}/very_verbose"
c_rsync_extra="${backup}/rsync_options"
c_summary="${backup}/summary"
c_pre_exec="${backup}/pre_exec"
c_post_exec="${backup}/post_exec"
f_incomplete="delete_incomplete"
c_incomplete="${backup}/${f_incomplete}"
c_remote_host="${backup}/remote_host"
#
# Marking backups: If we abort it's not removed => Backup is broken
#
c_marker=".ccollect-marker"
#
# Times
#
begin_s=$(date +%s)
#
# unset possible options
#
EXCLUDE=""
RSYNC_EXTRA=""
SUMMARY=""
VERBOSE=""
VVERBOSE=""
DELETE_INCOMPLETE=""
_techo "Beginning to backup"
#
# Standard configuration checks
#
if [ ! -e "${backup}" ]; then
_exit_err "Source does not exist."
fi
#
# configuration _must_ be a directory
#
if [ ! -d "${backup}" ]; then
_exit_err "\"${name}\" is not a cconfig-directory. Skipping."
fi
#
# first execute pre_exec, which may generate destination or other
# parameters
#
if [ -x "${c_pre_exec}" ]; then
_techo "Executing ${c_pre_exec} ..."
"${c_pre_exec}"; ret="$?"
_techo "Finished ${c_pre_exec} (return code ${ret})."
if [ "${ret}" -ne 0 ]; then
_exit_err "${c_pre_exec} failed. Skipping."
fi
fi
#
# interval definition: First try source specific, fallback to default
#
c_interval="$(cat "${backup}/intervals/${INTERVAL}" 2>/dev/null)"
if [ -z "${c_interval}" ]; then
c_interval="${D_INTERVAL}"
if [ -z "${c_interval}" ]; then
_exit_err "No definition for interval \"${INTERVAL}\" found. Skipping."
fi
fi
#
# Source checks
#
if [ ! -f "${c_source}" ]; then
_exit_err "Source description \"${c_source}\" is not a file. Skipping."
else
source=$(cat "${c_source}"); ret="$?"
if [ "${ret}" -ne 0 ]; then
_exit_err "Source ${c_source} is not readable. Skipping."
fi
fi
#
# Destination is a path
#
if [ ! -f "${c_dest}" ]; then
_exit_err "Destination ${c_dest} is not a file. Skipping."
else
ddir=$(cat "${c_dest}"); ret="$?"
if [ "${ret}" -ne 0 ]; then
_exit_err "Destination ${c_dest} is not readable. Skipping."
fi
fi
#
# do we backup to a remote host? then set pre-cmd
#
if [ -f "${c_remote_host}" ]; then
# adjust ls and co
remote_host=$(cat "${c_remote_host}"); ret="$?"
if [ "${ret}" -ne 0 ]; then
_exit_err "Remote host file ${c_remote_host} exists, but is not readable. Skipping."
fi
destination="${remote_host}:${ddir}"
else
remote_host=""
destination="${ddir}"
fi
export remote_host
#
# check for existence / use real name
#
( pcmd cd "$ddir" ) || _exit_err "Cannot change to ${ddir}. Skipping."
#
# Check whether to delete incomplete backups
#
if [ -f "${c_incomplete}" -o -f "${CDEFAULTS}/${f_incomplete}" ]; then
DELETE_INCOMPLETE="yes"
fi
# NEW method as of 0.6:
# - insert ccollect default parameters
# - insert options
# - insert user options
#
# rsync standard options
#
set -- "$@" "--archive" "--delete" "--numeric-ids" "--relative" \
"--delete-excluded" "--sparse"
#
# exclude list
#
if [ -f "${c_exclude}" ]; then
set -- "$@" "--exclude-from=${c_exclude}"
fi
#
# Output a summary
#
if [ -f "${c_summary}" ]; then
set -- "$@" "--stats"
fi
#
# Verbosity for rsync
#
if [ -f "${c_vverbose}" ]; then
set -- "$@" "-vv"
elif [ -f "${c_verbose}" ]; then
set -- "$@" "-v"
fi
#
# extra options for rsync provided by the user
#
if [ -f "${c_rsync_extra}" ]; then
while read line; do
set -- "$@" "$line"
done < "${c_rsync_extra}"
fi
#
# Check for incomplete backups
#
pcmd ls -1 "$ddir/${INTERVAL}"*".${c_marker}" > "${TMP}" 2>/dev/null
i=0
while read incomplete; do
eval incomplete_$i=\"$(echo ${incomplete} | sed "s/\\.${c_marker}\$//")\"
i=$(($i+1))
done < "${TMP}"
j=0
while [ "$j" -lt "$i" ]; do
eval realincomplete=\"\$incomplete_$j\"
_techo "Incomplete backup: ${realincomplete}"
if [ "${DELETE_INCOMPLETE}" = "yes" ]; then
_techo "Deleting ${realincomplete} ..."
pcmd rm $VVERBOSE -rf "${ddir}/${realincomplete}" || \
_exit_err "Removing ${realincomplete} failed."
fi
j=$(($j+1))
done
#
# check if maximum number of backups is reached, if so remove
# use grep and ls -p so we only look at directories
#
count="$(pcmd ls -p1 "${ddir}" | grep "^${INTERVAL}\..*/\$" | wc -l \
| sed 's/^ *//g')" || _exit_err "Counting backups failed"
_techo "Existing backups: ${count} Total keeping backups: ${c_interval}"
if [ "${count}" -ge "${c_interval}" ]; then
substract=$((${c_interval} - 1))
remove=$((${count} - ${substract}))
_techo "Removing ${remove} backup(s)..."
pcmd ls -p1 "$ddir" | grep "^${INTERVAL}\..*/\$" | \
sort -n | head -n "${remove}" > "${TMP}" || \
_exit_err "Listing old backups failed"
i=0
while read to_remove; do
eval remove_$i=\"${to_remove}\"
i=$(($i+1))
done < "${TMP}"
j=0
while [ "$j" -lt "$i" ]; do
eval to_remove=\"\$remove_$j\"
_techo "Removing ${to_remove} ..."
pcmd rm ${VVERBOSE} -rf "${ddir}/${to_remove}" || \
_exit_err "Removing ${to_remove} failed."
j=$(($j+1))
done
fi
#
# Check for backup directory to clone from: Always clone from the latest one!
#
# Use ls -1c instead of -1t, because last modification maybe the same on all
# and metadate update (-c) is updated by rsync locally.
#
last_dir="$(pcmd ls -tcp1 "${ddir}" | grep '/$' | head -n 1)" || \
_exit_err "Failed to list contents of ${ddir}."
#
# clone from old backup, if existing
#
if [ "${last_dir}" ]; then
set -- "$@" "--link-dest=${ddir}/${last_dir}"
_techo "Hard linking from ${last_dir}"
fi
# set time when we really begin to backup, not when we began to remove above
destination_date=$(${CDATE})
destination_dir="${ddir}/${INTERVAL}.${destination_date}.$$"
destination_full="${destination}/${INTERVAL}.${destination_date}.$$"
# give some info
_techo "Beginning to backup, this may take some time..."
_techo "Creating ${destination_dir} ..."
pcmd mkdir ${VVERBOSE} "${destination_dir}" || \
_exit_err "Creating ${destination_dir} failed. Skipping."
#
# added marking in 0.6 (and remove it, if successful later)
#
pcmd touch "${destination_dir}.${c_marker}"
#
# the rsync part
#
_techo "Transferring files..."
rsync "$@" "${source}" "${destination_full}"; ret=$?
#
# remove marking here
#
pcmd rm "${destination_dir}.${c_marker}" || \
_exit_err "Removing ${destination_dir}/${c_marker} failed."
_techo "Finished backup (rsync return code: $ret)."
if [ "${ret}" -ne 0 ]; then
_techo "Warning: rsync exited non-zero, the backup may be broken (see rsync errors)."
fi
#
# post_exec
#
if [ -x "${c_post_exec}" ]; then
_techo "Executing ${c_post_exec} ..."
"${c_post_exec}"; ret=$?
_techo "Finished ${c_post_exec}."
if [ ${ret} -ne 0 ]; then
_exit_err "${c_post_exec} failed."
fi
fi
# Calculation
end_s=$(date +%s)
full_seconds=$((${end_s} - ${begin_s}))
hours=$((${full_seconds} / 3600))
seconds=$((${full_seconds} - (${hours} * 3600)))
minutes=$((${seconds} / 60))
seconds=$((${seconds} - (${minutes} * 60)))
_techo "Backup lasted: ${hours}:${minutes}:${seconds} (h:m:s)"
) | add_name
done
#
# Be a good parent and wait for our children, if they are running wild parallel
#
if [ "${PARALLEL}" ]; then
_techo "Waiting for children to complete..."
wait
fi
#
# Look for post-exec command (general)
#
if [ -x "${CPOSTEXEC}" ]; then
_techo "Executing ${CPOSTEXEC} ..."
"${CPOSTEXEC}"; ret=$?
_techo "Finished ${CPOSTEXEC} (return code: ${ret})."
if [ ${ret} -ne 0 ]; then
_techo "${CPOSTEXEC} failed."
fi
fi
rm -f "${TMP}"
_techo "Finished ${WE}"

@@ -0,0 +1,683 @@
#!/bin/sh
#
# 2005-2009 Nico Schottelius (nico-ccollect at schottelius.org)
#
# This file is part of ccollect.
#
# ccollect is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ccollect is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with ccollect. If not, see <http://www.gnu.org/licenses/>.
#
# Initially written for SyGroup (www.sygroup.ch)
# Date: Mon Nov 14 11:45:11 CET 2005
#
# Standard variables (stolen from cconf)
#
__pwd="$(pwd -P)"
__mydir="${0%/*}"; __abs_mydir="$(cd "$__mydir" && pwd -P)"
__myname=${0##*/}; __abs_myname="$__abs_mydir/$__myname"
#
# where to find our configuration and temporary file
#
CCOLLECT_CONF=${CCOLLECT_CONF:-/etc/ccollect}
CSOURCES=${CCOLLECT_CONF}/sources
CDEFAULTS=${CCOLLECT_CONF}/defaults
CPREEXEC="${CDEFAULTS}/pre_exec"
CPOSTEXEC="${CDEFAULTS}/post_exec"
TMP=$(mktemp "/tmp/${__myname}.XXXXXX")
VERSION=0.7.1
RELEASE="2009-02-02"
HALF_VERSION="ccollect ${VERSION}"
FULL_VERSION="ccollect ${VERSION} (${RELEASE})"
#TSORT="tc" ; NEWER="cnewer"
TSORT="t" ; NEWER="newer"
#
# CDATE: how we use it for naming of the archives
# DDATE: how the user should see it in our output (DISPLAY)
#
CDATE="date +%Y%m%d-%H%M"
DDATE="date +%Y-%m-%d-%H:%M:%S"
#
# unset parallel execution
#
PARALLEL=""
#
# catch signals
#
trap "rm -f \"${TMP}\"" 1 2 15
#
# Functions
#
# time displaying echo
_techo()
{
echo "$(${DDATE}): $@"
}
# exit on error
_exit_err()
{
_techo "$@"
rm -f "${TMP}"
exit 1
}
add_name()
{
awk "{ print \"[${name}] \" \$0 }"
}
pcmd()
{
if [ "$remote_host" ]; then
ssh "$remote_host" "$@"
else
"$@"
fi
}
#
# Version
#
display_version()
{
echo "${FULL_VERSION}"
exit 0
}
#
# Tell how to use us
#
usage()
{
echo "${__myname}: <interval name> [args] <sources to backup>"
echo ""
echo " ccollect creates (pseudo) incremental backups"
echo ""
echo " -h, --help: Show this help screen"
echo " -p, --parallel: Parallelise backup processes"
echo " -a, --all: Backup all sources specified in ${CSOURCES}"
echo " -v, --verbose: Be very verbose (uses set -x)"
echo " -V, --version: Print version information"
echo ""
echo " This is version ${VERSION}, released on ${RELEASE}"
echo " (the first version was written on 2005-12-05 by Nico Schottelius)."
echo ""
echo " Retrieve latest ccollect at http://unix.schottelius.org/ccollect/"
exit 0
}
#
# Select interval if AUTO
#
# For this to work nicely, you have to choose interval names that sort nicely
# such as int1, int2, int3 or a_daily, b_weekly, c_monthly, etc.
#
auto_interval()
{
if [ -d "${backup}/intervals" -a -n "$(ls "${backup}/intervals" 2>/dev/null)" ] ; then
intervals_dir="${backup}/intervals"
elif [ -d "${CDEFAULTS}/intervals" -a -n "$(ls "${CDEFAULTS}/intervals" 2>/dev/null)" ] ; then
intervals_dir="${CDEFAULTS}/intervals"
else
_exit_err "No intervals are defined. Skipping."
fi
echo intervals_dir=${intervals_dir}
trial_interval="$(ls -1r "${intervals_dir}/" | head -n 1)" || \
_exit_err "Failed to list contents of ${intervals_dir}/."
_techo "Considering interval ${trial_interval}"
most_recent="$(pcmd ls -${TSORT}p1 "${ddir}" | grep "^${trial_interval}.*/$" | head -n 1)" || \
_exit_err "Failed to list contents of ${ddir}/."
_techo " Most recent ${trial_interval}: '${most_recent}'"
if [ -n "${most_recent}" ]; then
no_intervals="$(ls -1 "${intervals_dir}/" | wc -l)"
n=1
while [ "${n}" -le "${no_intervals}" ]; do
trial_interval="$(ls -p1 "${intervals_dir}/" | tail -n+${n} | head -n 1)"
_techo "Considering interval '${trial_interval}'"
c_interval="$(cat "${intervals_dir}/${trial_interval}" 2>/dev/null)"
m=$((${n}+1))
set -- "${ddir}" -maxdepth 1
while [ "${m}" -le "${no_intervals}" ]; do
interval_m="$(ls -1 "${intervals_dir}/" | tail -n+${m} | head -n 1)"
most_recent="$(pcmd ls -${TSORT}p1 "${ddir}" | grep "^${interval_m}\..*/$" | head -n 1)"
_techo " Most recent ${interval_m}: '${most_recent}'"
if [ -n "${most_recent}" ] ; then
set -- "$@" -$NEWER "${ddir}/${most_recent}"
fi
m=$((${m}+1))
done
count=$(pcmd find "$@" -iname "${trial_interval}*" | wc -l)
_techo " Found $count more recent backups of ${trial_interval} (limit: ${c_interval})"
if [ "$count" -lt "${c_interval}" ] ; then
break
fi
n=$((${n}+1))
done
fi
export INTERVAL="${trial_interval}"
D_FILE_INTERVAL="${intervals_dir}/${INTERVAL}"
D_INTERVAL=$(cat "${D_FILE_INTERVAL}" 2>/dev/null)
}
#
# need at least interval and one source or --all
#
if [ $# -lt 2 ]; then
if [ "$1" = "-V" -o "$1" = "--version" ]; then
display_version
else
usage
fi
fi
#
# check for configuration directory
#
[ -d "${CCOLLECT_CONF}" ] || _exit_err "No configuration found in " \
"\"${CCOLLECT_CONF}\" (is \$CCOLLECT_CONF properly set?)"
#
# Filter arguments
#
export INTERVAL="$1"; shift
i=1
no_sources=0
#
# Create source "array"
#
while [ "$#" -ge 1 ]; do
eval arg=\"\$1\"; shift
if [ "${NO_MORE_ARGS}" = 1 ]; then
eval source_${no_sources}=\"${arg}\"
no_sources=$((${no_sources}+1))
# make variable available for subscripts
eval export source_${no_sources}
else
case "${arg}" in
-a|--all)
ALL=1
;;
-v|--verbose)
VERBOSE=1
;;
-p|--parallel)
PARALLEL=1
;;
-h|--help)
usage
;;
--)
NO_MORE_ARGS=1
;;
*)
eval source_${no_sources}=\"$arg\"
no_sources=$(($no_sources+1))
;;
esac
fi
i=$(($i+1))
done
# also export number of sources
export no_sources
#
# be really, really, really verbose
#
if [ "${VERBOSE}" = 1 ]; then
set -x
fi
#
# Look, if we should take ALL sources
#
if [ "${ALL}" = 1 ]; then
# reset everything specified before
no_sources=0
#
# get entries from sources
#
cwd=$(pwd -P)
( cd "${CSOURCES}" && ls > "${TMP}" ); ret=$?
[ "${ret}" -eq 0 ] || _exit_err "Listing of sources failed. Aborting."
while read tmp; do
eval source_${no_sources}=\"${tmp}\"
no_sources=$((${no_sources}+1))
done < "${TMP}"
fi
#
# Need at least ONE source to backup
#
if [ "${no_sources}" -lt 1 ]; then
usage
else
_techo "${HALF_VERSION}: Beginning backup using interval ${INTERVAL}"
fi
#
# Look for pre-exec command (general)
#
if [ -x "${CPREEXEC}" ]; then
_techo "Executing ${CPREEXEC} ..."
"${CPREEXEC}"; ret=$?
_techo "Finished ${CPREEXEC} (return code: ${ret})."
[ "${ret}" -eq 0 ] || _exit_err "${CPREEXEC} failed. Aborting"
fi
#
# check default configuration
#
D_FILE_INTERVAL="${CDEFAULTS}/intervals/${INTERVAL}"
D_INTERVAL=$(cat "${D_FILE_INTERVAL}" 2>/dev/null)
#
# Let's do the backup
#
i=0
while [ "${i}" -lt "${no_sources}" ]; do
#
# Get current source
#
eval name=\"\$source_${i}\"
i=$((${i}+1))
export name
#
# start ourself, if we want parallel execution
#
if [ "${PARALLEL}" ]; then
"$0" "${INTERVAL}" "${name}" &
continue
fi
#
# Start subshell for easy log editing
#
(
#
# Stderr to stdout, so we can produce nice logs
#
exec 2>&1
#
# Configuration
#
backup="${CSOURCES}/${name}"
c_source="${backup}/source"
c_dest="${backup}/destination"
c_exclude="${backup}/exclude"
c_verbose="${backup}/verbose"
c_vverbose="${backup}/very_verbose"
c_rsync_extra="${backup}/rsync_options"
c_summary="${backup}/summary"
c_pre_exec="${backup}/pre_exec"
c_post_exec="${backup}/post_exec"
f_incomplete="delete_incomplete"
c_incomplete="${backup}/${f_incomplete}"
c_remote_host="${backup}/remote_host"
#
# Marking backups: If we abort it's not removed => Backup is broken
#
c_marker=".ccollect-marker"
#
# Times
#
begin_s=$(date +%s)
#
# unset possible options
#
EXCLUDE=""
RSYNC_EXTRA=""
SUMMARY=""
VERBOSE=""
VVERBOSE=""
DELETE_INCOMPLETE=""
_techo "Beginning to backup"
#
# Standard configuration checks
#
if [ ! -e "${backup}" ]; then
_exit_err "Source does not exist."
fi
#
# configuration _must_ be a directory
#
if [ ! -d "${backup}" ]; then
_exit_err "\"${name}\" is not a cconfig-directory. Skipping."
fi
#
# first execute pre_exec, which may generate destination or other
# parameters
#
if [ -x "${c_pre_exec}" ]; then
_techo "Executing ${c_pre_exec} ..."
"${c_pre_exec}"; ret="$?"
_techo "Finished ${c_pre_exec} (return code ${ret})."
if [ "${ret}" -ne 0 ]; then
_exit_err "${c_pre_exec} failed. Skipping."
fi
fi
#
# Destination is a path
#
if [ ! -f "${c_dest}" ]; then
_exit_err "Destination ${c_dest} is not a file. Skipping."
else
ddir=$(cat "${c_dest}"); ret="$?"
if [ "${ret}" -ne 0 ]; then
_exit_err "Destination ${c_dest} is not readable. Skipping."
fi
fi
#
# interval definition: First try source specific, fallback to default
#
if [ ${INTERVAL} = "AUTO" ] ; then
auto_interval
_techo "Selected interval: '$INTERVAL'"
fi
c_interval="$(cat "${backup}/intervals/${INTERVAL}" 2>/dev/null)"
if [ -z "${c_interval}" ]; then
c_interval="${D_INTERVAL}"
if [ -z "${c_interval}" ]; then
_exit_err "No definition for interval \"${INTERVAL}\" found. Skipping."
fi
fi
#
# Source checks
#
if [ ! -f "${c_source}" ]; then
_exit_err "Source description \"${c_source}\" is not a file. Skipping."
else
source=$(cat "${c_source}"); ret="$?"
if [ "${ret}" -ne 0 ]; then
_exit_err "Source ${c_source} is not readable. Skipping."
fi
fi
# Verify source is up and accepting connections before deleting any old backups
rsync "$source" >/dev/null || _exit_err "Source ${source} is not readable. Skipping."
#
# do we backup to a remote host? then set pre-cmd
#
if [ -f "${c_remote_host}" ]; then
# adjust ls and co
remote_host=$(cat "${c_remote_host}"); ret="$?"
if [ "${ret}" -ne 0 ]; then
_exit_err "Remote host file ${c_remote_host} exists, but is not readable. Skipping."
fi
destination="${remote_host}:${ddir}"
else
remote_host=""
destination="${ddir}"
fi
export remote_host
#
# check for existence / use real name
#
( pcmd cd "$ddir" ) || _exit_err "Cannot change to ${ddir}. Skipping."
#
# Check whether to delete incomplete backups
#
if [ -f "${c_incomplete}" -o -f "${CDEFAULTS}/${f_incomplete}" ]; then
DELETE_INCOMPLETE="yes"
fi
# NEW method as of 0.6:
# - insert ccollect default parameters
# - insert options
# - insert user options
#
# rsync standard options
#
set -- "$@" "--archive" "--delete" "--numeric-ids" "--relative" \
"--delete-excluded" "--sparse"
#
# exclude list
#
if [ -f "${c_exclude}" ]; then
set -- "$@" "--exclude-from=${c_exclude}"
fi
#
# Output a summary
#
if [ -f "${c_summary}" ]; then
set -- "$@" "--stats"
fi
#
# Verbosity for rsync
#
if [ -f "${c_vverbose}" ]; then
set -- "$@" "-vv"
elif [ -f "${c_verbose}" ]; then
set -- "$@" "-v"
fi
#
# extra options for rsync provided by the user
#
if [ -f "${c_rsync_extra}" ]; then
while read line; do
set -- "$@" "$line"
done < "${c_rsync_extra}"
fi
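# For illustration only: rsync_options is read line by line, one option per
# line, e.g. a file containing the two lines "--bwlimit=100" and
# "--no-whole-file" (hypothetical options a user might set).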
#
# Check for incomplete backups
#
pcmd ls -1 "$ddir/${INTERVAL}"*".${c_marker}" > "${TMP}" 2>/dev/null
i=0
while read incomplete; do
eval incomplete_$i=\"$(echo ${incomplete} | sed "s/\\.${c_marker}\$//")\"
i=$(($i+1))
done < "${TMP}"
j=0
while [ "$j" -lt "$i" ]; do
eval realincomplete=\"\$incomplete_$j\"
_techo "Incomplete backup: ${realincomplete}"
if [ "${DELETE_INCOMPLETE}" = "yes" ]; then
_techo "Deleting ${realincomplete} ..."
pcmd rm $VVERBOSE -rf "${ddir}/${realincomplete}" || \
_exit_err "Removing ${realincomplete} failed."
fi
j=$(($j+1))
done
#
# check if maximum number of backups is reached, if so remove
# use grep and ls -p so we only look at directories
#
count="$(pcmd ls -p1 "${ddir}" | grep "^${INTERVAL}\..*/\$" | wc -l \
| sed 's/^ *//g')" || _exit_err "Counting backups failed"
_techo "Existing backups: ${count} Total keeping backups: ${c_interval}"
if [ "${count}" -ge "${c_interval}" ]; then
substract=$((${c_interval} - 1))
remove=$((${count} - ${substract}))
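# Worked example (hypothetical numbers): with c_interval=7 and count=7,
# substract=6 and remove=7-6=1, i.e. exactly one old backup is removed so
# that the new backup brings the total back to 7.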
_techo "Removing ${remove} backup(s)..."
pcmd ls -${TSORT}p1r "$ddir" | grep "^${INTERVAL}\..*/\$" | \
head -n "${remove}" > "${TMP}" || \
_exit_err "Listing old backups failed"
i=0
while read to_remove; do
eval remove_$i=\"${to_remove}\"
i=$(($i+1))
done < "${TMP}"
j=0
while [ "$j" -lt "$i" ]; do
eval to_remove=\"\$remove_$j\"
_techo "Removing ${to_remove} ..."
pcmd rm ${VVERBOSE} -rf "${ddir}/${to_remove}" || \
_exit_err "Removing ${to_remove} failed."
j=$(($j+1))
done
fi
#
# Check for backup directory to clone from: Always clone from the latest one!
#
# Depending on your file system, you may want to sort on:
# 1. mtime (modification time) with TSORT=t, or
# 2. ctime (last change time, usually) with TSORT=tc
last_dir="$(pcmd ls -${TSORT}p1 "${ddir}" | grep '/$' | head -n 1)" || \
_exit_err "Failed to list contents of ${ddir}."
#
# clone from old backup, if existing
#
if [ "${last_dir}" ]; then
set -- "$@" "--link-dest=${ddir}/${last_dir}"
_techo "Hard linking from ${last_dir}"
fi
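# Example (hypothetical path): --link-dest=/backup/foo/daily.20090601-0300.123
# tells rsync to hard-link unchanged files from that previous backup instead
# of copying them, so unchanged files take up (almost) no additional space.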
# set time when we really begin to backup, not when we began to remove above
destination_date=$(${CDATE})
destination_dir="${ddir}/${INTERVAL}.${destination_date}.$$"
destination_full="${destination}/${INTERVAL}.${destination_date}.$$"
# give some info
_techo "Beginning to backup, this may take some time..."
_techo "Creating ${destination_dir} ..."
pcmd mkdir ${VVERBOSE} "${destination_dir}" || \
_exit_err "Creating ${destination_dir} failed. Skipping."
#
# marking added in 0.6 (the marker is removed later, if the backup succeeds)
#
pcmd touch "${destination_dir}.${c_marker}"
#
# the rsync part
#
_techo "Transferring files..."
rsync "$@" "${source}" "${destination_full}"; ret=$?
# Correct the modification time:
pcmd touch "${destination_dir}"
#
# remove marking here
#
if [ "$ret" -ne 12 ] ; then
pcmd rm "${destination_dir}.${c_marker}" || \
_exit_err "Removing ${destination_dir}/${c_marker} failed."
fi
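# rsync exit code 12 means "error in rsync protocol data stream" (often seen
# when the destination runs out of space or the connection breaks); in that
# case the marker is kept, so the next run will report -- and optionally
# delete -- this backup as incomplete.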
_techo "Finished backup (rsync return code: $ret)."
if [ "${ret}" -ne 0 ]; then
_techo "Warning: rsync exited non-zero, the backup may be broken (see rsync errors)."
fi
#
# post_exec
#
if [ -x "${c_post_exec}" ]; then
_techo "Executing ${c_post_exec} ..."
"${c_post_exec}"; ret=$?
_techo "Finished ${c_post_exec}."
if [ ${ret} -ne 0 ]; then
_exit_err "${c_post_exec} failed."
fi
fi
# Calculation
end_s=$(date +%s)
full_seconds=$((${end_s} - ${begin_s}))
hours=$((${full_seconds} / 3600))
seconds=$((${full_seconds} - (${hours} * 3600)))
minutes=$((${seconds} / 60))
seconds=$((${seconds} - (${minutes} * 60)))
_techo "Backup lasted: ${hours}:${minutes}:${seconds} (h:m:s)"
) | add_name
done
#
# Be a good parent and wait for our children, if they are running wild in parallel
#
if [ "${PARALLEL}" ]; then
_techo "Waiting for children to complete..."
wait
fi
#
# Look for post-exec command (general)
#
if [ -x "${CPOSTEXEC}" ]; then
_techo "Executing ${CPOSTEXEC} ..."
"${CPOSTEXEC}"; ret=$?
_techo "Finished ${CPOSTEXEC} (return code: ${ret})."
if [ ${ret} -ne 0 ]; then
_techo "${CPOSTEXEC} failed."
fi
fi
rm -f "${TMP}"
_techo "Finished ${WE}"
# vim: set shiftwidth=3 tabstop=3 expandtab :

View file

@ -0,0 +1,17 @@
--- ccollect-0.7.1-c.sh 2009-05-24 21:39:43.000000000 -0700
+++ ccollect-0.7.1-d.sh 2009-05-24 21:47:09.000000000 -0700
@@ -492,12 +492,12 @@
if [ "${count}" -ge "${c_interval}" ]; then
substract=$((${c_interval} - 1))
remove=$((${count} - ${substract}))
_techo "Removing ${remove} backup(s)..."
- pcmd ls -p1 "$ddir" | grep "^${INTERVAL}\..*/\$" | \
- sort -n | head -n "${remove}" > "${TMP}" || \
+ pcmd ls -${TSORT}p1r "$ddir" | grep "^${INTERVAL}\..*/\$" | \
+ head -n "${remove}" > "${TMP}" || \
_exit_err "Listing old backups failed"
i=0
while read to_remove; do
eval remove_$i=\"${to_remove}\"

View file

@ -0,0 +1,19 @@
--- ccollect-0.7.1-d.sh 2009-05-24 21:47:09.000000000 -0700
+++ ccollect-0.7.1-e.sh 2009-05-24 22:18:16.000000000 -0700
@@ -560,12 +560,14 @@
pcmd touch "${destination_dir}"
#
# remove marking here
#
- pcmd rm "${destination_dir}.${c_marker}" || \
- _exit_err "Removing ${destination_dir}/${c_marker} failed."
+ if [ "$ret" -ne 12 ] ; then
+ pcmd rm "${destination_dir}.${c_marker}" || \
+ _exit_err "Removing ${destination_dir}/${c_marker} failed."
+ fi
_techo "Finished backup (rsync return code: $ret)."
if [ "${ret}" -ne 0 ]; then
_techo "Warning: rsync exited non-zero, the backup may be broken (see rsync errors)."
fi

View file

@ -0,0 +1,119 @@
--- ccollect-0.7.1-e.sh 2009-05-24 22:18:16.000000000 -0700
+++ ccollect-0.7.1-f.sh 2009-05-24 22:19:50.000000000 -0700
@@ -124,10 +124,64 @@
echo " Retrieve latest ccollect at http://unix.schottelius.org/ccollect/"
exit 0
}
#
+# Select interval if AUTO
+#
+# For this to work nicely, you have to choose interval names that sort nicely
+# such as int1, int2, int3 or a_daily, b_weekly, c_monthly, etc.
+#
+auto_interval()
+{
+ if [ -d "${backup}/intervals" -a -n "$(ls "${backup}/intervals" 2>/dev/null)" ] ; then
+ intervals_dir="${backup}/intervals"
+ elif [ -d "${CDEFAULTS}/intervals" -a -n "$(ls "${CDEFAULTS}/intervals" 2>/dev/null)" ] ; then
+ intervals_dir="${CDEFAULTS}/intervals"
+ else
+ _exit_err "No intervals are defined. Skipping."
+ fi
+ echo intervals_dir=${intervals_dir}
+
+ trial_interval="$(ls -1r "${intervals_dir}/" | head -n 1)" || \
+ _exit_err "Failed to list contents of ${intervals_dir}/."
+ _techo "Considering interval ${trial_interval}"
+ most_recent="$(pcmd ls -${TSORT}p1 "${ddir}" | grep "^${trial_interval}.*/$" | head -n 1)" || \
+ _exit_err "Failed to list contents of ${ddir}/."
+ _techo " Most recent ${trial_interval}: '${most_recent}'"
+ if [ -n "${most_recent}" ]; then
+ no_intervals="$(ls -1 "${intervals_dir}/" | wc -l)"
+ n=1
+ while [ "${n}" -le "${no_intervals}" ]; do
+ trial_interval="$(ls -p1 "${intervals_dir}/" | tail -n+${n} | head -n 1)"
+ _techo "Considering interval '${trial_interval}'"
+ c_interval="$(cat "${intervals_dir}/${trial_interval}" 2>/dev/null)"
+ m=$((${n}+1))
+ set -- "${ddir}" -maxdepth 1
+ while [ "${m}" -le "${no_intervals}" ]; do
+ interval_m="$(ls -1 "${intervals_dir}/" | tail -n+${m} | head -n 1)"
+ most_recent="$(pcmd ls -${TSORT}p1 "${ddir}" | grep "^${interval_m}\..*/$" | head -n 1)"
+ _techo " Most recent ${interval_m}: '${most_recent}'"
+ if [ -n "${most_recent}" ] ; then
+ set -- "$@" -$NEWER "${ddir}/${most_recent}"
+ fi
+ m=$((${m}+1))
+ done
+ count=$(pcmd find "$@" -iname "${trial_interval}*" | wc -l)
+ _techo " Found $count more recent backups of ${trial_interval} (limit: ${c_interval})"
+ if [ "$count" -lt "${c_interval}" ] ; then
+ break
+ fi
+ n=$((${n}+1))
+ done
+ fi
+ export INTERVAL="${trial_interval}"
+ D_FILE_INTERVAL="${intervals_dir}/${INTERVAL}"
+ D_INTERVAL=$(cat "${D_FILE_INTERVAL}" 2>/dev/null)
+}
+
+#
# need at least interval and one source or --all
#
if [ $# -lt 2 ]; then
if [ "$1" = "-V" -o "$1" = "--version" ]; then
display_version
@@ -344,12 +398,28 @@
_exit_err "${c_pre_exec} failed. Skipping."
fi
fi
#
+ # Destination is a path
+ #
+ if [ ! -f "${c_dest}" ]; then
+ _exit_err "Destination ${c_dest} is not a file. Skipping."
+ else
+ ddir=$(cat "${c_dest}"); ret="$?"
+ if [ "${ret}" -ne 0 ]; then
+ _exit_err "Destination ${c_dest} is not readable. Skipping."
+ fi
+ fi
+
+ #
# interval definition: First try source specific, fallback to default
#
+ if [ ${INTERVAL} = "AUTO" ] ; then
+ auto_interval
+ _techo "Selected interval: '$INTERVAL'"
+ fi
c_interval="$(cat "${backup}/intervals/${INTERVAL}" 2>/dev/null)"
if [ -z "${c_interval}" ]; then
c_interval="${D_INTERVAL}"
@@ -371,22 +441,10 @@
fi
# Verify source is up and accepting connections before deleting any old backups
rsync "$source" >/dev/null || _exit_err "Source ${source} is not readable. Skipping."
#
- # Destination is a path
- #
- if [ ! -f "${c_dest}" ]; then
- _exit_err "Destination ${c_dest} is not a file. Skipping."
- else
- ddir=$(cat "${c_dest}"); ret="$?"
- if [ "${ret}" -ne 0 ]; then
- _exit_err "Destination ${c_dest} is not readable. Skipping."
- fi
- fi
-
- #
# do we backup to a remote host? then set pre-cmd
#
if [ -f "${c_remote_host}" ]; then
# adjust ls and co
remote_host=$(cat "${c_remote_host}"); ret="$?"