Merge branch 'master' into beta

Darko Poljak 2021-04-20 07:43:33 +02:00
commit deaff9c8e7
39 changed files with 391 additions and 259 deletions

View file

@ -72,9 +72,11 @@ def commandline():
if __name__ == "__main__": if __name__ == "__main__":
if sys.version < cdist.MIN_SUPPORTED_PYTHON_VERSION: if sys.version_info[:3] < cdist.MIN_SUPPORTED_PYTHON_VERSION:
print('Python >= {} is required on the source host.'.format( print(
cdist.MIN_SUPPORTED_PYTHON_VERSION), file=sys.stderr) 'Python >= {} is required on the source host.'.format(
".".join(map(str, cdist.MIN_SUPPORTED_PYTHON_VERSION))),
file=sys.stderr)
sys.exit(1) sys.exit(1)
exit_code = 0 exit_code = 0
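
The switch from a version string to a version tuple matters because string comparison is lexicographic. A minimal standalone sketch (plain Python, reusing only the names visible in this hunk) of the difference:

    import sys

    MIN_SUPPORTED_PYTHON_VERSION = (3, 5)

    print('3.10' < '3.5')    # True  -- lexicographic, wrong for versions
    print((3, 10) < (3, 5))  # False -- tuple comparison, correct

    if sys.version_info[:3] < MIN_SUPPORTED_PYTHON_VERSION:
        print('Python >= {} is required on the source host.'.format(
            '.'.join(map(str, MIN_SUPPORTED_PYTHON_VERSION))),
            file=sys.stderr)
        sys.exit(1)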

View file

@ -67,7 +67,7 @@ ORDER_DEP_STATE_NAME = 'order_dep_state'
TYPEORDER_DEP_NAME = 'typeorder_dep' TYPEORDER_DEP_NAME = 'typeorder_dep'
MIN_SUPPORTED_PYTHON_VERSION = '3.5' MIN_SUPPORTED_PYTHON_VERSION = (3, 5)
class Error(Exception): class Error(Exception):

View file

@ -556,10 +556,10 @@ def parse_and_configure(argv, singleton=True):
log = logging.getLogger("cdist") log = logging.getLogger("cdist")
log.verbose("version %s" % cdist.VERSION) log.verbose("version %s", cdist.VERSION)
log.trace('command line args: {}'.format(cfg.command_line_args)) log.trace('command line args: %s', cfg.command_line_args)
log.trace('configuration: {}'.format(cfg.get_config())) log.trace('configuration: %s', cfg.get_config())
log.trace('configured args: {}'.format(args)) log.trace('configured args: %s', args)
check_beta(vars(args)) check_beta(vars(args))
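
These logging changes defer string interpolation to the logging framework. A small illustrative sketch with the standard library logger (cdist layers extra verbose/trace levels on top of it, which this sketch does not reproduce):

    import logging

    logging.basicConfig(level=logging.INFO)
    log = logging.getLogger('cdist')

    args = {'beta': True, 'jobs': 4}
    log.debug('configured args: {}'.format(args))  # dict is formatted even though DEBUG is off
    log.debug('configured args: %s', args)         # interpolation skipped entirely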

View file

@ -8,6 +8,12 @@ then
exit 0 exit 0
fi fi
if [ ! -f "$__object/parameter/sum" ]
then
echo 'present'
exit 0
fi
sum_should="$( cat "$__object/parameter/sum" )" sum_should="$( cat "$__object/parameter/sum" )"
if [ -f "$__object/parameter/cmd-sum" ] if [ -f "$__object/parameter/cmd-sum" ]

View file

@ -8,9 +8,6 @@ cdist-type__download - Download a file
DESCRIPTION DESCRIPTION
----------- -----------
Destination (``$__object_id``) in target host must be persistent storage
in order to calculate checksum and decide if file must be (re-)downloaded.
By default type will try to use ``wget``, ``curl`` or ``fetch``. By default type will try to use ``wget``, ``curl`` or ``fetch``.
If download happens in target (see ``--download``) then type will If download happens in target (see ``--download``) then type will
fallback to (and install) ``wget``. fallback to (and install) ``wget``.
@ -25,14 +22,14 @@ REQUIRED PARAMETERS
url url
File's URL. File's URL.
sum
Checksum of file going to be downloaded.
By default output of ``cksum`` without filename is expected.
Other hash formats supported with prefixes: ``md5:``, ``sha1:`` and ``sha256:``.
OPTIONAL PARAMETERS OPTIONAL PARAMETERS
------------------- -------------------
sum
Checksum is used to decide if existing destination file must be redownloaded.
By default output of ``cksum`` without filename is expected.
Other hash formats supported with prefixes: ``md5:``, ``sha1:`` and ``sha256:``.
download download
If ``local`` (default), then download file to local storage and copy If ``local`` (default), then download file to local storage and copy
it to target host. If ``remote``, then download happens in target. it to target host. If ``remote``, then download happens in target.
@ -81,7 +78,7 @@ Ander Punnar <ander-at-kvlt-dot-ee>
COPYING COPYING
------- -------
Copyright \(C) 2020 Ander Punnar. You can redistribute it Copyright \(C) 2021 Ander Punnar. You can redistribute it
and/or modify it under the terms of the GNU General Public License as and/or modify it under the terms of the GNU General Public License as
published by the Free Software Foundation, either version 3 of the published by the Free Software Foundation, either version 3 of the
License, or (at your option) any later version. License, or (at your option) any later version.

View file

@ -1,3 +1,4 @@
sum
cmd-get cmd-get
cmd-sum cmd-sum
download download

View file

@ -1,2 +1 @@
url url
sum

View file

@ -1,5 +1,19 @@
#!/bin/sh #!/bin/sh -e
destination="/$__object_id/.git" destination="/${__object_id:?}/.git"
stat --print "%G" "${destination}" 2>/dev/null || exit 0 # shellcheck disable=SC2012
group_gid=$(ls -ldn "${destination}" | awk '{ print $4 }')
# NOTE: +1 because $((notanum)) prints 0.
if test $((group_gid + 1)) -ge 0
then
group_should=$(cat "${__object:?}/parameter/group")
if expr "${group_should}" : '[0-9]*$' >/dev/null
then
printf '%u\n' "${group_gid}"
else
printf '%s\n' "$(id -u -n "${group_gid}")"
fi
fi

View file

@ -1,5 +1,19 @@
#!/bin/sh #!/bin/sh -e
destination="/$__object_id/.git" destination="/${__object_id:?}/.git"
stat --print "%U" "${destination}" 2>/dev/null || exit 0 # shellcheck disable=SC2012
owner_uid=$(ls -ldn "${destination}" | awk '{ print $3 }')
# NOTE: +1 because $((notanum)) prints 0.
if test $((owner_uid + 1)) -ge 0
then
owner_should=$(cat "${__object:?}/parameter/owner")
if expr "${owner_should}" : '[0-9]*$' >/dev/null
then
printf '%u\n' "${owner_uid}"
else
printf '%s\n' "$(id -u -n "${owner_uid}")"
fi
fi

View file

@ -1,5 +1,19 @@
#!/bin/sh #!/bin/sh -e
destination="/$__object_id" destination="/${__object_id:?}"
stat --print "%G" "${destination}" 2>/dev/null || exit 0 # shellcheck disable=SC2012
group_gid=$(ls -ldn "${destination}" | awk '{ print $4 }')
# NOTE: +1 because $((notanum)) prints 0.
if test $((group_gid + 1)) -ge 0
then
group_should=$(cat "${__object:?}/parameter/group")
if expr "${group_should}" : '[0-9]*$' >/dev/null
then
printf '%u\n' "${group_gid}"
else
printf '%s\n' "$(id -u -n "${group_gid}")"
fi
fi

View file

@ -1,5 +1,19 @@
#!/bin/sh #!/bin/sh -e
destination="/$__object_id" destination="/${__object_id:?}"
stat --print "%U" "${destination}" 2>/dev/null || exit 0 # shellcheck disable=SC2012
owner_uid=$(ls -ldn "${destination}" | awk '{ print $3 }')
# NOTE: +1 because $((notanum)) prints 0.
if test $((owner_uid + 1)) -ge 0
then
owner_should=$(cat "${__object:?}/parameter/owner")
if expr "${owner_should}" : '[0-9]*$' >/dev/null
then
printf '%u\n' "${owner_uid}"
else
printf '%s\n' "$(id -u -n "${owner_uid}")"
fi
fi

View file

@ -61,7 +61,7 @@ EXAMPLES
__pyvenv /home/foo/fooenv --pyvenv /usr/local/bin/pyvenv-3.4 __pyvenv /home/foo/fooenv --pyvenv /usr/local/bin/pyvenv-3.4
# Create python virtualenv for user foo. # Create python virtualenv for user foo.
__pyvenv /home/foo/fooenv --group foo --user foo __pyvenv /home/foo/fooenv --group foo --owner foo
# Create python virtualenv with specific parameters. # Create python virtualenv with specific parameters.
__pyvenv /home/services/djangoenv --venvparams "--copies --system-site-packages" __pyvenv /home/services/djangoenv --venvparams "--copies --system-site-packages"

View file

@ -25,6 +25,7 @@ type_and_key="$(tr ' ' '\n' < "$__object/parameter/key"| awk '/^(ssh|ecdsa)-[^ ]
if [ -n "${type_and_key}" ] if [ -n "${type_and_key}" ]
then then
file="$(cat "$__object/parameter/file")" file="$(cat "$__object/parameter/file")"
test -e "$file" || exit 0
# get any entries that match the type and key # get any entries that match the type and key

View file

@ -37,9 +37,9 @@ tmpfile=\$(mktemp ${file}.cdist.XXXXXXXXXX)
# preserve ownership and permissions of existing file # preserve ownership and permissions of existing file
if [ -f "$file" ]; then if [ -f "$file" ]; then
cp -p "$file" "\$tmpfile" cp -p "$file" "\$tmpfile"
grep -v -F -x '$line' '$file' >\$tmpfile
fi fi
grep -v -F -x '$line' '$file' > \$tmpfile || true cat "\$tmpfile" >"$file"
mv -f "\$tmpfile" "$file"
DONE DONE
} }

View file

@ -39,7 +39,14 @@ in
(freebsd|netbsd|openbsd) (freebsd|netbsd|openbsd)
# whitelist # whitelist
;; ;;
(openbmc-phosphor)
# whitelist
# OpenBMC can be configured with dropbear and OpenSSH.
# If dropbear is used, the state explorer will already fail because it
# cannot find the sshd binary.
;;
(*) (*)
: "${__type:?}" # make shellcheck happy
printf 'Your operating system (%s) is currently not supported by this type (%s)\n' \ printf 'Your operating system (%s) is currently not supported by this type (%s)\n' \
"${os}" "${__type##*/}" >&2 "${os}" "${__type##*/}" >&2
printf 'Please contribute an implementation for it if you can.\n' >&2 printf 'Please contribute an implementation for it if you can.\n' >&2

View file

@ -193,7 +193,7 @@ class Config:
fd.write(sys.stdin.read()) fd.write(sys.stdin.read())
except (IOError, OSError) as e: except (IOError, OSError) as e:
raise cdist.Error(("Creating tempfile for stdin data " raise cdist.Error(("Creating tempfile for stdin data "
"failed: %s" % e)) "failed: {}").format(e))
args.manifest = initial_manifest_temp_path args.manifest = initial_manifest_temp_path
atexit.register(lambda: os.remove(initial_manifest_temp_path)) atexit.register(lambda: os.remove(initial_manifest_temp_path))
@ -276,15 +276,15 @@ class Config:
host_tags = None host_tags = None
host_base_path, hostdir = cls.create_host_base_dirs( host_base_path, hostdir = cls.create_host_base_dirs(
host, base_root_path) host, base_root_path)
log.debug("Base root path for target host \"{}\" is \"{}\"".format( log.debug("Base root path for target host \"%s\" is \"%s\"",
host, host_base_path)) host, host_base_path)
hostcnt += 1 hostcnt += 1
if args.parallel: if args.parallel:
pargs = (host, host_tags, host_base_path, hostdir, args, True, pargs = (host, host_tags, host_base_path, hostdir, args, True,
configuration) configuration)
log.trace(("Args for multiprocessing operation " log.trace("Args for multiprocessing operation for host %s: %s",
"for host {}: {}".format(host, pargs))) host, pargs)
process_args.append(pargs) process_args.append(pargs)
else: else:
try: try:
@ -301,10 +301,10 @@ class Config:
except cdist.Error: except cdist.Error:
failed_hosts.append(host) failed_hosts.append(host)
elif args.parallel: elif args.parallel:
log.trace("Multiprocessing start method is {}".format( log.trace("Multiprocessing start method is %s",
multiprocessing.get_start_method())) multiprocessing.get_start_method())
log.trace(("Starting multiprocessing Pool for {} " log.trace("Starting multiprocessing Pool for %d parallel host"
"parallel host operation".format(args.parallel))) " operation", args.parallel)
results = mp_pool_run(cls.onehost, results = mp_pool_run(cls.onehost,
process_args, process_args,
@ -399,16 +399,13 @@ class Config:
remote_exec, remote_copy, cleanup_cmd = cls._resolve_remote_cmds( remote_exec, remote_copy, cleanup_cmd = cls._resolve_remote_cmds(
args) args)
log.debug("remote_exec for host \"{}\": {}".format( log.debug("remote_exec for host \"%s\": %s", host, remote_exec)
host, remote_exec)) log.debug("remote_copy for host \"%s\": %s", host, remote_copy)
log.debug("remote_copy for host \"{}\": {}".format(
host, remote_copy))
family = cls._address_family(args) family = cls._address_family(args)
log.debug("address family: {}".format(family)) log.debug("address family: %s", family)
target_host = cls.resolve_target_addresses(host, family) target_host = cls.resolve_target_addresses(host, family)
log.debug("target_host for host \"{}\": {}".format( log.debug("target_host for host \"%s\": %s", host, target_host)
host, target_host))
local = cdist.exec.local.Local( local = cdist.exec.local.Local(
target_host=target_host, target_host=target_host,
@ -423,6 +420,9 @@ class Config:
exec_path=sys.argv[0], exec_path=sys.argv[0],
save_output_streams=args.save_output_streams) save_output_streams=args.save_output_streams)
# Make __global state dir available to custom remote scripts.
os.environ['__global'] = local.base_path
remote = cdist.exec.remote.Remote( remote = cdist.exec.remote.Remote(
target_host=target_host, target_host=target_host,
remote_exec=remote_exec, remote_exec=remote_exec,
@ -475,8 +475,8 @@ class Config:
"""Do what is most often done: deploy & cleanup""" """Do what is most often done: deploy & cleanup"""
start_time = time.time() start_time = time.time()
self.log.info("Starting {} run".format( self.log.info("Starting %s run",
'dry' if self.dry_run else 'configuration')) 'dry' if self.dry_run else 'configuration')
self._init_files_dirs() self._init_files_dirs()
self.local.collect_python_types() self.local.collect_python_types()
@ -495,9 +495,9 @@ class Config:
self._remove_files_dirs() self._remove_files_dirs()
self.local.save_cache(start_time) self.local.save_cache(start_time)
self.log.info("Finished {} run in {:.2f} seconds".format( self.log.info("Finished %s run in %.2f seconds",
'dry' if self.dry_run else 'successful', 'dry' if self.dry_run else 'successful',
time.time() - start_time)) time.time() - start_time)
def cleanup(self): def cleanup(self):
self.log.debug("Running cleanup commands") self.log.debug("Running cleanup commands")
@ -521,8 +521,8 @@ class Config:
self.local.object_path, self.local.type_path, self.local.object_path, self.local.type_path,
self.local.object_marker_name): self.local.object_marker_name):
if cdist_object.cdist_type.is_install: if cdist_object.cdist_type.is_install:
self.log.debug(("Running in config mode, ignoring install " self.log.debug("Running in config mode, ignoring install "
"object: {0}").format(cdist_object)) "object: %s", cdist_object)
else: else:
yield cdist_object yield cdist_object
@ -542,7 +542,7 @@ class Config:
objects_changed = False objects_changed = False
for cdist_object in self.object_list(): for cdist_object in self.object_list():
if cdist_object.requirements_unfinished( if cdist_object.has_requirements_unfinished(
cdist_object.requirements): cdist_object.requirements):
"""We cannot do anything for this poor object""" """We cannot do anything for this poor object"""
continue continue
@ -553,7 +553,7 @@ class Config:
self.object_prepare(cdist_object) self.object_prepare(cdist_object)
objects_changed = True objects_changed = True
if cdist_object.requirements_unfinished( if cdist_object.has_requirements_unfinished(
cdist_object.autorequire): cdist_object.autorequire):
"""The previous step created objects we depend on - """The previous step created objects we depend on -
wait for them wait for them
@ -567,13 +567,13 @@ class Config:
return objects_changed return objects_changed
def _iterate_once_parallel(self): def _iterate_once_parallel(self):
self.log.debug("Iteration in parallel mode in {} jobs".format( self.log.debug("Iteration in parallel mode in %d jobs", self.jobs)
self.jobs))
objects_changed = False objects_changed = False
cargo = [] cargo = []
for cdist_object in self.object_list(): for cdist_object in self.object_list():
if cdist_object.requirements_unfinished(cdist_object.requirements): if cdist_object.has_requirements_unfinished(
cdist_object.requirements):
"""We cannot do anything for this poor object""" """We cannot do anything for this poor object"""
continue continue
@ -590,8 +590,8 @@ class Config:
self.object_prepare(cargo[0]) self.object_prepare(cargo[0])
objects_changed = True objects_changed = True
elif cargo: elif cargo:
self.log.trace("Multiprocessing start method is {}".format( self.log.trace("Multiprocessing start method is %s",
multiprocessing.get_start_method())) multiprocessing.get_start_method())
self.log.trace("Multiprocessing cargo: %s", cargo) self.log.trace("Multiprocessing cargo: %s", cargo)
@ -605,9 +605,8 @@ class Config:
"sequentially")) "sequentially"))
self.explorer.transfer_type_explorers(cargo_types.pop()) self.explorer.transfer_type_explorers(cargo_types.pop())
else: else:
self.log.trace(("Starting multiprocessing Pool for {} " self.log.trace("Starting multiprocessing Pool for %d "
"parallel types explorers transferring".format( "parallel types explorers transferring", nt)
nt)))
args = [ args = [
(ct, ) for ct in cargo_types (ct, ) for ct in cargo_types
] ]
@ -616,8 +615,8 @@ class Config:
self.log.trace(("Multiprocessing for parallel transferring " self.log.trace(("Multiprocessing for parallel transferring "
"types' explorers finished")) "types' explorers finished"))
self.log.trace(("Starting multiprocessing Pool for {} parallel " self.log.trace("Starting multiprocessing Pool for %d parallel "
"objects preparation".format(n))) "objects preparation", n)
args = [ args = [
(c, False, ) for c in cargo (c, False, ) for c in cargo
] ]
@ -628,12 +627,13 @@ class Config:
del cargo[:] del cargo[:]
for cdist_object in self.object_list(): for cdist_object in self.object_list():
if cdist_object.requirements_unfinished(cdist_object.requirements): if cdist_object.has_requirements_unfinished(
cdist_object.requirements):
"""We cannot do anything for this poor object""" """We cannot do anything for this poor object"""
continue continue
if cdist_object.state == core.CdistObject.STATE_PREPARED: if cdist_object.state == core.CdistObject.STATE_PREPARED:
if cdist_object.requirements_unfinished( if cdist_object.has_requirements_unfinished(
cdist_object.autorequire): cdist_object.autorequire):
"""The previous step created objects we depend on - """The previous step created objects we depend on -
wait for them wait for them
@ -669,10 +669,10 @@ class Config:
self.object_run(chunk[0]) self.object_run(chunk[0])
objects_changed = True objects_changed = True
elif chunk: elif chunk:
self.log.trace("Multiprocessing start method is {}".format( self.log.trace("Multiprocessing start method is %s",
multiprocessing.get_start_method())) multiprocessing.get_start_method())
self.log.trace(("Starting multiprocessing Pool for {} " self.log.trace("Starting multiprocessing Pool for %d "
"parallel object run".format(n))) "parallel object run", n)
args = [ args = [
(c, ) for c in chunk (c, ) for c in chunk
] ]
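
The trace messages above describe cdist fanning object runs out over a multiprocessing pool via its mp_pool_run helper. A generic sketch of that pattern using plain multiprocessing.Pool (an illustration of the idea, not cdist's helper):

    import multiprocessing

    def run_one(cdist_object_name):
        # stand-in for Config.object_run(); pretend the object succeeded
        return cdist_object_name, 'done'

    if __name__ == '__main__':
        chunk = ['__file/etc/motd', '__directory/opt/x', '__link/tmp/y']
        with multiprocessing.Pool(processes=2) as pool:
            results = pool.map(run_one, chunk)
        print(results)
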
@ -704,20 +704,22 @@ class Config:
check for cycles. check for cycles.
''' '''
graph = {} graph = {}
for cdist_object in self.object_list():
def _add_requirements(cdist_object, requirements):
obj_name = cdist_object.name obj_name = cdist_object.name
if obj_name not in graph: if obj_name not in graph:
graph[obj_name] = [] graph[obj_name] = []
for requirement in cdist_object.requirements_unfinished(
requirements):
graph[obj_name].append(requirement.name)
for cdist_object in self.object_list():
if cdist_object.state == cdist_object.STATE_DONE: if cdist_object.state == cdist_object.STATE_DONE:
continue continue
for requirement in cdist_object.requirements_unfinished( _add_requirements(cdist_object, cdist_object.requirements)
cdist_object.requirements): _add_requirements(cdist_object, cdist_object.autorequire)
graph[obj_name].append(requirement.name)
for requirement in cdist_object.requirements_unfinished(
cdist_object.autorequire):
graph[obj_name].append(requirement.name)
return graph_check_cycle(graph) return graph_check_cycle(graph)
def iterate_until_finished(self): def iterate_until_finished(self):
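
The refactored dependency_resolution() builds a plain dict mapping each unfinished object name to the names of its unfinished requirements and autorequirements, then hands it to graph_check_cycle(). A standalone sketch of cycle detection over such a dict (my own illustration, not cdist's graph_check_cycle implementation):

    def has_cycle(graph):
        WHITE, GREY, BLACK = 0, 1, 2
        color = {node: WHITE for node in graph}

        def visit(node):
            color[node] = GREY
            for dep in graph.get(node, []):
                if color.get(dep, WHITE) == GREY:   # back edge -> cycle
                    return True
                if color.get(dep, WHITE) == WHITE and visit(dep):
                    return True
            color[node] = BLACK
            return False

        return any(color[node] == WHITE and visit(node) for node in graph)

    print(has_cycle({'__a/x': ['__b/y'], '__b/y': ['__a/x']}))  # True
    print(has_cycle({'__a/x': ['__b/y'], '__b/y': []}))         # False
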
@ -771,7 +773,7 @@ class Config:
raise cdist.UnresolvableRequirementsError( raise cdist.UnresolvableRequirementsError(
("The requirements of the following objects could not be " ("The requirements of the following objects could not be "
"resolved:\n%s") % ("\n".join(info_string))) "resolved:\n{}").format("\n".join(info_string)))
def _handle_deprecation(self, cdist_object): def _handle_deprecation(self, cdist_object):
cdist_type = cdist_object.cdist_type cdist_type = cdist_object.cdist_type
@ -811,9 +813,10 @@ class Config:
def object_prepare(self, cdist_object, transfer_type_explorers=True): def object_prepare(self, cdist_object, transfer_type_explorers=True):
"""Prepare object: Run type explorer + manifest""" """Prepare object: Run type explorer + manifest"""
self._handle_deprecation(cdist_object) self._handle_deprecation(cdist_object)
self.log.verbose("Preparing object {}".format(cdist_object.name)) self.log.verbose("Preparing object %s", cdist_object.name)
self.log.verbose( self.log.verbose("Running manifest and explorers for %s",
"Running manifest and explorers for " + cdist_object.name) cdist_object.name)
self.explorer.run_type_explorers(cdist_object, transfer_type_explorers)
try: try:
self.log.verbose("Preparing object {}".format(cdist_object.name)) self.log.verbose("Preparing object {}".format(cdist_object.name))
self.log.verbose( self.log.verbose(
@ -838,13 +841,13 @@ class Config:
def object_run(self, cdist_object): def object_run(self, cdist_object):
"""Run gencode and code for an object""" """Run gencode and code for an object"""
try: try:
self.log.verbose("Running object " + cdist_object.name) self.log.verbose("Running object %s", cdist_object.name)
if cdist_object.state == core.CdistObject.STATE_DONE: if cdist_object.state == core.CdistObject.STATE_DONE:
raise cdist.Error(("Attempting to run an already finished " raise cdist.Error(("Attempting to run an already finished "
"object: %s"), cdist_object) "object: {}").format(cdist_object))
# Generate # Generate
self.log.debug("Generating code for %s" % (cdist_object.name)) self.log.debug("Generating code for %s", cdist_object.name)
if pytype_util.is_python_type(cdist_object.cdist_type): if pytype_util.is_python_type(cdist_object.cdist_type):
cdist_object.code_local = '' cdist_object.code_local = ''
cdist_object.code_remote = self._timeit( cdist_object.code_remote = self._timeit(
@ -865,24 +868,24 @@ class Config:
# Execute # Execute
if cdist_object.code_local or cdist_object.code_remote: if cdist_object.code_local or cdist_object.code_remote:
self.log.info("Processing %s" % (cdist_object.name)) self.log.info("Processing %s", cdist_object.name)
if not self.dry_run: if not self.dry_run:
if cdist_object.code_local: if cdist_object.code_local:
self.log.trace("Executing local code for %s" self.log.trace("Executing local code for %s",
% (cdist_object.name)) cdist_object.name)
self._timeit(self.code.run_code_local, self._timeit(self.code.run_code_local,
"Type run code local for {}".format( "Type run code local for {}".format(
cdist_object.name))(cdist_object) cdist_object.name))(cdist_object)
if cdist_object.code_remote: if cdist_object.code_remote:
self.log.trace("Executing remote code for %s" self.log.trace("Executing remote code for %s",
% (cdist_object.name)) cdist_object.name)
self.code.transfer_code_remote(cdist_object) self.code.transfer_code_remote(cdist_object)
self._timeit(self.code.run_code_remote, self._timeit(self.code.run_code_remote,
"Type run code remote for {}".format( "Type run code remote for {}".format(
cdist_object.name))(cdist_object) cdist_object.name))(cdist_object)
# Mark this object as done # Mark this object as done
self.log.trace("Finishing run of " + cdist_object.name) self.log.trace("Finishing run of %s", cdist_object.name)
cdist_object.state = core.CdistObject.STATE_DONE cdist_object.state = core.CdistObject.STATE_DONE
except cdist.Error as e: except cdist.Error as e:
raise cdist.CdistObjectError(cdist_object, e) raise cdist.CdistObjectError(cdist_object, e)

View file

@ -34,17 +34,17 @@ class IllegalObjectIdError(cdist.Error):
self.message = message or 'Illegal object id' self.message = message or 'Illegal object id'
def __str__(self): def __str__(self):
return '%s: %s' % (self.message, self.object_id) return '{}: {}'.format(self.message, self.object_id)
class MissingObjectIdError(cdist.Error): class MissingObjectIdError(cdist.Error):
def __init__(self, type_name): def __init__(self, type_name):
self.type_name = type_name self.type_name = type_name
self.message = ("Type %s requires object id (is not a " self.message = ("Type {} requires object id (is not a "
"singleton type)") % self.type_name "singleton type)").format(self.type_name)
def __str__(self): def __str__(self):
return '%s' % (self.message) return '{}'.format(self.message)
class CdistObject: class CdistObject:
@ -142,7 +142,7 @@ class CdistObject:
if self.object_marker in self.object_id.split(os.sep): if self.object_marker in self.object_id.split(os.sep):
raise IllegalObjectIdError( raise IllegalObjectIdError(
self.object_id, ('object_id may not contain ' self.object_id, ('object_id may not contain '
'\'%s\'') % self.object_marker) '\'{}\'').format(self.object_marker))
if '//' in self.object_id: if '//' in self.object_id:
raise IllegalObjectIdError( raise IllegalObjectIdError(
self.object_id, 'object_id may not contain //') self.object_id, 'object_id may not contain //')
@ -189,7 +189,7 @@ class CdistObject:
object_id=object_id) object_id=object_id)
def __repr__(self): def __repr__(self):
return '<CdistObject %s>' % self.name return '<CdistObject {}>'.format(self.name)
def __eq__(self, other): def __eq__(self, other):
"""define equality as 'name is the same'""" """define equality as 'name is the same'"""
@ -247,6 +247,13 @@ class CdistObject:
lambda obj: os.path.join(obj.absolute_path, 'typeorder')) lambda obj: os.path.join(obj.absolute_path, 'typeorder'))
typeorder_dep = fsproperty.FileListProperty( typeorder_dep = fsproperty.FileListProperty(
lambda obj: os.path.join(obj.absolute_path, 'typeorder_dep')) lambda obj: os.path.join(obj.absolute_path, 'typeorder_dep'))
# objects without parents are objects specified in init manifest
parents = fsproperty.FileListProperty(
lambda obj: os.path.join(obj.absolute_path, 'parents'))
# objects without children are object of types that do not reuse other
# types
children = fsproperty.FileListProperty(
lambda obj: os.path.join(obj.absolute_path, 'children'))
def cleanup(self): def cleanup(self):
try: try:
@ -270,10 +277,10 @@ class CdistObject:
os.makedirs(path, exist_ok=allow_overwrite) os.makedirs(path, exist_ok=allow_overwrite)
except EnvironmentError as error: except EnvironmentError as error:
raise cdist.Error(('Error creating directories for cdist object: ' raise cdist.Error(('Error creating directories for cdist object: '
'%s: %s') % (self, error)) '{}: {}').format(self, error))
def requirements_unfinished(self, requirements): def requirements_unfinished(self, requirements):
"""Return state whether requirements are satisfied""" """Return unsatisfied requirements"""
object_list = [] object_list = []
@ -284,3 +291,14 @@ class CdistObject:
object_list.append(cdist_object) object_list.append(cdist_object)
return object_list return object_list
def has_requirements_unfinished(self, requirements):
"""Return whether requirements are satisfied"""
for requirement in requirements:
cdist_object = self.object_from_name(requirement)
if cdist_object.state != self.STATE_DONE:
return True
return False
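
The new has_requirements_unfinished() answers the yes/no question directly instead of materialising the full list as requirements_unfinished() does. A self-contained sketch of the difference, using hypothetical stand-in objects rather than cdist's classes:

    class FakeObject:
        # hypothetical stand-in, not cdist's CdistObject
        STATE_DONE = 'done'

        def __init__(self, name, state):
            self.name, self.state = name, state

    objects = {o.name: o for o in (FakeObject('__a/x', 'done'),
                                   FakeObject('__b/y', 'prepared'))}

    def requirements_unfinished(requirements):
        return [objects[r] for r in requirements
                if objects[r].state != FakeObject.STATE_DONE]

    def has_requirements_unfinished(requirements):
        return any(objects[r].state != FakeObject.STATE_DONE
                   for r in requirements)

    print([o.name for o in requirements_unfinished(['__a/x', '__b/y'])])  # ['__b/y']
    print(has_requirements_unfinished(['__a/x', '__b/y']))                # True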

View file

@ -34,7 +34,7 @@ class InvalidTypeError(cdist.Error):
self.source_path = os.path.realpath(self.type_absolute_path) self.source_path = os.path.realpath(self.type_absolute_path)
def __str__(self): def __str__(self):
return "Invalid type '%s' at '%s' defined at '%s'" % ( return "Invalid type '{}' at '{}' defined at '{}'".format(
self.type_path, self.type_absolute_path, self.source_path) self.type_path, self.type_absolute_path, self.source_path)
@ -82,9 +82,9 @@ class CdistType:
yield cls(base_path, name) yield cls(base_path, name)
except InvalidTypeError as e: except InvalidTypeError as e:
# ignore invalid type, log warning and continue # ignore invalid type, log warning and continue
msg = "Ignoring invalid type '%s' at '%s' defined at '%s'" % ( cls.log.warning("Ignoring invalid type '%s' at '%s' defined"
e.type_path, e.type_absolute_path, e.source_path) " at '%s'", e.type_path, e.type_absolute_path,
cls.log.warning(msg) e.source_path)
# remove invalid from runtime conf dir # remove invalid from runtime conf dir
os.remove(e.type_absolute_path) os.remove(e.type_absolute_path)
@ -109,7 +109,7 @@ class CdistType:
return cls._instances[name] return cls._instances[name]
def __repr__(self): def __repr__(self):
return '<CdistType %s>' % self.name return '<CdistType {}>'.format(self.name)
def __eq__(self, other): def __eq__(self, other):
return isinstance(other, self.__class__) and self.name == other.name return isinstance(other, self.__class__) and self.name == other.name

View file

@ -145,8 +145,8 @@ class Code:
def _run_gencode(self, cdist_object, which): def _run_gencode(self, cdist_object, which):
cdist_type = cdist_object.cdist_type cdist_type = cdist_object.cdist_type
script = os.path.join(self.local.type_path, gencode_attr = getattr(cdist_type, 'gencode_{}_path'.format(which))
getattr(cdist_type, 'gencode_%s_path' % which)) script = os.path.join(self.local.type_path, gencode_attr)
if os.path.isfile(script): if os.path.isfile(script):
env = os.environ.copy() env = os.environ.copy()
env.update(self.env) env.update(self.env)
@ -190,8 +190,8 @@ class Code:
def _run_code(self, cdist_object, which, env=None): def _run_code(self, cdist_object, which, env=None):
which_exec = getattr(self, which) which_exec = getattr(self, which)
script = os.path.join(which_exec.object_path, code_attr = getattr(cdist_object, 'code_{}_path'.format(which))
getattr(cdist_object, 'code_%s_path' % which)) script = os.path.join(which_exec.object_path, code_attr)
if which_exec.save_output_streams: if which_exec.save_output_streams:
stderr_path = os.path.join(cdist_object.stderr_path, stderr_path = os.path.join(cdist_object.stderr_path,
'code-' + which) 'code-' + which)

View file

@ -131,18 +131,17 @@ class Explorer:
self._run_global_explorer(explorer, out_path) self._run_global_explorer(explorer, out_path)
def _run_global_explorers_parallel(self, out_path): def _run_global_explorers_parallel(self, out_path):
self.log.debug("Running global explorers in {} parallel jobs".format( self.log.debug("Running global explorers in %s parallel jobs",
self.jobs)) self.jobs)
self.log.trace("Multiprocessing start method is {}".format( self.log.trace("Multiprocessing start method is %s",
multiprocessing.get_start_method())) multiprocessing.get_start_method())
self.log.trace(("Starting multiprocessing Pool for global " self.log.trace("Starting multiprocessing Pool for global explorers"
"explorers run")) " run")
args = [ args = [
(e, out_path, ) for e in self.list_global_explorer_names() (e, out_path, ) for e in self.list_global_explorer_names()
] ]
mp_pool_run(self._run_global_explorer, args, jobs=self.jobs) mp_pool_run(self._run_global_explorer, args, jobs=self.jobs)
self.log.trace(("Multiprocessing run for global explorers " self.log.trace("Multiprocessing run for global explorers finished")
"finished"))
# logger is not pickable, so remove it when we pickle # logger is not pickable, so remove it when we pickle
def __getstate__(self): def __getstate__(self):
@ -161,8 +160,8 @@ class Explorer:
self.remote.transfer(self.local.global_explorer_path, self.remote.transfer(self.local.global_explorer_path,
self.remote.global_explorer_path, self.remote.global_explorer_path,
self.jobs) self.jobs)
self.remote.run(["chmod", "0700", self.remote.run(["chmod", "0700", "{}/*".format(
"%s/*" % (self.remote.global_explorer_path)]) self.remote.global_explorer_path)])
def run_global_explorer(self, explorer): def run_global_explorer(self, explorer):
"""Run the given global explorer and return it's output.""" """Run the given global explorer and return it's output."""
@ -184,15 +183,14 @@ class Explorer:
in the object. in the object.
""" """
self.log.verbose("Running type explorers for {}".format( self.log.verbose("Running type explorers for %s",
cdist_object.cdist_type)) cdist_object.cdist_type)
if transfer_type_explorers: if transfer_type_explorers:
self.log.trace("Transferring type explorers for type: %s", self.log.trace("Transferring type explorers for type: %s",
cdist_object.cdist_type) cdist_object.cdist_type)
self.transfer_type_explorers(cdist_object.cdist_type) self.transfer_type_explorers(cdist_object.cdist_type)
else: else:
self.log.trace(("No need for transferring type explorers for " self.log.trace("No need for transferring type explorers for %s",
"type: %s"),
cdist_object.cdist_type) cdist_object.cdist_type)
self.log.trace("Transferring object parameters for object: %s", self.log.trace("Transferring object parameters for object: %s",
cdist_object.name) cdist_object.name)
@ -236,15 +234,15 @@ class Explorer:
remote side.""" remote side."""
if cdist_type.explorers: if cdist_type.explorers:
if cdist_type.name in self._type_explorers_transferred: if cdist_type.name in self._type_explorers_transferred:
self.log.trace(("Skipping retransfer of type explorers " self.log.trace("Skipping retransfer of type explorers for: %s",
"for: %s"), cdist_type) cdist_type)
else: else:
source = os.path.join(self.local.type_path, source = os.path.join(self.local.type_path,
cdist_type.explorer_path) cdist_type.explorer_path)
destination = os.path.join(self.remote.type_path, destination = os.path.join(self.remote.type_path,
cdist_type.explorer_path) cdist_type.explorer_path)
self.remote.transfer(source, destination) self.remote.transfer(source, destination)
self.remote.run(["chmod", "0700", "%s/*" % (destination)]) self.remote.run(["chmod", "0700", "{}/*".format(destination)])
self._type_explorers_transferred.append(cdist_type.name) self._type_explorers_transferred.append(cdist_type.name)
def transfer_object_parameters(self, cdist_object): def transfer_object_parameters(self, cdist_object):

View file

@ -83,13 +83,12 @@ class NoInitialManifestError(cdist.Error):
if user_supplied: if user_supplied:
if os.path.islink(manifest_path): if os.path.islink(manifest_path):
self.message = "%s: %s -> %s" % ( self.message = "{}: {} -> {}".format(
msg_header, manifest_path, msg_header, manifest_path, os.path.realpath(manifest_path))
os.path.realpath(manifest_path))
else: else:
self.message = "%s: %s" % (msg_header, manifest_path) self.message = "{}: {}".format(msg_header, manifest_path)
else: else:
self.message = "%s" % (msg_header) self.message = "{}".format(msg_header)
def __str__(self): def __str__(self):
return repr(self.message) return repr(self.message)
@ -107,7 +106,7 @@ class Manifest:
self._open_logger() self._open_logger()
self.env = { self.env = {
'PATH': "%s:%s" % (self.local.bin_path, os.environ['PATH']), 'PATH': "{}:{}".format(self.local.bin_path, os.environ['PATH']),
# for use in type emulator # for use in type emulator
'__cdist_type_base_path': self.local.type_path, '__cdist_type_base_path': self.local.type_path,
'__global': self.local.base_path, '__global': self.local.base_path,
@ -161,7 +160,7 @@ class Manifest:
raise NoInitialManifestError(initial_manifest, user_supplied) raise NoInitialManifestError(initial_manifest, user_supplied)
message_prefix = "initialmanifest" message_prefix = "initialmanifest"
self.log.verbose("Running initial manifest " + initial_manifest) self.log.verbose("Running initial manifest %s", initial_manifest)
which = "init" which = "init"
if self.local.save_output_streams: if self.local.save_output_streams:
stderr_path = os.path.join(self.local.stderr_base_path, which) stderr_path = os.path.join(self.local.stderr_base_path, which)

View file

@ -38,8 +38,8 @@ import inspect
class MissingRequiredEnvironmentVariableError(cdist.Error): class MissingRequiredEnvironmentVariableError(cdist.Error):
def __init__(self, name): def __init__(self, name):
self.name = name self.name = name
self.message = ("Emulator requires the environment variable %s to be " self.message = ("Emulator requires the environment variable {} to be "
"setup" % self.name) "setup").format(self.name)
def __str__(self): def __str__(self):
return self.message return self.message
@ -129,8 +129,9 @@ class Emulator:
self.save_stdin() self.save_stdin()
self.record_requirements() self.record_requirements()
self.record_auto_requirements() self.record_auto_requirements()
self.log.trace("Finished %s %s" % ( self.record_parent_child_relationships()
self.cdist_object.path, self.parameters)) self.log.trace("Finished %s %s", self.cdist_object.path,
self.parameters)
def __init_log(self): def __init_log(self):
"""Setup logging facility""" """Setup logging facility"""
@ -192,7 +193,7 @@ class Emulator:
def commandline(self, parser): def commandline(self, parser):
"""Parse command line""" """Parse command line"""
self.args = parser.parse_args(self.argv[1:]) self.args = parser.parse_args(self.argv[1:])
self.log.trace('Args: %s' % self.args) self.log.trace('Args: %s', self.args)
def init_object(self): def init_object(self):
# Initialize object - and ensure it is not in args # Initialize object - and ensure it is not in args
@ -253,18 +254,18 @@ class Emulator:
if self.cdist_object.exists and 'CDIST_OVERRIDE' not in self.env: if self.cdist_object.exists and 'CDIST_OVERRIDE' not in self.env:
obj_params = self._object_params_in_context() obj_params = self._object_params_in_context()
if obj_params != self.parameters: if obj_params != self.parameters:
errmsg = ("Object %s already exists with conflicting " errmsg = ("Object {} already exists with conflicting "
"parameters:\n%s: %s\n%s: %s" % ( "parameters:\n{}: {}\n{}: {}").format(
self.cdist_object.name, self.cdist_object.name,
" ".join(self.cdist_object.source), " ".join(self.cdist_object.source),
obj_params, obj_params,
self.object_source, self.object_source,
self.parameters)) self.parameters)
raise cdist.Error(errmsg) raise cdist.Error(errmsg)
else: else:
if self.cdist_object.exists: if self.cdist_object.exists:
self.log.debug(('Object %s override forced with ' self.log.debug('Object %s override forced with CDIST_OVERRIDE',
'CDIST_OVERRIDE'), self.cdist_object.name) self.cdist_object.name)
self.cdist_object.create(True) self.cdist_object.create(True)
else: else:
self.cdist_object.create() self.cdist_object.create()
@ -282,8 +283,8 @@ class Emulator:
parent = self.cdist_object.object_from_name(__object_name) parent = self.cdist_object.object_from_name(__object_name)
parent.typeorder.append(self.cdist_object.name) parent.typeorder.append(self.cdist_object.name)
if self._order_dep_on(): if self._order_dep_on():
self.log.trace(('[ORDER_DEP] Adding %s to typeorder dep' self.log.trace('[ORDER_DEP] Adding %s to typeorder dep for %s',
' for %s'), depname, parent.name) depname, parent.name)
parent.typeorder_dep.append(depname) parent.typeorder_dep.append(depname)
elif self._order_dep_on(): elif self._order_dep_on():
self.log.trace('[ORDER_DEP] Adding %s to global typeorder dep', self.log.trace('[ORDER_DEP] Adding %s to global typeorder dep',
@ -314,7 +315,7 @@ class Emulator:
fd.write(chunk) fd.write(chunk)
chunk = self._read_stdin() chunk = self._read_stdin()
except EnvironmentError as e: except EnvironmentError as e:
raise cdist.Error('Failed to read from stdin: %s' % e) raise cdist.Error('Failed to read from stdin: {}'.format(e))
def record_requirement(self, requirement): def record_requirement(self, requirement):
"""record requirement and return recorded requirement""" """record requirement and return recorded requirement"""
@ -323,16 +324,14 @@ class Emulator:
try: try:
cdist_object = self.cdist_object.object_from_name(requirement) cdist_object = self.cdist_object.object_from_name(requirement)
except core.cdist_type.InvalidTypeError as e: except core.cdist_type.InvalidTypeError as e:
self.log.error(("%s requires object %s, but type %s does not" self.log.error("%s requires object %s, but type %s does not"
" exist. Defined at %s" % ( " exist. Defined at %s", self.cdist_object.name,
self.cdist_object.name, requirement, e.name, self.object_source)
requirement, e.name, self.object_source)))
raise raise
except core.cdist_object.MissingObjectIdError: except core.cdist_object.MissingObjectIdError:
self.log.error(("%s requires object %s without object id." self.log.error("%s requires object %s without object id."
" Defined at %s" % (self.cdist_object.name, " Defined at %s", self.cdist_object.name,
requirement, requirement, self.object_source)
self.object_source)))
raise raise
self.log.debug("Recording requirement %s for %s", self.log.debug("Recording requirement %s for %s",
@ -402,10 +401,9 @@ class Emulator:
self.env['require'] += " " + lastcreatedtype self.env['require'] += " " + lastcreatedtype
else: else:
self.env['require'] = lastcreatedtype self.env['require'] = lastcreatedtype
self.log.debug(("Injecting require for " self.log.debug("Injecting require for"
"CDIST_ORDER_DEPENDENCY: %s for %s"), " CDIST_ORDER_DEPENDENCY: %s for %s",
lastcreatedtype, lastcreatedtype, self.cdist_object.name)
self.cdist_object.name)
except IndexError: except IndexError:
# if no second last line, we are on the first type, # if no second last line, we are on the first type,
# so do not set a requirement # so do not set a requirement
@ -413,7 +411,7 @@ class Emulator:
if "require" in self.env: if "require" in self.env:
requirements = self.env['require'] requirements = self.env['require']
self.log.debug("reqs = " + requirements) self.log.debug("reqs = %s", requirements)
for requirement in self._parse_require(requirements): for requirement in self._parse_require(requirements):
# Ignore empty fields - probably the only field anyway # Ignore empty fields - probably the only field anyway
if len(requirement) == 0: if len(requirement) == 0:
@ -443,3 +441,21 @@ class Emulator:
self.log.debug("Recording autorequirement %s for %s", self.log.debug("Recording autorequirement %s for %s",
current_object.name, parent.name) current_object.name, parent.name)
parent.autorequire.append(current_object.name) parent.autorequire.append(current_object.name)
def record_parent_child_relationships(self):
# __object_name is the name of the object whose type manifest is
# currently executed
__object_name = self.env.get('__object_name', None)
if __object_name:
# The object whose type manifest is currently run
parent = self.cdist_object.object_from_name(__object_name)
# The object currently being defined
current_object = self.cdist_object
if current_object.name not in parent.children:
self.log.debug("Recording child %s for %s",
current_object.name, parent.name)
parent.children.append(current_object.name)
if parent.name not in current_object.parents:
self.log.debug("Recording parent %s for %s",
parent.name, current_object.name)
current_object.parents.append(parent.name)
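
record_parent_child_relationships() links the object whose type manifest is currently running (__object_name) with the object being defined, writing both directions into the new parents and children properties. A sketch with plain dicts and hypothetical object names:

    parents, children = {}, {}

    def record_parent_child(parent_name, child_name):
        # mirror of the bookkeeping above, on plain dicts
        if child_name not in children.setdefault(parent_name, []):
            children[parent_name].append(child_name)
        if parent_name not in parents.setdefault(child_name, []):
            parents[child_name].append(parent_name)

    record_parent_child('__mytype/foo', '__file/bar')
    print(children)  # {'__mytype/foo': ['__file/bar']}
    print(parents)   # {'__file/bar': ['__mytype/foo']}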

View file

@ -155,10 +155,10 @@ class Local:
def _setup_object_marker_file(self): def _setup_object_marker_file(self):
with open(self.object_marker_file, 'w') as fd: with open(self.object_marker_file, 'w') as fd:
fd.write("%s\n" % self.object_marker_name) fd.write("{}\n".format(self.object_marker_name))
self.log.trace("Object marker %s saved in %s" % ( self.log.trace("Object marker %s saved in %s",
self.object_marker_name, self.object_marker_file)) self.object_marker_name, self.object_marker_file)
def _init_cache_dir(self, cache_dir): def _init_cache_dir(self, cache_dir):
home_dir = cdist.home_dir() home_dir = cdist.home_dir()
@ -187,7 +187,7 @@ class Local:
""" """
assert isinstance(command, (list, tuple)), ( assert isinstance(command, (list, tuple)), (
"list or tuple argument expected, got: %s" % command) "list or tuple argument expected, got: {}".format(command))
quiet = self.quiet_mode or quiet_mode quiet = self.quiet_mode or quiet_mode
do_save_output = save_output and not quiet and self.save_output_streams do_save_output = save_output and not quiet and self.save_output_streams
@ -292,14 +292,12 @@ class Local:
return cache_subpath return cache_subpath
def save_cache(self, start_time=time.time()): def save_cache(self, start_time=time.time()):
self.log.trace("cache subpath pattern: {}".format( self.log.trace("cache subpath pattern: %s", self.cache_path_pattern)
self.cache_path_pattern))
cache_subpath = self._cache_subpath(start_time, cache_subpath = self._cache_subpath(start_time,
self.cache_path_pattern) self.cache_path_pattern)
self.log.debug("cache subpath: {}".format(cache_subpath)) self.log.debug("cache subpath: %s", cache_subpath)
destination = os.path.join(self.cache_path, cache_subpath) destination = os.path.join(self.cache_path, cache_subpath)
self.log.trace(("Saving cache: " + self.base_path + " to " + self.log.trace("Saving cache %s to %s", self.base_path, destination)
destination))
if not os.path.exists(destination): if not os.path.exists(destination):
shutil.move(self.base_path, destination) shutil.move(self.base_path, destination)
@ -335,7 +333,7 @@ class Local:
# Iterate over all directories and link the to the output dir # Iterate over all directories and link the to the output dir
for conf_dir in self.conf_dirs: for conf_dir in self.conf_dirs:
self.log.debug("Checking conf_dir %s ..." % (conf_dir)) self.log.debug("Checking conf_dir %s ...", conf_dir)
for sub_dir in CONF_SUBDIRS_LINKED: for sub_dir in CONF_SUBDIRS_LINKED:
current_dir = os.path.join(conf_dir, sub_dir) current_dir = os.path.join(conf_dir, sub_dir)
@ -353,11 +351,12 @@ class Local:
if os.path.exists(dst): if os.path.exists(dst):
os.unlink(dst) os.unlink(dst)
self.log.trace("Linking %s to %s ..." % (src, dst)) self.log.trace("Linking %s to %s ...", src, dst)
try: try:
os.symlink(src, dst) os.symlink(src, dst)
except OSError as e: except OSError as e:
raise cdist.Error("Linking %s %s to %s failed: %s" % ( raise cdist.Error(
"Linking {} {} to {} failed: {}".format(
sub_dir, src, dst, e.__str__())) sub_dir, src, dst, e.__str__()))
def _link_types_for_emulator(self): def _link_types_for_emulator(self):
@ -371,7 +370,7 @@ class Local:
os.symlink(src, dst) os.symlink(src, dst)
except OSError as e: except OSError as e:
raise cdist.Error( raise cdist.Error(
"Linking emulator from %s to %s failed: %s" % ( "Linking emulator from {} to {} failed: {}".format(
src, dst, e.__str__())) src, dst, e.__str__()))
def collect_python_types(self): def collect_python_types(self):

View file

@ -24,12 +24,10 @@ import os
import glob import glob
import subprocess import subprocess
import logging import logging
import multiprocessing
import cdist import cdist
import cdist.exec.util as util import cdist.exec.util as util
import cdist.util.ipaddr as ipaddr import cdist.util.ipaddr as ipaddr
from cdist.mputil import mp_pool_run
def _wrap_addr(addr): def _wrap_addr(addr):
@ -176,19 +174,19 @@ class Remote:
# create archive # create archive
tarpath, fcnt = autil.tar(source, self.archiving_mode) tarpath, fcnt = autil.tar(source, self.archiving_mode)
if tarpath is None: if tarpath is None:
self.log.trace(("Files count {} is lower than {} limit, " self.log.trace("Files count %d is lower than %d limit, "
"skipping archiving").format( "skipping archiving",
fcnt, autil.FILES_LIMIT)) fcnt, autil.FILES_LIMIT)
else: else:
self.log.trace(("Archiving mode, tarpath: %s, file count: " self.log.trace("Archiving mode, tarpath: %s, file count: "
"%s"), tarpath, fcnt) "%s", tarpath, fcnt)
# get archive name # get archive name
tarname = os.path.basename(tarpath) tarname = os.path.basename(tarpath)
self.log.trace("Archiving mode tarname: %s", tarname) self.log.trace("Archiving mode tarname: %s", tarname)
# archive path at the remote # archive path at the remote
desttarpath = os.path.join(destination, tarname) desttarpath = os.path.join(destination, tarname)
self.log.trace( self.log.trace("Archiving mode desttarpath: %s",
"Archiving mode desttarpath: %s", desttarpath) desttarpath)
# transfer archive to the remote side # transfer archive to the remote side
self.log.trace("Archiving mode: transferring") self.log.trace("Archiving mode: transferring")
self._transfer_file(tarpath, desttarpath) self._transfer_file(tarpath, desttarpath)
@ -262,9 +260,10 @@ class Remote:
# remotely in e.g. csh and setting up CDIST_REMOTE_SHELL to e.g. # remotely in e.g. csh and setting up CDIST_REMOTE_SHELL to e.g.
# /bin/csh will execute this script in the right way. # /bin/csh will execute this script in the right way.
if env: if env:
remote_env = [" export %s=%s;" % item for item in env.items()] remote_env = [" export {env[0]}={env[1]};".format(env=item)
string_cmd = ("/bin/sh -c '" + " ".join(remote_env) + for item in env.items()]
" ".join(command) + "'") string_cmd = ("/bin/sh -c '{}{}'").format(" ".join(remote_env),
" ".join(command))
cmd.append(string_cmd) cmd.append(string_cmd)
else: else:
cmd.extend(command) cmd.extend(command)
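
For a given env dict the rewritten wrapper assembles the same single-quoted /bin/sh command string as before. A sketch with a hypothetical env and command (not taken from a real cdist run):

    env = {'__object': '/tmp/o', '__object_id': 'foo'}
    command = ['/bin/true']

    remote_env = [" export {env[0]}={env[1]};".format(env=item)
                  for item in env.items()]
    string_cmd = "/bin/sh -c '{}{}'".format(" ".join(remote_env),
                                            " ".join(command))
    print(string_cmd)
    # /bin/sh -c ' export __object=/tmp/o;  export __object_id=foo;/bin/true'
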
@ -278,7 +277,7 @@ class Remote:
""" """
assert isinstance(command, (list, tuple)), ( assert isinstance(command, (list, tuple)), (
"list or tuple argument expected, got: %s" % command) "list or tuple argument expected, got: {}".format(command))
close_stdout = False close_stdout = False
close_stderr = False close_stderr = False

View file

@ -47,4 +47,4 @@ class Install(cdist.config.Config):
yield cdist_object yield cdist_object
else: else:
self.log.debug("Running in install mode, ignoring non install" self.log.debug("Running in install mode, ignoring non install"
"object: {0}".format(cdist_object)) "object: %s", cdist_object)

View file

@ -92,7 +92,7 @@ class Inventory:
self.init_db() self.init_db()
def init_db(self): def init_db(self):
self.log.trace("Init db: {}".format(self.db_basedir)) self.log.trace("Init db: %s", self.db_basedir)
if not os.path.exists(self.db_basedir): if not os.path.exists(self.db_basedir):
os.makedirs(self.db_basedir, exist_ok=True) os.makedirs(self.db_basedir, exist_ok=True)
elif not os.path.isdir(self.db_basedir): elif not os.path.isdir(self.db_basedir):
@ -182,9 +182,9 @@ class Inventory:
configuration = cfg.get_config(section='GLOBAL') configuration = cfg.get_config(section='GLOBAL')
determine_default_inventory_dir(args, configuration) determine_default_inventory_dir(args, configuration)
log.debug("Using inventory: {}".format(args.inventory_dir)) log.debug("Using inventory: %s", args.inventory_dir)
log.trace("Inventory args: {}".format(vars(args))) log.trace("Inventory args: %s", vars(args))
log.trace("Inventory command: {}".format(args.subcommand)) log.trace("Inventory command: %s", args.subcommand)
if args.subcommand == "list": if args.subcommand == "list":
c = InventoryList(hosts=args.host, istag=args.tag, c = InventoryList(hosts=args.host, istag=args.tag,
@ -237,16 +237,16 @@ class InventoryList(Inventory):
def _do_list(self, it_tags, it_hosts, check_func): def _do_list(self, it_tags, it_hosts, check_func):
if (it_tags is not None): if (it_tags is not None):
param_tags = set(it_tags) param_tags = set(it_tags)
self.log.trace("param_tags: {}".format(param_tags)) self.log.trace("param_tags: %s", param_tags)
else: else:
param_tags = set() param_tags = set()
for host in it_hosts: for host in it_hosts:
self.log.trace("host: {}".format(host)) self.log.trace("host: %s", host)
tags = self._get_host_tags(host) tags = self._get_host_tags(host)
if tags is None: if tags is None:
self.log.debug("Host \'{}\' not found, skipped".format(host)) self.log.debug("Host \'%s\' not found, skipped", host)
continue continue
self.log.trace("tags: {}".format(tags)) self.log.trace("tags: %s", tags)
if check_func(tags, param_tags): if check_func(tags, param_tags):
yield host, tags yield host, tags
@ -308,11 +308,11 @@ class InventoryHost(Inventory):
def _action(self, host): def _action(self, host):
if self.action == "add": if self.action == "add":
self.log.debug("Adding host \'{}\'".format(host)) self.log.debug("Adding host \'%s\'", host)
elif self.action == "del": elif self.action == "del":
self.log.debug("Deleting host \'{}\'".format(host)) self.log.debug("Deleting host \'%s\'", host)
hostpath = self._host_path(host) hostpath = self._host_path(host)
self.log.trace("hostpath: {}".format(hostpath)) self.log.trace("hostpath: %s", hostpath)
if self.action == "add" and not os.path.exists(hostpath): if self.action == "add" and not os.path.exists(hostpath):
self._new_hostpath(hostpath) self._new_hostpath(hostpath)
else: else:
@ -372,23 +372,23 @@ class InventoryTag(Inventory):
print("Host \'{}\' does not exist, skipping".format(host), print("Host \'{}\' does not exist, skipping".format(host),
file=sys.stderr) file=sys.stderr)
return return
self.log.trace("existing host_tags: {}".format(host_tags)) self.log.trace("existing host_tags: %s", host_tags)
if self.action == "del" and self.all: if self.action == "del" and self.all:
host_tags = set() host_tags = set()
else: else:
for tag in self.input_tags: for tag in self.input_tags:
if self.action == "add": if self.action == "add":
self.log.debug("Adding tag \'{}\' for host \'{}\'".format( self.log.debug("Adding tag \'%s\' for host \'%s\'",
tag, host)) tag, host)
host_tags.add(tag) host_tags.add(tag)
elif self.action == "del": elif self.action == "del":
self.log.debug("Deleting tag \'{}\' for host " self.log.debug("Deleting tag \'%s\' for host \'%s\'",
"\'{}\'".format(tag, host)) tag, host)
if tag in host_tags: if tag in host_tags:
host_tags.remove(tag) host_tags.remove(tag)
self.log.trace("new host tags: {}".format(host_tags)) self.log.trace("new host tags: %s", host_tags)
if not self._write_host_tags(host, host_tags): if not self._write_host_tags(host, host_tags):
self.log.trace("{} does not exist, skipped".format(host)) self.log.trace("%s does not exist, skipped", host)
def run(self): def run(self):
if self.allhosts: if self.allhosts:

View file

@ -70,7 +70,7 @@ class Message:
with open(self.global_messages, 'a') as fd: with open(self.global_messages, 'a') as fd:
for line in content: for line in content:
fd.write("%s:%s" % (self.prefix, line)) fd.write("{}:{}".format(self.prefix, line))
def merge_messages(self): def merge_messages(self):
self._merge_messages() self._merge_messages()

View file

@ -49,7 +49,7 @@ def scan_preos_dir_plugins(dir):
c = cm[1] c = cm[1]
yield from preos_plugin(c) yield from preos_plugin(c)
except ImportError as e: except ImportError as e:
log.warning("Cannot import '{}': {}".format(module_name, e)) log.warning("Cannot import '%s': %s", module_name, e)
def find_preos_plugins(): def find_preos_plugins():
@ -102,7 +102,7 @@ class PreOS:
parser.add_argument('remainder_args', nargs=argparse.REMAINDER) parser.add_argument('remainder_args', nargs=argparse.REMAINDER)
args = parser.parse_args(argv[1:]) args = parser.parse_args(argv[1:])
cdist.argparse.handle_loglevel(args) cdist.argparse.handle_loglevel(args)
log.debug("preos args : {}".format(args)) log.debug("preos args : %s", args)
conf_dirs = util.resolve_conf_dirs_from_config_and_args(args) conf_dirs = util.resolve_conf_dirs_from_config_and_args(args)
@ -122,7 +122,7 @@ class PreOS:
func_args = [preos, args.remainder_args, ] func_args = [preos, args.remainder_args, ]
else: else:
func_args = [args.remainder_args, ] func_args = [args.remainder_args, ]
log.info("Running preos : {}".format(preos_name)) log.info("Running preos : %s", preos_name)
func(*func_args) func(*func_args)
else: else:
raise cdist.Error( raise cdist.Error(

View file

@ -172,7 +172,7 @@ class Debian:
args.pxe_boot_dir = os.path.realpath(args.pxe_boot_dir) args.pxe_boot_dir = os.path.realpath(args.pxe_boot_dir)
cdist.argparse.handle_loglevel(args) cdist.argparse.handle_loglevel(args)
log.debug("preos: {}, args: {}".format(cls._preos_name, args)) log.debug("preos: %s, args: %s", cls._preos_name, args)
try: try:
env = vars(args) env = vars(args)
new_env = {} new_env = {}
@ -196,8 +196,9 @@ class Debian:
env = new_env env = new_env
env.update(os.environ) env.update(os.environ)
cls.update_env(env) cls.update_env(env)
log.debug("preos: {} env: {}".format(cls._preos_name, env)) log.debug("preos: %s env: %s", cls._preos_name, env)
cmd = os.path.join(cls._files_dir, "code")
if log.getEffectiveLevel() <= logging.INFO:
info_msg = ["Running preos: {}, suite: {}, arch: {}".format( info_msg = ["Running preos: {}, suite: {}, arch: {}".format(
cls._preos_name, args.suite, args.arch), ] cls._preos_name, args.suite, args.arch), ]
if args.mirror: if args.mirror:
@ -213,10 +214,12 @@ class Debian:
if args.drive: if args.drive:
info_msg.append("creating bootable drive") info_msg.append("creating bootable drive")
log.info(info_msg) log.info(info_msg)
log.debug("cmd={}".format(cmd))
cmd = os.path.join(cls._files_dir, "code")
log.debug("cmd=%s", cmd)
subprocess.check_call(cmd, env=env, shell=True) subprocess.check_call(cmd, env=env, shell=True)
except subprocess.CalledProcessError as e: except subprocess.CalledProcessError as e:
log.error("preos {} failed: {}".format(cls._preos_name, e)) log.error("preos %s failed: %s", cls._preos_name, e)
class Ubuntu(Debian): class Ubuntu(Debian):

View file

@ -93,8 +93,8 @@ class Trigger(object):
time.sleep(self.sleeptime) time.sleep(self.sleeptime)
def trigger(self, interface): def trigger(self, interface):
packet = IPv6(dst=f"ff02::1%{interface}") / ICMPv6EchoRequest() packet = IPv6(dst="ff02::1{}".format(interface)) / ICMPv6EchoRequest()
log.debug(f"Sending request on {interface}") log.debug("Sending request on %s", interface)
send(packet, verbose=self.verbose) send(packet, verbose=self.verbose)
@ -114,7 +114,7 @@ class Scanner(object):
def handle_pkg(self, pkg): def handle_pkg(self, pkg):
if ICMPv6EchoReply in pkg: if ICMPv6EchoReply in pkg:
host = pkg['IPv6'].src host = pkg['IPv6'].src
log.verbose(f"Host {host} is alive") log.verbose("Host %s is alive", host)
dir = os.path.join(self.outdir, host) dir = os.path.join(self.outdir, host)
fname = os.path.join(dir, "last_seen") fname = os.path.join(dir, "last_seen")

View file

@ -65,7 +65,7 @@ class Shell:
def _init_environment(self): def _init_environment(self):
self.env = os.environ.copy() self.env = os.environ.copy()
additional_env = { additional_env = {
'PATH': "%s:%s" % (self.local.bin_path, os.environ['PATH']), 'PATH': "{}:{}".format(self.local.bin_path, os.environ['PATH']),
# for use in type emulator # for use in type emulator
'__cdist_type_base_path': self.local.type_path, '__cdist_type_base_path': self.local.type_path,
'__cdist_manifest': "cdist shell", '__cdist_manifest': "cdist shell",

View file

@@ -86,8 +86,7 @@ class ObjectClassTestCase(test.CdistTestCase):
def test_create_singleton(self): def test_create_singleton(self):
"""Check whether creating an object without id (singleton) works""" """Check whether creating an object without id (singleton) works"""
singleton = self.expected_objects[0].object_from_name( self.expected_objects[0].object_from_name("__test_singleton")
"__test_singleton")
# came here - everything fine # came here - everything fine
def test_create_singleton_not_singleton_type(self): def test_create_singleton_not_singleton_type(self):
@@ -126,16 +125,16 @@ class ObjectIdTestCase(test.CdistTestCase):
def test_object_id_contains_object_marker(self): def test_object_id_contains_object_marker(self):
cdist_type = core.CdistType(type_base_path, '__third') cdist_type = core.CdistType(type_base_path, '__third')
illegal_object_id = ( illegal_object_id = 'object_id/may/not/contain/{}/anywhere'.format(
'object_id/may/not/contain/%s/anywhere' % OBJECT_MARKER_NAME) OBJECT_MARKER_NAME)
with self.assertRaises(core.IllegalObjectIdError): with self.assertRaises(core.IllegalObjectIdError):
core.CdistObject(cdist_type, self.object_base_path, core.CdistObject(cdist_type, self.object_base_path,
OBJECT_MARKER_NAME, illegal_object_id) OBJECT_MARKER_NAME, illegal_object_id)
def test_object_id_contains_object_marker_string(self): def test_object_id_contains_object_marker_string(self):
cdist_type = core.CdistType(type_base_path, '__third') cdist_type = core.CdistType(type_base_path, '__third')
illegal_object_id = ( illegal_object_id = 'object_id/may/contain_{}_in_filename'.format(
'object_id/may/contain_%s_in_filename' % OBJECT_MARKER_NAME) OBJECT_MARKER_NAME)
core.CdistObject(cdist_type, self.object_base_path, core.CdistObject(cdist_type, self.object_base_path,
OBJECT_MARKER_NAME, illegal_object_id) OBJECT_MARKER_NAME, illegal_object_id)
# if we get here, the test passed # if we get here, the test passed
@@ -195,28 +194,32 @@ class ObjectTestCase(test.CdistTestCase):
def test_path(self): def test_path(self):
self.assertEqual(self.cdist_object.path, self.assertEqual(self.cdist_object.path,
"__third/moon/%s" % OBJECT_MARKER_NAME) "__third/moon/{}".format(OBJECT_MARKER_NAME))
def test_absolute_path(self): def test_absolute_path(self):
self.assertEqual(self.cdist_object.absolute_path, self.assertEqual(self.cdist_object.absolute_path,
os.path.join(self.object_base_path, os.path.join(self.object_base_path,
"__third/moon/%s" % OBJECT_MARKER_NAME)) "__third/moon/{}".format(
OBJECT_MARKER_NAME)))
def test_code_local_path(self): def test_code_local_path(self):
self.assertEqual(self.cdist_object.code_local_path, self.assertEqual(self.cdist_object.code_local_path,
"__third/moon/%s/code-local" % OBJECT_MARKER_NAME) "__third/moon/{}/code-local".format(
OBJECT_MARKER_NAME))
def test_code_remote_path(self): def test_code_remote_path(self):
self.assertEqual(self.cdist_object.code_remote_path, self.assertEqual(self.cdist_object.code_remote_path,
"__third/moon/%s/code-remote" % OBJECT_MARKER_NAME) "__third/moon/{}/code-remote".format(
OBJECT_MARKER_NAME))
def test_parameter_path(self): def test_parameter_path(self):
self.assertEqual(self.cdist_object.parameter_path, self.assertEqual(self.cdist_object.parameter_path,
"__third/moon/%s/parameter" % OBJECT_MARKER_NAME) "__third/moon/{}/parameter".format(
OBJECT_MARKER_NAME))
def test_explorer_path(self): def test_explorer_path(self):
self.assertEqual(self.cdist_object.explorer_path, self.assertEqual(self.cdist_object.explorer_path,
"__third/moon/%s/explorer" % OBJECT_MARKER_NAME) "__third/moon/{}/explorer".format(OBJECT_MARKER_NAME))
def test_parameters(self): def test_parameters(self):
expected_parameters = {'planet': 'Saturn', 'name': 'Prometheus'} expected_parameters = {'planet': 'Saturn', 'name': 'Prometheus'}

View file

@@ -84,8 +84,8 @@ class EmulatorTestCase(test.CdistTestCase):
def test_illegal_object_id_requirement(self): def test_illegal_object_id_requirement(self):
argv = ['__file', '/tmp/foobar'] argv = ['__file', '/tmp/foobar']
self.env['require'] = ( self.env['require'] = "__file/bad/id/with/{}/inside".format(
"__file/bad/id/with/%s/inside") % self.local.object_marker_name self.local.object_marker_name)
emu = emulator.Emulator(argv, env=self.env) emu = emulator.Emulator(argv, env=self.env)
self.assertRaises(core.IllegalObjectIdError, emu.run) self.assertRaises(core.IllegalObjectIdError, emu.run)

View file

@@ -47,8 +47,8 @@ class RemoteTestCase(test.CdistTestCase):
args = (self.target_host,) args = (self.target_host,)
kwargs.setdefault('base_path', self.base_path) kwargs.setdefault('base_path', self.base_path)
user = getpass.getuser() user = getpass.getuser()
kwargs.setdefault('remote_exec', 'ssh -o User=%s -q' % user) kwargs.setdefault('remote_exec', 'ssh -o User={} -q'.format(user))
kwargs.setdefault('remote_copy', 'scp -o User=%s -q' % user) kwargs.setdefault('remote_copy', 'scp -o User={} -q'.format(user))
if 'stdout_base_path' not in kwargs: if 'stdout_base_path' not in kwargs:
stdout_path = os.path.join(self.temp_dir, 'stdout') stdout_path = os.path.join(self.temp_dir, 'stdout')
os.makedirs(stdout_path, exist_ok=True) os.makedirs(stdout_path, exist_ok=True)
@@ -170,7 +170,7 @@ class RemoteTestCase(test.CdistTestCase):
r = self.create_remote(remote_exec=remote_exec, r = self.create_remote(remote_exec=remote_exec,
remote_copy=remote_copy) remote_copy=remote_copy)
self.assertEqual(r.run('true', return_output=True), self.assertEqual(r.run('true', return_output=True),
"%s\n" % self.target_host[0]) "{}\n".format(self.target_host[0]))
def test_run_script_target_host_in_env(self): def test_run_script_target_host_in_env(self):
handle, remote_exec_path = self.mkstemp(dir=self.temp_dir) handle, remote_exec_path = self.mkstemp(dir=self.temp_dir)
@@ -185,7 +185,7 @@ class RemoteTestCase(test.CdistTestCase):
with os.fdopen(handle, "w") as fd: with os.fdopen(handle, "w") as fd:
fd.writelines(["#!/bin/sh\n", "true"]) fd.writelines(["#!/bin/sh\n", "true"])
self.assertEqual(r.run_script(script, return_output=True), self.assertEqual(r.run_script(script, return_output=True),
"%s\n" % self.target_host[0]) "{}\n".format(self.target_host[0]))
def test_run_script_with_env_target_host_in_env(self): def test_run_script_with_env_target_host_in_env(self):
handle, script = self.mkstemp(dir=self.temp_dir) handle, script = self.mkstemp(dir=self.temp_dir)

View file

@@ -67,7 +67,7 @@ class MessageTestCase(test.CdistTestCase):
def test_message_merge_prefix(self): def test_message_merge_prefix(self):
"""Ensure messages are merged and are prefixed""" """Ensure messages are merged and are prefixed"""
expectedcontent = "%s:%s" % (self.prefix, self.content) expectedcontent = "{}:{}".format(self.prefix, self.content)
out = self.message.env['__messages_out'] out = self.message.env['__messages_out']

View file

@@ -30,7 +30,7 @@ class AbsolutePathRequiredError(cdist.Error):
self.path = path self.path = path
def __str__(self): def __str__(self):
return 'Absolute path required, got: %s' % self.path return 'Absolute path required, got: {}'.format(self.path)
class FileList(collections.MutableSequence): class FileList(collections.MutableSequence):
@@ -218,7 +218,7 @@ class FileBasedProperty:
def _get_attribute(self, instance, owner): def _get_attribute(self, instance, owner):
name = self._get_property_name(owner) name = self._get_property_name(owner)
attribute_name = '__%s' % name attribute_name = '__{}'.format(name)
if not hasattr(instance, attribute_name): if not hasattr(instance, attribute_name):
path = self._get_path(instance) path = self._get_path(instance)
attribute_instance = self.attribute_class(path) attribute_instance = self.attribute_class(path)

View file

@@ -42,8 +42,7 @@ def resolve_target_host_name(host, family=0):
# gethostbyaddr returns triple # gethostbyaddr returns triple
# (hostname, aliaslist, ipaddrlist) # (hostname, aliaslist, ipaddrlist)
host_name = socket.gethostbyaddr(ip_addr)[0] host_name = socket.gethostbyaddr(ip_addr)[0]
log.debug("derived host_name for host \"{}\": {}".format( log.debug("derived host_name for host \"%s\": %s", host, host_name)
host, host_name))
except (socket.gaierror, socket.herror) as e: except (socket.gaierror, socket.herror) as e:
# in case of error provide empty value # in case of error provide empty value
host_name = '' host_name = ''
@@ -54,8 +53,7 @@ def resolve_target_fqdn(host):
log = logging.getLogger(host) log = logging.getLogger(host)
try: try:
host_fqdn = socket.getfqdn(host) host_fqdn = socket.getfqdn(host)
log.debug("derived host_fqdn for host \"{}\": {}".format( log.debug("derived host_fqdn for host \"%s\": %s", host, host_fqdn)
host, host_fqdn))
except socket.herror as e: except socket.herror as e:
# in case of error provide empty value # in case of error provide empty value
host_fqdn = '' host_fqdn = ''
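Editor's note: both helpers follow the same pattern: attempt the lookup, log the derived value lazily, and fall back to an empty string on failure. A small sketch of the underlying socket calls (the address and name are placeholders):

    import socket

    # gethostbyaddr() returns a (hostname, aliaslist, ipaddrlist) triple;
    # only the canonical hostname is used here.
    try:
        host_name = socket.gethostbyaddr("192.0.2.10")[0]
    except (socket.gaierror, socket.herror):
        host_name = ''  # unresolvable address: keep an empty value

    # getfqdn() usually returns its argument unchanged if the lookup fails.
    host_fqdn = socket.getfqdn("node01")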

View file

@@ -5,6 +5,16 @@ next:
* Core: Add trigger functionality (Nico Schottelius, Darko Poljak) * Core: Add trigger functionality (Nico Schottelius, Darko Poljak)
* Core: Implement core support for python types (Darko Poljak) * Core: Implement core support for python types (Darko Poljak)
6.9.6: 2021-04-20
* Type __pyvenv: Fix user example in man page (Dennis Camera)
* Core: config: Make local state directory available to custom remotes (Steven Armstrong)
* Type __ssh_authorized_key: grep only if file exists (Dennis Camera)
* Type __sshd_config: Whitelist OpenBMC (Dennis Camera)
* Core: Maintain object relationship graph in cdist cache (Darko Poljak)
* Type __git: Fix numeric owner and group handling (Dennis Camera)
* Type __pyvenv: Fix numeric owner and group handling (Dennis Camera)
* Type __download: Make sum parameter optional (Ander Punnar)
6.9.5: 2021-02-28 6.9.5: 2021-02-28
* Core: preos: Fix passing cdist debug parameter (Darko Poljak) * Core: preos: Fix passing cdist debug parameter (Darko Poljak)
* Type __sshd_config: Produce error if invalid config is generated, fix processing of AuthenticationMethods and AuthorizedKeysFile, document explorer bug (Dennis Camera) * Type __sshd_config: Produce error if invalid config is generated, fix processing of AuthenticationMethods and AuthorizedKeysFile, document explorer bug (Dennis Camera)

View file

@@ -61,6 +61,14 @@ Object cache overview
~~~~~~~~~~~~~~~~~~~~~ ~~~~~~~~~~~~~~~~~~~~~
Each object under :strong:`object` directory has its own structure. Each object under :strong:`object` directory has its own structure.
autorequire
file containing a list of object auto requirements
children
file containing a list of object children, i.e. objects of types that this
type reuses (along with 'parents' it is used for maintaining the
parent-child relationship graph)
code-local code-local
code generated from gencode-local, present only if something is code generated from gencode-local, present only if something is
generated generated
@@ -80,6 +88,15 @@ parameter
directory containing type parameter named files containing parameter directory containing type parameter named files containing parameter
values values
parents
file containing a list of object parents, i.e. objects of types that reuse
this type (along with 'children' it is used for maintaining the
parent-child relationship graph); objects without parents are objects
specified in the init manifest (see the reading sketch below)
require
file containing a list of object requirements
source source
this type's source (init manifest) this type's source (init manifest)
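Editor's note: since 'parents' and 'children' (and likewise 'require' and 'autorequire') are plain newline-separated lists, the relationship data of a cached object can be read directly. A hypothetical helper, assuming object_dir points at the directory holding the files described above:

    import os

    def read_relations(object_dir):
        """Return (parents, children) for one cached object directory."""
        def read_list(name):
            path = os.path.join(object_dir, name)
            if not os.path.isfile(path):
                return []
            with open(path) as fd:
                return [line.strip() for line in fd if line.strip()]
        return read_list("parents"), read_list("children")

    # An empty or missing 'parents' list marks an object that was
    # specified directly in the init manifest.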