Merge remote-tracking branch 'ungleich/master' into ssh-mux-sigpipe

Commit 01d7f63fcb by Darko Poljak, 2017-07-19 07:55:19 +02:00
44 changed files with 322 additions and 148 deletions


@@ -22,6 +22,7 @@
 import os
 import hashlib
+import cdist.log
 import cdist.version

 VERSION = cdist.version.VERSION


@@ -17,13 +17,19 @@ EPILOG = "Get cdist at http://www.nico.schottelius.org/software/cdist/"
 parser = None

+_verbosity_level_off = -2
 _verbosity_level = {
-    0: logging.ERROR,
-    1: logging.WARNING,
-    2: logging.INFO,
+    _verbosity_level_off: logging.OFF,
+    -1: logging.ERROR,
+    0: logging.WARNING,
+    1: logging.INFO,
+    2: logging.VERBOSE,
+    3: logging.DEBUG,
+    4: logging.TRACE,
 }
+# All verbosity levels above 4 are TRACE.
 _verbosity_level = collections.defaultdict(
-    lambda: logging.DEBUG, _verbosity_level)
+    lambda: logging.TRACE, _verbosity_level)

 def add_beta_command(cmd):
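For orientation (not part of the commit): a minimal Python sketch of how a -v count resolves to a level with the mapping above. The level constants mirror the diff; the ad-hoc attribute assignments below are only there to make the snippet self-contained.

    import collections
    import logging

    # stand-ins for the levels cdist.log defines (see the cdist/log.py hunk below)
    logging.OFF = logging.CRITICAL + 10
    logging.VERBOSE = logging.INFO - 5
    logging.TRACE = logging.DEBUG - 5

    _verbosity_level = collections.defaultdict(
        lambda: logging.TRACE,            # anything above 4 falls back to TRACE
        {-2: logging.OFF, -1: logging.ERROR, 0: logging.WARNING,
         1: logging.INFO, 2: logging.VERBOSE, 3: logging.DEBUG,
         4: logging.TRACE})

    print(_verbosity_level[0])   # default, no -v given: WARNING
    print(_verbosity_level[7])   # -vvvvvvv: TRACE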
@@ -80,16 +86,17 @@ def get_parsers():
     # Options _all_ parsers have in common
     parser['loglevel'] = argparse.ArgumentParser(add_help=False)
     parser['loglevel'].add_argument(
-        '-d', '--debug',
-        help=('Set log level to debug (deprecated, use -vvv instead)'),
+        '-q', '--quiet',
+        help='Quiet mode: disables logging, including WARNING and ERROR',
         action='store_true', default=False)
     parser['loglevel'].add_argument(
         '-v', '--verbose',
         help=('Increase the verbosity level. Every instance of -v '
               'increments the verbosity level by one. Its default value '
-              'is 0. There are 4 levels of verbosity. The order of levels '
-              'from the lowest to the highest are: ERROR (0), '
-              'WARNING (1), INFO (2) and DEBUG (3 or higher).'),
+              'is 0 which includes ERROR and WARNING levels. '
+              'The levels, in order from the lowest to the highest, are: '
+              'ERROR (-1), WARNING (0), INFO (1), VERBOSE (2), DEBUG (3) '
+              'TRACE (4 or higher).'),
         action='count', default=0)

     parser['beta'] = argparse.ArgumentParser(add_help=False)
@@ -211,12 +218,7 @@ def get_parsers():
 def handle_loglevel(args):
-    if args.debug:
-        retval = "-d/--debug is deprecated, use -vvv instead"
-        args.verbose = 3
-    else:
-        retval = None
+    if args.quiet:
+        args.verbose = _verbosity_level_off

     logging.root.setLevel(_verbosity_level[args.verbose])
-    return retval


@@ -29,8 +29,11 @@ upload_file=
 create_file=
 if [ "$state_should" = "present" -o "$state_should" = "exists" ]; then
     if [ ! -f "$__object/parameter/source" ]; then
+        remote_stat="$(cat "$__object/explorer/stat")"
+        if [ -z "$remote_stat" ]; then
             create_file=1
             echo create >> "$__messages_out"
+        fi
     else
         source="$(cat "$__object/parameter/source")"
         if [ "$source" = "-" ]; then


@@ -1 +1,3 @@
+#!/bin/sh -e
+
 __line go_in_path --line 'export PATH=/usr/local/go/bin:$PATH' --file /etc/profile


@@ -1,3 +1,5 @@
+#!/bin/sh -e
+
 os=$(cat $__global/explorer/os)
 os_version=$(cat $__global/explorer/os_version)


@@ -0,0 +1 @@
+../__chroot_mount/gencode-local



@@ -151,7 +151,7 @@ class Config(object):
             hostcnt += 1
             if args.parallel:
-                log.debug("Creating child process for %s", host)
+                log.trace("Creating child process for %s", host)
                 process[host] = multiprocessing.Process(
                     target=cls.onehost,
                     args=(host, host_base_path, hostdir, args, True))
@@ -166,14 +166,14 @@
         # Catch errors in parallel mode when joining
         if args.parallel:
             for host in process.keys():
-                log.debug("Joining process %s", host)
+                log.trace("Joining process %s", host)
                 process[host].join()
                 if not process[host].exitcode == 0:
                     failed_hosts.append(host)

         time_end = time.time()
-        log.info("Total processing time for %s host(s): %s", hostcnt,
-                 (time_end - time_start))
+        log.verbose("Total processing time for %s host(s): %s", hostcnt,
+                    (time_end - time_start))

         if len(failed_hosts) > 0:
@@ -233,13 +233,15 @@
             host_dir_name=host_dir_name,
             initial_manifest=args.manifest,
             add_conf_dirs=args.conf_dir,
-            cache_path_pattern=args.cache_path_pattern)
+            cache_path_pattern=args.cache_path_pattern,
+            quiet_mode=args.quiet)

         remote = cdist.exec.remote.Remote(
             target_host=target_host,
             remote_exec=remote_exec,
             remote_copy=remote_copy,
-            base_path=args.remote_out_path)
+            base_path=args.remote_out_path,
+            quiet_mode=args.quiet)

         cleanup_cmds = []
         if cleanup_cmd:
@@ -284,6 +286,8 @@
         """Do what is most often done: deploy & cleanup"""
         start_time = time.time()

+        self.log.info("Starting configuration run")
+
         self._init_files_dirs()

         self.explorer.run_global_explorers(self.local.global_explorer_out_path)
@@ -292,8 +296,8 @@
         self.cleanup()
         self.local.save_cache(start_time)
-        self.log.info("Finished successful run in %s seconds",
-                      time.time() - start_time)
+        self.log.info("Finished successful run in {:.2f} seconds".format(
+            time.time() - start_time))

     def cleanup(self):
         self.log.debug("Running cleanup commands")
@@ -329,7 +333,7 @@
         return objects_changed

     def _iterate_once_sequential(self):
-        self.log.info("Iteration in sequential mode")
+        self.log.debug("Iteration in sequential mode")
         objects_changed = False

         for cdist_object in self.object_list():
@@ -356,7 +360,7 @@
         return objects_changed

     def _iterate_once_parallel(self):
-        self.log.info("Iteration in parallel mode in {} jobs".format(
+        self.log.debug("Iteration in parallel mode in {} jobs".format(
             self.jobs))
         objects_changed = False
@@ -379,15 +383,39 @@
             self.object_prepare(cargo[0])
             objects_changed = True
         elif cargo:
-            self.log.debug("Multiprocessing start method is {}".format(
+            self.log.trace("Multiprocessing start method is {}".format(
                 multiprocessing.get_start_method()))
-            self.log.debug(("Starting multiprocessing Pool for {} parallel "
+            self.log.trace("Multiprocessing cargo: %s", cargo)
+            cargo_types = set()
+            for c in cargo:
+                cargo_types.add(c.cdist_type)
+            self.log.trace("Multiprocessing cargo_types: %s", cargo_types)
+            nt = len(cargo_types)
+            if nt == 1:
+                self.log.debug(("Only one type, transfering explorers "
+                                "sequentially"))
+                self.explorer.transfer_type_explorers(cargo_types.pop())
+            else:
+                self.log.trace(("Starting multiprocessing Pool for {} "
+                                "parallel transfering types' explorers".format(
+                                    nt)))
+                args = [
+                    (ct, ) for ct in cargo_types
+                ]
+                mp_pool_run(self.explorer.transfer_type_explorers, args,
+                            jobs=self.jobs)
+                self.log.trace(("Multiprocessing for parallel transfering "
+                                "types' explorers finished"))
+            self.log.trace(("Starting multiprocessing Pool for {} parallel "
                 "objects preparation".format(n)))
             args = [
-                (c, ) for c in cargo
+                (c, False, ) for c in cargo
             ]
             mp_pool_run(self.object_prepare, args, jobs=self.jobs)
-            self.log.debug(("Multiprocessing for parallel object "
+            self.log.trace(("Multiprocessing for parallel object "
                 "preparation finished"))
             objects_changed = True
@@ -407,23 +435,42 @@
             #     self.object_run(cdist_object)
             #     objects_changed = True
-            cargo.append(cdist_object)

-        n = len(cargo)
+            # put objects in chuncks of distinct types
+            # so that there is no more than one object
+            # of the same type in one chunk because there is a
+            # possibility of object's process locking which
+            # prevents parallel execution at remote
+            # and do this only for nonparallel marked types
+            for chunk in cargo:
+                for obj in chunk:
+                    if (obj.cdist_type == cdist_object.cdist_type and
+                            cdist_object.cdist_type.is_nonparallel):
+                        break
+                else:
+                    chunk.append(cdist_object)
+                    break
+            else:
+                chunk = [cdist_object, ]
+                cargo.append(chunk)
+
+        for chunk in cargo:
+            self.log.trace("Running chunk: %s", chunk)
+            n = len(chunk)
             if n == 1:
                 self.log.debug("Only one object, running sequentially")
-                self.object_run(cargo[0])
+                self.object_run(chunk[0])
                 objects_changed = True
-            elif cargo:
-                self.log.debug("Multiprocessing start method is {}".format(
+            elif chunk:
+                self.log.trace("Multiprocessing start method is {}".format(
                     multiprocessing.get_start_method()))
-                self.log.debug(("Starting multiprocessing Pool for {} parallel "
-                                "object run".format(n)))
+                self.log.trace(("Starting multiprocessing Pool for {} "
+                                "parallel object run".format(n)))
                 args = [
-                    (c, ) for c in cargo
+                    (c, ) for c in chunk
                 ]
                 mp_pool_run(self.object_run, args, jobs=self.jobs)
-                self.log.debug(("Multiprocessing for parallel object "
-                                "run finished"))
+                self.log.trace(("Multiprocessing for parallel object "
+                                "run finished"))
                 objects_changed = True
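For orientation (not part of the commit): a self-contained Python sketch of the chunking idea above, i.e. a chunk never holds two objects of the same nonparallel type. The Obj stand-in and the sample data are made up for the example; the real code works on cdist objects and checks cdist_type.is_nonparallel.

    import collections

    Obj = collections.namedtuple('Obj', ['name', 'cdist_type', 'nonparallel'])

    def chunk_objects(objects):
        # group objects so that no chunk holds two objects of the same
        # nonparallel type; parallel-safe types can share a chunk freely
        cargo = []
        for obj in objects:
            for chunk in cargo:
                if obj.nonparallel and any(o.cdist_type == obj.cdist_type
                                           for o in chunk):
                    continue  # conflict: try the next chunk
                chunk.append(obj)
                break
            else:
                cargo.append([obj])  # no chunk could take it: start a new one
        return cargo

    objs = [Obj('a', '__x', True), Obj('b', '__x', True), Obj('c', '__y', False)]
    print([[o.name for o in c] for c in chunk_objects(objs)])  # [['a', 'c'], ['b']]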
@@ -491,18 +538,19 @@
                 ("The requirements of the following objects could not be "
                  "resolved:\n%s") % ("\n".join(info_string)))

-    def object_prepare(self, cdist_object):
+    def object_prepare(self, cdist_object, transfer_type_explorers=True):
         """Prepare object: Run type explorer + manifest"""
-        self.log.info(
+        self.log.verbose("Preparing object {}".format(cdist_object.name))
+        self.log.verbose(
             "Running manifest and explorers for " + cdist_object.name)
-        self.explorer.run_type_explorers(cdist_object)
+        self.explorer.run_type_explorers(cdist_object, transfer_type_explorers)
         self.manifest.run_type_manifest(cdist_object)
         cdist_object.state = core.CdistObject.STATE_PREPARED

     def object_run(self, cdist_object):
         """Run gencode and code for an object"""
-        self.log.debug("Trying to run object %s" % (cdist_object.name))
+        self.log.verbose("Running object " + cdist_object.name)
         if cdist_object.state == core.CdistObject.STATE_DONE:
             raise cdist.Error(("Attempting to run an already finished "
                                "object: %s"), cdist_object)
@@ -510,7 +558,7 @@
         cdist_type = cdist_object.cdist_type

         # Generate
-        self.log.info("Generating code for %s" % (cdist_object.name))
+        self.log.debug("Generating code for %s" % (cdist_object.name))
         cdist_object.code_local = self.code.run_gencode_local(cdist_object)
         cdist_object.code_remote = self.code.run_gencode_remote(cdist_object)
         if cdist_object.code_local or cdist_object.code_remote:
@@ -519,15 +567,19 @@
         # Execute
         if not self.dry_run:
             if cdist_object.code_local or cdist_object.code_remote:
-                self.log.info("Executing code for %s" % (cdist_object.name))
+                self.log.info("Processing %s" % (cdist_object.name))
             if cdist_object.code_local:
+                self.log.trace("Executing local code for %s"
+                               % (cdist_object.name))
                 self.code.run_code_local(cdist_object)
             if cdist_object.code_remote:
+                self.log.trace("Executing remote code for %s"
+                               % (cdist_object.name))
                 self.code.transfer_code_remote(cdist_object)
                 self.code.run_code_remote(cdist_object)
         else:
-            self.log.info("Skipping code execution due to DRY RUN")
+            self.log.verbose("Skipping code execution due to DRY RUN")

         # Mark this object as done
-        self.log.debug("Finishing run of " + cdist_object.name)
+        self.log.trace("Finishing run of " + cdist_object.name)
         cdist_object.state = core.CdistObject.STATE_DONE


@@ -27,3 +27,4 @@ from cdist.core.cdist_object import IllegalObjectIdError
 from cdist.core.explorer import Explorer
 from cdist.core.manifest import Manifest
 from cdist.core.code import Code
+from cdist.core.util import listdir


@@ -22,7 +22,6 @@
 #
 import fnmatch
-import logging
 import os
 import collections

@@ -30,8 +29,6 @@ import cdist
 import cdist.core
 from cdist.util import fsproperty

-log = logging.getLogger(__name__)

 class IllegalObjectIdError(cdist.Error):
     def __init__(self, object_id, message=None):

@@ -107,7 +104,7 @@ class CdistObject(object):
     @classmethod
     def list_type_names(cls, object_base_path):
         """Return a list of type names"""
-        return os.listdir(object_base_path)
+        return cdist.core.listdir(object_base_path)

     @staticmethod
     def split_name(object_name):


@@ -21,8 +21,8 @@
 #
 import os
 import cdist
+import cdist.core

 class NoSuchTypeError(cdist.Error):

@@ -66,6 +66,9 @@ class CdistType(object):
         self.__boolean_parameters = None
         self.__parameter_defaults = None

+    def __hash__(self):
+        return hash(self.name)
+
     @classmethod
     def list_types(cls, base_path):
         """Return a list of type instances"""

@@ -75,7 +78,7 @@
     @classmethod
     def list_type_names(cls, base_path):
         """Return a list of type names"""
-        return os.listdir(base_path)
+        return cdist.core.listdir(base_path)

     _instances = {}

@@ -112,13 +115,19 @@
            (if not: for configuration)"""
         return os.path.isfile(os.path.join(self.absolute_path, "install"))

+    @property
+    def is_nonparallel(self):
+        """Check whether a type is a non parallel, i.e. its objects
+           cannot run in parallel."""
+        return os.path.isfile(os.path.join(self.absolute_path, "nonparallel"))
+
     @property
     def explorers(self):
         """Return a list of available explorers"""
         if not self.__explorers:
             try:
-                self.__explorers = os.listdir(os.path.join(self.absolute_path,
-                                                           "explorer"))
+                self.__explorers = cdist.core.listdir(
+                    os.path.join(self.absolute_path, "explorer"))
             except EnvironmentError:
                 # error ignored
                 self.__explorers = []

@@ -222,7 +231,7 @@
         defaults_dir = os.path.join(self.absolute_path,
                                     "parameter",
                                     "default")
-        for name in os.listdir(defaults_dir):
+        for name in cdist.core.listdir(defaults_dir):
             try:
                 with open(os.path.join(defaults_dir, name)) as fd:
                     defaults[name] = fd.read().strip()
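For orientation (not part of the commit): the new is_nonparallel property only checks for a marker file inside the type directory. A minimal sketch of creating such a marker; the type name and path are made up.

    import os

    type_dir = "conf/type/__my_locking_type"   # hypothetical type directory
    os.makedirs(type_dir, exist_ok=True)
    # presence of the empty "nonparallel" file is all that is_nonparallel checks
    open(os.path.join(type_dir, "nonparallel"), "w").close()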


@@ -21,13 +21,10 @@
 #
 #

-import logging
 import os

 import cdist

-log = logging.getLogger(__name__)

 '''
 common:

@@ -143,8 +140,7 @@ class Code(object):
                               cdist_object.code_remote_path)
         destination = os.path.join(self.remote.object_path,
                                    cdist_object.code_remote_path)
-        # FIXME: BUG: do not create destination, but top level of destination!
-        self.remote.mkdir(destination)
+        self.remote.mkdir(os.path.dirname(destination))
         self.remote.transfer(source, destination)

     def _run_code(self, cdist_object, which, env=None):


@@ -95,7 +95,7 @@
         out_path directory.
         """
-        self.log.info("Running global explorers")
+        self.log.verbose("Running global explorers")
         self.transfer_global_explorers()
         if self.jobs is None:
             self._run_global_explorers_seq(out_path)

@@ -109,22 +109,22 @@
             fd.write(output)

     def _run_global_explorers_seq(self, out_path):
-        self.log.info("Running global explorers sequentially")
+        self.log.debug("Running global explorers sequentially")
         for explorer in self.list_global_explorer_names():
             self._run_global_explorer(explorer, out_path)

     def _run_global_explorers_parallel(self, out_path):
-        self.log.info("Running global explorers in {} parallel jobs".format(
+        self.log.debug("Running global explorers in {} parallel jobs".format(
             self.jobs))
-        self.log.debug("Multiprocessing start method is {}".format(
+        self.log.trace("Multiprocessing start method is {}".format(
             multiprocessing.get_start_method()))
-        self.log.debug(("Starting multiprocessing Pool for global "
+        self.log.trace(("Starting multiprocessing Pool for global "
                         "explorers run"))
         args = [
             (e, out_path, ) for e in self.list_global_explorer_names()
         ]
         mp_pool_run(self._run_global_explorer, args, jobs=self.jobs)
-        self.log.debug(("Multiprocessing run for global explorers "
+        self.log.trace(("Multiprocessing run for global explorers "
                         "finished"))

     # logger is not pickable, so remove it when we pickle

@@ -163,20 +163,27 @@
         except EnvironmentError:
             return []

-    def run_type_explorers(self, cdist_object):
+    def run_type_explorers(self, cdist_object, transfer_type_explorers=True):
         """Run the type explorers for the given object and save their output
         in the object.
         """
-        self.log.debug("Transfering type explorers for type: %s",
-                       cdist_object.cdist_type)
-        self.transfer_type_explorers(cdist_object.cdist_type)
-        self.log.debug("Transfering object parameters for object: %s",
+        self.log.verbose("Running type explorers for {}".format(
+            cdist_object.cdist_type))
+        if transfer_type_explorers:
+            self.log.trace("Transfering type explorers for type: %s",
+                           cdist_object.cdist_type)
+            self.transfer_type_explorers(cdist_object.cdist_type)
+        else:
+            self.log.trace(("No need for transfering type explorers for "
+                            "type: %s"),
+                           cdist_object.cdist_type)
+        self.log.trace("Transfering object parameters for object: %s",
                        cdist_object.name)
         self.transfer_object_parameters(cdist_object)
         for explorer in self.list_type_explorer_names(cdist_object.cdist_type):
             output = self.run_type_explorer(explorer, cdist_object)
-            self.log.debug("Running type explorer '%s' for object '%s'",
+            self.log.trace("Running type explorer '%s' for object '%s'",
                            explorer, cdist_object.name)
             cdist_object.explorers[explorer] = output

@@ -203,7 +210,7 @@
         remote side."""
         if cdist_type.explorers:
             if cdist_type.name in self._type_explorers_transferred:
-                self.log.debug("Skipping retransfer of type explorers for: %s",
+                self.log.trace("Skipping retransfer of type explorers for: %s",
                                cdist_type)
             else:
                 source = os.path.join(self.local.type_path,


@@ -145,12 +145,11 @@
         else:
             user_supplied = True

-        self.log.info("Running initial manifest " + initial_manifest)
-
         if not os.path.isfile(initial_manifest):
             raise NoInitialManifestError(initial_manifest, user_supplied)

         message_prefix = "initialmanifest"
+        self.log.verbose("Running initial manifest " + initial_manifest)
         self.local.run_script(initial_manifest,
                               env=self.env_initial_manifest(initial_manifest),
                               message_prefix=message_prefix,

@@ -177,6 +176,7 @@
                                      cdist_object.cdist_type.manifest_path)
         message_prefix = cdist_object.name
         if os.path.isfile(type_manifest):
+            self.log.verbose("Running type manifest " + type_manifest)
             self.local.run_script(type_manifest,
                                   env=self.env_type_manifest(cdist_object),
                                   message_prefix=message_prefix,

cdist/core/util.py (new file, 36 lines)

@@ -0,0 +1,36 @@
# -*- coding: utf-8 -*-
#
# 2017 Darko Poljak (darko.poljak at gmail.com)
#
# This file is part of cdist.
#
# cdist is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# cdist is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with cdist. If not, see <http://www.gnu.org/licenses/>.
#
#
import os
def listdir(path='.', include_dot=False):
    """os.listdir but do not include entries whose names begin with a dot('.')
    if include_dot is False.
    """
    if include_dot:
        return os.listdir(path)
    else:
        return [x for x in os.listdir(path) if not _ishidden(x)]


def _ishidden(path):
    return path[0] in ('.', b'.'[0])
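For orientation (not part of the commit): a small usage sketch of the helper above; the directory contents are made up.

    from cdist.core.util import listdir

    entries = listdir("conf/type")                    # e.g. ['__file', '__line'], dot entries skipped
    everything = listdir("conf/type", include_dot=True)  # would also include entries such as '.git'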


@@ -98,7 +98,7 @@ class Emulator(object):
         self.save_stdin()
         self.record_requirements()
         self.record_auto_requirements()
-        self.log.debug("Finished %s %s" % (
+        self.log.trace("Finished %s %s" % (
             self.cdist_object.path, self.parameters))

     def __init_log(self):

@@ -148,7 +148,7 @@
         # And finally parse/verify parameter
         self.args = parser.parse_args(self.argv[1:])
-        self.log.debug('Args: %s' % self.args)
+        self.log.trace('Args: %s' % self.args)

     def setup_object(self):
         # Setup object - and ensure it is not in args

@@ -256,10 +256,10 @@
         # (this would leed to an circular dependency)
         if ("CDIST_ORDER_DEPENDENCY" in self.env and
                 'CDIST_OVERRIDE' not in self.env):
-            # load object name created bevor this one from typeorder file ...
+            # load object name created befor this one from typeorder file ...
             with open(self.typeorder_path, 'r') as typecreationfile:
                 typecreationorder = typecreationfile.readlines()
-            # get the type created bevore this one ...
+            # get the type created before this one ...
             try:
                 lastcreatedtype = typecreationorder[-2].strip()
                 if 'require' in self.env:


@@ -54,7 +54,8 @@ class Local(object):
                  exec_path=sys.argv[0],
                  initial_manifest=None,
                  add_conf_dirs=None,
-                 cache_path_pattern=None):
+                 cache_path_pattern=None,
+                 quiet_mode=False):

         self.target_host = target_host
         self.hostdir = host_dir_name

@@ -64,6 +65,7 @@
         self.custom_initial_manifest = initial_manifest
         self._add_conf_dirs = add_conf_dirs
         self.cache_path_pattern = cache_path_pattern
+        self.quiet_mode = quiet_mode

         self._init_log()
         self._init_permissions()

@@ -163,7 +165,7 @@
         with open(self.object_marker_file, 'w') as fd:
             fd.write("%s\n" % self.object_marker_name)

-        self.log.debug("Object marker %s saved in %s" % (
+        self.log.trace("Object marker %s saved in %s" % (
             self.object_marker_name, self.object_marker_file))

     def _init_cache_dir(self, cache_dir):

@@ -178,12 +180,12 @@
     def rmdir(self, path):
         """Remove directory on the local side."""
-        self.log.debug("Local rmdir: %s", path)
+        self.log.trace("Local rmdir: %s", path)
         shutil.rmtree(path)

     def mkdir(self, path):
         """Create directory on the local side."""
-        self.log.debug("Local mkdir: %s", path)
+        self.log.trace("Local mkdir: %s", path)
         os.makedirs(path, exist_ok=True)

     def run(self, command, env=None, return_output=False, message_prefix=None,

@@ -192,7 +194,6 @@
         Return the output as a string.
         """
-        self.log.debug("Local run: %s", command)
         assert isinstance(command, (list, tuple)), (
             "list or tuple argument expected, got: %s" % command)

@@ -211,19 +212,30 @@
             message = cdist.message.Message(message_prefix, self.messages_path)
             env.update(message.env)

+        self.log.trace("Local run: %s", command)
         try:
+            if self.quiet_mode:
+                stderr = subprocess.DEVNULL
+            else:
+                stderr = None
             if save_output:
-                output, errout = exec_util.call_get_output(command, env=env)
-                self.log.debug("Local stdout: {}".format(output))
+                output, errout = exec_util.call_get_output(
+                    command, env=env, stderr=stderr)
+                self.log.trace("Local stdout: {}".format(output))
                 # Currently, stderr is not captured.
-                # self.log.debug("Local stderr: {}".format(errout))
+                # self.log.trace("Local stderr: {}".format(errout))
                 if return_output:
                     return output.decode()
             else:
                 # In some cases no output is saved.
                 # This is used for shell command, stdout and stderr
                 # must not be catched.
-                subprocess.check_call(command, env=env)
+                if self.quiet_mode:
+                    stdout = subprocess.DEVNULL
+                else:
+                    stdout = None
+                subprocess.check_call(command, env=env, stderr=stderr,
+                                      stdout=stdout)
         except subprocess.CalledProcessError as e:
             exec_util.handle_called_process_error(e, command)
         except OSError as error:

@@ -279,13 +291,14 @@
         return cache_subpath

     def save_cache(self, start_time=time.time()):
-        self.log.debug("cache subpath pattern: {}".format(
+        self.log.trace("cache subpath pattern: {}".format(
             self.cache_path_pattern))
         cache_subpath = self._cache_subpath(start_time,
                                             self.cache_path_pattern)
         self.log.debug("cache subpath: {}".format(cache_subpath))
         destination = os.path.join(self.cache_path, cache_subpath)
-        self.log.debug("Saving " + self.base_path + " to " + destination)
+        self.log.trace(("Saving cache: " + self.base_path + " to " +
+                        destination))

         if not os.path.exists(destination):
             shutil.move(self.base_path, destination)

@@ -340,7 +353,7 @@
         if os.path.exists(dst):
             os.unlink(dst)

-        self.log.debug("Linking %s to %s ..." % (src, dst))
+        self.log.trace("Linking %s to %s ..." % (src, dst))
         try:
             os.symlink(src, dst)
         except OSError as e:

@@ -352,7 +365,7 @@
         src = os.path.abspath(self.exec_path)
         for cdist_type in core.CdistType.list_types(self.type_path):
             dst = os.path.join(self.bin_path, cdist_type.name)
-            self.log.debug("Linking emulator: %s to %s", src, dst)
+            self.log.trace("Linking emulator: %s to %s", src, dst)
             try:
                 os.symlink(src, dst)


@@ -62,7 +62,8 @@
                  target_host,
                  remote_exec,
                  remote_copy,
-                 base_path=None):
+                 base_path=None,
+                 quiet_mode=None):
         self.target_host = target_host
         self._exec = remote_exec
         self._copy = remote_copy

@@ -71,6 +72,7 @@
             self.base_path = base_path
         else:
             self.base_path = "/var/lib/cdist"
+        self.quiet_mode = quiet_mode

         self.conf_path = os.path.join(self.base_path, "conf")
         self.object_path = os.path.join(self.base_path, "object")

@@ -111,18 +113,18 @@
     def rmdir(self, path):
         """Remove directory on the remote side."""
-        self.log.debug("Remote rmdir: %s", path)
+        self.log.trace("Remote rmdir: %s", path)
         self.run(["rm", "-rf", path])

     def mkdir(self, path):
         """Create directory on the remote side."""
-        self.log.debug("Remote mkdir: %s", path)
+        self.log.trace("Remote mkdir: %s", path)
         self.run(["mkdir", "-p", path])

     def transfer(self, source, destination, jobs=None):
         """Transfer a file or directory to the remote side."""
-        self.log.debug("Remote transfer: %s -> %s", source, destination)
-        self.rmdir(destination)
+        self.log.trace("Remote transfer: %s -> %s", source, destination)
+        # self.rmdir(destination)
         if os.path.isdir(source):
             self.mkdir(destination)
             if jobs:

@@ -147,11 +149,11 @@
     def _transfer_dir_parallel(self, source, destination, jobs):
         """Transfer a directory to the remote side in parallel mode."""
-        self.log.info("Remote transfer in {} parallel jobs".format(
+        self.log.debug("Remote transfer in {} parallel jobs".format(
             jobs))
-        self.log.debug("Multiprocessing start method is {}".format(
+        self.log.trace("Multiprocessing start method is {}".format(
             multiprocessing.get_start_method()))
-        self.log.debug(("Starting multiprocessing Pool for parallel "
+        self.log.trace(("Starting multiprocessing Pool for parallel "
                         "remote transfer"))
         args = []
         for f in glob.glob1(source, '*'):

@@ -161,7 +163,7 @@
                 _wrap_addr(self.target_host[0]), destination)])
             args.append((command, ))
         mp_pool_run(self._run_command, args, jobs=jobs)
-        self.log.debug(("Multiprocessing for parallel transfer "
+        self.log.trace(("Multiprocessing for parallel transfer "
                         "finished"))

     def run_script(self, script, env=None, return_output=False):

@@ -226,12 +228,17 @@
         os_environ['__target_hostname'] = self.target_host[1]
         os_environ['__target_fqdn'] = self.target_host[2]

-        self.log.debug("Remote run: %s", command)
+        self.log.trace("Remote run: %s", command)
         try:
-            output, errout = exec_util.call_get_output(command, env=os_environ)
-            self.log.debug("Remote stdout: {}".format(output))
+            if self.quiet_mode:
+                stderr = subprocess.DEVNULL
+            else:
+                stderr = None
+            output, errout = exec_util.call_get_output(
+                command, env=os_environ, stderr=stderr)
+            self.log.trace("Remote stdout: {}".format(output))
             # Currently, stderr is not captured.
-            # self.log.debug("Remote stderr: {}".format(errout))
+            # self.log.trace("Remote stderr: {}".format(errout))
             if return_output:
                 return output.decode()
         except subprocess.CalledProcessError as e:


@@ -116,14 +116,14 @@ import cdist
 #     return (result.stdout, result.stderr)

-def call_get_output(command, env=None):
+def call_get_output(command, env=None, stderr=None):
     """Run the given command with the given environment.
     Return the tuple of stdout and stderr output as a byte strings.
     """
     assert isinstance(command, (list, tuple)), (
         "list or tuple argument expected, got: {}".format(command))
-    return (_call_get_stdout(command, env), None)
+    return (_call_get_stdout(command, env, stderr), None)

 def handle_called_process_error(err, command):

@@ -140,7 +140,7 @@
             err.returncode, err.output))

-def _call_get_stdout(command, env=None):
+def _call_get_stdout(command, env=None, stderr=None):
     """Run the given command with the given environment.
     Return the stdout output as a byte string, stderr is ignored.
     """

@@ -148,7 +148,7 @@
         "list or tuple argument expected, got: {}".format(command))
     with TemporaryFile() as fout:
-        subprocess.check_call(command, env=env, stdout=fout)
+        subprocess.check_call(command, env=env, stdout=fout, stderr=stderr)
         fout.seek(0)
         output = fout.read()
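For orientation (not part of the commit): a minimal sketch of how the quiet flag is expected to silence subprocess noise through the stderr parameter added above. The command is made up; passing stderr=None leaves the caller's stderr untouched.

    import subprocess
    from cdist.exec import util as exec_util

    quiet_mode = True
    stderr = subprocess.DEVNULL if quiet_mode else None
    out, _ = exec_util.call_get_output(["uname", "-a"], stderr=stderr)
    print(out.decode())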


@@ -23,6 +23,31 @@
 import logging

+# Define additional cdist logging levels.
+logging.OFF = logging.CRITICAL + 10  # disable logging
+logging.addLevelName(logging.OFF, 'OFF')
+
+logging.VERBOSE = logging.INFO - 5
+logging.addLevelName(logging.VERBOSE, 'VERBOSE')
+
+
+def _verbose(msg, *args, **kwargs):
+    logging.log(logging.VERBOSE, msg, *args, **kwargs)
+
+
+logging.verbose = _verbose
+
+logging.TRACE = logging.DEBUG - 5
+logging.addLevelName(logging.TRACE, 'TRACE')
+
+
+def _trace(msg, *args, **kwargs):
+    logging.log(logging.TRACE, msg, *args, **kwargs)
+
+
+logging.trace = _trace
+
 class Log(logging.Logger):

     def __init__(self, name):

@@ -37,3 +62,13 @@ class Log(logging.Logger):
         record.msg = self.name + ": " + str(record.msg)
         return True
+
+    def verbose(self, msg, *args, **kwargs):
+        self.log(logging.VERBOSE, msg, *args, **kwargs)
+
+    def trace(self, msg, *args, **kwargs):
+        self.log(logging.TRACE, msg, *args, **kwargs)
+
+
+logging.setLoggerClass(Log)
+logging.basicConfig(format='%(levelname)s: %(message)s')
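For orientation (not part of the commit): a small usage sketch of the levels defined above. The logger name is arbitrary; importing cdist.log installs the Log class and the OFF/VERBOSE/TRACE levels as a side effect.

    import logging
    import cdist.log  # noqa: F401 (imported for its side effects)

    log = logging.getLogger("example")
    logging.root.setLevel(logging.VERBOSE)
    log.verbose("shown at VERBOSE (-vv) and above")
    log.trace("suppressed here: TRACE is below the VERBOSE threshold")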


@@ -86,10 +86,10 @@ class Shell(object):
         self._init_files_dirs()
         self._init_environment()

-        log.info("Starting shell...")
+        log.trace("Starting shell...")
         # save_output=False -> do not catch stdout and stderr
         self.local.run([self.shell], self.env, save_output=False)
-        log.info("Finished shell.")
+        log.trace("Finished shell.")

     @classmethod
     def commandline(cls, args):


@@ -113,6 +113,16 @@ class TypeTestCase(test.CdistTestCase):
         cdist_type = core.CdistType(base_path, '__not_singleton')
         self.assertFalse(cdist_type.is_singleton)

+    def test_nonparallel_is_nonparallel(self):
+        base_path = fixtures
+        cdist_type = core.CdistType(base_path, '__nonparallel')
+        self.assertTrue(cdist_type.is_nonparallel)
+
+    def test_not_nonparallel_is_nonparallel(self):
+        base_path = fixtures
+        cdist_type = core.CdistType(base_path, '__not_nonparallel')
+        self.assertFalse(cdist_type.is_nonparallel)
+
     def test_install_is_install(self):
         base_path = fixtures
         cdist_type = core.CdistType(base_path, '__install')


@@ -45,7 +45,7 @@ def resolve_target_host_name(host):
         log.debug("derived host_name for host \"{}\": {}".format(
             host, host_name))
     except (socket.gaierror, socket.herror) as e:
-        log.warn("Could not derive host_name for {}"
-                 ", $host_name will be empty. Error is: {}".format(host, e))
+        log.warning("Could not derive host_name for {}"
+                    ", $host_name will be empty. Error is: {}".format(host, e))
         # in case of error provide empty value
         host_name = ''

@@ -59,7 +59,7 @@ def resolve_target_fqdn(host):
         log.debug("derived host_fqdn for host \"{}\": {}".format(
             host, host_fqdn))
     except socket.herror as e:
-        log.warn("Could not derive host_fqdn for {}"
-                 ", $host_fqdn will be empty. Error is: {}".format(host, e))
+        log.warning("Could not derive host_fqdn for {}"
+                    ", $host_fqdn will be empty. Error is: {}".format(host, e))
         # in case of error provide empty value
         host_fqdn = ''


@@ -7,11 +7,16 @@ next:
 	* Core: Allow manifest and gencode scripts to be written in any language (Darko Poljak)
 	* Documentation: Improvements to the english and fix typos (Mesar Hameed)
 	* Core: Merge -C custom cache path pattern option from beta branch (Darko Poljak)
+	* Core: Improve and cleanup logging (Darko Poljak, Steven Armstrong)
+	* Core: Remove deprecated -d option (Darko Poljak)
+	* Type __file: If no --source then create only if there is no file (Ander Punnar)
+	* Core: Ignore directory entries that begin with dot('.') (Darko Poljak)
+	* Core: Fix parallel object prepare and run steps and add nonparallel type marker (Darko Poljak)

 4.4.4: 2017-06-16
 	* Core: Support -j parallelization for object prepare and object run (Darko Poljak)
 	* Type __install_mkfs: mkfs.vfat does not support -q (Nico Schottelius)
-	* Types __go_get, __daemontools*, __prometheus*: Fix missing dependencies, fix arguments(Kamila Součková)
+	* Types __go_get, __daemontools*, __prometheus*: Fix missing dependencies, fix arguments (Kamila Součková)

 4.4.3: 2017-06-13
 	* Type __golang_from_vendor: Install golang from https://golang.org/dl/ (Kamila Součková)


@@ -11,23 +11,23 @@ SYNOPSIS
 ::

-    cdist [-h] [-d] [-v] [-V] {banner,config,shell,install} ...
+    cdist [-h] [-v] [-V] {banner,config,shell,install} ...

-    cdist banner [-h] [-d] [-v]
+    cdist banner [-h] [-v]

-    cdist config [-h] [-d] [-v] [-b] [-C CACHE_PATH_PATTERN] [-c CONF_DIR]
+    cdist config [-h] [-v] [-b] [-C CACHE_PATH_PATTERN] [-c CONF_DIR]
                  [-i MANIFEST] [-j [JOBS]] [-n] [-o OUT_PATH]
                  [--remote-copy REMOTE_COPY] [--remote-exec REMOTE_EXEC]
                  [-f HOSTFILE] [-p] [-r REMOTE_OUT_PATH] [-s]
                  [host [host ...]]

-    cdist install [-h] [-d] [-v] [-b] [-C CACHE_PATH_PATTERN] [-c CONF_DIR]
+    cdist install [-h] [-v] [-b] [-C CACHE_PATH_PATTERN] [-c CONF_DIR]
                   [-i MANIFEST] [-j [JOBS]] [-n] [-o OUT_PATH]
                   [--remote-copy REMOTE_COPY] [--remote-exec REMOTE_EXEC]
                   [-f HOSTFILE] [-p] [-r REMOTE_OUT_PATH] [-s]
                   [host [host ...]]

-    cdist shell [-h] [-d] [-v] [-s SHELL]
+    cdist shell [-h] [-v] [-s SHELL]


 DESCRIPTION

@@ -46,16 +46,16 @@ All commands accept the following options:

    Show the help screen

-.. option:: -d, --debug
+.. option:: -q, --quiet

-   Set log level to debug (deprecated, use -vvv instead)
+   Quiet mode: disables logging, including WARNING and ERROR

 .. option:: -v, --verbose

    Increase the verbosity level. Every instance of -v increments the verbosity
-   level by one. Its default value is 0. There are 4 levels of verbosity. The
-   order of levels from the lowest to the highest are: ERROR (0), WARNING (1),
-   INFO (2) and DEBUG (3 or higher).
+   level by one. Its default value is 0 which includes ERROR and WARNING levels.
+   The levels, in order from the lowest to the highest, are:
+   ERROR (-1), WARNING (0), INFO (1), VERBOSE (2), DEBUG (3) TRACE (4 or higher).

 .. option:: -V, --version

@@ -207,7 +207,7 @@ EXAMPLES
 .. code-block:: sh

     # Configure ikq05.ethz.ch with debug enabled
-    % cdist config -d ikq05.ethz.ch
+    % cdist config -vvv ikq05.ethz.ch

     # Configure hosts in parallel and use a different configuration directory
     % cdist config -c ~/p/cdist-nutzung \

@@ -241,7 +241,7 @@ EXAMPLES
         [--group GROUP] [--owner OWNER] [--mode MODE] object_id

     # Install ikq05.ethz.ch with debug enabled
-    % cdist install -d ikq05.ethz.ch
+    % cdist install -vvv ikq05.ethz.ch

 ENVIRONMENT
 -----------


@@ -40,12 +40,10 @@ def commandline():
     args = parser['main'].parse_args(sys.argv[1:])

     # Loglevels are handled globally in here
-    retval = cdist.argparse.handle_loglevel(args)
-    if retval:
-        log.warning(retval)
+    cdist.argparse.handle_loglevel(args)

-    log.debug(args)
-    log.info("version %s" % cdist.VERSION)
+    log.verbose("version %s" % cdist.VERSION)
+    log.trace(args)

     # Work around python 3.3 bug:
     # http://bugs.python.org/issue16308

@@ -80,10 +78,7 @@ if __name__ == "__main__":
     import os
     import re
     import cdist
-    import cdist.log

-    logging.setLoggerClass(cdist.log.Log)
-    logging.basicConfig(format='%(levelname)s: %(message)s')
     log = logging.getLogger("cdist")

     if re.match("__", os.path.basename(sys.argv[0])):