First iteration of logging cleanup.

Authored by Darko Poljak on 2017-06-28 23:36:42 +02:00, committed by Steven Armstrong
parent c63ab44c9c
commit 248656b81f
8 changed files with 49 additions and 49 deletions
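
The diff below swaps many log.debug and log.info calls for log.trace and log.verbose. Those two methods are not part of Python's standard logging module; they presuppose custom levels registered elsewhere in cdist. As a rough sketch of how such levels are typically added (the names and numeric values here are assumptions, not necessarily the ones cdist uses):

    import logging

    # Assumed numeric values: VERBOSE between INFO (20) and DEBUG (10),
    # TRACE below DEBUG. cdist's real constants may differ.
    VERBOSE = 15
    TRACE = 5

    logging.addLevelName(VERBOSE, "VERBOSE")
    logging.addLevelName(TRACE, "TRACE")

    def _verbose(self, msg, *args, **kwargs):
        if self.isEnabledFor(VERBOSE):
            self._log(VERBOSE, msg, args, **kwargs)

    def _trace(self, msg, *args, **kwargs):
        if self.isEnabledFor(TRACE):
            self._log(TRACE, msg, args, **kwargs)

    # Give every logger .verbose() and .trace() methods.
    logging.Logger.verbose = _verbose
    logging.Logger.trace = _trace

With something like that in place, log.trace(...) only shows up at the most detailed verbosity, and log.verbose(...) sits between info and debug.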


@@ -144,7 +144,7 @@ class Config(object):
             hostcnt += 1
             if args.parallel:
-                log.debug("Creating child process for %s", host)
+                log.trace("Creating child process for %s", host)
                 process[host] = multiprocessing.Process(
                     target=cls.onehost,
                     args=(host, host_base_path, hostdir, args, True))
@@ -159,7 +159,7 @@ class Config(object):
         # Catch errors in parallel mode when joining
         if args.parallel:
             for host in process.keys():
-                log.debug("Joining process %s", host)
+                log.trace("Joining process %s", host)
                 process[host].join()
                 if not process[host].exitcode == 0:
@@ -300,7 +300,7 @@ class Config(object):
         return objects_changed

     def _iterate_once_sequential(self):
-        self.log.info("Iteration in sequential mode")
+        self.log.debug("Iteration in sequential mode")
         objects_changed = False

         for cdist_object in self.object_list():
@@ -327,7 +327,7 @@ class Config(object):
         return objects_changed

     def _iterate_once_parallel(self):
-        self.log.info("Iteration in parallel mode in {} jobs".format(
+        self.log.debug("Iteration in parallel mode in {} jobs".format(
             self.jobs))
         objects_changed = False
@@ -350,15 +350,15 @@ class Config(object):
             self.object_prepare(cargo[0])
             objects_changed = True
         elif cargo:
-            self.log.debug("Multiprocessing start method is {}".format(
+            self.log.trace("Multiprocessing start method is {}".format(
                 multiprocessing.get_start_method()))
-            self.log.debug(("Starting multiprocessing Pool for {} parallel "
+            self.log.trace(("Starting multiprocessing Pool for {} parallel "
                             "objects preparation".format(n)))
             args = [
                 (c, ) for c in cargo
             ]
             mp_pool_run(self.object_prepare, args, jobs=self.jobs)
-            self.log.debug(("Multiprocessing for parallel object "
+            self.log.trace(("Multiprocessing for parallel object "
                             "preparation finished"))
             objects_changed = True
@@ -386,15 +386,15 @@ class Config(object):
             self.object_run(cargo[0])
             objects_changed = True
         elif cargo:
-            self.log.debug("Multiprocessing start method is {}".format(
+            self.log.trace("Multiprocessing start method is {}".format(
                 multiprocessing.get_start_method()))
-            self.log.debug(("Starting multiprocessing Pool for {} parallel "
+            self.log.trace(("Starting multiprocessing Pool for {} parallel "
                             "object run".format(n)))
             args = [
                 (c, ) for c in cargo
             ]
             mp_pool_run(self.object_run, args, jobs=self.jobs)
-            self.log.debug(("Multiprocessing for parallel object "
+            self.log.trace(("Multiprocessing for parallel object "
                             "run finished"))
             objects_changed = True
@@ -473,7 +473,7 @@ class Config(object):
     def object_run(self, cdist_object):
         """Run gencode and code for an object"""
-        self.log.debug("Trying to run object %s" % (cdist_object.name))
+        self.log.verbose("Trying to run object %s" % (cdist_object.name))
         if cdist_object.state == core.CdistObject.STATE_DONE:
             raise cdist.Error(("Attempting to run an already finished "
                                "object: %s"), cdist_object)
@@ -500,5 +500,5 @@ class Config(object):
                 self.log.info("Skipping code execution due to DRY RUN")

         # Mark this object as done
-        self.log.debug("Finishing run of " + cdist_object.name)
+        self.log.trace("Finishing run of " + cdist_object.name)
         cdist_object.state = core.CdistObject.STATE_DONE
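
Several of the hunks above and below only retag the messages around mp_pool_run, the helper cdist uses to run one callable over a list of argument tuples in a pool of worker processes. Its real implementation lives elsewhere in the cdist source; a minimal sketch of such a helper, assuming it is a thin wrapper around multiprocessing.Pool.starmap, could look like:

    import multiprocessing

    def mp_pool_run(func, args, jobs=None):
        # Sketch only: call func(*a) for every tuple a in args, using up
        # to `jobs` worker processes, and return the collected results.
        with multiprocessing.Pool(jobs) as pool:
            return pool.starmap(func, args)

That shape matches the call sites in the diff, e.g. mp_pool_run(self.object_prepare, [(c, ) for c in cargo], jobs=self.jobs).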


@@ -95,7 +95,7 @@ class Explorer(object):
         out_path directory.
         """
-        self.log.info("Running global explorers")
+        self.log.trace("Running global explorers")
         self.transfer_global_explorers()
         if self.jobs is None:
             self._run_global_explorers_seq(out_path)
@@ -109,22 +109,22 @@ class Explorer(object):
             fd.write(output)

     def _run_global_explorers_seq(self, out_path):
-        self.log.info("Running global explorers sequentially")
+        self.log.debug("Running global explorers sequentially")
         for explorer in self.list_global_explorer_names():
             self._run_global_explorer(explorer, out_path)

     def _run_global_explorers_parallel(self, out_path):
-        self.log.info("Running global explorers in {} parallel jobs".format(
+        self.log.debug("Running global explorers in {} parallel jobs".format(
             self.jobs))
-        self.log.debug("Multiprocessing start method is {}".format(
+        self.log.trace("Multiprocessing start method is {}".format(
             multiprocessing.get_start_method()))
-        self.log.debug(("Starting multiprocessing Pool for global "
+        self.log.trace(("Starting multiprocessing Pool for global "
                         "explorers run"))
         args = [
             (e, out_path, ) for e in self.list_global_explorer_names()
         ]
         mp_pool_run(self._run_global_explorer, args, jobs=self.jobs)
-        self.log.debug(("Multiprocessing run for global explorers "
+        self.log.trace(("Multiprocessing run for global explorers "
                         "finished"))

     # logger is not pickable, so remove it when we pickle
@@ -168,15 +168,15 @@ class Explorer(object):
         in the object.
         """
-        self.log.debug("Transfering type explorers for type: %s",
+        self.log.trace("Transfering type explorers for type: %s",
                        cdist_object.cdist_type)
         self.transfer_type_explorers(cdist_object.cdist_type)
-        self.log.debug("Transfering object parameters for object: %s",
+        self.log.trace("Transfering object parameters for object: %s",
                        cdist_object.name)
         self.transfer_object_parameters(cdist_object)
         for explorer in self.list_type_explorer_names(cdist_object.cdist_type):
             output = self.run_type_explorer(explorer, cdist_object)
-            self.log.debug("Running type explorer '%s' for object '%s'",
+            self.log.trace("Running type explorer '%s' for object '%s'",
                            explorer, cdist_object.name)
             cdist_object.explorers[explorer] = output
@@ -203,7 +203,7 @@ class Explorer(object):
         remote side."""
         if cdist_type.explorers:
             if cdist_type.name in self._type_explorers_transferred:
-                self.log.debug("Skipping retransfer of type explorers for: %s",
+                self.log.trace("Skipping retransfer of type explorers for: %s",
                                cdist_type)
             else:
                 source = os.path.join(self.local.type_path,


@@ -145,7 +145,7 @@ class Manifest(object):
         else:
             user_supplied = True

-        self.log.info("Running initial manifest " + initial_manifest)
+        self.log.trace("Running initial manifest " + initial_manifest)

         if not os.path.isfile(initial_manifest):
             raise NoInitialManifestError(initial_manifest, user_supplied)


@@ -98,7 +98,7 @@ class Emulator(object):
         self.save_stdin()
         self.record_requirements()
         self.record_auto_requirements()
-        self.log.debug("Finished %s %s" % (
+        self.log.trace("Finished %s %s" % (
             self.cdist_object.path, self.parameters))

     def __init_log(self):
@@ -148,7 +148,7 @@ class Emulator(object):
         # And finally parse/verify parameter
         self.args = parser.parse_args(self.argv[1:])
-        self.log.debug('Args: %s' % self.args)
+        self.log.trace('Args: %s' % self.args)

     def setup_object(self):
         # Setup object - and ensure it is not in args
@@ -240,7 +240,7 @@ class Emulator(object):
                        self.object_source)))
                 raise

-            self.log.debug("Recording requirement: %s", requirement)
+            self.log.trace("Recording requirement: %s", requirement)

             # Save the sanitised version, not the user supplied one
             # (__file//bar => __file/bar)


@@ -163,7 +163,7 @@ class Local(object):
         with open(self.object_marker_file, 'w') as fd:
             fd.write("%s\n" % self.object_marker_name)

-        self.log.debug("Object marker %s saved in %s" % (
+        self.log.trace("Object marker %s saved in %s" % (
             self.object_marker_name, self.object_marker_file))

     def _init_cache_dir(self, cache_dir):
@@ -178,12 +178,12 @@ class Local(object):
     def rmdir(self, path):
         """Remove directory on the local side."""
-        self.log.debug("Local rmdir: %s", path)
+        self.log.trace("Local rmdir: %s", path)
         shutil.rmtree(path)

     def mkdir(self, path):
         """Create directory on the local side."""
-        self.log.debug("Local mkdir: %s", path)
+        self.log.trace("Local mkdir: %s", path)
         os.makedirs(path, exist_ok=True)

     def run(self, command, env=None, return_output=False, message_prefix=None,
@@ -192,7 +192,7 @@ class Local(object):
         Return the output as a string.
         """
-        self.log.debug("Local run: %s", command)
+        self.log.trace("Local run: %s", command)
         assert isinstance(command, (list, tuple)), (
             "list or tuple argument expected, got: %s" % command)
@@ -214,9 +214,9 @@ class Local(object):
         try:
             if save_output:
                 output, errout = exec_util.call_get_output(command, env=env)
-                self.log.debug("Local stdout: {}".format(output))
+                self.log.trace("Local stdout: {}".format(output))
                 # Currently, stderr is not captured.
-                # self.log.debug("Local stderr: {}".format(errout))
+                # self.log.trace("Local stderr: {}".format(errout))
                 if return_output:
                     return output.decode()
             else:
@@ -279,7 +279,7 @@ class Local(object):
         return cache_subpath

     def save_cache(self, start_time=time.time()):
-        self.log.debug("cache subpath pattern: {}".format(
+        self.log.trace("cache subpath pattern: {}".format(
             self.cache_path_pattern))
         cache_subpath = self._cache_subpath(start_time,
                                             self.cache_path_pattern)
@@ -340,7 +340,7 @@ class Local(object):
         if os.path.exists(dst):
             os.unlink(dst)

-        self.log.debug("Linking %s to %s ..." % (src, dst))
+        self.log.trace("Linking %s to %s ..." % (src, dst))
         try:
             os.symlink(src, dst)
         except OSError as e:
@@ -352,7 +352,7 @@ class Local(object):
         src = os.path.abspath(self.exec_path)
         for cdist_type in core.CdistType.list_types(self.type_path):
             dst = os.path.join(self.bin_path, cdist_type.name)
-            self.log.debug("Linking emulator: %s to %s", src, dst)
+            self.log.trace("Linking emulator: %s to %s", src, dst)
             try:
                 os.symlink(src, dst)


@@ -111,17 +111,17 @@ class Remote(object):
     def rmdir(self, path):
         """Remove directory on the remote side."""
-        self.log.debug("Remote rmdir: %s", path)
+        self.log.trace("Remote rmdir: %s", path)
         self.run(["rm", "-rf", path])

     def mkdir(self, path):
         """Create directory on the remote side."""
-        self.log.debug("Remote mkdir: %s", path)
+        self.log.trace("Remote mkdir: %s", path)
         self.run(["mkdir", "-p", path])

     def transfer(self, source, destination, jobs=None):
         """Transfer a file or directory to the remote side."""
-        self.log.debug("Remote transfer: %s -> %s", source, destination)
+        self.log.trace("Remote transfer: %s -> %s", source, destination)
         self.rmdir(destination)
         if os.path.isdir(source):
             self.mkdir(destination)
@@ -147,11 +147,11 @@ class Remote(object):
     def _transfer_dir_parallel(self, source, destination, jobs):
         """Transfer a directory to the remote side in parallel mode."""
-        self.log.info("Remote transfer in {} parallel jobs".format(
+        self.log.debug("Remote transfer in {} parallel jobs".format(
             jobs))
-        self.log.debug("Multiprocessing start method is {}".format(
+        self.log.trace("Multiprocessing start method is {}".format(
             multiprocessing.get_start_method()))
-        self.log.debug(("Starting multiprocessing Pool for parallel "
+        self.log.trace(("Starting multiprocessing Pool for parallel "
                         "remote transfer"))
         args = []
         for f in glob.glob1(source, '*'):
@@ -161,7 +161,7 @@ class Remote(object):
                 _wrap_addr(self.target_host[0]), destination)])
             args.append((command, ))
         mp_pool_run(self._run_command, args, jobs=jobs)
-        self.log.debug(("Multiprocessing for parallel transfer "
+        self.log.trace(("Multiprocessing for parallel transfer "
                         "finished"))

     def run_script(self, script, env=None, return_output=False):
@@ -226,12 +226,12 @@ class Remote(object):
         os_environ['__target_hostname'] = self.target_host[1]
         os_environ['__target_fqdn'] = self.target_host[2]

-        self.log.debug("Remote run: %s", command)
+        self.log.trace("Remote run: %s", command)
         try:
             output, errout = exec_util.call_get_output(command, env=os_environ)
-            self.log.debug("Remote stdout: {}".format(output))
+            self.log.trace("Remote stdout: {}".format(output))
             # Currently, stderr is not captured.
-            # self.log.debug("Remote stderr: {}".format(errout))
+            # self.log.trace("Remote stderr: {}".format(errout))
             if return_output:
                 return output.decode()
         except subprocess.CalledProcessError as e:


@@ -86,10 +86,10 @@ class Shell(object):
         self._init_files_dirs()
         self._init_environment()

-        log.info("Starting shell...")
+        log.trace("Starting shell...")
         # save_output=False -> do not catch stdout and stderr
         self.local.run([self.shell], self.env, save_output=False)
-        log.info("Finished shell.")
+        log.trace("Finished shell.")

     @classmethod
     def commandline(cls, args):


@@ -44,8 +44,8 @@ def commandline():
     if retval:
         log.warning(retval)

-    log.debug(args)
-    log.info("version %s" % cdist.VERSION)
+    log.trace(args)
+    log.verbose("version %s" % cdist.VERSION)

     # Work around python 3.3 bug:
     # http://bugs.python.org/issue16308
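
This last hunk is the command-line entry point, where the version string moves from info to verbose. How chatty a run is then depends entirely on the level selected from the command line. Assuming the usual pattern of counting -v flags (the thresholds below are illustrative, not cdist's actual mapping), the selection could look like:

    import logging

    # Illustrative only: levels assumed to match the sketch near the top
    # of this page; cdist's real constants and thresholds may differ.
    VERBOSE, TRACE = 15, 5

    def verbosity_to_level(vcount):
        # Map a count of -v flags to a logging level.
        if vcount >= 3:
            return TRACE
        if vcount == 2:
            return logging.DEBUG
        if vcount == 1:
            return VERBOSE
        return logging.INFO

    logging.getLogger().setLevel(verbosity_to_level(2))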