log.info -> log.debug for debug messages

This commit is contained in:
Darko Poljak 2016-08-14 21:30:09 +02:00
parent 90454c4e6b
commit 51ffc0f037
2 changed files with 8 additions and 8 deletions

View File

@@ -117,19 +117,19 @@ class Explorer(object):
             self.jobs))
         self.log.debug("Multiprocessing start method is {}".format(
             multiprocessing.get_start_method()))
-        self.log.info(("Starting multiprocessing Pool for global "
+        self.log.debug(("Starting multiprocessing Pool for global "
                        "explorers run"))
         with multiprocessing.Pool(self.jobs) as pool:
-            self.log.info("Starting async for global explorer run")
+            self.log.debug("Starting async for global explorer run")
             results = [
                 pool.apply_async(self._run_global_explorer, (e, out_path,))
                 for e in self.list_global_explorer_names()
             ]
-            self.log.info("Waiting async results for global explorer runs")
+            self.log.debug("Waiting async results for global explorer runs")
             for r in results:
                 r.get()  # self._run_global_explorer returns None
-        self.log.info(("Multiprocessing run for global explorers "
+        self.log.debug(("Multiprocessing run for global explorers "
                        "finished"))
         # logger is not pickable, so remove it when we pickle

View File

@@ -136,10 +136,10 @@ class Remote(object):
                 jobs))
             self.log.debug("Multiprocessing start method is {}".format(
                 multiprocessing.get_start_method()))
-            self.log.info(("Starting multiprocessing Pool for parallel "
+            self.log.debug(("Starting multiprocessing Pool for parallel "
                            "remote transfer"))
             with multiprocessing.Pool(jobs) as pool:
-                self.log.info("Starting async for parallel transfer")
+                self.log.debug("Starting async for parallel transfer")
                 commands = []
                 for f in glob.glob1(source, '*'):
                     command = self._copy.split()
@@ -152,10 +152,10 @@ class Remote(object):
                     for cmd in commands
                 ]
-                self.log.info("Waiting async results for parallel transfer")
+                self.log.debug("Waiting async results for parallel transfer")
                 for r in results:
                     r.get()  # self._run_command returns None
-                self.log.info(("Multiprocessing for parallel transfer "
+                self.log.debug(("Multiprocessing for parallel transfer "
                                "finished"))
         else:
             raise cdist.Error("Source {} is not a directory".format(source))