Merge branch 'execution_order_plus_general_changes'

Nico Schottelius 2013-05-15 09:19:52 +02:00
commit 03bd44e135
5 changed files with 118 additions and 280 deletions

View file

@@ -1,7 +1,7 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# 2010-2012 Nico Schottelius (nico-cdist at schottelius.org)
# 2010-2013 Nico Schottelius (nico-cdist at schottelius.org)
#
# This file is part of cdist.
#
@@ -22,18 +22,12 @@
import logging
import os
import stat
import shutil
import sys
import tempfile
import time
import itertools
import pprint
import cdist
from cdist import core
from cdist import resolver
class ConfigInstall(object):
"""Cdist main class to hold arbitrary data"""
@@ -63,45 +57,77 @@ class ConfigInstall(object):
shutil.rmtree(destination)
shutil.move(self.context.local.out_path, destination)
def deploy_to(self):
"""Mimic the old deploy to: Deploy to one host"""
self.stage_prepare()
self.stage_run()
def deploy_and_cleanup(self):
def run(self):
"""Do what is most often done: deploy & cleanup"""
start_time = time.time()
self.deploy_to()
self.cleanup()
self.log.info("Finished successful run in %s seconds",
time.time() - start_time)
def stage_prepare(self):
"""Do everything for a deploy, minus the actual code stage"""
self.explorer.run_global_explorers(self.context.local.global_explorer_out_path)
self.manifest.run_initial_manifest(self.context.initial_manifest)
self.iterate_until_finished()
self.log.info("Running object manifests and type explorers")
self.cleanup()
self.log.info("Finished successful run in %s seconds", time.time() - start_time)
# Continue process until no new objects are created anymore
new_objects_created = True
while new_objects_created:
new_objects_created = False
for cdist_object in core.CdistObject.list_objects(self.context.local.object_path,
def object_list(self):
"""Short name for object list retrieval"""
for cdist_object in core.CdistObject.list_objects(self.context.local.object_path,
self.context.local.type_path):
if cdist_object.state == core.CdistObject.STATE_PREPARED:
self.log.debug("Skipping re-prepare of object %s", cdist_object)
continue
else:
self.object_prepare(cdist_object)
new_objects_created = True
yield cdist_object
def object_prepare(self, cdist_object):
"""Prepare object: Run type explorer + manifest"""
self.log.info("Running manifest and explorers for " + cdist_object.name)
self.explorer.run_type_explorers(cdist_object)
self.manifest.run_type_manifest(cdist_object)
cdist_object.state = core.CdistObject.STATE_PREPARED
def iterate_until_finished(self):
# Continue process until no new objects are created anymore
objects_changed = True
while objects_changed:
objects_changed = False
for cdist_object in self.object_list():
if cdist_object.requirements_unfinished(cdist_object.requirements):
"""We cannot do anything for this poor object"""
continue
if cdist_object.state == core.CdistObject.STATE_UNDEF:
"""Prepare the virgin object"""
self.object_prepare(cdist_object)
objects_changed = True
if cdist_object.requirements_unfinished(cdist_object.autorequire):
"""The previous step created objects we depend on - wait for them"""
continue
if cdist_object.state == core.CdistObject.STATE_PREPARED:
self.object_run(cdist_object)
objects_changed = True
# Check whether all objects have been finished
unfinished_objects = []
for cdist_object in self.object_list():
if not cdist_object.state == cdist_object.STATE_DONE:
unfinished_objects.append(cdist_object)
if unfinished_objects:
info_string = []
for cdist_object in unfinished_objects:
requirement_names = []
autorequire_names = []
for requirement in cdist_object.requirements_unfinished(cdist_object.requirements):
requirement_names.append(requirement.name)
for requirement in cdist_object.requirements_unfinished(cdist_object.autorequire):
autorequire_names.append(requirement.name)
requirements = ", ".join(requirement_names)
autorequire = ", ".join(autorequire_names)
info_string.append("%s requires: %s autorequires: %s" % (cdist_object.name, requirements, autorequire))
raise cdist.Error("The requirements of the following objects could not be resolved: %s" %
("; ".join(info_string)))
def object_run(self, cdist_object, dry_run=False):
"""Run gencode and code for an object"""
@@ -129,18 +155,3 @@ class ConfigInstall(object):
# Mark this object as done
self.log.debug("Finishing run of " + cdist_object.name)
cdist_object.state = core.CdistObject.STATE_DONE
def stage_run(self):
"""The final (and real) step of deployment"""
self.log.info("Generating and executing code")
objects = core.CdistObject.list_objects(
self.context.local.object_path,
self.context.local.type_path)
dependency_resolver = resolver.DependencyResolver(objects)
self.log.debug(pprint.pformat(dependency_resolver.dependencies))
for cdist_object in dependency_resolver:
self.log.debug("Run object: %s", cdist_object)
self.object_run(cdist_object)

View file

@@ -50,7 +50,6 @@ class MissingObjectIdError(cdist.Error):
def __str__(self):
return '%s' % (self.message)
class CdistObject(object):
"""Represents a cdist object.
@@ -61,6 +60,7 @@ class CdistObject(object):
"""
# Constants for use with Object.state
STATE_UNDEF = ""
STATE_PREPARED = "prepared"
STATE_RUNNING = "running"
STATE_DONE = "done"
@@ -223,62 +223,18 @@ class CdistObject(object):
except EnvironmentError as error:
raise cdist.Error('Error creating directories for cdist object: %s: %s' % (self, error))
@property
def satisfied_requirements(self):
"""Return state whether all of our dependencies have been resolved already"""
def requirements_unfinished(self, requirements):
"""Return state whether requirements are satisfied"""
satisfied = True
object_list = []
for requirement in self.all_requirements:
log.debug("%s: Checking requirement %s (%s) .." % (self.name, requirement.name, requirement.state))
if not requirement.state == self.STATE_DONE:
satisfied = False
break
log.debug("%s is satisfied: %s" % (self.name, satisfied))
for requirement in requirements:
cdist_object = self.object_from_name(requirement)
return satisfied
def find_requirements_by_name(self, requirements):
"""Takes a list of requirement patterns and returns a list of matching object instances.
Patterns are expected to be Unix shell-style wildcards for use with fnmatch.filter.
find_requirements_by_name(['__type/object_id', '__other_type/*']) ->
[<Object __type/object_id>, <Object __other_type/any>, <Object __other_type/match>]
"""
# FIXME: think about where/when to store this - probably not here
self.objects = dict((o.name, o) for o in self.list_objects(self.base_path, self.cdist_type.base_path))
object_names = self.objects.keys()
for pattern in requirements:
found = False
for requirement in fnmatch.filter(object_names, pattern):
found = True
yield self.objects[requirement]
if not found:
# FIXME: get rid of the singleton object_id, it should be invisible to the code -> hide it in Object
singleton = os.path.join(pattern, 'singleton')
if singleton in self.objects:
yield self.objects[singleton]
else:
raise RequirementNotFoundError(pattern)
@property
def all_requirements(self):
"""
Return resolved autorequirements and requirements so that
a complete list of requirements is returned
"""
all_reqs= []
all_reqs.extend(self.find_requirements_by_name(self.requirements))
all_reqs.extend(self.find_requirements_by_name(self.autorequire))
return set(all_reqs)
if not cdist_object.state == self.STATE_DONE:
object_list.append(cdist_object)
return object_list
class RequirementNotFoundError(cdist.Error):
def __init__(self, requirement):

View file

@@ -1,175 +0,0 @@
# -*- coding: utf-8 -*-
#
# 2011 Steven Armstrong (steven-cdist at armstrong.cc)
#
# This file is part of cdist.
#
# cdist is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# cdist is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with cdist. If not, see <http://www.gnu.org/licenses/>.
#
#
import logging
import os
import itertools
import fnmatch
import pprint
import cdist
log = logging.getLogger(__name__)
class CircularReferenceError(cdist.Error):
def __init__(self, cdist_object, required_object):
self.cdist_object = cdist_object
self.required_object = required_object
def __str__(self):
return 'Circular reference detected: %s -> %s' % (self.cdist_object.name, self.required_object.name)
class RequirementNotFoundError(cdist.Error):
def __init__(self, requirement):
self.requirement = requirement
def __str__(self):
return 'Requirement could not be found: %s' % self.requirement
class DependencyResolver(object):
"""Cdist's dependency resolver.
Usage:
>> resolver = DependencyResolver(list_of_objects)
# Easy access to the objects we are working with
>> resolver.objects['__some_type/object_id']
<CdistObject __some_type/object_id>
# Easy access to a specific objects dependencies
>> resolver.dependencies['__some_type/object_id']
[<CdistObject __other_type/dependency>, <CdistObject __some_type/object_id>]
# Pretty print the dependency graph
>> from pprint import pprint
>> pprint(resolver.dependencies)
# Iterate over all existing objects in the correct order
>> for cdist_object in resolver:
>> do_something_with(cdist_object)
"""
def __init__(self, objects, logger=None):
self.objects = dict((o.name, o) for o in objects)
self._dependencies = None
self.log = logger or log
@property
def dependencies(self):
"""Build the dependency graph.
Returns a dict where the keys are the object names and the values are
lists of all dependencies including the key object itself.
"""
if self._dependencies is None:
self.log.info("Resolving dependencies...")
self._dependencies = {}
self._preprocess_requirements()
for name,cdist_object in self.objects.items():
resolved = []
unresolved = []
self._resolve_object_dependencies(cdist_object, resolved, unresolved)
self._dependencies[name] = resolved
self.log.debug(self._dependencies)
return self._dependencies
def find_requirements_by_name(self, requirements):
"""Takes a list of requirement patterns and returns a list of matching object instances.
Patterns are expected to be Unix shell-style wildcards for use with fnmatch.filter.
find_requirements_by_name(['__type/object_id', '__other_type/*']) ->
[<Object __type/object_id>, <Object __other_type/any>, <Object __other_type/match>]
"""
object_names = self.objects.keys()
for pattern in requirements:
found = False
for requirement in fnmatch.filter(object_names, pattern):
found = True
yield self.objects[requirement]
if not found:
# FIXME: get rid of the singleton object_id, it should be invisible to the code -> hide it in Object
singleton = os.path.join(pattern, 'singleton')
if singleton in self.objects:
yield self.objects[singleton]
else:
raise RequirementNotFoundError(pattern)
def _preprocess_requirements(self):
"""Find all autorequire dependencies and merge them to be just requirements
for further processing.
"""
for cdist_object in self.objects.values():
if cdist_object.autorequire:
# The objects (children) that this cdist_object (parent) defined
# in its type manifest shall inherit all explicit requirements
# that the parent has, so that user-defined requirements are
# fulfilled and processed in the expected order.
for auto_requirement in self.find_requirements_by_name(cdist_object.autorequire):
for requirement in self.find_requirements_by_name(cdist_object.requirements):
requirement_object_all_requirements = list(requirement.requirements) + list(requirement.autorequire)
if (requirement.name not in auto_requirement.requirements
and auto_requirement.name not in requirement_object_all_requirements):
self.log.debug('Adding %s to %s.requirements', requirement.name, auto_requirement)
auto_requirement.requirements.append(requirement.name)
# On the other hand, the parent shall depend on all the children
# it created, so that the user can set up dependencies on it as a
# whole without having to know anything about the parent's
# internals.
cdist_object.requirements.extend(cdist_object.autorequire)
# As we changed the object on disk, we have to ensure it is not
# preprocessed again if we are called multiple times.
cdist_object.autorequire = []
def _resolve_object_dependencies(self, cdist_object, resolved, unresolved):
"""Resolve all dependencies for the given cdist_object and store them
in the list passed as the 'resolved' argument.
e.g.
resolved = []
unresolved = []
resolve_object_dependencies(some_object, resolved, unresolved)
print("Dependencies for %s: %s" % (some_object, resolved))
"""
self.log.debug('Resolving dependencies for: %s' % cdist_object.name)
try:
unresolved.append(cdist_object)
for required_object in self.find_requirements_by_name(cdist_object.requirements):
self.log.debug("Object %s requires %s", cdist_object, required_object)
if required_object not in resolved:
if required_object in unresolved:
error = CircularReferenceError(cdist_object, required_object)
self.log.error('%s: %s', error, pprint.pformat(self._dependencies))
raise error
self._resolve_object_dependencies(required_object, resolved, unresolved)
resolved.append(cdist_object)
unresolved.remove(cdist_object)
except RequirementNotFoundError as e:
raise cdist.CdistObjectError(cdist_object, "requires non-existing " + e.requirement)
def __iter__(self):
"""Iterate over all unique objects and yield them in the correct order.
"""
iterable = itertools.chain(*self.dependencies.values())
# Keep record of objects that have already been seen
seen = set()
seen_add = seen.add
for cdist_object in itertools.filterfalse(seen.__contains__, iterable):
seen_add(cdist_object)
yield cdist_object

View file

@@ -0,0 +1,44 @@
Old:
- global explorers (all)
- initial manifest
- for each object
execute type explorers
execute manifest
continue until all objects (including newly created)
have their type explorers/manifests run
- build dependency tree
- for each object
execute gencode-*
execute code-*
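
For comparison, a minimal self-contained sketch of this old two-phase flow (prepare everything, then walk a complete dependency graph and run in that order). Only the control flow mirrors the stage_prepare/stage_run and DependencyResolver code removed in this commit; the OldObject class and the sample objects are made up for illustration:

class OldObject:
    """Stand-in for a CdistObject: a name plus explicit requirements."""
    def __init__(self, name, requirements=None):
        self.name = name
        self.requirements = requirements or []
        self.prepared = False
        self.done = False

def old_deploy(objects):
    # Phase 1: prepare every object (type explorers + manifest would run here).
    for obj in objects.values():
        obj.prepared = True
    # Phase 2: resolve the complete dependency graph, then run in that order.
    order = []
    def resolve(obj, unresolved):
        unresolved.append(obj)
        for name in obj.requirements:
            required = objects[name]
            if required not in order:
                if required in unresolved:
                    raise RuntimeError("circular reference: %s -> %s"
                                       % (obj.name, required.name))
                resolve(required, unresolved)
        order.append(obj)
        unresolved.remove(obj)
    for obj in objects.values():
        if obj not in order:
            resolve(obj, [])
    for obj in order:
        obj.done = True  # gencode-* and code-* would run here
    return [obj.name for obj in order]

objects = {o.name: o for o in (
    OldObject("__directory/etc/sudoers.d"),
    OldObject("__file/etc/sudoers.d/nico", ["__directory/etc/sudoers.d"]),
)}
print(old_deploy(objects))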
New:
- run all global explorers
- run initial manifest
creates zero or more cdist_objects
- for each cdist_object
if not cdist_object.has_unfulfilled_requirements:
execute type explorers
execute manifest
may create new objects, resulting in autorequirements
# Gained requirements during manifest run
if object.has_auto_requirements():
continue
cdist_object.execute gencode-*
cdist_object.execute code-*
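
A minimal self-contained sketch of this new per-object loop. The states and the control flow mirror iterate_until_finished() from this commit; SimpleObject, requirements_unfinished() as a free function, and the sample objects are illustrative only:

STATE_UNDEF, STATE_PREPARED, STATE_DONE = "", "prepared", "done"

class SimpleObject:
    """Stand-in for a CdistObject: name, requirements, autorequire, state."""
    def __init__(self, name, requirements=None, autorequire=None):
        self.name = name
        self.requirements = requirements or []
        self.autorequire = autorequire or []
        self.state = STATE_UNDEF

def requirements_unfinished(objects, requirements):
    """Return the required objects that are not done yet."""
    return [objects[name] for name in requirements
            if objects[name].state != STATE_DONE]

def iterate_until_finished(objects):
    objects_changed = True
    while objects_changed:
        objects_changed = False
        for obj in objects.values():
            if requirements_unfinished(objects, obj.requirements):
                continue                    # explicit requirements not done yet
            if obj.state == STATE_UNDEF:
                obj.state = STATE_PREPARED  # would run type explorers + manifest
                objects_changed = True
            if requirements_unfinished(objects, obj.autorequire):
                continue                    # wait for autorequired children
            if obj.state == STATE_PREPARED:
                obj.state = STATE_DONE      # would run gencode-* and code-*
                objects_changed = True
    unfinished = [o.name for o in objects.values() if o.state != STATE_DONE]
    if unfinished:
        raise RuntimeError("unresolvable requirements: " + ", ".join(unfinished))

objects = {o.name: o for o in (
    SimpleObject("__directory/etc/sudoers.d"),
    SimpleObject("__file/etc/sudoers.d/nico",
                 requirements=["__directory/etc/sudoers.d"]),
)}
iterate_until_finished(objects)
print(sorted((o.name, o.state) for o in objects.values()))

Objects whose requirements never finish are collected at the end of the loop, which is what produces the "could not be resolved" error shown below.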
Requirements / Test cases for requirements / resolver:
- idempotence
-
--------------------------------------------------------------------------------
ERROR: localhost: The following objects could not be resolved: __cdistmarker/singleton requires autorequires ; __directory/etc/sudoers.d requires autorequires ; __file/etc/sudoers.d/nico requires __directory/etc/sudoers.d autorequires ; __file/etc/motd requires autorequires ; __package_pacman/atop requires autorequires ; __package_pacman/screen requires autorequires ; __package_pacman/strace requires autorequires ; __package_pacman/vim requires autorequires ; __package_pacman/zsh requires autorequires ; __package_pacman/lftp requires autorequires ; __package_pacman/nmap requires autorequires ; __package_pacman/ntp requires autorequires ; __package_pacman/rsync requires autorequires ; __package_pacman/rtorrent requires autorequires ; __package_pacman/wget requires autorequires ; __package_pacman/nload requires autorequires ; __package_pacman/iftop requires autorequires ; __package_pacman/mosh requires autorequires ; __package_pacman/git requires autorequires ; __package_pacman/mercurial requires autorequires ; __package_pacman/netcat requires autorequires ; __package_pacman/python-virtualenv requires autorequires ; __package_pacman/wireshark-cli requires autorequires ; __package_pacman/sudo requires autorequires
INFO: Total processing time for 1 host(s): 32.30426597595215
ERROR: Failed to deploy to the following hosts: localhost

View file

@@ -62,6 +62,8 @@ def commandline():
parser['configinstall'].add_argument('-i', '--initial-manifest',
help='Path to a cdist manifest or \'-\' to read from stdin.',
dest='manifest', required=False)
parser['configinstall'].add_argument('-n', '--dry-run',
help='Do not execute code', action='store_true')
parser['configinstall'].add_argument('-p', '--parallel',
help='Operate on multiple hosts in parallel',
action='store_true', dest='parallel')