Allow conditional imports, see examples/playbook3.yml comments for a full explanation. Extensive

refactoring of playbooks is now warranted, which we'll do before we move on. This variable assignment
system makes nearly all possible magic possible, for we can use these variables however we like,
even as module names!
This commit is contained in:
Michael DeHaan
2012-03-19 22:42:31 -04:00
parent 5ed2b894d9
commit 4de7bbb169
15 changed files with 410 additions and 77 deletions

View File

@@ -21,7 +21,6 @@ import ansible.runner
import ansible.constants as C
from ansible import utils
from ansible import errors
import yaml
import shlex
import os
import time
@@ -89,15 +88,6 @@ class PlayBook(object):
vars = play.get('vars', {})
if type(vars) != dict:
raise errors.AnsibleError("'vars' section must contain only key/value pairs")
vars_files = play.get('vars_files', [])
for f in vars_files:
path = utils.path_dwim(dirname, f)
# FIXME: better error handling if not valid YAML
# or file not found
# raise typed exception
data = file(path).read()
data = yaml.load(data)
vars.update(data)
return vars
def _include_tasks(self, play, task, dirname, new_tasks):
@@ -111,7 +101,7 @@ class PlayBook(object):
(k,v) = x.split("=")
inject_vars[k] = v
included = utils.template_from_file(path, inject_vars)
included = yaml.load(included)
included = utils.parse_yaml(included)
for x in included:
new_tasks.append(x)
@@ -119,7 +109,7 @@ class PlayBook(object):
path = utils.path_dwim(dirname, handler['include'])
inject_vars = self._get_vars(play, dirname)
included = utils.template_from_file(path, inject_vars)
included = yaml.load(included)
included = utils.parse_yaml(included)
for x in included:
new_handlers.append(x)
@@ -127,7 +117,7 @@ class PlayBook(object):
''' load YAML file, including handling for imported files '''
dirname = os.path.dirname(playbook)
playbook = yaml.load(file(playbook).read())
playbook = utils.parse_yaml_from_file(playbook)
for play in playbook:
tasks = play.get('tasks',[])
@@ -355,7 +345,7 @@ class PlayBook(object):
# walk through the results and build up
# summary information about successes and
# failures. TODO: split into subfunction
# failures. FIXME: TODO: split into subfunction!
dark = results.get("dark", {})
contacted = results.get("contacted", {})
@@ -422,22 +412,51 @@ class PlayBook(object):
x['run'] = []
x['run'].append(host)
def _run_play(self, pg):
'''
run a list of tasks for a given pattern, in order
'''
def _do_setup_step(self, pattern, vars, user, host_list, vars_files=None):
''' push variables down to the systems and get variables+facts back up '''
# get configuration information about the pattern
pattern = pg['hosts']
# this enables conditional includes like $facter_os.yml and is only done
# after the original pass when we have that data.
#
# FIXME: refactor into subfunction
# FIXME: save parsed variable results in memory to avoid excessive re-reading/parsing
# FIXME: currently parses imports for hosts not in the pattern, that is not wrong, but it's
# not super optimized yet either, because we wouldn't have hit them, ergo
# it will raise false errors if there is no defaults variable file without any $vars
# in it, which could happen on uncontacted hosts.
vars = self._get_vars(pg, self.basedir)
tasks = pg['tasks']
handlers = pg['handlers']
user = pg.get('user', C.DEFAULT_REMOTE_USER)
if vars_files is not None:
self.callbacks.on_setup_secondary()
for host in host_list:
cache_vars = SETUP_CACHE.get(host,{})
SETUP_CACHE[host] = {}
for filename in vars_files:
if type(filename) == list:
# loop over all filenames, loading the first one, and failing if
# none found
found = False
sequence = []
for real_filename in filename:
filename2 = utils.path_dwim(self.basedir, utils.template(real_filename, cache_vars))
sequence.append(filename2)
if os.path.exists(filename2):
found = True
data = utils.parse_yaml_from_file(filename2)
SETUP_CACHE[host].update(data)
self.callbacks.on_import_for_host(host, filename2)
break
if not found:
raise errors.AnsibleError("no files matched for vars_files import sequence: %s" % sequence)
self.host_list, groups = ansible.runner.Runner.parse_hosts(self.host_list)
self.callbacks.on_play_start(pattern)
else:
filename2 = utils.path_dwim(self.basedir, utils.template(filename, cache_vars))
if not os.path.exists(filename2):
raise errors.AnsibleError("no file matched for vars_file import: %s" % filename2)
data = utils.parse_yaml_from_file(filename2)
SETUP_CACHE[host].update(data)
self.callbacks.on_import_for_host(host, filename2)
else:
self.callbacks.on_setup_primary()
# first run the setup task on every node, which gets the variables
# written to the JSON file and will also bubble facts back up via
@@ -473,13 +492,62 @@ class PlayBook(object):
# now for each result, load into the setup cache so we can
# let runner template out future commands
setup_ok = setup_results.get('contacted', {})
for (host, result) in setup_ok.iteritems():
SETUP_CACHE[host] = result
if vars_files is None:
# first pass only or we'll erase good work
for (host, result) in setup_ok.iteritems():
SETUP_CACHE[host] = result
host_list = self._prune_failed_hosts(host_list)
return host_list
def _run_play(self, pg):
'''
run a list of tasks for a given pattern, in order
'''
# get configuration information about the pattern
pattern = pg['hosts']
vars = self._get_vars(pg, self.basedir)
vars_files = pg.get('vars_files', {})
tasks = pg.get('tasks', [])
handlers = pg.get('handlers', [])
user = pg.get('user', C.DEFAULT_REMOTE_USER)
self.host_list, groups = ansible.runner.Runner.parse_hosts(self.host_list)
self.callbacks.on_play_start(pattern)
# push any variables down to the system and get facts/ohai/other data back up
host_list = self._do_setup_step(pattern, vars, user, self.host_list, None)
# now with that data, handle conditional variable file imports!
if len(vars_files) > 0:
host_list = self._do_setup_step(pattern, vars, user, host_list, vars_files)
# FIXME: DUPLICATE CODE
# dark_hosts = setup_results.get('dark',{})
#contacted_hosts = setup_results.get('contacted',{})
#for (host, error) in dark_hosts.iteritems():
# self.callbacks.on_dark_host(host, error)
# self.dark[host] = 1
#for (host, host_result) in contacted_hosts.iteritems():
# if 'failed' in host_result:
# self.callbacks.on_failed(host, host_result)
# self.failures[host] = 1
# FIXME: DUPLICATE CODE
# now for each result, load into the setup cache so we can
# let runner template out future commands
#setup_ok = setup_results.get('contacted', {})
#for (host, result) in setup_ok.iteritems():
# SETUP_CACHE[host] = result
# run all the top level tasks, these get run on every node
for task in tasks:
self._run_task(
pattern=pattern,
pattern=pattern,
host_list=host_list,
task=task,
handlers=handlers,
remote_user=user

View File

@@ -74,7 +74,7 @@ class Runner(object):
remote_pass=C.DEFAULT_REMOTE_PASS,
background=0,
basedir=None,
setup_cache={},
setup_cache=None,
transport='paramiko',
verbose=False):
@@ -92,6 +92,8 @@ class Runner(object):
setup_cache -- used only by playbook (complex explanation pending)
'''
if setup_cache is None:
setup_cache = {}
self.setup_cache = setup_cache
self.host_list, self.groups = self.parse_hosts(host_list)
@@ -152,7 +154,7 @@ class Runner(object):
# looks like a group
group_name = item.replace("[","").replace("]","").lstrip().rstrip()
groups[group_name] = []
else:
elif item != "":
# looks like a regular host
groups[group_name].append(item)
results.append(item)
@@ -286,13 +288,14 @@ class Runner(object):
args = module_args
if type(args) == list:
args = [ str(x) for x in module_args ]
args = " ".join(args)
args = " ".join([ str(x) for x in module_args ])
# by default the args to substitute in the action line are those from the setup cache
inject_vars = self.setup_cache.get(conn.host,{})
inject2 = {}
# if the host file was an external script, execute it with the hostname
# as a first parameter to get the variables to use for the host
inject2 = {}
if Runner._external_variable_script is not None:
host = conn.host
cmd = subprocess.Popen([Runner._external_variable_script, host],
@@ -309,12 +312,15 @@ class Runner(object):
Runner._external_variable_script,
host
))
inject_vars.update(inject2)
# store injected variables in the templates
# store injected variables in the templates
inject_vars.update(inject2)
if self.module_name == 'setup':
for (k,v) in inject2.iteritems():
args = "%s %s=%s" % (args, k, v)
for (k,v) in inject_vars.iteritems():
if not k.startswith('facter_') and not k.startswith('ohai_'):
if v.find(" ") != -1:
v = "\"%s\"" % v
args += " %s=%s" % (k, v)
# the metadata location for the setup module is transparently managed
# since it's an 'internals' module, kind of a black box. See playbook
@@ -338,7 +344,7 @@ class Runner(object):
# *****************************************************
def _execute_normal_module(self, conn, host, tmp):
def _execute_normal_module(self, conn, host, tmp, module_name):
'''
transfer & execute a module that is not 'copy' or 'template'
because those require extra work.
@@ -346,18 +352,19 @@ class Runner(object):
# hack to make the 'shell' module keyword really be executed
# by the command module
if self.module_name == 'shell':
self.module_name = 'command'
self.module_args.append("#USE_SHELL")
module_args = self.module_args
if module_name == 'shell':
module_name = 'command'
module_args.append("#USE_SHELL")
module = self._transfer_module(conn, tmp, self.module_name)
result = self._execute_module(conn, tmp, module, self.module_args)
module = self._transfer_module(conn, tmp, module_name)
result = self._execute_module(conn, tmp, module, module_args)
# when running the setup module, which pushes vars to the host and ALSO
# returns them (+factoids), store the variables that were returned such that commands
# run AFTER setup use these variables for templating when executed
# from playbooks
if self.module_name == 'setup':
if module_name == 'setup':
host = conn.host
try:
var_result = utils.parse_json(result)
@@ -377,7 +384,7 @@ class Runner(object):
# *****************************************************
def _execute_async_module(self, conn, host, tmp):
def _execute_async_module(self, conn, host, tmp, module_name):
'''
transfer the given module name, plus the async module
and then run the async module wrapping the other module
@@ -385,13 +392,14 @@ class Runner(object):
# hack to make the 'shell' module keyword really be executed
# by the command module
if self.module_name == 'shell':
self.module_name = 'command'
self.module_args.append("#USE_SHELL")
module_args = self.module_args
if module_name == 'shell':
module_name = 'command'
module_args.append("#USE_SHELL")
async = self._transfer_module(conn, tmp, 'async_wrapper')
module = self._transfer_module(conn, tmp, self.module_name)
result = self._execute_module(conn, tmp, async, self.module_args,
module = self._transfer_module(conn, tmp, module_name)
result = self._execute_module(conn, tmp, async, module_args,
async_module=module,
async_jid=self.generated_jid,
async_limit=self.background
@@ -518,17 +526,21 @@ class Runner(object):
# or a request to use the copy or template
# module, call the appropriate executor function
ok, conn = self._connect(host)
if not ok:
return [ host, False, conn ]
cache = self.setup_cache.get(host, {})
module_name = utils.template(self.module_name, cache)
tmp = self._get_tmp_path(conn)
result = None
if self.module_name not in [ 'copy', 'template' ]:
if self.background == 0:
result = self._execute_normal_module(conn, host, tmp)
result = self._execute_normal_module(conn, host, tmp, module_name)
else:
result = self._execute_async_module(conn, host, tmp)
result = self._execute_async_module(conn, host, tmp, module_name)
elif self.module_name == 'copy':
result = self._execute_copy(conn, host, tmp)

View File

@@ -22,6 +22,7 @@ import os
import shlex
import re
import jinja2
import yaml
try:
import json
@@ -267,6 +268,12 @@ def template_from_file(path, vars):
''' run a file through the templating engine '''
data = file(path).read()
return template(data, vars)
def parse_yaml(data):
    ''' parse a YAML string into a Python data structure '''
    # NOTE(review): yaml.load can construct arbitrary Python objects if the
    # input contains !!python tags; playbooks are trusted input here, but
    # switch to yaml.safe_load if this is ever fed untrusted data.
    return yaml.load(data)

def parse_yaml_from_file(path):
    ''' read the file at path and parse its contents as YAML '''
    # open() instead of the Python-2-only file() builtin, and close the
    # handle explicitly instead of leaking it until garbage collection
    fd = open(path)
    try:
        data = fd.read()
    finally:
        fd.close()
    return parse_yaml(data)