mirror of
https://github.com/ansible-collections/community.general.git
synced 2026-05-08 14:22:46 +00:00
Merge branch 'v2_final' into devel_switch_v2
Conflicts: lib/ansible/inventory/__init__.py lib/ansible/modules/core lib/ansible/utils/__init__.py lib/ansible/utils/module_docs.py
This commit is contained in:
File diff suppressed because it is too large
Load Diff
29
lib/ansible/utils/boolean.py
Normal file
29
lib/ansible/utils/boolean.py
Normal file
@@ -0,0 +1,29 @@
|
||||
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
|
||||
#
|
||||
# This file is part of Ansible
|
||||
#
|
||||
# Ansible is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# Ansible is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
# Make coding more python3-ish
|
||||
from __future__ import (absolute_import, division, print_function)
|
||||
__metaclass__ = type
|
||||
|
||||
|
||||
def boolean(value):
    """Coerce *value* to a bool via its string form.

    Any value whose str() representation is (case-insensitively) one of
    "true", "t", "y", "1" or "yes" yields True; everything else False.
    """
    return str(value).lower() in ("true", "t", "y", "1", "yes")
|
||||
|
||||
@@ -1,59 +0,0 @@
|
||||
# (c) 2012, Michael DeHaan <michael.dehaan@gmail.com>
|
||||
#
|
||||
# This file is part of Ansible
|
||||
#
|
||||
# Ansible is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# Ansible is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import os
|
||||
import sys
|
||||
import shlex
|
||||
import subprocess
|
||||
import select
|
||||
|
||||
def run_cmd(cmd, live=False, readsize=10):
    """Run *cmd* (a shell-style command string) and capture its output.

    The string is tokenised with shlex and executed without a shell.
    Both pipes are drained incrementally via select() so neither can
    fill up and deadlock the child.  When *live* is true, stdout data is
    echoed to this process's stdout as it arrives.

    Returns a (returncode, stdout, stderr) tuple.

    NOTE(review): written for Python 2 -- os.read() results are
    concatenated onto str accumulators, which only works when bytes
    and str are the same type.
    """
    proc = subprocess.Popen(shlex.split(cmd),
                            stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE)

    captured_out = ''
    captured_err = ''
    open_pipes = [proc.stdout, proc.stderr]

    while True:
        readable, _, _ = select.select(open_pipes, [], open_pipes, 1)

        if proc.stdout in readable:
            chunk = os.read(proc.stdout.fileno(), readsize)
            if live:
                sys.stdout.write(chunk)
            captured_out += chunk
            if chunk == '':
                # EOF on stdout: stop selecting on it
                open_pipes.remove(proc.stdout)

        if proc.stderr in readable:
            chunk = os.read(proc.stderr.fileno(), readsize)
            captured_err += chunk
            if live:
                sys.stdout.write(chunk)
            if chunk == '':
                # EOF on stderr: stop selecting on it
                open_pipes.remove(proc.stderr)

        # only break out if we've emptied the pipes, or there is nothing
        # to read from and the process has finished
        if (not open_pipes or not readable) and proc.poll() is not None:
            break
        # calling wait() while pipes are still open can deadlock
        elif not open_pipes and proc.poll() is None:
            proc.wait()

    return proc.returncode, captured_out, captured_err
|
||||
94
lib/ansible/utils/color.py
Normal file
94
lib/ansible/utils/color.py
Normal file
@@ -0,0 +1,94 @@
|
||||
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
|
||||
#
|
||||
# This file is part of Ansible
|
||||
#
|
||||
# Ansible is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# Ansible is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
|
||||
from __future__ import (absolute_import, division, print_function)
|
||||
__metaclass__ = type
|
||||
|
||||
import sys
|
||||
|
||||
from ansible import constants as C
|
||||
|
||||
# Decide once, at import time, whether coloured output is possible.
ANSIBLE_COLOR = True
if C.ANSIBLE_NOCOLOR:
    # explicitly disabled via configuration
    ANSIBLE_COLOR = False
elif not hasattr(sys.stdout, 'isatty') or not sys.stdout.isatty():
    # output is redirected or the stream cannot tell us
    ANSIBLE_COLOR = False
else:
    try:
        import curses
        curses.setupterm()
        if curses.tigetnum('colors') < 0:
            ANSIBLE_COLOR = False
    except ImportError:
        # curses library was not found
        pass
    except curses.error:
        # curses returns an error (e.g. could not find terminal)
        ANSIBLE_COLOR = False

if C.ANSIBLE_FORCE_COLOR:
    # configuration overrides terminal detection
    ANSIBLE_COLOR = True

# --- begin "pretty"
#
# pretty - A miniature library that provides a Python print and stdout
# wrapper that makes colored terminal text easier to use (e.g. without
# having to mess around with ANSI escape sequences). This code is public
# domain - there is no license except that you must leave this header.
#
# Copyright (C) 2008 Brian Nez <thedude at bri1 dot com>
#
# http://nezzen.net/2008/06/23/colored-text-in-python-using-ansi-escape-sequences/

# colour name -> ANSI SGR parameter string
codeCodes = {
    'black': '0;30', 'bright gray': '0;37',
    'blue': '0;34', 'white': '1;37',
    'green': '0;32', 'bright blue': '1;34',
    'cyan': '0;36', 'bright green': '1;32',
    'red': '0;31', 'bright cyan': '1;36',
    'purple': '0;35', 'bright red': '1;31',
    'yellow': '0;33', 'bright purple': '1;35',
    'dark gray': '1;30', 'bright yellow': '1;33',
    'normal': '0'
}
|
||||
|
||||
def stringc(text, color):
    """String in color: wrap *text* in the ANSI escape for *color*.

    Returns the plain text unchanged when colour support is disabled.
    """
    if not ANSIBLE_COLOR:
        return text
    return "\033[" + codeCodes[color] + "m" + text + "\033[0m"
|
||||
|
||||
# --- end "pretty"
|
||||
|
||||
def colorize(lead, num, color):
    """ Print 'lead' = 'num' in 'color' """
    # colour only non-zero counts, and only when colour is available
    if num == 0 or not ANSIBLE_COLOR or color is None:
        return "%s=%-4s" % (lead, str(num))
    return "%s%s%-15s" % (stringc(lead, color), stringc("=", color), stringc(str(num), color))
|
||||
|
||||
def hostcolor(host, stats, color=True):
    """Return *host* padded and coloured according to its play stats.

    Red for any failure/unreachable, yellow when something changed,
    green otherwise; uncoloured (narrower padding) when colour is off.
    """
    if ANSIBLE_COLOR and color:
        if stats['failures'] != 0 or stats['unreachable'] != 0:
            shade = 'red'
        elif stats['changed'] != 0:
            shade = 'yellow'
        else:
            shade = 'green'
        return "%-37s" % stringc(host, shade)
    return "%-26s" % host
|
||||
|
||||
18
lib/ansible/utils/debug.py
Normal file
18
lib/ansible/utils/debug.py
Normal file
@@ -0,0 +1,18 @@
|
||||
from __future__ import (absolute_import, division, print_function)
|
||||
__metaclass__ = type
|
||||
|
||||
import os
|
||||
import time
|
||||
import sys
|
||||
|
||||
from multiprocessing import Lock
|
||||
|
||||
from ansible import constants as C
|
||||
|
||||
# Serialises debug output across forked worker processes so lines
# from different pids do not interleave.
global_debug_lock = Lock()


def debug(msg):
    """Print *msg* prefixed with pid and timestamp when debugging is on."""
    if not C.DEFAULT_DEBUG:
        return
    with global_debug_lock:
        print("%6d %0.5f: %s" % (os.getpid(), time.time(), msg))
        sys.stdout.flush()
|
||||
142
lib/ansible/utils/display.py
Normal file
142
lib/ansible/utils/display.py
Normal file
@@ -0,0 +1,142 @@
|
||||
# (c) 2014, Michael DeHaan <michael.dehaan@gmail.com>
|
||||
#
|
||||
# This file is part of Ansible
|
||||
#
|
||||
# Ansible is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# Ansible is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
# FIXME: copied mostly from old code, needs py3 improvements
|
||||
from __future__ import (absolute_import, division, print_function)
|
||||
__metaclass__ = type
|
||||
|
||||
import textwrap
|
||||
import sys
|
||||
|
||||
from ansible import constants as C
|
||||
from ansible.errors import AnsibleError
|
||||
from ansible.utils.color import stringc
|
||||
|
||||
class Display:
    """Console output helper for the CLI.

    Provides plain/coloured messages, verbosity-gated output (vv..vvvvvv),
    one-shot deprecation/warning/error messages and banner lines.
    """

    def __init__(self, verbosity=0):
        self.verbosity = verbosity

        # each dict maps an already-shown message to 1 so that repeated
        # deprecations/warnings/errors are displayed only once
        self._deprecations = {}
        self._warns = {}
        self._errors = {}

    def display(self, msg, color=None, stderr=False, screen_only=False, log_only=False):
        """Write *msg*, optionally coloured, to stdout or stderr."""
        text = stringc(msg, color) if color else msg
        if not log_only:
            stream = sys.stderr if stderr else sys.stdout
            try:
                print(text, file=stream)
            except UnicodeEncodeError:
                # fall back to raw utf-8 bytes when the stream encoding
                # cannot represent the message
                print(text.encode('utf-8'), file=stream)
        if C.DEFAULT_LOG_PATH != '':
            while msg.startswith("\n"):
                msg = msg.replace("\n","")
            # FIXME: logger stuff needs to be implemented
            #if not screen_only:
            #    if color == 'red':
            #        logger.error(msg)
            #    else:
            #        logger.info(msg)

    def vv(self, msg, host=None):
        return self.verbose(msg, host=host, caplevel=1)

    def vvv(self, msg, host=None):
        return self.verbose(msg, host=host, caplevel=2)

    def vvvv(self, msg, host=None):
        return self.verbose(msg, host=host, caplevel=3)

    def vvvvv(self, msg, host=None):
        return self.verbose(msg, host=host, caplevel=4)

    def vvvvvv(self, msg, host=None):
        return self.verbose(msg, host=host, caplevel=5)

    def verbose(self, msg, host=None, caplevel=2):
        """Show *msg* in blue when verbosity strictly exceeds *caplevel*."""
        # FIXME: this needs to be implemented
        #msg = utils.sanitize_output(msg)
        if self.verbosity <= caplevel:
            return
        if host is None:
            self.display(msg, color='blue')
        else:
            self.display("<%s> %s" % (host, msg), color='blue', screen_only=True)

    def deprecated(self, msg, version, removed=False):
        ''' used to print out a deprecation message.'''
        if not removed and not C.DEPRECATION_WARNINGS:
            return

        if removed:
            # feature is gone: hard error instead of a warning
            raise AnsibleError("[DEPRECATED]: %s. Please update your playbooks." % msg)

        if version:
            new_msg = "\n[DEPRECATION WARNING]: %s. This feature will be removed in version %s." % (msg, version)
        else:
            new_msg = "\n[DEPRECATION WARNING]: %s. This feature will be removed in a future release." % (msg)
        new_msg = new_msg + " Deprecation warnings can be disabled by setting deprecation_warnings=False in ansible.cfg.\n\n"

        new_msg = "\n".join(textwrap.wrap(new_msg, 79)) + "\n"

        if new_msg not in self._deprecations:
            self.display(new_msg, color='purple', stderr=True)
            self._deprecations[new_msg] = 1

    def warning(self, msg):
        """Show *msg* as a one-shot [WARNING] on stderr."""
        new_msg = "\n".join(textwrap.wrap("\n[WARNING]: %s" % msg, 79)) + "\n"
        if new_msg not in self._warns:
            self.display(new_msg, color='bright purple', stderr=True)
            self._warns[new_msg] = 1

    def system_warning(self, msg):
        """warning(), gated on the system_warnings configuration flag."""
        if C.SYSTEM_WARNINGS:
            self.warning(msg)

    def banner(self, msg, color=None):
        '''
        Prints a header-looking line with stars taking up to 80 columns
        of width (3 columns, minimum)
        '''
        msg = msg.strip()
        star_len = 80 - len(msg)
        if star_len < 0:
            star_len = 3
        self.display("\n%s %s" % (msg, "*" * star_len), color=color)

    def error(self, msg):
        """Show *msg* as a one-shot [ERROR] on stderr."""
        new_msg = "\n".join(textwrap.wrap("\n[ERROR]: %s" % msg, 79)) + "\n"
        if new_msg not in self._errors:
            self.display(new_msg, color='red', stderr=True)
            self._errors[new_msg] = 1
|
||||
|
||||
@@ -1,63 +0,0 @@
|
||||
# (c) 2014, Michael DeHaan <michael.dehaan@gmail.com>
|
||||
#
|
||||
# This file is part of Ansible
|
||||
#
|
||||
# Ansible is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# Ansible is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import textwrap
|
||||
|
||||
from ansible import constants as C
|
||||
from ansible import errors
|
||||
from ansible.callbacks import display
|
||||
|
||||
__all__ = ['deprecated', 'warning', 'system_warning']

# list of all deprecation messages to prevent duplicate display
deprecations = {}
warns = {}


def deprecated(msg, version, removed=False):
    ''' used to print out a deprecation message.'''
    if not removed and not C.DEPRECATION_WARNINGS:
        return

    if removed:
        # feature is gone: hard error instead of a warning
        raise errors.AnsibleError("[DEPRECATED]: %s. Please update your playbooks." % msg)

    if version:
        new_msg = "\n[DEPRECATION WARNING]: %s. This feature will be removed in version %s." % (msg, version)
    else:
        new_msg = "\n[DEPRECATION WARNING]: %s. This feature will be removed in a future release." % (msg)
    new_msg = new_msg + " Deprecation warnings can be disabled by setting deprecation_warnings=False in ansible.cfg.\n\n"

    new_msg = "\n".join(textwrap.wrap(new_msg, 79)) + "\n"

    if new_msg not in deprecations:
        display(new_msg, color='purple', stderr=True)
        deprecations[new_msg] = 1


def warning(msg):
    """Show *msg* as a one-shot [WARNING] on stderr."""
    new_msg = "\n".join(textwrap.wrap("\n[WARNING]: %s" % msg, 79)) + "\n"
    if new_msg not in warns:
        display(new_msg, color='bright purple', stderr=True)
        warns[new_msg] = 1


def system_warning(msg):
    """warning(), gated on the system_warnings configuration flag."""
    if C.SYSTEM_WARNINGS:
        warning(msg)
|
||||
|
||||
49
lib/ansible/utils/encrypt.py
Normal file
49
lib/ansible/utils/encrypt.py
Normal file
@@ -0,0 +1,49 @@
|
||||
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
|
||||
#
|
||||
# This file is part of Ansible
|
||||
#
|
||||
# Ansible is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# Ansible is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
|
||||
from __future__ import (absolute_import, division, print_function)
|
||||
__metaclass__ = type
|
||||
|
||||
|
||||
# Probe for the optional passlib library; do_encrypt() requires it and
# callers must consult this flag before hashing.
PASSLIB_AVAILABLE = False
try:
    import passlib.hash
    PASSLIB_AVAILABLE = True
except ImportError:
    # passlib is optional -- only an ImportError means "not installed";
    # the previous bare except also hid unrelated failures
    pass
|
||||
|
||||
from ansible.errors import AnsibleError
|
||||
|
||||
__all__ = ['do_encrypt']
|
||||
|
||||
def do_encrypt(result, encrypt, salt_size=None, salt=None):
    """Hash *result* with the passlib algorithm named *encrypt*.

    :param result: plaintext value to hash
    :param encrypt: name of a passlib.hash algorithm (e.g. 'sha512_crypt')
    :param salt_size: optional salt length forwarded to passlib
    :param salt: optional explicit salt (used only when salt_size is falsy)
    :returns: the hashed string
    :raises AnsibleError: when passlib is missing or the algorithm unknown
    """
    # guard clause keeps the main path flat
    if not PASSLIB_AVAILABLE:
        raise AnsibleError("passlib must be installed to encrypt vars_prompt values")

    try:
        crypt = getattr(passlib.hash, encrypt)
    except AttributeError:
        # getattr on a missing algorithm raises AttributeError; the
        # previous bare except also masked unrelated errors
        raise AnsibleError("passlib does not support '%s' algorithm" % encrypt)

    if salt_size:
        result = crypt.encrypt(result, salt_size=salt_size)
    elif salt:
        result = crypt.encrypt(result, salt=salt)
    else:
        result = crypt.encrypt(result)

    return result
|
||||
|
||||
@@ -20,6 +20,7 @@ from __future__ import (absolute_import, division, print_function)
|
||||
__metaclass__ = type
|
||||
|
||||
import os
|
||||
from ansible.errors import AnsibleError
|
||||
|
||||
# Note, sha1 is the only hash algorithm compatible with python2.4 and with
|
||||
# FIPS-140 mode (as of 11-2014)
|
||||
@@ -43,6 +44,8 @@ def secure_hash_s(data, hash_func=sha1):
|
||||
|
||||
digest = hash_func()
|
||||
try:
|
||||
if not isinstance(data, basestring):
|
||||
data = "%s" % data
|
||||
digest.update(data)
|
||||
except UnicodeEncodeError:
|
||||
digest.update(data.encode('utf-8'))
|
||||
@@ -62,8 +65,8 @@ def secure_hash(filename, hash_func=sha1):
|
||||
digest.update(block)
|
||||
block = infile.read(blocksize)
|
||||
infile.close()
|
||||
except IOError, e:
|
||||
raise errors.AnsibleError("error while accessing the file %s, error was: %s" % (filename, e))
|
||||
except IOError as e:
|
||||
raise AnsibleError("error while accessing the file %s, error was: %s" % (filename, e))
|
||||
return digest.hexdigest()
|
||||
|
||||
# The checksum algorithm must match with the algorithm in ShellModule.checksum() method
|
||||
|
||||
66
lib/ansible/utils/listify.py
Normal file
66
lib/ansible/utils/listify.py
Normal file
@@ -0,0 +1,66 @@
|
||||
# (c) 2014 Michael DeHaan, <michael@ansible.com>
|
||||
#
|
||||
# This file is part of Ansible
|
||||
#
|
||||
# Ansible is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# Ansible is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
# Make coding more python3-ish
|
||||
from __future__ import (absolute_import, division, print_function)
|
||||
__metaclass__ = type
|
||||
|
||||
from six import iteritems, string_types
|
||||
|
||||
import re
|
||||
|
||||
from ansible.template import Templar
|
||||
from ansible.template.safe_eval import safe_eval
|
||||
|
||||
__all__ = ['listify_lookup_plugin_terms']

# detects an embedded lookup(...) call inside a with_* value
LOOKUP_REGEX = re.compile(r'lookup\s*\(')


def listify_lookup_plugin_terms(terms, variables, loader):
    """Normalise a with_*/lookup terms value into a list.

    Bare variable names are templated ("{{ name }}"), already-templated
    or literal list/dict strings are evaluated back into real Python
    objects, and a remaining plain string is wrapped in a one-item list.
    """
    if isinstance(terms, basestring):
        # someone did:
        # with_items: alist
        # OR
        # with_items: {{ alist }}

        stripped = terms.strip()
        templar = Templar(loader=loader, variables=variables)
        looks_literal = (stripped.startswith('{') or stripped.startswith('[')
                         or stripped.startswith("/")
                         or stripped.startswith('set([')
                         or LOOKUP_REGEX.search(terms))
        if not looks_literal:
            # if not already a list, get ready to evaluate with Jinja2
            # not sure why the "/" is in above code :)
            try:
                rendered = templar.template("{{ %s }}" % terms)
                # keep the original when templating left it unresolved
                if not (isinstance(rendered, basestring) and "{{" in rendered):
                    terms = rendered
            except:
                # best effort: fall back to the raw string on any failure
                pass
        else:
            terms = templar.template(terms)

        if '{' in terms or '[' in terms:
            # Jinja2 already evaluated a variable to a list.
            # Jinja2-ified list needs to be converted back to a real type
            return safe_eval(terms)

    if isinstance(terms, basestring):
        terms = [ terms ]

    return terms
|
||||
|
||||
@@ -23,9 +23,7 @@ import ast
|
||||
import yaml
|
||||
import traceback
|
||||
|
||||
from collections import MutableMapping, MutableSet, MutableSequence
|
||||
|
||||
from ansible import utils
|
||||
from ansible.plugins import fragment_loader
|
||||
|
||||
# modules that are ok that they do not have documentation strings
|
||||
BLACKLIST_MODULES = [
|
||||
@@ -68,7 +66,7 @@ def get_docstring(filename, verbose=False):
|
||||
|
||||
|
||||
if fragment_slug != 'doesnotexist':
|
||||
fragment_class = utils.plugins.fragment_loader.get(fragment_name)
|
||||
fragment_class = fragment_loader.get(fragment_name)
|
||||
assert fragment_class is not None
|
||||
|
||||
fragment_yaml = getattr(fragment_class, fragment_var, '{}')
|
||||
|
||||
37
lib/ansible/utils/path.py
Normal file
37
lib/ansible/utils/path.py
Normal file
@@ -0,0 +1,37 @@
|
||||
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
|
||||
#
|
||||
# This file is part of Ansible
|
||||
#
|
||||
# Ansible is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# Ansible is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
|
||||
from __future__ import (absolute_import, division, print_function)
|
||||
__metaclass__ = type
|
||||
|
||||
import os
|
||||
import stat
|
||||
|
||||
__all__ = ['is_executable', 'unfrackpath']
|
||||
|
||||
def is_executable(path):
    '''is the given path executable?'''
    # stat once instead of three times; truthy when any execute bit is set
    mode = os.stat(path)[stat.ST_MODE]
    return (stat.S_IXUSR & mode or stat.S_IXGRP & mode or stat.S_IXOTH & mode)
|
||||
|
||||
def unfrackpath(path):
    '''
    returns a path that is free of symlinks, environment
    variables, relative path traversals and symbols (~)
    example:
    '$HOME/../../var/mail' becomes '/var/spool/mail'
    '''
    expanded = os.path.expanduser(path)      # resolve ~
    expanded = os.path.expandvars(expanded)  # resolve $VARS
    resolved = os.path.realpath(expanded)    # resolve symlinks and ..
    return os.path.normpath(resolved)
|
||||
|
||||
@@ -1,304 +0,0 @@
|
||||
# (c) 2012, Daniel Hokka Zakrisson <daniel@hozac.com>
|
||||
#
|
||||
# This file is part of Ansible
|
||||
#
|
||||
# Ansible is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# Ansible is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import os
|
||||
import os.path
|
||||
import sys
|
||||
import glob
|
||||
import imp
|
||||
from ansible import constants as C
|
||||
from ansible import errors
|
||||
|
||||
# Per-class-name caches shared by every PluginLoader instance.
MODULE_CACHE = {}        # class_name -> {plugin path: loaded module}
PATH_CACHE = {}          # class_name -> computed search-path list (or None)
PLUGIN_PATH_CACHE = {}   # class_name -> {plugin file name: full path}
_basedirs = []           # play basedirs, most recently pushed first


def push_basedir(basedir):
    """Prepend *basedir* (resolved to a real path) to the search roots."""
    # avoid pushing the same absolute dir more than once
    resolved = os.path.realpath(basedir)
    if resolved not in _basedirs:
        _basedirs.insert(0, resolved)
|
||||
|
||||
class PluginLoader(object):

    '''
    PluginLoader loads plugins from the configured plugin directories.

    It searches for plugins by iterating through the combined list of
    play basedirs, configured paths, and the python path.
    The first match is used.
    '''

    def __init__(self, class_name, package, config, subdir, aliases={}):
        # NOTE(review): the mutable default for aliases is shared across
        # calls; it is only ever read here, so this is benign in practice.
        self.class_name = class_name
        self.package = package
        self.config = config
        self.subdir = subdir
        self.aliases = aliases

        # per-class-name caches are shared between loader instances
        MODULE_CACHE.setdefault(class_name, {})
        PATH_CACHE.setdefault(class_name, None)
        PLUGIN_PATH_CACHE.setdefault(class_name, {})

        self._module_cache = MODULE_CACHE[class_name]
        self._paths = PATH_CACHE[class_name]
        self._plugin_path_cache = PLUGIN_PATH_CACHE[class_name]

        self._extra_dirs = []
        self._searched_paths = set()

    def print_paths(self):
        ''' Returns a string suitable for printing of the search path '''
        # Uses a list to keep first-seen order while dropping duplicates
        seen = []
        for candidate in self._get_paths():
            if candidate not in seen:
                seen.append(candidate)
        return os.pathsep.join(seen)

    def _all_directories(self, dir):
        # dir itself, plus every subdirectory of any directory that is a
        # python package (i.e. contains an __init__.py)
        found = [dir]
        for root, subdirs, files in os.walk(dir):
            if '__init__.py' in files:
                for sub in subdirs:
                    found.append(os.path.join(root, sub))
        return found

    def _get_package_paths(self):
        ''' Gets the path of a Python package '''
        if not self.package:
            return []
        if not hasattr(self, 'package_path'):
            # import once and remember where the package lives on disk
            mod = __import__(self.package)
            parts = self.package.split('.')[1:]
            self.package_path = os.path.join(os.path.dirname(mod.__file__), *parts)
        return self._all_directories(self.package_path)

    def _get_paths(self):
        ''' Return a list of paths to search for plugins in '''
        # cached after the first computation; add_directory() invalidates
        if self._paths is not None:
            return self._paths

        search_paths = list(self._extra_dirs)

        for basedir in _basedirs:
            fullpath = os.path.realpath(os.path.join(basedir, self.subdir))
            if not os.path.isdir(fullpath):
                continue
            # allow directories to be two levels deep
            entries = glob.glob("%s/*" % fullpath) + glob.glob("%s/*/*" % fullpath)
            for entry in entries:
                if os.path.isdir(entry) and entry not in search_paths:
                    search_paths.append(entry)
            if fullpath not in search_paths:
                search_paths.append(fullpath)

        # look in any configured plugin paths, allow one level deep for subcategories
        if self.config is not None:
            for raw in self.config.split(os.pathsep):
                configured = os.path.realpath(os.path.expanduser(raw))
                contents = glob.glob("%s/*" % configured) + glob.glob("%s/*/*" % configured)
                for entry in contents:
                    if os.path.isdir(entry) and entry not in search_paths:
                        search_paths.append(entry)
                if configured not in search_paths:
                    search_paths.append(configured)

        # look for any plugins installed in the package subtree
        search_paths.extend(self._get_package_paths())

        # cache and return the result
        self._paths = search_paths
        return search_paths

    def add_directory(self, directory, with_subdir=False):
        ''' Adds an additional directory to the search path '''
        directory = os.path.realpath(directory)
        if directory is None:
            return
        if with_subdir:
            directory = os.path.join(directory, self.subdir)
        if directory not in self._extra_dirs:
            # append the directory and invalidate the path cache
            self._extra_dirs.append(directory)
            self._paths = None

    def find_plugin(self, name, suffixes=None):
        ''' Find a plugin named name '''
        if not suffixes:
            # class-based plugins are python files; raw "module" plugins
            # may also be extensionless scripts
            suffixes = ['.py'] if self.class_name else ['.py', '']

        candidates = frozenset('%s%s' % (name, s) for s in suffixes)
        for candidate in candidates:
            if candidate in self._plugin_path_cache:
                return self._plugin_path_cache[candidate]

        # scan any paths not yet visited, filling the path cache as we go
        for path in [p for p in self._get_paths() if p not in self._searched_paths]:
            if os.path.isdir(path):
                full_paths = (os.path.join(path, f) for f in os.listdir(path))
                for full_path in (f for f in full_paths if os.path.isfile(f)):
                    for suffix in suffixes:
                        if full_path.endswith(suffix):
                            full_name = os.path.basename(full_path)
                            break
                    else:  # Yes, this is a for-else: http://bit.ly/1ElPkyg
                        continue

                    if full_name not in self._plugin_path_cache:
                        self._plugin_path_cache[full_name] = full_path

            self._searched_paths.add(path)
            for candidate in candidates:
                if candidate in self._plugin_path_cache:
                    return self._plugin_path_cache[candidate]

        # if nothing is found, try finding alias/deprecated
        if not name.startswith('_'):
            for alias_name in ('_%s' % n for n in candidates):
                # We've already cached all the paths at this point
                if alias_name in self._plugin_path_cache:
                    return self._plugin_path_cache[alias_name]

        return None

    def has_plugin(self, name):
        ''' Checks if a plugin named name exists '''
        return self.find_plugin(name) is not None

    __contains__ = has_plugin

    def get(self, name, *args, **kwargs):
        ''' instantiates a plugin of the given name using arguments '''
        if name in self.aliases:
            name = self.aliases[name]
        path = self.find_plugin(name)
        if path is None:
            return None
        if path not in self._module_cache:
            self._module_cache[path] = imp.load_source('.'.join([self.package, name]), path)
        return getattr(self._module_cache[path], self.class_name)(*args, **kwargs)

    def all(self, *args, **kwargs):
        ''' instantiates all plugins with the same arguments '''
        for search_path in self._get_paths():
            for path in sorted(glob.glob(os.path.join(search_path, "*.py"))):
                name, _ext = os.path.splitext(os.path.basename(path))
                if name.startswith("_"):
                    # underscore-prefixed files are aliases/deprecated
                    continue
                if path not in self._module_cache:
                    self._module_cache[path] = imp.load_source('.'.join([self.package, name]), path)
                yield getattr(self._module_cache[path], self.class_name)(*args, **kwargs)
|
||||
|
||||
action_loader = PluginLoader(
|
||||
'ActionModule',
|
||||
'ansible.runner.action_plugins',
|
||||
C.DEFAULT_ACTION_PLUGIN_PATH,
|
||||
'action_plugins'
|
||||
)
|
||||
|
||||
cache_loader = PluginLoader(
|
||||
'CacheModule',
|
||||
'ansible.cache',
|
||||
C.DEFAULT_CACHE_PLUGIN_PATH,
|
||||
'cache_plugins'
|
||||
)
|
||||
|
||||
callback_loader = PluginLoader(
|
||||
'CallbackModule',
|
||||
'ansible.callback_plugins',
|
||||
C.DEFAULT_CALLBACK_PLUGIN_PATH,
|
||||
'callback_plugins'
|
||||
)
|
||||
|
||||
connection_loader = PluginLoader(
|
||||
'Connection',
|
||||
'ansible.runner.connection_plugins',
|
||||
C.DEFAULT_CONNECTION_PLUGIN_PATH,
|
||||
'connection_plugins',
|
||||
aliases={'paramiko': 'paramiko_ssh'}
|
||||
)
|
||||
|
||||
shell_loader = PluginLoader(
|
||||
'ShellModule',
|
||||
'ansible.runner.shell_plugins',
|
||||
'shell_plugins',
|
||||
'shell_plugins',
|
||||
)
|
||||
|
||||
module_finder = PluginLoader(
|
||||
'',
|
||||
'ansible.modules',
|
||||
C.DEFAULT_MODULE_PATH,
|
||||
'library'
|
||||
)
|
||||
|
||||
lookup_loader = PluginLoader(
|
||||
'LookupModule',
|
||||
'ansible.runner.lookup_plugins',
|
||||
C.DEFAULT_LOOKUP_PLUGIN_PATH,
|
||||
'lookup_plugins'
|
||||
)
|
||||
|
||||
vars_loader = PluginLoader(
|
||||
'VarsModule',
|
||||
'ansible.inventory.vars_plugins',
|
||||
C.DEFAULT_VARS_PLUGIN_PATH,
|
||||
'vars_plugins'
|
||||
)
|
||||
|
||||
filter_loader = PluginLoader(
|
||||
'FilterModule',
|
||||
'ansible.runner.filter_plugins',
|
||||
C.DEFAULT_FILTER_PLUGIN_PATH,
|
||||
'filter_plugins'
|
||||
)
|
||||
|
||||
fragment_loader = PluginLoader(
|
||||
'ModuleDocFragment',
|
||||
'ansible.utils.module_docs_fragments',
|
||||
os.path.join(os.path.dirname(__file__), 'module_docs_fragments'),
|
||||
'',
|
||||
)
|
||||
@@ -1,18 +0,0 @@
|
||||
def isprintable(instring):
|
||||
if isinstance(instring, str):
|
||||
#http://stackoverflow.com/a/3637294
|
||||
import string
|
||||
printset = set(string.printable)
|
||||
isprintable = set(instring).issubset(printset)
|
||||
return isprintable
|
||||
else:
|
||||
return True
|
||||
|
||||
def count_newlines_from_end(str):
|
||||
i = len(str)
|
||||
while i > 0:
|
||||
if str[i-1] != '\n':
|
||||
break
|
||||
i -= 1
|
||||
return len(str) - i
|
||||
|
||||
@@ -1,60 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
|
||||
#
|
||||
# This file is part of Ansible
|
||||
#
|
||||
# Ansible is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# Ansible is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import re
|
||||
|
||||
SU_PROMPT_LOCALIZATIONS = [
|
||||
'Password',
|
||||
'암호',
|
||||
'パスワード',
|
||||
'Adgangskode',
|
||||
'Contraseña',
|
||||
'Contrasenya',
|
||||
'Hasło',
|
||||
'Heslo',
|
||||
'Jelszó',
|
||||
'Lösenord',
|
||||
'Mật khẩu',
|
||||
'Mot de passe',
|
||||
'Parola',
|
||||
'Parool',
|
||||
'Pasahitza',
|
||||
'Passord',
|
||||
'Passwort',
|
||||
'Salasana',
|
||||
'Sandi',
|
||||
'Senha',
|
||||
'Wachtwoord',
|
||||
'ססמה',
|
||||
'Лозинка',
|
||||
'Парола',
|
||||
'Пароль',
|
||||
'गुप्तशब्द',
|
||||
'शब्दकूट',
|
||||
'సంకేతపదము',
|
||||
'හස්පදය',
|
||||
'密码',
|
||||
'密碼',
|
||||
]
|
||||
|
||||
SU_PROMPT_LOCALIZATIONS_RE = re.compile("|".join(['(\w+\'s )?' + x + ' ?: ?' for x in SU_PROMPT_LOCALIZATIONS]), flags=re.IGNORECASE)
|
||||
|
||||
def check_su_prompt(data):
|
||||
return bool(SU_PROMPT_LOCALIZATIONS_RE.match(data))
|
||||
|
||||
@@ -1,404 +0,0 @@
|
||||
# (c) 2012, Michael DeHaan <michael.dehaan@gmail.com>
|
||||
#
|
||||
# This file is part of Ansible
|
||||
#
|
||||
# Ansible is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# Ansible is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import os
|
||||
import re
|
||||
import codecs
|
||||
import jinja2
|
||||
from jinja2.runtime import StrictUndefined
|
||||
from jinja2.exceptions import TemplateSyntaxError
|
||||
import yaml
|
||||
import json
|
||||
from ansible import errors
|
||||
import ansible.constants as C
|
||||
import time
|
||||
import subprocess
|
||||
import datetime
|
||||
import pwd
|
||||
import ast
|
||||
import traceback
|
||||
from numbers import Number
|
||||
|
||||
from ansible.utils.string_functions import count_newlines_from_end
|
||||
from ansible.utils import to_bytes, to_unicode
|
||||
|
||||
class Globals(object):
|
||||
|
||||
FILTERS = None
|
||||
|
||||
def __init__(self):
|
||||
pass
|
||||
|
||||
def _get_filters():
|
||||
''' return filter plugin instances '''
|
||||
|
||||
if Globals.FILTERS is not None:
|
||||
return Globals.FILTERS
|
||||
|
||||
from ansible import utils
|
||||
plugins = [ x for x in utils.plugins.filter_loader.all()]
|
||||
filters = {}
|
||||
for fp in plugins:
|
||||
filters.update(fp.filters())
|
||||
Globals.FILTERS = filters
|
||||
|
||||
return Globals.FILTERS
|
||||
|
||||
def _get_extensions():
|
||||
''' return jinja2 extensions to load '''
|
||||
|
||||
'''
|
||||
if some extensions are set via jinja_extensions in ansible.cfg, we try
|
||||
to load them with the jinja environment
|
||||
'''
|
||||
jinja_exts = []
|
||||
if C.DEFAULT_JINJA2_EXTENSIONS:
|
||||
'''
|
||||
Let's make sure the configuration directive doesn't contain spaces
|
||||
and split extensions in an array
|
||||
'''
|
||||
jinja_exts = C.DEFAULT_JINJA2_EXTENSIONS.replace(" ", "").split(',')
|
||||
|
||||
return jinja_exts
|
||||
|
||||
class Flags:
|
||||
LEGACY_TEMPLATE_WARNING = False
|
||||
|
||||
# TODO: refactor this file
|
||||
|
||||
FILTER_PLUGINS = None
|
||||
_LISTRE = re.compile(r"(\w+)\[(\d+)\]")
|
||||
|
||||
# A regex for checking to see if a variable we're trying to
|
||||
# expand is just a single variable name.
|
||||
SINGLE_VAR = re.compile(r"^{{\s*(\w*)\s*}}$")
|
||||
|
||||
JINJA2_OVERRIDE = '#jinja2:'
|
||||
JINJA2_ALLOWED_OVERRIDES = ['trim_blocks', 'lstrip_blocks', 'newline_sequence', 'keep_trailing_newline']
|
||||
|
||||
def lookup(name, *args, **kwargs):
|
||||
from ansible import utils
|
||||
instance = utils.plugins.lookup_loader.get(name.lower(), basedir=kwargs.get('basedir',None))
|
||||
tvars = kwargs.get('vars', None)
|
||||
|
||||
wantlist = kwargs.pop('wantlist', False)
|
||||
|
||||
if instance is not None:
|
||||
try:
|
||||
ran = instance.run(*args, inject=tvars, **kwargs)
|
||||
except errors.AnsibleError:
|
||||
raise
|
||||
except jinja2.exceptions.UndefinedError, e:
|
||||
raise errors.AnsibleUndefinedVariable("One or more undefined variables: %s" % str(e))
|
||||
except Exception, e:
|
||||
raise errors.AnsibleError('Unexpected error in during lookup: %s' % e)
|
||||
if ran and not wantlist:
|
||||
ran = ",".join(ran)
|
||||
return ran
|
||||
else:
|
||||
raise errors.AnsibleError("lookup plugin (%s) not found" % name)
|
||||
|
||||
def template(basedir, varname, templatevars, lookup_fatal=True, depth=0, expand_lists=True, convert_bare=False, fail_on_undefined=False, filter_fatal=True):
|
||||
''' templates a data structure by traversing it and substituting for other data structures '''
|
||||
from ansible import utils
|
||||
try:
|
||||
if convert_bare and isinstance(varname, basestring):
|
||||
first_part = varname.split(".")[0].split("[")[0]
|
||||
if first_part in templatevars and '{{' not in varname and '$' not in varname:
|
||||
varname = "{{%s}}" % varname
|
||||
|
||||
if isinstance(varname, basestring):
|
||||
if '{{' in varname or '{%' in varname:
|
||||
try:
|
||||
varname = template_from_string(basedir, varname, templatevars, fail_on_undefined)
|
||||
except errors.AnsibleError, e:
|
||||
raise errors.AnsibleError("Failed to template %s: %s" % (varname, str(e)))
|
||||
|
||||
# template_from_string may return non strings for the case where the var is just
|
||||
# a reference to a single variable, so we should re_check before we do further evals
|
||||
if isinstance(varname, basestring):
|
||||
if (varname.startswith("{") and not varname.startswith("{{")) or varname.startswith("["):
|
||||
eval_results = utils.safe_eval(varname, locals=templatevars, include_exceptions=True)
|
||||
if eval_results[1] is None:
|
||||
varname = eval_results[0]
|
||||
|
||||
return varname
|
||||
|
||||
elif isinstance(varname, (list, tuple)):
|
||||
return [template(basedir, v, templatevars, lookup_fatal, depth, expand_lists, convert_bare, fail_on_undefined, filter_fatal) for v in varname]
|
||||
elif isinstance(varname, dict):
|
||||
d = {}
|
||||
for (k, v) in varname.iteritems():
|
||||
d[k] = template(basedir, v, templatevars, lookup_fatal, depth, expand_lists, convert_bare, fail_on_undefined, filter_fatal)
|
||||
return d
|
||||
else:
|
||||
return varname
|
||||
except errors.AnsibleFilterError:
|
||||
if filter_fatal:
|
||||
raise
|
||||
else:
|
||||
return varname
|
||||
|
||||
|
||||
class _jinja2_vars(object):
|
||||
'''
|
||||
Helper class to template all variable content before jinja2 sees it.
|
||||
This is done by hijacking the variable storage that jinja2 uses, and
|
||||
overriding __contains__ and __getitem__ to look like a dict. Added bonus
|
||||
is avoiding duplicating the large hashes that inject tends to be.
|
||||
To facilitate using builtin jinja2 things like range, globals are handled
|
||||
here.
|
||||
extras is a list of locals to also search for variables.
|
||||
'''
|
||||
|
||||
def __init__(self, basedir, vars, globals, fail_on_undefined, *extras):
|
||||
self.basedir = basedir
|
||||
self.vars = vars
|
||||
self.globals = globals
|
||||
self.fail_on_undefined = fail_on_undefined
|
||||
self.extras = extras
|
||||
|
||||
def __contains__(self, k):
|
||||
if k in self.vars:
|
||||
return True
|
||||
for i in self.extras:
|
||||
if k in i:
|
||||
return True
|
||||
if k in self.globals:
|
||||
return True
|
||||
return False
|
||||
|
||||
def __getitem__(self, varname):
|
||||
from ansible.runner import HostVars
|
||||
if varname not in self.vars:
|
||||
for i in self.extras:
|
||||
if varname in i:
|
||||
return i[varname]
|
||||
if varname in self.globals:
|
||||
return self.globals[varname]
|
||||
else:
|
||||
raise KeyError("undefined variable: %s" % varname)
|
||||
var = self.vars[varname]
|
||||
# HostVars is special, return it as-is, as is the special variable
|
||||
# 'vars', which contains the vars structure
|
||||
var = to_unicode(var, nonstring="passthru")
|
||||
if isinstance(var, dict) and varname == "vars" or isinstance(var, HostVars):
|
||||
return var
|
||||
else:
|
||||
return template(self.basedir, var, self.vars, fail_on_undefined=self.fail_on_undefined)
|
||||
|
||||
def add_locals(self, locals):
|
||||
'''
|
||||
If locals are provided, create a copy of self containing those
|
||||
locals in addition to what is already in this variable proxy.
|
||||
'''
|
||||
if locals is None:
|
||||
return self
|
||||
return _jinja2_vars(self.basedir, self.vars, self.globals, self.fail_on_undefined, locals, *self.extras)
|
||||
|
||||
class J2Template(jinja2.environment.Template):
|
||||
'''
|
||||
This class prevents Jinja2 from running _jinja2_vars through dict()
|
||||
Without this, {% include %} and similar will create new contexts unlike
|
||||
the special one created in template_from_file. This ensures they are all
|
||||
alike, except for potential locals.
|
||||
'''
|
||||
def new_context(self, vars=None, shared=False, locals=None):
|
||||
return jinja2.runtime.Context(self.environment, vars.add_locals(locals), self.name, self.blocks)
|
||||
|
||||
def template_from_file(basedir, path, vars, vault_password=None):
|
||||
''' run a file through the templating engine '''
|
||||
|
||||
fail_on_undefined = C.DEFAULT_UNDEFINED_VAR_BEHAVIOR
|
||||
|
||||
from ansible import utils
|
||||
realpath = utils.path_dwim(basedir, path)
|
||||
loader=jinja2.FileSystemLoader([basedir,os.path.dirname(realpath)])
|
||||
|
||||
def my_lookup(*args, **kwargs):
|
||||
kwargs['vars'] = vars
|
||||
return lookup(*args, basedir=basedir, **kwargs)
|
||||
def my_finalize(thing):
|
||||
return thing if thing is not None else ''
|
||||
|
||||
environment = jinja2.Environment(loader=loader, trim_blocks=True, extensions=_get_extensions())
|
||||
environment.filters.update(_get_filters())
|
||||
environment.globals['lookup'] = my_lookup
|
||||
environment.globals['finalize'] = my_finalize
|
||||
if fail_on_undefined:
|
||||
environment.undefined = StrictUndefined
|
||||
|
||||
try:
|
||||
data = codecs.open(realpath, encoding="utf8").read()
|
||||
except UnicodeDecodeError:
|
||||
raise errors.AnsibleError("unable to process as utf-8: %s" % realpath)
|
||||
except:
|
||||
raise errors.AnsibleError("unable to read %s" % realpath)
|
||||
|
||||
# Get jinja env overrides from template
|
||||
if data.startswith(JINJA2_OVERRIDE):
|
||||
eol = data.find('\n')
|
||||
line = data[len(JINJA2_OVERRIDE):eol]
|
||||
data = data[eol+1:]
|
||||
for pair in line.split(','):
|
||||
(key,val) = pair.split(':')
|
||||
key = key.strip()
|
||||
if key in JINJA2_ALLOWED_OVERRIDES:
|
||||
setattr(environment, key, ast.literal_eval(val.strip()))
|
||||
|
||||
|
||||
environment.template_class = J2Template
|
||||
try:
|
||||
t = environment.from_string(data)
|
||||
except TemplateSyntaxError, e:
|
||||
# Throw an exception which includes a more user friendly error message
|
||||
values = {'name': realpath, 'lineno': e.lineno, 'error': str(e)}
|
||||
msg = 'file: %(name)s, line number: %(lineno)s, error: %(error)s' % \
|
||||
values
|
||||
error = errors.AnsibleError(msg)
|
||||
raise error
|
||||
vars = vars.copy()
|
||||
try:
|
||||
template_uid = pwd.getpwuid(os.stat(realpath).st_uid).pw_name
|
||||
except:
|
||||
template_uid = os.stat(realpath).st_uid
|
||||
vars['template_host'] = os.uname()[1]
|
||||
vars['template_path'] = realpath
|
||||
vars['template_mtime'] = datetime.datetime.fromtimestamp(os.path.getmtime(realpath))
|
||||
vars['template_uid'] = template_uid
|
||||
vars['template_fullpath'] = os.path.abspath(realpath)
|
||||
vars['template_run_date'] = datetime.datetime.now()
|
||||
|
||||
managed_default = C.DEFAULT_MANAGED_STR
|
||||
managed_str = managed_default.format(
|
||||
host = vars['template_host'],
|
||||
uid = vars['template_uid'],
|
||||
file = to_bytes(vars['template_path'])
|
||||
)
|
||||
vars['ansible_managed'] = time.strftime(
|
||||
managed_str,
|
||||
time.localtime(os.path.getmtime(realpath))
|
||||
)
|
||||
|
||||
# This line performs deep Jinja2 magic that uses the _jinja2_vars object for vars
|
||||
# Ideally, this could use some API where setting shared=True and the object won't get
|
||||
# passed through dict(o), but I have not found that yet.
|
||||
try:
|
||||
res = jinja2.utils.concat(t.root_render_func(t.new_context(_jinja2_vars(basedir, vars, t.globals, fail_on_undefined), shared=True)))
|
||||
except jinja2.exceptions.UndefinedError, e:
|
||||
raise errors.AnsibleUndefinedVariable("One or more undefined variables: %s" % str(e))
|
||||
except jinja2.exceptions.TemplateNotFound, e:
|
||||
# Throw an exception which includes a more user friendly error message
|
||||
# This likely will happen for included sub-template. Not that besides
|
||||
# pure "file not found" it may happen due to Jinja2's "security"
|
||||
# checks on path.
|
||||
values = {'name': realpath, 'subname': str(e)}
|
||||
msg = 'file: %(name)s, error: Cannot find/not allowed to load (include) template %(subname)s' % \
|
||||
values
|
||||
error = errors.AnsibleError(msg)
|
||||
raise error
|
||||
|
||||
# The low level calls above do not preserve the newline
|
||||
# characters at the end of the input data, so we use the
|
||||
# calculate the difference in newlines and append them
|
||||
# to the resulting output for parity
|
||||
res_newlines = count_newlines_from_end(res)
|
||||
data_newlines = count_newlines_from_end(data)
|
||||
if data_newlines > res_newlines:
|
||||
res += '\n' * (data_newlines - res_newlines)
|
||||
|
||||
if isinstance(res, unicode):
|
||||
# do not try to re-template a unicode string
|
||||
result = res
|
||||
else:
|
||||
result = template(basedir, res, vars)
|
||||
|
||||
return result
|
||||
|
||||
def template_from_string(basedir, data, vars, fail_on_undefined=False):
|
||||
''' run a string through the (Jinja2) templating engine '''
|
||||
try:
|
||||
if type(data) == str:
|
||||
data = unicode(data, 'utf-8')
|
||||
|
||||
# Check to see if the string we are trying to render is just referencing a single
|
||||
# var. In this case we don't want to accidentally change the type of the variable
|
||||
# to a string by using the jinja template renderer. We just want to pass it.
|
||||
only_one = SINGLE_VAR.match(data)
|
||||
if only_one:
|
||||
var_name = only_one.group(1)
|
||||
if var_name in vars:
|
||||
resolved_val = vars[var_name]
|
||||
if isinstance(resolved_val, (bool, Number)):
|
||||
return resolved_val
|
||||
|
||||
def my_finalize(thing):
|
||||
return thing if thing is not None else ''
|
||||
|
||||
environment = jinja2.Environment(trim_blocks=True, undefined=StrictUndefined, extensions=_get_extensions(), finalize=my_finalize)
|
||||
environment.filters.update(_get_filters())
|
||||
environment.template_class = J2Template
|
||||
|
||||
if '_original_file' in vars:
|
||||
basedir = os.path.dirname(vars['_original_file'])
|
||||
filesdir = os.path.abspath(os.path.join(basedir, '..', 'files'))
|
||||
if os.path.exists(filesdir):
|
||||
basedir = filesdir
|
||||
|
||||
# 6227
|
||||
if isinstance(data, unicode):
|
||||
try:
|
||||
data = data.decode('utf-8')
|
||||
except UnicodeEncodeError, e:
|
||||
pass
|
||||
|
||||
try:
|
||||
t = environment.from_string(data)
|
||||
except TemplateSyntaxError, e:
|
||||
raise errors.AnsibleError("template error while templating string: %s" % str(e))
|
||||
except Exception, e:
|
||||
if 'recursion' in str(e):
|
||||
raise errors.AnsibleError("recursive loop detected in template string: %s" % data)
|
||||
else:
|
||||
return data
|
||||
|
||||
def my_lookup(*args, **kwargs):
|
||||
kwargs['vars'] = vars
|
||||
return lookup(*args, basedir=basedir, **kwargs)
|
||||
|
||||
t.globals['lookup'] = my_lookup
|
||||
t.globals['finalize'] = my_finalize
|
||||
jvars =_jinja2_vars(basedir, vars, t.globals, fail_on_undefined)
|
||||
new_context = t.new_context(jvars, shared=True)
|
||||
rf = t.root_render_func(new_context)
|
||||
try:
|
||||
res = jinja2.utils.concat(rf)
|
||||
except TypeError, te:
|
||||
if 'StrictUndefined' in str(te):
|
||||
raise errors.AnsibleUndefinedVariable(
|
||||
"Unable to look up a name or access an attribute in template string. " + \
|
||||
"Make sure your variable name does not contain invalid characters like '-'."
|
||||
)
|
||||
else:
|
||||
raise errors.AnsibleError("an unexpected type error occurred. Error was %s" % te)
|
||||
return res
|
||||
except (jinja2.exceptions.UndefinedError, errors.AnsibleUndefinedVariable):
|
||||
if fail_on_undefined:
|
||||
raise
|
||||
else:
|
||||
return data
|
||||
|
||||
@@ -19,6 +19,8 @@
|
||||
from __future__ import (absolute_import, division, print_function)
|
||||
__metaclass__ = type
|
||||
|
||||
from six import string_types, text_type, binary_type, PY3
|
||||
|
||||
# to_bytes and to_unicode were written by Toshio Kuratomi for the
|
||||
# python-kitchen library https://pypi.python.org/pypi/kitchen
|
||||
# They are licensed in kitchen under the terms of the GPLv2+
|
||||
@@ -35,6 +37,9 @@ _LATIN1_ALIASES = frozenset(('latin-1', 'LATIN-1', 'latin1', 'LATIN1',
|
||||
|
||||
# EXCEPTION_CONVERTERS is defined below due to using to_unicode
|
||||
|
||||
if PY3:
|
||||
basestring = (str, bytes)
|
||||
|
||||
def to_unicode(obj, encoding='utf-8', errors='replace', nonstring=None):
|
||||
'''Convert an object into a :class:`unicode` string
|
||||
|
||||
@@ -89,12 +94,12 @@ def to_unicode(obj, encoding='utf-8', errors='replace', nonstring=None):
|
||||
# Could use isbasestring/isunicode here but we want this code to be as
|
||||
# fast as possible
|
||||
if isinstance(obj, basestring):
|
||||
if isinstance(obj, unicode):
|
||||
if isinstance(obj, text_type):
|
||||
return obj
|
||||
if encoding in _UTF8_ALIASES:
|
||||
return unicode(obj, 'utf-8', errors)
|
||||
return text_type(obj, 'utf-8', errors)
|
||||
if encoding in _LATIN1_ALIASES:
|
||||
return unicode(obj, 'latin-1', errors)
|
||||
return text_type(obj, 'latin-1', errors)
|
||||
return obj.decode(encoding, errors)
|
||||
|
||||
if not nonstring:
|
||||
@@ -110,19 +115,19 @@ def to_unicode(obj, encoding='utf-8', errors='replace', nonstring=None):
|
||||
simple = None
|
||||
if not simple:
|
||||
try:
|
||||
simple = str(obj)
|
||||
simple = text_type(obj)
|
||||
except UnicodeError:
|
||||
try:
|
||||
simple = obj.__str__()
|
||||
except (UnicodeError, AttributeError):
|
||||
simple = u''
|
||||
if isinstance(simple, str):
|
||||
return unicode(simple, encoding, errors)
|
||||
if isinstance(simple, binary_type):
|
||||
return text_type(simple, encoding, errors)
|
||||
return simple
|
||||
elif nonstring in ('repr', 'strict'):
|
||||
obj_repr = repr(obj)
|
||||
if isinstance(obj_repr, str):
|
||||
obj_repr = unicode(obj_repr, encoding, errors)
|
||||
if isinstance(obj_repr, binary_type):
|
||||
obj_repr = text_type(obj_repr, encoding, errors)
|
||||
if nonstring == 'repr':
|
||||
return obj_repr
|
||||
raise TypeError('to_unicode was given "%(obj)s" which is neither'
|
||||
@@ -198,19 +203,19 @@ def to_bytes(obj, encoding='utf-8', errors='replace', nonstring=None):
|
||||
# Could use isbasestring, isbytestring here but we want this to be as fast
|
||||
# as possible
|
||||
if isinstance(obj, basestring):
|
||||
if isinstance(obj, str):
|
||||
if isinstance(obj, binary_type):
|
||||
return obj
|
||||
return obj.encode(encoding, errors)
|
||||
if not nonstring:
|
||||
nonstring = 'simplerepr'
|
||||
|
||||
if nonstring == 'empty':
|
||||
return ''
|
||||
return b''
|
||||
elif nonstring == 'passthru':
|
||||
return obj
|
||||
elif nonstring == 'simplerepr':
|
||||
try:
|
||||
simple = str(obj)
|
||||
simple = binary_type(obj)
|
||||
except UnicodeError:
|
||||
try:
|
||||
simple = obj.__str__()
|
||||
@@ -220,19 +225,19 @@ def to_bytes(obj, encoding='utf-8', errors='replace', nonstring=None):
|
||||
try:
|
||||
simple = obj.__unicode__()
|
||||
except (AttributeError, UnicodeError):
|
||||
simple = ''
|
||||
if isinstance(simple, unicode):
|
||||
simple = b''
|
||||
if isinstance(simple, text_type):
|
||||
simple = simple.encode(encoding, 'replace')
|
||||
return simple
|
||||
elif nonstring in ('repr', 'strict'):
|
||||
try:
|
||||
obj_repr = obj.__repr__()
|
||||
except (AttributeError, UnicodeError):
|
||||
obj_repr = ''
|
||||
if isinstance(obj_repr, unicode):
|
||||
obj_repr = b''
|
||||
if isinstance(obj_repr, text_type):
|
||||
obj_repr = obj_repr.encode(encoding, errors)
|
||||
else:
|
||||
obj_repr = str(obj_repr)
|
||||
obj_repr = binary_type(obj_repr)
|
||||
if nonstring == 'repr':
|
||||
return obj_repr
|
||||
raise TypeError('to_bytes was given "%(obj)s" which is neither'
|
||||
|
||||
51
lib/ansible/utils/vars.py
Normal file
51
lib/ansible/utils/vars.py
Normal file
@@ -0,0 +1,51 @@
|
||||
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
|
||||
#
|
||||
# This file is part of Ansible
|
||||
#
|
||||
# Ansible is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# Ansible is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
# Make coding more python3-ish
|
||||
from __future__ import (absolute_import, division, print_function)
|
||||
__metaclass__ = type
|
||||
|
||||
|
||||
from ansible import constants as C
|
||||
|
||||
def combine_vars(a, b):
|
||||
|
||||
if C.DEFAULT_HASH_BEHAVIOUR == "merge":
|
||||
return merge_hash(a, b)
|
||||
else:
|
||||
return dict(a.items() + b.items())
|
||||
|
||||
def merge_hash(a, b):
|
||||
''' recursively merges hash b into a
|
||||
keys from b take precedence over keys from a '''
|
||||
|
||||
result = {}
|
||||
|
||||
for dicts in a, b:
|
||||
# next, iterate over b keys and values
|
||||
for k, v in dicts.iteritems():
|
||||
# if there's already such key in a
|
||||
# and that key contains dict
|
||||
if k in result and isinstance(result[k], dict):
|
||||
# merge those dicts recursively
|
||||
result[k] = merge_hash(a[k], v)
|
||||
else:
|
||||
# otherwise, just copy a value from b to a
|
||||
result[k] = v
|
||||
|
||||
return result
|
||||
|
||||
@@ -1,4 +1,6 @@
|
||||
# (c) 2014, James Tanner <tanner.jc@gmail.com>
|
||||
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
|
||||
#
|
||||
# This file is part of Ansible
|
||||
#
|
||||
# Ansible is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
@@ -12,574 +14,43 @@
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
|
||||
#
|
||||
# ansible-pull is a script that runs ansible in local mode
|
||||
# after checking out a playbooks directory from source repo. There is an
|
||||
# example playbook to bootstrap this script in the examples/ dir which
|
||||
# installs ansible and sets it up to run on cron.
|
||||
|
||||
# Make coding more python3-ish
|
||||
from __future__ import (absolute_import, division, print_function)
|
||||
__metaclass__ = type
|
||||
|
||||
import os
|
||||
import shlex
|
||||
import shutil
|
||||
import tempfile
|
||||
from io import BytesIO
|
||||
from subprocess import call
|
||||
from ansible import errors
|
||||
from hashlib import sha256
|
||||
import subprocess
|
||||
|
||||
# Note: Only used for loading obsolete VaultAES files. All files are written
|
||||
# using the newer VaultAES256 which does not require md5
|
||||
try:
|
||||
from hashlib import md5
|
||||
except ImportError:
|
||||
try:
|
||||
from md5 import md5
|
||||
except ImportError:
|
||||
# MD5 unavailable. Possibly FIPS mode
|
||||
md5 = None
|
||||
|
||||
from binascii import hexlify
|
||||
from binascii import unhexlify
|
||||
from ansible import constants as C
|
||||
from ansible.errors import AnsibleError
|
||||
from ansible.utils.path import is_executable
|
||||
|
||||
try:
|
||||
from Crypto.Hash import SHA256, HMAC
|
||||
HAS_HASH = True
|
||||
except ImportError:
|
||||
HAS_HASH = False
|
||||
def read_vault_file(vault_password_file):
|
||||
"""
|
||||
Read a vault password from a file or if executable, execute the script and
|
||||
retrieve password from STDOUT
|
||||
"""
|
||||
|
||||
# Counter import fails for 2.0.1, requires >= 2.6.1 from pip
|
||||
try:
|
||||
from Crypto.Util import Counter
|
||||
HAS_COUNTER = True
|
||||
except ImportError:
|
||||
HAS_COUNTER = False
|
||||
this_path = os.path.realpath(os.path.expanduser(vault_password_file))
|
||||
if not os.path.exists(this_path):
|
||||
raise AnsibleError("The vault password file %s was not found" % this_path)
|
||||
|
||||
# KDF import fails for 2.0.1, requires >= 2.6.1 from pip
|
||||
try:
|
||||
from Crypto.Protocol.KDF import PBKDF2
|
||||
HAS_PBKDF2 = True
|
||||
except ImportError:
|
||||
HAS_PBKDF2 = False
|
||||
|
||||
# AES IMPORTS
|
||||
try:
|
||||
from Crypto.Cipher import AES as AES
|
||||
HAS_AES = True
|
||||
except ImportError:
|
||||
HAS_AES = False
|
||||
|
||||
CRYPTO_UPGRADE = "ansible-vault requires a newer version of pycrypto than the one installed on your platform. You may fix this with OS-specific commands such as: yum install python-devel; rpm -e --nodeps python-crypto; pip install pycrypto"
|
||||
|
||||
HEADER='$ANSIBLE_VAULT'
|
||||
CIPHER_WHITELIST=['AES', 'AES256']
|
||||
|
||||
class VaultLib(object):
|
||||
|
||||
def __init__(self, password):
|
||||
self.password = password
|
||||
self.cipher_name = None
|
||||
self.version = '1.1'
|
||||
|
||||
def is_encrypted(self, data):
|
||||
if data.startswith(HEADER):
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
|
||||
def encrypt(self, data):
|
||||
|
||||
if self.is_encrypted(data):
|
||||
raise errors.AnsibleError("data is already encrypted")
|
||||
|
||||
if not self.cipher_name:
|
||||
self.cipher_name = "AES256"
|
||||
#raise errors.AnsibleError("the cipher must be set before encrypting data")
|
||||
|
||||
if 'Vault' + self.cipher_name in globals() and self.cipher_name in CIPHER_WHITELIST:
|
||||
cipher = globals()['Vault' + self.cipher_name]
|
||||
this_cipher = cipher()
|
||||
else:
|
||||
raise errors.AnsibleError("%s cipher could not be found" % self.cipher_name)
|
||||
|
||||
"""
|
||||
# combine sha + data
|
||||
this_sha = sha256(data).hexdigest()
|
||||
tmp_data = this_sha + "\n" + data
|
||||
"""
|
||||
|
||||
# encrypt sha + data
|
||||
enc_data = this_cipher.encrypt(data, self.password)
|
||||
|
||||
# add header
|
||||
tmp_data = self._add_header(enc_data)
|
||||
return tmp_data
|
||||
|
||||
def decrypt(self, data):
|
||||
if self.password is None:
|
||||
raise errors.AnsibleError("A vault password must be specified to decrypt data")
|
||||
|
||||
if not self.is_encrypted(data):
|
||||
raise errors.AnsibleError("data is not encrypted")
|
||||
|
||||
# clean out header
|
||||
data = self._split_header(data)
|
||||
|
||||
# create the cipher object
|
||||
if 'Vault' + self.cipher_name in globals() and self.cipher_name in CIPHER_WHITELIST:
|
||||
cipher = globals()['Vault' + self.cipher_name]
|
||||
this_cipher = cipher()
|
||||
else:
|
||||
raise errors.AnsibleError("%s cipher could not be found" % self.cipher_name)
|
||||
|
||||
# try to unencrypt data
|
||||
data = this_cipher.decrypt(data, self.password)
|
||||
if data is None:
|
||||
raise errors.AnsibleError("Decryption failed")
|
||||
|
||||
return data
|
||||
|
||||
def _add_header(self, data):
|
||||
# combine header and encrypted data in 80 char columns
|
||||
|
||||
#tmpdata = hexlify(data)
|
||||
tmpdata = [data[i:i+80] for i in range(0, len(data), 80)]
|
||||
|
||||
if not self.cipher_name:
|
||||
raise errors.AnsibleError("the cipher must be set before adding a header")
|
||||
|
||||
dirty_data = HEADER + ";" + str(self.version) + ";" + self.cipher_name + "\n"
|
||||
|
||||
for l in tmpdata:
|
||||
dirty_data += l + '\n'
|
||||
|
||||
return dirty_data
|
||||
|
||||
|
||||
def _split_header(self, data):
|
||||
# used by decrypt
|
||||
|
||||
tmpdata = data.split('\n')
|
||||
tmpheader = tmpdata[0].strip().split(';')
|
||||
|
||||
self.version = str(tmpheader[1].strip())
|
||||
self.cipher_name = str(tmpheader[2].strip())
|
||||
clean_data = '\n'.join(tmpdata[1:])
|
||||
|
||||
"""
|
||||
# strip out newline, join, unhex
|
||||
clean_data = [ x.strip() for x in clean_data ]
|
||||
clean_data = unhexlify(''.join(clean_data))
|
||||
"""
|
||||
|
||||
return clean_data
|
||||
|
||||
def __enter__(self):
|
||||
return self
|
||||
|
||||
def __exit__(self, *err):
|
||||
pass
|
||||
|
||||
class VaultEditor(object):
|
||||
# uses helper methods for write_file(self, filename, data)
|
||||
# to write a file so that code isn't duplicated for simple
|
||||
# file I/O, ditto read_file(self, filename) and launch_editor(self, filename)
|
||||
# ... "Don't Repeat Yourself", etc.
|
||||
|
||||
def __init__(self, cipher_name, password, filename):
|
||||
# instantiates a member variable for VaultLib
|
||||
self.cipher_name = cipher_name
|
||||
self.password = password
|
||||
self.filename = filename
|
||||
|
||||
def _edit_file_helper(self, existing_data=None, cipher=None):
|
||||
# make sure the umask is set to a sane value
|
||||
old_umask = os.umask(0o077)
|
||||
|
||||
# Create a tempfile
|
||||
_, tmp_path = tempfile.mkstemp()
|
||||
|
||||
if existing_data:
|
||||
self.write_data(existing_data, tmp_path)
|
||||
|
||||
# drop the user into an editor on the tmp file
|
||||
if is_executable(this_path):
|
||||
try:
|
||||
call(self._editor_shell_command(tmp_path))
|
||||
except OSError, e:
|
||||
raise Exception("Failed to open editor (%s): %s" % (self._editor_shell_command(tmp_path)[0],str(e)))
|
||||
tmpdata = self.read_data(tmp_path)
|
||||
|
||||
# create new vault
|
||||
this_vault = VaultLib(self.password)
|
||||
if cipher:
|
||||
this_vault.cipher_name = cipher
|
||||
|
||||
# encrypt new data and write out to tmp
|
||||
enc_data = this_vault.encrypt(tmpdata)
|
||||
self.write_data(enc_data, tmp_path)
|
||||
|
||||
# shuffle tmp file into place
|
||||
self.shuffle_files(tmp_path, self.filename)
|
||||
|
||||
# and restore umask
|
||||
os.umask(old_umask)
|
||||
|
||||
def create_file(self):
|
||||
""" create a new encrypted file """
|
||||
|
||||
if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2 or not HAS_HASH:
|
||||
raise errors.AnsibleError(CRYPTO_UPGRADE)
|
||||
|
||||
if os.path.isfile(self.filename):
|
||||
raise errors.AnsibleError("%s exists, please use 'edit' instead" % self.filename)
|
||||
|
||||
# Let the user specify contents and save file
|
||||
self._edit_file_helper(cipher=self.cipher_name)
|
||||
|
||||
def decrypt_file(self):
|
||||
|
||||
if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2 or not HAS_HASH:
|
||||
raise errors.AnsibleError(CRYPTO_UPGRADE)
|
||||
|
||||
if not os.path.isfile(self.filename):
|
||||
raise errors.AnsibleError("%s does not exist" % self.filename)
|
||||
|
||||
tmpdata = self.read_data(self.filename)
|
||||
this_vault = VaultLib(self.password)
|
||||
if this_vault.is_encrypted(tmpdata):
|
||||
dec_data = this_vault.decrypt(tmpdata)
|
||||
if dec_data is None:
|
||||
raise errors.AnsibleError("Decryption failed")
|
||||
else:
|
||||
self.write_data(dec_data, self.filename)
|
||||
else:
|
||||
raise errors.AnsibleError("%s is not encrypted" % self.filename)
|
||||
|
||||
def edit_file(self):
|
||||
|
||||
if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2 or not HAS_HASH:
|
||||
raise errors.AnsibleError(CRYPTO_UPGRADE)
|
||||
|
||||
# decrypt to tmpfile
|
||||
tmpdata = self.read_data(self.filename)
|
||||
this_vault = VaultLib(self.password)
|
||||
dec_data = this_vault.decrypt(tmpdata)
|
||||
|
||||
# let the user edit the data and save
|
||||
self._edit_file_helper(existing_data=dec_data)
|
||||
###we want the cipher to default to AES256 (get rid of files
|
||||
# encrypted with the AES cipher)
|
||||
#self._edit_file_helper(existing_data=dec_data, cipher=this_vault.cipher_name)
|
||||
|
||||
|
||||
def view_file(self):
|
||||
|
||||
if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2 or not HAS_HASH:
|
||||
raise errors.AnsibleError(CRYPTO_UPGRADE)
|
||||
|
||||
# decrypt to tmpfile
|
||||
tmpdata = self.read_data(self.filename)
|
||||
this_vault = VaultLib(self.password)
|
||||
dec_data = this_vault.decrypt(tmpdata)
|
||||
old_umask = os.umask(0o077)
|
||||
_, tmp_path = tempfile.mkstemp()
|
||||
self.write_data(dec_data, tmp_path)
|
||||
os.umask(old_umask)
|
||||
|
||||
# drop the user into pager on the tmp file
|
||||
call(self._pager_shell_command(tmp_path))
|
||||
os.remove(tmp_path)
|
||||
|
||||
def encrypt_file(self):
|
||||
|
||||
if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2 or not HAS_HASH:
|
||||
raise errors.AnsibleError(CRYPTO_UPGRADE)
|
||||
|
||||
if not os.path.isfile(self.filename):
|
||||
raise errors.AnsibleError("%s does not exist" % self.filename)
|
||||
|
||||
tmpdata = self.read_data(self.filename)
|
||||
this_vault = VaultLib(self.password)
|
||||
this_vault.cipher_name = self.cipher_name
|
||||
if not this_vault.is_encrypted(tmpdata):
|
||||
enc_data = this_vault.encrypt(tmpdata)
|
||||
self.write_data(enc_data, self.filename)
|
||||
else:
|
||||
raise errors.AnsibleError("%s is already encrypted" % self.filename)
|
||||
|
||||
def rekey_file(self, new_password):
|
||||
|
||||
if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2 or not HAS_HASH:
|
||||
raise errors.AnsibleError(CRYPTO_UPGRADE)
|
||||
|
||||
# decrypt
|
||||
tmpdata = self.read_data(self.filename)
|
||||
this_vault = VaultLib(self.password)
|
||||
dec_data = this_vault.decrypt(tmpdata)
|
||||
|
||||
# create new vault
|
||||
new_vault = VaultLib(new_password)
|
||||
|
||||
# we want to force cipher to the default
|
||||
#new_vault.cipher_name = this_vault.cipher_name
|
||||
|
||||
# re-encrypt data and re-write file
|
||||
enc_data = new_vault.encrypt(dec_data)
|
||||
self.write_data(enc_data, self.filename)
|
||||
|
||||
def read_data(self, filename):
|
||||
f = open(filename, "rb")
|
||||
tmpdata = f.read()
|
||||
f.close()
|
||||
return tmpdata
|
||||
|
||||
def write_data(self, data, filename):
|
||||
if os.path.isfile(filename):
|
||||
os.remove(filename)
|
||||
f = open(filename, "wb")
|
||||
f.write(data)
|
||||
f.close()
|
||||
|
||||
def shuffle_files(self, src, dest):
|
||||
# overwrite dest with src
|
||||
if os.path.isfile(dest):
|
||||
os.remove(dest)
|
||||
shutil.move(src, dest)
|
||||
|
||||
def _editor_shell_command(self, filename):
|
||||
EDITOR = os.environ.get('EDITOR','vim')
|
||||
editor = shlex.split(EDITOR)
|
||||
editor.append(filename)
|
||||
|
||||
return editor
|
||||
|
||||
def _pager_shell_command(self, filename):
|
||||
PAGER = os.environ.get('PAGER','less')
|
||||
pager = shlex.split(PAGER)
|
||||
pager.append(filename)
|
||||
|
||||
return pager
|
||||
|
||||
########################################
|
||||
# CIPHERS #
|
||||
########################################
|
||||
|
||||
class VaultAES(object):
|
||||
|
||||
# this version has been obsoleted by the VaultAES256 class
|
||||
# which uses encrypt-then-mac (fixing order) and also improving the KDF used
|
||||
# code remains for upgrade purposes only
|
||||
# http://stackoverflow.com/a/16761459
|
||||
|
||||
def __init__(self):
|
||||
if not md5:
|
||||
raise errors.AnsibleError('md5 hash is unavailable (Could be due to FIPS mode). Legacy VaultAES format is unavailable.')
|
||||
if not HAS_AES:
|
||||
raise errors.AnsibleError(CRYPTO_UPGRADE)
|
||||
|
||||
def aes_derive_key_and_iv(self, password, salt, key_length, iv_length):
|
||||
|
||||
""" Create a key and an initialization vector """
|
||||
|
||||
d = d_i = ''
|
||||
while len(d) < key_length + iv_length:
|
||||
d_i = md5(d_i + password + salt).digest()
|
||||
d += d_i
|
||||
|
||||
key = d[:key_length]
|
||||
iv = d[key_length:key_length+iv_length]
|
||||
|
||||
return key, iv
|
||||
|
||||
def encrypt(self, data, password, key_length=32):
|
||||
|
||||
""" Read plaintext data from in_file and write encrypted to out_file """
|
||||
|
||||
|
||||
# combine sha + data
|
||||
this_sha = sha256(data).hexdigest()
|
||||
tmp_data = this_sha + "\n" + data
|
||||
|
||||
in_file = BytesIO(tmp_data)
|
||||
in_file.seek(0)
|
||||
out_file = BytesIO()
|
||||
|
||||
bs = AES.block_size
|
||||
|
||||
# Get a block of random data. EL does not have Crypto.Random.new()
|
||||
# so os.urandom is used for cross platform purposes
|
||||
salt = os.urandom(bs - len('Salted__'))
|
||||
|
||||
key, iv = self.aes_derive_key_and_iv(password, salt, key_length, bs)
|
||||
cipher = AES.new(key, AES.MODE_CBC, iv)
|
||||
out_file.write('Salted__' + salt)
|
||||
finished = False
|
||||
while not finished:
|
||||
chunk = in_file.read(1024 * bs)
|
||||
if len(chunk) == 0 or len(chunk) % bs != 0:
|
||||
padding_length = (bs - len(chunk) % bs) or bs
|
||||
chunk += padding_length * chr(padding_length)
|
||||
finished = True
|
||||
out_file.write(cipher.encrypt(chunk))
|
||||
|
||||
out_file.seek(0)
|
||||
enc_data = out_file.read()
|
||||
tmp_data = hexlify(enc_data)
|
||||
|
||||
return tmp_data
|
||||
|
||||
|
||||
def decrypt(self, data, password, key_length=32):
|
||||
|
||||
""" Read encrypted data from in_file and write decrypted to out_file """
|
||||
|
||||
# http://stackoverflow.com/a/14989032
|
||||
|
||||
data = ''.join(data.split('\n'))
|
||||
data = unhexlify(data)
|
||||
|
||||
in_file = BytesIO(data)
|
||||
in_file.seek(0)
|
||||
out_file = BytesIO()
|
||||
|
||||
bs = AES.block_size
|
||||
salt = in_file.read(bs)[len('Salted__'):]
|
||||
key, iv = self.aes_derive_key_and_iv(password, salt, key_length, bs)
|
||||
cipher = AES.new(key, AES.MODE_CBC, iv)
|
||||
next_chunk = ''
|
||||
finished = False
|
||||
|
||||
while not finished:
|
||||
chunk, next_chunk = next_chunk, cipher.decrypt(in_file.read(1024 * bs))
|
||||
if len(next_chunk) == 0:
|
||||
padding_length = ord(chunk[-1])
|
||||
chunk = chunk[:-padding_length]
|
||||
finished = True
|
||||
out_file.write(chunk)
|
||||
|
||||
# reset the stream pointer to the beginning
|
||||
out_file.seek(0)
|
||||
new_data = out_file.read()
|
||||
|
||||
# split out sha and verify decryption
|
||||
split_data = new_data.split("\n")
|
||||
this_sha = split_data[0]
|
||||
this_data = '\n'.join(split_data[1:])
|
||||
test_sha = sha256(this_data).hexdigest()
|
||||
|
||||
if this_sha != test_sha:
|
||||
raise errors.AnsibleError("Decryption failed")
|
||||
|
||||
#return out_file.read()
|
||||
return this_data
|
||||
|
||||
|
||||
class VaultAES256(object):
|
||||
|
||||
"""
|
||||
Vault implementation using AES-CTR with an HMAC-SHA256 authentication code.
|
||||
Keys are derived using PBKDF2
|
||||
"""
|
||||
|
||||
# http://www.daemonology.net/blog/2009-06-11-cryptographic-right-answers.html
|
||||
|
||||
def __init__(self):
|
||||
|
||||
if not HAS_PBKDF2 or not HAS_COUNTER or not HAS_HASH:
|
||||
raise errors.AnsibleError(CRYPTO_UPGRADE)
|
||||
|
||||
def gen_key_initctr(self, password, salt):
|
||||
# 16 for AES 128, 32 for AES256
|
||||
keylength = 32
|
||||
|
||||
# match the size used for counter.new to avoid extra work
|
||||
ivlength = 16
|
||||
|
||||
hash_function = SHA256
|
||||
|
||||
# make two keys and one iv
|
||||
pbkdf2_prf = lambda p, s: HMAC.new(p, s, hash_function).digest()
|
||||
|
||||
|
||||
derivedkey = PBKDF2(password, salt, dkLen=(2 * keylength) + ivlength,
|
||||
count=10000, prf=pbkdf2_prf)
|
||||
|
||||
key1 = derivedkey[:keylength]
|
||||
key2 = derivedkey[keylength:(keylength * 2)]
|
||||
iv = derivedkey[(keylength * 2):(keylength * 2) + ivlength]
|
||||
|
||||
return key1, key2, hexlify(iv)
|
||||
|
||||
|
||||
def encrypt(self, data, password):
|
||||
|
||||
salt = os.urandom(32)
|
||||
key1, key2, iv = self.gen_key_initctr(password, salt)
|
||||
|
||||
# PKCS#7 PAD DATA http://tools.ietf.org/html/rfc5652#section-6.3
|
||||
bs = AES.block_size
|
||||
padding_length = (bs - len(data) % bs) or bs
|
||||
data += padding_length * chr(padding_length)
|
||||
|
||||
# COUNTER.new PARAMETERS
|
||||
# 1) nbits (integer) - Length of the counter, in bits.
|
||||
# 2) initial_value (integer) - initial value of the counter. "iv" from gen_key_initctr
|
||||
|
||||
ctr = Counter.new(128, initial_value=long(iv, 16))
|
||||
|
||||
# AES.new PARAMETERS
|
||||
# 1) AES key, must be either 16, 24, or 32 bytes long -- "key" from gen_key_initctr
|
||||
# 2) MODE_CTR, is the recommended mode
|
||||
# 3) counter=<CounterObject>
|
||||
|
||||
cipher = AES.new(key1, AES.MODE_CTR, counter=ctr)
|
||||
|
||||
# ENCRYPT PADDED DATA
|
||||
cryptedData = cipher.encrypt(data)
|
||||
|
||||
# COMBINE SALT, DIGEST AND DATA
|
||||
hmac = HMAC.new(key2, cryptedData, SHA256)
|
||||
message = "%s\n%s\n%s" % ( hexlify(salt), hmac.hexdigest(), hexlify(cryptedData) )
|
||||
message = hexlify(message)
|
||||
return message
|
||||
|
||||
def decrypt(self, data, password):
|
||||
|
||||
# SPLIT SALT, DIGEST, AND DATA
|
||||
data = ''.join(data.split("\n"))
|
||||
data = unhexlify(data)
|
||||
salt, cryptedHmac, cryptedData = data.split("\n", 2)
|
||||
salt = unhexlify(salt)
|
||||
cryptedData = unhexlify(cryptedData)
|
||||
|
||||
key1, key2, iv = self.gen_key_initctr(password, salt)
|
||||
|
||||
# EXIT EARLY IF DIGEST DOESN'T MATCH
|
||||
hmacDecrypt = HMAC.new(key2, cryptedData, SHA256)
|
||||
if not self.is_equal(cryptedHmac, hmacDecrypt.hexdigest()):
|
||||
return None
|
||||
|
||||
# SET THE COUNTER AND THE CIPHER
|
||||
ctr = Counter.new(128, initial_value=long(iv, 16))
|
||||
cipher = AES.new(key1, AES.MODE_CTR, counter=ctr)
|
||||
|
||||
# DECRYPT PADDED DATA
|
||||
decryptedData = cipher.decrypt(cryptedData)
|
||||
|
||||
# UNPAD DATA
|
||||
padding_length = ord(decryptedData[-1])
|
||||
decryptedData = decryptedData[:-padding_length]
|
||||
|
||||
return decryptedData
|
||||
|
||||
def is_equal(self, a, b):
|
||||
# http://codahale.com/a-lesson-in-timing-attacks/
|
||||
if len(a) != len(b):
|
||||
return False
|
||||
|
||||
result = 0
|
||||
for x, y in zip(a, b):
|
||||
result |= ord(x) ^ ord(y)
|
||||
return result == 0
|
||||
|
||||
# STDERR not captured to make it easier for users to prompt for input in their scripts
|
||||
p = subprocess.Popen(this_path, stdout=subprocess.PIPE)
|
||||
except OSError as e:
|
||||
raise AnsibleError("Problem running vault password script %s (%s). If this is not a script, remove the executable bit from the file." % (' '.join(this_path), e))
|
||||
stdout, stderr = p.communicate()
|
||||
vault_pass = stdout.strip('\r\n')
|
||||
else:
|
||||
try:
|
||||
f = open(this_path, "rb")
|
||||
vault_pass=f.read().strip()
|
||||
f.close()
|
||||
except (OSError, IOError) as e:
|
||||
raise AnsibleError("Could not read vault password file %s: %s" % (this_path, e))
|
||||
|
||||
return vault_pass
|
||||
|
||||
|
||||
Reference in New Issue
Block a user