Merge remote-tracking branch 'ansible/devel' into devel

This commit is contained in:
Yannig Perré
2015-08-05 11:20:41 +02:00
763 changed files with 24732 additions and 13274 deletions

View File

@@ -24,11 +24,7 @@ CONSUL_RUNNING := $(shell python consul_running.py)
all: parsing test_var_precedence unicode test_templating_settings non_destructive destructive includes check_mode test_hash test_handlers test_group_by test_vault test_tags
parsing:
ansible-playbook bad_parsing.yml -i $(INVENTORY) -e @$(VARS_FILE) $(CREDENTIALS_ARG) -vvv $(TEST_FLAGS) --tags prepare,common,scenario1; [ $$? -eq 3 ]
ansible-playbook bad_parsing.yml -i $(INVENTORY) -e @$(VARS_FILE) $(CREDENTIALS_ARG) -vvv $(TEST_FLAGS) --tags prepare,common,scenario2; [ $$? -eq 3 ]
ansible-playbook bad_parsing.yml -i $(INVENTORY) -e @$(VARS_FILE) $(CREDENTIALS_ARG) -vvv $(TEST_FLAGS) --tags prepare,common,scenario3; [ $$? -eq 3 ]
ansible-playbook bad_parsing.yml -i $(INVENTORY) -e @$(VARS_FILE) $(CREDENTIALS_ARG) -vvv $(TEST_FLAGS) --tags prepare,common,scenario4; [ $$? -eq 3 ]
ansible-playbook bad_parsing.yml -i $(INVENTORY) -e @$(VARS_FILE) $(CREDENTIALS_ARG) -vvv $(TEST_FLAGS) --tags prepare,common,scenario5; [ $$? -eq 3 ]
ansible-playbook bad_parsing.yml -i $(INVENTORY) -e @$(VARS_FILE) $(CREDENTIALS_ARG) -vvv $(TEST_FLAGS) --tags prepare,common,scenario5
ansible-playbook good_parsing.yml -i $(INVENTORY) -e @$(VARS_FILE) $(CREDENTIALS_ARG) -v $(TEST_FLAGS)
includes:
@@ -55,7 +51,8 @@ test_group_by:
ansible-playbook test_group_by.yml -i inventory.group_by -e @$(VARS_FILE) $(CREDENTIALS_ARG) -v $(TEST_FLAGS)
test_handlers:
ansible-playbook test_handlers.yml -i inventory.handlers -e @$(VARS_FILE) $(CREDENTIALS_ARG) -v $(TEST_FLAGS)
ansible-playbook test_handlers.yml --tags scenario1 -i inventory.handlers -e @$(VARS_FILE) $(CREDENTIALS_ARG) -v $(TEST_FLAGS)
[ "$$(ansible-playbook test_handlers.yml --tags scenario2 -l A -i inventory.handlers -e @$(VARS_FILE) $(CREDENTIALS_ARG) -v $(TEST_FLAGS) | grep -Po 'RUNNING HANDLER \[test_handlers : \K[^\]]+')" = "test handler" ]
# Not forcing, should only run on successful host
[ "$$(ansible-playbook test_force_handlers.yml --tags normal -i inventory.handlers -e @$(VARS_FILE) $(CREDENTIALS_ARG) -v $(TEST_FLAGS) | egrep -o CALLED_HANDLER_. | sort | uniq | xargs)" = "CALLED_HANDLER_B" ]
# Forcing from command line
@@ -76,7 +73,7 @@ test_hash:
ANSIBLE_HASH_BEHAVIOUR=merge ansible-playbook test_hash.yml -i $(INVENTORY) $(CREDENTIALS_ARG) -v -e '{"test_hash":{"extra_args":"this is an extra arg"}}'
test_var_precedence:
ansible-playbook test_var_precedence.yml -i $(INVENTORY) $(CREDENTIALS_ARG) -v -e 'extra_var=extra_var' -e 'extra_var_override=extra_var_override'
ansible-playbook test_var_precedence.yml -i $(INVENTORY) $(CREDENTIALS_ARG) $(TEST_FLAGS) -v -e 'extra_var=extra_var' -e 'extra_var_override=extra_var_override'
test_vault:
ansible-playbook test_vault.yml -i $(INVENTORY) $(CREDENTIALS_ARG) -v $(TEST_FLAGS) --vault-password-file $(VAULT_PASSWORD_FILE) --list-tasks
@@ -163,7 +160,7 @@ $(CONSUL_RUNNING):
consul:
ifeq ($(CONSUL_RUNNING), True)
ansible-playbook -i $(INVENTORY) consul.yml ; \
ansible-playbook -i ../../plugins/inventory/consul_io.py consul_inventory.yml
ansible-playbook -i ../../contrib/inventory/consul_io.py consul_inventory.yml
else
@echo "Consul agent is not running locally. To run a cluster locally see http://github.com/sgargan/consul-vagrant"
endif

View File

@@ -138,6 +138,26 @@ def delete_rax_cdb(args):
args.assumeyes)
def _force_delete_rax_scaling_group(manager):
def wrapped(uri):
manager.api.method_delete('%s?force=true' % uri)
return wrapped
def delete_rax_scaling_group(args):
"""Function for deleting Autoscale Groups"""
print ("--- Cleaning Autoscale Groups matching '%s'" % args.match_re)
for region in pyrax.identity.services.autoscale.regions:
asg = pyrax.connect_to_autoscale(region=region)
for group in rax_list_iterator(asg):
if re.search(args.match_re, group.name):
group.manager._delete = \
_force_delete_rax_scaling_group(group.manager)
prompt_and_delete(group,
'Delete matching %s? [y/n]: ' % group,
args.assumeyes)
def main():
if not HAS_PYRAX:
raise SystemExit('The pyrax python module is required for this script')

View File

@@ -14,8 +14,8 @@ pem_file:
project_id:
# Azure Credentials
azure_subscription_id:
azure_cert_path:
azure_subscription_id: "{{ lookup('env', 'AZURE_SUBSCRIPTION_ID') }}"
azure_cert_path: "{{ lookup('env', 'AZURE_CERT_PATH') }}"
# GITHUB SSH private key - a path to a SSH private key for use with github.com
github_ssh_private_key: "{{ lookup('env','HOME') }}/.ssh/id_rsa"

View File

@@ -1,5 +1,5 @@
---
win_output_dir: 'C:/temp/'
win_output_dir: 'C:\ansible_testing'
output_dir: ~/ansible_testing
non_root_test_user: ansible
pip_test_package: epdb

View File

@@ -11,6 +11,15 @@
gather_facts: True
roles:
- { role: test_ping, tags: test_ping }
- { role: test_var_blending, parameterized_beats_default: 1234, tags: test_var_blending }
- { role: test_special_vars, tags: test_special_vars }
- { role: test_ignore_errors, tags: test_ignore_errors }
- { role: test_conditionals, tags: test_conditionals }
- { role: test_iterators, tags: test_iterators }
- { role: test_lookups, tags: test_lookups }
- { role: test_changed_when, tags: test_changed_when }
- { role: test_failed_when, tags: test_failed_when }
- { role: test_handlers, tags: test_handlers }
- { role: test_copy, tags: test_copy }
- { role: test_stat, tags: test_stat }
- { role: test_template, tags: test_template }
@@ -21,20 +30,12 @@
- { role: test_subversion, tags: test_subversion }
- { role: test_git, tags: test_git }
- { role: test_hg, tags: test_hg }
- { role: test_changed_when, tags: test_changed_when }
- { role: test_var_blending, parameterized_beats_default: 1234, tags: test_var_blending }
- { role: test_lineinfile, tags: test_lineinfile }
- { role: test_ignore_errors, tags: test_ignore_errors }
- { role: test_unarchive, tags: test_unarchive }
- { role: test_filters, tags: test_filters }
- { role: test_facts_d, tags: test_facts_d }
- { role: test_conditionals, tags: test_conditionals }
- { role: test_async, tags: test_async }
- { role: test_handlers, tags: test_handlers }
- { role: test_lookups, tags: test_lookups }
- { role: test_iterators, tags: test_iterators }
- { role: test_command_shell, tags: test_command_shell }
- { role: test_failed_when, tags: test_failed_when }
- { role: test_script, tags: test_script }
- { role: test_authorized_key, tags: test_authorized_key }
- { role: test_get_url, tags: test_get_url }

View File

@@ -40,3 +40,6 @@
- role: test_rax_cdb_database
tags: test_rax_cdb_database
- role: test_rax_scaling_group
tags: test_rax_scaling_group

View File

@@ -7,4 +7,12 @@ rackspace_flavor: "performance1-1"
rackspace_keypair_pub: "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDymofzvt86DUA6XSSxc7eDHwUNvcOSmUWjB76jFvhYc6PbS5QmTzBtCka1ORdaW0Z2i3EjfFvzA8WvuY3qP/FpIVDL25ZqZHgxSfGN5pbJ2tAeXK165kNPXBuuISrMhmdLFbRZNn6PwKHEmtrtfEQ3w6ay9+MhqlEr0OX2r6bCXLj+f50QnQXamU6Fm4IpkTsb60osvHNi569Dd8cADEv92oLZpNMa8/MPGnlipjauhzNtEDTUeZwtrAQUXe6CzJ0QmIlyKDglDZLuAKU/VRumo1FRsn4AwJnVsbP2CHBPkbNoYt6LhQiQqXypEIWGmIln0dlO6gZTr3dYC4BVGREl"
resource_prefix: ansible-testing
resource_prefix: "ansible-testing"
rackspace_alt_image_id: "e5575e1a-a519-4e21-9a6b-41207833bd39"
rackspace_alt_image_name: "CentOS 6 (PVHVM)"
rackspace_alt_image_human_id: "centos-6-pvhvm"
rackspace_alt_flavor: "general1-1"
rackspace_wait_timeout: 600

View File

@@ -0,0 +1,3 @@
---
allow_duplicates: yes

View File

@@ -34,7 +34,6 @@
- "'delta' in async_result"
- "'end' in async_result"
- "'finished' in async_result"
- "'invocation' in async_result"
- "'rc' in async_result"
- "'start' in async_result"
- "'stderr' in async_result"

View File

@@ -27,8 +27,8 @@
- name: assert that the authorized_keys file was created
assert:
that:
- ['result.changed == True']
- ['result.state == "file"']
- 'result.changed == True'
- 'result.state == "file"'
# -------------------------------------------------------------
# basic ssh-dss key
@@ -40,9 +40,9 @@
- name: assert that the key was added
assert:
that:
- ['result.changed == True']
- ['result.key == dss_key_basic']
- ['result.key_options == None']
- 'result.changed == True'
- 'result.key == dss_key_basic'
- 'result.key_options == None'
- name: re-add basic ssh-dss key
authorized_key: user=root key="{{ dss_key_basic }}" state=present path="{{output_dir|expanduser}}/authorized_keys"
@@ -51,7 +51,7 @@
- name: assert that nothing changed
assert:
that:
- ['result.changed == False']
- 'result.changed == False'
# -------------------------------------------------------------
# ssh-dss key with an unquoted option
@@ -67,9 +67,9 @@
- name: assert that the key was added
assert:
that:
- ['result.changed == True']
- ['result.key == dss_key_unquoted_option']
- ['result.key_options == None']
- 'result.changed == True'
- 'result.key == dss_key_unquoted_option'
- 'result.key_options == None'
- name: re-add ssh-dss key with an unquoted option
authorized_key:
@@ -82,7 +82,7 @@
- name: assert that nothing changed
assert:
that:
- ['result.changed == False']
- 'result.changed == False'
# -------------------------------------------------------------
# ssh-dss key with a leading command="/bin/foo"
@@ -98,9 +98,9 @@
- name: assert that the key was added
assert:
that:
- ['result.changed == True']
- ['result.key == dss_key_command']
- ['result.key_options == None']
- 'result.changed == True'
- 'result.key == dss_key_command'
- 'result.key_options == None'
- name: re-add ssh-dss key with a leading command
authorized_key:
@@ -113,7 +113,7 @@
- name: assert that nothing changed
assert:
that:
- ['result.changed == False']
- 'result.changed == False'
# -------------------------------------------------------------
# ssh-dss key with a complex quoted leading command
@@ -130,9 +130,9 @@
- name: assert that the key was added
assert:
that:
- ['result.changed == True']
- ['result.key == dss_key_complex_command']
- ['result.key_options == None']
- 'result.changed == True'
- 'result.key == dss_key_complex_command'
- 'result.key_options == None'
- name: re-add ssh-dss key with a complex quoted leading command
authorized_key:
@@ -145,7 +145,7 @@
- name: assert that nothing changed
assert:
that:
- ['result.changed == False']
- 'result.changed == False'
# -------------------------------------------------------------
# ssh-dss key with a command and a single option, which are
@@ -162,9 +162,9 @@
- name: assert that the key was added
assert:
that:
- ['result.changed == True']
- ['result.key == dss_key_command_single_option']
- ['result.key_options == None']
- 'result.changed == True'
- 'result.key == dss_key_command_single_option'
- 'result.key_options == None'
- name: re-add ssh-dss key with a command and a single option
authorized_key:
@@ -177,7 +177,7 @@
- name: assert that nothing changed
assert:
that:
- ['result.changed == False']
- 'result.changed == False'
# -------------------------------------------------------------
# ssh-dss key with a command and multiple other options
@@ -193,9 +193,9 @@
- name: assert that the key was added
assert:
that:
- ['result.changed == True']
- ['result.key == dss_key_command_multiple_options']
- ['result.key_options == None']
- 'result.changed == True'
- 'result.key == dss_key_command_multiple_options'
- 'result.key_options == None'
- name: re-add ssh-dss key with a command and multiple options
authorized_key:
@@ -208,7 +208,7 @@
- name: assert that nothing changed
assert:
that:
- ['result.changed == False']
- 'result.changed == False'
# -------------------------------------------------------------
# ssh-dss key with multiple trailing parts, which are space-
@@ -225,9 +225,9 @@
- name: assert that the key was added
assert:
that:
- ['result.changed == True']
- ['result.key == dss_key_trailing']
- ['result.key_options == None']
- 'result.changed == True'
- 'result.key == dss_key_trailing'
- 'result.key_options == None'
- name: re-add ssh-dss key with trailing parts
authorized_key:
@@ -240,5 +240,5 @@
- name: assert that nothing changed
assert:
that:
- ['result.changed == False']
- 'result.changed == False'

View File

@@ -6,6 +6,9 @@
azure:
register: result
ignore_errors: true
environment:
AZURE_SUBSCRIPTION_ID: ""
AZURE_CERT_PATH: ""
- name: assert failure when called with no credentials
assert:
@@ -14,6 +17,7 @@
- 'result.msg == "No subscription_id provided. Please set ''AZURE_SUBSCRIPTION_ID'' or use the ''subscription_id'' parameter"'
# ============================================================
- name: test credentials
azure:
subscription_id: "{{ subscription_id }}"
@@ -27,6 +31,27 @@
- 'result.failed'
- 'result.msg == "name parameter is required for new instance"'
# ============================================================
- name: test with no password or ssh cert
azure:
subscription_id: "{{ subscription_id }}"
management_cert_path: "{{ cert_path }}"
name: "{{ instance_name }}"
image: "b39f27a8b8c64d52b05eac6a62ebad85__Ubuntu-12_04_4-LTS-amd64-server-20140514-en-us-30GB"
storage_account: "{{ storage_account }}"
user: "{{ user }}"
role_size: "{{ role_size }}"
location: "{{ location }}"
state: present
register: result
ignore_errors: true
- name: assert failure when called with no password or ssh cert
assert:
that:
- 'result.failed'
- 'result.msg == "password or ssh_cert_path parameter is required for new instance"'
# ============================================================
- name: test status=Running (expected changed=true)
azure:
@@ -41,6 +66,7 @@
location: "{{ location }}"
wait: yes
state: present
wait_timeout: 1200
register: result
- name: assert state=Running (expected changed=true)
@@ -56,8 +82,14 @@
subscription_id: "{{ subscription_id }}"
management_cert_path: "{{ cert_path }}"
name: "{{ instance_name }}"
#storage_account: "{{ storage_account }}"
#location: "{{ location }}"
wait: yes
state: absent
wait_timeout: 1200
register: result
- name: assert named deployment changed (expected changed=true)
assert:
that:
- 'result.changed'
- 'result.deployment.name == "{{ instance_name }}"'

View File

@@ -29,27 +29,28 @@
- file: name={{test_file}} state=touch
tags: common
- name: test that we cannot insert arguments
file: path={{ test_file }} {{ test_input }}
failed_when: False # ignore the module, just test the parser
- name: include test that we cannot insert arguments
include: scenario1.yml
tags: scenario1
- name: test that we cannot duplicate arguments
file: path={{ test_file }} owner=test2 {{ test_input }}
failed_when: False # ignore the module, just test the parser
- name: include test that we cannot duplicate arguments
include: scenario2.yml
tags: scenario2
- name: test that we can't do this for the shell module
shell: echo hi {{ chdir }}
failed_when: False
- name: include test that we can't do this for the shell module
include: scenario3.yml
tags: scenario3
- name: test that we can't go all Little Bobby Droptables on a quoted var to add more
file: "name={{ bad_var }}"
failed_when: False
- name: include test that we can't go all Little Bobby Droptables on a quoted var to add more
include: scenario4.yml
tags: scenario4
- name: test that a missing/malformed jinja2 filter fails
debug: msg="{{output_dir|badfiltername}}"
tags: scenario5
register: filter_fail
ignore_errors: yes
- assert:
that:
- filter_fail|failed

View File

@@ -0,0 +1,5 @@
- name: test that we cannot insert arguments
file: path={{ test_file }} {{ test_input }}
failed_when: False # ignore the module, just test the parser
tags: scenario1

View File

@@ -0,0 +1,5 @@
- name: test that we cannot duplicate arguments
file: path={{ test_file }} owner=test2 {{ test_input }}
failed_when: False # ignore the module, just test the parser
tags: scenario2

View File

@@ -0,0 +1,5 @@
- name: test that we can't do this for the shell module
shell: echo hi {{ chdir }}
failed_when: False
tags: scenario3

View File

@@ -0,0 +1,5 @@
- name: test that we can't go all Little Bobby Droptables on a quoted var to add more
file: "name={{ bad_var }}"
failed_when: False
tags: scenario4

View File

@@ -82,25 +82,37 @@
file: path={{output_dir_test}}/afile.txt state=absent
- name: create afile.txt with create_afile.sh via command
shell: "{{output_dir_test | expanduser}}/create_afile.sh {{output_dir_test | expanduser}}/afile.txt creates={{output_dir_test | expanduser}}/afile.txt"
command: "{{output_dir_test | expanduser}}/create_afile.sh {{output_dir_test | expanduser}}/afile.txt creates={{output_dir_test | expanduser}}/afile.txt"
- name: verify that afile.txt is present
file: path={{output_dir_test}}/afile.txt state=file
# removes
- name: remove afile.txt with remote_afile.sh via command
shell: "{{output_dir_test | expanduser}}/remove_afile.sh {{output_dir_test | expanduser}}/afile.txt removes={{output_dir_test | expanduser}}/afile.txt"
- name: verify that afile.txt is absent
file: path={{output_dir_test}}/afile.txt state=absent
- name: re-run previous command using creates with globbing
command: "{{output_dir_test | expanduser}}/create_afile.sh {{output_dir_test | expanduser}}/afile.txt creates={{output_dir_test | expanduser}}/afile.*"
register: command_result3
- name: assert that the file was removed by the script
- name: assert that creates with globbing is working
assert:
that:
- "command_result3.changed != True"
# removes
- name: remove afile.txt with remote_afile.sh via command
command: "{{output_dir_test | expanduser}}/remove_afile.sh {{output_dir_test | expanduser}}/afile.txt removes={{output_dir_test | expanduser}}/afile.txt"
- name: verify that afile.txt is absent
file: path={{output_dir_test}}/afile.txt state=absent
- name: re-run previous command using removes with globbing
command: "{{output_dir_test | expanduser}}/remove_afile.sh {{output_dir_test | expanduser}}/afile.txt removes={{output_dir_test | expanduser}}/afile.*"
register: command_result4
- name: assert that removes with globbing is working
assert:
that:
- "command_result4.changed != True"
##
## shell
##
@@ -115,7 +127,6 @@
- "shell_result0.rc == 0"
- "shell_result0.stderr == ''"
- "shell_result0.stdout == 'win'"
- "not shell_result0.warnings"
# executable
@@ -144,7 +155,6 @@
- "shell_result2.rc == 0"
- "shell_result2.stderr == ''"
- "shell_result2.stdout == 'win'"
- "not shell_result2.warnings"
# creates
@@ -157,25 +167,10 @@
- name: verify that afile.txt is present
file: path={{output_dir_test}}/afile.txt state=file
# removes
# multiline
- name: remove afile.txt using rm
shell: rm {{output_dir_test | expanduser}}/afile.txt removes={{output_dir_test | expanduser}}/afile.txt
register: shell_result4
- name: assert that using rm under shell causes a warning
assert:
that:
- "shell_result4.warnings"
- name: verify that afile.txt is absent
file: path={{output_dir_test}}/afile.txt state=absent
register: shell_result5
- name: assert that the file was removed by the shell
assert:
that:
- "shell_result5.changed == False"
- name: remove test file previously created
file: path={{output_dir_test | expanduser}}/afile.txt state=absent
- name: execute a shell command using a literal multiline block
args:
@@ -189,28 +184,28 @@
| tr -s ' ' \
| cut -f1 -d ' '
echo "this is a second line"
register: shell_result6
register: shell_result5
- debug: var=shell_result6
- debug: var=shell_result5
- name: assert the multiline shell command ran as expected
assert:
that:
- "shell_result6.changed"
- "shell_result6.stdout == '5575bb6b71c9558db0b6fbbf2f19909eeb4e3b98\nthis is a second line'"
- "shell_result5.changed"
- "shell_result5.stdout == '5575bb6b71c9558db0b6fbbf2f19909eeb4e3b98\nthis is a second line'"
- name: execute a shell command using a literal multiline block with arguments in it
shell: |
executable=/bin/bash
creates={{output_dir_test | expanduser}}/afile.txt
echo "test"
register: shell_result7
register: shell_result6
- name: assert the multiline shell command with arguments in it run as expected
assert:
that:
- "shell_result7.changed"
- "shell_result7.stdout == 'test'"
- "shell_result6.changed"
- "shell_result6.stdout == 'test'"
- name: remove the previously created file
file: path={{output_dir_test}}/afile.txt state=absent

View File

@@ -148,6 +148,16 @@
that:
- "result.skipped == true"
- name: test bad conditional 'is undefined'
shell: echo 'testing'
when: test_bare is undefined
register: result
- name: assert bad conditional 'is undefined' did NOT run
assert:
that:
- "result.skipped == true"
- name: test bare conditional
shell: echo 'testing'
when: test_bare
@@ -267,18 +277,18 @@
that:
- "result.changed"
- name: test a with_items loop using a variable with a missing attribute
debug: var=item
with_items: cond_bad_attribute.results
- set_fact: skipped_bad_attribute=True
- block:
- name: test a with_items loop using a variable with a missing attribute
debug: var=item
with_items: "{{cond_bad_attribute.results}}"
register: result
- set_fact: skipped_bad_attribute=False
- name: assert the task was skipped
assert:
that:
- skipped_bad_attribute
when: cond_bad_attribute is defined and 'results' in cond_bad_attribute
register: result
- name: assert the task was skipped
assert:
that:
- "result.results|length == 1"
- "'skipped' in result.results[0]"
- "result.results[0].skipped == True"
- name: test a with_items loop skipping a single item
debug: var=item

View File

@@ -250,3 +250,9 @@
assert:
that:
- replace_follow_result.checksum == target_file_result.stdout
- name: test first available file
copy: dest={{output_dir}}/faf_test
first_available_file:
- doesntexist.txt
- foo.txt

View File

@@ -16,13 +16,54 @@
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
- name: Test failed_when behavior but catch it.
command: /bin/true
failed_when: 2 != 3
register: failed
- name: command rc 0 failed_when_result undef
shell: exit 0
ignore_errors: True
register: result
- name: Assert that failed_when is true.
assert:
- assert:
that:
- "failed.failed_when_result == True"
- "'failed' not in result"
- name: command rc 0 failed_when_result False
shell: exit 0
failed_when: false
ignore_errors: true
register: result
- assert:
that:
- "'failed' in result and not result.failed"
- "'failed_when_result' in result and not result.failed_when_result"
- name: command rc 1 failed_when_result True
shell: exit 1
failed_when: true
ignore_errors: true
register: result
- assert:
that:
- "'failed' in result and result.failed"
- "'failed_when_result' in result and result.failed_when_result"
- name: command rc 1 failed_when_result undef
shell: exit 1
ignore_errors: true
register: result
- assert:
that:
- "'failed' in result and result.failed"
- name: command rc 1 failed_when_result False
shell: exit 1
failed_when: false
ignore_errors: true
register: result
- assert:
that:
- "'failed' in result and not result.failed"
- "'failed_when_result' in result and not result.failed_when_result"

View File

@@ -137,7 +137,7 @@
- name: decide to include or not include selinux tests
include: selinux_tests.yml
when: selinux_installed.stdout != "" and selinux_enabled.stdout != "Disabled"
when: selinux_installed is defined and selinux_installed.stdout != "" and selinux_enabled.stdout != "Disabled"
- name: remote directory foobar
file: path={{output_dir}}/foobar state=absent

View File

@@ -0,0 +1,3 @@
[{
"k": "Quotes \"'\n"
}]

View File

@@ -25,6 +25,25 @@
- name: Verify that we workaround a py26 json bug
template: src=py26json.j2 dest={{output_dir}}/py26json.templated mode=0644
- name: 9851 - Verify that we don't trigger https://github.com/ansible/ansible/issues/9851
copy:
content: " [{{item|to_nice_json}}]"
dest: "{{output_dir}}/9851.out"
with_items:
- {"k": "Quotes \"'\n"}
- name: 9851 - copy known good output into place
copy: src=9851.txt dest={{output_dir}}/9851.txt
- name: 9851 - Compare generated json to known good
shell: diff {{output_dir}}/9851.out {{output_dir}}/9851.txt
register: 9851_diff_result
- name: 9851 - verify generated file matches known good
assert:
that:
- '9851_diff_result.stdout == ""'
- name: fill in a basic template
template: src=foo.j2 dest={{output_dir}}/foo.templated mode=0644
register: template_result
@@ -41,3 +60,11 @@
that:
- 'diff_result.stdout == ""'
- name: Verify human_readable
assert:
that:
- '"10.00 KB" == 10240|human_readable'
- '"97.66 MB" == 102400000|human_readable'
- '"0.10 GB" == 102400000|human_readable(unit="G")'
- '"0.10 Gb" == 102400000|human_readable(isbits=True, unit="G")'

View File

@@ -25,3 +25,70 @@
that:
- result.changed
- '"OK" in result.msg'
- name: test https fetch to a site with mismatched hostname and certificate
get_url:
url: "https://kennethreitz.org/"
dest: "{{ output_dir }}/shouldnotexist.html"
ignore_errors: True
register: result
- stat:
path: "{{ output_dir }}/shouldnotexist.html"
register: stat_result
- name: Assert that the file was not downloaded
assert:
that:
- "result.failed == true"
- "'Certificate does not belong to ' in result.msg"
- "stat_result.stat.exists == false"
- name: test https fetch to a site with mismatched hostname and certificate and validate_certs=no
get_url:
url: "https://kennethreitz.org/"
dest: "{{ output_dir }}/kreitz.html"
validate_certs: no
register: result
- stat:
path: "{{ output_dir }}/kreitz.html"
register: stat_result
- name: Assert that the file was downloaded
assert:
that:
- "result.changed == true"
- "stat_result.stat.exists == true"
# SNI Tests
# SNI is only built into the stdlib from python-2.7.9 onwards
- name: Test that SNI works
get_url:
# A test site that returns a page with information on what SNI information
# the client sent. A failure would have the string: did not send a TLS server name indication extension
url: 'https://foo.sni.velox.ch/'
dest: "{{ output_dir }}/sni.html"
register: get_url_result
ignore_errors: True
- command: "grep 'sent the following TLS server name indication extension' {{ output_dir}}/sni.html"
register: data_result
when: "{{ ansible_python_version | version_compare('2.7.9', '>=') }}"
# If distros start backporting SNI, can make a new conditional based on whether this works:
# python -c 'from ssl import SSLContext'
- debug: msg=get_url_result
- name: Assert that SNI works with this python version
assert:
that:
- 'data_result.rc == 0'
- '"failed" not in get_url_result'
when: "{{ ansible_python_version | version_compare('2.7.9', '>=') }}"
# If the client doesn't support SNI then get_url should have failed with a certificate mismatch
- name: Assert that hostname verification failed because SNI is not supported on this version of python
assert:
that:
- 'get_url_result["failed"]'
when: "{{ ansible_python_version | version_compare('2.7.9', '<') }}"

View File

@@ -97,6 +97,9 @@
that:
result.cmd == "echo foo=bar foo=bar"
- name: raw duplicates, noop
raw: /bin/true foo=bar foo=bar
- name: multi-line inline shell commands (should use script module but hey) are a thing
shell: "{{ multi_line }}"
register: result
@@ -152,17 +155,17 @@
that:
- complex_param == "this is a param in a complex arg with double quotes"
- name: test variable module name
action: "{{ variable_module_name }} msg='this should be debugged'"
register: result
#- name: test variable module name
# action: "{{ variable_module_name }} msg='this should be debugged'"
# register: result
#
#- debug: var=result
- debug: var=result
- name: assert the task with variable module name ran
assert:
that:
- result.invocation.module_name == "debug"
- result.msg == "this should be debugged"
#- name: assert the task with variable module name ran
# assert:
# that:
# - result.invocation.module_name == "debug"
# - result.msg == "this should be debugged"
- name: test conditional includes
include: test_include_conditional.yml

View File

@@ -1,3 +1,5 @@
- name: set handler fact
set_fact:
handler_called: True
- name: test handler
debug: msg="handler called"

View File

@@ -20,19 +20,33 @@
- name: reset handler_called variable to false for all hosts
set_fact:
handler_called: False
tags: scenario1
- name: notify the handler for host A only
shell: echo
notify:
- set handler fact
when: inventory_hostname == 'A'
tags: scenario1
- name: force handler execution now
meta: "flush_handlers"
tags: scenario1
- debug: var=handler_called
tags: scenario1
- name: validate the handler only ran on one host
assert:
that:
- "inventory_hostname == 'A' and handler_called == True or handler_called == False"
tags: scenario1
- name: 'test notify with loop'
debug: msg='a task'
changed_when: item == 1
notify: test handler
with_items:
- 1
- 2
tags: scenario2

View File

@@ -1,10 +1,10 @@
- set_fact:
ca: "{{ a }}"
- debug: var=ca
- set_fact:
cb: "{{b}}"
- debug: var=cb
- set_fact:
cc: "{{ c }}"
- debug: var=cc

View File

@@ -26,12 +26,16 @@
- "cb == '2'"
- "cc == '3'"
# Fact takes precedence over include param as fact is host-specific
- set_fact:
a: 101
b: 102
c: 103
# Params specified via k=v values are strings, while those
# that come from variables will keep the type they were previously.
# Prior to v2.0, facts took priority over include params, however
# this is no longer the case.
- include: included_task1.yml a={{a}} b={{b}} c=103
- name: verify variable include params
@@ -39,7 +43,7 @@
that:
- "ca == 101"
- "cb == 102"
- "cc == 103"
- "cc == '103'"
# Test that strings are not turned into numbers
- set_fact:
@@ -57,26 +61,23 @@
- "cc == '103'"
# now try long form includes
#
# FIXME: not sure if folks were using this, or if vars were top level, but seems like
# it should be a thing.
#
#- include: included_task1.yml
# vars:
# a: 201
# b: 202
# c: 203
#
#- debug: var=a
#- debug: var=b
#- debug: var=c
#
#- name: verify long-form include params
# assert:
# that:
# - "ca == 201"
# - "cb == 202"
# - "cc == 203"
- include: included_task1.yml
vars:
a: 201
b: 202
c: 203
- debug: var=a
- debug: var=b
- debug: var=c
- name: verify long-form include params
assert:
that:
- "ca == 201"
- "cb == 202"
- "cc == 203"
- name: test handlers with includes
shell: echo 1

View File

@@ -39,7 +39,7 @@
set_fact: "{{ item.0 + item.1 }}=x"
with_nested:
- [ 'a', 'b' ]
- [ 'c', 'd' ]
- [ 'c', 'd' ]
- debug: var=ac
- debug: var=ad
@@ -81,10 +81,15 @@
with_sequence: count=0
register: count_of_zero
- name: test with_sequence count 1
set_fact: "{{ 'x' + item }}={{ item }}"
with_sequence: count=1
register: count_of_one
- assert:
that:
- count_of_zero | skipped
- not count_of_zero | failed
- not count_of_one | skipped
# WITH_RANDOM_CHOICE
@@ -115,6 +120,39 @@
- "_ye == 'e'"
- "_yf == 'f'"
- name: test with_subelements in subkeys
set_fact: "{{ '_'+ item.0.id + item.1 }}={{ item.1 }}"
with_subelements:
- element_data
- the.sub.key.list
- name: verify with_subelements in subkeys results
assert:
that:
- "_xq == 'q'"
- "_xr == 'r'"
- "_yi == 'i'"
- "_yo == 'o'"
- name: test with_subelements with missing key or subkey
set_fact: "{{ '_'+ item.0.id + item.1 }}={{ item.1 }}"
with_subelements:
- element_data_missing
- the.sub.key.list
- skip_missing: yes
register: _subelements_missing_subkeys
- debug: var=_subelements_missing_subkeys
- debug: var=_subelements_missing_subkeys.results|length
- name: verify with_subelements in subkeys results
assert:
that:
- _subelements_missing_subkeys.skipped is not defined
- _subelements_missing_subkeys.results|length == 2
- "_xk == 'k'"
- "_xl == 'l'"
# WITH_TOGETHER
- name: test with_together

View File

@@ -3,7 +3,41 @@ element_data:
the_list:
- "f"
- "d"
the:
sub:
key:
list:
- "q"
- "r"
- id: y
the_list:
- "e"
- "f"
the:
sub:
key:
list:
- "i"
- "o"
element_data_missing:
- id: x
the_list:
- "f"
- "d"
the:
sub:
key:
list:
- "k"
- "l"
- id: y
the_list:
- "f"
- "d"
- id: z
the_list:
- "e"
- "f"
the:
sub:
key:

View File

@@ -225,7 +225,7 @@
- "result.msg == 'line added'"
- name: insert a multiple lines at the end of the file
lineinfile: dest={{output_dir}}/test.txt state=present line="This is a line\nwith \\\n character" insertafter="EOF"
lineinfile: dest={{output_dir}}/test.txt state=present line="This is a line\nwith \\n character" insertafter="EOF"
register: result
- name: assert that the multiple lines was inserted

View File

@@ -125,7 +125,72 @@
- "bare_var.results[0].item == 1"
- "bare_var.results[1].item == 2"
- name: use list with bare strings in it
debug: msg={{item}}
with_items:
- things2
- things1
- name: use list with undefined var in it
debug: msg={{item}}
with_items: things2
ignore_errors: True
# BUG #10073 nested template handling
- name: set variable that clashes
set_fact:
LOGNAME: foobar
- name: get LOGNAME environment var value
shell: echo {{ '$LOGNAME' }}
register: known_var_value
- name: do the lookup for env LOGNAME
set_fact:
test_val: "{{ lookup('env', 'LOGNAME') }}"
- debug: var=test_val
- name: compare values
assert:
that:
- "test_val == known_var_value.stdout"
- name: set with_dict
shell: echo "{{ item.key + '=' + item.value }}"
with_dict: "{{ mydict }}"
# URL Lookups
- name: Test that retrieving a url works
set_fact:
web_data: "{{ lookup('url', 'https://gist.githubusercontent.com/abadger/9858c22712f62a8effff/raw/43dd47ea691c90a5fa7827892c70241913351963/test') }}"
- name: Assert that the url was retrieved
assert:
that:
- "'one' in web_data"
- name: Test that retrieving a url with invalid cert fails
set_fact:
web_data: "{{ lookup('url', 'https://kennethreitz.org/') }}"
ignore_errors: True
register: url_invalid_cert
- assert:
that:
- "url_invalid_cert.failed"
- "'Error validating the server' in url_invalid_cert.msg"
- name: Test that retrieving a url with invalid cert with validate_certs=False works
set_fact:
web_data: "{{ lookup('url', 'https://kennethreitz.org/', validate_certs=False) }}"
register: url_no_validate_cert
- assert:
that:
- "'kennethreitz.org' in web_data"

View File

@@ -0,0 +1,3 @@
mydict:
mykey1: myval1
mykey2: myval2

View File

@@ -17,6 +17,11 @@
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# ============================================================
- name: make sure the test database is not there
command: mysql "-e drop database '{{db_name}}';"
ignore_errors: True
- name: test state=present for a database name (expect changed=true)
mysql_db: name={{ db_name }} state=present
register: result

View File

@@ -30,12 +30,13 @@
command: mysql "-e SHOW GRANTS FOR '{{ user_name_2 }}'@'localhost';"
register: user_password_old
- name: update user2 state=present with same password (expect changed=false)
mysql_user: name={{ user_name_2 }} password={{ user_password_2 }} priv=*.*:ALL state=present
register: result
- name: assert output user2 was not updated
assert: { that: "result.changed == false" }
# FIXME: not sure why this is failing, but it looks like it should expect changed=true
#- name: update user2 state=present with same password (expect changed=false)
# mysql_user: name={{ user_name_2 }} password={{ user_password_2 }} priv=*.*:ALL state=present
# register: result
#
#- name: assert output user2 was not updated
# assert: { that: "result.changed == false" }
- include: assert_user.yml user_name={{user_name_2}} priv='ALL PRIVILEGES'

View File

@@ -119,6 +119,7 @@
name: "{{ resource_prefix }}-1"
state: absent
wait: true
wait_timeout: "{{ rackspace_wait_timeout }}"
register: rax
- name: "Validate delete integration 1"
@@ -141,6 +142,7 @@
flavor: "{{ rackspace_flavor }}"
name: "{{ resource_prefix }}-2"
wait: true
wait_timeout: "{{ rackspace_wait_timeout }}"
register: rax
- name: Validate rax basic idepmpotency 1
@@ -163,6 +165,7 @@
flavor: "{{ rackspace_flavor }}"
name: "{{ resource_prefix }}-2"
wait: true
wait_timeout: "{{ rackspace_wait_timeout }}"
register: rax
- name: Validate rax basic idempotency 2
@@ -185,6 +188,7 @@
name: "{{ resource_prefix }}-2"
state: absent
wait: true
wait_timeout: "{{ rackspace_wait_timeout }}"
register: rax
- name: "Validate delete integration 2"
@@ -211,6 +215,7 @@
meta:
foo: bar
wait: true
wait_timeout: "{{ rackspace_wait_timeout }}"
register: rax
- name: Validate rax basic idepmpotency with meta 1
@@ -236,6 +241,7 @@
meta:
foo: bar
wait: true
wait_timeout: "{{ rackspace_wait_timeout }}"
register: rax
- name: Validate rax basic idempotency with meta 2
@@ -260,6 +266,7 @@
meta:
foo: bar
wait: true
wait_timeout: "{{ rackspace_wait_timeout }}"
register: rax
- name: "Validate delete integration 3"
@@ -285,6 +292,7 @@
name: "{{ resource_prefix }}-4"
count: 2
wait: true
wait_timeout: "{{ rackspace_wait_timeout }}"
register: rax
- name: Validate rax basic idepmpotency multi server 1
@@ -306,6 +314,7 @@
name: "{{ resource_prefix }}-4"
count: 2
wait: true
wait_timeout: "{{ rackspace_wait_timeout }}"
register: rax
- name: Validate rax basic idempotency multi server 2
@@ -327,6 +336,7 @@
name: "{{ resource_prefix }}-4"
count: 3
wait: true
wait_timeout: "{{ rackspace_wait_timeout }}"
register: rax
- name: Validate rax basic idempotency multi server 3
@@ -349,6 +359,7 @@
count: 3
state: absent
wait: true
wait_timeout: "{{ rackspace_wait_timeout }}"
register: rax
- name: "Validate delete integration 4"
@@ -375,6 +386,7 @@
count: 2
group: "{{ resource_prefix }}-5"
wait: true
wait_timeout: "{{ rackspace_wait_timeout }}"
register: rax
- name: Validate rax multi server group without exact_count 1
@@ -398,6 +410,7 @@
count: 2
group: "{{ resource_prefix }}-5"
wait: true
wait_timeout: "{{ rackspace_wait_timeout }}"
state: absent
register: rax
@@ -425,6 +438,7 @@
count: 2
group: "{{ resource_prefix }}-6"
wait: true
wait_timeout: "{{ rackspace_wait_timeout }}"
register: rax
- name: Validate rax multi server group without exact_count non-idempotency 1
@@ -448,6 +462,7 @@
count: 2
group: "{{ resource_prefix }}-6"
wait: true
wait_timeout: "{{ rackspace_wait_timeout }}"
register: rax
- name: Validate rax multi server group without exact_count non-idempotency 2
@@ -470,6 +485,7 @@
count: 4
group: "{{ resource_prefix }}-6"
wait: true
wait_timeout: "{{ rackspace_wait_timeout }}"
state: absent
register: rax
@@ -498,6 +514,7 @@
exact_count: true
group: "{{ resource_prefix }}-7"
wait: true
wait_timeout: "{{ rackspace_wait_timeout }}"
register: rax
- name: Validate rax multi server group with exact_count 1
@@ -522,6 +539,7 @@
exact_count: true
group: "{{ resource_prefix }}-7"
wait: true
wait_timeout: "{{ rackspace_wait_timeout }}"
register: rax
- name: Validate rax multi server group with exact_count 2
@@ -545,6 +563,7 @@
exact_count: true
group: "{{ resource_prefix }}-7"
wait: true
wait_timeout: "{{ rackspace_wait_timeout }}"
register: rax
- name: Validate rax multi server group with exact_count 3
@@ -570,6 +589,7 @@
exact_count: true
group: "{{ resource_prefix }}-7"
wait: true
wait_timeout: "{{ rackspace_wait_timeout }}"
register: rax
- name: "Validate delete integration 7"
@@ -597,6 +617,7 @@
group: "{{ resource_prefix }}-8"
auto_increment: false
wait: true
wait_timeout: "{{ rackspace_wait_timeout }}"
register: rax
- name: Validate rax multi server group without exact_count and disabled auto_increment 1
@@ -621,6 +642,7 @@
group: "{{ resource_prefix }}-8"
auto_increment: false
wait: true
wait_timeout: "{{ rackspace_wait_timeout }}"
state: absent
register: rax
@@ -649,6 +671,7 @@
exact_count: true
group: "{{ resource_prefix }}-9"
wait: true
wait_timeout: "{{ rackspace_wait_timeout }}"
register: rax
- name: Validate rax multi server group with exact_count and no printf 1
@@ -673,6 +696,7 @@
exact_count: true
group: "{{ resource_prefix }}-9"
wait: true
wait_timeout: "{{ rackspace_wait_timeout }}"
register: rax
- name: "Validate delete integration 9"
@@ -701,6 +725,7 @@
exact_count: true
group: "{{ resource_prefix }}-10"
wait: true
wait_timeout: "{{ rackspace_wait_timeout }}"
register: rax
- name: Validate rax multi server group with exact_count and offset 1
@@ -726,6 +751,7 @@
exact_count: true
group: "{{ resource_prefix }}-10"
wait: true
wait_timeout: "{{ rackspace_wait_timeout }}"
register: rax
- name: "Validate delete integration 10"
@@ -754,6 +780,7 @@
exact_count: true
group: "{{ resource_prefix }}-11"
wait: true
wait_timeout: "{{ rackspace_wait_timeout }}"
register: rax
- name: Validate rax multi server group with exact_count and offset 1
@@ -779,6 +806,7 @@
exact_count: true
group: "{{ resource_prefix }}-11"
wait: true
wait_timeout: "{{ rackspace_wait_timeout }}"
register: rax
- name: "Validate delete integration 11"
@@ -803,6 +831,7 @@
flavor: "{{ rackspace_flavor }}"
name: "{{ resource_prefix }}-12"
wait: true
wait_timeout: "{{ rackspace_wait_timeout }}"
register: rax
- name: Validate rax instance_ids absent 1 (create)
@@ -827,6 +856,7 @@
- "{{ rax.success.0.rax_id }}"
state: absent
wait: true
wait_timeout: "{{ rackspace_wait_timeout }}"
register: rax2
- name: Validate rax instance_ids absent 2 (delete)

View File

@@ -8,7 +8,7 @@
assert:
that:
- rax_cbs|failed
- rax_cbs.msg == 'missing required arguments: name'
- 'rax_cbs.msg == "missing required arguments: name"'
# ============================================================
@@ -55,6 +55,7 @@
region: "{{ rackspace_region }}"
name: "{{ resource_prefix }}-1"
wait: true
wait_timeout: "{{ rackspace_wait_timeout }}"
register: rax_cbs
- name: Validate rax_cbs creds, region and name
@@ -116,6 +117,7 @@
name: "{{ resource_prefix }}-2"
size: 150
wait: true
wait_timeout: "{{ rackspace_wait_timeout }}"
register: rax_cbs
- name: Validate rax_cbs creds, region and valid size
@@ -163,7 +165,7 @@
assert:
that:
- rax_cbs|failed
- "rax_cbs.msg == 'value of volume_type must be one of: SSD,SATA, got: fail'"
- 'rax_cbs.msg == "value of volume_type must be one of: SSD,SATA, got: fail"'
# ============================================================
@@ -177,6 +179,7 @@
name: "{{ resource_prefix }}-3"
volume_type: SSD
wait: true
wait_timeout: "{{ rackspace_wait_timeout }}"
register: rax_cbs
- name: Validate rax_cbs creds, region and valid volume_size
@@ -218,6 +221,7 @@
name: "{{ resource_prefix }}-4"
description: "{{ resource_prefix }}-4 description"
wait: true
wait_timeout: "{{ rackspace_wait_timeout }}"
register: rax_cbs
- name: Validate rax_cbs creds, region and description
@@ -261,6 +265,7 @@
meta:
foo: bar
wait: true
wait_timeout: "{{ rackspace_wait_timeout }}"
register: rax_cbs
- name: Validate rax_cbs creds, region and meta
@@ -302,6 +307,7 @@
region: "{{ rackspace_region }}"
name: "{{ resource_prefix }}-6"
wait: true
wait_timeout: "{{ rackspace_wait_timeout }}"
register: rax_cbs_1
- name: Validate rax_cbs with idempotency 1

View File

@@ -8,7 +8,7 @@
assert:
that:
- rax_cbs_attachments|failed
- rax_cbs_attachments.msg == 'missing required arguments: server,volume,device'
- 'rax_cbs_attachments.msg == "missing required arguments: server,volume,device"'
# ============================================================
@@ -80,6 +80,7 @@
region: "{{ rackspace_region }}"
name: "{{ resource_prefix }}-rax_cbs_attachments"
wait: true
wait_timeout: "{{ rackspace_wait_timeout }}"
register: rax_cbs
- name: Validate volume build
@@ -102,6 +103,7 @@
flavor: "{{ rackspace_flavor }}"
name: "{{ resource_prefix }}-rax_cbs_attachments"
wait: true
wait_timeout: "{{ rackspace_wait_timeout }}"
register: rax
- name: Validate CloudServer build
@@ -147,6 +149,7 @@
volume: "{{ rax_cbs.volume.id }}"
device: /dev/xvde
wait: true
wait_timeout: "{{ rackspace_wait_timeout }}"
register: rax_cbs_attachments
- name: Validate rax_cbs_attachments creds, region, server, volume and device (valid)
@@ -166,6 +169,7 @@
volume: "{{ rax_cbs.volume.id }}"
device: /dev/xvde
wait: true
wait_timeout: "{{ rackspace_wait_timeout }}"
register: rax_cbs_attachments
- name: Validate idempotent present test
@@ -183,6 +187,7 @@
volume: "{{ rax_cbs.volume.id }}"
device: /dev/xvde
wait: true
wait_timeout: "{{ rackspace_wait_timeout }}"
state: absent
register: rax_cbs_attachments
@@ -202,6 +207,7 @@
volume: "{{ rax_cbs.volume.id }}"
device: /dev/xvde
wait: true
wait_timeout: "{{ rackspace_wait_timeout }}"
state: absent
register: rax_cbs_attachments
@@ -242,6 +248,7 @@
instance_ids: "{{ rax.instances[0].id }}"
state: absent
wait: true
wait_timeout: "{{ rackspace_wait_timeout }}"
register: rax
- name: "Validate delete"

View File

@@ -8,7 +8,7 @@
assert:
that:
- rax_cdb|failed
- rax_cdb.msg == 'missing required arguments: name'
- 'rax_cdb.msg == "missing required arguments: name"'
# ============================================================
@@ -60,7 +60,7 @@
assert:
that:
- rax_cdb|failed
- rax_cdb.msg == 'missing required arguments: name'
- 'rax_cdb.msg == "missing required arguments: name"'
# ============================================================
@@ -73,6 +73,7 @@
region: "{{ rackspace_region }}"
name: "{{ resource_prefix }}-1"
wait: true
wait_timeout: "{{ rackspace_wait_timeout }}"
register: rax_cdb
- name: Validate rax_cdb with creds, region and name
@@ -92,6 +93,7 @@
name: "{{ resource_prefix }}-1"
state: absent
wait: true
wait_timeout: "{{ rackspace_wait_timeout }}"
register: rax_cdb
- name: "Validate delete integration 1"
@@ -113,6 +115,7 @@
region: "{{ rackspace_region }}"
name: "{{ resource_prefix }}-2"
wait: true
wait_timeout: "{{ rackspace_wait_timeout }}"
register: rax_cdb
- name: Validate rax_cdb idempotent test 1
@@ -130,6 +133,7 @@
region: "{{ rackspace_region }}"
name: "{{ resource_prefix }}-2"
wait: true
wait_timeout: "{{ rackspace_wait_timeout }}"
register: rax_cdb
- name: Validate rax_cdb idempotent test 2
@@ -148,6 +152,7 @@
name: "{{ resource_prefix }}-2"
state: absent
wait: true
wait_timeout: "{{ rackspace_wait_timeout }}"
register: rax_cdb
- name: "Validate delete integration 2"
@@ -167,6 +172,7 @@
region: "{{ rackspace_region }}"
name: "{{ resource_prefix }}-3"
wait: true
wait_timeout: "{{ rackspace_wait_timeout }}"
register: rax_cdb
- name: Validate rax_cdb resize volume 1
@@ -185,6 +191,7 @@
name: "{{ resource_prefix }}-3"
volume: 3
wait: true
wait_timeout: "{{ rackspace_wait_timeout }}"
wait_timeout: 600
register: rax_cdb
@@ -204,6 +211,7 @@
name: "{{ resource_prefix }}-3"
state: absent
wait: true
wait_timeout: "{{ rackspace_wait_timeout }}"
register: rax_cdb
- name: "Validate delete integration 3"
@@ -223,6 +231,7 @@
region: "{{ rackspace_region }}"
name: "{{ resource_prefix }}-4"
wait: true
wait_timeout: "{{ rackspace_wait_timeout }}"
register: rax_cdb
- name: Validate rax_cdb resize flavor 1
@@ -241,6 +250,7 @@
name: "{{ resource_prefix }}-4"
flavor: 2
wait: true
wait_timeout: "{{ rackspace_wait_timeout }}"
wait_timeout: 600
register: rax_cdb
@@ -260,6 +270,7 @@
name: "{{ resource_prefix }}-4"
state: absent
wait: true
wait_timeout: "{{ rackspace_wait_timeout }}"
register: rax_cdb
- name: "Validate delete integration 4"

View File

@@ -8,7 +8,7 @@
assert:
that:
- rax_cdb_database|failed
- rax_cdb_database.msg == 'missing required arguments: name,cdb_id'
- 'rax_cdb_database.msg == "missing required arguments: name,cdb_id"'
# ============================================================
@@ -24,7 +24,7 @@
assert:
that:
- rax_cdb_database|failed
- rax_cdb_database.msg == 'missing required arguments: cdb_id'
- 'rax_cdb_database.msg == "missing required arguments: cdb_id"'
# ============================================================
@@ -92,6 +92,7 @@
region: "{{ rackspace_region }}"
name: "{{ resource_prefix }}-rax_cdb_database"
wait: true
wait_timeout: "{{ rackspace_wait_timeout }}"
register: rax_cdb
- name: Validate build
@@ -204,6 +205,7 @@
name: "{{ resource_prefix }}-rax_cdb_database"
state: absent
wait: true
wait_timeout: "{{ rackspace_wait_timeout }}"
register: rax_cdb
- name: Validate Delete

View File

@@ -8,7 +8,7 @@
assert:
that:
- rax_clb|failed
- rax_clb.msg == 'missing required arguments: name'
- 'rax_clb.msg == "missing required arguments: name"'
# ============================================================
@@ -60,7 +60,7 @@
assert:
that:
- rax_clb|failed
- rax_clb.msg == 'missing required arguments: name'
- 'rax_clb.msg == "missing required arguments: name"'
# ============================================================
@@ -73,6 +73,7 @@
region: "{{ rackspace_region }}"
name: "{{ resource_prefix }}-1"
wait: true
wait_timeout: "{{ rackspace_wait_timeout }}"
register: rax_clb
- name: Validate rax_clb with creds, region and name
@@ -95,6 +96,7 @@
name: "{{ resource_prefix }}-1"
state: absent
wait: true
wait_timeout: "{{ rackspace_wait_timeout }}"
register: rax_clb
- name: "Validate delete integration 1"
@@ -116,6 +118,7 @@
name: "{{ resource_prefix }}-2"
protocol: TCP
wait: true
wait_timeout: "{{ rackspace_wait_timeout }}"
register: rax_clb
- name: Validate rax_clb with creds, region, name and protocol
@@ -137,6 +140,7 @@
name: "{{ resource_prefix }}-2"
state: absent
wait: true
wait_timeout: "{{ rackspace_wait_timeout }}"
register: rax_clb
- name: "Validate delete integration 2"
@@ -158,6 +162,7 @@
protocol: TCP
port: 8080
wait: true
wait_timeout: "{{ rackspace_wait_timeout }}"
register: rax_clb
- name: Validate rax_clb with creds, region, name, protocol and port
@@ -179,6 +184,7 @@
name: "{{ resource_prefix }}-3"
state: absent
wait: true
wait_timeout: "{{ rackspace_wait_timeout }}"
register: rax_clb
- name: "Validate delete integration 3"
@@ -201,6 +207,7 @@
port: 8080
type: SERVICENET
wait: true
wait_timeout: "{{ rackspace_wait_timeout }}"
register: rax_clb
- name: Validate rax_clb with creds, region, name, protocol and type
@@ -222,6 +229,7 @@
name: "{{ resource_prefix }}-4"
state: absent
wait: true
wait_timeout: "{{ rackspace_wait_timeout }}"
register: rax_clb
- name: "Validate delete integration 4"
@@ -245,6 +253,7 @@
type: SERVICENET
timeout: 1
wait: true
wait_timeout: "{{ rackspace_wait_timeout }}"
ignore_errors: true
register: rax_clb
@@ -269,6 +278,7 @@
type: SERVICENET
timeout: 60
wait: true
wait_timeout: "{{ rackspace_wait_timeout }}"
register: rax_clb
- name: Validate rax_clb with creds, region, name, protocol, type and timeout
@@ -290,6 +300,7 @@
name: "{{ resource_prefix }}-5"
state: absent
wait: true
wait_timeout: "{{ rackspace_wait_timeout }}"
register: rax_clb
- name: "Validate delete integration 5"
@@ -314,6 +325,7 @@
timeout: 60
algorithm: RANDOM
wait: true
wait_timeout: "{{ rackspace_wait_timeout }}"
register: rax_clb
- name: Validate rax_clb with creds, region, name, protocol, type, timeout and algorithm
@@ -336,6 +348,7 @@
name: "{{ resource_prefix }}-6"
state: absent
wait: true
wait_timeout: "{{ rackspace_wait_timeout }}"
register: rax_clb
- name: "Validate delete integration 6"
@@ -357,6 +370,7 @@
type: BAD
timeout: 1
wait: true
wait_timeout: "{{ rackspace_wait_timeout }}"
ignore_errors: true
register: rax_clb
@@ -364,7 +378,7 @@
assert:
that:
- rax_clb|failed
- "rax_clb.msg == 'value of type must be one of: PUBLIC,SERVICENET, got: BAD'"
- 'rax_clb.msg == "value of type must be one of: PUBLIC,SERVICENET, got: BAD"'
# ============================================================
@@ -379,6 +393,7 @@
protocol: BAD
timeout: 1
wait: true
wait_timeout: "{{ rackspace_wait_timeout }}"
ignore_errors: true
register: rax_clb
@@ -386,7 +401,7 @@
assert:
that:
- rax_clb|failed
- "rax_clb.msg == 'value of protocol must be one of: DNS_TCP,DNS_UDP,FTP,HTTP,HTTPS,IMAPS,IMAPv4,LDAP,LDAPS,MYSQL,POP3,POP3S,SMTP,TCP,TCP_CLIENT_FIRST,UDP,UDP_STREAM,SFTP, got: BAD'"
- 'rax_clb.msg == "value of protocol must be one of: DNS_TCP,DNS_UDP,FTP,HTTP,HTTPS,IMAPS,IMAPv4,LDAP,LDAPS,MYSQL,POP3,POP3S,SMTP,TCP,TCP_CLIENT_FIRST,UDP,UDP_STREAM,SFTP, got: BAD"'
# ============================================================
@@ -401,6 +416,7 @@
algorithm: BAD
timeout: 1
wait: true
wait_timeout: "{{ rackspace_wait_timeout }}"
ignore_errors: true
register: rax_clb
@@ -408,7 +424,7 @@
assert:
that:
- rax_clb|failed
- "rax_clb.msg == 'value of algorithm must be one of: RANDOM,LEAST_CONNECTIONS,ROUND_ROBIN,WEIGHTED_LEAST_CONNECTIONS,WEIGHTED_ROUND_ROBIN, got: BAD'"
- 'rax_clb.msg == "value of algorithm must be one of: RANDOM,LEAST_CONNECTIONS,ROUND_ROBIN,WEIGHTED_LEAST_CONNECTIONS,WEIGHTED_ROUND_ROBIN, got: BAD"'
# ============================================================
@@ -428,6 +444,7 @@
meta:
foo: bar
wait: true
wait_timeout: "{{ rackspace_wait_timeout }}"
register: rax_clb
- name: Validate rax_clb with creds, region, name, protocol, type, timeout, algorithm and metadata
@@ -451,6 +468,7 @@
name: "{{ resource_prefix }}-7"
state: absent
wait: true
wait_timeout: "{{ rackspace_wait_timeout }}"
register: rax_clb
- name: "Validate delete integration 7"
@@ -470,6 +488,7 @@
region: "{{ rackspace_region }}"
name: "{{ resource_prefix }}-8-HTTP"
wait: true
wait_timeout: "{{ rackspace_wait_timeout }}"
register: rax_clb_http
- name: Validate rax_clb with shared VIP HTTP
@@ -489,6 +508,7 @@
protocol: HTTPS
port: 443
wait: true
wait_timeout: "{{ rackspace_wait_timeout }}"
vip_id: "{{ (rax_clb_http.balancer.virtual_ips|first).id }}"
register: rax_clb_https
@@ -508,6 +528,7 @@
name: "{{ resource_prefix }}-8-HTTP"
state: absent
wait: true
wait_timeout: "{{ rackspace_wait_timeout }}"
register: rax_clb_http
- name: "Delete integration 8 HTTPS"
@@ -518,6 +539,7 @@
name: "{{ resource_prefix }}-8-HTTPS"
state: absent
wait: true
wait_timeout: "{{ rackspace_wait_timeout }}"
register: rax_clb_http
- name: "Validate delete integration 8"
@@ -537,6 +559,7 @@
region: "{{ rackspace_region }}"
name: "{{ resource_prefix }}-9"
wait: true
wait_timeout: "{{ rackspace_wait_timeout }}"
register: rax_clb_p1
- name: Validate rax_clb with updated protocol 1
@@ -555,6 +578,7 @@
name: "{{ resource_prefix }}-9"
protocol: TCP
wait: true
wait_timeout: "{{ rackspace_wait_timeout }}"
register: rax_clb_p2
- name: Validate rax_clb with updated protocol 2
@@ -574,6 +598,7 @@
name: "{{ resource_prefix }}-9"
state: absent
wait: true
wait_timeout: "{{ rackspace_wait_timeout }}"
register: rax_clb
- name: "Validate delete integration 9"
@@ -592,6 +617,7 @@
region: "{{ rackspace_region }}"
name: "{{ resource_prefix }}-10"
wait: true
wait_timeout: "{{ rackspace_wait_timeout }}"
register: rax_clb_a1
- name: Validate rax_clb with updated algorithm 1
@@ -609,6 +635,7 @@
name: "{{ resource_prefix }}-10"
algorithm: RANDOM
wait: true
wait_timeout: "{{ rackspace_wait_timeout }}"
register: rax_clb_a2
- name: Validate rax_clb with updated algorithm 2
@@ -628,6 +655,7 @@
name: "{{ resource_prefix }}-10"
state: absent
wait: true
wait_timeout: "{{ rackspace_wait_timeout }}"
register: rax_clb
- name: "Validate delete integration 10"
@@ -647,6 +675,7 @@
region: "{{ rackspace_region }}"
name: "{{ resource_prefix }}-11"
wait: true
wait_timeout: "{{ rackspace_wait_timeout }}"
register: rax_clb_1
- name: Validate rax_clb with updated port 1
@@ -664,6 +693,7 @@
name: "{{ resource_prefix }}-11"
port: 8080
wait: true
wait_timeout: "{{ rackspace_wait_timeout }}"
register: rax_clb_2
- name: Validate rax_clb with updated port 2
@@ -683,6 +713,7 @@
name: "{{ resource_prefix }}-11"
state: absent
wait: true
wait_timeout: "{{ rackspace_wait_timeout }}"
register: rax_clb
- name: "Validate delete integration 11"
@@ -702,6 +733,7 @@
region: "{{ rackspace_region }}"
name: "{{ resource_prefix }}-12"
wait: true
wait_timeout: "{{ rackspace_wait_timeout }}"
register: rax_clb_1
- name: Validate rax_clb with updated timeout 1
@@ -719,6 +751,7 @@
name: "{{ resource_prefix }}-12"
timeout: 60
wait: true
wait_timeout: "{{ rackspace_wait_timeout }}"
register: rax_clb_2
- name: Validate rax_clb with updated timeout 2
@@ -738,6 +771,7 @@
name: "{{ resource_prefix }}-12"
state: absent
wait: true
wait_timeout: "{{ rackspace_wait_timeout }}"
register: rax_clb
- name: "Validate delete integration 12"
@@ -757,6 +791,7 @@
region: "{{ rackspace_region }}"
name: "{{ resource_prefix }}-13"
wait: true
wait_timeout: "{{ rackspace_wait_timeout }}"
register: rax_clb_1
- name: Validate rax_clb with invalid updated type 1
@@ -773,6 +808,7 @@
name: "{{ resource_prefix }}-13"
type: SERVICENET
wait: true
wait_timeout: "{{ rackspace_wait_timeout }}"
register: rax_clb_2
ignore_errors: true
@@ -790,6 +826,7 @@
name: "{{ resource_prefix }}-13"
state: absent
wait: true
wait_timeout: "{{ rackspace_wait_timeout }}"
register: rax_clb
- name: "Validate delete integration 13"
@@ -809,6 +846,7 @@
region: "{{ rackspace_region }}"
name: "{{ resource_prefix }}-14"
wait: true
wait_timeout: "{{ rackspace_wait_timeout }}"
register: rax_clb_1
- name: Validate rax_clb with updated meta 1
@@ -827,6 +865,7 @@
meta:
foo: bar
wait: true
wait_timeout: "{{ rackspace_wait_timeout }}"
register: rax_clb_2
- name: Validate rax_clb with updated meta 2
@@ -847,6 +886,7 @@
name: "{{ resource_prefix }}-14"
state: absent
wait: true
wait_timeout: "{{ rackspace_wait_timeout }}"
register: rax_clb
- name: "Validate delete integration 14"

View File

@@ -8,7 +8,7 @@
assert:
that:
- rax_clb_nodes|failed
- rax_clb_nodes.msg == 'missing required arguments: load_balancer_id'
- 'rax_clb_nodes.msg == "missing required arguments: load_balancer_id"'
# ============================================================
@@ -74,6 +74,7 @@
region: "{{ rackspace_region }}"
name: "{{ resource_prefix }}-clb"
wait: true
wait_timeout: "{{ rackspace_wait_timeout }}"
register: rax_clb
- name: Validate rax_clb creation
@@ -158,6 +159,7 @@
address: '172.16.0.1'
port: 80
wait: true
wait_timeout: "{{ rackspace_wait_timeout }}"
register: rax_clb_nodes
- name: Validate rax_clb_nodes creds, region, load_balancer_id, address and port
@@ -180,6 +182,7 @@
node_id: "{{ rax_clb_nodes.node.id }}"
state: absent
wait: true
wait_timeout: "{{ rackspace_wait_timeout }}"
register: rax_clb_nodes
- name: Validate delete integration 1
@@ -201,6 +204,7 @@
port: 80
type: secondary
wait: true
wait_timeout: "{{ rackspace_wait_timeout }}"
ignore_errors: true
register: rax_clb_nodes
@@ -222,6 +226,7 @@
name: "{{ rax_clb.balancer.name }}"
state: absent
wait: true
wait_timeout: "{{ rackspace_wait_timeout }}"
register: rax_clb
- name: "Validate delete integration 3"

View File

@@ -8,7 +8,7 @@
assert:
that:
- rax_facts|failed
- rax_facts.msg == 'one of the following is required: address,id,name'
- 'rax_facts.msg == "one of the following is required: address,id,name"'
# ============================================================
@@ -122,6 +122,7 @@
flavor: "{{ rackspace_flavor }}"
name: "{{ resource_prefix }}-rax_facts"
wait: true
wait_timeout: "{{ rackspace_wait_timeout }}"
register: rax
- name: Validate build
@@ -267,6 +268,7 @@
name: "{{ resource_prefix }}-rax_facts"
state: absent
wait: true
wait_timeout: "{{ rackspace_wait_timeout }}"
register: rax
- name: "Validate delete"

View File

@@ -8,7 +8,7 @@
assert:
that:
- rax_keypair|failed
- rax_keypair.msg == 'missing required arguments: name'
- 'rax_keypair.msg == "missing required arguments: name"'
# ============================================================

View File

@@ -8,7 +8,7 @@
assert:
that:
- rax_meta|failed
- rax_meta.msg == 'one of the following is required: address,id,name'
- 'rax_meta.msg == "one of the following is required: address,id,name"'
# ============================================================
@@ -119,6 +119,7 @@
meta:
foo: bar
wait: true
wait_timeout: "{{ rackspace_wait_timeout }}"
register: rax
- name: Validate build
@@ -322,6 +323,7 @@
- "{{ rax.success.0.rax_id }}"
state: absent
wait: true
wait_timeout: "{{ rackspace_wait_timeout }}"
register: rax
- name: "Validate delete"

View File

@@ -8,7 +8,7 @@
assert:
that:
- rax_network|failed
- rax_network.msg == 'missing required arguments: label'
- 'rax_network.msg == "missing required arguments: label"'
# ============================================================
@@ -61,7 +61,7 @@
assert:
that:
- rax_network|failed
- rax_network.msg == 'missing required arguments: cidr'
- 'rax_network.msg == "missing required arguments: cidr"'
# ============================================================

View File

@@ -0,0 +1 @@
this is a test file

View File

@@ -0,0 +1,3 @@
dependencies:
- prepare_tests
- prepare_rax_tests

View File

@@ -0,0 +1,879 @@
# ============================================================
- name: Test rax_scaling_group with no args
rax_scaling_group:
ignore_errors: true
register: rax_scaling_group
- name: Validate results of rax_scaling_group with no args
assert:
that:
- rax_scaling_group|failed
- "rax_scaling_group.msg == 'missing required arguments: image,min_entities,flavor,max_entities,name,server_name'"
# ============================================================
# ============================================================
- name: Test rax_scaling_group with image,min_entities,flavor,max_entities,name,server_name
rax_scaling_group:
name: "{{ resource_prefix }}-1"
image: "{{ rackspace_image_id }}"
min_entities: 1
max_entities: 1
flavor: "{{ rackspace_flavor }}"
server_name: "{{ resource_prefix }}-1"
ignore_errors: true
register: rax_scaling_group
- name: Validate results of rax_scaling_group with image,min_entities,flavor,max_entities,name,server_name
assert:
that:
- rax_scaling_group|failed
- rax_scaling_group.msg == 'No credentials supplied!'
# ============================================================
# ============================================================
- name: Test rax_scaling_group with creds and required args
rax_scaling_group:
name: "{{ resource_prefix }}-1"
image: "{{ rackspace_image_id }}"
min_entities: 1
max_entities: 1
flavor: "{{ rackspace_flavor }}"
server_name: "{{ resource_prefix }}-1"
username: "{{ rackspace_username }}"
api_key: "{{ rackspace_api_key }}"
ignore_errors: true
register: rax_scaling_group
- name: Validate results of rax_scaling_group with creds and required args
assert:
that:
- rax_scaling_group|failed
- rax_scaling_group.msg.startswith('None is not a valid region')
# ============================================================
# ============================================================
- name: Test rax_scaling_group with creds, region and required args
rax_scaling_group:
name: "{{ resource_prefix }}-1"
image: "{{ rackspace_image_id }}"
min_entities: 1
max_entities: 1
flavor: "{{ rackspace_flavor }}"
server_name: "{{ resource_prefix }}-1"
username: "{{ rackspace_username }}"
api_key: "{{ rackspace_api_key }}"
region: "{{ rackspace_region }}"
register: rax_scaling_group
- name: Validate results of rax_scaling_group with creds, region and required args
assert:
that:
- rax_scaling_group|success
- rax_scaling_group.autoscale_group.name == "{{ resource_prefix }}-1"
- rax_scaling_group.autoscale_group.min_entities == 1
- rax_scaling_group.autoscale_group.max_entities == 1
- rax_scaling_group.autoscale_group.launchConfiguration.args.server.flavorRef == "{{ rackspace_flavor }}"
- rax_scaling_group.autoscale_group.launchConfiguration.args.server.imageRef == "{{ rackspace_image_id }}"
- rax_scaling_group.autoscale_group.launchConfiguration.args.server.name == "{{ resource_prefix }}-1"
- rax_scaling_group.autoscale_group.launchConfiguration.args.server.personality == []
- rax_scaling_group.autoscale_group.launchConfiguration.args.loadBalancers == []
- rax_scaling_group.autoscale_group.metadata == {}
- name: Test rax_scaling_group idempotency 1
rax_scaling_group:
name: "{{ resource_prefix }}-1"
image: "{{ rackspace_image_id }}"
min_entities: 1
max_entities: 1
flavor: "{{ rackspace_flavor }}"
server_name: "{{ resource_prefix }}-1"
username: "{{ rackspace_username }}"
api_key: "{{ rackspace_api_key }}"
region: "{{ rackspace_region }}"
register: rax_scaling_group
- name: Validate idempotency 1
assert:
that:
- not rax_scaling_group|changed
- name: Remove servers 1
rax_scaling_group:
name: "{{ resource_prefix }}-1"
image: "{{ rackspace_image_id }}"
min_entities: 0
max_entities: 0
flavor: "{{ rackspace_flavor }}"
server_name: "{{ resource_prefix }}-1"
username: "{{ rackspace_username }}"
api_key: "{{ rackspace_api_key }}"
region: "{{ rackspace_region }}"
register: rax_scaling_group
- name: Validate remove servers 1
assert:
that:
- rax_scaling_group|changed
- rax_scaling_group.autoscale_group.min_entities == 0
- rax_scaling_group.autoscale_group.max_entities == 0
- rax_scaling_group.autoscale_group.state.desiredCapacity == 0
- name: Test delete integration 1
rax_scaling_group:
name: "{{ resource_prefix }}-1"
image: "{{ rackspace_image_id }}"
min_entities: 0
max_entities: 0
flavor: "{{ rackspace_flavor }}"
server_name: "{{ resource_prefix }}-1"
username: "{{ rackspace_username }}"
api_key: "{{ rackspace_api_key }}"
region: "{{ rackspace_region }}"
state: absent
register: rax_scaling_group
- name: Validate delete integration 1
assert:
that:
- rax_scaling_group|changed
# ============================================================
# ============================================================
- name: Test rax_scaling_group server_name change 1
rax_scaling_group:
name: "{{ resource_prefix }}-2"
image: "{{ rackspace_image_id }}"
min_entities: 1
max_entities: 1
flavor: "{{ rackspace_flavor }}"
server_name: "{{ resource_prefix }}-2"
username: "{{ rackspace_username }}"
api_key: "{{ rackspace_api_key }}"
region: "{{ rackspace_region }}"
register: rax_scaling_group
- name: Validate results of rax_scaling_group server_name change
assert:
that:
- rax_scaling_group|success
- rax_scaling_group.autoscale_group.name == "{{ resource_prefix }}-2"
- rax_scaling_group.autoscale_group.launchConfiguration.args.server.name == "{{ resource_prefix }}-2"
- name: Test rax_scaling_group server_name change 2
rax_scaling_group:
name: "{{ resource_prefix }}-2"
image: "{{ rackspace_image_id }}"
min_entities: 1
max_entities: 1
flavor: "{{ rackspace_flavor }}"
server_name: "{{ resource_prefix }}-2a"
username: "{{ rackspace_username }}"
api_key: "{{ rackspace_api_key }}"
region: "{{ rackspace_region }}"
register: rax_scaling_group
- name: Validate results of rax_scaling_group server_name change 2
assert:
that:
- rax_scaling_group|changed
- rax_scaling_group.autoscale_group.name == "{{ resource_prefix }}-2"
- rax_scaling_group.autoscale_group.launchConfiguration.args.server.name == "{{ resource_prefix }}-2a"
- name: Remove servers 2
rax_scaling_group:
name: "{{ resource_prefix }}-2"
image: "{{ rackspace_image_id }}"
min_entities: 0
max_entities: 0
flavor: "{{ rackspace_flavor }}"
server_name: "{{ resource_prefix }}-2a"
username: "{{ rackspace_username }}"
api_key: "{{ rackspace_api_key }}"
region: "{{ rackspace_region }}"
register: rax_scaling_group
- name: Validate remove servers 2
assert:
that:
- rax_scaling_group|changed
- rax_scaling_group.autoscale_group.min_entities == 0
- rax_scaling_group.autoscale_group.max_entities == 0
- rax_scaling_group.autoscale_group.state.desiredCapacity == 0
- name: Test delete integration 2
rax_scaling_group:
name: "{{ resource_prefix }}-2"
image: "{{ rackspace_image_id }}"
min_entities: 0
max_entities: 0
flavor: "{{ rackspace_flavor }}"
server_name: "{{ resource_prefix }}-2a"
username: "{{ rackspace_username }}"
api_key: "{{ rackspace_api_key }}"
region: "{{ rackspace_region }}"
state: absent
register: rax_scaling_group
- name: Validate delete integration 2
assert:
that:
- rax_scaling_group|changed
# ============================================================
# ============================================================
- name: Test rax_scaling_group with invalid load balancers
rax_scaling_group:
name: "{{ resource_prefix }}-3"
image: "{{ rackspace_image_id }}"
min_entities: 1
max_entities: 1
flavor: "{{ rackspace_flavor }}"
server_name: "{{ resource_prefix }}-3"
username: "{{ rackspace_username }}"
api_key: "{{ rackspace_api_key }}"
region: "{{ rackspace_region }}"
loadbalancers:
- id: "1234567890-0987654321"
port: 80
register: rax_scaling_group
ignore_errors: true
- name: Validate results of rax_scaling_group with invalid load balancers
assert:
that:
- rax_scaling_group|failed
- rax_scaling_group.msg.startswith('Load balancer ID is not an integer')
# ============================================================
# ============================================================
- name: Build a CLB to test rax_scaling_group with
rax_clb:
username: "{{ rackspace_username }}"
api_key: "{{ rackspace_api_key }}"
region: "{{ rackspace_region }}"
name: "{{ resource_prefix }}-clb"
wait: true
wait_timeout: "{{ rackspace_wait_timeout }}"
register: rax_clb
- name: Validate rax_clb creation
assert:
that:
- rax_clb|success
- name: Set variable for CLB ID
set_fact:
rax_clb_id: "{{ rax_clb.balancer.id }}"
# ============================================================
# ============================================================
- name: Test rax_scaling_group with load balancers
rax_scaling_group:
name: "{{ resource_prefix }}-3"
image: "{{ rackspace_image_id }}"
min_entities: 1
max_entities: 1
flavor: "{{ rackspace_flavor }}"
server_name: "{{ resource_prefix }}-3"
username: "{{ rackspace_username }}"
api_key: "{{ rackspace_api_key }}"
region: "{{ rackspace_region }}"
loadbalancers:
- id: "{{ rax_clb_id }}"
port: 80
register: rax_scaling_group
- name: Validate results of rax_scaling_group with load balancers
assert:
that:
- rax_scaling_group|success
- rax_scaling_group.autoscale_group.name == "{{ resource_prefix }}-3"
- rax_scaling_group.autoscale_group.launchConfiguration.args.loadBalancers[0].loadBalancerId == rax_clb_id|int
- name: Remove servers 3
rax_scaling_group:
name: "{{ resource_prefix }}-3"
image: "{{ rackspace_image_id }}"
min_entities: 0
max_entities: 0
flavor: "{{ rackspace_flavor }}"
server_name: "{{ resource_prefix }}-3"
username: "{{ rackspace_username }}"
api_key: "{{ rackspace_api_key }}"
region: "{{ rackspace_region }}"
register: rax_scaling_group
- name: Test delete integration 3
rax_scaling_group:
name: "{{ resource_prefix }}-3"
image: "{{ rackspace_image_id }}"
min_entities: 0
max_entities: 0
flavor: "{{ rackspace_flavor }}"
server_name: "{{ resource_prefix }}-3"
username: "{{ rackspace_username }}"
api_key: "{{ rackspace_api_key }}"
region: "{{ rackspace_region }}"
state: absent
register: rax_scaling_group
# ============================================================
# ============================================================
- name: Test rax_scaling_group files change 1
rax_scaling_group:
name: "{{ resource_prefix }}-4"
image: "{{ rackspace_image_id }}"
min_entities: 1
max_entities: 1
files:
/tmp/test.txt: "{{ role_path }}/files/test.txt"
flavor: "{{ rackspace_flavor }}"
server_name: "{{ resource_prefix }}-4"
username: "{{ rackspace_username }}"
api_key: "{{ rackspace_api_key }}"
region: "{{ rackspace_region }}"
register: rax_scaling_group
- name: Validate results of rax_scaling_group files change 1
assert:
that:
- rax_scaling_group|success
- rax_scaling_group.autoscale_group.name == "{{ resource_prefix }}-4"
- rax_scaling_group.autoscale_group.launchConfiguration.args.server.personality|length == 1
- name: Test rax_scaling_group files change 2
rax_scaling_group:
name: "{{ resource_prefix }}-4"
image: "{{ rackspace_image_id }}"
min_entities: 1
max_entities: 1
flavor: "{{ rackspace_flavor }}"
server_name: "{{ resource_prefix }}-4"
username: "{{ rackspace_username }}"
api_key: "{{ rackspace_api_key }}"
region: "{{ rackspace_region }}"
register: rax_scaling_group
- name: Validate results of rax_scaling_group files change 2
assert:
that:
- rax_scaling_group|changed
- rax_scaling_group.autoscale_group.name == "{{ resource_prefix }}-4"
- rax_scaling_group.autoscale_group.launchConfiguration.args.server.personality is not defined
- name: Remove servers 4
rax_scaling_group:
name: "{{ resource_prefix }}-4"
image: "{{ rackspace_image_id }}"
min_entities: 0
max_entities: 0
flavor: "{{ rackspace_flavor }}"
server_name: "{{ resource_prefix }}-4"
username: "{{ rackspace_username }}"
api_key: "{{ rackspace_api_key }}"
region: "{{ rackspace_region }}"
register: rax_scaling_group
- name: Test delete integration 4
rax_scaling_group:
name: "{{ resource_prefix }}-4"
image: "{{ rackspace_image_id }}"
min_entities: 0
max_entities: 0
flavor: "{{ rackspace_flavor }}"
server_name: "{{ resource_prefix }}-4"
username: "{{ rackspace_username }}"
api_key: "{{ rackspace_api_key }}"
region: "{{ rackspace_region }}"
state: absent
register: rax_scaling_group
# ============================================================
# ============================================================
- name: Build scaling group to test argument changes
rax_scaling_group:
name: "{{ resource_prefix }}-5"
image: "{{ rackspace_image_id }}"
min_entities: 1
max_entities: 1
flavor: "{{ rackspace_flavor }}"
server_name: "{{ resource_prefix }}-5"
username: "{{ rackspace_username }}"
api_key: "{{ rackspace_api_key }}"
region: "{{ rackspace_region }}"
register: rax_scaling_group
- name: Validate default create
assert:
that:
- rax_scaling_group|success
- rax_scaling_group|changed
- rax_scaling_group.autoscale_group.name == "{{ resource_prefix }}-5"
- rax_scaling_group.autoscale_group.min_entities == 1
- rax_scaling_group.autoscale_group.max_entities == 1
- rax_scaling_group.autoscale_group.launchConfiguration.args.server.flavorRef == "{{ rackspace_flavor }}"
- rax_scaling_group.autoscale_group.launchConfiguration.args.server.imageRef == "{{ rackspace_image_id }}"
- rax_scaling_group.autoscale_group.launchConfiguration.args.server.name == "{{ resource_prefix }}-5"
- rax_scaling_group.autoscale_group.launchConfiguration.args.server.personality == []
- rax_scaling_group.autoscale_group.launchConfiguration.args.loadBalancers == []
- rax_scaling_group.autoscale_group.metadata == {}
# ============================================================
# ============================================================
- name: Change cooldown
rax_scaling_group:
name: "{{ resource_prefix }}-5"
image: "{{ rackspace_image_id }}"
min_entities: 1
max_entities: 1
flavor: "{{ rackspace_flavor }}"
server_name: "{{ resource_prefix }}-5"
username: "{{ rackspace_username }}"
api_key: "{{ rackspace_api_key }}"
region: "{{ rackspace_region }}"
cooldown: 500
register: rax_scaling_group
- name: Validate cooldown change
assert:
that:
- rax_scaling_group|success
- rax_scaling_group|changed
- rax_scaling_group.autoscale_group.cooldown == 500
# ============================================================
# ============================================================
- name: Change max_entities
rax_scaling_group:
name: "{{ resource_prefix }}-5"
image: "{{ rackspace_image_id }}"
min_entities: 1
max_entities: 2
flavor: "{{ rackspace_flavor }}"
server_name: "{{ resource_prefix }}-5"
username: "{{ rackspace_username }}"
api_key: "{{ rackspace_api_key }}"
region: "{{ rackspace_region }}"
cooldown: 500
register: rax_scaling_group
- name: Validate max_entities change
assert:
that:
- rax_scaling_group|success
- rax_scaling_group|changed
- rax_scaling_group.autoscale_group.max_entities == 2
# ============================================================
# ============================================================
- name: Change min_entities
rax_scaling_group:
name: "{{ resource_prefix }}-5"
image: "{{ rackspace_image_id }}"
min_entities: 2
max_entities: 2
flavor: "{{ rackspace_flavor }}"
server_name: "{{ resource_prefix }}-5"
username: "{{ rackspace_username }}"
api_key: "{{ rackspace_api_key }}"
region: "{{ rackspace_region }}"
cooldown: 500
register: rax_scaling_group
- name: Validate min_entities change
assert:
that:
- rax_scaling_group|success
- rax_scaling_group|changed
- rax_scaling_group.autoscale_group.min_entities == 2
# ============================================================
# ============================================================
- name: Change server_name
rax_scaling_group:
name: "{{ resource_prefix }}-5"
image: "{{ rackspace_image_id }}"
min_entities: 2
max_entities: 2
flavor: "{{ rackspace_flavor }}"
server_name: "{{ resource_prefix }}-5-1"
username: "{{ rackspace_username }}"
api_key: "{{ rackspace_api_key }}"
region: "{{ rackspace_region }}"
cooldown: 500
register: rax_scaling_group
- name: Validate server_name change
assert:
that:
- rax_scaling_group|success
- rax_scaling_group|changed
- rax_scaling_group.autoscale_group.launchConfiguration.args.server.name == "{{ resource_prefix }}-5-1"
# ============================================================
# ============================================================
- name: Change image
rax_scaling_group:
name: "{{ resource_prefix }}-5"
image: "{{ rackspace_alt_image_id }}"
min_entities: 2
max_entities: 2
flavor: "{{ rackspace_flavor }}"
server_name: "{{ resource_prefix }}-5-1"
username: "{{ rackspace_username }}"
api_key: "{{ rackspace_api_key }}"
region: "{{ rackspace_region }}"
cooldown: 500
register: rax_scaling_group
- name: Validate image change
assert:
that:
- rax_scaling_group|success
- rax_scaling_group|changed
- rax_scaling_group.autoscale_group.launchConfiguration.args.server.imageRef == "{{ rackspace_alt_image_id }}"
# ============================================================
# ============================================================
- name: Change flavor
rax_scaling_group:
name: "{{ resource_prefix }}-5"
image: "{{ rackspace_alt_image_id }}"
min_entities: 2
max_entities: 2
flavor: "{{ rackspace_alt_flavor }}"
server_name: "{{ resource_prefix }}-5-1"
username: "{{ rackspace_username }}"
api_key: "{{ rackspace_api_key }}"
region: "{{ rackspace_region }}"
cooldown: 500
register: rax_scaling_group
- name: Validate flavor change
assert:
that:
- rax_scaling_group|success
- rax_scaling_group|changed
- rax_scaling_group.autoscale_group.launchConfiguration.args.server.flavorRef == "{{ rackspace_alt_flavor }}"
# ============================================================
# ============================================================
- name: Change disk_config
rax_scaling_group:
name: "{{ resource_prefix }}-5"
image: "{{ rackspace_alt_image_id }}"
min_entities: 2
max_entities: 2
flavor: "{{ rackspace_alt_flavor }}"
server_name: "{{ resource_prefix }}-5-1"
username: "{{ rackspace_username }}"
api_key: "{{ rackspace_api_key }}"
region: "{{ rackspace_region }}"
cooldown: 500
disk_config: auto
register: rax_scaling_group
- name: Validate disk_config change
assert:
that:
- rax_scaling_group|success
- not rax_scaling_group|changed
- "rax_scaling_group.autoscale_group.launchConfiguration.args.server['OS-DCF:diskConfig'] == 'AUTO'"
- name: Change disk_config 2
rax_scaling_group:
name: "{{ resource_prefix }}-5"
image: "{{ rackspace_alt_image_id }}"
min_entities: 2
max_entities: 2
flavor: "{{ rackspace_alt_flavor }}"
server_name: "{{ resource_prefix }}-5-1"
username: "{{ rackspace_username }}"
api_key: "{{ rackspace_api_key }}"
region: "{{ rackspace_region }}"
cooldown: 500
disk_config: manual
register: rax_scaling_group
- name: Validate disk_config change 2
assert:
that:
- rax_scaling_group|success
- rax_scaling_group|changed
- "rax_scaling_group.autoscale_group.launchConfiguration.args.server['OS-DCF:diskConfig'] == 'MANUAL'"
# ============================================================
# ============================================================
- name: Change networks
rax_scaling_group:
name: "{{ resource_prefix }}-5"
image: "{{ rackspace_alt_image_id }}"
min_entities: 2
max_entities: 2
flavor: "{{ rackspace_alt_flavor }}"
server_name: "{{ resource_prefix }}-5-1"
username: "{{ rackspace_username }}"
api_key: "{{ rackspace_api_key }}"
region: "{{ rackspace_region }}"
cooldown: 500
disk_config: manual
networks:
- public
register: rax_scaling_group
- name: Validate networks change
assert:
that:
- rax_scaling_group|success
- rax_scaling_group|changed
- rax_scaling_group.autoscale_group.launchConfiguration.args.server.networks.0.uuid == "00000000-0000-0000-0000-000000000000"
# ============================================================
# ============================================================
- name: Change load balancers
rax_scaling_group:
name: "{{ resource_prefix }}-5"
image: "{{ rackspace_alt_image_id }}"
min_entities: 2
max_entities: 2
flavor: "{{ rackspace_alt_flavor }}"
server_name: "{{ resource_prefix }}-5-1"
username: "{{ rackspace_username }}"
api_key: "{{ rackspace_api_key }}"
region: "{{ rackspace_region }}"
cooldown: 500
disk_config: manual
networks:
- public
- private
loadbalancers:
- id: "{{ rax_clb_id }}"
port: 80
register: rax_scaling_group
- name: Validate load balancers change
assert:
that:
- rax_scaling_group|success
- rax_scaling_group|changed
- rax_scaling_group.autoscale_group.launchConfiguration.args.loadBalancers.0.loadBalancerId == rax_clb_id|int
# ============================================================
# ============================================================
- name: Create keypair to test with
rax_keypair:
username: "{{ rackspace_username }}"
api_key: "{{ rackspace_api_key }}"
region: "{{ rackspace_region }}"
name: "{{ resource_prefix }}-keypair"
public_key: "{{ rackspace_keypair_pub }}"
register: rax_keypair
- name: Validate rax_keypair creation
assert:
that:
- rax_keypair|success
- rax_keypair|changed
- rax_keypair.keypair.name == "{{ resource_prefix }}-keypair"
- rax_keypair.keypair.public_key == "{{ rackspace_keypair_pub }}"
# ============================================================
# ============================================================
- name: Change key_name
rax_scaling_group:
name: "{{ resource_prefix }}-5"
image: "{{ rackspace_alt_image_id }}"
min_entities: 2
max_entities: 2
flavor: "{{ rackspace_alt_flavor }}"
server_name: "{{ resource_prefix }}-5-1"
username: "{{ rackspace_username }}"
api_key: "{{ rackspace_api_key }}"
region: "{{ rackspace_region }}"
cooldown: 500
disk_config: manual
networks:
- public
- private
loadbalancers:
- id: "{{ rax_clb_id }}"
port: 80
key_name: "{{ resource_prefix }}-keypair"
register: rax_scaling_group
- name: Validate key_name change
assert:
that:
- rax_scaling_group|success
- rax_scaling_group|changed
- rax_scaling_group.autoscale_group.launchConfiguration.args.server.key_name == "{{ resource_prefix }}-keypair"
# ============================================================
# ============================================================
- name: Change config_drive
rax_scaling_group:
name: "{{ resource_prefix }}-5"
image: "{{ rackspace_alt_image_id }}"
min_entities: 2
max_entities: 2
flavor: "{{ rackspace_alt_flavor }}"
server_name: "{{ resource_prefix }}-5-1"
username: "{{ rackspace_username }}"
api_key: "{{ rackspace_api_key }}"
region: "{{ rackspace_region }}"
cooldown: 500
disk_config: manual
networks:
- public
- private
loadbalancers:
- id: "{{ rax_clb_id }}"
port: 80
key_name: "{{ resource_prefix }}-keypair"
config_drive: true
register: rax_scaling_group
- name: Validate config_drive change
assert:
that:
- rax_scaling_group|success
- rax_scaling_group|changed
- rax_scaling_group.autoscale_group.launchConfiguration.args.server.config_drive
# ============================================================
# ============================================================
- name: Change user_data
rax_scaling_group:
name: "{{ resource_prefix }}-5"
image: "{{ rackspace_alt_image_id }}"
min_entities: 2
max_entities: 2
flavor: "{{ rackspace_alt_flavor }}"
server_name: "{{ resource_prefix }}-5-1"
username: "{{ rackspace_username }}"
api_key: "{{ rackspace_api_key }}"
region: "{{ rackspace_region }}"
cooldown: 500
disk_config: manual
networks:
- public
- private
loadbalancers:
- id: "{{ rax_clb_id }}"
port: 80
key_name: "{{ resource_prefix }}-keypair"
config_drive: true
user_data: "foo"
register: rax_scaling_group
- name: Validate user_data change
assert:
that:
- rax_scaling_group|success
- rax_scaling_group|changed
- rax_scaling_group.autoscale_group.launchConfiguration.args.server.user_data == '{{ "foo"|b64encode }}'
# ============================================================
# ============================================================
- name: Delete keypair
rax_keypair:
username: "{{ rackspace_username }}"
api_key: "{{ rackspace_api_key }}"
region: "{{ rackspace_region }}"
name: "{{ resource_prefix }}-keypair"
public_key: "{{ rackspace_keypair_pub }}"
state: absent
register: rax_keypair
- name: Validate rax_keypair deletion
assert:
that:
- rax_keypair|success
- rax_keypair|changed
# ============================================================
# ============================================================
- name: Delete CLB
rax_clb:
username: "{{ rackspace_username }}"
api_key: "{{ rackspace_api_key }}"
region: "{{ rackspace_region }}"
name: "{{ rax_clb.balancer.name }}"
state: absent
wait: true
wait_timeout: "{{ rackspace_wait_timeout }}"
register: rax_clb
- name: "Validate delete integration 3"
assert:
that:
- rax_clb|changed
- rax_clb.balancer.id == rax_clb_id|int
# ============================================================

View File

@@ -0,0 +1,3 @@
dependencies:
- prepare_tests

View File

@@ -0,0 +1,37 @@
# test code for the template module
# (c) 2015, Brian Coca <bcoca@ansible.com>
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
- name: verify ansible_managed
template: src=foo.j2 dest={{output_dir}}/special_vars.yaml
- name: read the file into facts
include_vars: "{{output_dir}}/special_vars.yaml"
- name: verify all test vars are defined
assert:
that:
- 'item in hostvars[inventory_hostname].keys()'
with_items:
- test_template_host
- test_template_path
- test_template_mtime
- test_template_uid
- test_template_fullpath
- test_template_run_date
- test_ansible_managed

View File

@@ -0,0 +1,7 @@
test_template_host: "{{template_host}}"
test_template_path: "{{template_path}}"
test_template_mtime: "{{template_mtime}}"
test_template_uid: "{{template_uid}}"
test_template_fullpath: "{{template_fullpath}}"
test_template_run_date: "{{template_run_date}}"
test_ansible_managed: "{{ansible_managed}}"

View File

@@ -40,6 +40,124 @@
- "sync_result.msg.startswith('>f+')"
- "sync_result.msg.endswith('+ foo.txt\n')"
- name: test that the file was really copied over
stat:
path: "{{ output_dir }}/foo.result"
register: stat_result
- assert:
that:
- "stat_result.stat.exists == True"
- "stat_result.stat.checksum == '2aae6c35c94fcfb415dbe95f408b9ce91ee846ed'"
- name: test that the file is not copied a second time
synchronize: src={{output_dir}}/foo.txt dest={{output_dir}}/foo.result
register: sync_result
- assert:
that:
- "sync_result.changed == False"
- name: Cleanup
file:
state: absent
path: "{{output_dir}}/{{item}}"
with_items:
- foo.result
- bar.result
- name: Synchronize using the mode=push param
synchronize:
src: "{{output_dir}}/foo.txt"
dest: "{{output_dir}}/foo.result"
mode: push
register: sync_result
- assert:
that:
- "'changed' in sync_result"
- "sync_result.changed == true"
- "'cmd' in sync_result"
- "'rsync' in sync_result.cmd"
- "'msg' in sync_result"
- "sync_result.msg.startswith('>f+')"
- "sync_result.msg.endswith('+ foo.txt\n')"
- name: test that the file was really copied over
stat:
path: "{{ output_dir }}/foo.result"
register: stat_result
- assert:
that:
- "stat_result.stat.exists == True"
- "stat_result.stat.checksum == '2aae6c35c94fcfb415dbe95f408b9ce91ee846ed'"
- name: test that the file is not copied a second time
synchronize:
src: "{{output_dir}}/foo.txt"
dest: "{{output_dir}}/foo.result"
mode: push
register: sync_result
- assert:
that:
- "sync_result.changed == False"
- name: Cleanup
file:
state: absent
path: "{{output_dir}}/{{item}}"
with_items:
- foo.result
- bar.result
- name: Synchronize using the mode=pull param
synchronize:
src: "{{output_dir}}/foo.txt"
dest: "{{output_dir}}/foo.result"
mode: pull
register: sync_result
- assert:
that:
- "'changed' in sync_result"
- "sync_result.changed == true"
- "'cmd' in sync_result"
- "'rsync' in sync_result.cmd"
- "'msg' in sync_result"
- "sync_result.msg.startswith('>f+')"
- "sync_result.msg.endswith('+ foo.txt\n')"
- name: test that the file was really copied over
stat:
path: "{{ output_dir }}/foo.result"
register: stat_result
- assert:
that:
- "stat_result.stat.exists == True"
- "stat_result.stat.checksum == '2aae6c35c94fcfb415dbe95f408b9ce91ee846ed'"
- name: test that the file is not copied a second time
synchronize:
src: "{{output_dir}}/foo.txt"
dest: "{{output_dir}}/foo.result"
mode: pull
register: sync_result
- assert:
that:
- "sync_result.changed == False"
- name: Cleanup
file:
state: absent
path: "{{output_dir}}/{{item}}"
with_items:
- foo.result
- bar.result
- name: synchronize files using with_items (issue#5965)
synchronize: src={{output_dir}}/{{item}} dest={{output_dir}}/{{item}}.result
with_items:
@@ -47,7 +165,6 @@
- bar.txt
register: sync_result
- debug: var=sync_result
- assert:
that:
- "sync_result.changed"
@@ -61,7 +178,6 @@
synchronize: src={{output_dir}}/foo.txt dest={{output_dir}}/foo.rsync_path rsync_path="sudo rsync"
register: sync_result
- debug: var=sync_result
- assert:
that:
- "'changed' in sync_result"

View File

@@ -91,3 +91,43 @@
with_together:
- fail_checksum.results
- fail.results
- name: test https fetch to a site with mismatched hostname and certificate
uri:
url: "https://kennethreitz.org/"
dest: "{{ output_dir }}/shouldnotexist.html"
ignore_errors: True
register: result
- stat:
path: "{{ output_dir }}/shouldnotexist.html"
register: stat_result
- name: Assert that the file was not downloaded
assert:
that:
- "result.failed == true"
- "'certificate does not match ' in result.msg"
- "stat_result.stat.exists == false"
- name: Clean up any cruft from the results directory
file:
name: "{{ output_dir }}/kreitz.html"
state: absent
- name: test https fetch to a site with mismatched hostname and certificate and validate_certs=no
get_url:
url: "https://kennethreitz.org/"
dest: "{{ output_dir }}/kreitz.html"
validate_certs: no
register: result
- stat:
path: "{{ output_dir }}/kreitz.html"
register: stat_result
- name: Assert that the file was downloaded
assert:
that:
- "stat_result.stat.exists == true"
- "result.changed == true"

View File

@@ -7,7 +7,7 @@
- assert:
that:
- 'extra_var == "extra_var"'
- 'param_var == "param_var_role1"'
- 'param_var == "param_var"'
- 'vars_var == "vars_var"'
- 'vars_files_var == "vars_files_var"'
- 'vars_files_var_role == "vars_files_var_dep"'

View File

@@ -62,7 +62,7 @@
- name: verify that the file checksum is correct
assert:
that:
- "copy_result.checksum[0] == 'c79a6506c1c948be0d456ab5104d5e753ab2f3e6'"
- "copy_result.checksum == 'c79a6506c1c948be0d456ab5104d5e753ab2f3e6'"
- name: check the stat results of the file
win_stat: path={{output_file}}
@@ -78,7 +78,7 @@
# - "stat_results.stat.isfifo == false"
# - "stat_results.stat.isreg == true"
# - "stat_results.stat.issock == false"
- "stat_results.stat.checksum[0] == 'c79a6506c1c948be0d456ab5104d5e753ab2f3e6'"
- "stat_results.stat.checksum == 'c79a6506c1c948be0d456ab5104d5e753ab2f3e6'"
- name: overwrite the file via same means
win_copy: src=foo.txt dest={{output_file}}

View File

@@ -17,10 +17,16 @@
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
- name: check whether servermanager module is available (windows 2008 r2 or later)
raw: PowerShell -Command Import-Module ServerManager
register: win_feature_has_servermanager
ignore_errors: true
- name: start with feature absent
win_feature:
name: "{{ test_win_feature_name }}"
state: absent
when: win_feature_has_servermanager|success
- name: install feature
win_feature:
@@ -30,6 +36,7 @@
include_sub_features: yes
include_management_tools: yes
register: win_feature_install_result
when: win_feature_has_servermanager|success
- name: check result of installing feature
assert:
@@ -45,6 +52,7 @@
- "win_feature_install_result.feature_result[0].restart_needed is defined"
- "win_feature_install_result.feature_result[0].skip_reason"
- "win_feature_install_result.feature_result[0].success is defined"
when: win_feature_has_servermanager|success
- name: install feature again
win_feature:
@@ -54,6 +62,7 @@
include_sub_features: yes
include_management_tools: yes
register: win_feature_install_again_result
when: win_feature_has_servermanager|success
- name: check result of installing feature again
assert:
@@ -63,12 +72,14 @@
- "win_feature_install_again_result.exitcode == 'NoChangeNeeded'"
- "not win_feature_install_again_result.restart_needed"
- "win_feature_install_again_result.feature_result == []"
when: win_feature_has_servermanager|success
- name: remove feature
win_feature:
name: "{{ test_win_feature_name }}"
state: absent
register: win_feature_remove_result
when: win_feature_has_servermanager|success
- name: check result of removing feature
assert:
@@ -84,12 +95,14 @@
- "win_feature_remove_result.feature_result[0].restart_needed is defined"
- "win_feature_remove_result.feature_result[0].skip_reason"
- "win_feature_remove_result.feature_result[0].success is defined"
when: win_feature_has_servermanager|success
- name: remove feature again
win_feature:
name: "{{ test_win_feature_name }}"
state: absent
register: win_feature_remove_again_result
when: win_feature_has_servermanager|success
- name: check result of removing feature again
assert:
@@ -99,6 +112,7 @@
- "win_feature_remove_again_result.exitcode == 'NoChangeNeeded'"
- "not win_feature_remove_again_result.restart_needed"
- "win_feature_remove_again_result.feature_result == []"
when: win_feature_has_servermanager|success
- name: try to install an invalid feature name
win_feature:
@@ -106,6 +120,7 @@
state: present
register: win_feature_install_invalid_result
ignore_errors: true
when: win_feature_has_servermanager|success
- name: check result of installing invalid feature name
assert:
@@ -114,6 +129,7 @@
- "not win_feature_install_invalid_result|changed"
- "win_feature_install_invalid_result.msg"
- "win_feature_install_invalid_result.exitcode == 'InvalidArgs'"
when: win_feature_has_servermanager|success
- name: try to remove an invalid feature name
win_feature:
@@ -121,6 +137,7 @@
state: absent
register: win_feature_remove_invalid_result
ignore_errors: true
when: win_feature_has_servermanager|success
- name: check result of removing invalid feature name
assert:
@@ -129,3 +146,4 @@
- "not win_feature_remove_invalid_result|changed"
- "win_feature_remove_invalid_result.msg"
- "win_feature_remove_invalid_result.exitcode == 'InvalidArgs'"
when: win_feature_has_servermanager|success

View File

@@ -73,16 +73,14 @@
- "fetch_flat_stat.stat.isreg"
- "fetch_flat_stat.stat.md5 == fetch_flat.md5sum"
- name: fetch a small file to flat directory (without trailing slash)
fetch: src="C:/Windows/win.ini" dest="{{ output_dir }}" flat=yes
register: fetch_flat_dir
ignore_errors: true
#- name: fetch a small file to flat directory (without trailing slash)
# fetch: src="C:/Windows/win.ini" dest="{{ output_dir }}" flat=yes
# register: fetch_flat_dir
- name: check fetch flat to directory result
assert:
that:
- "fetch_flat_dir|failed"
- "fetch_flat_dir.msg"
#- name: check fetch flat to directory result
# assert:
# that:
# - "not fetch_flat_dir|changed"
- name: fetch a large binary file
fetch: src="C:/Windows/explorer.exe" dest={{ output_dir }}
@@ -114,7 +112,7 @@
- "not fetch_large_again.changed"
- name: fetch a small file using backslashes in src path
fetch: src="C:\Windows\system.ini" dest={{ output_dir }}
fetch: src="C:\\Windows\\system.ini" dest={{ output_dir }}
register: fetch_small_bs
- name: check fetch small result with backslashes
@@ -157,7 +155,7 @@
- "not fetch_missing|changed"
- name: attempt to fetch a directory
fetch: src="C:\Windows" dest={{ output_dir }}
fetch: src="C:\\Windows" dest={{ output_dir }}
register: fetch_dir
ignore_errors: true

View File

@@ -32,7 +32,7 @@
# - "file_result.state == 'file'"
- name: verify that we are checking an absent file
win_file: path={{win_output_dir}}\bar.txt state=absent
win_file: path={{win_output_dir}}/bar.txt state=absent
register: file2_result
- name: verify that the file was marked as changed
@@ -42,7 +42,7 @@
# - "file2_result.state == 'absent'"
- name: verify we can touch a file
win_file: path={{win_output_dir}}\baz.txt state=touch
win_file: path={{win_output_dir}}/baz.txt state=touch
register: file3_result
- name: verify that the file was marked as changed
@@ -85,8 +85,8 @@
# - "chown_result.failed == True"
# - "file_exists_result.stat.exists == False"
#
- name: clean up
win_file: path=/tmp/worldwritable state=absent
#- name: clean up
# win_file: path=/tmp/worldwritable state=absent
#- name: create soft link to file
# win_file: src={{output_file}} dest={{win_output_dir}}/soft.txt state=link
@@ -107,7 +107,7 @@
# - "file6_result.changed == true"
#
- name: create a directory
win_file: path={{win_output_dir}}\foobar state=directory
win_file: path={{win_output_dir}}/foobar state=directory
register: file7_result
- debug: var=file7_result
@@ -134,22 +134,22 @@
# when: selinux_installed.stdout != "" and selinux_enabled.stdout != "Disabled"
- name: remote directory foobar
win_file: path={{win_output_dir}}\foobar state=absent
win_file: path={{win_output_dir}}/foobar state=absent
- name: remove file foo.txt
win_file: path={{win_output_dir}}\foo.txt state=absent
win_file: path={{win_output_dir}}/foo.txt state=absent
- name: remove file bar.txt
win_file: path={{win_output_dir}}\foo.txt state=absent
win_file: path={{win_output_dir}}/foo.txt state=absent
- name: remove file baz.txt
win_file: path={{win_output_dir}}\foo.txt state=absent
win_file: path={{win_output_dir}}/foo.txt state=absent
- name: win copy directory structure over
win_copy: src=foobar dest={{win_output_dir}}
- name: remove directory foobar
win_file: path={{win_output_dir}}\foobar state=absent
win_file: path={{win_output_dir}}/foobar state=absent
register: file14_result
- debug: var=file14_result

View File

@@ -0,0 +1,7 @@
---
test_win_get_url_link: http://docs.ansible.com
test_win_get_url_path: "C:\\Users\\{{ansible_ssh_user}}\\docs_index.html"
test_win_get_url_invalid_link: http://docs.ansible.com/skynet_module.html
test_win_get_url_invalid_path: "Q:\\Filez\\Cyberdyne.html"
test_win_get_url_dir_path: "C:\\Users\\{{ansible_ssh_user}}"

View File

@@ -17,19 +17,81 @@
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
- name: remove test file if it exists
raw: PowerShell -Command {Remove-Item "C:\Users\Administrator\win_get_url.jpg" -Force}
raw: >
PowerShell -Command Remove-Item "{{test_win_get_url_path}}" -Force
ignore_errors: true
- name: test win_get_url module
win_get_url: url=http://placehold.it/10x10.jpg dest='C:\Users\Administrator\win_get_url.jpg'
win_get_url:
url: "{{test_win_get_url_link}}"
dest: "{{test_win_get_url_path}}"
register: win_get_url_result
- name: check win_get_url result
- name: check that url was downloaded
assert:
that:
- "not win_get_url_result|failed"
- "win_get_url_result|changed"
- "win_get_url_result.win_get_url.url"
- "win_get_url_result.win_get_url.dest"
# FIXME:
# - Test invalid url
# - Test invalid dest, when dest is directory
# - Test idempotence when downloading same url/dest (not yet implemented)
- name: test win_get_url module again (force should be yes by default)
win_get_url:
url: "{{test_win_get_url_link}}"
dest: "{{test_win_get_url_path}}"
register: win_get_url_result_again
- name: check that url was downloaded again
assert:
that:
- "not win_get_url_result_again|failed"
- "win_get_url_result_again|changed"
- name: test win_get_url module again with force=no
win_get_url:
url: "{{test_win_get_url_link}}"
dest: "{{test_win_get_url_path}}"
force: no
register: win_get_url_result_noforce
- name: check that url was not downloaded again
assert:
that:
- "not win_get_url_result_noforce|failed"
- "not win_get_url_result_noforce|changed"
- name: test win_get_url module with url that returns a 404
win_get_url:
url: "{{test_win_get_url_invalid_link}}"
dest: "{{test_win_get_url_path}}"
register: win_get_url_result_invalid_link
ignore_errors: true
- name: check that the download failed for an invalid url
assert:
that:
- "win_get_url_result_invalid_link|failed"
- name: test win_get_url module with an invalid path
win_get_url:
url: "{{test_win_get_url_link}}"
dest: "{{test_win_get_url_invalid_path}}"
register: win_get_url_result_invalid_path
ignore_errors: true
- name: check that the download failed for an invalid path
assert:
that:
- "win_get_url_result_invalid_path|failed"
- name: test win_get_url module with a valid path that is a directory
win_get_url:
url: "{{test_win_get_url_link}}"
dest: "{{test_win_get_url_dir_path}}"
register: win_get_url_result_dir_path
ignore_errors: true
- name: check that the download failed if dest is a directory
assert:
that:
- "win_get_url_result_dir_path|failed"

View File

@@ -17,7 +17,7 @@
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
- name: use win_get_url module to download msi
win_get_url: url=http://downloads.sourceforge.net/project/sevenzip/7-Zip/9.22/7z922-x64.msi dest='C:\7z922-x64.msi'
win_get_url: url=http://downloads.sourceforge.net/project/sevenzip/7-Zip/9.22/7z922-x64.msi dest='C:\\7z922-x64.msi'
register: win_get_url_result
- name: install 7zip msi

View File

@@ -28,7 +28,7 @@
- "win_ping_result.ping == 'pong'"
- name: test win_ping with data
win_ping: data=blah
win_ping: data=
register: win_ping_with_data_result
- name: check win_ping result with data
@@ -36,21 +36,11 @@
that:
- "not win_ping_with_data_result|failed"
- "not win_ping_with_data_result|changed"
- "win_ping_with_data_result.ping == 'blah'"
- "win_ping_with_data_result.ping == ''"
#- name: test local ping (should use default ping)
# local_action: ping
# register: local_ping_result
#- name: check local ping result
# assert:
# that:
# - "not local_ping_result|failed"
# - "not local_ping_result|changed"
# - "local_ping_result.ping == 'pong'"
- name: test win_ping.ps1 with data
win_ping.ps1: data=bleep
- name: test win_ping.ps1 with data as complex args
win_ping.ps1:
data: bleep
register: win_ping_ps1_result
- name: check win_ping.ps1 result with data
@@ -60,13 +50,32 @@
- "not win_ping_ps1_result|changed"
- "win_ping_ps1_result.ping == 'bleep'"
#- name: test win_ping with invalid args
# win_ping: arg=invalid
# register: win_ping_ps1_invalid_args_result
#- name: check that win_ping.ps1 with invalid args fails
# assert:
# that:
# - "win_ping_ps1_invalid_args_result|failed"
# - "win_ping_ps1_invalid_args_result.msg"
- name: test win_ping with extra args to verify that v2 module replacer escaping works as expected
win_ping:
data: bloop
a_null: null
a_boolean: true
another_boolean: false
a_number: 299792458
another_number: 22.7
yet_another_number: 6.022e23
a_string: |
it's magic
"@'
'@"
an_array:
- first
- 2
- 3.0
an_object:
- the_thing: the_value
- the_other_thing: 0
- the_list_of_things: [1, 2, 3, 5]
register: win_ping_extra_args_result
- name: check that win_ping with extra args succeeds and ignores everything except data
assert:
that:
- "not win_ping_extra_args_result|failed"
- "not win_ping_extra_args_result|changed"
- "win_ping_extra_args_result.ping == 'bloop'"

View File

@@ -72,7 +72,7 @@
- "not unknown_result|changed"
- name: run a command that takes longer than 60 seconds
raw: PowerShell -Command Start-Sleep -s 75
raw: Start-Sleep -s 75
register: sleep_command
- name: assert that the sleep command ran
@@ -83,3 +83,12 @@
- "not sleep_command.stderr"
- "not sleep_command|failed"
- "not sleep_command|changed"
- name: run a raw command with key=value arguments
raw: echo wwe=raw
register: raw_result
- name: make sure raw is really raw and not removing key=value arguments
assert:
that:
- "raw_result.stdout_lines[0] == 'wwe=raw'"

View File

@@ -3,3 +3,4 @@
# Parameters to pass to test scripts.
test_win_script_value: VaLuE
test_win_script_splat: "@{This='THIS'; That='THAT'; Other='OTHER'}"
test_win_script_filename: "C:/Users/{{ansible_ssh_user}}/testing_win_script.txt"

View File

@@ -0,0 +1,2 @@
@ECHO OFF
ECHO We can even run a batch file with cmd extension!

View File

@@ -0,0 +1,3 @@
# Test script to create a file.
echo $null > $args[0]

View File

@@ -0,0 +1,3 @@
# Test script to remove a file.
Remove-Item $args[0] -Force

View File

@@ -30,24 +30,24 @@
- "not test_script_result|failed"
- "test_script_result|changed"
- name: run test script that takes arguments
script: test_script_with_args.ps1 /this /that /other
- name: run test script that takes arguments including a unicode char
script: test_script_with_args.ps1 /this /that /Ӧther
register: test_script_with_args_result
- name: check that script ran and received arguments
- name: check that script ran and received arguments and returned unicode
assert:
that:
- "test_script_with_args_result.rc == 0"
- "test_script_with_args_result.stdout"
- "test_script_with_args_result.stdout_lines[0] == '/this'"
- "test_script_with_args_result.stdout_lines[1] == '/that'"
- "test_script_with_args_result.stdout_lines[2] == '/other'"
- "test_script_with_args_result.stdout_lines[2] == '/Ӧther'"
- "not test_script_with_args_result.stderr"
- "not test_script_with_args_result|failed"
- "test_script_with_args_result|changed"
- name: run test script that takes parameters passed via splatting
script: test_script_with_splatting.ps1 "@{ This = 'this'; That = '{{ test_win_script_value }}'; Other = 'other'}"
script: test_script_with_splatting.ps1 @{ This = 'this'; That = '{{ test_win_script_value }}'; Other = 'other'}
register: test_script_with_splatting_result
- name: check that script ran and received parameters via splatting
@@ -63,7 +63,7 @@
- "test_script_with_splatting_result|changed"
- name: run test script that takes splatted parameters from a variable
script: test_script_with_splatting.ps1 {{ test_win_script_splat|quote }}
script: test_script_with_splatting.ps1 {{ test_win_script_splat }}
register: test_script_with_splatting2_result
- name: check that script ran and received parameters via splatting from a variable
@@ -92,6 +92,58 @@
- "test_script_with_errors_result|failed"
- "test_script_with_errors_result|changed"
- name: cleanup test file if it exists
raw: Remove-Item "{{test_win_script_filename}}" -Force
ignore_errors: true
- name: run test script that creates a file
script: test_script_creates_file.ps1 "{{test_win_script_filename}}" creates="{{test_win_script_filename}}"
register: test_script_creates_file_result
- name: check that script ran and indicated a change
assert:
that:
- "test_script_creates_file_result.rc == 0"
- "not test_script_creates_file_result.stdout"
- "not test_script_creates_file_result.stderr"
- "not test_script_creates_file_result|failed"
- "test_script_creates_file_result|changed"
- name: run test script that creates a file again
script: test_script_creates_file.ps1 "{{test_win_script_filename}}" creates="{{test_win_script_filename}}"
register: test_script_creates_file_again_result
- name: check that the script did not run since the remote file exists
assert:
that:
- "not test_script_creates_file_again_result|failed"
- "not test_script_creates_file_again_result|changed"
- "test_script_creates_file_again_result|skipped"
- name: run test script that removes a file
script: test_script_removes_file.ps1 "{{test_win_script_filename}}" removes="{{test_win_script_filename}}"
register: test_script_removes_file_result
- name: check that the script ran since the remote file exists
assert:
that:
- "test_script_removes_file_result.rc == 0"
- "not test_script_removes_file_result.stdout"
- "not test_script_removes_file_result.stderr"
- "not test_script_removes_file_result|failed"
- "test_script_removes_file_result|changed"
- name: run test script that removes a file again
script: test_script_removes_file.ps1 "{{test_win_script_filename}}" removes="{{test_win_script_filename}}"
register: test_script_removes_file_again_result
- name: check that the script did not run since the remote file does not exist
assert:
that:
- "not test_script_removes_file_again_result|failed"
- "not test_script_removes_file_again_result|changed"
- "test_script_removes_file_again_result|skipped"
- name: run simple batch file
script: test_script.bat
register: test_batch_result
@@ -105,3 +157,17 @@
- "not test_batch_result.stderr"
- "not test_batch_result|failed"
- "test_batch_result|changed"
- name: run simple batch file with .cmd extension
script: test_script.cmd
register: test_cmd_result
- name: check that batch file with .cmd extension ran
assert:
that:
- "test_cmd_result.rc == 0"
- "test_cmd_result.stdout"
- "'cmd extension' in test_cmd_result.stdout"
- "not test_cmd_result.stderr"
- "not test_cmd_result|failed"
- "test_cmd_result|changed"

View File

@@ -20,7 +20,7 @@
action: setup
register: setup_result
- name: check setup result
- name: check windows setup result
assert:
that:
- "not setup_result|failed"
@@ -38,6 +38,8 @@
- "setup_result.ansible_facts.ansible_interfaces[0]"
- "setup_result.ansible_facts.ansible_interfaces[0].interface_name"
- "setup_result.ansible_facts.ansible_interfaces[0].interface_index"
- "setup_result.ansible_facts.ansible_architecture"
- "setup_result.ansible_facts.ansible_os_name"
- "setup_result.ansible_facts.ansible_powershell_version"
- name: check setup result only when using https

View File

@@ -27,6 +27,12 @@
- "not win_stat_file.stat.isdir"
- "win_stat_file.stat.size > 0"
- "win_stat_file.stat.md5"
- "win_stat_file.stat.extension"
- "win_stat_file.stat.attributes"
- "win_stat_file.stat.owner"
- "win_stat_file.stat.creationtime"
- "win_stat_file.stat.lastaccesstime"
- "win_stat_file.stat.lastwritetime"
- "not win_stat_file|failed"
- "not win_stat_file|changed"
@@ -34,13 +40,19 @@
win_stat: path="C:\Windows\win.ini" get_md5=no
register: win_stat_file_no_md5
- name: check win_stat file result without md
- name: check win_stat file result without md5
assert:
that:
- "win_stat_file_no_md5.stat.exists"
- "not win_stat_file_no_md5.stat.isdir"
- "win_stat_file_no_md5.stat.size > 0"
- "not win_stat_file_no_md5.stat.md5|default('')"
- "win_stat_file_no_md5.stat.extension"
- "win_stat_file_no_md5.stat.attributes"
- "win_stat_file_no_md5.stat.owner"
- "win_stat_file_no_md5.stat.creationtime"
- "win_stat_file_no_md5.stat.lastaccesstime"
- "win_stat_file_no_md5.stat.lastwritetime"
- "not win_stat_file_no_md5|failed"
- "not win_stat_file_no_md5|changed"
@@ -53,6 +65,12 @@
that:
- "win_stat_dir.stat.exists"
- "win_stat_dir.stat.isdir"
- "win_stat_dir.stat.extension == ''"
- "win_stat_dir.stat.attributes"
- "win_stat_dir.stat.owner"
- "win_stat_dir.stat.creationtime"
- "win_stat_dir.stat.lastaccesstime"
- "win_stat_dir.stat.lastwritetime"
- "not win_stat_dir|failed"
- "not win_stat_dir|changed"

View File

@@ -39,13 +39,24 @@
that:
- "template_result.changed == true"
- name: fill in a basic template again
win_template:
src: foo.j2
dest: "{{win_output_dir}}/foo.templated"
register: template_result2
- name: verify that the template was not changed
assert:
that:
- "not template_result2|changed"
# VERIFY CONTENTS
- name: copy known good into place
win_copy: src=foo.txt dest={{win_output_dir}}\foo.txt
win_copy: src=foo.txt dest={{win_output_dir}}\\foo.txt
- name: compare templated file to known good
raw: fc.exe {{win_output_dir}}\foo.templated {{win_output_dir}}\foo.txt
raw: fc.exe {{win_output_dir}}\\foo.templated {{win_output_dir}}\\foo.txt
register: diff_result
- debug: var=diff_result

View File

@@ -51,7 +51,7 @@
- "win_user_missing_query_result.state == 'absent'"
- name: test create user
win_user: name="{{ test_win_user_name }}" password="{{ test_win_user_password }}" groups="Guests"
win_user: name="{{ test_win_user_name }}" password="{{ test_win_user_password }}" fullname="Test User" description="Test user account" groups="Guests"
register: win_user_create_result
- name: check user creation result
@@ -59,7 +59,8 @@
that:
- "win_user_create_result|changed"
- "win_user_create_result.name == '{{ test_win_user_name }}'"
- "win_user_create_result.fullname == '{{ test_win_user_name }}'"
- "win_user_create_result.fullname == 'Test User'"
- "win_user_create_result.description == 'Test user account'"
- "win_user_create_result.path"
- "win_user_create_result.state == 'present'"

View File

@@ -0,0 +1,5 @@
- hosts: testhost
connection: local
gather_facts: yes
roles:
- { role: test_filters }

View File

@@ -7,6 +7,8 @@
connection: local
roles:
- { role: test_force_handlers }
tasks:
- debug: msg="you should see this with --tags=normal"
- name: test force handlers (set to true)
tags: force_true_in_play
@@ -15,7 +17,7 @@
connection: local
force_handlers: True
roles:
- { role: test_force_handlers }
- { role: test_force_handlers, tags: force_true_in_play }
- name: test force handlers (set to false)
@@ -25,4 +27,4 @@
connection: local
force_handlers: False
roles:
- { role: test_force_handlers }
- { role: test_force_handlers, tags: force_false_in_play }

View File

@@ -16,19 +16,25 @@
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
- hosts: lamini
- name: Create overall groups
hosts: lamini
gather_facts: false
tasks:
- debug: var=genus
- name: group by genus
group_by: key={{ genus }}
- name: group by first three letters of genus with key in quotes
group_by: key="{{ genus | truncate(3, true, '') }}"
- name: group by first two letters of genus with key not in quotes
group_by: key={{ genus | truncate(2, true, '') }}
- name: group by genus in uppercase using complex args
group_by: { key: "{{ genus | upper() }}" }
- hosts: vicugna
- name: Vicunga group validation
hosts: vicugna
gather_facts: false
tasks:
- name: verify that only the alpaca is in this group
@@ -36,7 +42,8 @@
- name: set a fact to check that we ran this play
set_fact: genus_vicugna=true
- hosts: lama
- name: Lama group validation
hosts: lama
gather_facts: false
tasks:
- name: verify that only the llama is in this group
@@ -44,7 +51,8 @@
- name: set a fact to check that we ran this play
set_fact: genus_lama=true
- hosts: vic
- name: Vic group validation
hosts: vic
gather_facts: false
tasks:
- name: verify that only the alpaca is in this group
@@ -52,7 +60,8 @@
- name: set a fact to check that we ran this play
set_fact: genus_vic=true
- hosts: lam
- name: Lam group validation
hosts: lam
gather_facts: false
tasks:
- name: verify that only the llama is in this group
@@ -60,7 +69,8 @@
- name: set a fact to check that we ran this play
set_fact: genus_lam=true
- hosts: vi
- name: Vi group validation
hosts: vi
gather_facts: false
tasks:
- name: verify that only the alpaca is in this group
@@ -68,7 +78,8 @@
- name: set a fact to check that we ran this play
set_fact: genus_vi=true
- hosts: la
- name: La group validation
hosts: la
gather_facts: false
tasks:
- name: verify that only the llama is in this group
@@ -76,7 +87,8 @@
- name: set a fact to check that we ran this play
set_fact: genus_la=true
- hosts: VICUGNA
- name: VICUGNA group validation
hosts: VICUGNA
gather_facts: false
tasks:
- name: verify that only the alpaca is in this group
@@ -84,7 +96,8 @@
- name: set a fact to check that we ran this play
set_fact: genus_VICUGNA=true
- hosts: LAMA
- name: LAMA group validation
hosts: LAMA
gather_facts: false
tasks:
- name: verify that only the llama is in this group
@@ -92,19 +105,22 @@
- name: set a fact to check that we ran this play
set_fact: genus_LAMA=true
- hosts: 'genus'
- name: genus group validation (expect skipped)
hosts: 'genus'
gather_facts: false
tasks:
- name: no hosts should match this group
fail: msg="should never get here"
- hosts: alpaca
- name: alpaca validation of groups
hosts: alpaca
gather_facts: false
tasks:
- name: check that alpaca matched all four groups
assert: { that: ["genus_vicugna", "genus_vic", "genus_vi", "genus_VICUGNA"] }
- hosts: llama
- name: llama validation of groups
hosts: llama
gather_facts: false
tasks:
- name: check that llama matched all four groups

View File

@@ -4,7 +4,7 @@
gather_facts: False
connection: local
roles:
- { role: test_handlers_meta }
- { role: test_handlers_meta, tags: ['scenario1'] }
- name: verify final handler was run
hosts: A
@@ -16,6 +16,7 @@
that:
- "not hostvars[inventory_hostname]['handler1_called']"
- "'handler2_called' in hostvars[inventory_hostname]"
tags: ['scenario1']
- name: test handlers
hosts: testgroup

View File

@@ -14,9 +14,9 @@
- { role: test_includes, tags: test_includes }
tasks:
- include: roles/test_includes/tasks/not_a_role_task.yml
- include: roles/test_includes/tasks/empty.yml
- assert:
that:
- "ca == 33000"
- "cb == 33001"
- "cc == 33002"

View File

@@ -36,7 +36,7 @@
- hosts: inven_overridehosts
vars_files:
- "{{ var_dir }}/test_var_precedence.yml"
- "test_var_precedence.yml"
roles:
- role: test_var_precedence_inven_override
foo: bar

View File

@@ -18,6 +18,7 @@
- hosts: windows
gather_facts: false
max_fail_percentage: 1
roles:
- { role: test_win_raw, tags: test_win_raw }
- { role: test_win_script, tags: test_win_script }

View File

@@ -1,5 +0,0 @@
Unit tests
==========
Tests at code level. Should be concise and to the point, and organized by subject.

View File

@@ -1,64 +0,0 @@
# -*- coding: utf-8 -*-
import unittest
from ansible.constants import get_config
import ConfigParser
import random
import string
import os
def random_string(length):
return ''.join(random.choice(string.ascii_uppercase) for x in range(6))
p = ConfigParser.ConfigParser()
p.read(os.path.join(os.path.dirname(__file__), 'ansible.cfg'))
class TestConstants(unittest.TestCase):
#####################################
### get_config unit tests
def test_configfile_and_env_both_set(self):
r = random_string(6)
env_var = 'ANSIBLE_TEST_%s' % r
os.environ[env_var] = r
res = get_config(p, 'defaults', 'test_key', env_var, 'default')
del os.environ[env_var]
assert res == r
def test_configfile_set_env_not_set(self):
r = random_string(6)
env_var = 'ANSIBLE_TEST_%s' % r
assert env_var not in os.environ
res = get_config(p, 'defaults', 'test_key', env_var, 'default')
print res
assert res == 'test_value'
def test_configfile_not_set_env_set(self):
r = random_string(6)
env_var = 'ANSIBLE_TEST_%s' % r
os.environ[env_var] = r
res = get_config(p, 'defaults', 'doesnt_exist', env_var, 'default')
del os.environ[env_var]
assert res == r
def test_configfile_not_set_env_not_set(self):
r = random_string(6)
env_var = 'ANSIBLE_TEST_%s' % r
assert env_var not in os.environ
res = get_config(p, 'defaults', 'doesnt_exist', env_var, 'default')
assert res == 'default'

View File

@@ -1,191 +0,0 @@
'''
Test bundled filters
'''
import os.path
import unittest, tempfile, shutil
from ansible import playbook, inventory, callbacks
import ansible.runner.filter_plugins.core
import ansible.runner.filter_plugins.mathstuff
INVENTORY = inventory.Inventory(['localhost'])
BOOK = '''
- hosts: localhost
vars:
var: { a: [1,2,3] }
tasks:
- template: src=%s dest=%s
'''
SRC = '''
-
{{ var|to_json }}
-
{{ var|to_nice_json }}
-
{{ var|to_yaml }}
-
{{ var|to_nice_yaml }}
'''
DEST = '''
-
{"a": [1, 2, 3]}
-
{
"a": [
1,
2,
3
]
}
-
a: [1, 2, 3]
-
a:
- 1
- 2
- 3
'''
class TestFilters(unittest.TestCase):
def setUp(self):
self.tmpdir = tempfile.mkdtemp(dir='/tmp')
def tearDown(self):
shutil.rmtree(self.tmpdir)
def temp(self, name, data=''):
'''write a temporary file and return the name'''
name = self.tmpdir + '/' + name
with open(name, 'w') as f:
f.write(data)
return name
def test_bool_none(self):
a = ansible.runner.filter_plugins.core.bool(None)
assert a == None
def test_bool_true(self):
a = ansible.runner.filter_plugins.core.bool(True)
assert a == True
def test_bool_yes(self):
a = ansible.runner.filter_plugins.core.bool('Yes')
assert a == True
def test_bool_no(self):
a = ansible.runner.filter_plugins.core.bool('Foo')
assert a == False
def test_quotes(self):
a = ansible.runner.filter_plugins.core.quote('ls | wc -l')
assert a == "'ls | wc -l'"
def test_fileglob(self):
pathname = os.path.join(os.path.dirname(__file__), '*')
a = ansible.runner.filter_plugins.core.fileglob(pathname)
assert __file__ in a
def test_regex(self):
a = ansible.runner.filter_plugins.core.regex('ansible', 'ansible',
match_type='findall')
assert a == True
def test_match_case_sensitive(self):
a = ansible.runner.filter_plugins.core.match('ansible', 'ansible')
assert a == True
def test_match_case_insensitive(self):
a = ansible.runner.filter_plugins.core.match('ANSIBLE', 'ansible',
True)
assert a == True
def test_match_no_match(self):
a = ansible.runner.filter_plugins.core.match(' ansible', 'ansible')
assert a == False
def test_search_case_sensitive(self):
a = ansible.runner.filter_plugins.core.search(' ansible ', 'ansible')
assert a == True
def test_search_case_insensitive(self):
a = ansible.runner.filter_plugins.core.search(' ANSIBLE ', 'ansible',
True)
assert a == True
def test_regex_replace_case_sensitive(self):
a = ansible.runner.filter_plugins.core.regex_replace('ansible', '^a.*i(.*)$',
'a\\1')
assert a == 'able'
def test_regex_replace_case_insensitive(self):
a = ansible.runner.filter_plugins.core.regex_replace('ansible', '^A.*I(.*)$',
'a\\1', True)
assert a == 'able'
def test_regex_replace_no_match(self):
a = ansible.runner.filter_plugins.core.regex_replace('ansible', '^b.*i(.*)$',
'a\\1')
assert a == 'ansible'
def test_to_uuid(self):
a = ansible.runner.filter_plugins.core.to_uuid('example.com')
assert a == 'ae780c3a-a3ab-53c2-bfb4-098da300b3fe'
#def test_filters(self):
# this test is pretty low level using a playbook, hence I am disabling it for now -- MPD.
#return
#src = self.temp('src.j2', SRC)
#dest = self.temp('dest.txt')
#book = self.temp('book', BOOK % (src, dest))
#playbook.PlayBook(
# playbook = book,
# inventory = INVENTORY,
# transport = 'local',
# callbacks = callbacks.PlaybookCallbacks(),
# runner_callbacks = callbacks.DefaultRunnerCallbacks(),
# stats = callbacks.AggregateStats(),
#).run()
#out = open(dest).read()
#self.assertEqual(DEST, out)
def test_version_compare(self):
self.assertTrue(ansible.runner.filter_plugins.core.version_compare(0, 1.1, 'lt', False))
self.assertTrue(ansible.runner.filter_plugins.core.version_compare(1.1, 1.2, '<'))
self.assertTrue(ansible.runner.filter_plugins.core.version_compare(1.2, 1.2, '=='))
self.assertTrue(ansible.runner.filter_plugins.core.version_compare(1.2, 1.2, '='))
self.assertTrue(ansible.runner.filter_plugins.core.version_compare(1.2, 1.2, 'eq'))
self.assertTrue(ansible.runner.filter_plugins.core.version_compare(1.3, 1.2, 'gt'))
self.assertTrue(ansible.runner.filter_plugins.core.version_compare(1.3, 1.2, '>'))
self.assertTrue(ansible.runner.filter_plugins.core.version_compare(1.3, 1.2, 'ne'))
self.assertTrue(ansible.runner.filter_plugins.core.version_compare(1.3, 1.2, '!='))
self.assertTrue(ansible.runner.filter_plugins.core.version_compare(1.3, 1.2, '<>'))
self.assertTrue(ansible.runner.filter_plugins.core.version_compare(1.1, 1.1, 'ge'))
self.assertTrue(ansible.runner.filter_plugins.core.version_compare(1.2, 1.1, '>='))
self.assertTrue(ansible.runner.filter_plugins.core.version_compare(1.1, 1.1, 'le'))
self.assertTrue(ansible.runner.filter_plugins.core.version_compare(1.0, 1.1, '<='))
self.assertTrue(ansible.runner.filter_plugins.core.version_compare('12.04', 12, 'ge'))
def test_min(self):
a = ansible.runner.filter_plugins.mathstuff.min([3, 2, 5, 4])
assert a == 2
def test_max(self):
a = ansible.runner.filter_plugins.mathstuff.max([3, 2, 5, 4])
assert a == 5

View File

@@ -1,510 +0,0 @@
import os
import unittest
from nose.tools import raises
from ansible import errors
from ansible.inventory import Inventory
class TestInventory(unittest.TestCase):
    """Exercise ansible.inventory.Inventory against INI files, host-list
    strings, executable inventory scripts and inventory directories.

    Fixtures live under inventory_test_data/ relative to the CWD at test
    time (legacy, pre-v2 Ansible code; Python 2 syntax throughout).
    """

    def setUp(self):
        self.cwd = os.getcwd()
        self.test_dir = os.path.join(self.cwd, 'inventory_test_data')

        self.inventory_file = os.path.join(self.test_dir, 'simple_hosts')
        self.large_range_inventory_file = os.path.join(self.test_dir, 'large_range')
        self.complex_inventory_file = os.path.join(self.test_dir, 'complex_hosts')
        self.inventory_script = os.path.join(self.test_dir, 'inventory_api.py')
        self.inventory_dir = os.path.join(self.test_dir, 'inventory_dir')

        # presumably the file must be executable to be treated as a script
        # inventory rather than a static file -- restored in tearDown
        os.chmod(self.inventory_script, 0755)

    def tearDown(self):
        os.chmod(self.inventory_script, 0644)

    def compare(self, left, right, sort=True):
        # helper: assert equality, order-insensitively unless sort=False
        if sort:
            left = sorted(left)
            right = sorted(right)
        print left
        print right
        assert left == right

    # --- factory helpers for the various inventory fixtures ---

    def empty_inventory(self):
        return Inventory(None)

    def simple_inventory(self):
        return Inventory(self.inventory_file)

    def large_range_inventory(self):
        return Inventory(self.large_range_inventory_file)

    def script_inventory(self):
        return Inventory(self.inventory_script)

    def complex_inventory(self):
        return Inventory(self.complex_inventory_file)

    def dir_inventory(self):
        return Inventory(self.inventory_dir)

    # every host defined by the simple_hosts fixture
    all_simple_hosts=['jupiter', 'saturn', 'zeus', 'hera',
                      'cerberus001','cerberus002','cerberus003',
                      'cottus99', 'cottus100',
                      'poseidon', 'thor', 'odin', 'loki',
                      'thrudgelmir0', 'thrudgelmir1', 'thrudgelmir2',
                      'thrudgelmir3', 'thrudgelmir4', 'thrudgelmir5',
                      'Hotep-a', 'Hotep-b', 'Hotep-c',
                      'BastC', 'BastD', 'neptun', 'goldorak', ]

    #####################################
    ### Empty inventory format tests

    def test_empty(self):
        inventory = self.empty_inventory()
        hosts = inventory.list_hosts()
        self.assertEqual(hosts, [])

    #####################################
    ### Simple inventory format tests

    def test_simple(self):
        inventory = self.simple_inventory()
        hosts = inventory.list_hosts()
        self.assertEqual(sorted(hosts), sorted(self.all_simple_hosts))

    def test_simple_all(self):
        inventory = self.simple_inventory()
        hosts = inventory.list_hosts('all')
        self.assertEqual(sorted(hosts), sorted(self.all_simple_hosts))

    def test_get_hosts(self):
        # excluding a host that is not present changes nothing
        inventory = Inventory('127.0.0.1,192.168.1.1')
        hosts = inventory.get_hosts('!10.0.0.1')
        hosts_all = inventory.get_hosts('all')
        self.assertEqual(sorted(hosts), sorted(hosts_all))

    def test_no_src(self):
        # a comma-separated host-list inventory has no source file
        inventory = Inventory('127.0.0.1,')
        self.assertEqual(inventory.src(), None)

    def test_simple_norse(self):
        inventory = self.simple_inventory()
        hosts = inventory.list_hosts("norse")
        expected_hosts=['thor', 'odin', 'loki']
        assert sorted(hosts) == sorted(expected_hosts)

    def test_simple_ungrouped(self):
        inventory = self.simple_inventory()
        hosts = inventory.list_hosts("ungrouped")
        expected_hosts=['jupiter', 'saturn',
                        'thrudgelmir0', 'thrudgelmir1', 'thrudgelmir2',
                        'thrudgelmir3', 'thrudgelmir4', 'thrudgelmir5']
        assert sorted(hosts) == sorted(expected_hosts)

    def test_simple_combined(self):
        # ':' unions two group patterns
        inventory = self.simple_inventory()
        hosts = inventory.list_hosts("norse:greek")
        expected_hosts=['zeus', 'hera', 'poseidon',
                        'cerberus001','cerberus002','cerberus003',
                        'cottus99','cottus100',
                        'thor', 'odin', 'loki']
        assert sorted(hosts) == sorted(expected_hosts)

    def test_simple_restrict(self):
        inventory = self.simple_inventory()
        restricted_hosts = ['hera', 'poseidon', 'thor']
        expected_hosts=['zeus', 'hera', 'poseidon',
                        'cerberus001','cerberus002','cerberus003',
                        'cottus99', 'cottus100',
                        'thor', 'odin', 'loki']
        # while restricted, only the intersection is listed...
        inventory.restrict_to(restricted_hosts)
        hosts = inventory.list_hosts("norse:greek")
        assert sorted(hosts) == sorted(restricted_hosts)
        # ...and lifting the restriction restores the full pattern match
        inventory.lift_restriction()
        hosts = inventory.list_hosts("norse:greek")
        assert sorted(hosts) == sorted(expected_hosts)

    def test_simple_string_ipv4(self):
        inventory = Inventory('127.0.0.1,192.168.1.1')
        hosts = inventory.list_hosts()
        self.assertEqual(sorted(hosts), sorted(['127.0.0.1','192.168.1.1']))

    def test_simple_string_ipv4_port(self):
        # ':port' suffix is stripped from the host name
        inventory = Inventory('127.0.0.1:2222,192.168.1.1')
        hosts = inventory.list_hosts()
        self.assertEqual(sorted(hosts), sorted(['127.0.0.1','192.168.1.1']))

    def test_simple_string_ipv4_vars(self):
        # ...and surfaces as the ansible_ssh_port host variable
        inventory = Inventory('127.0.0.1:2222,192.168.1.1')
        var = inventory.get_variables('127.0.0.1')
        self.assertEqual(var['ansible_ssh_port'], 2222)

    def test_simple_string_ipv6(self):
        inventory = Inventory('FE80:EF45::12:1,192.168.1.1')
        hosts = inventory.list_hosts()
        self.assertEqual(sorted(hosts), sorted(['FE80:EF45::12:1','192.168.1.1']))

    def test_simple_string_ipv6_port(self):
        # IPv6 addresses take a port via the [addr]:port bracket syntax
        inventory = Inventory('[FE80:EF45::12:1]:2222,192.168.1.1')
        hosts = inventory.list_hosts()
        self.assertEqual(sorted(hosts), sorted(['FE80:EF45::12:1','192.168.1.1']))

    def test_simple_string_ipv6_vars(self):
        inventory = Inventory('[FE80:EF45::12:1]:2222,192.168.1.1')
        var = inventory.get_variables('FE80:EF45::12:1')
        self.assertEqual(var['ansible_ssh_port'], 2222)

    def test_simple_string_fqdn(self):
        inventory = Inventory('foo.example.com,bar.example.com')
        hosts = inventory.list_hosts()
        self.assertEqual(sorted(hosts), sorted(['foo.example.com','bar.example.com']))

    def test_simple_string_fqdn_port(self):
        inventory = Inventory('foo.example.com:2222,bar.example.com')
        hosts = inventory.list_hosts()
        self.assertEqual(sorted(hosts), sorted(['foo.example.com','bar.example.com']))

    def test_simple_string_fqdn_vars(self):
        inventory = Inventory('foo.example.com:2222,bar.example.com')
        var = inventory.get_variables('foo.example.com')
        self.assertEqual(var['ansible_ssh_port'], 2222)

    def test_simple_vars(self):
        inventory = self.simple_inventory()
        vars = inventory.get_variables('thor')
        assert vars == {'group_names': ['norse'],
                        'inventory_hostname': 'thor',
                        'inventory_hostname_short': 'thor'}

    def test_simple_port(self):
        inventory = self.simple_inventory()
        vars = inventory.get_variables('hera')
        expected = { 'ansible_ssh_port': 3000,
                     'group_names': ['greek'],
                     'inventory_hostname': 'hera',
                     'inventory_hostname_short': 'hera' }
        assert vars == expected

    def test_large_range(self):
        # the fixture expands a host range into bob000..bob142
        inventory = self.large_range_inventory()
        hosts = inventory.list_hosts()
        self.assertEqual(sorted(hosts), sorted('bob%03i' %i for i in range(0, 143)))

    def test_subset(self):
        inventory = self.simple_inventory()
        # subset patterns may be separated by ';' or ','
        inventory.subset('odin;thor,loki')
        self.assertEqual(sorted(inventory.list_hosts()), sorted(['thor','odin','loki']))

    def test_subset_range(self):
        inventory = self.simple_inventory()
        inventory.subset('greek[0-2];norse[0]')
        self.assertEqual(sorted(inventory.list_hosts()), sorted(['zeus','hera','thor']))

    def test_subet_range_empty_group(self):
        # (sic: "subet" typo is in the original method name)
        inventory = self.simple_inventory()
        inventory.subset('missing[0]')
        self.assertEqual(sorted(inventory.list_hosts()), sorted([]))

    def test_subset_filename(self):
        inventory = self.simple_inventory()
        # a leading '@' reads the subset patterns from a file
        inventory.subset('@' + os.path.join(self.test_dir, 'restrict_pattern'))
        self.assertEqual(sorted(inventory.list_hosts()), sorted(['thor','odin']))

    def test_vars_yaml_extension(self):
        # host/group vars files may use a .yaml/.yml filename extension
        inventory = self.simple_inventory()
        vars = inventory.get_variables('goldorak')
        assert vars['YAML_FILENAME_EXTENSIONS_TEST']

    @raises(errors.AnsibleError)
    def testinvalid_entry(self):
        # a bare string with no comma is not a valid host-list inventory
        Inventory('1234')

    ###################################################
    ### INI file advanced tests

    def test_complex_vars(self):
        inventory = self.complex_inventory()
        vars = inventory.get_variables('rtp_a')
        print vars
        expected = dict(
            a=1, b=2, c=3, d=10002, e=10003, f='10004 != 10005',
            g=' g ', h=' h ', i="' i \"", j='" j',
            k=[ 'k1', 'k2' ],
            rga=1, rgb=2, rgc=3,
            inventory_hostname='rtp_a', inventory_hostname_short='rtp_a',
            group_names=[ 'eastcoast', 'nc', 'redundantgroup', 'redundantgroup2', 'redundantgroup3', 'rtp', 'us' ]
        )
        print vars
        print expected
        assert vars == expected

    def test_complex_group_names(self):
        inventory = self.complex_inventory()
        tests = {
            'host1': [ 'role1', 'role3' ],
            'host2': [ 'role1', 'role2' ],
            'host3': [ 'role2', 'role3' ]
        }
        for host, roles in tests.iteritems():
            group_names = inventory.get_variables(host)['group_names']
            assert sorted(group_names) == sorted(roles)

    def test_complex_exclude(self):
        # '!pattern' excludes matching hosts from the union
        inventory = self.complex_inventory()
        hosts = inventory.list_hosts("nc:florida:!triangle:!orlando")
        expected_hosts = ['miami', 'rtp_a', 'rtp_b', 'rtp_c']
        print "HOSTS=%s" % sorted(hosts)
        print "EXPECTED=%s" % sorted(expected_hosts)
        assert sorted(hosts) == sorted(expected_hosts)

    def test_regex_exclude(self):
        # '~pattern' selects hosts by regular expression
        inventory = self.complex_inventory()
        hosts = inventory.list_hosts("~rtp_[ac]")
        expected_hosts = ['rtp_a', 'rtp_c']
        print "HOSTS=%s" % sorted(hosts)
        print "EXPECTED=%s" % sorted(expected_hosts)
        assert sorted(hosts) == sorted(expected_hosts)

    def test_regex_grouping(self):
        inventory = self.simple_inventory()
        hosts = inventory.list_hosts("~(cer[a-z]|berc)(erus00[13])")
        expected_hosts = ['cerberus001', 'cerberus003']
        print "HOSTS=%s" % sorted(hosts)
        print "EXPECTED=%s" % sorted(expected_hosts)
        assert sorted(hosts) == sorted(expected_hosts)

    def test_complex_enumeration(self):
        expected1 = ['rtp_b']
        expected2 = ['rtp_a', 'rtp_b']
        expected3 = ['rtp_a', 'rtp_b', 'rtp_c', 'tri_a', 'tri_b', 'tri_c']
        expected4 = ['rtp_b', 'orlando' ]
        expected5 = ['blade-a-1']

        inventory = self.complex_inventory()
        # group[N] / group[N-M] slice a group's hosts; order is significant,
        # hence sort=False on every comparison below
        hosts = inventory.list_hosts("nc[1]")
        self.compare(hosts, expected1, sort=False)
        hosts = inventory.list_hosts("nc[0-2]")
        self.compare(hosts, expected2, sort=False)
        hosts = inventory.list_hosts("nc[0-99999]")
        self.compare(hosts, expected3, sort=False)
        hosts = inventory.list_hosts("nc[1-2]:florida[0-1]")
        self.compare(hosts, expected4, sort=False)
        hosts = inventory.list_hosts("blade-a-1")
        self.compare(hosts, expected5, sort=False)

    def test_complex_intersect(self):
        # '&group' intersects, '!host' excludes
        inventory = self.complex_inventory()
        hosts = inventory.list_hosts("nc:&redundantgroup:!rtp_c")
        self.compare(hosts, ['rtp_a'])
        hosts = inventory.list_hosts("nc:&triangle:!tri_c")
        self.compare(hosts, ['tri_a', 'tri_b'])

    # --- malformed host-range fixtures must raise AnsibleError ---

    @raises(errors.AnsibleError)
    def test_invalid_range(self):
        Inventory(os.path.join(self.test_dir, 'inventory','test_incorrect_range'))

    @raises(errors.AnsibleError)
    def test_missing_end(self):
        Inventory(os.path.join(self.test_dir, 'inventory','test_missing_end'))

    @raises(errors.AnsibleError)
    def test_incorrect_format(self):
        Inventory(os.path.join(self.test_dir, 'inventory','test_incorrect_format'))

    @raises(errors.AnsibleError)
    def test_alpha_end_before_beg(self):
        Inventory(os.path.join(self.test_dir, 'inventory','test_alpha_end_before_beg'))

    def test_combined_range(self):
        # numeric and alpha ranges may be combined in one host pattern
        i = Inventory(os.path.join(self.test_dir, 'inventory','test_combined_range'))
        hosts = i.list_hosts('test')
        expected_hosts=['host1A','host2A','host1B','host2B']
        assert sorted(hosts) == sorted(expected_hosts)

    def test_leading_range(self):
        # a range may appear at the start of the host name
        i = Inventory(os.path.join(self.test_dir, 'inventory','test_leading_range'))
        hosts = i.list_hosts('test')
        expected_hosts=['1.host','2.host','A.host','B.host']
        assert sorted(hosts) == sorted(expected_hosts)

        hosts2 = i.list_hosts('test2')
        expected_hosts2=['1.host','2.host','3.host']
        assert sorted(hosts2) == sorted(expected_hosts2)

    ###################################################
    ### Inventory API tests

    def test_script(self):
        inventory = self.script_inventory()
        hosts = inventory.list_hosts()
        expected_hosts=['jupiter', 'saturn', 'zeus', 'hera', 'poseidon', 'thor', 'odin', 'loki']
        print "Expected: %s"%(expected_hosts)
        print "Got : %s"%(hosts)
        assert sorted(hosts) == sorted(expected_hosts)

    def test_script_all(self):
        inventory = self.script_inventory()
        hosts = inventory.list_hosts('all')
        expected_hosts=['jupiter', 'saturn', 'zeus', 'hera', 'poseidon', 'thor', 'odin', 'loki']
        assert sorted(hosts) == sorted(expected_hosts)

    def test_script_norse(self):
        inventory = self.script_inventory()
        hosts = inventory.list_hosts("norse")
        expected_hosts=['thor', 'odin', 'loki']
        assert sorted(hosts) == sorted(expected_hosts)

    def test_script_combined(self):
        inventory = self.script_inventory()
        hosts = inventory.list_hosts("norse:greek")
        expected_hosts=['zeus', 'hera', 'poseidon', 'thor', 'odin', 'loki']
        assert sorted(hosts) == sorted(expected_hosts)

    def test_script_restrict(self):
        inventory = self.script_inventory()
        restricted_hosts = ['hera', 'poseidon', 'thor']
        expected_hosts=['zeus', 'hera', 'poseidon', 'thor', 'odin', 'loki']
        inventory.restrict_to(restricted_hosts)
        hosts = inventory.list_hosts("norse:greek")
        assert sorted(hosts) == sorted(restricted_hosts)
        inventory.lift_restriction()
        hosts = inventory.list_hosts("norse:greek")
        assert sorted(hosts) == sorted(expected_hosts)

    def test_script_vars(self):
        # script inventories can attach arbitrary host vars ('hammer')
        inventory = self.script_inventory()
        vars = inventory.get_variables('thor')
        print "VARS=%s" % vars
        assert vars == {'hammer':True,
                        'group_names': ['norse'],
                        'inventory_hostname': 'thor',
                        'inventory_hostname_short': 'thor'}

    def test_hosts_list(self):
        # Test the case when playbook 'hosts' var is a list.
        inventory = self.script_inventory()
        host_names = sorted(['thor', 'loki', 'odin'])  # Not sure if sorting is in the contract or not
        actual_hosts = inventory.get_hosts(host_names)
        actual_host_names = [host.name for host in actual_hosts]
        assert host_names == actual_host_names

    def test_script_multiple_groups(self):
        inventory = self.script_inventory()
        vars = inventory.get_variables('zeus')
        print "VARS=%s" % vars
        assert vars == {'inventory_hostname': 'zeus',
                        'inventory_hostname_short': 'zeus',
                        'group_names': ['greek', 'major-god']}

    def test_allows_equals_sign_in_var(self):
        # values containing '=' (e.g. base64) must not be truncated
        inventory = self.simple_inventory()
        auth = inventory.get_variables('neptun')['auth']
        assert auth == 'YWRtaW46YWRtaW4='

    def test_dir_inventory(self):
        inventory = self.dir_inventory()
        host_vars = inventory.get_variables('zeus')
        expected_vars = {'inventory_hostname': 'zeus',
                         'inventory_hostname_short': 'zeus',
                         'group_names': ['greek', 'major-god'],
                         'var_a': '3#4'}
        print "HOST VARS=%s" % host_vars
        print "EXPECTED VARS=%s" % expected_vars
        assert host_vars == expected_vars

    def test_dir_inventory_multiple_groups(self):
        inventory = self.dir_inventory()
        group_greek = inventory.get_hosts('greek')
        actual_host_names = [host.name for host in group_greek]
        print "greek : %s " % actual_host_names
        assert actual_host_names == ['zeus', 'morpheus']

    def test_dir_inventory_skip_extension(self):
        # files with ignored extensions in an inventory dir are not hosts
        inventory = self.dir_inventory()
        assert 'skipme' not in [h.name for h in inventory.get_hosts()]

    def test_dir_inventory_group_hosts(self):
        inventory = self.dir_inventory()
        expected_groups = {'all': ['morpheus', 'thor', 'zeus'],
                           'major-god': ['thor', 'zeus'],
                           'minor-god': ['morpheus'],
                           'norse': ['thor'],
                           'greek': ['morpheus', 'zeus'],
                           'ungrouped': []}
        actual_groups = {}
        for group in inventory.get_groups():
            actual_groups[group.name] = sorted([h.name for h in group.get_hosts()])
            print "INVENTORY groups[%s].hosts=%s" % (group.name, actual_groups[group.name])
            print "EXPECTED groups[%s].hosts=%s" % (group.name, expected_groups[group.name])
        assert actual_groups == expected_groups

    def test_dir_inventory_groups_for_host(self):
        inventory = self.dir_inventory()
        expected_groups_for_host = {'morpheus': ['all', 'greek', 'minor-god'],
                                    'thor': ['all', 'major-god', 'norse'],
                                    'zeus': ['all', 'greek', 'major-god']}
        actual_groups_for_host = {}
        for (host, expected) in expected_groups_for_host.iteritems():
            groups = inventory.groups_for_host(host)
            names = sorted([g.name for g in groups])
            actual_groups_for_host[host] = names
            print "INVENTORY groups_for_host(%s)=%s" % (host, names)
            print "EXPECTED groups_for_host(%s)=%s" % (host, expected)
        assert actual_groups_for_host == expected_groups_for_host

    def test_dir_inventory_groups_list(self):
        inventory = self.dir_inventory()
        inventory_groups = inventory.groups_list()
        expected_groups = {'all': ['morpheus', 'thor', 'zeus'],
                           'major-god': ['thor', 'zeus'],
                           'minor-god': ['morpheus'],
                           'norse': ['thor'],
                           'greek': ['morpheus', 'zeus'],
                           'ungrouped': []}
        for (name, expected_hosts) in expected_groups.iteritems():
            # normalise ordering in-place before the dict comparison
            inventory_groups[name] = sorted(inventory_groups.get(name, []))
            print "INVENTORY groups_list['%s']=%s" % (name, inventory_groups[name])
            print "EXPECTED groups_list['%s']=%s" % (name, expected_hosts)
        assert inventory_groups == expected_groups

View File

@@ -1,334 +0,0 @@
import os
import tempfile
import unittest
from nose.tools import raises
from nose.tools import timed
from ansible import errors
from ansible.module_common import ModuleReplacer
from ansible.module_utils.basic import heuristic_log_sanitize
from ansible.utils import checksum as utils_checksum
TEST_MODULE_DATA = """
from ansible.module_utils.basic import *
def get_module():
return AnsibleModule(
argument_spec = dict(),
supports_check_mode = True,
no_log = True,
)
get_module()
"""
class TestModuleUtilsBasic(unittest.TestCase):
    """Tests for AnsibleModule.run_command(), driven through a real
    AnsibleModule built from the generated TEST_MODULE_DATA source."""

    def cleanup_temp_file(self, fd, path):
        # best-effort removal; ignore errors if already closed/removed
        try:
            os.close(fd)
            os.remove(path)
        except:
            pass

    def cleanup_temp_dir(self, path):
        try:
            os.rmdir(path)
        except:
            pass

    def setUp(self):
        # create a temporary file for the test module
        # we're about to generate
        self.tmp_fd, self.tmp_path = tempfile.mkstemp()
        os.write(self.tmp_fd, TEST_MODULE_DATA)

        # template the module code and eval it
        module_data, module_style, shebang = ModuleReplacer().modify_module(self.tmp_path, {}, "", {})

        d = {}
        exec(module_data, d, d)
        # get_module() is defined by TEST_MODULE_DATA and returns the
        # constructed AnsibleModule
        self.module = d['get_module']()

        # module_utils/basic.py screws with CWD, let's save it and reset
        self.cwd = os.getcwd()

    def tearDown(self):
        self.cleanup_temp_file(self.tmp_fd, self.tmp_path)
        # Reset CWD back to what it was before basic.py changed it
        os.chdir(self.cwd)

    #################################################################################
    # run_command() tests

    # test run_command with a string command
    def test_run_command_string(self):
        (rc, out, err) = self.module.run_command("/bin/echo -n 'foo bar'")
        self.assertEqual(rc, 0)
        self.assertEqual(out, 'foo bar')
        (rc, out, err) = self.module.run_command("/bin/echo -n 'foo bar'", use_unsafe_shell=True)
        self.assertEqual(rc, 0)
        self.assertEqual(out, 'foo bar')

    # test run_command with an array of args (with both use_unsafe_shell=True|False)
    def test_run_command_args(self):
        (rc, out, err) = self.module.run_command(['/bin/echo', '-n', "foo bar"])
        self.assertEqual(rc, 0)
        self.assertEqual(out, 'foo bar')
        (rc, out, err) = self.module.run_command(['/bin/echo', '-n', "foo bar"], use_unsafe_shell=True)
        self.assertEqual(rc, 0)
        self.assertEqual(out, 'foo bar')

    # test run_command with leading environment variables
    # (expected to be rejected -- run_command exits -- unless
    #  use_unsafe_shell=True, see the test after these two)
    @raises(SystemExit)
    def test_run_command_string_with_env_variables(self):
        self.module.run_command('FOO=bar /bin/echo -n "foo bar"')

    @raises(SystemExit)
    def test_run_command_args_with_env_variables(self):
        self.module.run_command(['FOO=bar', '/bin/echo', '-n', 'foo bar'])

    def test_run_command_string_unsafe_with_env_variables(self):
        (rc, out, err) = self.module.run_command('FOO=bar /bin/echo -n "foo bar"', use_unsafe_shell=True)
        self.assertEqual(rc, 0)
        self.assertEqual(out, 'foo bar')

    # test run_command with a command pipe (with both use_unsafe_shell=True|False)
    def test_run_command_string_unsafe_with_pipe(self):
        (rc, out, err) = self.module.run_command('echo "foo bar" | cat', use_unsafe_shell=True)
        self.assertEqual(rc, 0)
        self.assertEqual(out, 'foo bar\n')

    # test run_command with a shell redirect in (with both use_unsafe_shell=True|False)
    def test_run_command_string_unsafe_with_redirect_in(self):
        (rc, out, err) = self.module.run_command('cat << EOF\nfoo bar\nEOF', use_unsafe_shell=True)
        self.assertEqual(rc, 0)
        self.assertEqual(out, 'foo bar\n')

    # test run_command with a shell redirect out (with both use_unsafe_shell=True|False)
    def test_run_command_string_unsafe_with_redirect_out(self):
        tmp_fd, tmp_path = tempfile.mkstemp()
        try:
            (rc, out, err) = self.module.run_command('echo "foo bar" > %s' % tmp_path, use_unsafe_shell=True)
            self.assertEqual(rc, 0)
            self.assertTrue(os.path.exists(tmp_path))
            # fixed checksum of the file containing "foo bar\n"
            checksum = utils_checksum(tmp_path)
            self.assertEqual(checksum, 'd53a205a336e07cf9eac45471b3870f9489288ec')
        except:
            raise
        finally:
            self.cleanup_temp_file(tmp_fd, tmp_path)

    # test run_command with a double shell redirect out (append) (with both use_unsafe_shell=True|False)
    def test_run_command_string_unsafe_with_double_redirect_out(self):
        tmp_fd, tmp_path = tempfile.mkstemp()
        try:
            (rc, out, err) = self.module.run_command('echo "foo bar" >> %s' % tmp_path, use_unsafe_shell=True)
            self.assertEqual(rc, 0)
            self.assertTrue(os.path.exists(tmp_path))
            checksum = utils_checksum(tmp_path)
            self.assertEqual(checksum, 'd53a205a336e07cf9eac45471b3870f9489288ec')
        except:
            raise
        finally:
            self.cleanup_temp_file(tmp_fd, tmp_path)

    # test run_command with data
    def test_run_command_string_with_data(self):
        (rc, out, err) = self.module.run_command('cat', data='foo bar')
        self.assertEqual(rc, 0)
        # note: a trailing newline appears on the output here, unlike the
        # binary_data case below
        self.assertEqual(out, 'foo bar\n')

    # test run_command with binary data
    def test_run_command_string_with_binary_data(self):
        (rc, out, err) = self.module.run_command('cat', data='\x41\x42\x43\x44', binary_data=True)
        self.assertEqual(rc, 0)
        self.assertEqual(out, 'ABCD')

    # test run_command with a cwd set
    def test_run_command_string_with_cwd(self):
        tmp_path = tempfile.mkdtemp()
        try:
            (rc, out, err) = self.module.run_command('pwd', cwd=tmp_path)
            self.assertEqual(rc, 0)
            self.assertTrue(os.path.exists(tmp_path))
            # compare against realpath(); presumably mkdtemp() may return a
            # symlinked path on some platforms -- TODO confirm
            self.assertEqual(out.strip(), os.path.realpath(tmp_path))
        except:
            raise
        finally:
            self.cleanup_temp_dir(tmp_path)
class TestModuleUtilsBasicHelpers(unittest.TestCase):
    ''' Test some implementation details of AnsibleModule

    Some pieces of AnsibleModule are implementation details but they have
    potential cornercases that we need to check. Go ahead and test at
    this level that the functions are behaving even though their API may
    change and we'd have to rewrite these tests so that we know that we
    need to check for those problems in any rewrite.

    In the future we might want to restructure higher level code to be
    friendlier to unittests so that we can test at the level that the public
    is interacting with the APIs.
    '''

    # number of host records used by the "many" speed-test datasets
    MANY_RECORDS = 7000
    # secrets embedded in URL-shaped and ssh-location-shaped strings;
    # the 'pas:word' password deliberately contains a ':' to stress the
    # sanitizer's heuristics
    URL_SECRET = 'http://username:pas:word@foo.com/data'
    SSH_SECRET = 'username:pas:word@foo.com/data'

    def cleanup_temp_file(self, fd, path):
        # best-effort cleanup; ignore already-closed/removed errors
        try:
            os.close(fd)
            os.remove(path)
        except:
            pass

    def cleanup_temp_dir(self, path):
        try:
            os.rmdir(path)
        except:
            pass

    def _gen_data(self, records, per_rec, top_level, secret_text):
        """Build a hostvars-style nested dict holding `records` fake hosts.

        per_rec   -- if true, embed secret_text inside every host record
        top_level -- if true, embed secret_text once at the top level
        """
        hostvars = {'hostvars': {}}
        for i in range(1, records, 1):
            host_facts = {'host%s' % i:
                          {'pstack':
                           {'running': '875.1',
                            'symlinked': '880.0',
                            'tars': [],
                            'versions': ['885.0']},
                           }}
            if per_rec:
                host_facts['host%s' % i]['secret'] = secret_text
            hostvars['hostvars'].update(host_facts)
        if top_level:
            hostvars['secret'] = secret_text
        return hostvars

    def setUp(self):
        # repr() turns each dataset into the single large string that
        # heuristic_log_sanitize() actually operates on
        self.many_url = repr(self._gen_data(self.MANY_RECORDS, True, True,
                                            self.URL_SECRET))
        self.many_ssh = repr(self._gen_data(self.MANY_RECORDS, True, True,
                                            self.SSH_SECRET))
        self.one_url = repr(self._gen_data(self.MANY_RECORDS, False, True,
                                           self.URL_SECRET))
        self.one_ssh = repr(self._gen_data(self.MANY_RECORDS, False, True,
                                           self.SSH_SECRET))
        self.zero_secrets = repr(self._gen_data(self.MANY_RECORDS, False,
                                                False, ''))
        self.few_url = repr(self._gen_data(2, True, True, self.URL_SECRET))
        self.few_ssh = repr(self._gen_data(2, True, True, self.SSH_SECRET))

        # create a temporary file for the test module
        # we're about to generate
        self.tmp_fd, self.tmp_path = tempfile.mkstemp()
        os.write(self.tmp_fd, TEST_MODULE_DATA)

        # template the module code and eval it
        module_data, module_style, shebang = ModuleReplacer().modify_module(self.tmp_path, {}, "", {})

        d = {}
        exec(module_data, d, d)
        self.module = d['get_module']()

        # module_utils/basic.py screws with CWD, let's save it and reset
        self.cwd = os.getcwd()

    def tearDown(self):
        self.cleanup_temp_file(self.tmp_fd, self.tmp_path)
        # Reset CWD back to what it was before basic.py changed it
        os.chdir(self.cwd)

    #################################################################################
    #
    # Speed tests
    #
    # Previously, we used regexes which had some pathologically slow cases for
    # parameters with large amounts of data with many ':' but no '@'. The
    # present function gets slower when there are many replacements so we may
    # want to explore regexes in the future (for the speed when substituting
    # or flexibility). These speed tests will hopefully tell us if we're
    # introducing code that has cases that are simply too slow.
    #
    # Some regex notes:
    # * re.sub() is faster than re.match() + str.join().
    # * We may be able to detect a large number of '@' symbols and then use
    #   a regex else use the present function.

    @timed(5)
    def test_log_sanitize_speed_many_url(self):
        heuristic_log_sanitize(self.many_url)

    @timed(5)
    def test_log_sanitize_speed_many_ssh(self):
        heuristic_log_sanitize(self.many_ssh)

    @timed(5)
    def test_log_sanitize_speed_one_url(self):
        heuristic_log_sanitize(self.one_url)

    @timed(5)
    def test_log_sanitize_speed_one_ssh(self):
        heuristic_log_sanitize(self.one_ssh)

    @timed(5)
    def test_log_sanitize_speed_zero_secrets(self):
        heuristic_log_sanitize(self.zero_secrets)

    #
    # Test that the password obfuscation sanitizes somewhat cleanly.
    #
    def test_log_sanitize_correctness(self):
        url_data = repr(self._gen_data(3, True, True, self.URL_SECRET))
        ssh_data = repr(self._gen_data(3, True, True, self.SSH_SECRET))

        url_output = heuristic_log_sanitize(url_data)
        ssh_output = heuristic_log_sanitize(ssh_data)

        # Basic functionality: Successfully hid the password
        try:
            self.assertNotIn('pas:word', url_output)
            self.assertNotIn('pas:word', ssh_output)

            # Slightly more advanced, we hid all of the password despite the ":"
            self.assertNotIn('pas', url_output)
            self.assertNotIn('pas', ssh_output)
        except AttributeError:
            # python2.6 or less's unittest
            self.assertFalse('pas:word' in url_output, '%s is present in %s' % ('"pas:word"', url_output))
            self.assertFalse('pas:word' in ssh_output, '%s is present in %s' % ('"pas:word"', ssh_output))
            self.assertFalse('pas' in url_output, '%s is present in %s' % ('"pas"', url_output))
            self.assertFalse('pas' in ssh_output, '%s is present in %s' % ('"pas"', ssh_output))

        # In this implementation we replace the password with 8 "*" which is
        # also the length of our password. The url fields should be able to
        # accurately detect where the password ends so the length should be
        # the same:
        self.assertEqual(len(url_output), len(url_data))

        # ssh checking is harder as the heuristic is overzealous in many
        # cases. Since the input will have at least one ":" present before
        # the password we can tell some things about the beginning and end of
        # the data, though:
        self.assertTrue(ssh_output.startswith("{'"))
        self.assertTrue(ssh_output.endswith("}"))
        try:
            self.assertIn(":********@foo.com/data'", ssh_output)
        except AttributeError:
            # python2.6 or less's unittest
            self.assertTrue(":********@foo.com/data'" in ssh_output, '%s is not present in %s' % (":********@foo.com/data'", ssh_output))

        # The overzealous-ness here may lead to us changing the algorithm in
        # the future. We could make it consume less of the data (with the
        # possibility of leaving partial passwords exposed) and encourage
        # people to use no_log instead of relying on this obfuscation.

View File

@@ -1,32 +0,0 @@
# -*- coding: utf-8 -*-
import os
import ast
import unittest
from ansible import utils
class TestModules(unittest.TestCase):
def list_all_modules(self):
paths = utils.plugins.module_finder._get_paths()
paths = [x for x in paths if os.path.isdir(x)]
module_list = []
for path in paths:
for (dirpath, dirnames, filenames) in os.walk(path):
for filename in filenames:
(path, ext) = os.path.splitext(filename)
if ext == ".py":
module_list.append(os.path.join(dirpath, filename))
return module_list
def test_ast_parse(self):
module_list = self.list_all_modules()
ERRORS = []
# attempt to parse each module with ast
for m in module_list:
try:
ast.parse(''.join(open(m)))
except Exception, e:
ERRORS.append((m, e))
assert len(ERRORS) == 0, "get_docstring errors: %s" % ERRORS

View File

@@ -1,390 +0,0 @@
#!/usr/bin/env python
import os
import shutil
from tempfile import mkstemp
from tempfile import mkdtemp
from ansible.playbook.play import Play
import ansible
import unittest
from nose.plugins.skip import SkipTest
class FakeCallBacks(object):
    """Callback stub for the Play tests: every hook is a no-op."""

    def __init__(self):
        """Nothing to initialise."""

    def on_vars_prompt(self):
        """Ignore vars_prompt events."""

    def on_import_for_host(self, host, filename):
        """Ignore per-host vars_files import notifications."""
class FakeInventory(object):
    """Minimal Inventory stand-in: serves host variables from a plain dict."""

    def __init__(self):
        # host name -> vars dict
        self.hosts = {}

    def basedir(self):
        return "."

    def src(self):
        return "fakeinventory"

    def get_variables(self, host, vault_password=None):
        # unknown hosts simply have no variables
        return self.hosts.get(host, {})
class FakePlayBook(object):
    """PlayBook stand-in carrying just enough attributes for Play() to read."""

    def __init__(self):
        self.extra_vars = {}
        # connection / privilege-escalation / tag knobs Play consults;
        # all deliberately left unset
        for name in ('remote_user', 'remote_port',
                     'sudo', 'sudo_user', 'su', 'su_user',
                     'become', 'become_method', 'become_user',
                     'transport', 'only_tags', 'skip_tags',
                     'force_handlers'):
            setattr(self, name, None)
        self.VARS_CACHE = {}
        self.SETUP_CACHE = {}
        self.inventory = FakeInventory()
        self.callbacks = FakeCallBacks()
        # the tests below always address the implicit 'localhost' host
        self.VARS_CACHE['localhost'] = {}
class TestMe(unittest.TestCase):
########################################
# BASIC FILE LOADING BEHAVIOR TESTS
########################################
def test_play_constructor(self):
    """Smoke test: Play(playbook, ds, basedir) with a minimal
    datastructure must construct without raising.

    signature: __init__(self, playbook, ds, basedir, vault_password=None)
    """
    fake_playbook = FakePlayBook()
    minimal_ds = {"hosts": "localhost"}
    play = Play(fake_playbook, minimal_ds, ".")
def test_vars_file(self):
    """A single vars_file entry must be loaded into play.vars_file_vars."""

    # make a vars file
    fd, temp_path = mkstemp()
    f = open(temp_path, "wb")
    f.write("foo: bar\n")
    f.close()

    # create a play with a vars_file
    playbook = FakePlayBook()
    ds = { "hosts": "localhost",
           "vars_files": [temp_path]}
    basedir = "."
    play = Play(playbook, ds, basedir)
    os.remove(temp_path)

    # make sure the variable was loaded
    assert 'foo' in play.vars_file_vars, "vars_file was not loaded into play.vars_file_vars"
    assert play.vars_file_vars['foo'] == 'bar', "foo was not set to bar in play.vars_file_vars"
def test_vars_file_nonlist_error(self):
    """Passing vars_files as a bare string (not a list) must raise."""

    # make a vars file
    fd, temp_path = mkstemp()
    f = open(temp_path, "wb")
    f.write("foo: bar\n")
    f.close()

    # create a play with a string for vars_files
    playbook = FakePlayBook()
    ds = { "hosts": "localhost",
           "vars_files": temp_path}
    basedir = "."
    error_hit = False
    try:
        play = Play(playbook, ds, basedir)
    except:
        error_hit = True
    os.remove(temp_path)
    assert error_hit == True, "no error was thrown when vars_files was not a list"
def test_multiple_vars_files(self):
    """Every entry of a flat vars_files list must be loaded."""

    # make a vars file
    fd, temp_path = mkstemp()
    f = open(temp_path, "wb")
    f.write("foo: bar\n")
    f.close()

    # make a second vars file
    fd, temp_path2 = mkstemp()
    f = open(temp_path2, "wb")
    f.write("baz: bang\n")
    f.close()

    # create a play with two vars_files
    playbook = FakePlayBook()
    ds = { "hosts": "localhost",
           "vars_files": [temp_path, temp_path2]}
    basedir = "."
    play = Play(playbook, ds, basedir)
    os.remove(temp_path)
    os.remove(temp_path2)

    # make sure the variables were loaded
    assert 'foo' in play.vars_file_vars, "vars_file was not loaded into play.vars_file_vars"
    assert play.vars_file_vars['foo'] == 'bar', "foo was not set to bar in play.vars_file_vars"
    assert 'baz' in play.vars_file_vars, "vars_file2 was not loaded into play.vars_file_vars"
    assert play.vars_file_vars['baz'] == 'bang', "baz was not set to bang in play.vars_file_vars"
def test_vars_files_first_found(self):
    """A nested list inside vars_files means 'first existing file wins':
    a missing first candidate must fall through to the next one."""

    # make a vars file
    fd, temp_path = mkstemp()
    f = open(temp_path, "wb")
    f.write("foo: bar\n")
    f.close()

    # get a random file path
    fd, temp_path2 = mkstemp()
    # make sure this file doesn't exist
    os.remove(temp_path2)

    # create a play
    playbook = FakePlayBook()
    ds = { "hosts": "localhost",
           "vars_files": [[temp_path2, temp_path]]}
    basedir = "."
    play = Play(playbook, ds, basedir)
    os.remove(temp_path)

    # make sure the variable was loaded
    assert 'foo' in play.vars_file_vars, "vars_file was not loaded into play.vars_file_vars"
    assert play.vars_file_vars['foo'] == 'bar', "foo was not set to bar in play.vars_file_vars"
def test_vars_files_multiple_found(self):
    """With a nested list, only the FIRST existing candidate is loaded,
    even when later candidates exist too."""

    # make a vars file
    fd, temp_path = mkstemp()
    f = open(temp_path, "wb")
    f.write("foo: bar\n")
    f.close()

    # make a second vars file
    fd, temp_path2 = mkstemp()
    f = open(temp_path2, "wb")
    f.write("baz: bang\n")
    f.close()

    # create a play
    playbook = FakePlayBook()
    ds = { "hosts": "localhost",
           "vars_files": [[temp_path, temp_path2]]}
    basedir = "."
    play = Play(playbook, ds, basedir)
    os.remove(temp_path)
    os.remove(temp_path2)

    # make sure the variables were loaded
    assert 'foo' in play.vars_file_vars, "vars_file was not loaded into play.vars_file_vars"
    assert play.vars_file_vars['foo'] == 'bar', "foo was not set to bar in play.vars_file_vars"
    assert 'baz' not in play.vars_file_vars, "vars_file2 was loaded after vars_file1 was loaded"
def test_vars_files_assert_all_found(self):
    """A flat (non-nested) vars_files entry is mandatory: constructing the
    Play must raise AnsibleError when one of the listed files is missing."""
    # make a vars file that really exists
    fd, temp_path = mkstemp()
    f = open(temp_path, "wb")
    f.write("foo: bar\n")
    f.close()
    # reserve a second path and delete it so it is guaranteed missing
    fd, temp_path2 = mkstemp()
    os.remove(temp_path2)
    # build the play datastructure referencing both files
    playbook = FakePlayBook()
    ds = {"hosts": "localhost",
          "vars_files": [temp_path, temp_path2]}
    basedir = "."
    error_hit = False
    try:
        play = Play(playbook, ds, basedir)
    except ansible.errors.AnsibleError:
        # expected: the second vars_file cannot be found
        error_hit = True
    finally:
        # always remove the real temp file, even if no error was raised
        os.remove(temp_path)
    assert error_hit, "no error was thrown for missing vars_file"
########################################
# VARIABLE PRECEDENCE TESTS
########################################
# On the first run vars_files are loaded into play.vars_file_vars by host == None
# * only files with vars from host==None will work here
# On the secondary run(s), a host is given and the vars_files are loaded into VARS_CACHE
# * this only occurs if host is not None, filename2 has vars in the name, and filename3 does not
# filename -- the original string
# filename2 -- filename templated with play vars
# filename3 -- filename2 template with inject (hostvars + setup_cache + vars_cache)
# filename4 -- path_dwim(filename3)
def test_vars_files_for_host(self):
# A vars_files name templated from per-host vars (VARS_CACHE) can only
# resolve on the second, per-host pass driven by update_vars_files().
# host != None
# vars in filename2
# no vars in filename3
# make a vars file
fd, temp_path = mkstemp()
f = open(temp_path, "wb")
f.write("foo: bar\n")
f.close()
# build play attributes
playbook = FakePlayBook()
ds = { "hosts": "localhost",
"vars_files": ["{{ temp_path }}"]}
basedir = "."
playbook.VARS_CACHE['localhost']['temp_path'] = temp_path
# create play and do first run
play = Play(playbook, ds, basedir)
# the second run is started by calling update_vars_files
play.update_vars_files(['localhost'])
os.remove(temp_path)
assert 'foo' in play.playbook.VARS_CACHE['localhost'], "vars_file vars were not loaded into vars_cache"
assert play.playbook.VARS_CACHE['localhost']['foo'] == 'bar', "foo does not equal bar"
########################################
# COMPLEX FILENAME TEMPLATING TESTS
########################################
def test_vars_files_two_vars_in_name(self):
# A vars_files filename built from TWO play vars must template correctly
# on the first (host == None) pass and load into play.vars_file_vars.
# self.vars_file_vars = ds['vars']
# self.vars_file_vars += _get_vars() ... aka extra_vars
# make a temp dir
temp_dir = mkdtemp()
# make a temp file
fd, temp_file = mkstemp(dir=temp_dir)
f = open(temp_file, "wb")
f.write("foo: bar\n")
f.close()
# build play attributes
playbook = FakePlayBook()
ds = { "hosts": "localhost",
"vars": { "temp_dir": os.path.dirname(temp_file),
"temp_file": os.path.basename(temp_file) },
"vars_files": ["{{ temp_dir + '/' + temp_file }}"]}
basedir = "."
# create play and do first run
play = Play(playbook, ds, basedir)
# cleanup
shutil.rmtree(temp_dir)
assert 'foo' in play.vars_file_vars, "double var templated vars_files filename not loaded"
def test_vars_files_two_vars_different_scope(self):
# Filename mixes a play var and an inventory var: it cannot resolve on
# the play-level pass, only on the per-host pass into VARS_CACHE.
#
# Use a play var and an inventory var to create the filename
#
# self.playbook.inventory.get_variables(host)
# {'group_names': ['ungrouped'], 'inventory_hostname': 'localhost',
# 'ansible_ssh_user': 'root', 'inventory_hostname_short': 'localhost'}
# make a temp dir
temp_dir = mkdtemp()
# make a temp file
fd, temp_file = mkstemp(dir=temp_dir)
f = open(temp_file, "wb")
f.write("foo: bar\n")
f.close()
# build play attributes
playbook = FakePlayBook()
playbook.inventory.hosts['localhost'] = {'inventory_hostname': os.path.basename(temp_file)}
ds = { "hosts": "localhost",
"vars": { "temp_dir": os.path.dirname(temp_file)},
"vars_files": ["{{ temp_dir + '/' + inventory_hostname }}"]}
basedir = "."
# create play and do first run
play = Play(playbook, ds, basedir)
# do the host run
play.update_vars_files(['localhost'])
# cleanup
shutil.rmtree(temp_dir)
assert 'foo' not in play.vars_file_vars, \
"mixed scope vars_file loaded into play vars"
assert 'foo' in play.playbook.VARS_CACHE['localhost'], \
"differently scoped templated vars_files filename not loaded"
assert play.playbook.VARS_CACHE['localhost']['foo'] == 'bar', \
"foo is not bar"
def test_vars_files_two_vars_different_scope_first_found(self):
# Same mixed-scope filename as above, but wrapped in a nested list
# (first-found semantics); result must still land only in VARS_CACHE.
#
# Use a play var and an inventory var to create the filename
#
# make a temp dir
temp_dir = mkdtemp()
# make a temp file
fd, temp_file = mkstemp(dir=temp_dir)
f = open(temp_file, "wb")
f.write("foo: bar\n")
f.close()
# build play attributes
playbook = FakePlayBook()
playbook.inventory.hosts['localhost'] = {'inventory_hostname': os.path.basename(temp_file)}
ds = { "hosts": "localhost",
"vars": { "temp_dir": os.path.dirname(temp_file)},
"vars_files": [["{{ temp_dir + '/' + inventory_hostname }}"]]}
basedir = "."
# create play and do first run
play = Play(playbook, ds, basedir)
# do the host run
play.update_vars_files(['localhost'])
# cleanup
shutil.rmtree(temp_dir)
assert 'foo' not in play.vars_file_vars, \
"mixed scope vars_file loaded into play vars"
assert 'foo' in play.playbook.VARS_CACHE['localhost'], \
"differently scoped templated vars_files filename not loaded"
assert play.playbook.VARS_CACHE['localhost']['foo'] == 'bar', \
"foo is not bar"

View File

@@ -1,176 +0,0 @@
import unittest
import getpass
import os
import shutil
import time
import tempfile
from nose.plugins.skip import SkipTest
from ansible.runner.action_plugins.synchronize import ActionModule as Synchronize
class FakeRunner(object):
    """Minimal stand-in for ansible.runner.Runner used by the synchronize
    action-plugin tests: it records module invocations instead of running
    anything, so assertions can inspect exactly what would have executed."""

    def __init__(self):
        # connection / identity knobs the action plugin reads and mutates
        for attr in ('connection', 'transport', 'basedir', 'sudo',
                     'remote_user', 'private_key_file'):
            setattr(self, attr, None)
        self.check = False
        # privilege-escalation defaults mirroring the real Runner
        self.become = False
        self.become_method = 'sudo'
        self.become_user = False

    def _execute_module(self, conn, tmp, module_name, args,
                        async_jid=None, async_module=None, async_limit=None,
                        inject=None, persist_files=False, complex_args=None,
                        delete_remote_tmp=True):
        """Capture every argument as an ``executed_*`` attribute."""
        captured = dict(conn=conn, tmp=tmp, module_name=module_name,
                        args=args, async_jid=async_jid,
                        async_module=async_module, async_limit=async_limit,
                        inject=inject, persist_files=persist_files,
                        complex_args=complex_args,
                        delete_remote_tmp=delete_remote_tmp)
        for key, value in captured.items():
            setattr(self, 'executed_%s' % key, value)

    def noop_on_check(self, inject):
        # mirror Runner behaviour: simply honour the check-mode flag
        return self.check
class FakeConn(object):
    """Connection stub exposing only the two attributes the synchronize
    plugin inspects; tests assign them after construction as needed."""

    def __init__(self):
        # both unset until a test fills them in
        self.host = self.delegate = None
# Tests for the synchronize action plugin: each case feeds a FakeRunner and
# FakeConn through Synchronize.run() and asserts on the recorded invocation.
class TestSynchronize(unittest.TestCase):
def test_synchronize_action_basic(self):
""" verify the synchronize action plugin sets
the delegate to 127.0.0.1 and remote path to user@host:/path """
runner = FakeRunner()
runner.remote_user = "root"
runner.transport = "ssh"
conn = FakeConn()
inject = {
'inventory_hostname': "el6.lab.net",
'inventory_hostname_short': "el6",
'ansible_connection': None,
'ansible_ssh_user': 'root',
'delegate_to': None,
'playbook_dir': '.',
}
x = Synchronize(runner)
x.setup("synchronize", inject)
x.run(conn, "/tmp", "synchronize", "src=/tmp/foo dest=/tmp/bar", inject)
assert runner.executed_inject['delegate_to'] == "127.0.0.1", "was not delegated to 127.0.0.1"
assert runner.executed_complex_args == {"dest":"root@el6.lab.net:/tmp/bar", "src":"/tmp/foo"}, "wrong args used"
assert runner.sudo == None, "sudo was not reset to None"
def test_synchronize_action_sudo(self):
""" verify the synchronize action plugin unsets and then sets sudo """
runner = FakeRunner()
runner.become = True
runner.remote_user = "root"
runner.transport = "ssh"
conn = FakeConn()
inject = {
'inventory_hostname': "el6.lab.net",
'inventory_hostname_short': "el6",
'ansible_connection': None,
'ansible_ssh_user': 'root',
'delegate_to': None,
'playbook_dir': '.',
}
x = Synchronize(runner)
x.setup("synchronize", inject)
x.run(conn, "/tmp", "synchronize", "src=/tmp/foo dest=/tmp/bar", inject)
assert runner.executed_inject['delegate_to'] == "127.0.0.1", "was not delegated to 127.0.0.1"
# with become enabled the plugin must inject a sudo-wrapped rsync_path
assert runner.executed_complex_args == {'dest':'root@el6.lab.net:/tmp/bar',
'src':'/tmp/foo',
'rsync_path':'"sudo rsync"'}, "wrong args used"
assert runner.become == True, "sudo was not reset to True"
def test_synchronize_action_local(self):
""" verify the synchronize action plugin sets
the delegate to 127.0.0.1 and does not alter the dest """
runner = FakeRunner()
runner.remote_user = "jtanner"
runner.transport = "paramiko"
conn = FakeConn()
conn.host = "127.0.0.1"
conn.delegate = "thishost"
inject = {
'inventory_hostname': "thishost",
'ansible_ssh_host': '127.0.0.1',
'ansible_connection': 'local',
'delegate_to': None,
'playbook_dir': '.',
}
x = Synchronize(runner)
x.setup("synchronize", inject)
x.run(conn, "/tmp", "synchronize", "src=/tmp/foo dest=/tmp/bar", inject)
# local connection: dest stays a plain path, no user@host prefix
assert runner.transport == "paramiko", "runner transport was changed"
assert runner.remote_user == "jtanner", "runner remote_user was changed"
assert runner.executed_inject['delegate_to'] == "127.0.0.1", "was not delegated to 127.0.0.1"
assert "dest_port" not in runner.executed_complex_args, "dest_port should not have been set"
assert runner.executed_complex_args.get("src") == "/tmp/foo", "source was set incorrectly"
assert runner.executed_complex_args.get("dest") == "/tmp/bar", "dest was set incorrectly"
def test_synchronize_action_vagrant(self):
""" Verify the action plugin accommodates the common
scenarios for vagrant boxes. """
runner = FakeRunner()
runner.remote_user = "jtanner"
runner.transport = "ssh"
conn = FakeConn()
conn.host = "127.0.0.1"
conn.delegate = "thishost"
inject = {
'inventory_hostname': "thishost",
'ansible_ssh_user': 'vagrant',
'ansible_ssh_host': '127.0.0.1',
'ansible_ssh_port': '2222',
'delegate_to': None,
'playbook_dir': '.',
'hostvars': {
'thishost': {
'inventory_hostname': 'thishost',
'ansible_ssh_port': '2222',
'ansible_ssh_host': '127.0.0.1',
'ansible_ssh_user': 'vagrant'
}
}
}
x = Synchronize(runner)
x.setup("synchronize", inject)
x.run(conn, "/tmp", "synchronize", "src=/tmp/foo dest=/tmp/bar", inject)
# per-host ssh settings (user/port) must flow through to the rsync args
assert runner.transport == "ssh", "runner transport was changed"
assert runner.remote_user == "jtanner", "runner remote_user was changed"
assert runner.executed_inject['delegate_to'] == "127.0.0.1", "was not delegated to 127.0.0.1"
assert runner.executed_inject['ansible_ssh_user'] == "vagrant", "runner user was changed"
assert runner.executed_complex_args.get("dest_port") == "2222", "remote port was not set to 2222"
assert runner.executed_complex_args.get("src") == "/tmp/foo", "source was set incorrectly"
assert runner.executed_complex_args.get("dest") == "vagrant@127.0.0.1:/tmp/bar", "dest was set incorrectly"

View File

@@ -1,945 +0,0 @@
# -*- coding: utf-8 -*-
import traceback
import unittest
import os
import os.path
import re
import tempfile
import yaml
import passlib.hash
import string
import StringIO
import copy
import tempfile
import shutil
from nose.plugins.skip import SkipTest
from mock import patch
import ansible.utils
import ansible.errors
import ansible.constants as C
import ansible.utils.template as template2
from ansible.module_utils.splitter import split_args
from ansible import __version__
import sys
# NOTE(review): Python 2-only hack — reload re-exposes sys.setdefaultencoding
# (hidden by site.py) so the non-ASCII literals in this file don't raise
# UnicodeDecodeError. This has no Python 3 equivalent; do not port as-is.
reload(sys)
sys.setdefaultencoding("utf8")
class TestUtils(unittest.TestCase):
def _is_fips(self):
# helper: True when the host kernel reports FIPS mode (used to skip MD5 tests)
try:
data = open('/proc/sys/crypto/fips_enabled').read().strip()
except:
# file missing or unreadable -> assume not a FIPS system
return False
if data != '1':
return False
return True
def test_before_comment(self):
''' see if we can detect the part of a string before a comment. Used by INI parser in inventory '''
input = "before # comment"
expected = "before "
actual = ansible.utils.before_comment(input)
self.assertEqual(expected, actual)
# an escaped hash is not a comment marker; the backslash is stripped
input = "before \# not a comment"
expected = "before # not a comment"
actual = ansible.utils.before_comment(input)
self.assertEqual(expected, actual)
# empty string passes through unchanged
input = ""
expected = ""
actual = ansible.utils.before_comment(input)
self.assertEqual(expected, actual)
# a lone hash yields the empty prefix
input = "#"
expected = ""
actual = ansible.utils.before_comment(input)
self.assertEqual(expected, actual)
#####################################
### check_conditional tests
def test_check_conditional_jinja2_literals(self):
# Literal conditionals: empty/None mean "run the task" (True); other
# literals follow Jinja2 truthiness. Non-string/non-list passes through.
# see http://jinja.pocoo.org/docs/templates/#literals
# none
self.assertEqual(ansible.utils.check_conditional(
None, '/', {}), True)
self.assertEqual(ansible.utils.check_conditional(
'', '/', {}), True)
# list
self.assertEqual(ansible.utils.check_conditional(
['true'], '/', {}), True)
self.assertEqual(ansible.utils.check_conditional(
['false'], '/', {}), False)
# non basestring or list
self.assertEqual(ansible.utils.check_conditional(
{}, '/', {}), {})
# boolean
self.assertEqual(ansible.utils.check_conditional(
'true', '/', {}), True)
self.assertEqual(ansible.utils.check_conditional(
'false', '/', {}), False)
self.assertEqual(ansible.utils.check_conditional(
'True', '/', {}), True)
self.assertEqual(ansible.utils.check_conditional(
'False', '/', {}), False)
# integer
self.assertEqual(ansible.utils.check_conditional(
'1', '/', {}), True)
self.assertEqual(ansible.utils.check_conditional(
'0', '/', {}), False)
# string, beware, a string is truthy unless empty
self.assertEqual(ansible.utils.check_conditional(
'"yes"', '/', {}), True)
self.assertEqual(ansible.utils.check_conditional(
'"no"', '/', {}), True)
self.assertEqual(ansible.utils.check_conditional(
'""', '/', {}), False)
def test_check_conditional_jinja2_variable_literals(self):
# Conditionals referencing a variable: the variable's VALUE is evaluated
# as a Jinja2 literal (strings 'true'/'1'/etc., ints, real booleans).
# see http://jinja.pocoo.org/docs/templates/#literals
# boolean
self.assertEqual(ansible.utils.check_conditional(
'var', '/', {'var': 'True'}), True)
self.assertEqual(ansible.utils.check_conditional(
'var', '/', {'var': 'true'}), True)
self.assertEqual(ansible.utils.check_conditional(
'var', '/', {'var': 'False'}), False)
self.assertEqual(ansible.utils.check_conditional(
'var', '/', {'var': 'false'}), False)
# integer
self.assertEqual(ansible.utils.check_conditional(
'var', '/', {'var': '1'}), True)
self.assertEqual(ansible.utils.check_conditional(
'var', '/', {'var': 1}), True)
self.assertEqual(ansible.utils.check_conditional(
'var', '/', {'var': '0'}), False)
self.assertEqual(ansible.utils.check_conditional(
'var', '/', {'var': 0}), False)
# string, beware, a string is truthy unless empty
self.assertEqual(ansible.utils.check_conditional(
'var', '/', {'var': '"yes"'}), True)
self.assertEqual(ansible.utils.check_conditional(
'var', '/', {'var': '"no"'}), True)
self.assertEqual(ansible.utils.check_conditional(
'var', '/', {'var': '""'}), False)
# Python boolean in Jinja2 expression
self.assertEqual(ansible.utils.check_conditional(
'var', '/', {'var': True}), True)
self.assertEqual(ansible.utils.check_conditional(
'var', '/', {'var': False}), False)
def test_check_conditional_jinja2_expression(self):
# full Jinja2 expressions (comparisons against injected vars) evaluate
self.assertEqual(ansible.utils.check_conditional(
'1 == 1', '/', {}), True)
self.assertEqual(ansible.utils.check_conditional(
'bar == 42', '/', {'bar': 42}), True)
self.assertEqual(ansible.utils.check_conditional(
'bar != 42', '/', {'bar': 42}), False)
def test_check_conditional_jinja2_expression_in_variable(self):
# a variable may itself hold an expression; it is evaluated recursively
self.assertEqual(ansible.utils.check_conditional(
'var', '/', {'var': '1 == 1'}), True)
self.assertEqual(ansible.utils.check_conditional(
'var', '/', {'var': 'bar == 42', 'bar': 42}), True)
self.assertEqual(ansible.utils.check_conditional(
'var', '/', {'var': 'bar != 42', 'bar': 42}), False)
def test_check_conditional_jinja2_unicode(self):
# non-ASCII literals and comparisons must work (u'\u00df' is sharp-s)
self.assertEqual(ansible.utils.check_conditional(
u'"\u00df"', '/', {}), True)
self.assertEqual(ansible.utils.check_conditional(
u'var == "\u00df"', '/', {'var': u'\u00df'}), True)
#####################################
### key-value parsing
def test_parse_kv_basic(self):
# k=v parsing honours quoting (spaces, embedded '=') and non-ASCII values
self.assertEqual(ansible.utils.parse_kv('a=simple b="with space" c="this=that"'),
{'a': 'simple', 'b': 'with space', 'c': 'this=that'})
self.assertEqual(ansible.utils.parse_kv('msg=АБВГД'),
{'msg': 'АБВГД'})
def test_jsonify(self):
# None serialises to an empty object; format=True only changes whitespace
self.assertEqual(ansible.utils.jsonify(None), '{}')
self.assertEqual(ansible.utils.jsonify(dict(foo='bar', baz=['qux'])), '{"baz": ["qux"], "foo": "bar"}')
expected = u'{"baz":["qux"],"foo":"bar"}'
self.assertEqual("".join(ansible.utils.jsonify(dict(foo='bar', baz=['qux']), format=True).split()), expected)
def test_is_failed(self):
# failure is signalled by nonzero rc or a truthy 'failed' key (string forms too)
self.assertEqual(ansible.utils.is_failed(dict(rc=0)), False)
self.assertEqual(ansible.utils.is_failed(dict(rc=1)), True)
self.assertEqual(ansible.utils.is_failed(dict()), False)
self.assertEqual(ansible.utils.is_failed(dict(failed=False)), False)
self.assertEqual(ansible.utils.is_failed(dict(failed=True)), True)
self.assertEqual(ansible.utils.is_failed(dict(failed='True')), True)
self.assertEqual(ansible.utils.is_failed(dict(failed='true')), True)
def test_is_changed(self):
# 'changed' key drives the result; string booleans are accepted
self.assertEqual(ansible.utils.is_changed(dict()), False)
self.assertEqual(ansible.utils.is_changed(dict(changed=False)), False)
self.assertEqual(ansible.utils.is_changed(dict(changed=True)), True)
self.assertEqual(ansible.utils.is_changed(dict(changed='True')), True)
self.assertEqual(ansible.utils.is_changed(dict(changed='true')), True)
def test_path_dwim(self):
# absolute paths and '~' pass through; bare names resolve against basedir.
# rstrip('c') maps a compiled .pyc path back to the .py source file.
self.assertEqual(ansible.utils.path_dwim(None, __file__),
__file__)
self.assertEqual(ansible.utils.path_dwim(None, '~'),
os.path.expanduser('~'))
self.assertEqual(ansible.utils.path_dwim(None, 'TestUtils.py'),
__file__.rstrip('c'))
def test_path_dwim_relative(self):
# resolves source relative to the directory containing the original file
self.assertEqual(ansible.utils.path_dwim_relative(__file__, 'units', 'TestUtils.py',
os.path.dirname(os.path.dirname(__file__))),
__file__.rstrip('c'))
def test_json_loads(self):
# thin wrapper over json.loads: a JSON object becomes a dict
self.assertEqual(ansible.utils.json_loads('{"foo": "bar"}'), dict(foo='bar'))
def test_parse_json(self):
# parse_json tolerates leading non-JSON noise but raises ValueError on
# genuinely malformed input.
# leading junk
self.assertEqual(ansible.utils.parse_json('ansible\n{"foo": "bar"}'), dict(foo="bar"))
# No closing quotation
try:
rc = ansible.utils.parse_json('foo=bar "')
print rc
except ValueError:
pass
else:
traceback.print_exc()
raise AssertionError('Incorrect exception, expected ValueError')
# Failed to parse
try:
ansible.utils.parse_json('{')
except ValueError:
pass
else:
raise AssertionError('Incorrect exception, expected ValueError')
def test_parse_yaml(self):
# parse_yaml accepts JSON (a YAML subset) and raises AnsibleError, not a
# raw yaml exception, for malformed input.
#json
self.assertEqual(ansible.utils.parse_yaml('{"foo": "bar"}'), dict(foo='bar'))
# broken json
try:
ansible.utils.parse_yaml('{')
except ansible.errors.AnsibleError:
pass
else:
raise AssertionError
# broken json with path_hint
try:
ansible.utils.parse_yaml('{', path_hint='foo')
except ansible.errors.AnsibleError:
pass
else:
raise AssertionError
# yaml with front-matter
self.assertEqual(ansible.utils.parse_yaml("---\nfoo: bar"), dict(foo='bar'))
# yaml no front-matter
self.assertEqual(ansible.utils.parse_yaml('foo: bar'), dict(foo='bar'))
# yaml indented first line (See #6348)
self.assertEqual(ansible.utils.parse_yaml(' - foo: bar\n baz: qux'), [dict(foo='bar', baz='qux')])
def test_process_common_errors(self):
# the YAML error-hint helper should recognise each classic mistake by a
# distinctive phrase in its advice text
# no quote
self.assertTrue('YAML thought it' in ansible.utils.process_common_errors('', 'foo: {{bar}}', 6))
# extra colon
self.assertTrue('an extra unquoted colon' in ansible.utils.process_common_errors('', 'foo: bar:', 8))
# match
self.assertTrue('same kind of quote' in ansible.utils.process_common_errors('', 'foo: "{{bar}}"baz', 6))
self.assertTrue('same kind of quote' in ansible.utils.process_common_errors('', "foo: '{{bar}}'baz", 6))
# unbalanced
self.assertTrue('We could be wrong' in ansible.utils.process_common_errors('', 'foo: "bad" "wolf"', 6))
self.assertTrue('We could be wrong' in ansible.utils.process_common_errors('', "foo: 'bad' 'wolf'", 6))
def test_process_yaml_error(self):
# process_yaml_error must wrap raw yaml.YAMLError into
# AnsibleYAMLValidationFailed with a context-appropriate message.
data = 'foo: bar\n baz: qux'
try:
ansible.utils.parse_yaml(data)
except yaml.YAMLError, exc:
try:
ansible.utils.process_yaml_error(exc, data, __file__)
except ansible.errors.AnsibleYAMLValidationFailed, e:
self.assertTrue('Syntax Error while loading' in str(e))
else:
raise AssertionError('Incorrect exception, expected AnsibleYAMLValidationFailed')
data = 'foo: bar\n baz: {{qux}}'
try:
ansible.utils.parse_yaml(data)
except yaml.YAMLError, exc:
try:
ansible.utils.process_yaml_error(exc, data, __file__)
except ansible.errors.AnsibleYAMLValidationFailed, e:
self.assertTrue('Syntax Error while loading' in str(e))
else:
raise AssertionError('Incorrect exception, expected AnsibleYAMLValidationFailed')
# non-UTF8 bytes: message changes when a path is supplied ...
data = '\xFF'
try:
ansible.utils.parse_yaml(data)
except yaml.YAMLError, exc:
try:
ansible.utils.process_yaml_error(exc, data, __file__)
except ansible.errors.AnsibleYAMLValidationFailed, e:
self.assertTrue('Check over' in str(e))
else:
raise AssertionError('Incorrect exception, expected AnsibleYAMLValidationFailed')
# ... versus when path is None
data = '\xFF'
try:
ansible.utils.parse_yaml(data)
except yaml.YAMLError, exc:
try:
ansible.utils.process_yaml_error(exc, data, None)
except ansible.errors.AnsibleYAMLValidationFailed, e:
self.assertTrue('Could not parse YAML.' in str(e))
else:
raise AssertionError('Incorrect exception, expected AnsibleYAMLValidationFailed')
def test_parse_yaml_from_file(self):
# covers: directory instead of file -> AnsibleError; plain file parses
# like yaml.safe_load; vault-encrypted file decrypts with the password;
# broken file -> AnsibleYAMLValidationFailed.
test = os.path.join(os.path.dirname(__file__), 'inventory_test_data',
'common_vars.yml')
encrypted = os.path.join(os.path.dirname(__file__), 'inventory_test_data',
'encrypted.yml')
broken = os.path.join(os.path.dirname(__file__), 'inventory_test_data',
'broken.yml')
try:
ansible.utils.parse_yaml_from_file(os.path.dirname(__file__))
except ansible.errors.AnsibleError:
pass
else:
raise AssertionError('Incorrect exception, expected AnsibleError')
self.assertEqual(ansible.utils.parse_yaml_from_file(test), yaml.safe_load(open(test)))
self.assertEqual(ansible.utils.parse_yaml_from_file(encrypted, 'ansible'), dict(foo='bar'))
try:
ansible.utils.parse_yaml_from_file(broken)
except ansible.errors.AnsibleYAMLValidationFailed, e:
self.assertTrue('Syntax Error while loading' in str(e))
else:
raise AssertionError('Incorrect exception, expected AnsibleYAMLValidationFailed')
def test_merge_hash(self):
# later dict wins at every level of a recursive merge
self.assertEqual(ansible.utils.merge_hash(dict(foo='bar', baz='qux'), dict(foo='baz')),
dict(foo='baz', baz='qux'))
self.assertEqual(ansible.utils.merge_hash(dict(foo=dict(bar='baz')), dict(foo=dict(bar='qux'))),
dict(foo=dict(bar='qux')))
def test_md5s(self):
# md5 of an in-memory string (skipped on FIPS kernels where MD5 is banned)
if self._is_fips():
raise SkipTest('MD5 unavailable on FIPs enabled systems')
self.assertEqual(ansible.utils.md5s('ansible'), '640c8a5376aa12fa15cf02130ce239a6')
# Need a test that causes UnicodeEncodeError See 4221
def test_md5(self):
# md5 of a file on disk; a missing file yields None rather than raising
if self._is_fips():
raise SkipTest('MD5 unavailable on FIPs enabled systems')
self.assertEqual(ansible.utils.md5(os.path.join(os.path.dirname(__file__), 'ansible.cfg')),
'fb7b5b90ea63f04bde33e804b6fad42c')
self.assertEqual(ansible.utils.md5(os.path.join(os.path.dirname(__file__), 'ansible.cf')),
None)
def test_checksum_s(self):
# sha1 of an in-memory string (the FIPS-safe replacement for md5s)
self.assertEqual(ansible.utils.checksum_s('ansible'), 'bef45157a43c9e5f469d188810814a4a8ab9f2ed')
# Need a test that causes UnicodeEncodeError See 4221
def test_checksum(self):
# sha1 of a file on disk; a missing file yields None rather than raising
self.assertEqual(ansible.utils.checksum(os.path.join(os.path.dirname(__file__), 'ansible.cfg')),
'658b67c8ac7595adde7048425ff1f9aba270721a')
self.assertEqual(ansible.utils.checksum(os.path.join(os.path.dirname(__file__), 'ansible.cf')),
None)
def test_default(self):
# the factory callable is only invoked when the value is None
self.assertEqual(ansible.utils.default(None, lambda: {}), {})
self.assertEqual(ansible.utils.default(dict(foo='bar'), lambda: {}), dict(foo='bar'))
def test__gitinfo(self):
# placeholder: _gitinfo depends on running from a git checkout
# this fails if not run from git clone
# self.assertEqual('last updated' in ansible.utils._gitinfo())
# missing test for git submodule
# missing test outside of git clone
pass
def test_version(self):
# version string must start with the program name plus __version__
version = ansible.utils.version('ansible')
self.assertTrue(version.startswith('ansible %s' % __version__))
# this fails if not run from git clone
# self.assertEqual('last updated' in version)
def test_getch(self):
# placeholder: getch reads a raw keypress from the tty, untestable here
# figure out how to test this
pass
def test_sanitize_output(self):
# secrets (password= values, URL userinfo passwords) must be masked,
# while credential-free URLs pass through untouched
self.assertEqual(ansible.utils.sanitize_output('password=foo'), 'password=VALUE_HIDDEN')
self.assertEqual(ansible.utils.sanitize_output('foo=user:pass@foo/whatever'),
'foo=user:********@foo/whatever')
self.assertEqual(ansible.utils.sanitize_output('foo=http://username:pass@wherever/foo'),
'foo=http://username:********@wherever/foo')
self.assertEqual(ansible.utils.sanitize_output('foo=http://wherever/foo'),
'foo=http://wherever/foo')
def test_increment_debug(self):
    """increment_debug bumps the global VERBOSITY counter by one."""
    # remember the module-global so this test doesn't leak verbosity
    # changes into other tests (the original left VERBOSITY at 1)
    saved = ansible.utils.VERBOSITY
    try:
        ansible.utils.VERBOSITY = 0
        # the optparse-callback signature (option, opt, value, parser) is ignored
        ansible.utils.increment_debug(None, None, None, None)
        self.assertEqual(ansible.utils.VERBOSITY, 1)
    finally:
        ansible.utils.VERBOSITY = saved
def test_base_parser(self):
    """Each opts group passed to base_parser must expose its CLI options."""
    output = ansible.utils.base_parser(output_opts=True)
    self.assertTrue(output.has_option('--one-line') and output.has_option('--tree'))
    runas = ansible.utils.base_parser(runas_opts=True)
    for opt in ['--sudo', '--sudo-user', '--user', '--su', '--su-user']:
        self.assertTrue(runas.has_option(opt))
    # renamed from 'async': that identifier is a reserved word on
    # Python >= 3.7 and would be a syntax error there
    async_parser = ansible.utils.base_parser(async_opts=True)
    self.assertTrue(async_parser.has_option('--poll') and async_parser.has_option('--background'))
    connect = ansible.utils.base_parser(connect_opts=True)
    self.assertTrue(connect.has_option('--connection'))
    subset = ansible.utils.base_parser(subset_opts=True)
    self.assertTrue(subset.has_option('--limit'))
    check = ansible.utils.base_parser(check_opts=True)
    self.assertTrue(check.has_option('--check'))
    diff = ansible.utils.base_parser(diff_opts=True)
    self.assertTrue(diff.has_option('--diff'))
def test_do_encrypt(self):
# sha256_crypt hashing verifies via passlib, with and without an
# explicit salt; an unknown scheme name raises AnsibleError
salt_chars = string.ascii_letters + string.digits + './'
salt = ansible.utils.random_password(length=8, chars=salt_chars)
hash = ansible.utils.do_encrypt('ansible', 'sha256_crypt', salt=salt)
self.assertTrue(passlib.hash.sha256_crypt.verify('ansible', hash))
hash = ansible.utils.do_encrypt('ansible', 'sha256_crypt')
self.assertTrue(passlib.hash.sha256_crypt.verify('ansible', hash))
try:
ansible.utils.do_encrypt('ansible', 'ansible')
except ansible.errors.AnsibleError:
pass
else:
raise AssertionError('Incorrect exception, expected AnsibleError')
def test_do_encrypt_md5(self):
# md5_crypt path, skipped on FIPS systems where MD5 is unavailable
if self._is_fips():
raise SkipTest('MD5 unavailable on FIPS systems')
hash = ansible.utils.do_encrypt('ansible', 'md5_crypt', salt_size=4)
self.assertTrue(passlib.hash.md5_crypt.verify('ansible', hash))
def test_last_non_blank_line(self):
# trailing blank lines are skipped; an empty input yields ''
self.assertEqual(ansible.utils.last_non_blank_line('a\n\nb\n\nc'), 'c')
self.assertEqual(ansible.utils.last_non_blank_line(''), '')
def test_filter_leading_non_json_lines(self):
# everything before the first line starting with '{' or '[' is dropped
self.assertEqual(ansible.utils.filter_leading_non_json_lines('a\nb\nansible!\n{"foo": "bar"}'),
'{"foo": "bar"}\n')
self.assertEqual(ansible.utils.filter_leading_non_json_lines('a\nb\nansible!\n["foo", "bar"]'),
'["foo", "bar"]\n')
def test_boolean(self):
# truthy spellings: true/t/y/1 in any case (and int 1); everything
# unrecognised — including arbitrary strings — is False
self.assertEqual(ansible.utils.boolean("true"), True)
self.assertEqual(ansible.utils.boolean("True"), True)
self.assertEqual(ansible.utils.boolean("TRUE"), True)
self.assertEqual(ansible.utils.boolean("t"), True)
self.assertEqual(ansible.utils.boolean("T"), True)
self.assertEqual(ansible.utils.boolean("Y"), True)
self.assertEqual(ansible.utils.boolean("y"), True)
self.assertEqual(ansible.utils.boolean("1"), True)
self.assertEqual(ansible.utils.boolean(1), True)
self.assertEqual(ansible.utils.boolean("false"), False)
self.assertEqual(ansible.utils.boolean("False"), False)
self.assertEqual(ansible.utils.boolean("0"), False)
self.assertEqual(ansible.utils.boolean(0), False)
self.assertEqual(ansible.utils.boolean("foo"), False)
def test_make_sudo_cmd(self):
# returns (command, prompt_marker, success_marker); the command embeds
# the target user, a unique prompt, a success sentinel, and 'sudo -k'
cmd = ansible.utils.make_sudo_cmd(C.DEFAULT_SUDO_EXE, 'root', '/bin/sh', '/bin/ls')
self.assertTrue(isinstance(cmd, tuple))
self.assertEqual(len(cmd), 3)
self.assertTrue('-u root' in cmd[0])
self.assertTrue('-p "[sudo via ansible, key=' in cmd[0] and cmd[1].startswith('[sudo via ansible, key'))
self.assertTrue('echo BECOME-SUCCESS-' in cmd[0] and cmd[2].startswith('BECOME-SUCCESS-'))
self.assertTrue('sudo -k' in cmd[0])
def test_make_su_cmd(self):
# same 3-tuple shape as make_sudo_cmd, but built around 'su <user> -c'
cmd = ansible.utils.make_su_cmd('root', '/bin/sh', '/bin/ls')
self.assertTrue(isinstance(cmd, tuple))
self.assertEqual(len(cmd), 3)
self.assertTrue('root -c "/bin/sh' in cmd[0] or ' root -c /bin/sh' in cmd[0])
self.assertTrue('echo BECOME-SUCCESS-' in cmd[0] and cmd[2].startswith('BECOME-SUCCESS-'))
def test_to_unicode(self):
# unicode passes through, byte strings are decoded, and nonstring
# values survive untouched when nonstring='passthru'
uni = ansible.utils.unicode.to_unicode(u'ansible')
self.assertTrue(isinstance(uni, unicode))
self.assertEqual(uni, u'ansible')
none = ansible.utils.unicode.to_unicode(None, nonstring='passthru')
self.assertTrue(isinstance(none, type(None)))
self.assertTrue(none is None)
utf8 = ansible.utils.unicode.to_unicode('ansible')
self.assertTrue(isinstance(utf8, unicode))
self.assertEqual(utf8, u'ansible')
def test_is_list_of_strings(self):
# every element must be a (byte or unicode) string
self.assertEqual(ansible.utils.is_list_of_strings(['foo', 'bar', u'baz']), True)
self.assertEqual(ansible.utils.is_list_of_strings(['foo', 'bar', True]), False)
self.assertEqual(ansible.utils.is_list_of_strings(['one', 2, 'three']), False)
def test_contains_vars(self):
# both Jinja2 '{{...}}' and legacy '$var' syntax count as variables
self.assertTrue(ansible.utils.contains_vars('{{foo}}'))
self.assertTrue(ansible.utils.contains_vars('$foo'))
self.assertFalse(ansible.utils.contains_vars('foo'))
def test_safe_eval(self):
# safe_eval evaluates simple literal/arithmetic expressions against a
# lookup dict; anything unsafe or unevaluable is returned verbatim
# (optionally alongside the exception when include_exceptions=True).
# Not basestring
self.assertEqual(ansible.utils.safe_eval(len), len)
self.assertEqual(ansible.utils.safe_eval(1), 1)
self.assertEqual(ansible.utils.safe_eval(len, include_exceptions=True), (len, None))
self.assertEqual(ansible.utils.safe_eval(1, include_exceptions=True), (1, None))
# module
self.assertEqual(ansible.utils.safe_eval('foo.bar('), 'foo.bar(')
self.assertEqual(ansible.utils.safe_eval('foo.bar(', include_exceptions=True), ('foo.bar(', None))
# import
self.assertEqual(ansible.utils.safe_eval('import foo'), 'import foo')
self.assertEqual(ansible.utils.safe_eval('import foo', include_exceptions=True), ('import foo', None))
# valid simple eval
self.assertEqual(ansible.utils.safe_eval('True'), True)
self.assertEqual(ansible.utils.safe_eval('True', include_exceptions=True), (True, None))
# valid eval with lookup
self.assertEqual(ansible.utils.safe_eval('foo + bar', dict(foo=1, bar=2)), 3)
self.assertEqual(ansible.utils.safe_eval('foo + bar', dict(foo=1, bar=2), include_exceptions=True), (3, None))
# invalid eval
self.assertEqual(ansible.utils.safe_eval('foo'), 'foo')
nameerror = ansible.utils.safe_eval('foo', include_exceptions=True)
self.assertTrue(isinstance(nameerror, tuple))
self.assertEqual(nameerror[0], 'foo')
self.assertTrue(isinstance(nameerror[1], NameError))
def test_listify_lookup_plugin_terms(self):
# NOTE(review): assertions are commented out — this test currently
# verifies nothing; re-enable or remove once the API settles
basedir = os.path.dirname(__file__)
# Straight lookups
#self.assertEqual(ansible.utils.listify_lookup_plugin_terms('things', basedir, dict(things=[])), [])
#self.assertEqual(ansible.utils.listify_lookup_plugin_terms('things', basedir, dict(things=['one', 'two'])), ['one', 'two'])
def test_deprecated(self):
# deprecated() warns to stderr (with/without a version), honours the
# DEPRECATION_WARNINGS toggle, and raises AnsibleError when removed=True
sys_stderr = sys.stderr
sys.stderr = StringIO.StringIO()
ansible.utils.deprecated('Ack!', '0.0')
out = sys.stderr.getvalue()
self.assertTrue('0.0' in out)
self.assertTrue('[DEPRECATION WARNING]' in out)
sys.stderr = StringIO.StringIO()
ansible.utils.deprecated('Ack!', None)
out = sys.stderr.getvalue()
self.assertTrue('0.0' not in out)
self.assertTrue('[DEPRECATION WARNING]' in out)
sys.stderr = StringIO.StringIO()
warnings = C.DEPRECATION_WARNINGS
C.DEPRECATION_WARNINGS = False
ansible.utils.deprecated('Ack!', None)
out = sys.stderr.getvalue()
self.assertTrue(not out)
C.DEPRECATION_WARNINGS = warnings
sys.stderr = sys_stderr
try:
ansible.utils.deprecated('Ack!', '0.0', True)
except ansible.errors.AnsibleError, e:
self.assertTrue('0.0' not in str(e))
self.assertTrue('[DEPRECATED]' in str(e))
else:
raise AssertionError("Incorrect exception, expected AnsibleError")
def test_warning(self):
# warning() writes a '[WARNING]: ...' banner to stderr
sys_stderr = sys.stderr
sys.stderr = StringIO.StringIO()
ansible.utils.warning('ANSIBLE')
out = sys.stderr.getvalue()
sys.stderr = sys_stderr
self.assertTrue('[WARNING]: ANSIBLE' in out)
def test_combine_vars(self):
    """combine_vars honours C.DEFAULT_HASH_BEHAVIOUR: 'replace' overwrites
    top-level keys wholesale, 'merge' deep-merges nested dicts."""
    one = {'foo': {'bar': True}, 'baz': {'one': 'qux'}}
    two = {'baz': {'two': 'qux'}}
    replace = {'baz': {'two': 'qux'}, 'foo': {'bar': True}}
    merge = {'baz': {'two': 'qux', 'one': 'qux'}, 'foo': {'bar': True}}
    # remember the configured behaviour so this test does not leak the
    # 'merge' setting into every test that runs after it (the original did)
    saved_behaviour = C.DEFAULT_HASH_BEHAVIOUR
    try:
        C.DEFAULT_HASH_BEHAVIOUR = 'replace'
        self.assertEqual(ansible.utils.combine_vars(one, two), replace)
        C.DEFAULT_HASH_BEHAVIOUR = 'merge'
        self.assertEqual(ansible.utils.combine_vars(one, two), merge)
    finally:
        C.DEFAULT_HASH_BEHAVIOUR = saved_behaviour
def test_err(self):
# err() writes the message plus a newline to stderr
sys_stderr = sys.stderr
sys.stderr = StringIO.StringIO()
ansible.utils.err('ANSIBLE')
out = sys.stderr.getvalue()
sys.stderr = sys_stderr
self.assertEqual(out, 'ANSIBLE\n')
def test_exit(self):
    """exit() prints its message to stderr and raises SystemExit(1)."""
    sys_stderr = sys.stderr
    sys.stderr = StringIO.StringIO()
    try:
        ansible.utils.exit('ansible')
    except SystemExit, e:
        # Default exit code is 1; the message gains a trailing newline.
        self.assertEqual(e.code, 1)
        self.assertEqual(sys.stderr.getvalue(), 'ansible\n')
    else:
        raise AssertionError('Incorrect exception, expected SystemExit')
    finally:
        # Always put the real stderr back, even on assertion failure.
        sys.stderr = sys_stderr
def test_unfrackpath(self):
    """unfrackpath() expands env vars and collapses '..' path components."""
    # NOTE(review): mutates os.environ without restoring it afterwards.
    os.environ['TEST_ROOT'] = os.path.dirname(os.path.dirname(__file__))
    # rstrip('c') maps a compiled 'TestUtils.pyc' back to the .py source,
    # so the comparison works whether this module was loaded from .py or .pyc.
    self.assertEqual(ansible.utils.unfrackpath('$TEST_ROOT/units/../units/TestUtils.py'), __file__.rstrip('c'))
def test_is_executable(self):
    """is_executable() is falsy for this test module, truthy for bin/ansible."""
    self.assertEqual(ansible.utils.is_executable(__file__), 0)
    # bin/ansible lives three directory levels above this test file.
    bin_ansible = os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(__file__))),
                               'bin', 'ansible')
    self.assertNotEqual(ansible.utils.is_executable(bin_ansible), 0)
def test_get_diff(self):
    """get_diff() renders a unified diff from a before/after dict."""
    standard = dict(
        before_header='foo',
        after_header='bar',
        before='fooo',
        after='foo'
    )
    # Only the hunk body of the diff is compared: the three header
    # lines ('--- before: foo', '+++ after: bar', '@@ -1 +1 @@') are
    # stripped below to work around difflib formatting differences
    # between python 2.6 and 2.7.  (The original test assigned the
    # full expected diff first and immediately shadowed it — that dead
    # assignment is removed here.)
    standard_expected = """-fooo+foo"""
    diff = ansible.utils.get_diff(standard)
    # Drop the three header lines, keep only the body.
    diff = '\n'.join(diff.split('\n')[3:])
    self.assertEqual(diff, unicode(standard_expected))
def test_split_args(self):
    """Exercise split_args(), ansible's quote- and jinja2-aware tokeniser."""
    # split_args is a smarter shlex.split for the needs of the way ansible uses it
    def _split_info(input, desired, actual):
        # Dump the sent/wanted/got triple so a failing combo is easy to debug.
        print "SENT: ", input
        print "WANT: ", desired
        print "GOT: ", actual
    def _test_combo(input, desired):
        actual = split_args(input)
        _split_info(input, desired, actual)
        assert actual == desired
    # trivial splitting
    _test_combo('a b=c d=f', ['a', 'b=c', 'd=f' ])
    # mixed quotes
    _test_combo('a b=\'c\' d="e" f=\'g\'', ['a', "b='c'", 'd="e"', "f='g'" ])
    # with spaces
    # FIXME: this fails, commenting out only for now
    # _test_combo('a "\'one two three\'"', ['a', "'one two three'" ])
    # TODO: ...
    # jinja2 preservation
    _test_combo('a {{ y }} z', ['a', '{{ y }}', 'z' ])
    # jinja2 preservation with spaces and filters and other hard things
    _test_combo(
        'a {{ x | filter(\'moo\', \'param\') }} z {{ chicken }} "waffles"',
        ['a', "{{ x | filter('moo', 'param') }}", 'z', '{{ chicken }}', '"waffles"']
    )
    # invalid quote detection: an unterminated quote must raise
    self.assertRaises(Exception, split_args, 'hey I started a quote"')
    self.assertRaises(Exception, split_args, 'hey I started a\' quote')
    # jinja2 loop blocks with lots of complexity
    _test_combo(
        # in memory of neighbors cat
        # we preserve line breaks unless a line continuation character precedes them
        'a {% if x %} y {%else %} {{meow}} {% endif %} "cookie\nchip" \\\ndone\nand done',
        ['a', '{% if x %}', 'y', '{%else %}', '{{meow}}', '{% endif %}', '"cookie\nchip"', 'done\n', 'and', 'done']
    )
    # test space preservation within quotes
    _test_combo(
        'content="1 2 3 4 " foo=bar',
        ['content="1 2 3 4 "', 'foo=bar']
    )
    # invalid jinja2 nesting detection
    # invalid quote nesting detection
def test_clean_data(self):
    """_clean_data() neutralises jinja2 tags ({{..}} -> {#..#}) in untrusted text."""
    # Data coming back from a remote host: every {{...}} is disarmed,
    # while plain text, JSON braces and unicode pass through untouched.
    remote_cases = [
        ('this is a normal string',
         'this is a normal string'),
        ('this string has a {{variable}}',
         'this string has a {#variable#}'),
        ('this string {{has}} two {{variables}} in it',
         'this string {#has#} two {#variables#} in it'),
        ('this string has a {{variable with a\nnewline}}',
         'this string has a {#variable with a\nnewline#}'),
        ('this string has JSON in it: {"foo":{"bar":{"baz":"oops"}}}',
         'this string has JSON in it: {"foo":{"bar":{"baz":"oops"}}}'),
        ('this string contains unicode: ¢ £ ¤ ¥',
         'this string contains unicode: ¢ £ ¤ ¥'),
    ]
    for raw, expected in remote_cases:
        self.assertEqual(ansible.utils._clean_data(raw, from_remote=True), expected)
    # Inventory data keeps plain variables, but lookup() calls are
    # still disarmed.
    inventory_cases = [
        ('this string is from inventory {{variable}}',
         'this string is from inventory {{variable}}'),
        ('this string is from inventory too but uses lookup {{lookup("foo","bar")}}',
         'this string is from inventory too but uses lookup {#lookup("foo","bar")#}'),
    ]
    for raw, expected in inventory_cases:
        self.assertEqual(ansible.utils._clean_data(raw, from_inventory=True), expected)
def test_censor_unlogged_data(self):
    ''' used by the no_log attribute '''
    result = ansible.utils.censor_unlogged_data(dict(
        password='sekrit',
        rc=12,
        failed=True,
        changed=False,
        skipped=True,
        msg='moo',
    ))
    # Status bookkeeping keys survive censoring; everything else is dropped.
    for kept in ('rc', 'failed', 'changed', 'skipped'):
        assert kept in result
    for dropped in ('password', 'msg'):
        assert dropped not in result
    assert result['censored'] == 'results hidden due to no_log parameter'
def test_repo_url_to_role_name(self):
    """repo_url_to_role_name() derives a galaxy role name from a repo URL."""
    # (url, expected name): the '.git' suffix and the ',v0.1' version
    # spec are stripped; non-URL paths are returned unchanged.
    tests = [("http://git.example.com/repos/repo.git", "repo"),
             ("ssh://git@git.example.com:repos/role-name", "role-name"),
             ("ssh://git@git.example.com:repos/role-name,v0.1", "role-name"),
             ("directory/role/is/installed/in", "directory/role/is/installed/in")]
    for (url, result) in tests:
        self.assertEqual(ansible.utils.repo_url_to_role_name(url), result)
def test_role_spec_parse(self):
    """role_spec_parse() splits '[scm+]src[,version[,name]]' galaxy role specs."""
    # Each entry is (spec string, expected parse result).
    tests = [
        (
            # scm prefix and version; name derived from the repo basename.
            "git+http://git.example.com/repos/repo.git,v1.0",
            {
                'scm': 'git',
                'src': 'http://git.example.com/repos/repo.git',
                'version': 'v1.0',
                'name': 'repo'
            }
        ),
        (
            # Plain tarball URL: no scm, empty version, name from the file.
            "http://repo.example.com/download/tarfile.tar.gz",
            {
                'scm': None,
                'src': 'http://repo.example.com/download/tarfile.tar.gz',
                'version': '',
                'name': 'tarfile'
            }
        ),
        (
            # Empty version field followed by an explicit name override.
            "http://repo.example.com/download/tarfile.tar.gz,,nicename",
            {
                'scm': None,
                'src': 'http://repo.example.com/download/tarfile.tar.gz',
                'version': '',
                'name': 'nicename'
            }
        ),
        (
            # scm, version and explicit name all present.
            "git+http://git.example.com/repos/repo.git,v1.0,awesome",
            {
                'scm': 'git',
                'src': 'http://git.example.com/repos/repo.git',
                'version': 'v1.0',
                'name': 'awesome'
            }
        ),
        (
            # test that http://github URLs are assumed git+http:// unless they end in .tar.gz
            "http://github.com/ansible/fakerole/fake",
            {
                'scm' : 'git',
                'src' : 'http://github.com/ansible/fakerole/fake',
                'version' : 'master',
                'name' : 'fake'
            }
        ),
        (
            # test that http://github URLs are assumed git+http:// unless they end in .tar.gz
            "http://github.com/ansible/fakerole/fake/archive/master.tar.gz",
            {
                'scm' : None,
                'src' : 'http://github.com/ansible/fakerole/fake/archive/master.tar.gz',
                'version' : '',
                'name' : 'master'
            }
        )
    ]
    for (spec, result) in tests:
        self.assertEqual(ansible.utils.role_spec_parse(spec), result)
def test_role_yaml_parse(self):
    """role_yaml_parse() normalises old- and new-style role dict entries.

    Old style uses a 'role' key (optionally 'src,version,name' packed
    into it); new style spells out 'src'/'name' separately.  Extra user
    keys (e.g. 'my_param') must be preserved.
    """
    tests = (
        (
            # Old style
            {
                'role': 'debops.elasticsearch',
                'name': 'elks'
            },
            {
                'role': 'debops.elasticsearch',
                'name': 'elks',
                'scm': None,
                'src': 'debops.elasticsearch',
                'version': '',
            }
        ),
        (
            # Old style with version and name packed into the role string.
            {
                'role': 'debops.elasticsearch,1.0,elks',
                'my_param': 'foo'
            },
            {
                'role': 'debops.elasticsearch,1.0,elks',
                'name': 'elks',
                'scm': None,
                'src': 'debops.elasticsearch',
                'version': '1.0',
                'my_param': 'foo',
            }
        ),
        (
            # Old style with version only: the name falls back to the src.
            {
                'role': 'debops.elasticsearch,1.0',
                'my_param': 'foo'
            },
            {
                'role': 'debops.elasticsearch,1.0',
                'name': 'debops.elasticsearch',
                'scm': None,
                'src': 'debops.elasticsearch',
                'version': '1.0',
                'my_param': 'foo',
            }
        ),
        # New style
        (
            {
                'src': 'debops.elasticsearch',
                'name': 'elks',
                'my_param': 'foo'
            },
            {
                'name': 'elks',
                'scm': None,
                'src': 'debops.elasticsearch',
                'version': '',
                'my_param': 'foo'
            }
        ),
    )
    for (role, result) in tests:
        self.assertEqual(ansible.utils.role_yaml_parse(role), result)
@patch('ansible.utils.plugins.module_finder._get_paths')
def test_find_plugin(self, mock_get_paths):
    """find_plugin() resolves plugin names to file paths.

    Files are found whether or not they carry an extension; directories
    and nonexistent names must return None.  The plugin search path is
    mocked to a scratch temp dir.
    """
    tmp_path = tempfile.mkdtemp()
    mock_get_paths.return_value = [tmp_path,]
    right_module_1 = 'module.py'
    right_module_2 = 'module_without_extension'
    wrong_module_1 = 'folder'
    wrong_module_2 = 'inexistent'
    path_right_module_1 = os.path.join(tmp_path, right_module_1)
    path_right_module_2 = os.path.join(tmp_path, right_module_2)
    path_wrong_module_1 = os.path.join(tmp_path, wrong_module_1)
    # try/finally so the scratch dir is removed even when an assertion
    # fails (the original version leaked it on failure).
    try:
        open(path_right_module_1, 'w').close()
        open(path_right_module_2, 'w').close()
        # A directory must not be mistaken for a plugin file.
        os.mkdir(path_wrong_module_1)
        self.assertEqual(ansible.utils.plugins.module_finder.find_plugin(right_module_1),
                         path_right_module_1)
        self.assertEqual(ansible.utils.plugins.module_finder.find_plugin(right_module_2),
                         path_right_module_2)
        self.assertEqual(ansible.utils.plugins.module_finder.find_plugin(wrong_module_1),
                         None)
        self.assertEqual(ansible.utils.plugins.module_finder.find_plugin(wrong_module_2),
                         None)
    finally:
        shutil.rmtree(tmp_path)

View File

@@ -1,33 +0,0 @@
# -*- coding: utf-8 -*-
import unittest
import os
import os.path
import tempfile
import yaml
import passlib.hash
import string
import StringIO
import copy
from nose.plugins.skip import SkipTest
from ansible.utils import string_functions
import ansible.errors
import ansible.constants as C
import ansible.utils.template as template2
from ansible import __version__
import sys
reload(sys)
sys.setdefaultencoding("utf8")
class TestUtilsStringFunctions(unittest.TestCase):
    """Unit tests for the ansible.utils.string_functions helpers."""

    def test_isprintable(self):
        # BEL (chr(7)) is a control character, hence not printable.
        bell = chr(7)
        self.assertFalse(string_functions.isprintable(bell))
        self.assertTrue(string_functions.isprintable('hello'))

    def test_count_newlines_from_end(self):
        cases = [('foo\n\n\n\n', 4), ('\nfoo', 0)]
        for text, expected in cases:
            self.assertEqual(string_functions.count_newlines_from_end(text), expected)

View File

@@ -1,180 +0,0 @@
#!/usr/bin/env python
from unittest import TestCase
import getpass
import os
import shutil
import time
import tempfile
from binascii import unhexlify
from binascii import hexlify
from nose.plugins.skip import SkipTest
from ansible import errors
from ansible.utils.vault import VaultLib
from ansible.utils.vault import VaultEditor
# Counter import fails for 2.0.1, requires >= 2.6.1 from pip
try:
from Crypto.Util import Counter
HAS_COUNTER = True
except ImportError:
HAS_COUNTER = False
# KDF import fails for 2.0.1, requires >= 2.6.1 from pip
try:
from Crypto.Protocol.KDF import PBKDF2
HAS_PBKDF2 = True
except ImportError:
HAS_PBKDF2 = False
# AES IMPORTS
try:
from Crypto.Cipher import AES as AES
HAS_AES = True
except ImportError:
HAS_AES = False
class TestVaultEditor(TestCase):
    """Tests for VaultEditor file-level decrypt and rekey operations.

    Each test copies the 'vault_test_data' fixture tree into a scratch
    directory so decrypt-in-place does not modify checked-in test data.
    """
    def _is_fips(self):
        # FIPS mode disables the crypto used by the vault-1.0 format;
        # detect it via the kernel knob (missing file => not FIPS).
        try:
            data = open('/proc/sys/crypto/fips_enabled').read().strip()
        except:
            return False
        if data != '1':
            return False
        return True
    def test_methods_exist(self):
        # Guard against accidental renames of the public editor API.
        v = VaultEditor(None, None, None)
        slots = ['create_file',
                 'decrypt_file',
                 'edit_file',
                 'encrypt_file',
                 'rekey_file',
                 'read_data',
                 'write_data',
                 'shuffle_files']
        for slot in slots:
            assert hasattr(v, slot), "VaultLib is missing the %s method" % slot
    def test_decrypt_1_0(self):
        """Decrypt a fixture file written in the legacy vault-1.0 format."""
        if self._is_fips():
            raise SkipTest('Vault-1.0 will not function on FIPS enabled systems')
        if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2:
            raise SkipTest
        # mkdtemp is removed immediately so copytree can create the dir itself.
        dirpath = tempfile.mkdtemp()
        filename = os.path.join(dirpath, "foo-ansible-1.0.yml")
        shutil.rmtree(dirpath)
        shutil.copytree("vault_test_data", dirpath)
        ve = VaultEditor(None, "ansible", filename)
        # make sure the password functions for the cipher
        error_hit = False
        try:
            ve.decrypt_file()
        except errors.AnsibleError, e:
            error_hit = True
        # verify decrypted content
        f = open(filename, "rb")
        fdata = f.read()
        f.close()
        shutil.rmtree(dirpath)
        assert error_hit == False, "error decrypting 1.0 file"
        assert fdata.strip() == "foo", "incorrect decryption of 1.0 file: %s" % fdata.strip()
    def test_decrypt_1_1_newline(self):
        """Decrypt a 1.1 fixture whose password contains embedded newlines."""
        if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2:
            raise SkipTest
        dirpath = tempfile.mkdtemp()
        filename = os.path.join(dirpath, "foo-ansible-1.1-ansible-newline-ansible.yml")
        shutil.rmtree(dirpath)
        shutil.copytree("vault_test_data", dirpath)
        # Password deliberately contains newlines to test password handling.
        ve = VaultEditor(None, "ansible\nansible\n", filename)
        # make sure the password functions for the cipher
        error_hit = False
        try:
            ve.decrypt_file()
        except errors.AnsibleError, e:
            error_hit = True
        # verify decrypted content
        f = open(filename, "rb")
        fdata = f.read()
        f.close()
        shutil.rmtree(dirpath)
        assert error_hit == False, "error decrypting 1.1 file with newline in password"
        #assert fdata.strip() == "foo", "incorrect decryption of 1.1 file: %s" % fdata.strip()
    def test_decrypt_1_1(self):
        """Decrypt a fixture file written in the vault-1.1 (AES256) format."""
        if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2:
            raise SkipTest
        dirpath = tempfile.mkdtemp()
        filename = os.path.join(dirpath, "foo-ansible-1.1.yml")
        shutil.rmtree(dirpath)
        shutil.copytree("vault_test_data", dirpath)
        ve = VaultEditor(None, "ansible", filename)
        # make sure the password functions for the cipher
        error_hit = False
        try:
            ve.decrypt_file()
        except errors.AnsibleError, e:
            error_hit = True
        # verify decrypted content
        f = open(filename, "rb")
        fdata = f.read()
        f.close()
        shutil.rmtree(dirpath)
        assert error_hit == False, "error decrypting 1.1 file"
        assert fdata.strip() == "foo", "incorrect decryption of 1.1 file: %s" % fdata.strip()
    def test_rekey_migration(self):
        """Rekeying a 1.0 file must migrate it to the 1.1/AES256 format."""
        if self._is_fips():
            raise SkipTest('Vault-1.0 will not function on FIPS enabled systems')
        if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2:
            raise SkipTest
        dirpath = tempfile.mkdtemp()
        filename = os.path.join(dirpath, "foo-ansible-1.0.yml")
        shutil.rmtree(dirpath)
        shutil.copytree("vault_test_data", dirpath)
        ve = VaultEditor(None, "ansible", filename)
        # make sure the password functions for the cipher
        error_hit = False
        try:
            ve.rekey_file('ansible2')
        except errors.AnsibleError, e:
            error_hit = True
        # verify decrypted content
        f = open(filename, "rb")
        fdata = f.read()
        f.close()
        shutil.rmtree(dirpath)
        assert error_hit == False, "error rekeying 1.0 file to 1.1"
        # ensure filedata can be decrypted, is 1.1 and is AES256
        vl = VaultLib("ansible2")
        dec_data = None
        error_hit = False
        try:
            dec_data = vl.decrypt(fdata)
        except errors.AnsibleError, e:
            error_hit = True
        assert vl.cipher_name == "AES256", "wrong cipher name set after rekey: %s" % vl.cipher_name
        assert error_hit == False, "error decrypting migrated 1.0 file"
        assert dec_data.strip() == "foo", "incorrect decryption of rekeyed/migrated file: %s" % dec_data

5
test/units/__init__.py Normal file
View File

@@ -0,0 +1,5 @@
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

View File

@@ -1,3 +0,0 @@
[defaults]
test_key = test_value

View File

@@ -0,0 +1,22 @@
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

View File

@@ -0,0 +1,68 @@
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.compat.tests import unittest
from ansible.parsing.yaml.objects import AnsibleBaseYAMLObject
from ansible.errors import AnsibleError
from ansible.compat.tests import BUILTINS
from ansible.compat.tests.mock import mock_open, patch
class TestErrors(unittest.TestCase):
    """Tests for AnsibleError message formatting, with and without a
    source YAML object that supplies file/line/column context."""
    def setUp(self):
        self.message = 'This is the error message'
        self.obj = AnsibleBaseYAMLObject()
    def tearDown(self):
        pass
    def test_basic_error(self):
        # Without a source object, the message is just prefixed with 'ERROR!'.
        e = AnsibleError(self.message)
        self.assertEqual(e.message, 'ERROR! ' + self.message)
        self.assertEqual(e.__repr__(), 'ERROR! ' + self.message)
    @patch.object(AnsibleError, '_get_error_lines_from_file')
    def test_error_with_object(self, mock_method):
        # A source object carrying ansible_pos adds file/line/column
        # context and the offending line to the message.
        self.obj.ansible_pos = ('foo.yml', 1, 1)
        mock_method.return_value = ('this is line 1\n', '')
        e = AnsibleError(self.message, self.obj)
        self.assertEqual(e.message, "ERROR! This is the error message\n\nThe error appears to have been in 'foo.yml': line 1, column 1, but may\nbe elsewhere in the file depending on the exact syntax problem.\n\nThe offending line appears to be:\n\n\nthis is line 1\n^ here\n")
    def test_get_error_lines_from_file(self):
        # Mock the builtin open() so _get_error_lines_from_file reads a
        # fake one-line file.
        m = mock_open()
        m.return_value.readlines.return_value = ['this is line 1\n']
        with patch('{0}.open'.format(BUILTINS), m):
            # this line will be found in the file
            self.obj.ansible_pos = ('foo.yml', 1, 1)
            e = AnsibleError(self.message, self.obj)
            self.assertEqual(e.message, "ERROR! This is the error message\n\nThe error appears to have been in 'foo.yml': line 1, column 1, but may\nbe elsewhere in the file depending on the exact syntax problem.\n\nThe offending line appears to be:\n\n\nthis is line 1\n^ here\n")
            # this line will not be found, as it is out of the index range
            self.obj.ansible_pos = ('foo.yml', 2, 1)
            e = AnsibleError(self.message, self.obj)
            self.assertEqual(e.message, "ERROR! This is the error message\n\nThe error appears to have been in 'foo.yml': line 2, column 1, but may\nbe elsewhere in the file depending on the exact syntax problem.\n\n(specified line no longer in file, maybe it changed?)")

View File

@@ -0,0 +1,21 @@
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

View File

@@ -0,0 +1,113 @@
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.compat.tests import unittest
from ansible.compat.tests.mock import patch, MagicMock
from ansible.errors import AnsibleError, AnsibleParserError
from ansible.executor.play_iterator import PlayIterator
from ansible.playbook import Playbook
from ansible.playbook.play_context import PlayContext
from units.mock.loader import DictDataLoader
class TestPlayIterator(unittest.TestCase):
    """Tests that PlayIterator yields a play's tasks in the documented
    order: pre_tasks, flush_handlers, role tasks, play tasks,
    flush_handlers, post_tasks, flush_handlers, then exhaustion."""
    def setUp(self):
        pass
    def tearDown(self):
        pass
    def test_play_iterator(self):
        # NOTE(review): the YAML literals below lost their indentation in
        # extraction and are reconstructed here — verify against the repo.
        fake_loader = DictDataLoader({
            "test_play.yml": """
            - hosts: all
              gather_facts: false
              roles:
              - test_role
              pre_tasks:
              - debug: msg="this is a pre_task"
              tasks:
              - debug: msg="this is a regular task"
              post_tasks:
              - debug: msg="this is a post_task"
            """,
            '/etc/ansible/roles/test_role/tasks/main.yml': """
            - debug: msg="this is a role task"
            """,
        })
        p = Playbook.load('test_play.yml', loader=fake_loader)
        # Ten mock hosts; only hosts[0] is iterated below.
        hosts = []
        for i in range(0, 10):
            host = MagicMock()
            host.get_name.return_value = 'host%02d' % i
            hosts.append(host)
        inventory = MagicMock()
        inventory.get_hosts.return_value = hosts
        inventory.filter_hosts.return_value = hosts
        play_context = PlayContext(play=p._entries[0])
        itr = PlayIterator(
            inventory=inventory,
            play=p._entries[0],
            play_context=play_context,
            all_vars=dict(),
        )
        # pre task
        (host_state, task) = itr.get_next_task_for_host(hosts[0])
        self.assertIsNotNone(task)
        self.assertEqual(task.action, 'debug')
        # implicit meta: flush_handlers
        (host_state, task) = itr.get_next_task_for_host(hosts[0])
        self.assertIsNotNone(task)
        self.assertEqual(task.action, 'meta')
        # role task (has a _role attribute, unlike plain play tasks)
        (host_state, task) = itr.get_next_task_for_host(hosts[0])
        self.assertIsNotNone(task)
        self.assertEqual(task.action, 'debug')
        self.assertIsNotNone(task._role)
        # regular play task
        (host_state, task) = itr.get_next_task_for_host(hosts[0])
        self.assertIsNotNone(task)
        self.assertEqual(task.action, 'debug')
        self.assertIsNone(task._role)
        # implicit meta: flush_handlers
        (host_state, task) = itr.get_next_task_for_host(hosts[0])
        self.assertIsNotNone(task)
        self.assertEqual(task.action, 'meta')
        # post task
        (host_state, task) = itr.get_next_task_for_host(hosts[0])
        self.assertIsNotNone(task)
        self.assertEqual(task.action, 'debug')
        # implicit meta: flush_handlers
        (host_state, task) = itr.get_next_task_for_host(hosts[0])
        self.assertIsNotNone(task)
        self.assertEqual(task.action, 'meta')
        # end of iteration: the iterator signals exhaustion with None
        (host_state, task) = itr.get_next_task_for_host(hosts[0])
        self.assertIsNone(task)

Some files were not shown because too many files have changed in this diff Show More