From 12847a1555ee23cfffefcc0948e9e81029521596 Mon Sep 17 00:00:00 2001 From: Bianca Henderson Date: Mon, 26 Jan 2026 16:43:02 -0600 Subject: [PATCH] Replace passing ``warnings`` to ``exit_json`` with ``AnsibleModule.warn`` for the few modules (#1033) (#1074) This is a backport of PR #1033 as merged into main (3e32c12). SUMMARY Using exit_json or fail_json for warnings is deprecated in ansible-core>=2.19.0 and will be removed in ansible-core>=2.23.0 Tested with ansible-core 2.19.3 as the latest released version at the time of the start of this PR and with 2.16.0 as the lowest version supported by kubernetes.core 6.x Resolves: #1031 ISSUE TYPE Bugfix Pull Request COMPONENT NAME k8s_drain k8s_rollback k8s_scale ADDITIONAL INFORMATION The initial version of this PR covers only the module k8s_drain, with the following commits extended to k8s_rollback k8s_scale Reviewed-by: Bianca Henderson Reviewed-by: Mike Graves --- .../fragments/1033-warnings-deprecations.yaml | 2 + plugins/module_utils/copy.py | 10 +- plugins/modules/k8s_drain.py | 3 +- plugins/modules/k8s_rollback.py | 4 +- plugins/modules/k8s_scale.py | 6 +- .../targets/k8s_manifest_url/tasks/main.yml | 1 + .../targets/k8s_rollback/tasks/main.yml | 295 ++++++++++++++++++ .../targets/k8s_scale/tasks/main.yml | 7 +- 8 files changed, 319 insertions(+), 9 deletions(-) create mode 100644 changelogs/fragments/1033-warnings-deprecations.yaml diff --git a/changelogs/fragments/1033-warnings-deprecations.yaml b/changelogs/fragments/1033-warnings-deprecations.yaml new file mode 100644 index 00000000..729cdcee --- /dev/null +++ b/changelogs/fragments/1033-warnings-deprecations.yaml @@ -0,0 +1,2 @@ +bugfixes: + - Replace passing ``warnings`` to ``exit_json`` with ``AnsibleModule.warn`` in the ``k8s_drain``, ``k8s_rollback`` and ``k8s_scale`` modules as it is deprecated in ``ansible-core>=2.19.0`` and will be removed in ``ansible-core>=2.23.0`` (https://github.com/ansible-collections/kubernetes.core/pull/1033). 
diff --git a/plugins/module_utils/copy.py b/plugins/module_utils/copy.py index 0e19e17d..2dd04d18 100644 --- a/plugins/module_utils/copy.py +++ b/plugins/module_utils/copy.py @@ -278,11 +278,15 @@ class K8SCopyFromPod(K8SCopy): def run(self): self.files_to_copy = self.list_remote_files() if self.files_to_copy == []: + # Using warn method instead of passing warnings to exit_json as it is + # deprecated in ansible-core>=2.19.0 + self.module.warn( + "No file found from directory '{0}' into remote Pod.".format( + self.remote_path + ) + ) self.module.exit_json( changed=False, - warning="No file found from directory '{0}' into remote Pod.".format( - self.remote_path - ), ) self.copy() diff --git a/plugins/modules/k8s_drain.py b/plugins/modules/k8s_drain.py index 0f1c290b..d4fd1356 100644 --- a/plugins/modules/k8s_drain.py +++ b/plugins/modules/k8s_drain.py @@ -441,7 +441,8 @@ class K8sDrainAnsible(object): warnings.append(warn) result.append("{0} Pod(s) deleted from node.".format(number_pod)) if warnings: - return dict(result=" ".join(result), warnings=warnings) + for warning in warnings: + self._module.warn(warning) return dict(result=" ".join(result)) def patch_node(self, unschedulable): diff --git a/plugins/modules/k8s_rollback.py b/plugins/modules/k8s_rollback.py index 4fbf6104..308f99e5 100644 --- a/plugins/modules/k8s_rollback.py +++ b/plugins/modules/k8s_rollback.py @@ -168,7 +168,9 @@ def perform_action(svc, resource): module.params["kind"], resource["metadata"]["name"], ) - result = {"changed": False, "warnings": [warn]} + if warn: + module.warn(warn) + result = {"changed": False} return result if module.params["kind"] == "Deployment": diff --git a/plugins/modules/k8s_scale.py b/plugins/modules/k8s_scale.py index 6e7d8c44..b2873c87 100644 --- a/plugins/modules/k8s_scale.py +++ b/plugins/modules/k8s_scale.py @@ -243,10 +243,12 @@ def execute_module(client, module): module.fail_json(msg=error, **return_attributes) def _continue_or_exit(warn): + if warn: + 
module.warn(warn) if multiple_scale: - return_attributes["results"].append({"warning": warn, "changed": False}) + return_attributes["results"].append({"changed": False}) else: - module.exit_json(warning=warn, **return_attributes) + module.exit_json(**return_attributes) for existing in existing_items: if kind.lower() == "job": diff --git a/tests/integration/targets/k8s_manifest_url/tasks/main.yml b/tests/integration/targets/k8s_manifest_url/tasks/main.yml index f612733d..17b30253 100644 --- a/tests/integration/targets/k8s_manifest_url/tasks/main.yml +++ b/tests/integration/targets/k8s_manifest_url/tasks/main.yml @@ -81,6 +81,7 @@ replicas: 1 current_replicas: 3 wait: true + wait_timeout: 60 register: scale - name: Read deployment diff --git a/tests/integration/targets/k8s_rollback/tasks/main.yml b/tests/integration/targets/k8s_rollback/tasks/main.yml index 0706a22e..178864d0 100644 --- a/tests/integration/targets/k8s_rollback/tasks/main.yml +++ b/tests/integration/targets/k8s_rollback/tasks/main.yml @@ -291,6 +291,301 @@ that: - failed_version | int + 1 == result.resources[0].metadata.annotations['deprecated.daemonset.template.generation'] | int + - name: Create deployment with specific labels for selector testing + k8s: + state: present + wait: yes + wait_timeout: "{{ k8s_wait_timeout }}" + definition: + apiVersion: apps/v1 + kind: Deployment + metadata: + name: nginx-labeled + namespace: "{{ namespace }}" + labels: + app: nginx + test-group: label-selector-test + spec: + replicas: 2 + selector: + matchLabels: + app: nginx-labeled + template: + metadata: + labels: + app: nginx-labeled + spec: + containers: + - name: nginx + image: nginx:1.17 + ports: + - containerPort: 80 + + - name: Update deployment to create second revision + k8s: + state: present + wait: yes + wait_timeout: 30 + definition: + apiVersion: apps/v1 + kind: Deployment + metadata: + name: nginx-labeled + namespace: "{{ namespace }}" + labels: + app: nginx + test-group: label-selector-test + spec: + 
replicas: 2 + selector: + matchLabels: + app: nginx-labeled + template: + metadata: + labels: + app: nginx-labeled + spec: + containers: + - name: nginx + image: nginx:1.18 + ports: + - containerPort: 80 + + - name: Test rollback with label selectors + k8s_rollback: + api_version: apps/v1 + kind: Deployment + name: nginx-labeled + namespace: "{{ namespace }}" + label_selectors: + - "test-group=label-selector-test" + register: result + + - name: Assert label selector rollback worked + assert: + that: + - result is changed + - result.rollback_info | length == 1 + - result.rollback_info[0].method == "patch" + + - name: Create deployment with single revision + k8s: + state: present + wait: yes + wait_timeout: 30 + definition: + apiVersion: apps/v1 + kind: Deployment + metadata: + name: single-revision + namespace: "{{ namespace }}" + spec: + replicas: 1 + selector: + matchLabels: + app: single-revision + template: + metadata: + labels: + app: single-revision + spec: + containers: + - name: nginx + image: nginx:1.17 + ports: + - containerPort: 80 + + - name: Try to rollback deployment with no previous revisions + k8s_rollback: + api_version: apps/v1 + kind: Deployment + name: single-revision + namespace: "{{ namespace }}" + register: result + + - name: Debug result + debug: + var: result + + - name: Assert warning is returned for no rollout history + assert: + that: + - not result.changed + - result.warnings is defined + - "'No rollout history found' in result.warnings[0]" + + - name: Create a service for unsupported resource test + k8s: + state: present + definition: + apiVersion: v1 + kind: Service + metadata: + name: test-service + namespace: "{{ namespace }}" + spec: + selector: + app: nginx + ports: + - port: 80 + targetPort: 80 + + - name: Test rollback on unsupported resource type + k8s_rollback: + api_version: v1 + kind: Service + name: test-service + namespace: "{{ namespace }}" + register: result + ignore_errors: yes + + - name: Assert error message for 
unsupported resource + assert: + that: + - not result.changed + - "'Cannot perform rollback on resource of kind Service' in result.msg" + + - name: Test rollback on non-existent deployment + k8s_rollback: + api_version: apps/v1 + kind: Deployment + name: non-existent + namespace: "{{ namespace }}" + register: result + + - name: Assert no resources found + assert: + that: + - not result.changed + - result.rollback_info | length == 0 + + - name: Create multiple deployments with same label + k8s: + state: present + wait: yes + wait_timeout: 30 + definition: + apiVersion: apps/v1 + kind: Deployment + metadata: + name: "multi-{{ item }}" + namespace: "{{ namespace }}" + labels: + group: multi-test + app: multi + spec: + replicas: 1 + selector: + matchLabels: + app: "multi-{{ item }}" + template: + metadata: + labels: + app: "multi-{{ item }}" + spec: + containers: + - name: nginx + image: nginx:1.17 + ports: + - containerPort: 80 + loop: [1, 2, 3] + + - name: Update multiple deployments to create second revisions + k8s: + state: present + wait: yes + wait_timeout: 30 + definition: + apiVersion: apps/v1 + kind: Deployment + metadata: + name: "multi-{{ item }}" + namespace: "{{ namespace }}" + labels: + group: multi-test + app: multi + spec: + replicas: 1 + selector: + matchLabels: + app: "multi-{{ item }}" + template: + metadata: + labels: + app: "multi-{{ item }}" + spec: + containers: + - name: nginx + image: nginx:1.18 + ports: + - containerPort: 80 + loop: [1, 2, 3] + + - name: Rollback multiple deployments using label selector + k8s_rollback: + api_version: apps/v1 + kind: Deployment + name: "multi-{{ item }}" + namespace: "{{ namespace }}" + label_selectors: + - "group=multi-test" + register: result + loop: [1, 2, 3] + + - name: Assert multiple resources were rolled back + assert: + that: + - result.results | length == 3 + - result.results | selectattr('changed', 'equalto', true) | list | length == 3 + - result.results | selectattr('rollback_info', 'defined') | 
list | length == 3 + - result.results | map(attribute='rollback_info') | map('first') | map(attribute='method') | select('equalto', 'patch') | list | length == 3 + + - name: Validate rollback_info structure for deployment + assert: + that: + - result.results is defined + - result.results[0].rollback_info is defined + - result.results[0].rollback_info | length > 0 + - result.results[0].rollback_info[0].method == "patch" + - result.results[0].rollback_info[0].body is defined + - result.results[0].rollback_info[0].resources is defined + - result.results[0].rollback_info[0].resources.metadata is defined + - result.results[0].rollback_info[0].resources.spec is defined + + - name: Test rollback with field selectors + k8s_rollback: + api_version: apps/v1 + kind: Deployment + name: multi-1 + namespace: "{{ namespace }}" + field_selectors: + - "metadata.name=multi-1" + register: result + + - name: Assert field selector rollback worked + assert: + that: + - result is changed + - result.rollback_info | length == 1 + - result.rollback_info[0].resources.metadata.name == "multi-1" + + - name: Test check mode return values + k8s_rollback: + api_version: apps/v1 + kind: Deployment + name: multi-2 + namespace: "{{ namespace }}" + register: result + check_mode: yes + + - name: Validate check mode returns expected structure + assert: + that: + - result is changed + - result.rollback_info is defined + - result.rollback_info[0].method == "patch" + - result.rollback_info[0].body is defined + always: - name: Delete {{ namespace }} namespace k8s: diff --git a/tests/integration/targets/k8s_scale/tasks/main.yml b/tests/integration/targets/k8s_scale/tasks/main.yml index bfcb90d9..3a32d896 100644 --- a/tests/integration/targets/k8s_scale/tasks/main.yml +++ b/tests/integration/targets/k8s_scale/tasks/main.yml @@ -49,6 +49,7 @@ namespace: "{{ scale_namespace }}" replicas: 0 wait: true + wait_timeout: 60 register: scale_down check_mode: true @@ -80,6 +81,7 @@ namespace: "{{ scale_namespace }}" 
replicas: 0 wait: true + wait_timeout: 60 register: scale_down check_mode: true @@ -143,6 +145,7 @@ namespace: "{{ scale_namespace }}" replicas: 0 wait: true + wait_timeout: 60 register: scale_down_idempotency diff: true @@ -233,6 +236,7 @@ namespace: "{{ scale_namespace }}" replicas: 2 wait: yes + wait_timeout: 60 register: scale_up_noop diff: false @@ -308,12 +312,12 @@ resource_version: 0 label_selectors: - app=nginx + wait_timeout: 60 register: scale_out - assert: that: - not scale_out.changed - - scale_out.results | selectattr('warning', 'defined') | list | length == 2 - name: scale deployment using current replicas (wrong value) kubernetes.core.k8s_scale: @@ -328,7 +332,6 @@ - assert: that: - not scale_out.changed - - scale_out.results | selectattr('warning', 'defined') | list | length == 2 - name: scale deployment using current replicas (right value) kubernetes.core.k8s_scale: