add step to prepare downstream collection (#154)

* add step to prepare downstream collection

* fix header generation

* update downstream_fragments.py
This commit is contained in:
Bikouo Aubin
2022-04-29 15:22:46 +02:00
committed by GitHub
parent 0290391446
commit b2d9e78b8e
10 changed files with 140 additions and 25 deletions

97
ci/doc_fragment_modules.py Executable file
View File

@@ -0,0 +1,97 @@
#!/usr/bin/env python
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import ast
from pathlib import PosixPath
import yaml
import argparse
import os
def read_docstring(filename):
    """
    Extract the parsed DOCUMENTATION block from an Ansible module file.

    The file is parsed as an AST (never executed) and top-level assignments
    to DOCUMENTATION, EXAMPLES, RETURN and ANSIBLE_METADATA are collected.
    Only DOCUMENTATION is used by the caller: it is parsed as YAML.

    :param filename: path to the module source file.
    :returns: the YAML-parsed DOCUMENTATION (normally a dict), or None when
        the file has no DOCUMENTATION assignment or cannot be parsed.
    """
    data = {
        'doc': None,
        'plainexamples': None,
        'returndocs': None,
        'metadata': None,  # NOTE: not used anymore, kept for compat
        'seealso': None,
    }

    string_to_vars = {
        'DOCUMENTATION': 'doc',
        'EXAMPLES': 'plainexamples',
        'RETURN': 'returndocs',
        'ANSIBLE_METADATA': 'metadata',  # NOTE: now unused, but kept for backwards compat
    }

    try:
        with open(filename, 'rb') as b_module_data:
            M = ast.parse(b_module_data.read())

            for child in M.body:
                if isinstance(child, ast.Assign):
                    for t in child.targets:
                        try:
                            theid = t.id
                        except AttributeError:
                            # Tuple/attribute targets have no .id; skip them,
                            # they cannot be one of the names we look for.
                            continue

                        if theid in string_to_vars:
                            varkey = string_to_vars[theid]
                            if isinstance(child.value, ast.Dict):
                                data[varkey] = ast.literal_eval(child.value)
                            elif theid != 'EXAMPLES':
                                # string should be yaml if already not a dict.
                                # ast.Constant.value replaces the deprecated
                                # ast.Str ".s" attribute on modern Pythons.
                                if isinstance(child.value, ast.Constant):
                                    data[varkey] = child.value.value
                                else:
                                    data[varkey] = child.value.s
    except Exception:
        # Deliberate best-effort: an unparseable file simply yields None so
        # the caller treats it as "no doc fragment inheritance".
        return

    return yaml.safe_load(data["doc"]) if data["doc"] is not None else None
def is_extending_collection(result, col_fqcn):
    """Return True when the parsed module documentation extends at least one
    doc fragment whose FQCN starts with *col_fqcn* (e.g. "kubernetes.core.")."""
    if not result:
        return False
    fragments = result.get("extends_documentation_fragment", [])
    return any(fragment.startswith(col_fqcn) for fragment in fragments)
def main():
    """CLI entry point.

    Prints, one per line, the names of modules under plugins/modules whose
    documentation extends a kubernetes.core doc fragment, so CI can render
    those fragments downstream.
    """
    parser = argparse.ArgumentParser(
        description="list modules with inherited doc fragments from kubernetes.core that need rendering to deal with Galaxy/AH lack of functionality."
    )
    parser.add_argument(
        "-c", "--collection-path", type=str, default=os.getcwd(), help="path to the collection"
    )
    args = parser.parse_args()

    path = PosixPath(args.collection_path) / PosixPath("plugins/modules")
    output = []
    for d in path.iterdir():
        if d.is_file():
            result = read_docstring(str(d))
            if is_extending_collection(result, "kubernetes.core."):
                # PosixPath.stem already drops the ".py" suffix; the former
                # extra .replace(".py", "") was redundant and could corrupt a
                # name containing ".py" in the middle.
                output.append(d.stem)
    print("\n".join(output))
# Run the CLI only when executed as a script, not when imported.
if __name__ == '__main__':
    main()

View File

@@ -11,6 +11,7 @@
DOWNSTREAM_VERSION="2.1.0"
KEEP_DOWNSTREAM_TMPDIR="${KEEP_DOWNSTREAM_TMPDIR:-''}"
INSTALL_DOWNSTREAM_COLLECTION_PATH="${INSTALL_DOWNSTREAM_COLLECTION_PATH:-}"
_build_dir=""
f_log_info()
@@ -69,6 +70,7 @@ f_prep()
.yamllint
requirements.txt
requirements.yml
test-requirements.txt
)
# Directories to recursively copy downstream (relative repo root dir path)
@@ -81,15 +83,6 @@ f_prep()
tests
)
# Modules with inherited doc fragments from kubernetes.core that need
# rendering to deal with Galaxy/AH lack of functionality.
_doc_fragment_modules=(
k8s
openshift_process
openshift_route
)
# Temp build dir
_tmp_dir=$(mktemp -d)
_start_dir="${PWD}"
@@ -150,6 +143,15 @@ f_handle_doc_fragments_workaround()
local temp_end="${_tmp_dir}/endfile.txt"
local rendered_fragments="./rendereddocfragments.txt"
# FIXME: Check Python interpreter from environment variable to work with prow
PYTHON=${DOWNSTREAM_BUILD_PYTHON:-/usr/bin/python3.6}
f_log_info "Using Python interpreter: ${PYTHON}"
# Modules with inherited doc fragments from kubernetes.core that need
# rendering to deal with Galaxy/AH lack of functionality.
# shellcheck disable=SC2207
_doc_fragment_modules=($("${PYTHON}" "${_start_dir}/ci/doc_fragment_modules.py" -c "${_start_dir}"))
# Build the collection, export docs, render them, stitch it all back together
pushd "${_build_dir}" || return
ansible-galaxy collection build
@@ -164,12 +166,6 @@ f_handle_doc_fragments_workaround()
ANSIBLE_COLLECTIONS_PATH="${install_collections_dir}" \
ANSIBLE_COLLECTIONS_PATHS="${ANSIBLE_COLLECTIONS_PATH}:${install_collections_dir}" \
ansible-doc -j "redhat.openshift.${doc_fragment_mod}" > "${temp_fragments_json}"
# FIXME: Check Python interpreter from environment variable to work with prow
if [ -e /usr/bin/python3.6 ]; then
PYTHON="/usr/bin/python3.6"
else
PYTHON="python"
fi
"${PYTHON}" "${_start_dir}/ci/downstream_fragments.py" "redhat.openshift.${doc_fragment_mod}" "${temp_fragments_json}"
sed -n '/STARTREMOVE/q;p' "${module_py}" > "${temp_start}"
sed '1,/ENDREMOVE/d' "${module_py}" > "${temp_end}"
@@ -185,7 +181,14 @@ f_copy_collection_to_working_dir()
{
f_log_info "${FUNCNAME[0]}"
# Copy the Collection build result into original working dir
f_log_info "copying built collection *.tar.gz into ./"
cp "${_build_dir}"/*.tar.gz ./
# Install downstream collection into provided path
if [[ -n ${INSTALL_DOWNSTREAM_COLLECTION_PATH} ]]; then
f_log_info "Install built collection *.tar.gz into ${INSTALL_DOWNSTREAM_COLLECTION_PATH}"
ansible-galaxy collection install -p "${INSTALL_DOWNSTREAM_COLLECTION_PATH}" "${_build_dir}"/*.tar.gz
fi
rm -f "${_build_dir}"/*.tar.gz
}
f_common_steps()

View File

@@ -15,14 +15,18 @@ with open("./rendereddocfragments.txt", 'w') as df_fd:
json_docs[sys.argv[1]]['doc'].pop('filename', '')
json_docs[sys.argv[1]]['doc'].pop('has_action', '')
df_fd.write("DOCUMENTATION = '''\n")
df_fd.write('DOCUMENTATION = """\n')
df_fd.write(yaml.dump(json_docs[sys.argv[1]]['doc'], default_flow_style=False))
df_fd.write("'''\n\n")
df_fd.write('"""\n\n')
df_fd.write("EXAMPLES = '''\n")
df_fd.write('EXAMPLES = """')
df_fd.write(json_docs[sys.argv[1]]['examples'])
df_fd.write("'''\n\n")
df_fd.write('"""\n\n')
df_fd.write("RETURN = '''\n")
df_fd.write(yaml.dump(json_docs[sys.argv[1]]['return'], default_flow_style=False))
df_fd.write("'''\n\n")
df_fd.write('RETURN = r"""')
data = json_docs[sys.argv[1]]['return']
if isinstance(data, dict):
df_fd.write(yaml.dump(data, default_flow_style=False))
else:
df_fd.write(data)
df_fd.write('"""\n\n')

View File

@@ -7,6 +7,7 @@ from __future__ import absolute_import, division, print_function
__metaclass__ = type
# STARTREMOVE (downstream)
DOCUMENTATION = r"""
module: openshift_adm_groups_sync
@@ -186,6 +187,7 @@ builds:
}
]
"""
# ENDREMOVE (downstream)
import copy
import traceback

View File

@@ -7,7 +7,7 @@ from __future__ import absolute_import, division, print_function
__metaclass__ = type
# STARTREMOVE (downstream)
DOCUMENTATION = r"""
module: openshift_adm_migrate_template_instances
short_description: Update TemplateInstances to point to the latest group-version-kinds
@@ -231,7 +231,7 @@ result:
...
]
"""
# ENDREMOVE (downstream)
import traceback

View File

@@ -7,6 +7,7 @@
from __future__ import absolute_import, division, print_function
__metaclass__ = type
# STARTREMOVE (downstream)
DOCUMENTATION = r'''
module: openshift_adm_prune_auth
@@ -96,7 +97,7 @@ group:
description: list of Security Context Constraints deleted.
returned: I(resource=users)
'''
# ENDREMOVE (downstream)
import copy

View File

@@ -7,6 +7,7 @@
from __future__ import absolute_import, division, print_function
__metaclass__ = type
# STARTREMOVE (downstream)
DOCUMENTATION = r'''
module: openshift_adm_prune_deployments
@@ -64,6 +65,7 @@ replication_controllers:
description: list of replication controllers candidate for pruning.
returned: always
'''
# ENDREMOVE (downstream)
import copy

View File

@@ -7,6 +7,7 @@
from __future__ import absolute_import, division, print_function
__metaclass__ = type
# STARTREMOVE (downstream)
DOCUMENTATION = r'''
module: openshift_adm_prune_images
@@ -275,6 +276,7 @@ deleted_images:
...
]
'''
# ENDREMOVE (downstream)
import copy

View File

@@ -7,6 +7,7 @@
from __future__ import absolute_import, division, print_function
__metaclass__ = type
# STARTREMOVE (downstream)
DOCUMENTATION = r'''
module: openshift_import_image
@@ -153,6 +154,7 @@ result:
returned: success
type: dict
'''
# ENDREMOVE (downstream)
import copy

View File

@@ -7,6 +7,7 @@
from __future__ import absolute_import, division, print_function
__metaclass__ = type
# STARTREMOVE (downstream)
DOCUMENTATION = r'''
module: openshift_registry_info
@@ -79,6 +80,7 @@ check:
returned: always
type: str
'''
# ENDREMOVE (downstream)
import copy