From dbc2158646ec50a2af029b2c86525a3898f59002 Mon Sep 17 00:00:00 2001
From: Pedro Algarvio
Date: Fri, 28 Apr 2023 06:32:20 +0100
Subject: [PATCH 001/121] Add missing changelog entries for the work done in
 #64113

Signed-off-by: Pedro Algarvio
---
 changelog/64111.fixed.md | 1 +
 changelog/64113.fixed.md | 2 ++
 2 files changed, 3 insertions(+)
 create mode 100644 changelog/64111.fixed.md
 create mode 100644 changelog/64113.fixed.md

diff --git a/changelog/64111.fixed.md b/changelog/64111.fixed.md
new file mode 100644
index 00000000000..a6c00a1b999
--- /dev/null
+++ b/changelog/64111.fixed.md
@@ -0,0 +1 @@
+Disable class level caching of the file client on `SaltCacheLoader` and properly use context managers to take care of initialization and termination of the file client.
diff --git a/changelog/64113.fixed.md b/changelog/64113.fixed.md
new file mode 100644
index 00000000000..b2a530eeb3d
--- /dev/null
+++ b/changelog/64113.fixed.md
@@ -0,0 +1,2 @@
+Fixed several file client uses which were not properly terminating it by switching to using it as a context manager
+whenever possible or making sure `.destroy()` was called when using a context manager was not possible.

From 6766db4114bbae3bc0da8a87338134021f088539 Mon Sep 17 00:00:00 2001
From: Pedro Algarvio
Date: Fri, 28 Apr 2023 07:24:58 +0100
Subject: [PATCH 002/121] Add test case to assert that
 `salt.client.ssh.SSH.fsclient.destroy()` is called.

Signed-off-by: Pedro Algarvio
---
 changelog/64184.fixed.md           |  1 +
 salt/cli/ssh.py                    |  5 ++++-
 tests/pytests/unit/cli/test_ssh.py | 16 ++++++++++++++++
 3 files changed, 21 insertions(+), 1 deletion(-)
 create mode 100644 changelog/64184.fixed.md
 create mode 100644 tests/pytests/unit/cli/test_ssh.py

diff --git a/changelog/64184.fixed.md b/changelog/64184.fixed.md
new file mode 100644
index 00000000000..c63583324e3
--- /dev/null
+++ b/changelog/64184.fixed.md
@@ -0,0 +1 @@
+Make sure the `salt-ssh` CLI calls its `fsclient.destroy()` method when done.
diff --git a/salt/cli/ssh.py b/salt/cli/ssh.py
index 6048cb5f58f..78522a044a9 100644
--- a/salt/cli/ssh.py
+++ b/salt/cli/ssh.py
@@ -16,4 +16,7 @@ class SaltSSH(salt.utils.parsers.SaltSSHOptionParser):
         self.parse_args()
 
         ssh = salt.client.ssh.SSH(self.config)
-        ssh.run()
+        try:
+            ssh.run()
+        finally:
+            ssh.fsclient.destroy()
diff --git a/tests/pytests/unit/cli/test_ssh.py b/tests/pytests/unit/cli/test_ssh.py
new file mode 100644
index 00000000000..3cc4a5c0419
--- /dev/null
+++ b/tests/pytests/unit/cli/test_ssh.py
@@ -0,0 +1,16 @@
+from salt.cli.ssh import SaltSSH
+from tests.support.mock import MagicMock, call, patch
+
+
+def test_fsclient_destroy_called(minion_opts):
+    """
+    Test that `salt.client.ssh.SSH.fsclient.destroy()` is called.
+    """
+    ssh_mock = MagicMock()
+    with patch(
+        "salt.utils.parsers.SaltSSHOptionParser.parse_args", return_value=MagicMock()
+    ), patch("salt.client.ssh.SSH", return_value=ssh_mock):
+        parser = SaltSSH()
+        parser.config = minion_opts
+        parser.run()
+    assert ssh_mock.fsclient.mock_calls == [call.destroy()]

From 99deea7c2b87f2061e12c465053d262ab4f59a12 Mon Sep 17 00:00:00 2001
From: Salt Project Packaging
Date: Wed, 26 Apr 2023 18:48:28 +0000
Subject: [PATCH 003/121] Update the bootstrap script to v2023.04.26

(cherry picked from commit cab551c6977e79dc1c2593f590ff4be86474f482)
---
 salt/cloud/deploy/bootstrap-salt.sh | 918 +++++++++++++++++++++++++---
 1 file changed, 817 insertions(+), 101 deletions(-)

diff --git a/salt/cloud/deploy/bootstrap-salt.sh b/salt/cloud/deploy/bootstrap-salt.sh
index 13f4471dcf4..6d69bf69213 100644
--- a/salt/cloud/deploy/bootstrap-salt.sh
+++ b/salt/cloud/deploy/bootstrap-salt.sh
@@ -23,7 +23,7 @@
 #======================================================================================================================
 set -o nounset                              # Treat unset variables as an error
 
-__ScriptVersion="2022.10.04"
+__ScriptVersion="2023.04.26"
 __ScriptName="bootstrap-salt.sh"
 
 __ScriptFullName="$0"
@@ -269,6 +269,7 @@ _CUSTOM_MINION_CONFIG="null"
 _QUIET_GIT_INSTALLATION=$BS_FALSE
 _REPO_URL="repo.saltproject.io"
 _ONEDIR_DIR="salt"
+_ONEDIR_NIGHTLY_DIR="salt-dev/${_ONEDIR_DIR}"
 _PY_EXE="python3"
 _INSTALL_PY="$BS_FALSE"
 _TORNADO_MAX_PY3_VERSION="5.0"
@@ -276,6 +277,7 @@ _POST_NEON_INSTALL=$BS_FALSE
 _MINIMUM_PIP_VERSION="9.0.1"
 _MINIMUM_SETUPTOOLS_VERSION="9.1"
 _POST_NEON_PIP_INSTALL_ARGS="--prefix=/usr"
+_PIP_DOWNLOAD_ARGS=""
 
 # Defaults for install arguments
 ITYPE="stable"
@@ -388,7 +390,7 @@ __usage() {
       points to a repository that mirrors Salt packages located at
       repo.saltproject.io. The option passed with -R replaces the
      "repo.saltproject.io". If -R is passed, -r is also set. Currently only
-      works on CentOS/RHEL and Debian based distributions.
+      works on CentOS/RHEL and Debian based distributions and macOS.
   -s  Sleep time used when waiting for daemons to start, restart and when
      checking for the services running. Default: ${__DEFAULT_SLEEP}
   -S  Also install salt-syndic
@@ -404,7 +406,7 @@
      implemented for SUSE.
   -x  Changes the Python version used to install Salt.
      For CentOS 6 git installations python2.7 is supported.
-     Fedora git installation, CentOS 7, Debian 9, Ubuntu 16.04 and 18.04 support python3.
+     Fedora git installation, CentOS 7, Ubuntu 18.04 support python3.
   -X  Do not start daemons after installation
   -y  Installs a different python version on host. Currently this has only been
      tested with CentOS 6 and is considered experimental.
This will install the @@ -613,12 +615,24 @@ if [ "$ITYPE" = "git" ]; then # If doing stable install, check if version specified elif [ "$ITYPE" = "stable" ]; then if [ "$#" -eq 0 ];then - STABLE_REV="latest" + ONEDIR_REV="latest" + _ONEDIR_REV="latest" + ITYPE="onedir" else - if [ "$(echo "$1" | grep -E '^(latest|1\.6|1\.7|2014\.1|2014\.7|2015\.5|2015\.8|2016\.3|2016\.11|2017\.7|2018\.3|2019\.2|3000|3001|3002|3003|3004|3005)$')" != "" ]; then + if [ "$(echo "$1" | grep -E '^(nightly|latest|3006)$')" != "" ]; then + ONEDIR_REV="$1" + _ONEDIR_REV="$1" + ITYPE="onedir" + shift + elif [ "$(echo "$1" | grep -E '^(3003|3004|3005)$')" != "" ]; then STABLE_REV="$1" shift - elif [ "$(echo "$1" | grep -E '^(2[0-9]*\.[0-9]*\.[0-9]*|[3-9][0-9]{3}(\.[0-9]*)?)$')" != "" ]; then + elif [ "$(echo "$1" | grep -E '^([3-9][0-5]{2}[6-9](\.[0-9]*)?)')" != "" ]; then + ONEDIR_REV="minor/$1" + _ONEDIR_REV="$1" + ITYPE="onedir" + shift + elif [ "$(echo "$1" | grep -E '^([3-9][0-5]{3}(\.[0-9]*)?)$')" != "" ]; then # Handle the 3xxx.0 version as 3xxx archive (pin to minor) and strip the fake ".0" suffix STABLE_REV=$(echo "$1" | sed -E 's/^([3-9][0-9]{3})\.0$/\1/') if [ "$(uname)" != "Darwin" ]; then @@ -626,7 +640,7 @@ elif [ "$ITYPE" = "stable" ]; then fi shift else - echo "Unknown stable version: $1 (valid: 1.6, 1.7, 2014.1, 2014.7, 2015.5, 2015.8, 2016.3, 2016.11, 2017.7, 2018.3, 2019.2, 3000, 3001, 3002, 3003, 3004, 3005, latest, \$MAJOR.\$MINOR.\$PATCH until 2019.2, \$MAJOR or \$MAJOR.\$PATCH starting from 3000)" + echo "Unknown stable version: $1 (valid: 3003, 3004, 3005, 3006, latest)" exit 1 fi fi @@ -635,16 +649,19 @@ elif [ "$ITYPE" = "onedir" ]; then if [ "$#" -eq 0 ];then ONEDIR_REV="latest" else - if [ "$(echo "$1" | grep -E '^(latest|3005)$')" != "" ]; then + if [ "$(echo "$1" | grep -E '^(nightly|latest|3005|3006)$')" != "" ]; then ONEDIR_REV="$1" shift - elif [ "$(echo "$1" | grep -E '^([3-9][0-9]{3}(\.[0-9]*)?)')" != "" ]; then - # Handle the 3xxx.0 version as 3xxx archive (pin to minor) and strip the fake ".0" suffix - ONEDIR_REV=$(echo "$1" | sed -E 's/^([3-9][0-9]{3})\.0$/\1/') + elif [ "$(echo "$1" | grep -E '^(3005(\.[0-9]*)?)')" != "" ]; then + # Handle the 3005.0 version as 3005 archive (pin to minor) and strip the fake ".0" suffix + ONEDIR_REV=$(echo "$1" | sed -E 's/^(3005)\.0$/\1/') ONEDIR_REV="minor/$ONEDIR_REV" shift + elif [ "$(echo "$1" | grep -E '^([3-9][0-9]{3}(\.[0-9]*)?)')" != "" ]; then + ONEDIR_REV="minor/$1" + shift else - echo "Unknown stable version: $1 (valid: 3005, latest.)" + echo "Unknown onedir version: $1 (valid: 3005, 3006, latest, nightly.)" exit 1 fi fi @@ -667,8 +684,13 @@ elif [ "$ITYPE" = "onedir_rc" ]; then #ONEDIR_REV=$(echo "$1" | sed -E 's/^([3-9][0-9]{3})\.0$/\1/') ONEDIR_REV="minor/$1" shift + elif [ "$(echo "$1" | grep -E '^([3-9][0-9]{3}\.[0-9]?rc[0-9]$)')" != "" ]; then + # Handle the 3xxx.0 version as 3xxx archive (pin to minor) and strip the fake ".0" suffix + #ONEDIR_REV=$(echo "$1" | sed -E 's/^([3-9][0-9]{3})\.0$/\1/') + ONEDIR_REV="minor/$1" + shift else - echo "Unknown stable version: $1 (valid: 3005-1, latest.)" + echo "Unknown onedir_rc version: $1 (valid: 3005-1, latest.)" exit 1 fi fi @@ -877,6 +899,18 @@ __fetch_verify() { return 1 } +#--- FUNCTION ------------------------------------------------------------------------------------------------------- +# NAME: __check_url_exists +# DESCRIPTION: Checks if a URL exists +#---------------------------------------------------------------------------------------------------------------------- 
+__check_url_exists() { + _URL="$1" + if curl --output /dev/null --silent --fail "${_URL}"; then + return 0 + else + return 1 + fi +} #--- FUNCTION ------------------------------------------------------------------------------------------------------- # NAME: __gather_hardware_info # DESCRIPTION: Discover hardware information @@ -1365,7 +1399,7 @@ __gather_system_info() { #---------------------------------------------------------------------------------------------------------------------- # shellcheck disable=SC2034 __ubuntu_derivatives_translation() { - UBUNTU_DERIVATIVES="(trisquel|linuxmint|linaro|elementary_os|neon)" + UBUNTU_DERIVATIVES="(trisquel|linuxmint|linaro|elementary_os|neon|pop)" # Mappings trisquel_6_ubuntu_base="12.04" linuxmint_13_ubuntu_base="12.04" @@ -1378,6 +1412,8 @@ __ubuntu_derivatives_translation() { neon_16_ubuntu_base="16.04" neon_18_ubuntu_base="18.04" neon_20_ubuntu_base="20.04" + neon_22_ubuntu_base="22.04" + pop_22_ubuntu_base="22.04" # Translate Ubuntu derivatives to their base Ubuntu version match=$(echo "$DISTRO_NAME_L" | grep -E ${UBUNTU_DERIVATIVES}) @@ -1437,9 +1473,13 @@ __check_dpkg_architecture() { if [ "$_CUSTOM_REPO_URL" != "null" ]; then warn_msg="Support for arm64 is experimental, make sure the custom repository used has the expected structure and contents." else - # Saltstack official repository does not yet have arm64 metadata, - # use arm64 repositories on arm64, since all pkgs are arch-independent - __REPO_ARCH="arm64" + # Saltstack official repository has arm64 metadata beginning with Debian 11, + # use amd64 repositories on arm64 for anything older, since all pkgs are arch-independent + if [ "$DISTRO_NAME_L" = "debian" ] || [ "$DISTRO_MAJOR_VERSION" -lt 11 ]; then + __REPO_ARCH="amd64" + else + __REPO_ARCH="arm64" + fi __REPO_ARCH_DEB="deb [signed-by=/usr/share/keyrings/salt-archive-keyring.gpg arch=$__REPO_ARCH]" warn_msg="Support for arm64 packages is experimental and might rely on architecture-independent packages from the amd64 repository." fi @@ -1914,10 +1954,6 @@ if [ "$ITYPE" = "git" ]; then if [ "$__NEW_VS_TAG_REGEX_MATCH" = "MATCH" ]; then _POST_NEON_INSTALL=$BS_TRUE __TAG_REGEX_MATCH="${__NEW_VS_TAG_REGEX_MATCH}" - if [ "$(echo "${GIT_REV}" | cut -c -1)" != "v" ]; then - # We do this to properly clone tags - GIT_REV="v${GIT_REV}" - fi echodebug "Post Neon Tag Regex Match On: ${GIT_REV}" else __TAG_REGEX_MATCH=$(echo "${GIT_REV}" | sed -E 's/^(v?[0-9]{1,4}\.[0-9]{1,2})(\.[0-9]{1,2})?.*$/MATCH/') @@ -1929,10 +1965,6 @@ if [ "$ITYPE" = "git" ]; then if [ "$__NEW_VS_TAG_REGEX_MATCH" = "MATCH" ]; then _POST_NEON_INSTALL=$BS_TRUE __TAG_REGEX_MATCH="${__NEW_VS_TAG_REGEX_MATCH}" - if [ "$(echo "${GIT_REV}" | cut -c -1)" != "v" ]; then - # We do this to properly clone tags - GIT_REV="v${GIT_REV}" - fi echodebug "Post Neon Tag Regex Match On: ${GIT_REV}" else __TAG_REGEX_MATCH=$(echo "${GIT_REV}" | sed 's/^.*\(v\?[[:digit:]]\{1,4\}\.[[:digit:]]\{1,2\}\)\(\.[[:digit:]]\{1,2\}\)\?.*$/MATCH/') @@ -2095,20 +2127,13 @@ __rpm_import_gpg() { #---------------------------------------------------------------------------------------------------------------------- __yum_install_noinput() { - ENABLE_EPEL_CMD="" - # Skip Amazon Linux for the first round, since EPEL is no longer required. 
- # See issue #724 - if [ $_DISABLE_REPOS -eq $BS_FALSE ] && [ "$DISTRO_NAME_L" != "amazon_linux_ami" ]; then - ENABLE_EPEL_CMD="--enablerepo=${_EPEL_REPO}" - fi - if [ "$DISTRO_NAME_L" = "oracle_linux" ]; then # We need to install one package at a time because --enablerepo=X disables ALL OTHER REPOS!!!! for package in "${@}"; do - yum -y install "${package}" || yum -y install "${package}" ${ENABLE_EPEL_CMD} || return $? + yum -y install "${package}" || yum -y install "${package}" || return $? done else - yum -y install "${@}" ${ENABLE_EPEL_CMD} || return $? + yum -y install "${@}" || return $? fi } # ---------- end of function __yum_install_noinput ---------- @@ -2121,6 +2146,15 @@ __dnf_install_noinput() { dnf -y install "${@}" || return $? } # ---------- end of function __dnf_install_noinput ---------- +#--- FUNCTION ------------------------------------------------------------------------------------------------------- +# NAME: __tdnf_install_noinput +# DESCRIPTION: (DRY) dnf install with noinput options +#---------------------------------------------------------------------------------------------------------------------- +__tdnf_install_noinput() { + + tdnf -y install "${@}" || return $? +} # ---------- end of function __tdnf_install_noinput ---------- + #--- FUNCTION ------------------------------------------------------------------------------------------------------- # NAME: __git_clone_and_checkout # DESCRIPTION: (DRY) Helper function to clone and checkout salt to a @@ -2646,7 +2680,7 @@ __activate_virtualenv() { # NAME: __install_pip_pkgs # DESCRIPTION: Return 0 or 1 if successfully able to install pip packages. Can provide a different python version to # install pip packages with. If $py_ver is not specified it will use the default python version. -# PARAMETERS: pkgs, py_ver +# PARAMETERS: pkgs, py_ver, upgrade #---------------------------------------------------------------------------------------------------------------------- __install_pip_pkgs() { @@ -2815,15 +2849,15 @@ EOM fi echodebug "Running '${_pip_cmd} install wheel ${_setuptools_dep}'" - ${_pip_cmd} install ${_POST_NEON_PIP_INSTALL_ARGS} wheel "${_setuptools_dep}" + ${_pip_cmd} install --upgrade ${_POST_NEON_PIP_INSTALL_ARGS} wheel "${_setuptools_dep}" echoinfo "Installing salt using ${_py_exe}" cd "${_SALT_GIT_CHECKOUT_DIR}" || return 1 mkdir /tmp/git/deps echoinfo "Downloading Salt Dependencies from PyPi" - echodebug "Running '${_pip_cmd} download -d /tmp/git/deps .'" - ${_pip_cmd} download -d /tmp/git/deps . || (echo "Failed to download salt dependencies" && return 1) + echodebug "Running '${_pip_cmd} download -d /tmp/git/deps ${_PIP_DOWNLOAD_ARGS} .'" + ${_pip_cmd} download -d /tmp/git/deps ${_PIP_DOWNLOAD_ARGS} . 
|| (echo "Failed to download salt dependencies" && return 1) echoinfo "Installing Downloaded Salt Dependencies" echodebug "Running '${_pip_cmd} install --ignore-installed ${_POST_NEON_PIP_INSTALL_ARGS} /tmp/git/deps/*'" @@ -3057,9 +3091,19 @@ __install_saltstack_ubuntu_onedir_repository() { # SaltStack's stable Ubuntu repository: SALTSTACK_UBUNTU_URL="${HTTP_VAL}://${_REPO_URL}/${_ONEDIR_DIR}/${__PY_VERSION_REPO}/ubuntu/${UBUNTU_VERSION}/${__REPO_ARCH}/${ONEDIR_REV}/" + if [ "${ONEDIR_REV}" = "nightly" ] ; then + SALTSTACK_UBUNTU_URL="${HTTP_VAL}://${_REPO_URL}/${_ONEDIR_NIGHTLY_DIR}/${__PY_VERSION_REPO}/ubuntu/${UBUNTU_VERSION}/${__REPO_ARCH}/" + fi echo "$__REPO_ARCH_DEB $SALTSTACK_UBUNTU_URL $UBUNTU_CODENAME main" > /etc/apt/sources.list.d/salt.list - __apt_key_fetch "${SALTSTACK_UBUNTU_URL}salt-archive-keyring.gpg" || return 1 + if [ "$(echo "${ONEDIR_REV}" | grep -E '(3004|3005)')" != "" ]; then + __apt_key_fetch "${SALTSTACK_UBUNTU_URL}salt-archive-keyring.gpg" || return 1 + elif [ "$(echo "${ONEDIR_REV}" | grep -E '(latest|nightly)')" != "" ]; then + __apt_key_fetch "${SALTSTACK_UBUNTU_URL}salt-archive-keyring.gpg" || \ + __apt_key_fetch "${SALTSTACK_UBUNTU_URL}SALT-PROJECT-GPG-PUBKEY-2023.gpg" || return 1 + else + __apt_key_fetch "${SALTSTACK_UBUNTU_URL}SALT-PROJECT-GPG-PUBKEY-2023.gpg" || return 1 + fi __wait_for_apt apt-get update || return 1 } @@ -3318,7 +3362,15 @@ install_ubuntu_git() { _POST_NEON_PIP_INSTALL_ARGS="" __install_salt_from_repo_post_neon "${_PY_EXE}" || return 1 cd "${_SALT_GIT_CHECKOUT_DIR}" || return 1 - sed -i 's:/usr/bin:/usr/local/bin:g' pkg/*.service + + # Account for new path for services files in later releases + if [ -d "pkg/common" ]; then + _SERVICE_DIR="pkg/common" + else + _SERVICE_DIR="pkg" + fi + + sed -i 's:/usr/bin:/usr/local/bin:g' ${_SERVICE_DIR}/*.service return 0 fi @@ -3390,8 +3442,15 @@ install_ubuntu_git_post() { [ $fname = "minion" ] && [ "$_INSTALL_MINION" -eq $BS_FALSE ] && continue [ $fname = "syndic" ] && [ "$_INSTALL_SYNDIC" -eq $BS_FALSE ] && continue + # Account for new path for services files in later releases + if [ -f "${_SALT_GIT_CHECKOUT_DIR}/pkg/common/salt-${fname}.service" ]; then + _SERVICE_DIR="${_SALT_GIT_CHECKOUT_DIR}/pkg/common" + else + _SERVICE_DIR="${_SALT_GIT_CHECKOUT_DIR}/pkg" + fi + if [ -f /bin/systemctl ] && [ "$DISTRO_MAJOR_VERSION" -ge 16 ]; then - __copyfile "${_SALT_GIT_CHECKOUT_DIR}/pkg/salt-${fname}.service" "/lib/systemd/system/salt-${fname}.service" + __copyfile "${_SERVICE_DIR}/salt-${fname}.service" "/lib/systemd/system/salt-${fname}.service" # Skip salt-api since the service should be opt-in and not necessarily started on boot [ $fname = "api" ] && continue @@ -3406,8 +3465,8 @@ install_ubuntu_git_post() { if [ ! 
-f $_upstart_conf ]; then # upstart does not know about our service, let's copy the proper file echowarn "Upstart does not appear to know about salt-$fname" - echodebug "Copying ${_SALT_GIT_CHECKOUT_DIR}/pkg/salt-$fname.upstart to $_upstart_conf" - __copyfile "${_SALT_GIT_CHECKOUT_DIR}/pkg/salt-${fname}.upstart" "$_upstart_conf" + echodebug "Copying ${_SERVICE_DIR}/salt-$fname.upstart to $_upstart_conf" + __copyfile "${_SERVICE_DIR}/salt-${fname}.upstart" "$_upstart_conf" # Set service to know about virtualenv if [ "${_VIRTUALENV_DIR}" != "null" ]; then echo "SALT_USE_VIRTUALENV=${_VIRTUALENV_DIR}" > /etc/default/salt-${fname} @@ -3579,9 +3638,19 @@ __install_saltstack_debian_onedir_repository() { # amd64 is just a part of repository URI, 32-bit pkgs are hosted under the same location SALTSTACK_DEBIAN_URL="${HTTP_VAL}://${_REPO_URL}/${_ONEDIR_DIR}/${__PY_VERSION_REPO}/debian/${DEBIAN_RELEASE}/${__REPO_ARCH}/${ONEDIR_REV}/" + if [ "${ONEDIR_REV}" = "nightly" ] ; then + SALTSTACK_DEBIAN_URL="${HTTP_VAL}://${_REPO_URL}/${_ONEDIR_NIGHTLY_DIR}/${__PY_VERSION_REPO}/debian/${DEBIAN_RELEASE}/${__REPO_ARCH}/" + fi echo "$__REPO_ARCH_DEB $SALTSTACK_DEBIAN_URL $DEBIAN_CODENAME main" > "/etc/apt/sources.list.d/salt.list" - __apt_key_fetch "${SALTSTACK_DEBIAN_URL}salt-archive-keyring.gpg" || return 1 + if [ "$(echo "${ONEDIR_REV}" | grep -E '(3004|3005)')" != "" ]; then + __apt_key_fetch "${SALTSTACK_DEBIAN_URL}salt-archive-keyring.gpg" || return 1 + elif [ "$(echo "${ONEDIR_REV}" | grep -E '(latest|nightly)')" != "" ]; then + __apt_key_fetch "${SALTSTACK_DEBIAN_URL}salt-archive-keyring.gpg" || \ + __apt_key_fetch "${SALTSTACK_DEBIAN_URL}SALT-PROJECT-GPG-PUBKEY-2023.gpg" || return 1 + else + __apt_key_fetch "${SALTSTACK_DEBIAN_URL}SALT-PROJECT-GPG-PUBKEY-2023.gpg" || return 1 + fi __wait_for_apt apt-get update || return 1 } @@ -3940,7 +4009,15 @@ install_debian_git() { _POST_NEON_PIP_INSTALL_ARGS="" __install_salt_from_repo_post_neon "${_PY_EXE}" || return 1 cd "${_SALT_GIT_CHECKOUT_DIR}" || return 1 - sed -i 's:/usr/bin:/usr/local/bin:g' pkg/*.service + + # Account for new path for services files in later releases + if [ -d "pkg/common" ]; then + _SERVICE_DIR="pkg/common" + else + _SERVICE_DIR="pkg" + fi + + sed -i 's:/usr/bin:/usr/local/bin:g' ${_SERVICE_DIR}/*.service return 0 fi @@ -3999,16 +4076,23 @@ install_debian_git_post() { [ "$fname" = "minion" ] && [ "$_INSTALL_MINION" -eq $BS_FALSE ] && continue [ "$fname" = "syndic" ] && [ "$_INSTALL_SYNDIC" -eq $BS_FALSE ] && continue + # Account for new path for services files in later releases + if [ -f "${_SALT_GIT_CHECKOUT_DIR}/pkg/common/salt-${fname}.service" ]; then + _SERVICE_DIR="${_SALT_GIT_CHECKOUT_DIR}/pkg/common" + else + _SERVICE_DIR="${_SALT_GIT_CHECKOUT_DIR}/pkg" + fi + # Configure SystemD for Debian 8 "Jessie" and later if [ -f /bin/systemctl ]; then if [ ! 
-f /lib/systemd/system/salt-${fname}.service ] || \ { [ -f /lib/systemd/system/salt-${fname}.service ] && [ $_FORCE_OVERWRITE -eq $BS_TRUE ]; }; then - if [ -f "${_SALT_GIT_CHECKOUT_DIR}/pkg/salt-${fname}.service" ]; then - __copyfile "${_SALT_GIT_CHECKOUT_DIR}/pkg/salt-${fname}.service" /lib/systemd/system - __copyfile "${_SALT_GIT_CHECKOUT_DIR}/pkg/salt-${fname}.environment" "/etc/default/salt-${fname}" + if [ -f "${_SERVICE_DIR}/salt-${fname}.service" ]; then + __copyfile "${_SERVICE_DIR}/salt-${fname}.service" /lib/systemd/system + __copyfile "${_SERVICE_DIR}/salt-${fname}.environment" "/etc/default/salt-${fname}" else # workaround before adding Debian-specific unit files to the Salt main repo - __copyfile "${_SALT_GIT_CHECKOUT_DIR}/pkg/salt-${fname}.service" /lib/systemd/system + __copyfile "${_SERVICE_DIR}/salt-${fname}.service" /lib/systemd/system sed -i -e '/^Type/ s/notify/simple/' /lib/systemd/system/salt-${fname}.service fi fi @@ -4103,6 +4187,41 @@ install_debian_check_services() { # Fedora Install Functions # +__install_saltstack_fedora_onedir_repository() { + if [ "$ITYPE" = "stable" ]; then + REPO_REV="$ONEDIR_REV" + else + REPO_REV="latest" + fi + + __PY_VERSION_REPO="yum" + if [ -n "$_PY_EXE" ] && [ "$_PY_MAJOR_VERSION" -eq 3 ]; then + __PY_VERSION_REPO="py3" + fi + + GPG_KEY="SALT-PROJECT-GPG-PUBKEY-2023.pub" + + REPO_FILE="/etc/yum.repos.d/salt.repo" + + if [ ! -s "$REPO_FILE" ] || [ "$_FORCE_OVERWRITE" -eq $BS_TRUE ]; then + FETCH_URL="${HTTP_VAL}://${_REPO_URL}/${_ONEDIR_DIR}/${__PY_VERSION_REPO}/fedora/${DISTRO_MAJOR_VERSION}/${CPU_ARCH_L}/${ONEDIR_REV}" + if [ "${ONEDIR_REV}" = "nightly" ] ; then + FETCH_URL="${HTTP_VAL}://${_REPO_URL}/${_ONEDIR_NIGHTLY_DIR}/${__PY_VERSION_REPO}/fedora/${DISTRO_MAJOR_VERSION}/${CPU_ARCH_L}/" + fi + + __fetch_url "${REPO_FILE}" "${FETCH_URL}.repo" + + __rpm_import_gpg "${FETCH_URL}/${GPG_KEY}" || return 1 + + yum clean metadata || return 1 + elif [ "$REPO_REV" != "latest" ]; then + echowarn "salt.repo already exists, ignoring salt version argument." + echowarn "Use -F (forced overwrite) to install $REPO_REV." 
+ fi + + return 0 +} + install_fedora_deps() { if [ "$_UPGRADE_SYS" -eq $BS_TRUE ]; then dnf -y update || return 1 @@ -4308,12 +4427,18 @@ install_fedora_git_post() { [ $fname = "minion" ] && [ "$_INSTALL_MINION" -eq $BS_FALSE ] && continue [ $fname = "syndic" ] && [ "$_INSTALL_SYNDIC" -eq $BS_FALSE ] && continue - __copyfile "${_SALT_GIT_CHECKOUT_DIR}/pkg/rpm/salt-${fname}.service" "/lib/systemd/system/salt-${fname}.service" + # Account for new path for services files in later releases + if [ -f "${_SALT_GIT_CHECKOUT_DIR}/pkg/common/salt-${fname}.service" ]; then + _SERVICE_DIR="${_SALT_GIT_CHECKOUT_DIR}/pkg/common" + else + _SERVICE_DIR="${_SALT_GIT_CHECKOUT_DIR}/pkg/rpm" + fi + __copyfile "${_SERVICE_DIR}/salt-${fname}.service" "/lib/systemd/system/salt-${fname}.service" # Salt executables are located under `/usr/local/bin/` on Fedora 36+ - if [ "${DISTRO_VERSION}" -ge 36 ]; then - sed -i -e 's:/usr/bin/:/usr/local/bin/:g' /lib/systemd/system/salt-*.service - fi + #if [ "${DISTRO_VERSION}" -ge 36 ]; then + # sed -i -e 's:/usr/bin/:/usr/local/bin/:g' /lib/systemd/system/salt-*.service + #fi # Skip salt-api since the service should be opt-in and not necessarily started on boot [ $fname = "api" ] && continue @@ -4361,6 +4486,83 @@ install_fedora_check_services() { return 0 } + +install_fedora_onedir_deps() { + + if [ "$_UPGRADE_SYS" -eq $BS_TRUE ]; then + yum -y update || return 1 + fi + + if [ "$_DISABLE_REPOS" -eq "$BS_TRUE" ] && [ -n "$_PY_EXE" ] && [ "$_PY_MAJOR_VERSION" -eq 3 ]; then + echowarn "Detected -r or -R option while installing Salt packages for Python 3." + echowarn "Python 3 packages for older Salt releases requires the EPEL repository to be installed." + echowarn "Installing the EPEL repository automatically is disabled when using the -r or -R options." + fi + + if [ "$_DISABLE_REPOS" -eq "$BS_FALSE" ]; then + __install_saltstack_fedora_onedir_repository || return 1 + fi + + # If -R was passed, we need to configure custom repo url with rsync-ed packages + # Which is still handled in __install_saltstack_rhel_repository. This call has + # its own check in case -r was passed without -R. 
+ if [ "$_CUSTOM_REPO_URL" != "null" ]; then + __install_saltstack_fedora_onedir_repository || return 1 + fi + + if [ "$DISTRO_MAJOR_VERSION" -ge 8 ]; then + __PACKAGES="dnf-utils chkconfig" + else + __PACKAGES="yum-utils chkconfig" + fi + + __PACKAGES="${__PACKAGES} procps" + + # shellcheck disable=SC2086 + __yum_install_noinput ${__PACKAGES} || return 1 + + if [ "${_EXTRA_PACKAGES}" != "" ]; then + echoinfo "Installing the following extra packages as requested: ${_EXTRA_PACKAGES}" + # shellcheck disable=SC2086 + __yum_install_noinput ${_EXTRA_PACKAGES} || return 1 + fi + + return 0 + +} + + +install_fedora_onedir() { + STABLE_REV=$ONEDIR_REV + #install_fedora_stable || return 1 + + __PACKAGES="" + + if [ "$_INSTALL_CLOUD" -eq $BS_TRUE ];then + __PACKAGES="${__PACKAGES} salt-cloud" + fi + if [ "$_INSTALL_MASTER" -eq $BS_TRUE ];then + __PACKAGES="${__PACKAGES} salt-master" + fi + if [ "$_INSTALL_MINION" -eq $BS_TRUE ]; then + __PACKAGES="${__PACKAGES} salt-minion" + fi + if [ "$_INSTALL_SYNDIC" -eq $BS_TRUE ];then + __PACKAGES="${__PACKAGES} salt-syndic" + fi + + # shellcheck disable=SC2086 + __yum_install_noinput ${__PACKAGES} || return 1 + + return 0 +} + +install_fedora_onedir_post() { + STABLE_REV=$ONEDIR_REV + install_fedora_stable_post || return 1 + + return 0 +} # # Ended Fedora Install Functions # @@ -4370,30 +4572,6 @@ install_fedora_check_services() { # # CentOS Install Functions # -__install_epel_repository() { - if [ ${_EPEL_REPOS_INSTALLED} -eq $BS_TRUE ]; then - return 0 - fi - - # Check if epel repo is already enabled and flag it accordingly - if yum repolist | grep -q "^[!]\\?${_EPEL_REPO}/"; then - _EPEL_REPOS_INSTALLED=$BS_TRUE - return 0 - fi - - # Download latest 'epel-next-release' package for the distro version directly - epel_next_repo_url="${HTTP_VAL}://dl.fedoraproject.org/pub/epel/epel-next-release-latest-${DISTRO_MAJOR_VERSION}.noarch.rpm" - - # Download latest 'epel-release' package for the distro version directly - epel_repo_url="${HTTP_VAL}://dl.fedoraproject.org/pub/epel/epel-release-latest-${DISTRO_MAJOR_VERSION}.noarch.rpm" - - yum -y install "${epel_next_repo_url}" "${epel_repo_url}" - - _EPEL_REPOS_INSTALLED=$BS_TRUE - - return 0 -} - __install_saltstack_rhel_repository() { if [ "$ITYPE" = "stable" ]; then repo_rev="$STABLE_REV" @@ -4465,10 +4643,17 @@ __install_saltstack_rhel_onedir_repository() { # Avoid using '$releasever' variable for yum. # Instead, this should work correctly on all RHEL variants. 
base_url="${HTTP_VAL}://${_REPO_URL}/${_ONEDIR_DIR}/${__PY_VERSION_REPO}/redhat/${DISTRO_MAJOR_VERSION}/\$basearch/${ONEDIR_REV}/" - if [ "${DISTRO_MAJOR_VERSION}" -eq 9 ]; then - gpg_key="SALTSTACK-GPG-KEY2.pub" + if [ "${ONEDIR_REV}" = "nightly" ] ; then + base_url="${HTTP_VAL}://${_REPO_URL}/${_ONEDIR_NIGHTLY_DIR}/${__PY_VERSION_REPO}/redhat/${DISTRO_MAJOR_VERSION}/\$basearch/" + fi + if [ "$(echo "${ONEDIR_REV}" | grep -E '(3004|3005)')" != "" ]; then + if [ "${DISTRO_MAJOR_VERSION}" -eq 9 ]; then + gpg_key="SALTSTACK-GPG-KEY2.pub" + else + gpg_key="SALTSTACK-GPG-KEY.pub" + fi else - gpg_key="SALTSTACK-GPG-KEY.pub" + gpg_key="SALT-PROJECT-GPG-PUBKEY-2023.pub" fi gpg_key_urls="" @@ -4491,6 +4676,9 @@ enabled_metadata=1 _eof fetch_url="${HTTP_VAL}://${_REPO_URL}/${_ONEDIR_DIR}/${__PY_VERSION_REPO}/redhat/${DISTRO_MAJOR_VERSION}/${CPU_ARCH_L}/${ONEDIR_REV}/" + if [ "${ONEDIR_REV}" = "nightly" ] ; then + fetch_url="${HTTP_VAL}://${_REPO_URL}/${_ONEDIR_NIGHTLY_DIR}/${__PY_VERSION_REPO}/redhat/${DISTRO_MAJOR_VERSION}/${CPU_ARCH_L}/" + fi for key in $gpg_key; do __rpm_import_gpg "${fetch_url}${key}" || return 1 done @@ -4516,7 +4704,6 @@ install_centos_stable_deps() { fi if [ "$_DISABLE_REPOS" -eq "$BS_FALSE" ]; then - __install_epel_repository || return 1 __install_saltstack_rhel_repository || return 1 fi @@ -4558,6 +4745,8 @@ install_centos_stable_deps() { fi fi + __PACKAGES="${__PACKAGES} procps" + # shellcheck disable=SC2086 __yum_install_noinput ${__PACKAGES} || return 1 @@ -4590,6 +4779,13 @@ install_centos_stable() { # shellcheck disable=SC2086 __yum_install_noinput ${__PACKAGES} || return 1 + # Workaround for 3.11 broken on CentOS Stream 8.x + # Re-install Python 3.6 + _py_version=$(${_PY_EXE} -c "import sys; print('{0}.{1}'.format(*sys.version_info))") + if [ "$DISTRO_MAJOR_VERSION" -eq 8 ] && [ "${_py_version}" = "3.11" ]; then + __yum_install_noinput python3 + fi + return 0 } @@ -4625,7 +4821,15 @@ install_centos_stable_post() { } install_centos_git_deps() { - install_centos_stable_deps || return 1 + # First try stable deps then fall back to onedir deps if that one fails + # if we're installing on a Red Hat based host that doesn't have the classic + # package repos available. + # Set ONEDIR_REV to STABLE_REV in case we + # end up calling install_centos_onedir_deps + ONEDIR_REV=${STABLE_REV} + install_centos_stable_deps || \ + install_centos_onedir_deps || \ + return 1 if [ "$_INSECURE_DL" -eq $BS_FALSE ] && [ "${_SALT_REPO_URL%%://*}" = "https" ]; then __yum_install_noinput ca-certificates || return 1 @@ -4785,10 +4989,16 @@ install_centos_git_post() { [ $fname = "minion" ] && [ "$_INSTALL_MINION" -eq $BS_FALSE ] && continue [ $fname = "syndic" ] && [ "$_INSTALL_SYNDIC" -eq $BS_FALSE ] && continue + # Account for new path for services files in later releases + if [ -f "${_SALT_GIT_CHECKOUT_DIR}/pkg/common/salt-${fname}.service" ]; then + _SERVICE_FILE="${_SALT_GIT_CHECKOUT_DIR}/pkg/common/salt-${fname}.service" + else + _SERVICE_FILE="${_SALT_GIT_CHECKOUT_DIR}/pkg/rpm/salt-${fname}.service" + fi if [ -f /bin/systemctl ]; then if [ ! 
-f "/usr/lib/systemd/system/salt-${fname}.service" ] || \ { [ -f "/usr/lib/systemd/system/salt-${fname}.service" ] && [ "$_FORCE_OVERWRITE" -eq $BS_TRUE ]; }; then - __copyfile "${_SALT_GIT_CHECKOUT_DIR}/pkg/rpm/salt-${fname}.service" /usr/lib/systemd/system + __copyfile "${_SERVICE_FILE}" /usr/lib/systemd/system fi SYSTEMD_RELOAD=$BS_TRUE @@ -4820,7 +5030,6 @@ install_centos_onedir_deps() { fi if [ "$_DISABLE_REPOS" -eq "$BS_FALSE" ]; then - __install_epel_repository || return 1 __install_saltstack_rhel_onedir_repository || return 1 fi @@ -4837,6 +5046,8 @@ install_centos_onedir_deps() { __PACKAGES="yum-utils chkconfig" fi + __PACKAGES="${__PACKAGES} procps" + # shellcheck disable=SC2086 __yum_install_noinput ${__PACKAGES} || return 1 @@ -5344,6 +5555,11 @@ install_oracle_linux_git_post() { return 0 } +install_oracle_linux_onedir_post() { + install_centos_onedir_post || return 1 + return 0 +} + install_oracle_linux_testing_post() { install_centos_testing_post || return 1 return 0 @@ -5417,6 +5633,11 @@ install_almalinux_git_post() { return 0 } +install_almalinux_onedir_post() { + install_centos_onedir_post || return 1 + return 0 +} + install_almalinux_testing_post() { install_centos_testing_post || return 1 return 0 @@ -5490,6 +5711,11 @@ install_rocky_linux_git_post() { return 0 } +install_rocky_linux_onedir_post() { + install_centos_onedir_post || return 1 + return 0 +} + install_rocky_linux_testing_post() { install_centos_testing_post || return 1 return 0 @@ -5563,6 +5789,11 @@ install_scientific_linux_git_post() { return 0 } +install_scientific_linux_onedir_post() { + install_centos_onedir_post || return 1 + return 0 +} + install_scientific_linux_testing_post() { install_centos_testing_post || return 1 return 0 @@ -6206,9 +6437,17 @@ install_amazon_linux_ami_2_onedir_deps() { fi base_url="$HTTP_VAL://${_REPO_URL}/${_ONEDIR_DIR}/${__PY_VERSION_REPO}/amazon/2/\$basearch/$repo_rev/" - gpg_key="${base_url}SALTSTACK-GPG-KEY.pub,${base_url}base/RPM-GPG-KEY-CentOS-7" - if [ -n "$_PY_EXE" ] && [ "$_PY_MAJOR_VERSION" -eq 3 ]; then + if [ "${ONEDIR_REV}" = "nightly" ] ; then + base_url="$HTTP_VAL://${_REPO_URL}/${_ONEDIR_NIGHTLY_DIR}/${__PY_VERSION_REPO}/amazon/2/\$basearch/" + fi + + if [ "$(echo "${ONEDIR_REV}" | grep -E '(3004|3005)')" != "" ]; then + gpg_key="${base_url}SALTSTACK-GPG-KEY.pub,${base_url}base/RPM-GPG-KEY-CentOS-7" + if [ -n "$_PY_EXE" ] && [ "$_PY_MAJOR_VERSION" -eq 3 ]; then gpg_key="${base_url}SALTSTACK-GPG-KEY.pub" + fi + else + gpg_key="${base_url}SALT-PROJECT-GPG-PUBKEY-2023.pub" fi # This should prob be refactored to use __install_saltstack_rhel_repository() @@ -6427,6 +6666,10 @@ install_arch_linux_git_deps() { return 0 } +install_arch_linux_onedir_deps() { + install_arch_linux_stable_deps || return 1 +} + install_arch_linux_stable() { # Pacman does not resolve dependencies on outdated versions # They always need to be updated @@ -6445,6 +6688,8 @@ install_arch_linux_stable() { install_arch_linux_git() { + _POST_NEON_PIP_INSTALL_ARGS="${_POST_NEON_PIP_INSTALL_ARGS} --use-pep517" + _PIP_DOWNLOAD_ARGS="${_PIP_DOWNLOAD_ARGS} --use-pep517" if [ "${_POST_NEON_INSTALL}" -eq $BS_TRUE ]; then __install_salt_from_repo_post_neon "${_PY_EXE}" || return 1 return 0 @@ -6502,8 +6747,15 @@ install_arch_linux_git_post() { [ $fname = "minion" ] && [ "$_INSTALL_MINION" -eq $BS_FALSE ] && continue [ $fname = "syndic" ] && [ "$_INSTALL_SYNDIC" -eq $BS_FALSE ] && continue + # Account for new path for services files in later releases + if [ -f 
"${_SALT_GIT_CHECKOUT_DIR}/pkg/common/salt-${fname}.service" ]; then + _SERVICE_DIR="${_SALT_GIT_CHECKOUT_DIR}/pkg/common" + else + _SERVICE_DIR="${_SALT_GIT_CHECKOUT_DIR}/pkg/rpm" + fi + if [ -f /usr/bin/systemctl ]; then - __copyfile "${_SALT_GIT_CHECKOUT_DIR}/pkg/rpm/salt-${fname}.service" "/lib/systemd/system/salt-${fname}.service" + __copyfile "${_SERVICE_DIR}/salt-${fname}.service" "/lib/systemd/system/salt-${fname}.service" # Skip salt-api since the service should be opt-in and not necessarily started on boot [ $fname = "api" ] && continue @@ -6570,11 +6822,342 @@ install_arch_check_services() { return 0 } + +install_arch_linux_onedir() { + install_arch_linux_stable || return 1 + + return 0 +} + +install_arch_linux_onedir_post() { + install_arch_linux_post || return 1 + + return 0 +} # # Ended Arch Install Functions # ####################################################################################################################### +####################################################################################################################### +# +# Photon OS Install Functions +# + +__install_saltstack_photon_onedir_repository() { + if [ "$ITYPE" = "stable" ]; then + REPO_REV="$ONEDIR_REV" + else + REPO_REV="latest" + fi + + __PY_VERSION_REPO="yum" + if [ -n "$_PY_EXE" ] && [ "$_PY_MAJOR_VERSION" -eq 3 ]; then + __PY_VERSION_REPO="py3" + fi + + REPO_FILE="/etc/yum.repos.d/salt.repo" + + if [ ! -s "$REPO_FILE" ] || [ "$_FORCE_OVERWRITE" -eq $BS_TRUE ]; then + FETCH_URL="${HTTP_VAL}://${_REPO_URL}/${_ONEDIR_DIR}/${__PY_VERSION_REPO}/photon/${DISTRO_MAJOR_VERSION}/${CPU_ARCH_L}/${ONEDIR_REV}" + if [ "${ONEDIR_REV}" = "nightly" ] ; then + FETCH_URL="${HTTP_VAL}://${_REPO_URL}/${_ONEDIR_NIGHTLY_DIR}/${__PY_VERSION_REPO}/photon/${DISTRO_MAJOR_VERSION}/${CPU_ARCH_L}/" + fi + + __fetch_url "${REPO_FILE}" "${FETCH_URL}.repo" + + GPG_KEY="SALT-PROJECT-GPG-PUBKEY-2023.pub" + + __rpm_import_gpg "${FETCH_URL}/${GPG_KEY}" || return 1 + + tdnf makecache || return 1 + elif [ "$REPO_REV" != "latest" ]; then + echowarn "salt.repo already exists, ignoring salt version argument." + echowarn "Use -F (forced overwrite) to install $REPO_REV." 
+ fi + + return 0 +} + +install_photon_deps() { + if [ "$_UPGRADE_SYS" -eq $BS_TRUE ]; then + tdnf -y update || return 1 + fi + + __PACKAGES="${__PACKAGES:=}" + if [ -n "$_PY_EXE" ] && [ "$_PY_MAJOR_VERSION" -lt 3 ]; then + echoerror "There are no Python 2 stable packages for Fedora, only Py3 packages" + return 1 + fi + + PY_PKG_VER=3 + + __PACKAGES="${__PACKAGES} libyaml procps-ng python${PY_PKG_VER}-crypto python${PY_PKG_VER}-jinja2" + __PACKAGES="${__PACKAGES} python${PY_PKG_VER}-msgpack python${PY_PKG_VER}-requests python${PY_PKG_VER}-zmq" + __PACKAGES="${__PACKAGES} python${PY_PKG_VER}-pip python${PY_PKG_VER}-m2crypto python${PY_PKG_VER}-pyyaml" + __PACKAGES="${__PACKAGES} python${PY_PKG_VER}-systemd" + if [ "${_EXTRA_PACKAGES}" != "" ]; then + echoinfo "Installing the following extra packages as requested: ${_EXTRA_PACKAGES}" + fi + + # shellcheck disable=SC2086 + __tdnf_install_noinput ${__PACKAGES} ${_EXTRA_PACKAGES} || return 1 + + return 0 +} + +install_photon_stable_post() { + for fname in api master minion syndic; do + # Skip salt-api since the service should be opt-in and not necessarily started on boot + [ $fname = "api" ] && continue + + # Skip if not meant to be installed + [ $fname = "master" ] && [ "$_INSTALL_MASTER" -eq $BS_FALSE ] && continue + [ $fname = "minion" ] && [ "$_INSTALL_MINION" -eq $BS_FALSE ] && continue + [ $fname = "syndic" ] && [ "$_INSTALL_SYNDIC" -eq $BS_FALSE ] && continue + + systemctl is-enabled salt-$fname.service || (systemctl preset salt-$fname.service && systemctl enable salt-$fname.service) + sleep 1 + systemctl daemon-reload + done +} + +install_photon_git_deps() { + if [ -n "$_PY_EXE" ] && [ "$_PY_MAJOR_VERSION" -eq 3 ]; then + # Packages are named python3- + PY_PKG_VER=3 + else + PY_PKG_VER=2 + fi + + __PACKAGES="" + if ! __check_command_exists ps; then + __PACKAGES="${__PACKAGES} procps-ng" + fi + if ! __check_command_exists git; then + __PACKAGES="${__PACKAGES} git" + fi + + if [ -n "${__PACKAGES}" ]; then + # shellcheck disable=SC2086 + __tdnf_install_noinput ${__PACKAGES} || return 1 + __PACKAGES="" + fi + + __git_clone_and_checkout || return 1 + + if [ "${_POST_NEON_INSTALL}" -eq $BS_FALSE ]; then + + if [ "$_INSECURE_DL" -eq $BS_FALSE ] && [ "${_SALT_REPO_URL%%://*}" = "https" ]; then + __PACKAGES="${__PACKAGES} ca-certificates" + fi + if [ "$_INSTALL_CLOUD" -eq $BS_TRUE ]; then + __PACKAGES="${__PACKAGES} python${PY_PKG_VER}-libcloud python${PY_PKG_VER}-netaddr" + fi + + install_photon_deps || return 1 + + if [ -n "$_PY_EXE" ] && [ "$_PY_MAJOR_VERSION" -eq 3 ]; then + if __check_command_exists python3; then + __python="python3" + fi + elif [ -n "$_PY_EXE" ] && [ "$_PY_MAJOR_VERSION" -eq 2 ]; then + if __check_command_exists python2; then + __python="python2" + fi + else + if ! __check_command_exists python; then + echoerror "Unable to find a python binary?!" 
+ return 1 + fi + # Let's hope it's the right one + __python="python" + fi + + grep tornado "${_SALT_GIT_CHECKOUT_DIR}/requirements/base.txt" | while IFS=' + ' read -r dep; do + echodebug "Running '${__python}' -m pip install '${dep}'" + "${__python}" -m pip install "${dep}" || return 1 + done + else + __PACKAGES="python${PY_PKG_VER}-devel python${PY_PKG_VER}-pip python${PY_PKG_VER}-setuptools gcc" + # shellcheck disable=SC2086 + __tdnf_install_noinput ${__PACKAGES} || return 1 + fi + + # Need newer version of setuptools on Photon + _setuptools_dep="setuptools>=${_MINIMUM_SETUPTOOLS_VERSION}" + echodebug "Running '${_PY_EXE} -m pip --upgrade install ${_setuptools_dep}'" + ${_PY_EXE} -m pip install --upgrade "${_setuptools_dep}" + + # Let's trigger config_salt() + if [ "$_TEMP_CONFIG_DIR" = "null" ]; then + _TEMP_CONFIG_DIR="${_SALT_GIT_CHECKOUT_DIR}/conf/" + CONFIG_SALT_FUNC="config_salt" + fi + + return 0 +} + +install_photon_git() { + if [ "${_PY_EXE}" != "" ]; then + _PYEXE=${_PY_EXE} + echoinfo "Using the following python version: ${_PY_EXE} to install salt" + else + _PYEXE='python2' + fi + + if [ -f "${_SALT_GIT_CHECKOUT_DIR}/salt/syspaths.py" ]; then + ${_PYEXE} setup.py --salt-config-dir="$_SALT_ETC_DIR" --salt-cache-dir="${_SALT_CACHE_DIR}" ${SETUP_PY_INSTALL_ARGS} install --prefix=/usr || return 1 + else + ${_PYEXE} setup.py ${SETUP_PY_INSTALL_ARGS} install --prefix=/usr || return 1 + fi + return 0 +} + +install_photon_git_post() { + for fname in api master minion syndic; do + # Skip if not meant to be installed + [ $fname = "api" ] && \ + ([ "$_INSTALL_MASTER" -eq $BS_FALSE ] || ! __check_command_exists "salt-${fname}") && continue + [ $fname = "master" ] && [ "$_INSTALL_MASTER" -eq $BS_FALSE ] && continue + [ $fname = "minion" ] && [ "$_INSTALL_MINION" -eq $BS_FALSE ] && continue + [ $fname = "syndic" ] && [ "$_INSTALL_SYNDIC" -eq $BS_FALSE ] && continue + + # Account for new path for services files in later releases + if [ -f "${_SALT_GIT_CHECKOUT_DIR}/pkg/common/salt-${fname}.service" ]; then + _SERVICE_DIR="${_SALT_GIT_CHECKOUT_DIR}/pkg/common" + else + _SERVICE_DIR="${_SALT_GIT_CHECKOUT_DIR}/pkg/rpm" + fi + __copyfile "${_SERVICE_DIR}/salt-${fname}.service" "/lib/systemd/system/salt-${fname}.service" + + # Salt executables are located under `/usr/local/bin/` on Fedora 36+ + #if [ "${DISTRO_VERSION}" -ge 36 ]; then + # sed -i -e 's:/usr/bin/:/usr/local/bin/:g' /lib/systemd/system/salt-*.service + #fi + + # Skip salt-api since the service should be opt-in and not necessarily started on boot + [ $fname = "api" ] && continue + + systemctl is-enabled salt-$fname.service || (systemctl preset salt-$fname.service && systemctl enable salt-$fname.service) + sleep 1 + systemctl daemon-reload + done +} + +install_photon_restart_daemons() { + [ $_START_DAEMONS -eq $BS_FALSE ] && return + + for fname in api master minion syndic; do + # Skip salt-api since the service should be opt-in and not necessarily started on boot + [ $fname = "api" ] && continue + + # Skip if not meant to be installed + [ $fname = "master" ] && [ "$_INSTALL_MASTER" -eq $BS_FALSE ] && continue + [ $fname = "minion" ] && [ "$_INSTALL_MINION" -eq $BS_FALSE ] && continue + [ $fname = "syndic" ] && [ "$_INSTALL_SYNDIC" -eq $BS_FALSE ] && continue + + systemctl stop salt-$fname > /dev/null 2>&1 + systemctl start salt-$fname.service && continue + echodebug "Failed to start salt-$fname using systemd" + if [ "$_ECHO_DEBUG" -eq $BS_TRUE ]; then + systemctl status salt-$fname.service + journalctl -xe + fi + done +} + 
+install_photon_check_services() { + for fname in api master minion syndic; do + # Skip salt-api since the service should be opt-in and not necessarily started on boot + [ $fname = "api" ] && continue + + # Skip if not meant to be installed + [ $fname = "master" ] && [ "$_INSTALL_MASTER" -eq $BS_FALSE ] && continue + [ $fname = "minion" ] && [ "$_INSTALL_MINION" -eq $BS_FALSE ] && continue + [ $fname = "syndic" ] && [ "$_INSTALL_SYNDIC" -eq $BS_FALSE ] && continue + + __check_services_systemd salt-$fname || return 1 + done + + return 0 +} + +install_photon_onedir_deps() { + + if [ "$_UPGRADE_SYS" -eq $BS_TRUE ]; then + tdnf -y update || return 1 + fi + + if [ "$_DISABLE_REPOS" -eq "$BS_TRUE" ] && [ -n "$_PY_EXE" ] && [ "$_PY_MAJOR_VERSION" -eq 3 ]; then + echowarn "Detected -r or -R option while installing Salt packages for Python 3." + echowarn "Python 3 packages for older Salt releases requires the EPEL repository to be installed." + echowarn "Installing the EPEL repository automatically is disabled when using the -r or -R options." + fi + + if [ "$_DISABLE_REPOS" -eq "$BS_FALSE" ]; then + __install_saltstack_photon_onedir_repository || return 1 + fi + + # If -R was passed, we need to configure custom repo url with rsync-ed packages + # Which is still handled in __install_saltstack_rhel_repository. This call has + # its own check in case -r was passed without -R. + if [ "$_CUSTOM_REPO_URL" != "null" ]; then + __install_saltstack_photon_onedir_repository || return 1 + fi + + __PACKAGES="procps-ng" + + # shellcheck disable=SC2086 + __tdnf_install_noinput ${__PACKAGES} || return 1 + + if [ "${_EXTRA_PACKAGES}" != "" ]; then + echoinfo "Installing the following extra packages as requested: ${_EXTRA_PACKAGES}" + # shellcheck disable=SC2086 + __tdnf_install_noinput ${_EXTRA_PACKAGES} || return 1 + fi + + return 0 + +} + + +install_photon_onedir() { + STABLE_REV=$ONEDIR_REV + + __PACKAGES="" + + if [ "$_INSTALL_CLOUD" -eq $BS_TRUE ];then + __PACKAGES="${__PACKAGES} salt-cloud" + fi + if [ "$_INSTALL_MASTER" -eq $BS_TRUE ];then + __PACKAGES="${__PACKAGES} salt-master" + fi + if [ "$_INSTALL_MINION" -eq $BS_TRUE ]; then + __PACKAGES="${__PACKAGES} salt-minion" + fi + if [ "$_INSTALL_SYNDIC" -eq $BS_TRUE ];then + __PACKAGES="${__PACKAGES} salt-syndic" + fi + + # shellcheck disable=SC2086 + __tdnf_install_noinput ${__PACKAGES} || return 1 + + return 0 +} + +install_photon_onedir_post() { + STABLE_REV=$ONEDIR_REV + install_photon_stable_post || return 1 + + return 0 +} +# +# Ended Fedora Install Functions +# +####################################################################################################################### + ####################################################################################################################### # # FreeBSD Install Functions @@ -6748,6 +7331,15 @@ install_freebsd_restart_daemons() { service salt_$fname start done } + +install_freebsd_onedir() { +# +# call install_freebsd_stable +# + install_freebsd_stable || return 1 + + return 0 +} # # Ended FreeBSD Install Functions # @@ -6866,6 +7458,14 @@ install_openbsd_restart_daemons() { return 0 } +install_openbsd_onedir() { +# +# Call install_openbsd_stable +# + install_openbsd_stable || return 1 + + return 0 +} # # Ended OpenBSD Install Functions # @@ -7066,6 +7666,14 @@ install_smartos_restart_daemons() { return 0 } +install_smartos_onedir() { +# +# call install_smartos_stable +# + install_smartos_stable || return 1 + + return 0 +} # # Ended SmartOS Install Functions # @@ -7233,7 +7841,7 @@ 
install_opensuse_git_deps() { fi # Check for Tumbleweed elif [ "${DISTRO_MAJOR_VERSION}" -ge 20210101 ]; then - __PACKAGES="python3-pip gcc-c++ python310-pyzmq-devel" + __PACKAGES="python3-pip gcc-c++ python3-pyzmq-devel" else __PACKAGES="python-pip python-setuptools gcc" fi @@ -7250,6 +7858,10 @@ install_opensuse_git_deps() { return 0 } +install_opensuse_onedir_deps() { + install_opensuse_stable_deps || return 1 +} + install_opensuse_stable() { __PACKAGES="" @@ -7282,6 +7894,10 @@ install_opensuse_git() { return 0 } +install_opensuse_onedir() { + install_opensuse_stable || return 1 +} + install_opensuse_stable_post() { for fname in api master minion syndic; do # Skip salt-api since the service should be opt-in and not necessarily started on boot @@ -7326,10 +7942,17 @@ install_opensuse_git_post() { use_usr_lib=$BS_TRUE fi - if [ "${use_usr_lib}" -eq $BS_TRUE ]; then - __copyfile "${_SALT_GIT_CHECKOUT_DIR}/pkg/salt-${fname}.service" "/usr/lib/systemd/system/salt-${fname}.service" + # Account for new path for services files in later releases + if [ -f "${_SALT_GIT_CHECKOUT_DIR}/pkg/common/salt-${fname}.service" ]; then + _SERVICE_DIR="${_SALT_GIT_CHECKOUT_DIR}/pkg/common" else - __copyfile "${_SALT_GIT_CHECKOUT_DIR}/pkg/salt-${fname}.service" "/lib/systemd/system/salt-${fname}.service" + _SERVICE_DIR="${_SALT_GIT_CHECKOUT_DIR}/pkg/" + fi + + if [ "${use_usr_lib}" -eq $BS_TRUE ]; then + __copyfile "${_SERVICE_DIR}/salt-${fname}.service" "/usr/lib/systemd/system/salt-${fname}.service" + else + __copyfile "${_SERVICE_DIR}/salt-${fname}.service" "/lib/systemd/system/salt-${fname}.service" fi continue @@ -7344,6 +7967,10 @@ install_opensuse_git_post() { return 0 } +install_opensuse_onedir_post() { + install_opensuse_stable_post || return 1 +} + install_opensuse_restart_daemons() { [ $_START_DAEMONS -eq $BS_FALSE ] && return @@ -7849,11 +8476,6 @@ __gentoo_pre_dep() { mkdir /etc/portage fi - # Enable Python 3.6 target for pre Neon Salt release - if echo "${STABLE_REV}" | grep -q "2019" || [ "${ITYPE}" = "git" ] && [ "${_POST_NEON_INSTALL}" -eq $BS_FALSE ]; then - EXTRA_PYTHON_TARGET=python3_6 - fi - # Enable Python 3.7 target for Salt Neon using GIT if [ "${ITYPE}" = "git" ] && [ "${GIT_REV}" = "v3000" ]; then EXTRA_PYTHON_TARGET=python3_7 @@ -7949,6 +8571,9 @@ install_gentoo_git_deps() { __emerge ${GENTOO_GIT_PACKAGES} || return 1 fi + echoinfo "Running emerge -v1 setuptools" + __emerge -v1 setuptools || return 1 + __git_clone_and_checkout || return 1 __gentoo_post_dep || return 1 } @@ -7996,6 +8621,11 @@ install_gentoo_git() { return 0 } +install_gentoo_onedir() { + STABLE_REV=${ONEDIR_REV} + install_gentoo_stable || return 1 +} + install_gentoo_post() { for fname in api master minion syndic; do # Skip salt-api since the service should be opt-in and not necessarily started on boot @@ -8031,8 +8661,15 @@ install_gentoo_git_post() { [ $fname = "minion" ] && [ "$_INSTALL_MINION" -eq $BS_FALSE ] && continue [ $fname = "syndic" ] && [ "$_INSTALL_SYNDIC" -eq $BS_FALSE ] && continue + # Account for new path for services files in later releases + if [ -f "${_SALT_GIT_CHECKOUT_DIR}/pkg/common/salt-${fname}.service" ]; then + _SERVICE_DIR="${_SALT_GIT_CHECKOUT_DIR}/pkg/common" + else + _SERVICE_DIR="${_SALT_GIT_CHECKOUT_DIR}/pkg" + fi + if __check_command_exists systemctl ; then - __copyfile "${_SALT_GIT_CHECKOUT_DIR}/pkg/salt-${fname}.service" "/lib/systemd/system/salt-${fname}.service" + __copyfile "${_SERVICE_DIR}/salt-${fname}.service" "/lib/systemd/system/salt-${fname}.service" # Skip salt-api since 
the service should be opt-in and not necessarily started on boot [ $fname = "api" ] && continue @@ -8078,6 +8715,10 @@ _eof return 0 } +install_gentoo_onedir_post() { + install_gentoo_post || return 1 +} + install_gentoo_restart_daemons() { [ $_START_DAEMONS -eq $BS_FALSE ] && return @@ -8229,7 +8870,46 @@ __macosx_get_packagesite() { fi PKG="salt-${STABLE_REV}-${__PY_VERSION_REPO}-${DARWIN_ARCH}.pkg" - SALTPKGCONFURL="https://repo.saltproject.io/osx/${PKG}" + SALTPKGCONFURL="https://${_REPO_URL}/osx/${PKG}" +} + +__parse_repo_json_python() { + + # Using latest, grab the right + # version from the repo.json + _JSON_VERSION=$(python - <<-EOF +import json, urllib.request +url = "https://repo.saltproject.io/salt/py3/macos/repo.json" +response = urllib.request.urlopen(url) +data = json.loads(response.read()) +version = data["${_ONEDIR_REV}"][list(data["${_ONEDIR_REV}"])[0]]['version'] +print(version) +EOF +) +echo "${_JSON_VERSION}" +} + +__macosx_get_packagesite_onedir() { + DARWIN_ARCH="x86_64" + + __PY_VERSION_REPO="py2" + if [ -n "$_PY_EXE" ] && [ "$_PY_MAJOR_VERSION" -eq 3 ]; then + __PY_VERSION_REPO="py3" + fi + + if [ "$(echo "$_ONEDIR_REV" | grep -E '^(latest)$')" != "" ]; then + _PKG_VERSION=$(__parse_repo_json_python) + elif [ "$(echo "$_ONEDIR_REV" | grep -E '^([3-9][0-9]{3}(\.[0-9]*))')" != "" ]; then + _PKG_VERSION=$_ONEDIR_REV + else + _PKG_VERSION=$(__parse_repo_json_python) + fi + if [ "$(echo "$_ONEDIR_REV" | grep -E '^(3005)')" != "" ]; then + PKG="salt-${_PKG_VERSION}-macos-${DARWIN_ARCH}.pkg" + else + PKG="salt-${_PKG_VERSION}-${__PY_VERSION_REPO}-${DARWIN_ARCH}.pkg" + fi + SALTPKGCONFURL="https://${_REPO_URL}/${_ONEDIR_DIR}/${__PY_VERSION_REPO}/macos/${ONEDIR_REV}/${PKG}" } # Using a separate conf step to head for idempotent install... 
@@ -8238,11 +8918,21 @@ __configure_macosx_pkg_details() {
     __macosx_get_packagesite || return 1
     return 0
 }
 
+__configure_macosx_pkg_details_onedir() {
+    __macosx_get_packagesite_onedir || return 1
+    return 0
+}
+
 install_macosx_stable_deps() {
     __configure_macosx_pkg_details || return 1
     return 0
 }
 
+install_macosx_onedir_deps() {
+    __configure_macosx_pkg_details_onedir || return 1
+    return 0
+}
+
 install_macosx_git_deps() {
     install_macosx_stable_deps || return 1
@@ -8289,6 +8979,16 @@ install_macosx_stable() {
     return 0
 }
 
+install_macosx_onedir() {
+    install_macosx_onedir_deps || return 1
+
+    __fetch_url "/tmp/${PKG}" "${SALTPKGCONFURL}" || return 1
+
+    /usr/sbin/installer -pkg "/tmp/${PKG}" -target / || return 1
+
+    return 0
+}
+
 install_macosx_git() {
 
     if [ -n "$_PY_EXE" ]; then
@@ -8326,6 +9026,11 @@ install_macosx_stable_post() {
     return 0
 }
 
+install_macosx_onedir_post() {
+    install_macosx_stable_post || return 1
+    return 0
+}
+
 install_macosx_git_post() {
     install_macosx_stable_post || return 1
     return 0
@@ -8334,8 +9039,15 @@ install_macosx_git_post() {
 install_macosx_restart_daemons() {
     [ $_START_DAEMONS -eq $BS_FALSE ] && return
 
-    /bin/launchctl unload -w /Library/LaunchDaemons/com.saltstack.salt.minion.plist || return 1
-    /bin/launchctl load -w /Library/LaunchDaemons/com.saltstack.salt.minion.plist || return 1
+    if [ "$_INSTALL_MINION" -eq $BS_TRUE ]; then
+        /bin/launchctl unload -w /Library/LaunchDaemons/com.saltstack.salt.minion.plist || return 1
+        /bin/launchctl load -w /Library/LaunchDaemons/com.saltstack.salt.minion.plist || return 1
+    fi
+
+    if [ "$_INSTALL_MASTER" -eq $BS_TRUE ]; then
+        /bin/launchctl unload -w /Library/LaunchDaemons/com.saltstack.salt.master.plist || return 1
+        /bin/launchctl load -w /Library/LaunchDaemons/com.saltstack.salt.master.plist || return 1
+    fi
 
     return 0
 }
@@ -8554,7 +9266,11 @@ daemons_running_onedir() {
         [ $fname = "master" ] && [ "$_INSTALL_MASTER" -eq $BS_FALSE ] && continue
         [ $fname = "syndic" ] && [ "$_INSTALL_SYNDIC" -eq $BS_FALSE ] && continue
 
-        salt_path="/opt/saltstack/salt/run/run ${fname}"
+        if [ -f "/opt/saltstack/salt/run/run" ]; then
+            salt_path="/opt/saltstack/salt/run/run ${fname}"
+        else
+            salt_path="salt-${fname}"
+        fi
         process_running=$(pgrep -f "${salt_path}")
         if [ "${process_running}" = "" ]; then
             echoerror "${salt_path} was not found running"

From 34f2ed4b4ec6d43db3e27ee8a31c2252de905b56 Mon Sep 17 00:00:00 2001
From: "Gareth J. Greenaway"
Date: Fri, 28 Apr 2023 17:55:26 -0700
Subject: [PATCH 004/121] Check that the return data from the cloud create
 function is a dictionary before attempting to pull values out.

---
 changelog/61236.fixed.md               |  1 +
 salt/cloud/__init__.py                 |  3 ++-
 tests/pytests/unit/cloud/test_cloud.py | 23 +++++++++++++++++++++++
 3 files changed, 26 insertions(+), 1 deletion(-)
 create mode 100644 changelog/61236.fixed.md

diff --git a/changelog/61236.fixed.md b/changelog/61236.fixed.md
new file mode 100644
index 00000000000..4c50beedcba
--- /dev/null
+++ b/changelog/61236.fixed.md
@@ -0,0 +1 @@
+Check that the return data from the cloud create function is a dictionary before attempting to pull values out.
diff --git a/salt/cloud/__init__.py b/salt/cloud/__init__.py index 2b21483599d..a4d11eed59a 100644 --- a/salt/cloud/__init__.py +++ b/salt/cloud/__init__.py @@ -1427,7 +1427,8 @@ class Cloud: raise SaltCloudSystemExit("Failed to deploy VM") continue if self.opts.get("show_deploy_args", False) is False: - ret[name].pop("deploy_kwargs", None) + if isinstance(ret[name], dict): + ret[name].pop("deploy_kwargs", None) except (SaltCloudSystemExit, SaltCloudConfigError) as exc: if len(names) == 1: raise diff --git a/tests/pytests/unit/cloud/test_cloud.py b/tests/pytests/unit/cloud/test_cloud.py index 303374a3715..bd8595dcf86 100644 --- a/tests/pytests/unit/cloud/test_cloud.py +++ b/tests/pytests/unit/cloud/test_cloud.py @@ -1,6 +1,7 @@ import pytest from salt.cloud import Cloud +from salt.exceptions import SaltCloudSystemExit from tests.support.mock import MagicMock, patch @@ -123,3 +124,25 @@ def test_vm_config_merger(): } vm = Cloud.vm_config("test_vm", main, provider, profile, {}) assert expected == vm + + +def test_cloud_run_profile_create_returns_boolean(master_config): + + master_config["profiles"] = {"test_profile": {"provider": "test_provider:saltify"}} + master_config["providers"] = { + "test_provider": { + "saltify": {"profiles": {"provider": "test_provider:saltify"}} + } + } + master_config["show_deploy_args"] = False + + cloud = Cloud(master_config) + with patch.object(cloud, "create", return_value=True): + ret = cloud.run_profile("test_profile", ["test_vm"]) + assert ret == {"test_vm": True} + + cloud = Cloud(master_config) + with patch.object(cloud, "create", return_value=False): + with pytest.raises(SaltCloudSystemExit): + ret = cloud.run_profile("test_profile", ["test_vm"]) + assert ret == {"test_vm": False} From 3448a6c99089a0c1633ba410adfe0c8c11b77fc0 Mon Sep 17 00:00:00 2001 From: Twangboy Date: Fri, 28 Apr 2023 10:18:16 -0600 Subject: [PATCH 005/121] Add support for test=True to file.cached --- changelog/63785.fixed.md | 1 + salt/states/file.py | 19 ++++ .../functional/states/file/test_cached.py | 96 +++++++++++++++++++ 3 files changed, 116 insertions(+) create mode 100644 changelog/63785.fixed.md create mode 100644 tests/pytests/functional/states/file/test_cached.py diff --git a/changelog/63785.fixed.md b/changelog/63785.fixed.md new file mode 100644 index 00000000000..4a8406126ea --- /dev/null +++ b/changelog/63785.fixed.md @@ -0,0 +1 @@ +Added support for ``test=True`` to the ``file.cached`` state module diff --git a/salt/states/file.py b/salt/states/file.py index 9f32151b8b1..a7b9e896234 100644 --- a/salt/states/file.py +++ b/salt/states/file.py @@ -8937,6 +8937,25 @@ def cached( else: source_sum = {} + if __opts__["test"]: + local_copy = __salt__["cp.is_cached"](name, saltenv=saltenv) + if local_copy: + if source_sum: + hash = __salt__["file.get_hash"](local_copy, __opts__["hash_type"]) + if hash == source_sum["hsum"]: + ret["comment"] = "File already cached: {}".format(name) + else: + ret[ + "comment" + ] = "Hashes don't match.\nFile will be cached: {}".format(name) + else: + ret["comment"] = "No hash found. 
File will be cached: {}".format(name) + else: + ret["comment"] = "File will be cached: {}".format(name) + ret["changes"] = {} + ret["result"] = None + return ret + if parsed.scheme in salt.utils.files.LOCAL_PROTOS: # Source is a local file path full_path = os.path.realpath(os.path.expanduser(parsed.path)) diff --git a/tests/pytests/functional/states/file/test_cached.py b/tests/pytests/functional/states/file/test_cached.py new file mode 100644 index 00000000000..1b052382071 --- /dev/null +++ b/tests/pytests/functional/states/file/test_cached.py @@ -0,0 +1,96 @@ +import secrets + +import pytest + +import salt.states.file as file +from tests.support.mock import MagicMock, patch + +pytestmark = [ + pytest.mark.windows_whitelisted, +] + + +@pytest.fixture +def configure_loader_modules(): + return { + file: {"__opts__": {"test": False}}, + } + + +def test_cached_test_true(): + name = "salt://test/file.exe" + source_hash = secrets.token_hex(nbytes=32) + expected = { + "changes": {}, + "comment": "File will be cached: {}".format(name), + "name": name, + "result": None, + } + salt = { + "cp.is_cached": MagicMock(return_value=""), + "file.get_source_sum": MagicMock(return_value={"hsum": source_hash}), + } + opts = {"test": True} + with patch.dict(file.__salt__, salt), patch.dict(file.__opts__, opts): + result = file.cached(name=name, source_hash=source_hash) + assert result == expected + + +def test_cached_present_test_true(): + name = "salt://test/file.exe" + source_hash = secrets.token_hex(nbytes=32) + expected = { + "changes": {}, + "comment": "File already cached: {}".format(name), + "name": name, + "result": None, + } + salt = { + "cp.is_cached": MagicMock(return_value="path/to/file"), + "file.get_hash": MagicMock(return_value=source_hash), + "file.get_source_sum": MagicMock(return_value={"hsum": source_hash}), + } + opts = {"test": True, "hash_type": "sha256"} + with patch.dict(file.__salt__, salt), patch.dict(file.__opts__, opts): + result = file.cached(name=name, source_hash=source_hash) + assert result == expected + + +def test_cached_present_different_hash_test_true(): + name = "salt://test/file.exe" + source_hash = secrets.token_hex(nbytes=32) + existing_hash = secrets.token_hex(nbytes=32) + expected = { + "changes": {}, + "comment": "Hashes don't match.\nFile will be cached: {}".format(name), + "name": name, + "result": None, + } + salt = { + "cp.is_cached": MagicMock(return_value="path/to/file"), + "file.get_hash": MagicMock(return_value=existing_hash), + "file.get_source_sum": MagicMock(return_value={"hsum": source_hash}), + } + opts = {"test": True, "hash_type": "sha256"} + with patch.dict(file.__salt__, salt), patch.dict(file.__opts__, opts): + result = file.cached(name=name, source_hash=source_hash) + assert result == expected + + +def test_cached_present_no_source_hash_test_true(): + name = "salt://test/file.exe" + existing_hash = secrets.token_hex(nbytes=32) + expected = { + "changes": {}, + "comment": "No hash found. 
File will be cached: {}".format(name),
+        "name": name,
+        "result": None,
+    }
+    salt = {
+        "cp.is_cached": MagicMock(return_value="path/to/file"),
+        "file.get_hash": MagicMock(return_value=existing_hash),
+    }
+    opts = {"test": True, "hash_type": "sha256"}
+    with patch.dict(file.__salt__, salt), patch.dict(file.__opts__, opts):
+        result = file.cached(name=name)
+        assert result == expected

From c0f4e4fc8d578d5aeb6c1ec37b9b9dc991f85032 Mon Sep 17 00:00:00 2001
From: cmcmarrow
Date: Mon, 17 Apr 2023 14:20:01 -0500
Subject: [PATCH 006/121] lower y

---
 CONTRIBUTING.rst                       | 2 +-
 doc/topics/tutorials/writing_tests.rst | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst
index 42255742eb0..5903a856723 100644
--- a/CONTRIBUTING.rst
+++ b/CONTRIBUTING.rst
@@ -538,7 +538,7 @@ You can enable or disable test groups locally by passing their respective flag:
 * --core-tests - Tests of any speed that test the root parts of salt.
 * --flaky-jail - Tests that need to be temporarily skipped.
 
-In Your PR, you can enable or disable test groups by setting a label.
+In your PR, you can enable or disable test groups by setting a label.
 All fast, slow, and core tests specified in the change file will always run.
 
 * test:no-fast
diff --git a/doc/topics/tutorials/writing_tests.rst b/doc/topics/tutorials/writing_tests.rst
index b0505060215..244091c663e 100644
--- a/doc/topics/tutorials/writing_tests.rst
+++ b/doc/topics/tutorials/writing_tests.rst
@@ -507,7 +507,7 @@ You can enable or disable test groups locally by passing their respective flag:
 * --core-tests
 * --flaky-jail
 
-In Your PR you can enable or disable test groups by setting a label.
+In your PR you can enable or disable test groups by setting a label.
 All fast, slow, and core tests specified in the change file will always run.
 
* test:no-fast From 3f23a4b14d33d6fa3a51e6f989309966ac4b885f Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Sat, 22 Apr 2023 11:47:00 +0100 Subject: [PATCH 007/121] Fix SSH username for CentOS Stream 8 Arm64 golden image Signed-off-by: Pedro Algarvio --- cicd/golden-images.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cicd/golden-images.json b/cicd/golden-images.json index 0f338a3992b..75341e64aeb 100644 --- a/cicd/golden-images.json +++ b/cicd/golden-images.json @@ -97,7 +97,7 @@ "cloudwatch-agent-available": "true", "instance_type": "m6g.large", "is_windows": "false", - "ssh_username": "cloud-user" + "ssh_username": "centos" }, "centosstream-8": { "ami": "ami-055e35dc7180defad", From 186e10a8de107f6f7bce391e554f48543ca6eb6e Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Tue, 18 Apr 2023 17:19:10 +0100 Subject: [PATCH 008/121] Re-enable package download tests for the release workflow Signed-off-by: Pedro Algarvio --- .github/workflows/release.yml | 54 +++++++++++++++++++++++++++++++++++ tools/pre_commit.py | 2 +- 2 files changed, 55 insertions(+), 1 deletion(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 3c44a807a99..c69decf674a 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -155,6 +155,57 @@ jobs: run: | tools pkg repo publish release ${{ needs.prepare-workflow.outputs.salt-version }} + test-linux-pkg-downloads: + name: Test Linux Package Downloads + if: ${{ inputs.skip-salt-pkg-download-test-suite == false }} + needs: + - prepare-workflow + - publish-repositories + uses: ./.github/workflows/test-package-downloads-action-linux.yml + with: + distro-slug: ubuntu-latest + platform: linux + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + environment: release + skip-code-coverage: true + artifacts-from-workflow: staging.yml + secrets: inherit + + test-macos-pkg-downloads: + name: Test macOS Package Downloads + if: ${{ inputs.skip-salt-pkg-download-test-suite == false }} + needs: + - prepare-workflow + - publish-repositories + uses: ./.github/workflows/test-package-downloads-action-macos.yml + with: + distro-slug: macos-12 + platform: darwin + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + environment: release + skip-code-coverage: true + artifacts-from-workflow: staging.yml + secrets: inherit + + test-windows-pkg-downloads: + if: ${{ inputs.skip-salt-pkg-download-test-suite == false }} + name: Test Windows Package Downloads + needs: + - prepare-workflow + - publish-repositories + uses: ./.github/workflows/test-package-downloads-action-windows.yml + with: + distro-slug: windows-2022 + platform: windows + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + environment: release + skip-code-coverage: true + artifacts-from-workflow: staging.yml + secrets: inherit + release: name: Release v${{ needs.prepare-workflow.outputs.salt-version }} runs-on: @@ -165,6 +216,9 @@ jobs: - prepare-workflow - backup - publish-repositories + - test-linux-pkg-downloads + - test-macos-pkg-downloads + - test-windows-pkg-downloads environment: release steps: - name: Clone The Salt Repository diff --git a/tools/pre_commit.py b/tools/pre_commit.py index 1ecfc2b0050..af054876d80 100644 --- a/tools/pre_commit.py +++ b/tools/pre_commit.py @@ 
-77,7 +77,7 @@ def generate_workflows(ctx: Context): "lint": False, "pkg-tests": False, "salt-tests": False, - "test-pkg-downloads": False, + "test-pkg-downloads": True, }, }, } From 1b765b1d540820b65df0b313ac21f38bac70fbf9 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Mon, 17 Apr 2023 01:01:28 +0100 Subject: [PATCH 009/121] Run Linux package download tests in VMs instead of containers Signed-off-by: Pedro Algarvio --- .github/workflows/release.yml | 332 ++++++++++++- .github/workflows/staging.yml | 315 +++++++++++- .../test-pkg-repo-downloads.yml.jinja | 33 +- .../test-package-downloads-action-linux.yml | 210 ++++---- pkg/tests/download/test_pkg_download.py | 447 ++++-------------- 5 files changed, 876 insertions(+), 461 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index c69decf674a..cd6cf7a495e 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -155,16 +155,323 @@ jobs: run: | tools pkg repo publish release ${{ needs.prepare-workflow.outputs.salt-version }} - test-linux-pkg-downloads: - name: Test Linux Package Downloads + almalinux-8-pkg-download-tests: + name: Test Alma Linux 8 Package Downloads if: ${{ inputs.skip-salt-pkg-download-test-suite == false }} needs: - prepare-workflow - publish-repositories uses: ./.github/workflows/test-package-downloads-action-linux.yml with: - distro-slug: ubuntu-latest + distro-slug: almalinux-8 platform: linux + arch: x86_64 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + environment: release + skip-code-coverage: true + artifacts-from-workflow: staging.yml + secrets: inherit + + almalinux-9-pkg-download-tests: + name: Test Alma Linux 9 Package Downloads + if: ${{ inputs.skip-salt-pkg-download-test-suite == false }} + needs: + - prepare-workflow + - publish-repositories + uses: ./.github/workflows/test-package-downloads-action-linux.yml + with: + distro-slug: almalinux-9 + platform: linux + arch: x86_64 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + environment: release + skip-code-coverage: true + artifacts-from-workflow: staging.yml + secrets: inherit + + amazonlinux-2-pkg-download-tests: + name: Test Amazon Linux 2 Package Downloads + if: ${{ inputs.skip-salt-pkg-download-test-suite == false }} + needs: + - prepare-workflow + - publish-repositories + uses: ./.github/workflows/test-package-downloads-action-linux.yml + with: + distro-slug: amazonlinux-2 + platform: linux + arch: x86_64 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + environment: release + skip-code-coverage: true + artifacts-from-workflow: staging.yml + secrets: inherit + + centos-7-pkg-download-tests: + name: Test CentOS 7 Package Downloads + if: ${{ inputs.skip-salt-pkg-download-test-suite == false }} + needs: + - prepare-workflow + - publish-repositories + uses: ./.github/workflows/test-package-downloads-action-linux.yml + with: + distro-slug: centos-7 + platform: linux + arch: x86_64 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + environment: release + skip-code-coverage: true + artifacts-from-workflow: staging.yml + secrets: inherit + + centosstream-8-pkg-download-tests: + name: Test CentOS Stream 8 Package Downloads + 
if: ${{ inputs.skip-salt-pkg-download-test-suite == false }} + needs: + - prepare-workflow + - publish-repositories + uses: ./.github/workflows/test-package-downloads-action-linux.yml + with: + distro-slug: centosstream-8 + platform: linux + arch: x86_64 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + environment: release + skip-code-coverage: true + artifacts-from-workflow: staging.yml + secrets: inherit + + centosstream-9-pkg-download-tests: + name: Test CentOS Stream 9 Package Downloads + if: ${{ inputs.skip-salt-pkg-download-test-suite == false }} + needs: + - prepare-workflow + - publish-repositories + uses: ./.github/workflows/test-package-downloads-action-linux.yml + with: + distro-slug: centosstream-9 + platform: linux + arch: x86_64 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + environment: release + skip-code-coverage: true + artifacts-from-workflow: staging.yml + secrets: inherit + + debian-10-pkg-download-tests: + name: Test Debian 10 Package Downloads + if: ${{ inputs.skip-salt-pkg-download-test-suite == false }} + needs: + - prepare-workflow + - publish-repositories + uses: ./.github/workflows/test-package-downloads-action-linux.yml + with: + distro-slug: debian-10 + platform: linux + arch: x86_64 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + environment: release + skip-code-coverage: true + artifacts-from-workflow: staging.yml + secrets: inherit + + debian-11-pkg-download-tests: + name: Test Debian 11 Package Downloads + if: ${{ inputs.skip-salt-pkg-download-test-suite == false }} + needs: + - prepare-workflow + - publish-repositories + uses: ./.github/workflows/test-package-downloads-action-linux.yml + with: + distro-slug: debian-11 + platform: linux + arch: x86_64 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + environment: release + skip-code-coverage: true + artifacts-from-workflow: staging.yml + secrets: inherit + + debian-11-arm64-pkg-download-tests: + name: Test Debian 11 Arm64 Package Downloads + if: ${{ inputs.skip-salt-pkg-download-test-suite == false }} + needs: + - prepare-workflow + - publish-repositories + uses: ./.github/workflows/test-package-downloads-action-linux.yml + with: + distro-slug: debian-11-arm64 + platform: linux + arch: aarch64 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + environment: release + skip-code-coverage: true + artifacts-from-workflow: staging.yml + secrets: inherit + + fedora-36-pkg-download-tests: + name: Test Fedora 36 Package Downloads + if: ${{ inputs.skip-salt-pkg-download-test-suite == false }} + needs: + - prepare-workflow + - publish-repositories + uses: ./.github/workflows/test-package-downloads-action-linux.yml + with: + distro-slug: fedora-36 + platform: linux + arch: x86_64 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + environment: release + skip-code-coverage: true + artifacts-from-workflow: staging.yml + secrets: inherit + + fedora-37-pkg-download-tests: + name: Test Fedora 37 Package Downloads + if: ${{ inputs.skip-salt-pkg-download-test-suite == 
false }} + needs: + - prepare-workflow + - publish-repositories + uses: ./.github/workflows/test-package-downloads-action-linux.yml + with: + distro-slug: fedora-37 + platform: linux + arch: x86_64 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + environment: release + skip-code-coverage: true + artifacts-from-workflow: staging.yml + secrets: inherit + + fedora-38-pkg-download-tests: + name: Test Fedora 38 Package Downloads + if: ${{ inputs.skip-salt-pkg-download-test-suite == false }} + needs: + - prepare-workflow + - publish-repositories + uses: ./.github/workflows/test-package-downloads-action-linux.yml + with: + distro-slug: fedora-38 + platform: linux + arch: x86_64 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + environment: release + skip-code-coverage: true + artifacts-from-workflow: staging.yml + secrets: inherit + + photonos-3-pkg-download-tests: + name: Test Photon OS 3 Package Downloads + if: ${{ inputs.skip-salt-pkg-download-test-suite == false }} + needs: + - prepare-workflow + - publish-repositories + uses: ./.github/workflows/test-package-downloads-action-linux.yml + with: + distro-slug: photonos-3 + platform: linux + arch: x86_64 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + environment: release + skip-code-coverage: true + artifacts-from-workflow: staging.yml + secrets: inherit + + photonos-4-pkg-download-tests: + name: Test Photon OS 4 Package Downloads + if: ${{ inputs.skip-salt-pkg-download-test-suite == false }} + needs: + - prepare-workflow + - publish-repositories + uses: ./.github/workflows/test-package-downloads-action-linux.yml + with: + distro-slug: photonos-4 + platform: linux + arch: x86_64 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + environment: release + skip-code-coverage: true + artifacts-from-workflow: staging.yml + secrets: inherit + + ubuntu-2004-pkg-download-tests: + name: Test Ubuntu 20.04 Package Downloads + if: ${{ inputs.skip-salt-pkg-download-test-suite == false }} + needs: + - prepare-workflow + - publish-repositories + uses: ./.github/workflows/test-package-downloads-action-linux.yml + with: + distro-slug: ubuntu-20.04 + platform: linux + arch: x86_64 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + environment: release + skip-code-coverage: true + artifacts-from-workflow: staging.yml + secrets: inherit + + ubuntu-2004-arm64-pkg-download-tests: + name: Test Ubuntu 20.04 Arm64 Package Downloads + if: ${{ inputs.skip-salt-pkg-download-test-suite == false }} + needs: + - prepare-workflow + - publish-repositories + uses: ./.github/workflows/test-package-downloads-action-linux.yml + with: + distro-slug: ubuntu-20.04-arm64 + platform: linux + arch: aarch64 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + environment: release + skip-code-coverage: true + artifacts-from-workflow: staging.yml + secrets: inherit + + ubuntu-2204-pkg-download-tests: + name: Test Ubuntu 22.04 Package Downloads + if: ${{ inputs.skip-salt-pkg-download-test-suite == false }} + needs: + - prepare-workflow + - 
publish-repositories + uses: ./.github/workflows/test-package-downloads-action-linux.yml + with: + distro-slug: ubuntu-22.04 + platform: linux + arch: x86_64 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + environment: release + skip-code-coverage: true + artifacts-from-workflow: staging.yml + secrets: inherit + + ubuntu-2204-arm64-pkg-download-tests: + name: Test Ubuntu 22.04 Arm64 Package Downloads + if: ${{ inputs.skip-salt-pkg-download-test-suite == false }} + needs: + - prepare-workflow + - publish-repositories + uses: ./.github/workflows/test-package-downloads-action-linux.yml + with: + distro-slug: ubuntu-22.04-arm64 + platform: linux + arch: aarch64 cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: release @@ -216,7 +523,24 @@ jobs: - prepare-workflow - backup - publish-repositories - - test-linux-pkg-downloads + - almalinux-8-pkg-download-tests + - almalinux-9-pkg-download-tests + - amazonlinux-2-pkg-download-tests + - centos-7-pkg-download-tests + - centosstream-8-pkg-download-tests + - centosstream-9-pkg-download-tests + - debian-10-pkg-download-tests + - debian-11-pkg-download-tests + - debian-11-arm64-pkg-download-tests + - fedora-36-pkg-download-tests + - fedora-37-pkg-download-tests + - fedora-38-pkg-download-tests + - photonos-3-pkg-download-tests + - photonos-4-pkg-download-tests + - ubuntu-2004-pkg-download-tests + - ubuntu-2004-arm64-pkg-download-tests + - ubuntu-2204-pkg-download-tests + - ubuntu-2204-arm64-pkg-download-tests - test-macos-pkg-downloads - test-windows-pkg-downloads environment: release diff --git a/.github/workflows/staging.yml b/.github/workflows/staging.yml index 0c0f5c5f88a..2aefeb8c5bc 100644 --- a/.github/workflows/staging.yml +++ b/.github/workflows/staging.yml @@ -2084,16 +2084,306 @@ jobs: retention-days: 7 if-no-files-found: error - test-linux-pkg-downloads: - name: Test Linux Package Downloads + almalinux-8-pkg-download-tests: + name: Test Alma Linux 8 Package Downloads if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - publish-repositories uses: ./.github/workflows/test-package-downloads-action-linux.yml with: - distro-slug: ubuntu-latest + distro-slug: almalinux-8 platform: linux + arch: x86_64 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + environment: staging + skip-code-coverage: true + secrets: inherit + + almalinux-9-pkg-download-tests: + name: Test Alma Linux 9 Package Downloads + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - publish-repositories + uses: ./.github/workflows/test-package-downloads-action-linux.yml + with: + distro-slug: almalinux-9 + platform: linux + arch: x86_64 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + environment: staging + skip-code-coverage: true + secrets: inherit + + amazonlinux-2-pkg-download-tests: + name: Test Amazon Linux 2 Package Downloads + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && 
fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - publish-repositories + uses: ./.github/workflows/test-package-downloads-action-linux.yml + with: + distro-slug: amazonlinux-2 + platform: linux + arch: x86_64 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + environment: staging + skip-code-coverage: true + secrets: inherit + + centos-7-pkg-download-tests: + name: Test CentOS 7 Package Downloads + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - publish-repositories + uses: ./.github/workflows/test-package-downloads-action-linux.yml + with: + distro-slug: centos-7 + platform: linux + arch: x86_64 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + environment: staging + skip-code-coverage: true + secrets: inherit + + centosstream-8-pkg-download-tests: + name: Test CentOS Stream 8 Package Downloads + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - publish-repositories + uses: ./.github/workflows/test-package-downloads-action-linux.yml + with: + distro-slug: centosstream-8 + platform: linux + arch: x86_64 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + environment: staging + skip-code-coverage: true + secrets: inherit + + centosstream-9-pkg-download-tests: + name: Test CentOS Stream 9 Package Downloads + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - publish-repositories + uses: ./.github/workflows/test-package-downloads-action-linux.yml + with: + distro-slug: centosstream-9 + platform: linux + arch: x86_64 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + environment: staging + skip-code-coverage: true + secrets: inherit + + debian-10-pkg-download-tests: + name: Test Debian 10 Package Downloads + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - publish-repositories + uses: ./.github/workflows/test-package-downloads-action-linux.yml + with: + distro-slug: debian-10 + platform: linux + arch: x86_64 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + environment: staging + skip-code-coverage: true + secrets: inherit + + debian-11-pkg-download-tests: + name: Test Debian 11 Package Downloads + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - publish-repositories + uses: ./.github/workflows/test-package-downloads-action-linux.yml + with: + distro-slug: debian-11 + platform: linux + arch: x86_64 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + 
environment: staging + skip-code-coverage: true + secrets: inherit + + debian-11-arm64-pkg-download-tests: + name: Test Debian 11 Arm64 Package Downloads + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - publish-repositories + uses: ./.github/workflows/test-package-downloads-action-linux.yml + with: + distro-slug: debian-11-arm64 + platform: linux + arch: aarch64 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + environment: staging + skip-code-coverage: true + secrets: inherit + + fedora-36-pkg-download-tests: + name: Test Fedora 36 Package Downloads + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - publish-repositories + uses: ./.github/workflows/test-package-downloads-action-linux.yml + with: + distro-slug: fedora-36 + platform: linux + arch: x86_64 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + environment: staging + skip-code-coverage: true + secrets: inherit + + fedora-37-pkg-download-tests: + name: Test Fedora 37 Package Downloads + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - publish-repositories + uses: ./.github/workflows/test-package-downloads-action-linux.yml + with: + distro-slug: fedora-37 + platform: linux + arch: x86_64 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + environment: staging + skip-code-coverage: true + secrets: inherit + + fedora-38-pkg-download-tests: + name: Test Fedora 38 Package Downloads + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - publish-repositories + uses: ./.github/workflows/test-package-downloads-action-linux.yml + with: + distro-slug: fedora-38 + platform: linux + arch: x86_64 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + environment: staging + skip-code-coverage: true + secrets: inherit + + photonos-3-pkg-download-tests: + name: Test Photon OS 3 Package Downloads + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - publish-repositories + uses: ./.github/workflows/test-package-downloads-action-linux.yml + with: + distro-slug: photonos-3 + platform: linux + arch: x86_64 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + environment: staging + skip-code-coverage: true + secrets: inherit + + photonos-4-pkg-download-tests: + name: Test Photon OS 4 Package Downloads + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - publish-repositories + uses: ./.github/workflows/test-package-downloads-action-linux.yml + 
with: + distro-slug: photonos-4 + platform: linux + arch: x86_64 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + environment: staging + skip-code-coverage: true + secrets: inherit + + ubuntu-2004-pkg-download-tests: + name: Test Ubuntu 20.04 Package Downloads + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - publish-repositories + uses: ./.github/workflows/test-package-downloads-action-linux.yml + with: + distro-slug: ubuntu-20.04 + platform: linux + arch: x86_64 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + environment: staging + skip-code-coverage: true + secrets: inherit + + ubuntu-2004-arm64-pkg-download-tests: + name: Test Ubuntu 20.04 Arm64 Package Downloads + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - publish-repositories + uses: ./.github/workflows/test-package-downloads-action-linux.yml + with: + distro-slug: ubuntu-20.04-arm64 + platform: linux + arch: aarch64 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + environment: staging + skip-code-coverage: true + secrets: inherit + + ubuntu-2204-pkg-download-tests: + name: Test Ubuntu 22.04 Package Downloads + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - publish-repositories + uses: ./.github/workflows/test-package-downloads-action-linux.yml + with: + distro-slug: ubuntu-22.04 + platform: linux + arch: x86_64 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + environment: staging + skip-code-coverage: true + secrets: inherit + + ubuntu-2204-arm64-pkg-download-tests: + name: Test Ubuntu 22.04 Arm64 Package Downloads + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - publish-repositories + uses: ./.github/workflows/test-package-downloads-action-linux.yml + with: + distro-slug: ubuntu-22.04-arm64 + platform: linux + arch: aarch64 cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: staging @@ -2182,7 +2472,24 @@ jobs: - windows-2019-msi-pkg-tests - windows-2022-nsis-pkg-tests - windows-2022-msi-pkg-tests - - test-linux-pkg-downloads + - almalinux-8-pkg-download-tests + - almalinux-9-pkg-download-tests + - amazonlinux-2-pkg-download-tests + - centos-7-pkg-download-tests + - centosstream-8-pkg-download-tests + - centosstream-9-pkg-download-tests + - debian-10-pkg-download-tests + - debian-11-pkg-download-tests + - debian-11-arm64-pkg-download-tests + - fedora-36-pkg-download-tests + - fedora-37-pkg-download-tests + - fedora-38-pkg-download-tests + - photonos-3-pkg-download-tests + - photonos-4-pkg-download-tests + - ubuntu-2004-pkg-download-tests + - ubuntu-2004-arm64-pkg-download-tests + - ubuntu-2204-pkg-download-tests + - 
ubuntu-2204-arm64-pkg-download-tests - test-macos-pkg-downloads - test-windows-pkg-downloads environment: staging diff --git a/.github/workflows/templates/test-pkg-repo-downloads.yml.jinja b/.github/workflows/templates/test-pkg-repo-downloads.yml.jinja index 6c2c956c9b1..899530e1edf 100644 --- a/.github/workflows/templates/test-pkg-repo-downloads.yml.jinja +++ b/.github/workflows/templates/test-pkg-repo-downloads.yml.jinja @@ -1,8 +1,32 @@ - <%- set job_name = "test-linux-pkg-downloads" %> + + <%- set linux_pkg_tests = ( + ("almalinux-8", "Alma Linux 8", "x86_64"), + ("almalinux-9", "Alma Linux 9", "x86_64"), + ("amazonlinux-2", "Amazon Linux 2", "x86_64"), + ("centos-7", "CentOS 7", "x86_64"), + ("centosstream-8", "CentOS Stream 8", "x86_64"), + ("centosstream-9", "CentOS Stream 9", "x86_64"), + ("debian-10", "Debian 10", "x86_64"), + ("debian-11", "Debian 11", "x86_64"), + ("debian-11-arm64", "Debian 11 Arm64", "aarch64"), + ("fedora-36", "Fedora 36", "x86_64"), + ("fedora-37", "Fedora 37", "x86_64"), + ("fedora-38", "Fedora 38", "x86_64"), + ("photonos-3", "Photon OS 3", "x86_64"), + ("photonos-4", "Photon OS 4", "x86_64"), + ("ubuntu-20.04", "Ubuntu 20.04", "x86_64"), + ("ubuntu-20.04-arm64", "Ubuntu 20.04 Arm64", "aarch64"), + ("ubuntu-22.04", "Ubuntu 22.04", "x86_64"), + ("ubuntu-22.04-arm64", "Ubuntu 22.04 Arm64", "aarch64") + ) %> + + + <%- for slug, display_name, arch in linux_pkg_tests %> + <%- set job_name = "{}-pkg-download-tests".format(slug.replace(".", "")) %> <{ job_name }>: <%- do test_repo_needs.append(job_name) %> - name: Test Linux Package Downloads + name: Test <{ display_name }> Package Downloads <%- if gh_environment == "staging" %> if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} <%- else %> @@ -13,8 +37,9 @@ - publish-repositories uses: ./.github/workflows/test-package-downloads-action-linux.yml with: - distro-slug: ubuntu-latest + distro-slug: <{ slug }> platform: linux + arch: <{ arch }> cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|<{ python_version_linux }> salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: <{ gh_environment }> @@ -24,6 +49,8 @@ <%- endif %> secrets: inherit + <%- endfor %> + <%- set job_name = "test-macos-pkg-downloads" %> diff --git a/.github/workflows/test-package-downloads-action-linux.yml b/.github/workflows/test-package-downloads-action-linux.yml index 65c9b60e18e..434ec77f54b 100644 --- a/.github/workflows/test-package-downloads-action-linux.yml +++ b/.github/workflows/test-package-downloads-action-linux.yml @@ -11,6 +11,10 @@ on: required: true type: string description: The platform being tested + arch: + required: true + type: string + description: The platform arch being tested salt-version: type: string required: true @@ -62,37 +66,13 @@ env: jobs: - generate-matrix: - name: Generate Package Test Matrix - runs-on: ubuntu-latest - outputs: - arch-matrix-include: ${{ steps.generate-pkg-matrix.outputs.arch }} - test-matrix-include: ${{ steps.generate-pkg-matrix.outputs.tests }} - steps: - - name: Checkout Source Code - uses: actions/checkout@v3 - - - name: Setup Python Tools Scripts - uses: ./.github/actions/setup-python-tools-scripts - - - name: Generate Package Test Matrix - id: generate-pkg-matrix - run: | - tools ci pkg-download-matrix linux - dependencies: name: Setup Test Dependencies - needs: - - generate-matrix runs-on: - self-hosted - linux - - ${{ matrix.arch }} + - bastion 
timeout-minutes: 90 - strategy: - fail-fast: false - matrix: - include: ${{ fromJSON(needs.generate-matrix.outputs.arch-matrix-include) }} steps: - name: Checkout Source Code uses: actions/checkout@v3 @@ -102,13 +82,13 @@ jobs: uses: actions/cache@v3 with: path: nox.${{ inputs.distro-slug }}.tar.* - key: ${{ inputs.cache-prefix }}|test-pkg-download-deps|${{ matrix.arch }}|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json') }} + key: ${{ inputs.cache-prefix }}|test-pkg-download-deps|${{ inputs.arch }}|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json') }} - name: Download Onedir Tarball as an Artifact if: inputs.artifacts-from-workflow == '' uses: actions/download-artifact@v3 with: - name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ matrix.arch }}.tar.xz + name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz path: artifacts/ - name: Download Onedir Tarball as an Artifact(from a different workflow) @@ -119,7 +99,7 @@ jobs: workflow_conclusion: "" branch: ${{ github.event.ref }} if_no_artifact_found: fail - name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ matrix.arch }}.tar.xz + name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz path: artifacts/ - name: Decompress Onedir Tarball @@ -128,36 +108,65 @@ jobs: run: | python3 -c "import os; os.makedirs('artifacts', exist_ok=True)" cd artifacts - tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ matrix.arch }}.tar.xz + tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz - - name: Install Nox + - name: Setup Python Tools Scripts + if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' + uses: ./.github/actions/setup-python-tools-scripts + + - name: Get Salt Project GitHub Actions Bot Environment if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' run: | - python3 -m pip install 'nox==${{ env.NOX_VERSION }}' + TOKEN=$(curl -sS -f -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 30") + SPB_ENVIRONMENT=$(curl -sS -f -H "X-aws-ec2-metadata-token: $TOKEN" http://169.254.169.254/latest/meta-data/tags/instance/spb:environment) + echo "SPB_ENVIRONMENT=$SPB_ENVIRONMENT" >> "$GITHUB_ENV" + + - name: Start VM + if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' + id: spin-up-vm + run: | + tools --timestamps vm create --environment "${SPB_ENVIRONMENT}" --retries=2 ${{ inputs.distro-slug }} + + - name: List Free Space + if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' + run: | + tools --timestamps vm ssh ${{ inputs.distro-slug }} -- df -h || true + + - name: Upload Checkout To VM + if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' + run: | + tools --timestamps vm rsync ${{ inputs.distro-slug }} - name: Install Dependencies if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' - env: - PRINT_TEST_SELECTION: "0" - PRINT_SYSTEM_INFO: "0" run: | - nox --force-color --install-only -e ${{ inputs.nox-session }} + tools --timestamps vm install-dependencies --nox-session=${{ inputs.nox-session }} ${{ inputs.distro-slug }} - name: Cleanup .nox Directory if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' run: | - 
nox --force-color -e "pre-archive-cleanup(pkg=False)" + tools --timestamps vm pre-archive-cleanup ${{ inputs.distro-slug }} - name: Compress .nox Directory if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' run: | - nox --force-color -e compress-dependencies -- ${{ inputs.distro-slug }} + tools --timestamps vm compress-dependencies ${{ inputs.distro-slug }} + + - name: Download Compressed .nox Directory + if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' + run: | + tools --timestamps vm download-dependencies ${{ inputs.distro-slug }} + + - name: Destroy VM + if: always() && steps.nox-dependencies-cache.outputs.cache-hit != 'true' + run: | + tools --timestamps vm destroy --no-wait ${{ inputs.distro-slug }} - name: Upload Onedir Tarball as an Artifact uses: actions/upload-artifact@v3 with: - name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ matrix.arch }}.tar.xz - path: artifacts/${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ matrix.arch }}.tar.xz* + name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz + path: artifacts/${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz* retention-days: 7 if-no-files-found: error @@ -172,16 +181,11 @@ jobs: runs-on: - self-hosted - linux - - ${{ matrix.arch }} + - bastion environment: ${{ inputs.environment }} timeout-minutes: 120 # 2 Hours - More than this and something is wrong needs: - dependencies - - generate-matrix - strategy: - fail-fast: false - matrix: - include: ${{ fromJSON(needs.generate-matrix.outputs.test-matrix-include) }} steps: - name: Checkout Source Code @@ -190,102 +194,121 @@ jobs: - name: Download Onedir Tarball as an Artifact uses: actions/download-artifact@v3 with: - name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ matrix.arch }}.tar.xz - path: artifacts + name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz + path: artifacts/ - name: Decompress Onedir Tarball shell: bash run: | python3 -c "import os; os.makedirs('artifacts', exist_ok=True)" cd artifacts - tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ matrix.arch }}.tar.xz - - - name: Install Nox - run: | - python3 -m pip install 'nox==${{ env.NOX_VERSION }}' + tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz - name: Download cached nox.${{ inputs.distro-slug }}.tar.* for session ${{ inputs.nox-session }} uses: actions/cache@v3 with: path: nox.${{ inputs.distro-slug }}.tar.* - key: ${{ inputs.cache-prefix }}|test-pkg-download-deps|${{ matrix.arch }}|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json') }} + key: ${{ inputs.cache-prefix }}|test-pkg-download-deps|${{ inputs.arch }}|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json') }} + + - name: Setup Python Tools Scripts + uses: ./.github/actions/setup-python-tools-scripts + + - name: Get Salt Project GitHub Actions Bot Environment + run: | + TOKEN=$(curl -sS -f -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 30") + SPB_ENVIRONMENT=$(curl -sS -f -H "X-aws-ec2-metadata-token: $TOKEN" 
http://169.254.169.254/latest/meta-data/tags/instance/spb:environment) + echo "SPB_ENVIRONMENT=$SPB_ENVIRONMENT" >> "$GITHUB_ENV" + + - name: Start VM + id: spin-up-vm + run: | + tools --timestamps vm create --environment "${SPB_ENVIRONMENT}" --retries=2 ${{ inputs.distro-slug }} + + - name: List Free Space + run: | + tools --timestamps vm ssh ${{ inputs.distro-slug }} -- df -h || true + + - name: Upload Checkout To VM + run: | + tools --timestamps vm rsync ${{ inputs.distro-slug }} - name: Decompress .nox Directory run: | - nox --force-color -e decompress-dependencies -- ${{ inputs.distro-slug }} + tools --timestamps vm decompress-dependencies ${{ inputs.distro-slug }} - name: Show System Info & Test Plan env: - SKIP_REQUIREMENTS_INSTALL: "1" - PRINT_TEST_SELECTION: "1" - PRINT_TEST_PLAN_ONLY: "1" - PRINT_SYSTEM_INFO: "1" - GITHUB_ACTIONS_PIPELINE: "1" - SKIP_INITIAL_GH_ACTIONS_FAILURES: "1" - SKIP_CODE_COVERAGE: "${{ inputs.skip-code-coverage && '1' || '0' }}" - run: | - nox --force-color -e ${{ inputs.nox-session }} -- download-pkgs - - - name: Run Package Download Tests - env: - SKIP_REQUIREMENTS_INSTALL: "1" - PRINT_TEST_SELECTION: "0" - PRINT_TEST_PLAN_ONLY: "0" - PRINT_SYSTEM_INFO: "0" - RERUN_FAILURES: "1" - GITHUB_ACTIONS_PIPELINE: "1" - SKIP_INITIAL_GH_ACTIONS_FAILURES: "1" - SKIP_CODE_COVERAGE: "${{ inputs.skip-code-coverage && '1' || '0' }}" SALT_RELEASE: "${{ inputs.salt-version }}" - SALT_REPO_ARCH: ${{ matrix.arch }} + SALT_REPO_ARCH: ${{ inputs.arch }} SALT_REPO_TYPE: ${{ inputs.environment }} SALT_REPO_USER: ${{ secrets.SALT_REPO_USER }} SALT_REPO_PASS: ${{ secrets.SALT_REPO_PASS }} SALT_REPO_DOMAIN_RELEASE: ${{ vars.SALT_REPO_DOMAIN_RELEASE || 'repo.saltproject.io' }} SALT_REPO_DOMAIN_STAGING: ${{ vars.SALT_REPO_DOMAIN_STAGING || 'staging.repo.saltproject.io' }} - + SKIP_CODE_COVERAGE: "${{ inputs.skip-code-coverage && '1' || '0' }}" run: | - nox --force-color -e ${{ inputs.nox-session }} -- download-pkgs + tools --timestamps --timeout-secs=1800 vm testplan --skip-requirements-install \ + -E INSTALL_TYPE -E SALT_RELEASE -E SALT_REPO_ARCH -E SALT_REPO_TYPE -E SALT_REPO_USER -E SALT_REPO_PASS \ + -E SALT_REPO_DOMAIN_RELEASE -E SALT_REPO_DOMAIN_STAGING \ + --nox-session=${{ inputs.nox-session }} ${{ inputs.distro-slug }} -- download-pkgs - - name: Fix file ownership + - name: Run Package Download Tests + env: + SALT_RELEASE: "${{ inputs.salt-version }}" + SALT_REPO_ARCH: ${{ inputs.arch }} + SALT_REPO_TYPE: ${{ inputs.environment }} + SALT_REPO_USER: ${{ secrets.SALT_REPO_USER }} + SALT_REPO_PASS: ${{ secrets.SALT_REPO_PASS }} + SALT_REPO_DOMAIN_RELEASE: ${{ vars.SALT_REPO_DOMAIN_RELEASE || 'repo.saltproject.io' }} + SALT_REPO_DOMAIN_STAGING: ${{ vars.SALT_REPO_DOMAIN_STAGING || 'staging.repo.saltproject.io' }} + SKIP_CODE_COVERAGE: "${{ inputs.skip-code-coverage && '1' || '0' }}" run: | - sudo chown -R "$(id -un)" . 
+ tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \ + -E INSTALL_TYPE -E SALT_RELEASE -E SALT_REPO_ARCH -E SALT_REPO_TYPE -E SALT_REPO_USER -E SALT_REPO_PASS \ + -E SALT_REPO_DOMAIN_RELEASE -E SALT_REPO_DOMAIN_STAGING \ + --nox-session=${{ inputs.nox-session }} --rerun-failures ${{ inputs.distro-slug }} -- download-pkgs - name: Combine Coverage Reports - if: always() && inputs.skip-code-coverage == false && job.status != 'cancelled' + if: always() && inputs.skip-code-coverage == false && steps.spin-up-vm.outcome == 'success' && job.status != 'cancelled' run: | - nox --force-color -e combine-coverage + tools --timestamps vm combine-coverage ${{ inputs.distro-slug }} - - name: Prepare Test Run Artifacts + - name: Download Test Run Artifacts id: download-artifacts-from-vm - if: always() && job.status != 'cancelled' + if: always() && steps.spin-up-vm.outcome == 'success' run: | + tools --timestamps vm download-artifacts ${{ inputs.distro-slug }} # Delete the salt onedir, we won't need it anymore and it will prevent # from it showing in the tree command below rm -rf artifacts/salt* tree -a artifacts + - name: Destroy VM + if: always() + run: | + tools --timestamps vm destroy --no-wait ${{ inputs.distro-slug }} || true + + - name: Fix file ownership + run: | + sudo chown -R "$(id -un)" . + - name: Upload Test Run Artifacts - if: always() && job.status != 'cancelled' + if: always() && steps.download-artifacts-from-vm.outcome == 'success' uses: actions/upload-artifact@v3 with: - name: pkg-testrun-artifacts-${{ inputs.distro-slug }}-${{ matrix.arch }} + name: pkg-testrun-artifacts-${{ inputs.distro-slug }}-${{ inputs.arch }} path: | artifacts !artifacts/salt/* !artifacts/salt-*.tar.* report: - name: Reports for ${{ inputs.distro-slug }}(${{ matrix.arch }}) + name: Reports for ${{ inputs.distro-slug }}(${{ inputs.arch }}) runs-on: ubuntu-latest + environment: ${{ inputs.environment }} if: always() && needs.test.result != 'cancelled' && needs.test.result != 'skipped' needs: - test - - generate-matrix - strategy: - fail-fast: false - matrix: - include: ${{ fromJSON(needs.generate-matrix.outputs.test-matrix-include) }} steps: - name: Checkout Source Code @@ -295,7 +318,7 @@ jobs: id: download-test-run-artifacts uses: actions/download-artifact@v3 with: - name: pkg-testrun-artifacts-${{ inputs.distro-slug }}-${{ matrix.arch }} + name: pkg-testrun-artifacts-${{ inputs.distro-slug }}-${{ inputs.arch }} path: artifacts - name: Show Test Run Artifacts @@ -307,7 +330,6 @@ jobs: uses: actions/setup-python@v4 with: python-version: "${{ inputs.python-version }}" - update-environment: true - name: Install Nox run: | @@ -318,6 +340,6 @@ jobs: # always run even if the previous steps fails if: always() && github.event_name == 'push' && steps.download-test-run-artifacts.outcome == 'success' with: - check_name: Overall Test Results(${{ inputs.distro-slug }} ${{ matrix.arch }}) + check_name: Overall Test Results(${{ inputs.distro-slug }} ${{ inputs.arch }}) report_paths: 'artifacts/xml-unittests-output/*.xml' annotate_only: true diff --git a/pkg/tests/download/test_pkg_download.py b/pkg/tests/download/test_pkg_download.py index bbe26df549f..c346e2d3ac2 100644 --- a/pkg/tests/download/test_pkg_download.py +++ b/pkg/tests/download/test_pkg_download.py @@ -4,141 +4,14 @@ Test Salt Pkg Downloads import logging import os import pathlib -import re -import shutil -import attr import packaging import pytest from pytestskipmarkers.utils import platform -from 
saltfactories.utils import random_string log = logging.getLogger(__name__) -@attr.s(kw_only=True, slots=True) -class PkgImage: - name = attr.ib() - os_type = attr.ib() - os_version = attr.ib() - os_codename = attr.ib(default=None) - container_id = attr.ib() - container = attr.ib(default=None) - - def __str__(self): - return f"{self.container_id}" - - -def get_test_versions(): - test_versions = [] - - containers = [ - { - "image": "ghcr.io/saltstack/salt-ci-containers/amazon-linux:2", - "os_type": "amazon", - "os_version": 2, - "container_id": "amazon_2", - }, - { - "image": "ghcr.io/saltstack/salt-ci-containers/centos:7", - "os_type": "redhat", - "os_version": 7, - "container_id": "centos_7", - }, - { - "image": "ghcr.io/saltstack/salt-ci-containers/centos-stream:8", - "os_type": "redhat", - "os_version": 8, - "container_id": "centosstream_8", - }, - { - "image": "ghcr.io/saltstack/salt-ci-containers/centos-stream:9", - "os_type": "redhat", - "os_version": 9, - "container_id": "centosstream_9", - }, - { - "image": "ghcr.io/saltstack/salt-ci-containers/fedora:36", - "os_type": "fedora", - "os_version": 36, - "container_id": "fedora_36", - }, - { - "image": "ghcr.io/saltstack/salt-ci-containers/fedora:37", - "os_type": "fedora", - "os_version": 37, - "container_id": "fedora_37", - }, - { - "image": "ghcr.io/saltstack/salt-ci-containers/fedora:38", - "os_type": "fedora", - "os_version": 38, - "container_id": "fedora_38", - }, - { - "image": "ghcr.io/saltstack/salt-ci-containers/debian:10", - "os_type": "debian", - "os_version": 10, - "os_codename": "buster", - "container_id": "debian_10", - }, - { - "image": "ghcr.io/saltstack/salt-ci-containers/debian:11", - "os_type": "debian", - "os_version": 11, - "os_codename": "bullseye", - "container_id": "debian_11", - }, - { - "image": "ghcr.io/saltstack/salt-ci-containers/ubuntu:20.04", - "os_type": "ubuntu", - "os_version": 20.04, - "os_codename": "focal", - "container_id": "ubuntu_20_04", - }, - { - "image": "ghcr.io/saltstack/salt-ci-containers/ubuntu:22.04", - "os_type": "ubuntu", - "os_version": 22.04, - "os_codename": "jammy", - "container_id": "ubuntu_22_04", - }, - { - "image": "ghcr.io/saltstack/salt-ci-containers/photon:3", - "os_type": "photon", - "os_version": 3, - "container_id": "photon_3", - }, - { - "image": "ghcr.io/saltstack/salt-ci-containers/photon:4", - "os_type": "photon", - "os_version": 4, - "container_id": "photon_4", - }, - ] - for container in containers: - test_versions.append( - PkgImage( - name=container["image"], - os_type=container["os_type"], - os_version=container["os_version"], - os_codename=container.get("os_codename", ""), - container_id=container["container_id"], - ) - ) - - return test_versions - - -def get_container_type_id(value): - return f"{value}" - - -@pytest.fixture(scope="module", params=get_test_versions(), ids=get_container_type_id) -def download_test_image(request): - return request.param - - def get_salt_test_commands(): salt_release = get_salt_release() @@ -176,54 +49,6 @@ def get_salt_test_commands(): return salt_test_commands -@pytest.fixture(scope="module") -def pkg_container( - salt_factories, - download_test_image, - root_url, - salt_release, - tmp_path_factory, - gpg_key_name, -): - downloads_path = tmp_path_factory.mktemp("downloads") - container = salt_factories.get_container( - random_string(f"{download_test_image.container_id}_"), - download_test_image.name, - pull_before_start=True, - skip_on_pull_failure=True, - skip_if_docker_client_not_connectable=True, - container_run_kwargs=dict( - 
volumes={
-                str(downloads_path): {"bind": "/downloads", "mode": "z"},
-            }
-        ),
-    )
-    try:
-        container_setup_func = globals()[f"setup_{download_test_image.os_type}"]
-    except KeyError:
-        raise pytest.skip.Exception(
-            f"Unable to handle {download_test_image.os_type}. Skipping.",
-            _use_item_location=True,
-        )
-    container.before_terminate(shutil.rmtree, str(downloads_path), ignore_errors=True)
-
-    with container.started():
-        download_test_image.container = container
-        try:
-            container_setup_func(
-                container,
-                download_test_image.os_version,
-                download_test_image.os_codename,
-                root_url,
-                salt_release,
-                downloads_path,
-                gpg_key_name,
-            )
-            yield download_test_image
-        except Exception as exc:
-            pytest.fail(f"Failed to setup {pkg_container.os_type}: {exc}")
-
-
 @pytest.fixture(scope="module")
 def root_url(salt_release):
     if os.environ.get("SALT_REPO_TYPE", "release") == "staging":
@@ -282,14 +107,74 @@ def salt_release():
     yield get_salt_release()
 
 
+@pytest.fixture(scope="module")
+def setup_system(grains, shell, root_url, salt_release, downloads_path, gpg_key_name):
+    if grains["os_family"] == "Windows":
+        setup_windows(shell, root_url=root_url, salt_release=salt_release)
+    elif grains["os_family"] == "MacOS":
+        setup_macos(shell, root_url=root_url, salt_release=salt_release)
+    elif grains["os"] == "Amazon":
+        setup_redhat_family(
+            shell,
+            os_name=grains["os"].lower(),
+            os_version=grains["osmajorrelease"],
+            root_url=root_url,
+            salt_release=salt_release,
+            downloads_path=downloads_path,
+            gpg_key_name=gpg_key_name,
+        )
+    elif grains["os"] == "Fedora":
+        setup_redhat_family(
+            shell,
+            os_name=grains["os"].lower(),
+            os_version=grains["osmajorrelease"],
+            root_url=root_url,
+            salt_release=salt_release,
+            downloads_path=downloads_path,
+            gpg_key_name=gpg_key_name,
+        )
+    elif grains["os"] == "VMware Photon OS":
+        setup_redhat_family(
+            shell,
+            os_name="photon",
+            os_version=grains["osmajorrelease"],
+            root_url=root_url,
+            salt_release=salt_release,
+            downloads_path=downloads_path,
+            gpg_key_name=gpg_key_name,
+        )
+    elif grains["os_family"] == "RedHat":
+        setup_redhat_family(
+            shell,
+            os_name="redhat",
+            os_version=grains["osmajorrelease"],
+            root_url=root_url,
+            salt_release=salt_release,
+            downloads_path=downloads_path,
+            gpg_key_name=gpg_key_name,
+        )
+    elif grains["os_family"] == "Debian":
+        setup_debian_family(
+            shell,
+            os_name=grains["os"].lower(),
+            os_version=grains["osrelease"],
+            os_codename=grains["oscodename"],
+            root_url=root_url,
+            salt_release=salt_release,
+            downloads_path=downloads_path,
+            gpg_key_name=gpg_key_name,
+        )
+    else:
+        pytest.fail(f"Don't know how to handle {grains['osfinger']}")
+
+
 def 
setup_redhat_family( f"{repo_url_base}.repo", downloads_path / f"salt-{os_name}.repo" ) - clean_command = "all" if os_name == "photon" else "expire-cache" - install_dmesg = ("yum", "install", "-y", "util-linux") commands = [ - ("mv", f"/downloads/{repo_file.name}", f"/etc/yum.repos.d/salt-{os_name}.repo"), - ("yum", "clean", clean_command), + ("mv", str(repo_file), "/etc/yum.repos.d/salt.repo"), + ("yum", "clean", "all" if os_name == "photon" else "expire-cache"), ( "yum", "install", @@ -331,106 +214,22 @@ def setup_redhat_family( # For some reason, the centosstream9 container doesn't have dmesg installed if os_version == 9 and os_name == "redhat": - commands.insert(2, install_dmesg) + commands.insert(2, ("yum", "install", "-y", "util-linux")) for cmd in commands: - ret = container.run(*cmd) + ret = shell.run(*cmd, check=False) if ret.returncode != 0: - pytest.fail(f"Failed to run: {' '.join(cmd)!r}") - - -def setup_amazon( - container, - os_version, - os_codename, - root_url, - salt_release, - downloads_path, - gpg_key_name, -): - setup_redhat_family( - container, - os_version, - os_codename, - root_url, - salt_release, - downloads_path, - "amazon", - gpg_key_name, - ) - - -def setup_redhat( - container, - os_version, - os_codename, - root_url, - salt_release, - downloads_path, - gpg_key_name, -): - setup_redhat_family( - container, - os_version, - os_codename, - root_url, - salt_release, - downloads_path, - "redhat", - gpg_key_name, - ) - - -def setup_fedora( - container, - os_version, - os_codename, - root_url, - salt_release, - downloads_path, - gpg_key_name, -): - setup_redhat_family( - container, - os_version, - os_codename, - root_url, - salt_release, - downloads_path, - "fedora", - gpg_key_name, - ) - - -def setup_photon( - container, - os_version, - os_codename, - root_url, - salt_release, - downloads_path, - gpg_key_name, -): - setup_redhat_family( - container, - os_version, - os_codename, - root_url, - salt_release, - downloads_path, - "photon", - gpg_key_name, - ) + pytest.fail(f"Failed to run '{' '.join(cmd)!r}':\n{ret}") def setup_debian_family( - container, + shell, + os_name, os_version, os_codename, root_url, salt_release, downloads_path, - os_name, gpg_key_name, ): arch = os.environ.get("SALT_REPO_ARCH") or "amd64" @@ -439,12 +238,12 @@ def setup_debian_family( elif arch == "x86_64": arch = "amd64" - ret = container.run("apt-get", "update", "-y") + ret = shell.run("apt-get", "update", "-y", check=False) if ret.returncode != 0: - pytest.fail("Failed to run: 'apt-get update -y'") + pytest.fail(str(ret)) repo_url_base = f"{root_url}/{os_name}/{os_version}/{arch}/minor/{salt_release}" - gpg_file_url = f"{repo_url_base}/{gpg_key_name}" + gpg_file_url = f"{root_url}/{os_name}/{os_version}/{arch}/{gpg_key_name}" try: pytest.helpers.download_file(gpg_file_url, downloads_path / gpg_key_name) except Exception as exc: @@ -455,10 +254,14 @@ def setup_debian_family( f"deb [signed-by=/usr/share/keyrings/{gpg_key_name} arch={arch}] {repo_url_base} {os_codename} main\n" ) commands = [ - ("mv", f"/downloads/{gpg_key_name}", f"/usr/share/keyrings/{gpg_key_name}"), ( "mv", - f"/downloads/{salt_sources_path.name}", + str(downloads_path / gpg_key_name), + f"/usr/share/keyrings/{gpg_key_name}", + ), + ( + "mv", + str(salt_sources_path), "/etc/apt/sources.list.d/salt.list", ), ("apt-get", "install", "-y", "ca-certificates"), @@ -477,55 +280,12 @@ def setup_debian_family( ), ] for cmd in commands: - ret = container.run(*cmd) + ret = shell.run(*cmd) if ret.returncode != 0: - pytest.fail(f"Failed 
to run: {' '.join(cmd)!r}\n{ret}") + pytest.fail(str(ret)) -def setup_debian( - container, - os_version, - os_codename, - root_url, - salt_release, - downloads_path, - gpg_key_name, -): - setup_debian_family( - container, - os_version, - os_codename, - root_url, - salt_release, - downloads_path, - "debian", - gpg_key_name, - ) - - -def setup_ubuntu( - container, - os_version, - os_codename, - root_url, - salt_release, - downloads_path, - gpg_key_name, -): - setup_debian_family( - container, - os_version, - os_codename, - root_url, - salt_release, - downloads_path, - "ubuntu", - gpg_key_name, - ) - - -@pytest.fixture(scope="module") -def setup_macos(root_url, salt_release, shell): +def setup_macos(shell, root_url, salt_release): arch = os.environ.get("SALT_REPO_ARCH") or "x86_64" if arch == "aarch64": @@ -554,8 +314,7 @@ def setup_macos(root_url, salt_release, shell): yield -@pytest.fixture(scope="module") -def setup_windows(root_url, salt_release, shell): +def setup_windows(shell, root_url, salt_release): root_dir = pathlib.Path(r"C:\Program Files\Salt Project\Salt") @@ -594,39 +353,15 @@ def setup_windows(root_url, salt_release, shell): assert ret.returncode == 0, ret -@pytest.mark.skip_unless_on_linux +@pytest.mark.usefixtures("setup_system") @pytest.mark.parametrize("salt_test_command", get_salt_test_commands()) -@pytest.mark.skip_if_binaries_missing("dockerd") -def test_download_linux(salt_test_command, pkg_container, root_url, salt_release): +def test_download(shell, grains, salt_test_command): """ - Test downloading of Salt packages and running various commands on Linux hosts - """ - res = pkg_container.container.run(salt_test_command) - assert res.returncode == 0 - - -@pytest.mark.skip_unless_on_darwin -@pytest.mark.usefixtures("setup_macos") -@pytest.mark.parametrize("salt_test_command", get_salt_test_commands()) -def test_download_macos(salt_test_command, shell): - """ - Test downloading of Salt packages and running various commands on Mac OS hosts + Test downloading of Salt packages and running various commands. 
""" _cmd = salt_test_command.split() - ret = shell.run(*_cmd, check=False) - assert ret.returncode == 0, ret - - -@pytest.mark.skip_unless_on_windows -@pytest.mark.usefixtures("setup_windows") -@pytest.mark.parametrize("salt_test_command", get_salt_test_commands()) -def test_download_windows(salt_test_command, shell): - """ - Test downloading of Salt packages and running various commands on Windows hosts - """ - _cmd = salt_test_command.split() - root_dir = pathlib.Path(r"C:\Program Files\Salt Project\Salt") - _cmd[0] = str(root_dir / _cmd[0]) - + if grains["os_family"] == "Windows": + root_dir = pathlib.Path(r"C:\Program Files\Salt Project\Salt") + _cmd[0] = str(root_dir / _cmd[0]) ret = shell.run(*_cmd, check=False) assert ret.returncode == 0, ret From eb20a93fcc78d5ac2161fea1b10af3d1365490fd Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Mon, 17 Apr 2023 01:22:44 +0100 Subject: [PATCH 010/121] Also drop matrix usage for macOS and Windows Signed-off-by: Pedro Algarvio --- .github/workflows/release.yml | 35 ++++++-- .github/workflows/staging.yml | 34 ++++++-- .../test-pkg-repo-downloads.yml.jinja | 23 ++++-- .../test-package-downloads-action-macos.yml | 67 +++++----------- .../test-package-downloads-action-windows.yml | 79 ++++++------------- tools/ci.py | 41 ---------- 6 files changed, 118 insertions(+), 161 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index cd6cf7a495e..734a2b316eb 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -479,8 +479,8 @@ jobs: artifacts-from-workflow: staging.yml secrets: inherit - test-macos-pkg-downloads: - name: Test macOS Package Downloads + macos-12-pkg-download-tests: + name: Test macOS 12 Package Downloads if: ${{ inputs.skip-salt-pkg-download-test-suite == false }} needs: - prepare-workflow @@ -489,6 +489,7 @@ jobs: with: distro-slug: macos-12 platform: darwin + arch: x86_64 cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: release @@ -496,9 +497,9 @@ jobs: artifacts-from-workflow: staging.yml secrets: inherit - test-windows-pkg-downloads: + windows-2022-nsis-pkg-download-tests: if: ${{ inputs.skip-salt-pkg-download-test-suite == false }} - name: Test Windows Package Downloads + name: Test Windows 2022 NSIS Package Downloads needs: - prepare-workflow - publish-repositories @@ -506,6 +507,27 @@ jobs: with: distro-slug: windows-2022 platform: windows + arch: amd64 + pkg-type: NSIS + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + environment: release + skip-code-coverage: true + artifacts-from-workflow: staging.yml + secrets: inherit + + windows-2022-msi-pkg-download-tests: + if: ${{ inputs.skip-salt-pkg-download-test-suite == false }} + name: Test Windows 2022 MSI Package Downloads + needs: + - prepare-workflow + - publish-repositories + uses: ./.github/workflows/test-package-downloads-action-windows.yml + with: + distro-slug: windows-2022 + platform: windows + arch: amd64 + pkg-type: MSI cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: release @@ -541,8 +563,9 @@ jobs: - ubuntu-2004-arm64-pkg-download-tests - ubuntu-2204-pkg-download-tests - ubuntu-2204-arm64-pkg-download-tests - - test-macos-pkg-downloads - - test-windows-pkg-downloads + - macos-12-pkg-download-tests + - 
windows-2022-nsis-pkg-download-tests + - windows-2022-msi-pkg-download-tests environment: release steps: - name: Clone The Salt Repository diff --git a/.github/workflows/staging.yml b/.github/workflows/staging.yml index 2aefeb8c5bc..1f6539be512 100644 --- a/.github/workflows/staging.yml +++ b/.github/workflows/staging.yml @@ -2390,8 +2390,8 @@ jobs: skip-code-coverage: true secrets: inherit - test-macos-pkg-downloads: - name: Test macOS Package Downloads + macos-12-pkg-download-tests: + name: Test macOS 12 Package Downloads if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow @@ -2400,15 +2400,16 @@ jobs: with: distro-slug: macos-12 platform: darwin + arch: x86_64 cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: staging skip-code-coverage: true secrets: inherit - test-windows-pkg-downloads: + windows-2022-nsis-pkg-download-tests: if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} - name: Test Windows Package Downloads + name: Test Windows 2022 NSIS Package Downloads needs: - prepare-workflow - publish-repositories @@ -2416,6 +2417,26 @@ jobs: with: distro-slug: windows-2022 platform: windows + arch: amd64 + pkg-type: NSIS + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + environment: staging + skip-code-coverage: true + secrets: inherit + + windows-2022-msi-pkg-download-tests: + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + name: Test Windows 2022 MSI Package Downloads + needs: + - prepare-workflow + - publish-repositories + uses: ./.github/workflows/test-package-downloads-action-windows.yml + with: + distro-slug: windows-2022 + platform: windows + arch: amd64 + pkg-type: MSI cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: staging @@ -2490,8 +2511,9 @@ jobs: - ubuntu-2004-arm64-pkg-download-tests - ubuntu-2204-pkg-download-tests - ubuntu-2204-arm64-pkg-download-tests - - test-macos-pkg-downloads - - test-windows-pkg-downloads + - macos-12-pkg-download-tests + - windows-2022-nsis-pkg-download-tests + - windows-2022-msi-pkg-download-tests environment: staging runs-on: - self-hosted diff --git a/.github/workflows/templates/test-pkg-repo-downloads.yml.jinja b/.github/workflows/templates/test-pkg-repo-downloads.yml.jinja index 899530e1edf..7ebfa4e4a07 100644 --- a/.github/workflows/templates/test-pkg-repo-downloads.yml.jinja +++ b/.github/workflows/templates/test-pkg-repo-downloads.yml.jinja @@ -52,11 +52,12 @@ <%- endfor %> - <%- set job_name = "test-macos-pkg-downloads" %> + <%- for slug, display_name, arch in (("macos-12", "macOS 12", "x86_64"),) %> + <%- set job_name = "{}-pkg-download-tests".format(slug.replace(".", "")) %> <{ job_name }>: <%- do test_repo_needs.append(job_name) %> - name: Test macOS Package Downloads + name: Test <{ display_name }> Package Downloads <%- if gh_environment == "staging" %> if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} <%- else %> @@ -67,8 +68,9 @@ - 
publish-repositories uses: ./.github/workflows/test-package-downloads-action-macos.yml with: - distro-slug: macos-12 + distro-slug: <{ slug }> platform: darwin + arch: <{ arch }> cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|<{ python_version_macos }> salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: <{ gh_environment }> @@ -77,9 +79,14 @@ artifacts-from-workflow: staging.yml <%- endif %> secrets: inherit + <%- endfor %> - <%- set job_name = "test-windows-pkg-downloads" %> + <%- for slug, display_name, arch in ( + ("windows-2022", "Windows 2022", "amd64"), + ) %> + <%- for pkg_type in ("NSIS", "MSI") %> + <%- set job_name = "{}-{}-pkg-download-tests".format(slug.replace(".", ""), pkg_type.lower()) %> <{ job_name }>: <%- do test_repo_needs.append(job_name) %> @@ -88,14 +95,16 @@ <%- else %> if: ${{ inputs.skip-salt-pkg-download-test-suite == false }} <%- endif %> - name: Test Windows Package Downloads + name: Test <{ display_name }> <{ pkg_type }> Package Downloads needs: - prepare-workflow - publish-repositories uses: ./.github/workflows/test-package-downloads-action-windows.yml with: - distro-slug: windows-2022 + distro-slug: <{ slug }> platform: windows + arch: <{ arch }> + pkg-type: <{ pkg_type }> cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|<{ python_version_windows }> salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: <{ gh_environment }> @@ -104,3 +113,5 @@ artifacts-from-workflow: staging.yml <%- endif %> secrets: inherit + <%- endfor %> + <%- endfor %> diff --git a/.github/workflows/test-package-downloads-action-macos.yml b/.github/workflows/test-package-downloads-action-macos.yml index b68542be268..b576eebe011 100644 --- a/.github/workflows/test-package-downloads-action-macos.yml +++ b/.github/workflows/test-package-downloads-action-macos.yml @@ -11,6 +11,10 @@ on: required: true type: string description: The platform being tested + arch: + required: true + type: string + description: The platform arch being tested salt-version: type: string required: true @@ -62,34 +66,10 @@ env: jobs: - generate-matrix: - name: Generate Package Test Matrix - runs-on: ubuntu-latest - outputs: - arch-matrix-include: ${{ steps.generate-pkg-matrix.outputs.arch }} - test-matrix-include: ${{ steps.generate-pkg-matrix.outputs.tests }} - steps: - - name: Checkout Source Code - uses: actions/checkout@v3 - - - name: Setup Python Tools Scripts - uses: ./.github/actions/setup-python-tools-scripts - - - name: Generate Package Test Matrix - id: generate-pkg-matrix - run: | - tools ci pkg-download-matrix macos - dependencies: name: Setup Test Dependencies - needs: - - generate-matrix runs-on: ${{ inputs.distro-slug }} timeout-minutes: 90 - strategy: - fail-fast: false - matrix: - include: ${{ fromJSON(needs.generate-matrix.outputs.arch-matrix-include) }} steps: - name: Checkout Source Code uses: actions/checkout@v3 @@ -99,13 +79,13 @@ jobs: uses: actions/cache@v3 with: path: nox.${{ inputs.distro-slug }}.tar.* - key: ${{ inputs.cache-prefix }}|test-pkg-download-deps|${{ matrix.arch }}|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json') }} + key: ${{ inputs.cache-prefix }}|test-pkg-download-deps|${{ inputs.arch }}|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json') }} - name: Download Onedir Tarball as an Artifact if: inputs.artifacts-from-workflow == '' uses: actions/download-artifact@v3 
with: - name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ matrix.arch }}.tar.xz + name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz path: artifacts/ - name: Download Onedir Tarball as an Artifact(from a different workflow) @@ -116,7 +96,7 @@ jobs: workflow_conclusion: "" branch: ${{ github.event.ref }} if_no_artifact_found: fail - name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ matrix.arch }}.tar.xz + name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz path: artifacts/ - name: Decompress Onedir Tarball @@ -125,7 +105,7 @@ jobs: run: | python3 -c "import os; os.makedirs('artifacts', exist_ok=True)" cd artifacts - tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ matrix.arch }}.tar.xz + tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz - name: Set up Python ${{ inputs.python-version }} if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' @@ -169,8 +149,8 @@ jobs: - name: Upload Onedir Tarball as an Artifact uses: actions/upload-artifact@v3 with: - name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ matrix.arch }}.tar.xz - path: artifacts/${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ matrix.arch }}.tar.xz* + name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz + path: artifacts/${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz* retention-days: 7 if-no-files-found: error @@ -187,11 +167,6 @@ jobs: timeout-minutes: 120 # 2 Hours - More than this and something is wrong needs: - dependencies - - generate-matrix - strategy: - fail-fast: false - matrix: - include: ${{ fromJSON(needs.generate-matrix.outputs.test-matrix-include) }} steps: - name: Checkout Source Code @@ -200,7 +175,7 @@ jobs: - name: Download Onedir Tarball as an Artifact uses: actions/download-artifact@v3 with: - name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ matrix.arch }}.tar.xz + name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz path: artifacts - name: Install System Dependencies @@ -212,7 +187,7 @@ jobs: run: | python3 -c "import os; os.makedirs('artifacts', exist_ok=True)" cd artifacts - tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ matrix.arch }}.tar.xz + tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz - name: Set up Python ${{ inputs.python-version }} uses: actions/setup-python@v4 @@ -228,7 +203,7 @@ jobs: uses: actions/cache@v3 with: path: nox.${{ inputs.distro-slug }}.tar.* - key: ${{ inputs.cache-prefix }}|test-pkg-download-deps|${{ matrix.arch }}|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json') }} + key: ${{ inputs.cache-prefix }}|test-pkg-download-deps|${{ inputs.arch }}|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json') }} - name: Decompress .nox Directory run: | @@ -236,6 +211,7 @@ jobs: - name: Show System Info & Test 
Plan
         env:
+          SALT_RELEASE: "${{ inputs.salt-version }}"
           SKIP_REQUIREMENTS_INSTALL: "1"
           PRINT_TEST_SELECTION: "1"
           PRINT_TEST_PLAN_ONLY: "1"
@@ -257,7 +233,7 @@
           SKIP_INITIAL_GH_ACTIONS_FAILURES: "1"
           SKIP_CODE_COVERAGE: "${{ inputs.skip-code-coverage && '1' || '0' }}"
           SALT_RELEASE: "${{ inputs.salt-version }}"
-          SALT_REPO_ARCH: ${{ matrix.arch }}
+          SALT_REPO_ARCH: ${{ inputs.arch }}
           SALT_REPO_TYPE: ${{ inputs.environment }}
           SALT_REPO_USER: ${{ secrets.SALT_REPO_USER }}
           SALT_REPO_PASS: ${{ secrets.SALT_REPO_PASS }}
@@ -288,24 +264,19 @@
         if: always() && job.status != 'cancelled'
         uses: actions/upload-artifact@v3
         with:
-          name: pkg-testrun-artifacts-${{ inputs.distro-slug }}-${{ matrix.arch }}
+          name: pkg-testrun-artifacts-${{ inputs.distro-slug }}-${{ inputs.arch }}
           path: |
             artifacts
             !artifacts/salt/*
             !artifacts/salt-*.tar.*
 
   report:
-    name: Reports for ${{ inputs.distro-slug }}(${{ matrix.arch }})
+    name: Reports for ${{ inputs.distro-slug }}(${{ inputs.arch }})
     runs-on: ubuntu-latest
     environment: ${{ inputs.environment }}
     if: always() && needs.test.result != 'cancelled' && needs.test.result != 'skipped'
     needs:
       - test
-      - generate-matrix
-    strategy:
-      fail-fast: false
-      matrix:
-        include: ${{ fromJSON(needs.generate-matrix.outputs.test-matrix-include) }}
 
     steps:
       - name: Checkout Source Code
@@ -315,7 +286,7 @@
         id: download-test-run-artifacts
         uses: actions/download-artifact@v3
         with:
-          name: pkg-testrun-artifacts-${{ inputs.distro-slug }}-${{ matrix.arch }}
+          name: pkg-testrun-artifacts-${{ inputs.distro-slug }}-${{ inputs.arch }}
           path: artifacts
 
       - name: Show Test Run Artifacts
@@ -337,6 +308,6 @@
         # always run even if the previous steps fails
         if: always() && github.event_name == 'push' && steps.download-test-run-artifacts.outcome == 'success'
         with:
-          check_name: Overall Test Results(${{ inputs.distro-slug }} ${{ matrix.arch }})
+          check_name: Overall Test Results(${{ inputs.distro-slug }} ${{ inputs.arch }})
           report_paths: 'artifacts/xml-unittests-output/*.xml'
           annotate_only: true
diff --git a/.github/workflows/test-package-downloads-action-windows.yml b/.github/workflows/test-package-downloads-action-windows.yml
index c13ef47d62c..cd8d73de874 100644
--- a/.github/workflows/test-package-downloads-action-windows.yml
+++ b/.github/workflows/test-package-downloads-action-windows.yml
@@ -11,6 +11,14 @@ on:
         required: true
         type: string
         description: The platform being tested
+      arch:
+        required: true
+        type: string
+        description: The platform arch being tested
+      pkg-type:
+        required: true
+        type: string
+        description: The type of package being tested
       salt-version:
         type: string
         required: true
@@ -67,40 +75,13 @@ env:
 
 jobs:
 
-  generate-matrix:
-    name: Generate Package Test Matrix
-    runs-on:
-      - self-hosted
-      - linux
-      - x86_64
-    outputs:
-      arch-matrix-include: ${{ steps.generate-pkg-matrix.outputs.arch }}
-      test-matrix-include: ${{ steps.generate-pkg-matrix.outputs.tests }}
-    steps:
-      - name: Checkout Source Code
-        uses: actions/checkout@v3
-
-      - name: Setup Python Tools Scripts
-        uses: ./.github/actions/setup-python-tools-scripts
-
-      - name: Generate Package Test Matrix
-        id: generate-pkg-matrix
-        run: |
-          tools ci pkg-download-matrix windows
-
   dependencies:
     name: Setup Test Dependencies
-    needs:
-      - generate-matrix
     runs-on:
       - self-hosted
       - linux
       - bastion
     timeout-minutes: 90
-    strategy:
-      fail-fast: false
-      matrix:
-        include: ${{ fromJSON(needs.generate-matrix.outputs.arch-matrix-include) }}
     steps:
       - name: Checkout Source Code
         uses: actions/checkout@v3
 
       - name: Download cached nox.${{ inputs.distro-slug }}.tar.* for session ${{ inputs.nox-session }}
         id: nox-dependencies-cache
         uses:
actions/cache@v3 with: path: nox.${{ inputs.distro-slug }}.tar.* - key: ${{ inputs.cache-prefix }}|test-pkg-download-deps|${{ matrix.arch }}|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json') }} + key: ${{ inputs.cache-prefix }}|test-pkg-download-deps|${{ inputs.arch }}|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json') }} - name: Download Onedir Tarball as an Artifact if: inputs.artifacts-from-workflow == '' uses: actions/download-artifact@v3 with: - name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ matrix.arch }}.tar.xz + name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz path: artifacts/ - name: Download Onedir Tarball as an Artifact(from a different workflow) @@ -127,7 +108,7 @@ jobs: workflow_conclusion: "" branch: ${{ github.event.ref }} if_no_artifact_found: fail - name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ matrix.arch }}.tar.xz + name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz path: artifacts/ - name: Decompress Onedir Tarball @@ -136,7 +117,7 @@ jobs: run: | python3 -c "import os; os.makedirs('artifacts', exist_ok=True)" cd artifacts - tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ matrix.arch }}.tar.xz + tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz - name: Setup Python Tools Scripts if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' @@ -193,8 +174,8 @@ jobs: - name: Upload Onedir Tarball as an Artifact uses: actions/upload-artifact@v3 with: - name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ matrix.arch }}.tar.xz - path: artifacts/${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ matrix.arch }}.tar.xz* + name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz + path: artifacts/${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz* retention-days: 7 if-no-files-found: error @@ -213,12 +194,7 @@ jobs: environment: ${{ inputs.environment }} timeout-minutes: 120 # 2 Hours - More than this and something is wrong needs: - - generate-matrix - dependencies - strategy: - fail-fast: false - matrix: - include: ${{ fromJSON(needs.generate-matrix.outputs.test-matrix-include) }} steps: - name: Checkout Source Code @@ -227,7 +203,7 @@ jobs: - name: Download Onedir Tarball as an Artifact uses: actions/download-artifact@v3 with: - name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ matrix.arch }}.tar.xz + name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz path: artifacts/ - name: Decompress Onedir Tarball @@ -235,13 +211,13 @@ jobs: run: | python3 -c "import os; os.makedirs('artifacts', exist_ok=True)" cd artifacts - tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ matrix.arch }}.tar.xz + tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz - name: Download cached nox.${{ inputs.distro-slug }}.tar.* for 
session ${{ inputs.nox-session }} uses: actions/cache@v3 with: path: nox.${{ inputs.distro-slug }}.tar.* - key: ${{ inputs.cache-prefix }}|test-pkg-download-deps|${{ matrix.arch }}|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json') }} + key: ${{ inputs.cache-prefix }}|test-pkg-download-deps|${{ inputs.arch }}|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json') }} - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts @@ -271,9 +247,9 @@ jobs: - name: Show System Info & Test Plan env: - INSTALL_TYPE: ${{ matrix.install_type }} + INSTALL_TYPE: ${{ inputs.pkg-type }} SALT_RELEASE: "${{ inputs.salt-version }}" - SALT_REPO_ARCH: ${{ matrix.install_arch }} + SALT_REPO_ARCH: ${{ inputs.arch }} SALT_REPO_TYPE: ${{ inputs.environment }} SALT_REPO_USER: ${{ secrets.SALT_REPO_USER }} SALT_REPO_PASS: ${{ secrets.SALT_REPO_PASS }} @@ -288,9 +264,9 @@ jobs: - name: Run Package Download Tests env: - INSTALL_TYPE: ${{ matrix.install_type }} + INSTALL_TYPE: ${{ inputs.pkg-type }} SALT_RELEASE: "${{ inputs.salt-version }}" - SALT_REPO_ARCH: ${{ matrix.install_arch }} + SALT_REPO_ARCH: ${{ inputs.arch }} SALT_REPO_TYPE: ${{ inputs.environment }} SALT_REPO_USER: ${{ secrets.SALT_REPO_USER }} SALT_REPO_PASS: ${{ secrets.SALT_REPO_PASS }} @@ -331,24 +307,19 @@ jobs: if: always() && steps.download-artifacts-from-vm.outcome == 'success' uses: actions/upload-artifact@v3 with: - name: pkg-testrun-artifacts-${{ inputs.distro-slug }}-${{ matrix.arch }} + name: pkg-testrun-artifacts-${{ inputs.distro-slug }}-${{ inputs.arch }} path: | artifacts !artifacts/salt/* !artifacts/salt-*.tar.* report: - name: Reports for ${{ inputs.distro-slug }}(${{ matrix.arch }}) + name: Reports for ${{ inputs.distro-slug }}(${{ inputs.arch }}) runs-on: ubuntu-latest environment: ${{ inputs.environment }} if: always() && needs.test.result != 'cancelled' && needs.test.result != 'skipped' needs: - test - - generate-matrix - strategy: - fail-fast: false - matrix: - include: ${{ fromJSON(needs.generate-matrix.outputs.test-matrix-include) }} steps: - name: Checkout Source Code @@ -358,7 +329,7 @@ jobs: id: download-test-run-artifacts uses: actions/download-artifact@v3 with: - name: pkg-testrun-artifacts-${{ inputs.distro-slug }}-${{ matrix.arch }} + name: pkg-testrun-artifacts-${{ inputs.distro-slug }}-${{ inputs.arch }} path: artifacts - name: Show Test Run Artifacts @@ -380,6 +351,6 @@ jobs: # always run even if the previous steps fails if: always() && github.event_name == 'push' && steps.download-test-run-artifacts.outcome == 'success' with: - check_name: Overall Test Results(${{ inputs.distro-slug }} ${{ matrix.arch }}) + check_name: Overall Test Results(${{ inputs.distro-slug }} ${{ inputs.arch }} ${{ inputs.pkg-type }} ) report_paths: 'artifacts/xml-unittests-output/*.xml' annotate_only: true diff --git a/tools/ci.py b/tools/ci.py index 748bea85ac5..a3904b81693 100644 --- a/tools/ci.py +++ b/tools/ci.py @@ -645,44 +645,3 @@ def pkg_matrix(ctx: Context, distro_slug: str, pkg_type: str): with open(github_output, "a", encoding="utf-8") as wfh: wfh.write(f"matrix={json.dumps(matrix)}\n") ctx.exit(0) - - -@ci.command( - name="pkg-download-matrix", - arguments={ - "platform": { - "help": "The OS platform to generate the matrix for", - "choices": ("linux", "windows", "macos", "darwin"), - }, - }, -) -def pkg_download_matrix(ctx: Context, platform: str): - """ - Generate the test 
matrix. - """ - github_output = os.environ.get("GITHUB_OUTPUT") - if github_output is None: - ctx.warn("The 'GITHUB_OUTPUT' variable is not set.") - - tests = [] - arches = [] - if platform == "windows": - for arch in ("amd64", "x86"): - arches.append({"arch": arch}) - for install_type in ("msi", "nsis"): - tests.append({"arch": arch, "install_type": install_type}) - else: - for arch in ("x86_64", "aarch64"): - if platform in ("macos", "darwin") and arch == "aarch64": - continue - arches.append({"arch": arch}) - tests.append({"arch": arch}) - ctx.info("Generated arch matrix:") - ctx.print(arches, soft_wrap=True) - ctx.info("Generated test matrix:") - ctx.print(tests, soft_wrap=True) - if github_output is not None: - with open(github_output, "a", encoding="utf-8") as wfh: - wfh.write(f"arch={json.dumps(arches)}\n") - wfh.write(f"tests={json.dumps(tests)}\n") - ctx.exit(0) From 63e45bc5aad8fadd3f988982b4922b86de9d6cfc Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Mon, 17 Apr 2023 14:31:21 +0100 Subject: [PATCH 011/121] Add supporting code to get the system's grains as a fixture Signed-off-by: Pedro Algarvio --- pkg/tests/conftest.py | 11 + pkg/tests/download/test_pkg_download.py | 122 +++++------ pkg/tests/support/paths.py | 102 ++++++++++ pkg/tests/support/runtests.py | 209 +++++++++++++++++++ pkg/tests/support/sminion.py | 256 ++++++++++++++++++++++++ 5 files changed, 642 insertions(+), 58 deletions(-) create mode 100644 pkg/tests/support/paths.py create mode 100644 pkg/tests/support/runtests.py create mode 100644 pkg/tests/support/sminion.py diff --git a/pkg/tests/conftest.py b/pkg/tests/conftest.py index d8b39e7070a..9e6ea6fad51 100644 --- a/pkg/tests/conftest.py +++ b/pkg/tests/conftest.py @@ -21,6 +21,7 @@ from tests.support.helpers import ( SaltPkgInstall, TestUser, ) +from tests.support.sminion import create_sminion log = logging.getLogger(__name__) @@ -33,6 +34,16 @@ def version(install_salt): return install_salt.get_version(version_only=True) +@pytest.fixture(scope="session") +def sminion(): + return create_sminion() + + +@pytest.fixture(scope="session") +def grains(sminion): + return sminion.opts["grains"].copy() + + def pytest_addoption(parser): """ register argparse-style options and ini-style config values. 
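The `grains` fixture added above is what lets the download tests that follow collapse their per-OS test functions into a single code path. A minimal sketch of the consumption pattern — not part of the patch itself; only the `grains` fixture comes from the conftest.py hunk above, while the `PKG_MANAGER_BY_FAMILY` mapping and the test body are illustrative assumptions:

    import shutil

    import pytest

    # Illustrative mapping only; not data taken from this patch series.
    PKG_MANAGER_BY_FAMILY = {"Debian": "apt-get", "RedHat": "yum", "Suse": "zypper"}


    def test_native_pkg_manager_present(grains):
        # ``grains`` is the session-scoped fixture added in the conftest.py
        # hunk above; it exposes the local minion's grains as a plain dict.
        binary = PKG_MANAGER_BY_FAMILY.get(grains["os_family"])
        if binary is None:
            pytest.skip(f"No package manager recorded for {grains['osfinger']}")
        # The download tests drive the platform's native package manager,
        # so it has to be resolvable on PATH.
        assert shutil.which(binary) is not None

Because the dispatch now happens at runtime from the host's own grains, the same test module runs unmodified on every distribution, which is what allows the per-distro container matrix to be dropped in the next diff.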
diff --git a/pkg/tests/download/test_pkg_download.py b/pkg/tests/download/test_pkg_download.py index c346e2d3ac2..aa13f39068d 100644 --- a/pkg/tests/download/test_pkg_download.py +++ b/pkg/tests/download/test_pkg_download.py @@ -4,6 +4,7 @@ Test Salt Pkg Downloads import logging import os import pathlib +import shutil import packaging import pytest @@ -108,64 +109,69 @@ def salt_release(): @pytest.fixture(scope="module") -def setup_system(grains, shell, root_url, salt_release, downloads_path, gpg_key_name): - if grains["os_family"] == "Windows": - setup_windows(shell, root_url=root_url, salt_release=salt_release) - elif grains["os_family"] == "MacOS": - setup_macos(shell, root_url=root_url, salt_release=salt_release) - elif grains["os"] == "Amazon": - setup_redhat_family( - shell, - os_name=grains["os"].lower(), - os_version=grains["osmajorrelease"], - root_url=root_url, - salt_release=salt_release, - downloads_path=downloads_path, - gpg_key_name=gpg_key_name, - ) - elif grains["os"] == "Fedora": - setup_redhat_family( - shell, - os_name=grains["os"].lower(), - os_version=grains["osmajorrelease"], - root_url=root_url, - salt_release=salt_release, - downloads_path=downloads_path, - gpg_key_name=gpg_key_name, - ) - elif grains["os"] == "VMware Photon OS": - setup_redhat_family( - shell, - os_name="photon", - os_version=grains["osmajorrelease"], - root_url=root_url, - salt_release=salt_release, - downloads_path=downloads_path, - gpg_key_name=gpg_key_name, - ) - elif grains["os_family"] == "RedHat": - setup_redhat_family( - shell, - os_name="redhat", - os_version=grains["osmajorrelease"], - root_url=root_url, - salt_release=salt_release, - downloads_path=downloads_path, - gpg_key_name=gpg_key_name, - ) - elif grains["os_family"] == "Debian": - setup_debian_family( - shell, - os_name=grains["os"].lower(), - os_version=grains["osrelease"], - os_codename=grains["oscodename"], - root_url=root_url, - salt_release=salt_release, - downloads_path=downloads_path, - gpg_key_name=gpg_key_name, - ) - else: - pytest.fail("Don't know how to handle %s", grains["osfinger"]) +def setup_system(tmp_path_factory, grains, shell, root_url, salt_release, gpg_key_name): + downloads_path = tmp_path_factory.mktemp("downloads") + try: + if grains["os_family"] == "Windows": + setup_windows(shell, root_url=root_url, salt_release=salt_release) + elif grains["os_family"] == "MacOS": + setup_macos(shell, root_url=root_url, salt_release=salt_release) + elif grains["os"] == "Amazon": + setup_redhat_family( + shell, + os_name=grains["os"].lower(), + os_version=grains["osmajorrelease"], + root_url=root_url, + salt_release=salt_release, + downloads_path=downloads_path, + gpg_key_name=gpg_key_name, + ) + elif grains["os"] == "Fedora": + setup_redhat_family( + shell, + os_name=grains["os"].lower(), + os_version=grains["osmajorrelease"], + root_url=root_url, + salt_release=salt_release, + downloads_path=downloads_path, + gpg_key_name=gpg_key_name, + ) + elif grains["os"] == "VMware Photon OS": + setup_redhat_family( + shell, + os_name="photon", + os_version=grains["osmajorrelease"], + root_url=root_url, + salt_release=salt_release, + downloads_path=downloads_path, + gpg_key_name=gpg_key_name, + ) + elif grains["os_family"] == "RedHat": + setup_redhat_family( + shell, + os_name="redhat", + os_version=grains["osmajorrelease"], + root_url=root_url, + salt_release=salt_release, + downloads_path=downloads_path, + gpg_key_name=gpg_key_name, + ) + elif grains["os_family"] == "Debian": + setup_debian_family( + shell, + 
os_name=grains["os"].lower(),
+                os_version=grains["osrelease"],
+                os_codename=grains["oscodename"],
+                root_url=root_url,
+                salt_release=salt_release,
+                downloads_path=downloads_path,
+                gpg_key_name=gpg_key_name,
+            )
+        else:
+            pytest.fail(f"Don't know how to handle {grains['osfinger']}")
+        yield
+    finally:
+        shutil.rmtree(downloads_path, ignore_errors=True)
 
 
 def setup_redhat_family(
diff --git a/pkg/tests/support/paths.py b/pkg/tests/support/paths.py
new file mode 100644
index 00000000000..a8a82bce0e0
--- /dev/null
+++ b/pkg/tests/support/paths.py
@@ -0,0 +1,102 @@
+"""
+    :codeauthor: Pedro Algarvio (pedro@algarvio.me)
+    :copyright: Copyright 2017 by the SaltStack Team, see AUTHORS for more details.
+    :license: Apache 2.0, see LICENSE for more details.
+
+
+    tests.support.paths
+    ~~~~~~~~~~~~~~~~~~~
+
+    Tests related paths
+"""
+
+import logging
+import os
+import re
+import sys
+import tempfile
+
+log = logging.getLogger(__name__)
+
+SALT_CODE_DIR = os.path.join(
+    os.path.dirname(
+        os.path.dirname(
+            os.path.dirname(
+                os.path.dirname(os.path.normpath(os.path.abspath(__file__)))
+            )
+        )
+    ),
+    "salt",
+)
+TESTS_DIR = os.path.join(os.path.dirname(SALT_CODE_DIR), "tests")
+if TESTS_DIR.startswith("//"):
+    # Have we been given an initial double forward slash? Ditch it!
+    TESTS_DIR = TESTS_DIR[1:]
+if sys.platform.startswith("win"):
+    TESTS_DIR = os.path.normcase(TESTS_DIR)
+CODE_DIR = os.path.dirname(TESTS_DIR)
+if sys.platform.startswith("win"):
+    CODE_DIR = CODE_DIR.replace("\\", "\\\\")
+UNIT_TEST_DIR = os.path.join(TESTS_DIR, "unit")
+INTEGRATION_TEST_DIR = os.path.join(TESTS_DIR, "integration")
+
+# Let's inject CODE_DIR so salt is importable if not there already
+if TESTS_DIR in sys.path:
+    sys.path.remove(TESTS_DIR)
+if CODE_DIR in sys.path and sys.path[0] != CODE_DIR:
+    sys.path.remove(CODE_DIR)
+if CODE_DIR not in sys.path:
+    sys.path.insert(0, CODE_DIR)
+if TESTS_DIR not in sys.path:
+    sys.path.insert(1, TESTS_DIR)
+
+SYS_TMP_DIR = os.path.abspath(
+    os.path.realpath(
+        # Avoid ${TMPDIR} and gettempdir() on MacOS as they yield a base path too long
+        # for unix sockets: ``error: AF_UNIX path too long``
+        # Gentoo Portage prefers ebuild tests are rooted in ${TMPDIR}
+        os.environ.get("TMPDIR", tempfile.gettempdir())
+        if not sys.platform.startswith("darwin")
+        else "/tmp"
+    )
+)
+TMP = os.path.join(SYS_TMP_DIR, "salt-tests-tmpdir")
+TMP_ROOT_DIR = os.path.join(TMP, "rootdir")
+FILES = os.path.join(INTEGRATION_TEST_DIR, "files")
+BASE_FILES = os.path.join(INTEGRATION_TEST_DIR, "files", "file", "base")
+PROD_FILES = os.path.join(INTEGRATION_TEST_DIR, "files", "file", "prod")
+PYEXEC = "python{}.{}".format(*sys.version_info)
+MOCKBIN = os.path.join(INTEGRATION_TEST_DIR, "mockbin")
+SCRIPT_DIR = os.path.join(CODE_DIR, "scripts")
+TMP_STATE_TREE = os.path.join(SYS_TMP_DIR, "salt-temp-state-tree")
+TMP_PILLAR_TREE = os.path.join(SYS_TMP_DIR, "salt-temp-pillar-tree")
+TMP_PRODENV_STATE_TREE = os.path.join(SYS_TMP_DIR, "salt-temp-prodenv-state-tree")
+TMP_PRODENV_PILLAR_TREE = os.path.join(SYS_TMP_DIR, "salt-temp-prodenv-pillar-tree")
+TMP_CONF_DIR = TMP_MINION_CONF_DIR = os.path.join(TMP, "config")
+TMP_SUB_MINION_CONF_DIR = os.path.join(TMP_CONF_DIR, "sub-minion")
+TMP_SYNDIC_MINION_CONF_DIR = os.path.join(TMP_CONF_DIR, "syndic-minion")
+TMP_SYNDIC_MASTER_CONF_DIR = os.path.join(TMP_CONF_DIR, "syndic-master")
+TMP_SSH_CONF_DIR = TMP_MINION_CONF_DIR
+CONF_DIR = os.path.join(INTEGRATION_TEST_DIR, "files", "conf")
+PILLAR_DIR = os.path.join(FILES, "pillar")
+TMP_SCRIPT_DIR = os.path.join(TMP,
"scripts") +ENGINES_DIR = os.path.join(FILES, "engines") +LOG_HANDLERS_DIR = os.path.join(FILES, "log_handlers") + + +def list_test_mods(): + """ + A generator which returns all of the test files + """ + test_re = re.compile(r"^test_.+\.py$") + for dirname in (UNIT_TEST_DIR, INTEGRATION_TEST_DIR): + test_type = os.path.basename(dirname) + for root, _, files in os.walk(dirname): + parent_mod = root[len(dirname) :].lstrip(os.sep).replace(os.sep, ".") + for filename in files: + if test_re.match(filename): + mod_name = test_type + if parent_mod: + mod_name += "." + parent_mod + mod_name += "." + filename[:-3] + yield mod_name diff --git a/pkg/tests/support/runtests.py b/pkg/tests/support/runtests.py new file mode 100644 index 00000000000..ce5c9644cd3 --- /dev/null +++ b/pkg/tests/support/runtests.py @@ -0,0 +1,209 @@ +""" + :codeauthor: Pedro Algarvio (pedro@algarvio.me) + + .. _runtime_vars: + + Runtime Variables + ----------------- + + :command:`salt-runtests` provides a variable, :py:attr:`RUNTIME_VARS` which has some common paths defined at + startup: + + .. autoattribute:: tests.support.runtests.RUNTIME_VARS + :annotation: + + :TMP: Tests suite temporary directory + :TMP_CONF_DIR: Configuration directory from where the daemons that :command:`salt-runtests` starts get their + configuration files. + :TMP_CONF_MASTER_INCLUDES: Salt Master configuration files includes directory. See + :salt_conf_master:`default_include`. + :TMP_CONF_MINION_INCLUDES: Salt Minion configuration files includes directory. Seei + :salt_conf_minion:`include`. + :TMP_CONF_CLOUD_INCLUDES: Salt cloud configuration files includes directory. The same as the salt master and + minion includes configuration, though under a different directory name. + :TMP_CONF_CLOUD_PROFILE_INCLUDES: Salt cloud profiles configuration files includes directory. Same as above. + :TMP_CONF_CLOUD_PROVIDER_INCLUDES: Salt cloud providers configuration files includes directory. Same as above. + :TMP_SCRIPT_DIR: Temporary scripts directory from where the Salt CLI tools will be called when running tests. + :TMP_SALT_INTEGRATION_FILES: Temporary directory from where Salt's test suite integration files are copied to. + :TMP_BASEENV_STATE_TREE: Salt master's **base** environment state tree directory + :TMP_PRODENV_STATE_TREE: Salt master's **production** environment state tree directory + :TMP_BASEENV_PILLAR_TREE: Salt master's **base** environment pillar tree directory + :TMP_PRODENV_PILLAR_TREE: Salt master's **production** environment pillar tree directory + + + Use it on your test case in case of need. As simple as: + + .. code-block:: python + + import os + from tests.support.runtests import RUNTIME_VARS + + # Path to the testing minion configuration file + minion_config_path = os.path.join(RUNTIME_VARS.TMP_CONF_DIR, 'minion') + + .. _`pytest`: http://pytest.org + """ + +import logging +import os +import shutil + +import salt.utils.path +import salt.utils.platform +import tests.support.paths as paths + +try: + import pwd +except ImportError: + import salt.utils.win_functions + +log = logging.getLogger(__name__) + + +def this_user(): + """ + Get the user associated with the current process. 
+ """ + if salt.utils.platform.is_windows(): + return salt.utils.win_functions.get_current_user(with_domain=False) + return pwd.getpwuid(os.getuid())[0] + + +class RootsDict(dict): + def merge(self, data): + for key, values in data.items(): + if key not in self: + self[key] = values + continue + for value in values: + if value not in self[key]: + self[key].append(value) + return self + + def to_dict(self): + return dict(self) + + +def recursive_copytree(source, destination, overwrite=False): + for root, dirs, files in os.walk(source): + for item in dirs: + src_path = os.path.join(root, item) + dst_path = os.path.join( + destination, src_path.replace(source, "").lstrip(os.sep) + ) + if not os.path.exists(dst_path): + log.debug("Creating directory: %s", dst_path) + os.makedirs(dst_path) + for item in files: + src_path = os.path.join(root, item) + dst_path = os.path.join( + destination, src_path.replace(source, "").lstrip(os.sep) + ) + if os.path.exists(dst_path) and not overwrite: + if os.stat(src_path).st_mtime > os.stat(dst_path).st_mtime: + log.debug("Copying %s to %s", src_path, dst_path) + shutil.copy2(src_path, dst_path) + else: + if not os.path.isdir(os.path.dirname(dst_path)): + log.debug("Creating directory: %s", os.path.dirname(dst_path)) + os.makedirs(os.path.dirname(dst_path)) + log.debug("Copying %s to %s", src_path, dst_path) + shutil.copy2(src_path, dst_path) + + +class RuntimeVars: + + __self_attributes__ = ("_vars", "_locked", "lock") + + def __init__(self, **kwargs): + self._vars = kwargs + self._locked = False + + def lock(self): + # Late import + from salt.utils.immutabletypes import freeze + + frozen_vars = freeze(self._vars.copy()) + self._vars = frozen_vars + self._locked = True + + def __iter__(self): + yield from self._vars.items() + + def __getattribute__(self, name): + if name in object.__getattribute__(self, "_vars"): + return object.__getattribute__(self, "_vars")[name] + return object.__getattribute__(self, name) + + def __setattr__(self, name, value): + if getattr(self, "_locked", False) is True: + raise RuntimeError( + "After {} is locked, no additional data can be added to it".format( + self.__class__.__name__ + ) + ) + if name in object.__getattribute__(self, "__self_attributes__"): + object.__setattr__(self, name, value) + return + self._vars[name] = value + + +# <---- Helper Methods ----------------------------------------------------------------------------------------------- + + +# ----- Global Variables --------------------------------------------------------------------------------------------> +XML_OUTPUT_DIR = os.environ.get( + "SALT_XML_TEST_REPORTS_DIR", os.path.join(paths.TMP, "xml-test-reports") +) +# <---- Global Variables --------------------------------------------------------------------------------------------- + + +# ----- Tests Runtime Variables -------------------------------------------------------------------------------------> + +RUNTIME_VARS = RuntimeVars( + TMP=paths.TMP, + SYS_TMP_DIR=paths.SYS_TMP_DIR, + FILES=paths.FILES, + CONF_DIR=paths.CONF_DIR, + PILLAR_DIR=paths.PILLAR_DIR, + ENGINES_DIR=paths.ENGINES_DIR, + LOG_HANDLERS_DIR=paths.LOG_HANDLERS_DIR, + TMP_ROOT_DIR=paths.TMP_ROOT_DIR, + TMP_CONF_DIR=paths.TMP_CONF_DIR, + TMP_MINION_CONF_DIR=paths.TMP_MINION_CONF_DIR, + TMP_CONF_MASTER_INCLUDES=os.path.join(paths.TMP_CONF_DIR, "master.d"), + TMP_CONF_MINION_INCLUDES=os.path.join(paths.TMP_CONF_DIR, "minion.d"), + TMP_CONF_PROXY_INCLUDES=os.path.join(paths.TMP_CONF_DIR, "proxy.d"), + 
TMP_CONF_CLOUD_INCLUDES=os.path.join(paths.TMP_CONF_DIR, "cloud.conf.d"), + TMP_CONF_CLOUD_PROFILE_INCLUDES=os.path.join( + paths.TMP_CONF_DIR, "cloud.profiles.d" + ), + TMP_CONF_CLOUD_PROVIDER_INCLUDES=os.path.join( + paths.TMP_CONF_DIR, "cloud.providers.d" + ), + TMP_SUB_MINION_CONF_DIR=paths.TMP_SUB_MINION_CONF_DIR, + TMP_SYNDIC_MASTER_CONF_DIR=paths.TMP_SYNDIC_MASTER_CONF_DIR, + TMP_SYNDIC_MINION_CONF_DIR=paths.TMP_SYNDIC_MINION_CONF_DIR, + TMP_SSH_CONF_DIR=paths.TMP_SSH_CONF_DIR, + TMP_SCRIPT_DIR=paths.TMP_SCRIPT_DIR, + TMP_STATE_TREE=paths.TMP_STATE_TREE, + TMP_BASEENV_STATE_TREE=paths.TMP_STATE_TREE, + TMP_PILLAR_TREE=paths.TMP_PILLAR_TREE, + TMP_BASEENV_PILLAR_TREE=paths.TMP_PILLAR_TREE, + TMP_PRODENV_STATE_TREE=paths.TMP_PRODENV_STATE_TREE, + TMP_PRODENV_PILLAR_TREE=paths.TMP_PRODENV_PILLAR_TREE, + SHELL_TRUE_PATH=salt.utils.path.which("true") + if not salt.utils.platform.is_windows() + else "cmd /c exit 0 > nul", + SHELL_FALSE_PATH=salt.utils.path.which("false") + if not salt.utils.platform.is_windows() + else "cmd /c exit 1 > nul", + RUNNING_TESTS_USER=this_user(), + RUNTIME_CONFIGS={}, + CODE_DIR=paths.CODE_DIR, + SALT_CODE_DIR=paths.SALT_CODE_DIR, + BASE_FILES=paths.BASE_FILES, + PROD_FILES=paths.PROD_FILES, + TESTS_DIR=paths.TESTS_DIR, +) +# <---- Tests Runtime Variables -------------------------------------------------------------------------------------- diff --git a/pkg/tests/support/sminion.py b/pkg/tests/support/sminion.py new file mode 100644 index 00000000000..abf45fd7bde --- /dev/null +++ b/pkg/tests/support/sminion.py @@ -0,0 +1,256 @@ +""" +tests.support.sminion +~~~~~~~~~~~~~~~~~~~~~ + +SMinion's support functions +""" + +import fnmatch +import hashlib +import logging +import os +import shutil +import sys + +import salt.minion +import salt.utils.path +import salt.utils.stringutils +from tests.support.runtests import RUNTIME_VARS + +log = logging.getLogger(__name__) + +DEFAULT_SMINION_ID = "pytest-internal-sminion" + + +def build_minion_opts( + minion_id=None, + root_dir=None, + initial_conf_file=None, + minion_opts_overrides=None, + skip_cached_opts=False, + cache_opts=True, + minion_role=None, +): + if minion_id is None: + minion_id = DEFAULT_SMINION_ID + if skip_cached_opts is False: + try: + opts_cache = build_minion_opts.__cached_opts__ + except AttributeError: + opts_cache = build_minion_opts.__cached_opts__ = {} + cached_opts = opts_cache.get(minion_id) + if cached_opts: + return cached_opts + + log.info("Generating testing minion %r configuration...", minion_id) + if root_dir is None: + hashed_minion_id = hashlib.sha1() + hashed_minion_id.update(salt.utils.stringutils.to_bytes(minion_id)) + root_dir = os.path.join( + RUNTIME_VARS.TMP_ROOT_DIR, hashed_minion_id.hexdigest()[:6] + ) + + if initial_conf_file is not None: + minion_opts = salt.config._read_conf_file( + initial_conf_file + ) # pylint: disable=protected-access + else: + minion_opts = {} + + conf_dir = os.path.join(root_dir, "conf") + conf_file = os.path.join(conf_dir, "minion") + + minion_opts["id"] = minion_id + minion_opts["conf_file"] = conf_file + minion_opts["root_dir"] = root_dir + minion_opts["cachedir"] = "cache" + minion_opts["user"] = RUNTIME_VARS.RUNNING_TESTS_USER + minion_opts["pki_dir"] = "pki" + minion_opts["hosts.file"] = os.path.join(RUNTIME_VARS.TMP_ROOT_DIR, "hosts") + minion_opts["aliases.file"] = os.path.join(RUNTIME_VARS.TMP_ROOT_DIR, "aliases") + minion_opts["file_client"] = "local" + minion_opts["server_id_use_crc"] = "adler32" + minion_opts["pillar_roots"] = {"base": 
[RUNTIME_VARS.TMP_PILLAR_TREE]}
+    minion_opts["file_roots"] = {
+        "base": [
+            # Let's support runtime created files that can be used like:
+            #   salt://my-temp-file.txt
+            RUNTIME_VARS.TMP_STATE_TREE
+        ],
+        # Alternate root to test __env__ choices
+        "prod": [
+            os.path.join(RUNTIME_VARS.FILES, "file", "prod"),
+            RUNTIME_VARS.TMP_PRODENV_STATE_TREE,
+        ],
+    }
+    if initial_conf_file and initial_conf_file.startswith(RUNTIME_VARS.FILES):
+        # We assume we were passed a minion configuration file defined for testing and, as such,
+        # we define the file and pillar roots to include the testing states/pillar trees
+        minion_opts["pillar_roots"]["base"].append(
+            os.path.join(RUNTIME_VARS.FILES, "pillar", "base"),
+        )
+        minion_opts["file_roots"]["base"].append(
+            os.path.join(RUNTIME_VARS.FILES, "file", "base"),
+        )
+        minion_opts["file_roots"]["prod"].append(
+            os.path.join(RUNTIME_VARS.FILES, "file", "prod"),
+        )
+
+    # We need to copy the extension modules into the new master root_dir or
+    # it will be prefixed by it
+    extension_modules_path = os.path.join(root_dir, "extension_modules")
+    if not os.path.exists(extension_modules_path):
+        shutil.copytree(
+            os.path.join(RUNTIME_VARS.FILES, "extension_modules"),
+            extension_modules_path,
+        )
+    minion_opts["extension_modules"] = extension_modules_path
+
+    # Custom grains
+    if "grains" not in minion_opts:
+        minion_opts["grains"] = {}
+    if minion_role is not None:
+        minion_opts["grains"]["role"] = minion_role
+
+    # Under windows we can't seem to properly create a virtualenv off of another
+    # virtualenv, we can on linux but we will still point to the virtualenv binary
+    # outside the virtualenv running the test suite, if that's the case.
+    try:
+        real_prefix = sys.real_prefix
+        # The above attribute exists, this is a virtualenv
+        if salt.utils.platform.is_windows():
+            virtualenv_binary = os.path.join(real_prefix, "Scripts", "virtualenv.exe")
+        else:
+            # We need to remove the virtualenv from PATH or we'll get the virtualenv binary
+            # from within the virtualenv, we don't want that
+            path = os.environ.get("PATH")
+            if path is not None:
+                path_items = path.split(os.pathsep)
+                for item in path_items[:]:
+                    if item.startswith(sys.base_prefix):
+                        path_items.remove(item)
+                os.environ["PATH"] = os.pathsep.join(path_items)
+            virtualenv_binary = salt.utils.path.which("virtualenv")
+            if path is not None:
+                # Restore previous environ PATH
+                os.environ["PATH"] = path
+            if not virtualenv_binary.startswith(real_prefix):
+                virtualenv_binary = None
+        if virtualenv_binary and not os.path.exists(virtualenv_binary):
+            # It doesn't exist?!
+ virtualenv_binary = None + except AttributeError: + # We're not running inside a virtualenv + virtualenv_binary = None + if virtualenv_binary: + minion_opts["venv_bin"] = virtualenv_binary + + # Override minion_opts with minion_opts_overrides + if minion_opts_overrides: + minion_opts.update(minion_opts_overrides) + + if not os.path.exists(conf_dir): + os.makedirs(conf_dir) + + with salt.utils.files.fopen(conf_file, "w") as fp_: + salt.utils.yaml.safe_dump(minion_opts, fp_, default_flow_style=False) + + log.info("Generating testing minion %r configuration completed.", minion_id) + minion_opts = salt.config.minion_config( + conf_file, minion_id=minion_id, cache_minion_id=True + ) + salt.utils.verify.verify_env( + [ + os.path.join(minion_opts["pki_dir"], "accepted"), + os.path.join(minion_opts["pki_dir"], "rejected"), + os.path.join(minion_opts["pki_dir"], "pending"), + os.path.dirname(minion_opts["log_file"]), + minion_opts["extension_modules"], + minion_opts["cachedir"], + minion_opts["sock_dir"], + RUNTIME_VARS.TMP_STATE_TREE, + RUNTIME_VARS.TMP_PILLAR_TREE, + RUNTIME_VARS.TMP_PRODENV_STATE_TREE, + RUNTIME_VARS.TMP, + ], + RUNTIME_VARS.RUNNING_TESTS_USER, + root_dir=root_dir, + ) + if cache_opts: + try: + opts_cache = build_minion_opts.__cached_opts__ + except AttributeError: + opts_cache = build_minion_opts.__cached_opts__ = {} + opts_cache[minion_id] = minion_opts + return minion_opts + + +def create_sminion( + minion_id=None, + root_dir=None, + initial_conf_file=None, + sminion_cls=salt.minion.SMinion, + minion_opts_overrides=None, + skip_cached_minion=False, + cache_sminion=True, +): + if minion_id is None: + minion_id = DEFAULT_SMINION_ID + if skip_cached_minion is False: + try: + minions_cache = create_sminion.__cached_minions__ + except AttributeError: + create_sminion.__cached_minions__ = {} + cached_minion = create_sminion.__cached_minions__.get(minion_id) + if cached_minion: + return cached_minion + minion_opts = build_minion_opts( + minion_id=minion_id, + root_dir=root_dir, + initial_conf_file=initial_conf_file, + minion_opts_overrides=minion_opts_overrides, + skip_cached_opts=skip_cached_minion, + cache_opts=cache_sminion, + ) + log.info("Instantiating a testing %s(%s)", sminion_cls.__name__, minion_id) + sminion = sminion_cls(minion_opts) + if cache_sminion: + try: + minions_cache = create_sminion.__cached_minions__ + except AttributeError: + minions_cache = create_sminion.__cached_minions__ = {} + minions_cache[minion_id] = sminion + return sminion + + +def check_required_sminion_attributes(sminion_attr, required_items): + """ + :param sminion_attr: The name of the sminion attribute to check, such as 'functions' or 'states' + :param required_items: The items that must be part of the designated sminion attribute for the decorated test + :return The packages that are not available + """ + required_salt_items = set(required_items) + sminion = create_sminion(minion_id=DEFAULT_SMINION_ID) + available_items = list(getattr(sminion, sminion_attr)) + not_available_items = set() + + name = "__not_available_{items}s__".format(items=sminion_attr) + if not hasattr(sminion, name): + setattr(sminion, name, set()) + + cached_not_available_items = getattr(sminion, name) + + for not_available_item in cached_not_available_items: + if not_available_item in required_salt_items: + not_available_items.add(not_available_item) + required_salt_items.remove(not_available_item) + + for required_item_name in required_salt_items: + search_name = required_item_name + if "." 
not in search_name: + search_name += ".*" + if not fnmatch.filter(available_items, search_name): + not_available_items.add(required_item_name) + cached_not_available_items.add(required_item_name) + + return not_available_items From 9e769baec2c2f58eb0ebd31a193bf8800f0e6de6 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Mon, 17 Apr 2023 09:28:24 +0100 Subject: [PATCH 012/121] `true/false` is really not explicit on the GH Actions UI, switch to `onedir/src` Signed-off-by: Pedro Algarvio --- .github/workflows/build-deb-packages.yml | 39 +++++++++++------------- .github/workflows/build-rpm-packages.yml | 36 +++++++++++----------- 2 files changed, 35 insertions(+), 40 deletions(-) diff --git a/.github/workflows/build-deb-packages.yml b/.github/workflows/build-deb-packages.yml index f9aa6af3ae3..3823a620ed1 100644 --- a/.github/workflows/build-deb-packages.yml +++ b/.github/workflows/build-deb-packages.yml @@ -22,9 +22,9 @@ jobs: arch: - x86_64 - aarch64 - src: - - true - - false + source: + - onedir + - src container: image: ghcr.io/saltstack/salt-ci-containers/packaging:debian-11 @@ -79,34 +79,31 @@ jobs: - name: Build Deb working-directory: pkgs/checkout/ - if: ${{ matrix.src != true}} run: | - tools pkg build deb --onedir salt-${{ inputs.salt-version }}-onedir-linux-${{ matrix.arch }}.tar.xz - - - name: Build Deb - working-directory: pkgs/checkout/ - if: ${{ matrix.src == true}} - run: | - tools pkg build deb --arch ${{ matrix.arch }} + tools pkg build deb ${{ + matrix.source == 'onedir' && + format('--onedir=salt-{0}-onedir-linux-{1}.tar.xz', inputs.salt-version, matrix.arch) + || + format('--arch={0}', matrix.arch) + }} - name: Cleanup run: | rm -rf pkgs/checkout/ - - name: Upload DEBs - uses: actions/upload-artifact@v3 - if: ${{ matrix.src == false}} - with: - name: salt-${{ inputs.salt-version }}-${{ matrix.arch }}-deb - path: ${{ github.workspace }}/pkgs/* - retention-days: 7 - if-no-files-found: error + - name: Set Artifact Name + id: set-artifact-name + run: | + if [ "${{ matrix.source }}" != "src" ]; then + echo "artifact-name=salt-${{ inputs.salt-version }}-${{ matrix.arch }}-deb" >> "$GITHUB_OUTPUT" + else + echo "artifact-name=salt-${{ inputs.salt-version }}-${{ matrix.arch }}-deb-from-src" >> "$GITHUB_OUTPUT" + fi - name: Upload DEBs uses: actions/upload-artifact@v3 - if: ${{ matrix.src == true}} with: - name: salt-${{ inputs.salt-version }}-${{ matrix.arch }}-deb-from-src + name: ${{ steps.set-artifact-name.outputs.artifact-name }} path: ${{ github.workspace }}/pkgs/* retention-days: 7 if-no-files-found: error diff --git a/.github/workflows/build-rpm-packages.yml b/.github/workflows/build-rpm-packages.yml index ef032568063..72464818307 100644 --- a/.github/workflows/build-rpm-packages.yml +++ b/.github/workflows/build-rpm-packages.yml @@ -25,9 +25,9 @@ jobs: arch: - x86_64 - aarch64 - src: - - true - - false + source: + - onedir + - src container: image: ghcr.io/saltstack/salt-ci-containers/packaging:centosstream-9 @@ -67,29 +67,27 @@ jobs: tools pkg apply-release-patch salt-${{ inputs.salt-version }}.patch --delete - name: Build RPM - if: ${{ matrix.src != true}} run: | - tools pkg build rpm --onedir salt-${{ inputs.salt-version }}-onedir-linux-${{ matrix.arch }}.tar.xz + tools pkg build rpm ${{ + matrix.source == 'onedir' && + format('--onedir=salt-{0}-onedir-linux-{1}.tar.xz', inputs.salt-version, matrix.arch) + || + format('--arch={0}', matrix.arch) + }} - - name: Build RPM - if: ${{ matrix.src == true}} + - name: Set Artifact Name + id: set-artifact-name run: | - tools 
pkg build rpm --arch ${{ matrix.arch }} + if [ "${{ matrix.source }}" != "src" ]; then + echo "artifact-name=salt-${{ inputs.salt-version }}-${{ matrix.arch }}-rpm" >> "$GITHUB_OUTPUT" + else + echo "artifact-name=salt-${{ inputs.salt-version }}-${{ matrix.arch }}-rpm-from-src" >> "$GITHUB_OUTPUT" + fi - name: Upload RPMs uses: actions/upload-artifact@v3 - if: ${{ matrix.src != true}} with: - name: salt-${{ inputs.salt-version }}-${{ matrix.arch }}-rpm - path: ~/rpmbuild/RPMS/${{ matrix.arch }}/*.rpm - retention-days: 7 - if-no-files-found: error - - - name: Upload RPMs - uses: actions/upload-artifact@v3 - if: ${{ matrix.src == true}} - with: - name: salt-${{ inputs.salt-version }}-${{ matrix.arch }}-rpm-from-src + name: ${{ steps.set-artifact-name.outputs.artifact-name }} path: ~/rpmbuild/RPMS/${{ matrix.arch }}/*.rpm retention-days: 7 if-no-files-found: error From 01a92787002bb3bde36051167a6dcad7d039cacb Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Mon, 17 Apr 2023 09:58:45 +0100 Subject: [PATCH 013/121] Fix/Improve artifact download/upload for package download tests Signed-off-by: Pedro Algarvio --- .github/workflows/release.yml | 84 ++++++++++++++----- .github/workflows/templates/release.yml.jinja | 42 ++++++++++ .../test-pkg-repo-downloads.yml.jinja | 18 ++-- .../test-package-downloads-action-linux.yml | 29 +------ .../test-package-downloads-action-macos.yml | 31 +------ .../test-package-downloads-action-windows.yml | 29 +------ 6 files changed, 118 insertions(+), 115 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 734a2b316eb..3c6fe2a2063 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -98,6 +98,48 @@ jobs: run: | echo "cache-seed=${{ env.CACHE_SEED }}" >> "$GITHUB_OUTPUT" + download-onedir-artifact: + name: Download Staging Onedir Artifact + if: ${{ inputs.skip-salt-pkg-download-test-suite == false }} + runs-on: + - self-hosted + - linux + environment: release + needs: + - prepare-workflow + strategy: + fail-fast: false + matrix: + include: + - platform: linux + arch: x86_64 + - platform: linux + arch: aarch64 + - platform: windows + arch: amd64 + - platform: windows + arch: x86 + - platform: darwin + arch: x86_64 + steps: + - name: Download Onedir Tarball as an Artifact(from a different workflow) + uses: dawidd6/action-download-artifact@v2 + with: + workflow: staging.yml + workflow_conclusion: "" + branch: ${{ github.event.ref }} + if_no_artifact_found: fail + name: salt-${{ inputs.salt-version }}-onedir-${{ matrix.platform }}-${{ matrix.arch }}.tar.xz + path: artifacts/ + + - name: Upload Onedir Tarball as an Artifact + uses: actions/upload-artifact@v3 + with: + name: salt-${{ inputs.salt-version }}-onedir-${{ matrix.platform }}-${{ matrix.arch }}.tar.xz + path: artifacts/salt-${{ inputs.salt-version }}-onedir-${{ matrix.platform }}-${{ matrix.arch }}.tar.xz* + retention-days: 7 + if-no-files-found: error + backup: name: Backup runs-on: @@ -161,6 +203,7 @@ jobs: needs: - prepare-workflow - publish-repositories + - download-onedir-artifact uses: ./.github/workflows/test-package-downloads-action-linux.yml with: distro-slug: almalinux-8 @@ -170,7 +213,6 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: release skip-code-coverage: true - artifacts-from-workflow: staging.yml secrets: inherit almalinux-9-pkg-download-tests: @@ -179,6 +221,7 @@ jobs: needs: - prepare-workflow - publish-repositories + - download-onedir-artifact uses: 
./.github/workflows/test-package-downloads-action-linux.yml with: distro-slug: almalinux-9 @@ -188,7 +231,6 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: release skip-code-coverage: true - artifacts-from-workflow: staging.yml secrets: inherit amazonlinux-2-pkg-download-tests: @@ -197,6 +239,7 @@ jobs: needs: - prepare-workflow - publish-repositories + - download-onedir-artifact uses: ./.github/workflows/test-package-downloads-action-linux.yml with: distro-slug: amazonlinux-2 @@ -206,7 +249,6 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: release skip-code-coverage: true - artifacts-from-workflow: staging.yml secrets: inherit centos-7-pkg-download-tests: @@ -215,6 +257,7 @@ jobs: needs: - prepare-workflow - publish-repositories + - download-onedir-artifact uses: ./.github/workflows/test-package-downloads-action-linux.yml with: distro-slug: centos-7 @@ -224,7 +267,6 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: release skip-code-coverage: true - artifacts-from-workflow: staging.yml secrets: inherit centosstream-8-pkg-download-tests: @@ -233,6 +275,7 @@ jobs: needs: - prepare-workflow - publish-repositories + - download-onedir-artifact uses: ./.github/workflows/test-package-downloads-action-linux.yml with: distro-slug: centosstream-8 @@ -242,7 +285,6 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: release skip-code-coverage: true - artifacts-from-workflow: staging.yml secrets: inherit centosstream-9-pkg-download-tests: @@ -251,6 +293,7 @@ jobs: needs: - prepare-workflow - publish-repositories + - download-onedir-artifact uses: ./.github/workflows/test-package-downloads-action-linux.yml with: distro-slug: centosstream-9 @@ -260,7 +303,6 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: release skip-code-coverage: true - artifacts-from-workflow: staging.yml secrets: inherit debian-10-pkg-download-tests: @@ -269,6 +311,7 @@ jobs: needs: - prepare-workflow - publish-repositories + - download-onedir-artifact uses: ./.github/workflows/test-package-downloads-action-linux.yml with: distro-slug: debian-10 @@ -278,7 +321,6 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: release skip-code-coverage: true - artifacts-from-workflow: staging.yml secrets: inherit debian-11-pkg-download-tests: @@ -287,6 +329,7 @@ jobs: needs: - prepare-workflow - publish-repositories + - download-onedir-artifact uses: ./.github/workflows/test-package-downloads-action-linux.yml with: distro-slug: debian-11 @@ -296,7 +339,6 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: release skip-code-coverage: true - artifacts-from-workflow: staging.yml secrets: inherit debian-11-arm64-pkg-download-tests: @@ -305,6 +347,7 @@ jobs: needs: - prepare-workflow - publish-repositories + - download-onedir-artifact uses: ./.github/workflows/test-package-downloads-action-linux.yml with: distro-slug: debian-11-arm64 @@ -314,7 +357,6 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: release skip-code-coverage: true - artifacts-from-workflow: staging.yml secrets: inherit fedora-36-pkg-download-tests: @@ -323,6 +365,7 @@ jobs: needs: - prepare-workflow - publish-repositories + - download-onedir-artifact uses: ./.github/workflows/test-package-downloads-action-linux.yml with: distro-slug: fedora-36 @@ -332,7 +375,6 @@ jobs: 
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: release skip-code-coverage: true - artifacts-from-workflow: staging.yml secrets: inherit fedora-37-pkg-download-tests: @@ -341,6 +383,7 @@ jobs: needs: - prepare-workflow - publish-repositories + - download-onedir-artifact uses: ./.github/workflows/test-package-downloads-action-linux.yml with: distro-slug: fedora-37 @@ -350,7 +393,6 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: release skip-code-coverage: true - artifacts-from-workflow: staging.yml secrets: inherit fedora-38-pkg-download-tests: @@ -359,6 +401,7 @@ jobs: needs: - prepare-workflow - publish-repositories + - download-onedir-artifact uses: ./.github/workflows/test-package-downloads-action-linux.yml with: distro-slug: fedora-38 @@ -368,7 +411,6 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: release skip-code-coverage: true - artifacts-from-workflow: staging.yml secrets: inherit photonos-3-pkg-download-tests: @@ -377,6 +419,7 @@ jobs: needs: - prepare-workflow - publish-repositories + - download-onedir-artifact uses: ./.github/workflows/test-package-downloads-action-linux.yml with: distro-slug: photonos-3 @@ -386,7 +429,6 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: release skip-code-coverage: true - artifacts-from-workflow: staging.yml secrets: inherit photonos-4-pkg-download-tests: @@ -395,6 +437,7 @@ jobs: needs: - prepare-workflow - publish-repositories + - download-onedir-artifact uses: ./.github/workflows/test-package-downloads-action-linux.yml with: distro-slug: photonos-4 @@ -404,7 +447,6 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: release skip-code-coverage: true - artifacts-from-workflow: staging.yml secrets: inherit ubuntu-2004-pkg-download-tests: @@ -413,6 +455,7 @@ jobs: needs: - prepare-workflow - publish-repositories + - download-onedir-artifact uses: ./.github/workflows/test-package-downloads-action-linux.yml with: distro-slug: ubuntu-20.04 @@ -422,7 +465,6 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: release skip-code-coverage: true - artifacts-from-workflow: staging.yml secrets: inherit ubuntu-2004-arm64-pkg-download-tests: @@ -431,6 +473,7 @@ jobs: needs: - prepare-workflow - publish-repositories + - download-onedir-artifact uses: ./.github/workflows/test-package-downloads-action-linux.yml with: distro-slug: ubuntu-20.04-arm64 @@ -440,7 +483,6 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: release skip-code-coverage: true - artifacts-from-workflow: staging.yml secrets: inherit ubuntu-2204-pkg-download-tests: @@ -449,6 +491,7 @@ jobs: needs: - prepare-workflow - publish-repositories + - download-onedir-artifact uses: ./.github/workflows/test-package-downloads-action-linux.yml with: distro-slug: ubuntu-22.04 @@ -458,7 +501,6 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: release skip-code-coverage: true - artifacts-from-workflow: staging.yml secrets: inherit ubuntu-2204-arm64-pkg-download-tests: @@ -467,6 +509,7 @@ jobs: needs: - prepare-workflow - publish-repositories + - download-onedir-artifact uses: ./.github/workflows/test-package-downloads-action-linux.yml with: distro-slug: ubuntu-22.04-arm64 @@ -476,7 +519,6 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: release skip-code-coverage: true - 
artifacts-from-workflow: staging.yml secrets: inherit macos-12-pkg-download-tests: @@ -485,6 +527,7 @@ jobs: needs: - prepare-workflow - publish-repositories + - download-onedir-artifact uses: ./.github/workflows/test-package-downloads-action-macos.yml with: distro-slug: macos-12 @@ -494,7 +537,6 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: release skip-code-coverage: true - artifacts-from-workflow: staging.yml secrets: inherit windows-2022-nsis-pkg-download-tests: @@ -503,6 +545,7 @@ jobs: needs: - prepare-workflow - publish-repositories + - download-onedir-artifact uses: ./.github/workflows/test-package-downloads-action-windows.yml with: distro-slug: windows-2022 @@ -513,7 +556,6 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: release skip-code-coverage: true - artifacts-from-workflow: staging.yml secrets: inherit windows-2022-msi-pkg-download-tests: @@ -522,6 +564,7 @@ jobs: needs: - prepare-workflow - publish-repositories + - download-onedir-artifact uses: ./.github/workflows/test-package-downloads-action-windows.yml with: distro-slug: windows-2022 @@ -532,7 +575,6 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: release skip-code-coverage: true - artifacts-from-workflow: staging.yml secrets: inherit release: diff --git a/.github/workflows/templates/release.yml.jinja b/.github/workflows/templates/release.yml.jinja index 73852ffefba..ed343a6d66a 100644 --- a/.github/workflows/templates/release.yml.jinja +++ b/.github/workflows/templates/release.yml.jinja @@ -132,6 +132,48 @@ permissions: <%- block jobs %> <{- super() }> + download-onedir-artifact: + name: Download Staging Onedir Artifact + if: ${{ inputs.skip-salt-pkg-download-test-suite == false }} + runs-on: + - self-hosted + - linux + environment: <{ gh_environment }> + needs: + - prepare-workflow + strategy: + fail-fast: false + matrix: + include: + - platform: linux + arch: x86_64 + - platform: linux + arch: aarch64 + - platform: windows + arch: amd64 + - platform: windows + arch: x86 + - platform: darwin + arch: x86_64 + steps: + - name: Download Onedir Tarball as an Artifact(from a different workflow) + uses: dawidd6/action-download-artifact@v2 + with: + workflow: staging.yml + workflow_conclusion: "" + branch: ${{ github.event.ref }} + if_no_artifact_found: fail + name: salt-${{ inputs.salt-version }}-onedir-${{ matrix.platform }}-${{ matrix.arch }}.tar.xz + path: artifacts/ + + - name: Upload Onedir Tarball as an Artifact + uses: actions/upload-artifact@v3 + with: + name: salt-${{ inputs.salt-version }}-onedir-${{ matrix.platform }}-${{ matrix.arch }}.tar.xz + path: artifacts/salt-${{ inputs.salt-version }}-onedir-${{ matrix.platform }}-${{ matrix.arch }}.tar.xz* + retention-days: 7 + if-no-files-found: error + backup: name: Backup runs-on: diff --git a/.github/workflows/templates/test-pkg-repo-downloads.yml.jinja b/.github/workflows/templates/test-pkg-repo-downloads.yml.jinja index 7ebfa4e4a07..e7e964c5ce2 100644 --- a/.github/workflows/templates/test-pkg-repo-downloads.yml.jinja +++ b/.github/workflows/templates/test-pkg-repo-downloads.yml.jinja @@ -35,6 +35,9 @@ needs: - prepare-workflow - publish-repositories + <%- if gh_environment == "release" %> + - download-onedir-artifact + <%- endif %> uses: ./.github/workflows/test-package-downloads-action-linux.yml with: distro-slug: <{ slug }> @@ -44,9 +47,6 @@ salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: <{ gh_environment 
}> skip-code-coverage: true - <%- if gh_environment == "release" %> - artifacts-from-workflow: staging.yml - <%- endif %> secrets: inherit <%- endfor %> @@ -66,6 +66,9 @@ needs: - prepare-workflow - publish-repositories + <%- if gh_environment == "release" %> + - download-onedir-artifact + <%- endif %> uses: ./.github/workflows/test-package-downloads-action-macos.yml with: distro-slug: <{ slug }> @@ -75,9 +78,6 @@ salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: <{ gh_environment }> skip-code-coverage: true - <%- if gh_environment == "release" %> - artifacts-from-workflow: staging.yml - <%- endif %> secrets: inherit <%- endfor %> @@ -99,6 +99,9 @@ needs: - prepare-workflow - publish-repositories + <%- if gh_environment == "release" %> + - download-onedir-artifact + <%- endif %> uses: ./.github/workflows/test-package-downloads-action-windows.yml with: distro-slug: <{ slug }> @@ -109,9 +112,6 @@ salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: <{ gh_environment }> skip-code-coverage: true - <%- if gh_environment == "release" %> - artifacts-from-workflow: staging.yml - <%- endif %> secrets: inherit <%- endfor %> <%- endfor %> diff --git a/.github/workflows/test-package-downloads-action-linux.yml b/.github/workflows/test-package-downloads-action-linux.yml index 434ec77f54b..99bd06c08a1 100644 --- a/.github/workflows/test-package-downloads-action-linux.yml +++ b/.github/workflows/test-package-downloads-action-linux.yml @@ -47,13 +47,6 @@ on: type: string description: The nox session to run default: test-pkgs-onedir - artifacts-from-workflow: - required: false - type: string - description: > - Which workflow to download artifacts from. An empty string means the - current workflow run. - default: "" env: @@ -85,23 +78,11 @@ jobs: key: ${{ inputs.cache-prefix }}|test-pkg-download-deps|${{ inputs.arch }}|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json') }} - name: Download Onedir Tarball as an Artifact - if: inputs.artifacts-from-workflow == '' uses: actions/download-artifact@v3 with: name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz path: artifacts/ - - name: Download Onedir Tarball as an Artifact(from a different workflow) - if: inputs.artifacts-from-workflow != '' - uses: dawidd6/action-download-artifact@v2 - with: - workflow: ${{ inputs.artifacts-from-workflow }} - workflow_conclusion: "" - branch: ${{ github.event.ref }} - if_no_artifact_found: fail - name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz - path: artifacts/ - - name: Decompress Onedir Tarball if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' shell: bash @@ -162,18 +143,10 @@ jobs: run: | tools --timestamps vm destroy --no-wait ${{ inputs.distro-slug }} - - name: Upload Onedir Tarball as an Artifact - uses: actions/upload-artifact@v3 - with: - name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz - path: artifacts/${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz* - retention-days: 7 - if-no-files-found: error - - name: Upload Nox Requirements Tarball uses: actions/upload-artifact@v3 with: - name: nox-${{ inputs.distro-slug }}-${{ inputs.nox-session }} + name: nox-${{ inputs.distro-slug }}-${{ inputs.nox-session }}-download path: nox.${{ 
inputs.distro-slug }}.tar.* test: diff --git a/.github/workflows/test-package-downloads-action-macos.yml b/.github/workflows/test-package-downloads-action-macos.yml index b576eebe011..80f2a9210c6 100644 --- a/.github/workflows/test-package-downloads-action-macos.yml +++ b/.github/workflows/test-package-downloads-action-macos.yml @@ -47,13 +47,6 @@ on: type: string description: The nox session to run default: test-pkgs-onedir - artifacts-from-workflow: - required: false - type: string - description: > - Which workflow to download artifacts from. An empty string means the - current workflow run. - default: "" env: @@ -82,23 +75,11 @@ jobs: key: ${{ inputs.cache-prefix }}|test-pkg-download-deps|${{ inputs.arch }}|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json') }} - name: Download Onedir Tarball as an Artifact - if: inputs.artifacts-from-workflow == '' uses: actions/download-artifact@v3 with: name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz path: artifacts/ - - name: Download Onedir Tarball as an Artifact(from a different workflow) - if: inputs.artifacts-from-workflow != '' - uses: dawidd6/action-download-artifact@v2 - with: - workflow: ${{ inputs.artifacts-from-workflow }} - workflow_conclusion: "" - branch: ${{ github.event.ref }} - if_no_artifact_found: fail - name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz - path: artifacts/ - - name: Decompress Onedir Tarball if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' shell: bash @@ -146,18 +127,10 @@ jobs: run: | nox --force-color -e compress-dependencies -- ${{ inputs.distro-slug }} - - name: Upload Onedir Tarball as an Artifact - uses: actions/upload-artifact@v3 - with: - name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz - path: artifacts/${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz* - retention-days: 7 - if-no-files-found: error - - name: Upload Nox Requirements Tarball uses: actions/upload-artifact@v3 with: - name: nox-${{ inputs.distro-slug }}-${{ inputs.nox-session }} + name: nox-${{ inputs.distro-slug }}-${{ inputs.nox-session }}-download path: nox.${{ inputs.distro-slug }}.tar.* test: @@ -176,7 +149,7 @@ jobs: uses: actions/download-artifact@v3 with: name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz - path: artifacts + path: artifacts/ - name: Install System Dependencies run: | diff --git a/.github/workflows/test-package-downloads-action-windows.yml b/.github/workflows/test-package-downloads-action-windows.yml index cd8d73de874..f4c0a9165e2 100644 --- a/.github/workflows/test-package-downloads-action-windows.yml +++ b/.github/workflows/test-package-downloads-action-windows.yml @@ -56,13 +56,6 @@ on: type: boolean description: Skip Publishing JUnit Reports default: false - artifacts-from-workflow: - required: false - type: string - description: > - Which workflow to download artifacts from. An empty string means the - current workflow run. 
- default: "" env: @@ -94,23 +87,11 @@ jobs: key: ${{ inputs.cache-prefix }}|test-pkg-download-deps|${{ inputs.arch }}|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json') }} - name: Download Onedir Tarball as an Artifact - if: inputs.artifacts-from-workflow == '' uses: actions/download-artifact@v3 with: name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz path: artifacts/ - - name: Download Onedir Tarball as an Artifact(from a different workflow) - if: inputs.artifacts-from-workflow != '' - uses: dawidd6/action-download-artifact@v2 - with: - workflow: ${{ inputs.artifacts-from-workflow }} - workflow_conclusion: "" - branch: ${{ github.event.ref }} - if_no_artifact_found: fail - name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz - path: artifacts/ - - name: Decompress Onedir Tarball if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' shell: bash @@ -171,18 +152,10 @@ jobs: run: | tools --timestamps vm destroy --no-wait ${{ inputs.distro-slug }} - - name: Upload Onedir Tarball as an Artifact - uses: actions/upload-artifact@v3 - with: - name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz - path: artifacts/${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz* - retention-days: 7 - if-no-files-found: error - - name: Upload Nox Requirements Tarball uses: actions/upload-artifact@v3 with: - name: nox-${{ inputs.distro-slug }}-${{ inputs.nox-session }} + name: nox-${{ inputs.distro-slug }}-${{ inputs.nox-session }}-download path: nox.${{ inputs.distro-slug }}.tar.* test: From 7e5d9d92517b3b58ec7e7c2874e38d4681323fb0 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Mon, 17 Apr 2023 14:07:05 +0100 Subject: [PATCH 014/121] Distinguish nox upload artifact names Signed-off-by: Pedro Algarvio --- .github/workflows/test-packages-action.yml | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/.github/workflows/test-packages-action.yml b/.github/workflows/test-packages-action.yml index d167eda634e..b73329fa26d 100644 --- a/.github/workflows/test-packages-action.yml +++ b/.github/workflows/test-packages-action.yml @@ -166,10 +166,19 @@ jobs: run: | tools --timestamps vm destroy --no-wait ${{ inputs.distro-slug }} + - name: Define Nox Upload Artifact Name + id: nox-artifact-name + run: | + if [ "${{ contains(inputs.distro-slug, 'windows') }}" != "true" ]; then + echo "name=nox-${{ inputs.distro-slug }}-${{ inputs.nox-session }}" >> "${GITHUB_OUTPUT}" + else + echo "name=nox-${{ inputs.distro-slug }}-${{ inputs.pkg-type }}-${{ inputs.nox-session }}" >> "${GITHUB_OUTPUT}" + fi + - name: Upload Nox Requirements Tarball uses: actions/upload-artifact@v3 with: - name: nox-${{ inputs.distro-slug }}-${{ inputs.nox-session }} + name: ${{ steps.nox-artifact-name.outputs.name }} path: nox.${{ inputs.distro-slug }}.tar.* test: From 76026d0c1e9e5a54ad153f7e24cb64c104bc4636 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Mon, 17 Apr 2023 19:33:51 +0100 Subject: [PATCH 015/121] Use the tests downloads path Signed-off-by: Pedro Algarvio --- pkg/tests/download/test_pkg_download.py | 27 ++++++++++++++++--------- 1 file changed, 18 insertions(+), 9 deletions(-) diff --git a/pkg/tests/download/test_pkg_download.py b/pkg/tests/download/test_pkg_download.py index 
aa13f39068d..ae6bbcad975 100644 --- a/pkg/tests/download/test_pkg_download.py +++ b/pkg/tests/download/test_pkg_download.py @@ -113,9 +113,19 @@ def setup_system(tmp_path_factory, grains, shell, root_url, salt_release, gpg_ke downloads_path = tmp_path_factory.mktemp("downloads") try: if grains["os_family"] == "Windows": - setup_windows(shell, root_url=root_url, salt_release=salt_release) + setup_windows( + shell, + root_url=root_url, + salt_release=salt_release, + downloads_path=downloads_path, + ) elif grains["os_family"] == "MacOS": - setup_macos(shell, root_url=root_url, salt_release=salt_release) + setup_macos( + shell, + root_url=root_url, + salt_release=salt_release, + downloads_path=downloads_path, + ) elif grains["os"] == "Amazon": setup_redhat_family( shell, @@ -291,7 +301,7 @@ def setup_debian_family( pytest.fail(str(ret)) -def setup_macos(shell, root_url, salt_release): +def setup_macos(shell, root_url, salt_release, downloads_path): arch = os.environ.get("SALT_REPO_ARCH") or "x86_64" if arch == "aarch64": @@ -304,13 +314,13 @@ def setup_macos(shell, root_url, salt_release): mac_pkg_url = f"{root_url}/macos/{salt_release}/{mac_pkg}" mac_pkg = f"salt-{salt_release}-macos-{arch}.pkg" - mac_pkg_path = f"/tmp/{mac_pkg}" - pytest.helpers.download_file(mac_pkg_url, f"/tmp/{mac_pkg}") + mac_pkg_path = downloads_path / mac_pkg + pytest.helpers.download_file(mac_pkg_url, mac_pkg_path) ret = shell.run( "installer", "-pkg", - mac_pkg_path, + str(mac_pkg_path), "-target", "/", check=False, @@ -320,7 +330,7 @@ def setup_macos(shell, root_url, salt_release): yield -def setup_windows(shell, root_url, salt_release): +def setup_windows(shell, root_url, salt_release, downloads_path): root_dir = pathlib.Path(r"C:\Program Files\Salt Project\Salt") @@ -342,8 +352,7 @@ def setup_windows(shell, root_url, salt_release): win_pkg_url = f"{root_url}/windows/{salt_release}/{win_pkg}" ssm_bin = root_dir / "bin" / "ssm_bin" - pkg_path = pathlib.Path(r"C:\TEMP", win_pkg) - pkg_path.parent.mkdir(exist_ok=True) + pkg_path = downloads_path / win_pkg pytest.helpers.download_file(win_pkg_url, pkg_path) if install_type.lower() == "nsis": From 15a1e7b7852f384a32b3c304747219abdddfc139 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Tue, 18 Apr 2023 10:30:55 +0100 Subject: [PATCH 016/121] Update `PATH` to include the directory where the salt CLI scripts are Signed-off-by: Pedro Algarvio --- pkg/tests/download/test_pkg_download.py | 38 +++++++++++++++++++------ 1 file changed, 30 insertions(+), 8 deletions(-) diff --git a/pkg/tests/download/test_pkg_download.py b/pkg/tests/download/test_pkg_download.py index ae6bbcad975..aa99e0d5dd5 100644 --- a/pkg/tests/download/test_pkg_download.py +++ b/pkg/tests/download/test_pkg_download.py @@ -109,7 +109,9 @@ def salt_release(): @pytest.fixture(scope="module") -def setup_system(tmp_path_factory, grains, shell, root_url, salt_release, gpg_key_name): +def _setup_system( + tmp_path_factory, grains, shell, root_url, salt_release, gpg_key_name +): downloads_path = tmp_path_factory.mktemp("downloads") try: if grains["os_family"] == "Windows": @@ -368,15 +370,35 @@ def setup_windows(shell, root_url, salt_release, downloads_path): assert ret.returncode == 0, ret -@pytest.mark.usefixtures("setup_system") +@pytest.fixture(scope="module") +def environ(_setup_system): + env = os.environ.copy() + if platform.is_windows(): + install_dir = pathlib.Path( + os.getenv("ProgramFiles"), "Salt Project", "Salt" + ).resolve() + elif platform.is_darwin(): + install_dir = pathlib.Path("/opt", 
"salt") + else: + install_dir = pathlib.Path("/opt", "saltstack", "salt") + + # Get the defined PATH environment variable + path = os.environ.get("PATH") + if path is not None: + path_items = path.split(os.pathsep) + else: + path_items = [] + path_items.insert(0, str(install_dir)) + + # Set the PATH environment variable + env["PATH"] = os.pathsep.join(path_items) + return env + + @pytest.mark.parametrize("salt_test_command", get_salt_test_commands()) -def test_download(shell, grains, salt_test_command): +def test_download(shell, environ, salt_test_command): """ Test downloading of Salt packages and running various commands. """ - _cmd = salt_test_command.split() - if grains["os_family"] == "Windows": - root_dir = pathlib.Path(r"C:\Program Files\Salt Project\Salt") - _cmd[0] = str(root_dir / _cmd[0]) - ret = shell.run(*_cmd, check=False) + ret = shell.run(*salt_test_command.split(), env=environ, check=False) assert ret.returncode == 0, ret From 90622b6248b5fcbe910f7861b5124c61299f521e Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Tue, 18 Apr 2023 21:58:27 +0100 Subject: [PATCH 017/121] Fix logic in release workflow `if:` steps to allow skipping and still publish release Signed-off-by: Pedro Algarvio --- .github/workflows/release.yml | 9 +++------ .github/workflows/templates/release.yml.jinja | 9 +++------ 2 files changed, 6 insertions(+), 12 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 3c6fe2a2063..1407bb475d6 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -579,6 +579,7 @@ jobs: release: name: Release v${{ needs.prepare-workflow.outputs.salt-version }} + if: ${{ always() && ! failure() && ! cancelled() }} runs-on: - self-hosted - linux @@ -706,7 +707,7 @@ jobs: restore: name: Restore Release Bucket From Backup - if: ${{ always() }} + if: ${{ failure() || cancelled() }} runs-on: - self-hosted - linux @@ -716,29 +717,25 @@ jobs: environment: release steps: - name: Clone The Salt Repository - if: ${{ failure() || cancelled() }} uses: actions/checkout@v3 with: ssh-key: ${{ secrets.GHA_SSH_KEY }} - name: Setup Rclone - if: ${{ failure() || cancelled() }} uses: AnimMouse/setup-rclone@v1 with: version: v1.61.1 - name: Setup Python Tools Scripts - if: ${{ failure() || cancelled() }} uses: ./.github/actions/setup-python-tools-scripts - name: Restore Release Bucket - if: ${{ failure() || cancelled() }} run: | tools pkg repo restore-previous-releases publish-pypi: name: Publish to PyPi - if: ${{ github.event.repository.fork != true }} + if: ${{ always() && ! failure() && ! cancelled() && github.event.repository.fork != true }} needs: - prepare-workflow - release diff --git a/.github/workflows/templates/release.yml.jinja b/.github/workflows/templates/release.yml.jinja index ed343a6d66a..2bcfb8d14ac 100644 --- a/.github/workflows/templates/release.yml.jinja +++ b/.github/workflows/templates/release.yml.jinja @@ -239,6 +239,7 @@ permissions: release: <%- do conclusion_needs.append('release') %> name: Release v${{ needs.prepare-workflow.outputs.salt-version }} + if: ${{ always() && ! failure() && ! 
cancelled() }} runs-on: - self-hosted - linux @@ -349,7 +350,7 @@ permissions: restore: <%- do conclusion_needs.append('restore') %> name: Restore Release Bucket From Backup - if: ${{ always() }} + if: ${{ failure() || cancelled() }} runs-on: - self-hosted - linux @@ -362,30 +363,26 @@ permissions: environment: <{ gh_environment }> steps: - name: Clone The Salt Repository - if: ${{ failure() || cancelled() }} uses: actions/checkout@v3 with: ssh-key: ${{ secrets.GHA_SSH_KEY }} - name: Setup Rclone - if: ${{ failure() || cancelled() }} uses: AnimMouse/setup-rclone@v1 with: version: v1.61.1 - name: Setup Python Tools Scripts - if: ${{ failure() || cancelled() }} uses: ./.github/actions/setup-python-tools-scripts - name: Restore Release Bucket - if: ${{ failure() || cancelled() }} run: | tools pkg repo restore-previous-releases publish-pypi: <%- do conclusion_needs.append('publish-pypi') %> name: Publish to PyPi - if: ${{ github.event.repository.fork != true }} + if: ${{ always() && ! failure() && ! cancelled() && github.event.repository.fork != true }} needs: - prepare-workflow - release From 8ca57f465f0a81d7cd85522bedec64cce85e196c Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Tue, 18 Apr 2023 22:50:57 +0100 Subject: [PATCH 018/121] Package download tests on Arm64 too Signed-off-by: Pedro Algarvio --- .github/workflows/release.yml | 171 ++++++++++++++++++ .github/workflows/staging.yml | 162 +++++++++++++++++ .../test-pkg-repo-downloads.yml.jinja | 9 + 3 files changed, 342 insertions(+) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 1407bb475d6..262820ce8f6 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -215,6 +215,24 @@ jobs: skip-code-coverage: true secrets: inherit + almalinux-8-arm64-pkg-download-tests: + name: Test Alma Linux 8 Arm64 Package Downloads + if: ${{ inputs.skip-salt-pkg-download-test-suite == false }} + needs: + - prepare-workflow + - publish-repositories + - download-onedir-artifact + uses: ./.github/workflows/test-package-downloads-action-linux.yml + with: + distro-slug: almalinux-8-arm64 + platform: linux + arch: aarch64 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + environment: release + skip-code-coverage: true + secrets: inherit + almalinux-9-pkg-download-tests: name: Test Alma Linux 9 Package Downloads if: ${{ inputs.skip-salt-pkg-download-test-suite == false }} @@ -233,6 +251,24 @@ jobs: skip-code-coverage: true secrets: inherit + almalinux-9-arm64-pkg-download-tests: + name: Test Alma Linux 9 Arm64 Package Downloads + if: ${{ inputs.skip-salt-pkg-download-test-suite == false }} + needs: + - prepare-workflow + - publish-repositories + - download-onedir-artifact + uses: ./.github/workflows/test-package-downloads-action-linux.yml + with: + distro-slug: almalinux-9-arm64 + platform: linux + arch: aarch64 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + environment: release + skip-code-coverage: true + secrets: inherit + amazonlinux-2-pkg-download-tests: name: Test Amazon Linux 2 Package Downloads if: ${{ inputs.skip-salt-pkg-download-test-suite == false }} @@ -251,6 +287,24 @@ jobs: skip-code-coverage: true secrets: inherit + amazonlinux-2-arm64-pkg-download-tests: + name: Test Amazon Linux 2 Arm64 Package Downloads + if: ${{ inputs.skip-salt-pkg-download-test-suite == false }} + needs: + - prepare-workflow 
+ - publish-repositories + - download-onedir-artifact + uses: ./.github/workflows/test-package-downloads-action-linux.yml + with: + distro-slug: amazonlinux-2-arm64 + platform: linux + arch: aarch64 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + environment: release + skip-code-coverage: true + secrets: inherit + centos-7-pkg-download-tests: name: Test CentOS 7 Package Downloads if: ${{ inputs.skip-salt-pkg-download-test-suite == false }} @@ -269,6 +323,24 @@ jobs: skip-code-coverage: true secrets: inherit + centos-7-arm64-pkg-download-tests: + name: Test CentOS 7 Arm64 Package Downloads + if: ${{ inputs.skip-salt-pkg-download-test-suite == false }} + needs: + - prepare-workflow + - publish-repositories + - download-onedir-artifact + uses: ./.github/workflows/test-package-downloads-action-linux.yml + with: + distro-slug: centos-7-arm64 + platform: linux + arch: aarch64 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + environment: release + skip-code-coverage: true + secrets: inherit + centosstream-8-pkg-download-tests: name: Test CentOS Stream 8 Package Downloads if: ${{ inputs.skip-salt-pkg-download-test-suite == false }} @@ -287,6 +359,24 @@ jobs: skip-code-coverage: true secrets: inherit + centosstream-8-arm64-pkg-download-tests: + name: Test CentOS Stream 8 Arm64 Package Downloads + if: ${{ inputs.skip-salt-pkg-download-test-suite == false }} + needs: + - prepare-workflow + - publish-repositories + - download-onedir-artifact + uses: ./.github/workflows/test-package-downloads-action-linux.yml + with: + distro-slug: centosstream-8-arm64 + platform: linux + arch: aarch64 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + environment: release + skip-code-coverage: true + secrets: inherit + centosstream-9-pkg-download-tests: name: Test CentOS Stream 9 Package Downloads if: ${{ inputs.skip-salt-pkg-download-test-suite == false }} @@ -305,6 +395,24 @@ jobs: skip-code-coverage: true secrets: inherit + centosstream-9-arm64-pkg-download-tests: + name: Test CentOS Stream 9 Arm64 Package Downloads + if: ${{ inputs.skip-salt-pkg-download-test-suite == false }} + needs: + - prepare-workflow + - publish-repositories + - download-onedir-artifact + uses: ./.github/workflows/test-package-downloads-action-linux.yml + with: + distro-slug: centosstream-9-arm64 + platform: linux + arch: aarch64 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + environment: release + skip-code-coverage: true + secrets: inherit + debian-10-pkg-download-tests: name: Test Debian 10 Package Downloads if: ${{ inputs.skip-salt-pkg-download-test-suite == false }} @@ -377,6 +485,24 @@ jobs: skip-code-coverage: true secrets: inherit + fedora-36-arm64-pkg-download-tests: + name: Test Fedora 36 Arm64 Package Downloads + if: ${{ inputs.skip-salt-pkg-download-test-suite == false }} + needs: + - prepare-workflow + - publish-repositories + - download-onedir-artifact + uses: ./.github/workflows/test-package-downloads-action-linux.yml + with: + distro-slug: fedora-36-arm64 + platform: linux + arch: aarch64 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + environment: release + 
skip-code-coverage: true + secrets: inherit + fedora-37-pkg-download-tests: name: Test Fedora 37 Package Downloads if: ${{ inputs.skip-salt-pkg-download-test-suite == false }} @@ -395,6 +521,24 @@ jobs: skip-code-coverage: true secrets: inherit + fedora-37-arm64-pkg-download-tests: + name: Test Fedora 37 Arm64 Package Downloads + if: ${{ inputs.skip-salt-pkg-download-test-suite == false }} + needs: + - prepare-workflow + - publish-repositories + - download-onedir-artifact + uses: ./.github/workflows/test-package-downloads-action-linux.yml + with: + distro-slug: fedora-37-arm64 + platform: linux + arch: aarch64 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + environment: release + skip-code-coverage: true + secrets: inherit + fedora-38-pkg-download-tests: name: Test Fedora 38 Package Downloads if: ${{ inputs.skip-salt-pkg-download-test-suite == false }} @@ -413,6 +557,24 @@ jobs: skip-code-coverage: true secrets: inherit + fedora-38-arm64-pkg-download-tests: + name: Test Fedora 38 Arm64 Package Downloads + if: ${{ inputs.skip-salt-pkg-download-test-suite == false }} + needs: + - prepare-workflow + - publish-repositories + - download-onedir-artifact + uses: ./.github/workflows/test-package-downloads-action-linux.yml + with: + distro-slug: fedora-38-arm64 + platform: linux + arch: aarch64 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + environment: release + skip-code-coverage: true + secrets: inherit + photonos-3-pkg-download-tests: name: Test Photon OS 3 Package Downloads if: ${{ inputs.skip-salt-pkg-download-test-suite == false }} @@ -589,17 +751,26 @@ jobs: - backup - publish-repositories - almalinux-8-pkg-download-tests + - almalinux-8-arm64-pkg-download-tests - almalinux-9-pkg-download-tests + - almalinux-9-arm64-pkg-download-tests - amazonlinux-2-pkg-download-tests + - amazonlinux-2-arm64-pkg-download-tests - centos-7-pkg-download-tests + - centos-7-arm64-pkg-download-tests - centosstream-8-pkg-download-tests + - centosstream-8-arm64-pkg-download-tests - centosstream-9-pkg-download-tests + - centosstream-9-arm64-pkg-download-tests - debian-10-pkg-download-tests - debian-11-pkg-download-tests - debian-11-arm64-pkg-download-tests - fedora-36-pkg-download-tests + - fedora-36-arm64-pkg-download-tests - fedora-37-pkg-download-tests + - fedora-37-arm64-pkg-download-tests - fedora-38-pkg-download-tests + - fedora-38-arm64-pkg-download-tests - photonos-3-pkg-download-tests - photonos-4-pkg-download-tests - ubuntu-2004-pkg-download-tests diff --git a/.github/workflows/staging.yml b/.github/workflows/staging.yml index 1f6539be512..ea1912e6e0f 100644 --- a/.github/workflows/staging.yml +++ b/.github/workflows/staging.yml @@ -2101,6 +2101,23 @@ jobs: skip-code-coverage: true secrets: inherit + almalinux-8-arm64-pkg-download-tests: + name: Test Alma Linux 8 Arm64 Package Downloads + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - publish-repositories + uses: ./.github/workflows/test-package-downloads-action-linux.yml + with: + distro-slug: almalinux-8-arm64 + platform: linux + arch: aarch64 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + environment: staging + skip-code-coverage: true + 
secrets: inherit + almalinux-9-pkg-download-tests: name: Test Alma Linux 9 Package Downloads if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} @@ -2118,6 +2135,23 @@ jobs: skip-code-coverage: true secrets: inherit + almalinux-9-arm64-pkg-download-tests: + name: Test Alma Linux 9 Arm64 Package Downloads + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - publish-repositories + uses: ./.github/workflows/test-package-downloads-action-linux.yml + with: + distro-slug: almalinux-9-arm64 + platform: linux + arch: aarch64 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + environment: staging + skip-code-coverage: true + secrets: inherit + amazonlinux-2-pkg-download-tests: name: Test Amazon Linux 2 Package Downloads if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} @@ -2135,6 +2169,23 @@ jobs: skip-code-coverage: true secrets: inherit + amazonlinux-2-arm64-pkg-download-tests: + name: Test Amazon Linux 2 Arm64 Package Downloads + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - publish-repositories + uses: ./.github/workflows/test-package-downloads-action-linux.yml + with: + distro-slug: amazonlinux-2-arm64 + platform: linux + arch: aarch64 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + environment: staging + skip-code-coverage: true + secrets: inherit + centos-7-pkg-download-tests: name: Test CentOS 7 Package Downloads if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} @@ -2152,6 +2203,23 @@ jobs: skip-code-coverage: true secrets: inherit + centos-7-arm64-pkg-download-tests: + name: Test CentOS 7 Arm64 Package Downloads + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - publish-repositories + uses: ./.github/workflows/test-package-downloads-action-linux.yml + with: + distro-slug: centos-7-arm64 + platform: linux + arch: aarch64 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + environment: staging + skip-code-coverage: true + secrets: inherit + centosstream-8-pkg-download-tests: name: Test CentOS Stream 8 Package Downloads if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} @@ -2169,6 +2237,23 @@ jobs: skip-code-coverage: true secrets: inherit + centosstream-8-arm64-pkg-download-tests: + name: Test CentOS Stream 8 Arm64 Package Downloads + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - publish-repositories + uses: ./.github/workflows/test-package-downloads-action-linux.yml + with: + distro-slug: centosstream-8-arm64 + platform: linux + 
arch: aarch64 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + environment: staging + skip-code-coverage: true + secrets: inherit + centosstream-9-pkg-download-tests: name: Test CentOS Stream 9 Package Downloads if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} @@ -2186,6 +2271,23 @@ jobs: skip-code-coverage: true secrets: inherit + centosstream-9-arm64-pkg-download-tests: + name: Test CentOS Stream 9 Arm64 Package Downloads + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - publish-repositories + uses: ./.github/workflows/test-package-downloads-action-linux.yml + with: + distro-slug: centosstream-9-arm64 + platform: linux + arch: aarch64 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + environment: staging + skip-code-coverage: true + secrets: inherit + debian-10-pkg-download-tests: name: Test Debian 10 Package Downloads if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} @@ -2254,6 +2356,23 @@ jobs: skip-code-coverage: true secrets: inherit + fedora-36-arm64-pkg-download-tests: + name: Test Fedora 36 Arm64 Package Downloads + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - publish-repositories + uses: ./.github/workflows/test-package-downloads-action-linux.yml + with: + distro-slug: fedora-36-arm64 + platform: linux + arch: aarch64 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + environment: staging + skip-code-coverage: true + secrets: inherit + fedora-37-pkg-download-tests: name: Test Fedora 37 Package Downloads if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} @@ -2271,6 +2390,23 @@ jobs: skip-code-coverage: true secrets: inherit + fedora-37-arm64-pkg-download-tests: + name: Test Fedora 37 Arm64 Package Downloads + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - publish-repositories + uses: ./.github/workflows/test-package-downloads-action-linux.yml + with: + distro-slug: fedora-37-arm64 + platform: linux + arch: aarch64 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + environment: staging + skip-code-coverage: true + secrets: inherit + fedora-38-pkg-download-tests: name: Test Fedora 38 Package Downloads if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} @@ -2288,6 +2424,23 @@ jobs: skip-code-coverage: true secrets: inherit + fedora-38-arm64-pkg-download-tests: + name: Test Fedora 38 Arm64 Package Downloads + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + 
needs: + - prepare-workflow + - publish-repositories + uses: ./.github/workflows/test-package-downloads-action-linux.yml + with: + distro-slug: fedora-38-arm64 + platform: linux + arch: aarch64 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + environment: staging + skip-code-coverage: true + secrets: inherit + photonos-3-pkg-download-tests: name: Test Photon OS 3 Package Downloads if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} @@ -2494,17 +2647,26 @@ jobs: - windows-2022-nsis-pkg-tests - windows-2022-msi-pkg-tests - almalinux-8-pkg-download-tests + - almalinux-8-arm64-pkg-download-tests - almalinux-9-pkg-download-tests + - almalinux-9-arm64-pkg-download-tests - amazonlinux-2-pkg-download-tests + - amazonlinux-2-arm64-pkg-download-tests - centos-7-pkg-download-tests + - centos-7-arm64-pkg-download-tests - centosstream-8-pkg-download-tests + - centosstream-8-arm64-pkg-download-tests - centosstream-9-pkg-download-tests + - centosstream-9-arm64-pkg-download-tests - debian-10-pkg-download-tests - debian-11-pkg-download-tests - debian-11-arm64-pkg-download-tests - fedora-36-pkg-download-tests + - fedora-36-arm64-pkg-download-tests - fedora-37-pkg-download-tests + - fedora-37-arm64-pkg-download-tests - fedora-38-pkg-download-tests + - fedora-38-arm64-pkg-download-tests - photonos-3-pkg-download-tests - photonos-4-pkg-download-tests - ubuntu-2004-pkg-download-tests diff --git a/.github/workflows/templates/test-pkg-repo-downloads.yml.jinja b/.github/workflows/templates/test-pkg-repo-downloads.yml.jinja index e7e964c5ce2..25f3d91d5b4 100644 --- a/.github/workflows/templates/test-pkg-repo-downloads.yml.jinja +++ b/.github/workflows/templates/test-pkg-repo-downloads.yml.jinja @@ -1,17 +1,26 @@ <%- set linux_pkg_tests = ( ("almalinux-8", "Alma Linux 8", "x86_64"), + ("almalinux-8-arm64", "Alma Linux 8 Arm64", "aarch64"), ("almalinux-9", "Alma Linux 9", "x86_64"), + ("almalinux-9-arm64", "Alma Linux 9 Arm64", "aarch64"), ("amazonlinux-2", "Amazon Linux 2", "x86_64"), + ("amazonlinux-2-arm64", "Amazon Linux 2 Arm64", "aarch64"), ("centos-7", "CentOS 7", "x86_64"), + ("centos-7-arm64", "CentOS 7 Arm64", "aarch64"), ("centosstream-8", "CentOS Stream 8", "x86_64"), + ("centosstream-8-arm64", "CentOS Stream 8 Arm64", "aarch64"), ("centosstream-9", "CentOS Stream 9", "x86_64"), + ("centosstream-9-arm64", "CentOS Stream 9 Arm64", "aarch64"), ("debian-10", "Debian 10", "x86_64"), ("debian-11", "Debian 11", "x86_64"), ("debian-11-arm64", "Debian 11 Arm64", "aarch64"), ("fedora-36", "Fedora 36", "x86_64"), + ("fedora-36-arm64", "Fedora 36 Arm64", "aarch64"), ("fedora-37", "Fedora 37", "x86_64"), + ("fedora-37-arm64", "Fedora 37 Arm64", "aarch64"), ("fedora-38", "Fedora 38", "x86_64"), + ("fedora-38-arm64", "Fedora 38 Arm64", "aarch64"), ("photonos-3", "Photon OS 3", "x86_64"), ("photonos-4", "Photon OS 4", "x86_64"), ("ubuntu-20.04", "Ubuntu 20.04", "x86_64"), From 9d502045d33c5c5d4f68d85e144aa59c25d42044 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Wed, 19 Apr 2023 19:06:59 +0100 Subject: [PATCH 019/121] Point the command directly at the scripts Signed-off-by: Pedro Algarvio --- pkg/tests/download/test_pkg_download.py | 32 +++++++------------------ 1 file changed, 9 insertions(+), 23 deletions(-) diff --git a/pkg/tests/download/test_pkg_download.py b/pkg/tests/download/test_pkg_download.py index 
aa99e0d5dd5..67f6202c714 100644 --- a/pkg/tests/download/test_pkg_download.py +++ b/pkg/tests/download/test_pkg_download.py @@ -371,34 +371,20 @@ def setup_windows(shell, root_url, salt_release, downloads_path): @pytest.fixture(scope="module") -def environ(_setup_system): - env = os.environ.copy() +def install_dir(_setup_system): if platform.is_windows(): - install_dir = pathlib.Path( - os.getenv("ProgramFiles"), "Salt Project", "Salt" - ).resolve() - elif platform.is_darwin(): - install_dir = pathlib.Path("/opt", "salt") - else: - install_dir = pathlib.Path("/opt", "saltstack", "salt") - - # Get the defined PATH environment variable - path = os.environ.get("PATH") - if path is not None: - path_items = path.split(os.pathsep) - else: - path_items = [] - path_items.insert(0, str(install_dir)) - - # Set the PATH environment variable - env["PATH"] = os.pathsep.join(path_items) - return env + return pathlib.Path(os.getenv("ProgramFiles"), "Salt Project", "Salt").resolve() + if platform.is_darwin(): + return pathlib.Path("/opt", "salt") + return pathlib.Path("/opt", "saltstack", "salt") @pytest.mark.parametrize("salt_test_command", get_salt_test_commands()) -def test_download(shell, environ, salt_test_command): +def test_download(shell, install_dir, salt_test_command): """ Test downloading of Salt packages and running various commands. """ - ret = shell.run(*salt_test_command.split(), env=environ, check=False) + _cmd = salt_test_command.split() + _cmd[0] = str(install_dir / _cmd[0]) + ret = shell.run(*_cmd, check=False) assert ret.returncode == 0, ret From 79d3acce18084a27f013e09776fc40eb7d1ca09b Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Wed, 19 Apr 2023 19:28:26 +0100 Subject: [PATCH 020/121] Use an indirect fixture Signed-off-by: Pedro Algarvio --- pkg/tests/download/test_pkg_download.py | 57 ++++++++++++++----------- 1 file changed, 31 insertions(+), 26 deletions(-) diff --git a/pkg/tests/download/test_pkg_download.py b/pkg/tests/download/test_pkg_download.py index 67f6202c714..e7b543f8544 100644 --- a/pkg/tests/download/test_pkg_download.py +++ b/pkg/tests/download/test_pkg_download.py @@ -19,33 +19,33 @@ def get_salt_test_commands(): if platform.is_windows(): if packaging.version.parse(salt_release) > packaging.version.parse("3005"): salt_test_commands = [ - "salt-call.exe --local test.versions", - "salt-call.exe --local grains.items", - "salt-minion.exe --version", + ["salt-call.exe", "--local", "test.versions"], + ["salt-call.exe", "--local", "grains.items"], + ["salt-minion.exe", "--version"], ] else: salt_test_commands = [ - "salt-call.bat --local test.versions", - "salt-call.bat --local grains.items", - "salt.bat --version", - "salt-master.bat --version", - "salt-minion.bat --version", - "salt-ssh.bat --version", - "salt-syndic.bat --version", - "salt-api.bat --version", - "salt-cloud.bat --version", + ["salt-call.bat", "--local", "test.versions"], + ["salt-call.bat", "--local", "grains.items"], + ["salt.bat", "--version"], + ["salt-master.bat", "--version"], + ["salt-minion.bat", "--version"], + ["salt-ssh.bat", "--version"], + ["salt-syndic.bat", "--version"], + ["salt-api.bat", "--version"], + ["salt-cloud.bat", "--version"], ] else: salt_test_commands = [ - "salt-call --local test.versions", - "salt-call --local grains.items", - "salt --version", - "salt-master --version", - "salt-minion --version", - "salt-ssh --version", - "salt-syndic --version", - "salt-api --version", - "salt-cloud --version", + ["salt-call", "--local", "test.versions"], + ["salt-call", "--local", 
"grains.items"], + ["salt", "--version"], + ["salt-master", "--version"], + ["salt-minion", "--version"], + ["salt-ssh", "--version"], + ["salt-syndic", "--version"], + ["salt-api", "--version"], + ["salt-cloud", "--version"], ] return salt_test_commands @@ -379,12 +379,17 @@ def install_dir(_setup_system): return pathlib.Path("/opt", "saltstack", "salt") -@pytest.mark.parametrize("salt_test_command", get_salt_test_commands()) -def test_download(shell, install_dir, salt_test_command): +@pytest.fixture(scope="module") +def salt_test_command(request, install_dir): + command = request.param + command[0] = str(install_dir / command[0]) + return command + + +@pytest.mark.parametrize("salt_test_command", get_salt_test_commands(), indirect=True) +def test_download(shell, salt_test_command): """ Test downloading of Salt packages and running various commands. """ - _cmd = salt_test_command.split() - _cmd[0] = str(install_dir / _cmd[0]) - ret = shell.run(*_cmd, check=False) + ret = shell.run(*salt_test_command, check=False) assert ret.returncode == 0, ret From 0fe922f3cd3e16030a5707af0cad500487139b91 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Wed, 19 Apr 2023 22:13:00 +0100 Subject: [PATCH 021/121] Don't `yield`! Signed-off-by: Pedro Algarvio --- pkg/tests/download/test_pkg_download.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/pkg/tests/download/test_pkg_download.py b/pkg/tests/download/test_pkg_download.py index e7b543f8544..bd6e0454215 100644 --- a/pkg/tests/download/test_pkg_download.py +++ b/pkg/tests/download/test_pkg_download.py @@ -329,8 +329,6 @@ def setup_macos(shell, root_url, salt_release, downloads_path): ) assert ret.returncode == 0, ret - yield - def setup_windows(shell, root_url, salt_release, downloads_path): From cd8aae6f5de1af1fcccddc90be8af458a7e42b01 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Thu, 20 Apr 2023 07:44:40 +0100 Subject: [PATCH 022/121] Start publishing the nightly builds to the release bucket under `salt-dev/` Signed-off-by: Pedro Algarvio --- .github/workflows/nightly.yml | 12 +-- .../templates/build-deb-repo.yml.jinja | 2 +- .../templates/build-macos-repo.yml.jinja | 2 +- .../templates/build-onedir-repo.yml.jinja | 2 +- .../templates/build-rpm-repo.yml.jinja | 2 +- .../templates/build-src-repo.yml.jinja | 2 +- .../templates/build-windows-repo.yml.jinja | 2 +- tools/pkg/repo.py | 89 +++++++++++-------- tools/utils.py | 1 - 9 files changed, 62 insertions(+), 52 deletions(-) diff --git a/.github/workflows/nightly.yml b/.github/workflows/nightly.yml index 1cc7062c94d..4d7792d86c3 100644 --- a/.github/workflows/nightly.yml +++ b/.github/workflows/nightly.yml @@ -1409,7 +1409,7 @@ jobs: - name: Create Repository run: | - tools pkg repo create src --key-id=64CBBC8173D76B3F --nightly-build \ + tools pkg repo create src --key-id=64CBBC8173D76B3F --nightly-build-from=${{ github.ref_name }} \ --salt-version=${{ needs.prepare-workflow.outputs.salt-version }} \ --incoming=artifacts/pkgs/incoming --repo-path=artifacts/pkgs/repo @@ -1530,7 +1530,7 @@ jobs: - name: Create Repository run: | - tools pkg repo create deb --key-id=64CBBC8173D76B3F --distro-arch=${{ matrix.arch }} --nightly-build \ + tools pkg repo create deb --key-id=64CBBC8173D76B3F --distro-arch=${{ matrix.arch }} --nightly-build-from=${{ github.ref_name }} \ --salt-version=${{ needs.prepare-workflow.outputs.salt-version }} \ --distro=${{ matrix.distro }} --distro-version=${{ matrix.version }} \ --incoming=artifacts/pkgs/incoming --repo-path=artifacts/pkgs/repo @@ -1674,7 +1674,7 @@ jobs: 
SALT_REPO_DOMAIN_RELEASE: ${{ vars.SALT_REPO_DOMAIN_RELEASE || 'repo.saltproject.io' }} SALT_REPO_DOMAIN_STAGING: ${{ vars.SALT_REPO_DOMAIN_STAGING || 'staging.repo.saltproject.io' }} run: | - tools pkg repo create rpm --key-id=64CBBC8173D76B3F --distro-arch=${{ matrix.arch }} --nightly-build \ + tools pkg repo create rpm --key-id=64CBBC8173D76B3F --distro-arch=${{ matrix.arch }} --nightly-build-from=${{ github.ref_name }} \ --salt-version=${{ needs.prepare-workflow.outputs.salt-version }} \ --distro=${{ matrix.distro }} --distro-version=${{ matrix.version }} \ --incoming=artifacts/pkgs/incoming --repo-path=artifacts/pkgs/repo @@ -1769,7 +1769,7 @@ jobs: - name: Create Repository run: | - tools pkg repo create windows --key-id=64CBBC8173D76B3F --nightly-build \ + tools pkg repo create windows --key-id=64CBBC8173D76B3F --nightly-build-from=${{ github.ref_name }} \ --salt-version=${{ needs.prepare-workflow.outputs.salt-version }} \ --incoming=artifacts/pkgs/incoming --repo-path=artifacts/pkgs/repo @@ -1845,7 +1845,7 @@ jobs: - name: Create Repository run: | - tools pkg repo create macos --key-id=64CBBC8173D76B3F --nightly-build \ + tools pkg repo create macos --key-id=64CBBC8173D76B3F --nightly-build-from=${{ github.ref_name }} \ --salt-version=${{ needs.prepare-workflow.outputs.salt-version }} \ --incoming=artifacts/pkgs/incoming --repo-path=artifacts/pkgs/repo @@ -1957,7 +1957,7 @@ jobs: - name: Create Repository run: | - tools pkg repo create onedir --key-id=64CBBC8173D76B3F --nightly-build \ + tools pkg repo create onedir --key-id=64CBBC8173D76B3F --nightly-build-from=${{ github.ref_name }} \ --salt-version=${{ needs.prepare-workflow.outputs.salt-version }} \ --incoming=artifacts/pkgs/incoming --repo-path=artifacts/pkgs/repo diff --git a/.github/workflows/templates/build-deb-repo.yml.jinja b/.github/workflows/templates/build-deb-repo.yml.jinja index e44e9837a3a..4f88e516a3d 100644 --- a/.github/workflows/templates/build-deb-repo.yml.jinja +++ b/.github/workflows/templates/build-deb-repo.yml.jinja @@ -76,7 +76,7 @@ - name: Create Repository run: | - tools pkg repo create deb --key-id=<{ gpg_key_id }> --distro-arch=${{ matrix.arch }} <% if gh_environment == 'nightly' -%> --nightly-build <%- endif %> \ + tools pkg repo create deb --key-id=<{ gpg_key_id }> --distro-arch=${{ matrix.arch }} <% if gh_environment == 'nightly' -%> --nightly-build-from=${{ github.ref_name }} <%- endif %> \ --salt-version=${{ needs.prepare-workflow.outputs.salt-version }} \ --distro=${{ matrix.distro }} --distro-version=${{ matrix.version }} \ --incoming=artifacts/pkgs/incoming --repo-path=artifacts/pkgs/repo diff --git a/.github/workflows/templates/build-macos-repo.yml.jinja b/.github/workflows/templates/build-macos-repo.yml.jinja index 409f6e12246..f4494b24d74 100644 --- a/.github/workflows/templates/build-macos-repo.yml.jinja +++ b/.github/workflows/templates/build-macos-repo.yml.jinja @@ -52,7 +52,7 @@ - name: Create Repository run: | - tools pkg repo create macos --key-id=<{ gpg_key_id }> <% if gh_environment == 'nightly' -%> --nightly-build <%- endif %> \ + tools pkg repo create macos --key-id=<{ gpg_key_id }> <% if gh_environment == 'nightly' -%> --nightly-build-from=${{ github.ref_name }} <%- endif %> \ --salt-version=${{ needs.prepare-workflow.outputs.salt-version }} \ --incoming=artifacts/pkgs/incoming --repo-path=artifacts/pkgs/repo diff --git a/.github/workflows/templates/build-onedir-repo.yml.jinja b/.github/workflows/templates/build-onedir-repo.yml.jinja index 29c555a82d7..b68049c9a5c 100644 --- 
a/.github/workflows/templates/build-onedir-repo.yml.jinja +++ b/.github/workflows/templates/build-onedir-repo.yml.jinja @@ -88,7 +88,7 @@ - name: Create Repository run: | - tools pkg repo create onedir --key-id=<{ gpg_key_id }> <% if gh_environment == 'nightly' -%> --nightly-build <%- endif %> \ + tools pkg repo create onedir --key-id=<{ gpg_key_id }> <% if gh_environment == 'nightly' -%> --nightly-build-from=${{ github.ref_name }} <%- endif %> \ --salt-version=${{ needs.prepare-workflow.outputs.salt-version }} \ --incoming=artifacts/pkgs/incoming --repo-path=artifacts/pkgs/repo diff --git a/.github/workflows/templates/build-rpm-repo.yml.jinja b/.github/workflows/templates/build-rpm-repo.yml.jinja index 49316861d47..6f1b27c6eca 100644 --- a/.github/workflows/templates/build-rpm-repo.yml.jinja +++ b/.github/workflows/templates/build-rpm-repo.yml.jinja @@ -93,7 +93,7 @@ SALT_REPO_DOMAIN_RELEASE: ${{ vars.SALT_REPO_DOMAIN_RELEASE || 'repo.saltproject.io' }} SALT_REPO_DOMAIN_STAGING: ${{ vars.SALT_REPO_DOMAIN_STAGING || 'staging.repo.saltproject.io' }} run: | - tools pkg repo create rpm --key-id=<{ gpg_key_id }> --distro-arch=${{ matrix.arch }} <% if gh_environment == 'nightly' -%> --nightly-build <%- endif %> \ + tools pkg repo create rpm --key-id=<{ gpg_key_id }> --distro-arch=${{ matrix.arch }} <% if gh_environment == 'nightly' -%> --nightly-build-from=${{ github.ref_name }} <%- endif %> \ --salt-version=${{ needs.prepare-workflow.outputs.salt-version }} \ --distro=${{ matrix.distro }} --distro-version=${{ matrix.version }} \ --incoming=artifacts/pkgs/incoming --repo-path=artifacts/pkgs/repo diff --git a/.github/workflows/templates/build-src-repo.yml.jinja b/.github/workflows/templates/build-src-repo.yml.jinja index 0fcf4eed9ef..8931af711fd 100644 --- a/.github/workflows/templates/build-src-repo.yml.jinja +++ b/.github/workflows/templates/build-src-repo.yml.jinja @@ -52,7 +52,7 @@ - name: Create Repository run: | - tools pkg repo create src --key-id=<{ gpg_key_id }> <% if gh_environment == 'nightly' -%> --nightly-build <%- endif %> \ + tools pkg repo create src --key-id=<{ gpg_key_id }> <% if gh_environment == 'nightly' -%> --nightly-build-from=${{ github.ref_name }} <%- endif %> \ --salt-version=${{ needs.prepare-workflow.outputs.salt-version }} \ --incoming=artifacts/pkgs/incoming --repo-path=artifacts/pkgs/repo diff --git a/.github/workflows/templates/build-windows-repo.yml.jinja b/.github/workflows/templates/build-windows-repo.yml.jinja index 2ffbfad6885..0142e5cc09c 100644 --- a/.github/workflows/templates/build-windows-repo.yml.jinja +++ b/.github/workflows/templates/build-windows-repo.yml.jinja @@ -70,7 +70,7 @@ - name: Create Repository run: | - tools pkg repo create windows --key-id=<{ gpg_key_id }> <% if gh_environment == 'nightly' -%> --nightly-build <%- endif %> \ + tools pkg repo create windows --key-id=<{ gpg_key_id }> <% if gh_environment == 'nightly' -%> --nightly-build-from=${{ github.ref_name }} <%- endif %> \ --salt-version=${{ needs.prepare-workflow.outputs.salt-version }} \ --incoming=artifacts/pkgs/incoming --repo-path=artifacts/pkgs/repo diff --git a/tools/pkg/repo.py b/tools/pkg/repo.py index 88f0115a810..cab8495b2f1 100644 --- a/tools/pkg/repo.py +++ b/tools/pkg/repo.py @@ -122,7 +122,7 @@ _deb_distro_info = { ), "required": True, }, - "nightly_build": { + "nightly_build_from": { "help": "Developement repository target", }, }, @@ -136,7 +136,7 @@ def debian( repo_path: pathlib.Path = None, key_id: str = None, distro_arch: str = "amd64", - nightly_build: bool = 
False, + nightly_build_from: str = None, ): """ Create the debian repository. @@ -178,7 +178,7 @@ def debian( ftp_archive_config_suite = ( f"""\n APT::FTPArchive::Release::Suite "{suitename}";\n""" ) - archive_description = f"SaltProject {display_name} Python 3{'' if nightly_build else ' development'} Salt package repo" + archive_description = f"SaltProject {display_name} Python 3{'' if not nightly_build_from else ' development'} Salt package repo" ftp_archive_config = f"""\ APT::FTPArchive::Release::Origin "SaltProject"; APT::FTPArchive::Release::Label "{label}";{ftp_archive_config_suite} @@ -203,6 +203,7 @@ def debian( distro, distro_version=distro_version, distro_arch=distro_arch, + nightly_build_from=nightly_build_from, ) # Export the GPG key in use tools.utils.export_gpg_key(ctx, key_id, create_repo_path) @@ -213,7 +214,7 @@ def debian( distro, distro_version=distro_version, distro_arch=distro_arch, - nightly_build=nightly_build, + nightly_build_from=nightly_build_from, ) ftp_archive_config_file = create_repo_path / "apt-ftparchive.conf" ctx.info(f"Writing {ftp_archive_config_file} ...") @@ -300,7 +301,7 @@ def debian( ctx.info(f"Running '{' '.join(cmdline)}' ...") ctx.run(*cmdline, cwd=create_repo_path) - if nightly_build is False: + if not nightly_build_from: remote_versions = _get_remote_versions( tools.utils.STAGING_BUCKET_NAME, create_repo_path.parent.relative_to(repo_path), @@ -373,7 +374,7 @@ _rpm_distro_info = { ), "required": True, }, - "nightly_build": { + "nightly_build_from": { "help": "Developement repository target", }, }, @@ -387,7 +388,7 @@ def rpm( repo_path: pathlib.Path = None, key_id: str = None, distro_arch: str = "amd64", - nightly_build: bool = False, + nightly_build_from: str = None, ): """ Create the redhat repository. 
@@ -415,6 +416,7 @@ def rpm( distro, distro_version=distro_version, distro_arch=distro_arch, + nightly_build_from=nightly_build_from, ) # Export the GPG key in use tools.utils.export_gpg_key(ctx, key_id, create_repo_path) @@ -425,7 +427,7 @@ def rpm( distro, distro_version=distro_version, distro_arch=distro_arch, - nightly_build=nightly_build, + nightly_build_from=nightly_build_from, ) # Export the GPG key in use @@ -472,7 +474,7 @@ def rpm( else: ctx.run("createrepo", ".", cwd=create_repo_path) - if nightly_build: + if nightly_build_from: repo_domain = os.environ.get("SALT_REPO_DOMAIN_RELEASE", "repo.saltproject.io") else: repo_domain = os.environ.get( @@ -496,8 +498,8 @@ def rpm( def _create_repo_file(create_repo_path, url_suffix): ctx.info(f"Creating '{repo_file_path.relative_to(repo_path)}' file ...") - if nightly_build: - base_url = "salt-dev/" + if nightly_build_from: + base_url = f"salt-dev/{nightly_build_from}/" repo_file_contents = "[salt-nightly-repo]" elif "rc" in salt_version: base_url = "salt_rc/" @@ -532,14 +534,14 @@ def rpm( ) create_repo_path.write_text(repo_file_contents) - if nightly_build: + if nightly_build_from: repo_file_path = create_repo_path.parent / "nightly.repo" else: repo_file_path = create_repo_path.parent / f"{create_repo_path.name}.repo" _create_repo_file(repo_file_path, f"minor/{salt_version}") - if nightly_build is False: + if not nightly_build_from: remote_versions = _get_remote_versions( tools.utils.STAGING_BUCKET_NAME, create_repo_path.parent.relative_to(repo_path), @@ -594,7 +596,7 @@ def rpm( ), "required": True, }, - "nightly_build": { + "nightly_build_from": { "help": "Developement repository target", }, }, @@ -605,7 +607,7 @@ def windows( incoming: pathlib.Path = None, repo_path: pathlib.Path = None, key_id: str = None, - nightly_build: bool = False, + nightly_build_from: str = None, ): """ Create the windows repository. @@ -618,7 +620,7 @@ def windows( _create_onedir_based_repo( ctx, salt_version=salt_version, - nightly_build=nightly_build, + nightly_build_from=nightly_build_from, repo_path=repo_path, incoming=incoming, key_id=key_id, @@ -650,7 +652,7 @@ def windows( ), "required": True, }, - "nightly_build": { + "nightly_build_from": { "help": "Developement repository target", }, }, @@ -661,7 +663,7 @@ def macos( incoming: pathlib.Path = None, repo_path: pathlib.Path = None, key_id: str = None, - nightly_build: bool = False, + nightly_build_from: str = None, ): """ Create the windows repository. @@ -674,7 +676,7 @@ def macos( _create_onedir_based_repo( ctx, salt_version=salt_version, - nightly_build=nightly_build, + nightly_build_from=nightly_build_from, repo_path=repo_path, incoming=incoming, key_id=key_id, @@ -706,7 +708,7 @@ def macos( ), "required": True, }, - "nightly_build": { + "nightly_build_from": { "help": "Developement repository target", }, }, @@ -717,7 +719,7 @@ def onedir( incoming: pathlib.Path = None, repo_path: pathlib.Path = None, key_id: str = None, - nightly_build: bool = False, + nightly_build_from: str = None, ): """ Create the onedir repository. 
@@ -730,7 +732,7 @@ def onedir( _create_onedir_based_repo( ctx, salt_version=salt_version, - nightly_build=nightly_build, + nightly_build_from=nightly_build_from, repo_path=repo_path, incoming=incoming, key_id=key_id, @@ -762,7 +764,7 @@ def onedir( ), "required": True, }, - "nightly_build": { + "nightly_build_from": { "help": "Developement repository target", }, }, @@ -773,7 +775,7 @@ def src( incoming: pathlib.Path = None, repo_path: pathlib.Path = None, key_id: str = None, - nightly_build: bool = False, + nightly_build_from: str = None, ): """ Create the onedir repository. @@ -1458,7 +1460,7 @@ def _get_remote_versions(bucket_name: str, remote_path: str): def _create_onedir_based_repo( ctx: Context, salt_version: str, - nightly_build: bool, + nightly_build_from: str | None, repo_path: pathlib.Path, incoming: pathlib.Path, key_id: str, @@ -1470,20 +1472,21 @@ def _create_onedir_based_repo( repo_path, salt_version, distro, + nightly_build_from=nightly_build_from, ) # Export the GPG key in use tools.utils.export_gpg_key(ctx, key_id, create_repo_path) create_repo_path = _create_repo_path( - repo_path, salt_version, distro, nightly_build=nightly_build + repo_path, salt_version, distro, nightly_build_from=nightly_build_from ) - if nightly_build is False: + if not nightly_build_from: repo_json_path = create_repo_path.parent.parent / "repo.json" else: repo_json_path = create_repo_path.parent / "repo.json" - if nightly_build: - bucket_name = tools.utils.NIGHTLY_BUCKET_NAME + if nightly_build_from: + bucket_name = tools.utils.RELEASE_BUCKET_NAME else: bucket_name = tools.utils.STAGING_BUCKET_NAME @@ -1559,7 +1562,7 @@ def _create_onedir_based_repo( repo_json = _get_repo_json_file_contents( ctx, bucket_name=bucket_name, repo_path=repo_path, repo_json_path=repo_json_path ) - if nightly_build is True: + if nightly_build_from: latest_link = create_repo_path.parent / "latest" ctx.info(f"Creating '{latest_link.relative_to(repo_path)}' symlink ...") latest_link.symlink_to(create_repo_path.name) @@ -1706,7 +1709,7 @@ def _publish_repo( Publish packaging repositories. 
""" if nightly_build: - bucket_name = tools.utils.NIGHTLY_BUCKET_NAME + bucket_name = tools.utils.RELEASE_BUCKET_NAME elif stage: bucket_name = tools.utils.STAGING_BUCKET_NAME else: @@ -1821,11 +1824,16 @@ def _create_top_level_repo_path( distro: str, distro_version: str | None = None, # pylint: disable=bad-whitespace distro_arch: str | None = None, # pylint: disable=bad-whitespace - nightly_build: bool = False, + nightly_build_from: str | None = None, # pylint: disable=bad-whitespace ): create_repo_path = repo_path - if nightly_build: - create_repo_path = create_repo_path / "salt-dev" + if nightly_build_from: + create_repo_path = ( + create_repo_path + / "salt-dev" + / nightly_build_from + / datetime.utcnow().strftime("%Y-%m-%d") + ) elif "rc" in salt_version: create_repo_path = create_repo_path / "salt_rc" create_repo_path = create_repo_path / "salt" / "py3" / distro @@ -1843,15 +1851,18 @@ def _create_repo_path( distro: str, distro_version: str | None = None, # pylint: disable=bad-whitespace distro_arch: str | None = None, # pylint: disable=bad-whitespace - nightly_build: bool = False, + nightly_build_from: str | None = None, # pylint: disable=bad-whitespace ): create_repo_path = _create_top_level_repo_path( - repo_path, salt_version, distro, distro_version, distro_arch + repo_path, + salt_version, + distro, + distro_version, + distro_arch, + nightly_build_from=nightly_build_from, ) - if nightly_build is False: + if not nightly_build_from: create_repo_path = create_repo_path / "minor" / salt_version - else: - create_repo_path = create_repo_path / datetime.utcnow().strftime("%Y-%m-%d") create_repo_path.mkdir(exist_ok=True, parents=True) return create_repo_path diff --git a/tools/utils.py b/tools/utils.py index 8c7c220e2b4..bf4bfb4d1a5 100644 --- a/tools/utils.py +++ b/tools/utils.py @@ -19,7 +19,6 @@ from rich.progress import ( REPO_ROOT = pathlib.Path(__file__).resolve().parent.parent GPG_KEY_FILENAME = "SALT-PROJECT-GPG-PUBKEY-2023" SPB_ENVIRONMENT = os.environ.get("SPB_ENVIRONMENT") or "prod" -NIGHTLY_BUCKET_NAME = f"salt-project-{SPB_ENVIRONMENT}-salt-artifacts-nightly" STAGING_BUCKET_NAME = f"salt-project-{SPB_ENVIRONMENT}-salt-artifacts-staging" RELEASE_BUCKET_NAME = f"salt-project-{SPB_ENVIRONMENT}-salt-artifacts-release" BACKUP_BUCKET_NAME = f"salt-project-{SPB_ENVIRONMENT}-salt-artifacts-backup" From 1f145d37f29b02e629fcfd449d1c2b460d651030 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Thu, 20 Apr 2023 07:59:19 +0100 Subject: [PATCH 023/121] Fix Linux only logical check Signed-off-by: Pedro Algarvio --- pkg/tests/support/helpers.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pkg/tests/support/helpers.py b/pkg/tests/support/helpers.py index 90abf8b88e4..57b6ccd4d00 100644 --- a/pkg/tests/support/helpers.py +++ b/pkg/tests/support/helpers.py @@ -601,7 +601,7 @@ class SaltPkgInstall: else: log.info("Installing packages:\n%s", pprint.pformat(self.pkgs)) ret = self.proc.run(self.pkg_mngr, "install", "-y", *self.pkgs) - if not (platform.is_darwin() or platform.is_windows()): + if not platform.is_darwin() and not platform.is_windows(): # Make sure we don't have any trailing references to old package file locations assert "No such file or directory" not in ret.stdout assert "/saltstack/salt/run" not in ret.stdout From d7bc15851bd216f0379909d79bbc2f2fb00ef1ec Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Fri, 21 Apr 2023 14:20:25 +0100 Subject: [PATCH 024/121] Fail when we get cache misses on certain steps of the workflows Signed-off-by: Pedro Algarvio 
--- .github/actions/setup-actionlint/action.yml | 2 +- .github/workflows/test-action-macos.yml | 2 ++ .github/workflows/test-action.yml | 2 ++ .github/workflows/test-package-downloads-action-linux.yml | 2 ++ .github/workflows/test-package-downloads-action-macos.yml | 2 ++ .github/workflows/test-package-downloads-action-windows.yml | 2 ++ .github/workflows/test-packages-action-macos.yml | 2 ++ .github/workflows/test-packages-action.yml | 2 ++ 8 files changed, 15 insertions(+), 1 deletion(-) diff --git a/.github/actions/setup-actionlint/action.yml b/.github/actions/setup-actionlint/action.yml index 539d34bf100..6605d5db1bc 100644 --- a/.github/actions/setup-actionlint/action.yml +++ b/.github/actions/setup-actionlint/action.yml @@ -4,7 +4,7 @@ description: Setup actionlint inputs: version: description: The version of actionlint - default: 1.6.23 + default: 1.6.24 cache-seed: required: true type: string diff --git a/.github/workflows/test-action-macos.yml b/.github/workflows/test-action-macos.yml index 81dcc99ba20..6da084d692e 100644 --- a/.github/workflows/test-action-macos.yml +++ b/.github/workflows/test-action-macos.yml @@ -227,6 +227,8 @@ jobs: with: path: nox.${{ inputs.distro-slug }}.tar.* key: ${{ inputs.cache-prefix }}|testrun-deps|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ matrix.transport }}|${{ inputs.python-version }}|${{ hashFiles('requirements/**/*.txt') }} + # If we get a cache miss here it means the dependencies step failed to save the cache + fail-on-cache-miss: true - name: Set up Python ${{ inputs.python-version }} uses: actions/setup-python@v4 diff --git a/.github/workflows/test-action.yml b/.github/workflows/test-action.yml index f7220b21d6e..99f6ba35cf6 100644 --- a/.github/workflows/test-action.yml +++ b/.github/workflows/test-action.yml @@ -248,6 +248,8 @@ jobs: with: path: nox.${{ inputs.distro-slug }}.tar.* key: ${{ inputs.cache-prefix }}|testrun-deps|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ matrix.transport }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json') }} + # If we get a cache miss here it means the dependencies step failed to save the cache + fail-on-cache-miss: true - name: PyPi Proxy run: | diff --git a/.github/workflows/test-package-downloads-action-linux.yml b/.github/workflows/test-package-downloads-action-linux.yml index 99bd06c08a1..b922c2954e2 100644 --- a/.github/workflows/test-package-downloads-action-linux.yml +++ b/.github/workflows/test-package-downloads-action-linux.yml @@ -182,6 +182,8 @@ jobs: with: path: nox.${{ inputs.distro-slug }}.tar.* key: ${{ inputs.cache-prefix }}|test-pkg-download-deps|${{ inputs.arch }}|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json') }} + # If we get a cache miss here it means the dependencies step failed to save the cache + fail-on-cache-miss: true - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts diff --git a/.github/workflows/test-package-downloads-action-macos.yml b/.github/workflows/test-package-downloads-action-macos.yml index 80f2a9210c6..fdb321782f9 100644 --- a/.github/workflows/test-package-downloads-action-macos.yml +++ b/.github/workflows/test-package-downloads-action-macos.yml @@ -177,6 +177,8 @@ jobs: with: path: nox.${{ inputs.distro-slug }}.tar.* key: ${{ inputs.cache-prefix }}|test-pkg-download-deps|${{ inputs.arch }}|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json') }} + # If we get 
a cache miss here it means the dependencies step failed to save the cache + fail-on-cache-miss: true - name: Decompress .nox Directory run: | diff --git a/.github/workflows/test-package-downloads-action-windows.yml b/.github/workflows/test-package-downloads-action-windows.yml index f4c0a9165e2..d7525481b28 100644 --- a/.github/workflows/test-package-downloads-action-windows.yml +++ b/.github/workflows/test-package-downloads-action-windows.yml @@ -191,6 +191,8 @@ jobs: with: path: nox.${{ inputs.distro-slug }}.tar.* key: ${{ inputs.cache-prefix }}|test-pkg-download-deps|${{ inputs.arch }}|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json') }} + # If we get a cache miss here it means the dependencies step failed to save the cache + fail-on-cache-miss: true - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts diff --git a/.github/workflows/test-packages-action-macos.yml b/.github/workflows/test-packages-action-macos.yml index d2cda1f4d48..f0231d256e5 100644 --- a/.github/workflows/test-packages-action-macos.yml +++ b/.github/workflows/test-packages-action-macos.yml @@ -214,6 +214,8 @@ jobs: with: path: nox.${{ inputs.distro-slug }}.tar.* key: ${{ inputs.cache-prefix }}|testrun-deps|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json') }} + # If we get a cache miss here it means the dependencies step failed to save the cache + fail-on-cache-miss: true - name: Decompress .nox Directory run: | diff --git a/.github/workflows/test-packages-action.yml b/.github/workflows/test-packages-action.yml index b73329fa26d..ff19950bd46 100644 --- a/.github/workflows/test-packages-action.yml +++ b/.github/workflows/test-packages-action.yml @@ -228,6 +228,8 @@ jobs: with: path: nox.${{ inputs.distro-slug }}.tar.* key: ${{ inputs.cache-prefix }}|testrun-deps|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json') }} + # If we get a cache miss here it means the dependencies step failed to save the cache + fail-on-cache-miss: true - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts From 8f8178a905955b93d62061557b36c83ae37668da Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Fri, 21 Apr 2023 18:36:11 +0100 Subject: [PATCH 025/121] Signed and unsigned macOS packages use the same name, like windows. 
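Both build paths now converge on a single artifact path computed up front.
A condensed sketch of the resulting flow, derived from the patched script
(error handling elided); the unsigned nightly branch no longer appends an
`-unsigned` suffix:

    PKG_FILE="$SCRIPT_DIR/salt-$VERSION-py3-$CPU_ARCH.pkg"
    if [ "${SIGN}" -eq 1 ]; then
        # Signed build: same output path, plus the signing identity.
        productbuild --resources="$SCRIPT_DIR/pkg-resources" \
            --distribution="$DIST_XML" \
            --package-path="$SCRIPT_DIR/salt-src-$VERSION-py3-$CPU_ARCH.pkg" \
            --version="$VERSION" \
            --sign "$DEV_INSTALL_CERT" --timestamp \
            "$PKG_FILE"
    else
        # Unsigned nightly build: identical artifact name, no suffix.
        productbuild --resources="$SCRIPT_DIR/pkg-resources" \
            --distribution="$DIST_XML" \
            --package-path="$SCRIPT_DIR/salt-src-$VERSION-py3-$CPU_ARCH.pkg" \
            --version="$VERSION" \
            "$PKG_FILE"
    fi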
Signed-off-by: Pedro Algarvio --- pkg/macos/package.sh | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pkg/macos/package.sh b/pkg/macos/package.sh index 6221fde2947..70734b89b78 100755 --- a/pkg/macos/package.sh +++ b/pkg/macos/package.sh @@ -250,6 +250,7 @@ else fi +PKG_FILE="$SCRIPT_DIR/salt-$VERSION-py3-$CPU_ARCH.pkg" if [ "${SIGN}" -eq 1 ]; then _msg "Building the product package (signed)" # This is not a nightly build, so we want to sign it @@ -260,7 +261,7 @@ if [ "${SIGN}" -eq 1 ]; then --version="$VERSION" \ --sign "$DEV_INSTALL_CERT" \ --timestamp \ - "$FILE" > "$CMD_OUTPUT" 2>&1; then + "$PKG_FILE" > "$CMD_OUTPUT" 2>&1; then _success else _failure @@ -268,12 +269,11 @@ if [ "${SIGN}" -eq 1 ]; then else _msg "Building the product package (unsigned)" # This is a nightly build, so we don't sign it - FILE="$SCRIPT_DIR/salt-$VERSION-py3-$CPU_ARCH-unsigned.pkg" if productbuild --resources="$SCRIPT_DIR/pkg-resources" \ --distribution="$DIST_XML" \ --package-path="$SCRIPT_DIR/salt-src-$VERSION-py3-$CPU_ARCH.pkg" \ --version="$VERSION" \ - "$FILE" > "$CMD_OUTPUT" 2>&1; then + "$PKG_FILE" > "$CMD_OUTPUT" 2>&1; then _success else _failure From d7091565809740e17acec38700482f39e25de2bd Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Fri, 21 Apr 2023 18:56:10 +0100 Subject: [PATCH 026/121] Restore backup should always happen on failures and when cancellations occur Signed-off-by: Pedro Algarvio --- .github/workflows/release.yml | 2 +- .github/workflows/templates/release.yml.jinja | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 262820ce8f6..bbb66aa1309 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -878,7 +878,7 @@ jobs: restore: name: Restore Release Bucket From Backup - if: ${{ failure() || cancelled() }} + if: ${{ always() && (failure() || cancelled()) }} runs-on: - self-hosted - linux diff --git a/.github/workflows/templates/release.yml.jinja b/.github/workflows/templates/release.yml.jinja index 2bcfb8d14ac..d4fea6562da 100644 --- a/.github/workflows/templates/release.yml.jinja +++ b/.github/workflows/templates/release.yml.jinja @@ -350,7 +350,7 @@ permissions: restore: <%- do conclusion_needs.append('restore') %> name: Restore Release Bucket From Backup - if: ${{ failure() || cancelled() }} + if: ${{ always() && (failure() || cancelled()) }} runs-on: - self-hosted - linux From 99a7e54b6e3b231666eeeaf21958baf33485bb7a Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Fri, 21 Apr 2023 19:03:23 +0100 Subject: [PATCH 027/121] Only restore when backup has completed Signed-off-by: Pedro Algarvio --- .github/workflows/release.yml | 6 +++++- .github/workflows/templates/release.yml.jinja | 6 +++++- tools/pkg/repo.py | 4 ++++ 3 files changed, 14 insertions(+), 2 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index bbb66aa1309..8e6c8ac7c73 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -149,6 +149,8 @@ jobs: needs: - prepare-workflow environment: release + outputs: + backup-complete: ${{ steps.backup.outputs.backup-complete }} steps: - name: Clone The Salt Repository @@ -163,6 +165,7 @@ jobs: uses: ./.github/actions/setup-python-tools-scripts - name: Backup Previous Releases + id: backup run: | tools pkg repo backup-previous-releases @@ -878,12 +881,13 @@ jobs: restore: name: Restore Release Bucket From Backup - if: ${{ always() && (failure() || cancelled()) }} + if: ${{ 
always() && needs.backup.outputs.backup-complete == 'true' && (failure() || cancelled()) }} runs-on: - self-hosted - linux - repo-release needs: + - backup - release environment: release steps: diff --git a/.github/workflows/templates/release.yml.jinja b/.github/workflows/templates/release.yml.jinja index d4fea6562da..77fde28848a 100644 --- a/.github/workflows/templates/release.yml.jinja +++ b/.github/workflows/templates/release.yml.jinja @@ -183,6 +183,8 @@ permissions: needs: - prepare-workflow environment: <{ gh_environment }> + outputs: + backup-complete: ${{ steps.backup.outputs.backup-complete }} steps: - name: Clone The Salt Repository @@ -197,6 +199,7 @@ permissions: uses: ./.github/actions/setup-python-tools-scripts - name: Backup Previous Releases + id: backup run: | tools pkg repo backup-previous-releases @@ -350,12 +353,13 @@ permissions: restore: <%- do conclusion_needs.append('restore') %> name: Restore Release Bucket From Backup - if: ${{ always() && (failure() || cancelled()) }} + if: ${{ always() && needs.backup.outputs.backup-complete == 'true' && (failure() || cancelled()) }} runs-on: - self-hosted - linux - repo-<{ gh_environment }> needs: + - backup - release <%- for need in test_repo_needs.iter(consume=True) %> - <{ need }> diff --git a/tools/pkg/repo.py b/tools/pkg/repo.py index cab8495b2f1..25e06a6e072 100644 --- a/tools/pkg/repo.py +++ b/tools/pkg/repo.py @@ -870,6 +870,10 @@ def restore_previous_releases(ctx: Context): Restore release bucket from backup. """ _rclone(ctx, tools.utils.BACKUP_BUCKET_NAME, tools.utils.RELEASE_BUCKET_NAME) + github_output = os.environ.get("GITHUB_OUTPUT") + if github_output is not None: + with open(github_output, "a", encoding="utf-8") as wfh: + wfh.write(f"backup-complete=true\n") ctx.info("Done") From 98b2c9dbb8ffb48f3f5b2719db2ff22ff5a7a5d7 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Sat, 22 Apr 2023 12:00:29 +0100 Subject: [PATCH 028/121] Create a `latest` symlink on nightly builds pointing to the latest date Signed-off-by: Pedro Algarvio --- tools/pkg/repo.py | 25 ++++++++++++++++++++++++- 1 file changed, 24 insertions(+), 1 deletion(-) diff --git a/tools/pkg/repo.py b/tools/pkg/repo.py index 25e06a6e072..a8c261f358a 100644 --- a/tools/pkg/repo.py +++ b/tools/pkg/repo.py @@ -198,6 +198,7 @@ def debian( """ ctx.info("Creating repository directory structure ...") create_repo_path = _create_top_level_repo_path( + ctx, repo_path, salt_version, distro, @@ -209,6 +210,7 @@ def debian( tools.utils.export_gpg_key(ctx, key_id, create_repo_path) create_repo_path = _create_repo_path( + ctx, repo_path, salt_version, distro, @@ -411,6 +413,7 @@ def rpm( ctx.info("Creating repository directory structure ...") create_repo_path = _create_top_level_repo_path( + ctx, repo_path, salt_version, distro, @@ -422,6 +425,7 @@ def rpm( tools.utils.export_gpg_key(ctx, key_id, create_repo_path) create_repo_path = _create_repo_path( + ctx, repo_path, salt_version, distro, @@ -1059,6 +1063,7 @@ def release(ctx: Context, salt_version: str): for distro in ("windows", "macos", "onedir"): create_repo_path = _create_repo_path( + ctx, repo_path, salt_version, distro=distro, @@ -1473,6 +1478,7 @@ def _create_onedir_based_repo( ): ctx.info("Creating repository directory structure ...") create_repo_path = _create_top_level_repo_path( + ctx, repo_path, salt_version, distro, @@ -1482,7 +1488,11 @@ def _create_onedir_based_repo( tools.utils.export_gpg_key(ctx, key_id, create_repo_path) create_repo_path = _create_repo_path( - repo_path, salt_version, distro, 
nightly_build_from=nightly_build_from + ctx, + repo_path, + salt_version, + distro, + nightly_build_from=nightly_build_from, ) if not nightly_build_from: repo_json_path = create_repo_path.parent.parent / "repo.json" @@ -1823,6 +1833,7 @@ def _publish_repo( def _create_top_level_repo_path( + ctx: Context, repo_path: pathlib.Path, salt_version: str, distro: str, @@ -1838,6 +1849,16 @@ def _create_top_level_repo_path( / nightly_build_from / datetime.utcnow().strftime("%Y-%m-%d") ) + create_repo_path.mkdir(exist_ok=True, parents=True) + with ctx.chdir(create_repo_path.parent): + latest_nightly_symlink = pathlib.Path("latest") + if not latest_nightly_symlink.exists(): + ctx.info( + f"Creating 'latest' symlink to '{create_repo_path.relative_to(repo_path)}' ..." + ) + latest_nightly_symlink.symlink_to( + create_repo_path.name, target_is_directory=True + ) elif "rc" in salt_version: create_repo_path = create_repo_path / "salt_rc" create_repo_path = create_repo_path / "salt" / "py3" / distro @@ -1850,6 +1871,7 @@ def _create_top_level_repo_path( def _create_repo_path( + ctx: Context, repo_path: pathlib.Path, salt_version: str, distro: str, @@ -1858,6 +1880,7 @@ def _create_repo_path( nightly_build_from: str | None = None, # pylint: disable=bad-whitespace ): create_repo_path = _create_top_level_repo_path( + ctx, repo_path, salt_version, distro, From 34728d5e018c3fc2c75f4a2964f2aa92815aec27 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Sat, 22 Apr 2023 10:55:13 +0100 Subject: [PATCH 029/121] Download onedir artifact directly from the buckets Signed-off-by: Pedro Algarvio --- .github/workflows/release.yml | 25 ++++-- .github/workflows/templates/release.yml.jinja | 25 ++++-- tools/release.py | 87 +++++++++++++++++++ 3 files changed, 119 insertions(+), 18 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 8e6c8ac7c73..f706151a34e 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -104,6 +104,7 @@ jobs: runs-on: - self-hosted - linux + - repo-release environment: release needs: - prepare-workflow @@ -122,15 +123,20 @@ jobs: - platform: darwin arch: x86_64 steps: - - name: Download Onedir Tarball as an Artifact(from a different workflow) - uses: dawidd6/action-download-artifact@v2 - with: - workflow: staging.yml - workflow_conclusion: "" - branch: ${{ github.event.ref }} - if_no_artifact_found: fail - name: salt-${{ inputs.salt-version }}-onedir-${{ matrix.platform }}-${{ matrix.arch }}.tar.xz - path: artifacts/ + - uses: actions/checkout@v3 + + - name: Setup Python Tools Scripts + uses: ./.github/actions/setup-python-tools-scripts + + - name: Get Salt Project GitHub Actions Bot Environment + run: | + TOKEN=$(curl -sS -f -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 30") + SPB_ENVIRONMENT=$(curl -sS -f -H "X-aws-ec2-metadata-token: $TOKEN" http://169.254.169.254/latest/meta-data/tags/instance/spb:environment) + echo "SPB_ENVIRONMENT=$SPB_ENVIRONMENT" >> "$GITHUB_ENV" + + - name: Download Onedir Tarball Artifact + run: | + tools release download-onedir-artifact --platform=${{ matrix.platform }} --arch=${{ matrix.arch }} ${{ inputs.salt-version }} - name: Upload Onedir Tarball as an Artifact uses: actions/upload-artifact@v3 @@ -178,6 +184,7 @@ jobs: needs: - prepare-workflow - backup + - download-onedir-artifact environment: release steps: diff --git a/.github/workflows/templates/release.yml.jinja b/.github/workflows/templates/release.yml.jinja index 77fde28848a..95fc1812120 100644 
--- a/.github/workflows/templates/release.yml.jinja +++ b/.github/workflows/templates/release.yml.jinja @@ -138,6 +138,7 @@ permissions: runs-on: - self-hosted - linux + - repo-<{ gh_environment }> environment: <{ gh_environment }> needs: - prepare-workflow @@ -156,15 +157,20 @@ permissions: - platform: darwin arch: x86_64 steps: - - name: Download Onedir Tarball as an Artifact(from a different workflow) - uses: dawidd6/action-download-artifact@v2 - with: - workflow: staging.yml - workflow_conclusion: "" - branch: ${{ github.event.ref }} - if_no_artifact_found: fail - name: salt-${{ inputs.salt-version }}-onedir-${{ matrix.platform }}-${{ matrix.arch }}.tar.xz - path: artifacts/ + - uses: actions/checkout@v3 + + - name: Setup Python Tools Scripts + uses: ./.github/actions/setup-python-tools-scripts + + - name: Get Salt Project GitHub Actions Bot Environment + run: | + TOKEN=$(curl -sS -f -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 30") + SPB_ENVIRONMENT=$(curl -sS -f -H "X-aws-ec2-metadata-token: $TOKEN" http://169.254.169.254/latest/meta-data/tags/instance/spb:environment) + echo "SPB_ENVIRONMENT=$SPB_ENVIRONMENT" >> "$GITHUB_ENV" + + - name: Download Onedir Tarball Artifact + run: | + tools release download-onedir-artifact --platform=${{ matrix.platform }} --arch=${{ matrix.arch }} ${{ inputs.salt-version }} - name: Upload Onedir Tarball as an Artifact uses: actions/upload-artifact@v3 @@ -213,6 +219,7 @@ permissions: needs: - prepare-workflow - backup + - download-onedir-artifact environment: <{ gh_environment }> steps: diff --git a/tools/release.py b/tools/release.py index ab6b3592d78..5764bd84f5b 100644 --- a/tools/release.py +++ b/tools/release.py @@ -117,6 +117,93 @@ def upload_artifacts(ctx: Context, salt_version: str, artifacts_path: pathlib.Pa pass +@release.command( + name="download-onedir-artifact", + arguments={ + "salt_version": { + "help": "The salt version to release.", + }, + "platform": { + "help": "The onedir platform archive to download.", + "required": True, + "choices": ("linux", "windows", "darwin", "macos"), + }, + "arch": { + "help": "The onedir arch archive to download.", + "required": True, + }, + }, +) +def download_onedir_artifact( + ctx: Context, salt_version: str, platform: str = "linux", arch: str = "x86_64" +): + """ + Download onedir artifact from staging bucket. + """ + s3 = boto3.client("s3") + if platform == "macos": + platform = "darwin" + if arch == "arm64": + arch = "aarch64" + arch = arch.lower() + platform = platform.lower() + if platform in ("linux", "darwin") and arch not in ("x86_64", "aarch64"): + ctx.error( + f"The 'arch' value for {platform} must be one of: 'x86_64', 'aarch64', 'arm64'" + ) + ctx.exit(1) + if platform == "windows" and arch not in ("x86", "amd64"): + ctx.error(f"The 'arch' value for {platform} must be one of: 'x86', 'amd64'") + ctx.exit(1) + + archive_name = f"salt-{salt_version}-onedir-{platform}-{arch}.tar.xz" + archive_path = tools.utils.REPO_ROOT / "artifacts" / archive_name + if "rc" in salt_version: + prefix = "salt_rc/salt" + else: + prefix = "salt" + remote_path = f"{prefix}/py3/onedir/minor/{salt_version}/{archive_name}" + archive_path.parent.mkdir() + try: + ret = s3.head_object(Bucket=tools.utils.STAGING_BUCKET_NAME, Key=remote_path) + size = ret["ContentLength"] + with archive_path.open("wb") as wfh: + ctx.info( + f"Downloading s3://{tools.utils.STAGING_BUCKET_NAME}/{remote_path} to {archive_path} ..." 
+            )
+ ) + with tools.utils.create_progress_bar(file_progress=True) as progress: + task = progress.add_task( + description="Downloading ...", + total=size, + ) + s3.download_fileobj( + Bucket=tools.utils.STAGING_BUCKET_NAME, + Key=remote_path, + Fileobj=wfh, + Callback=tools.utils.UpdateProgress(progress, task), + ) + except ClientError as exc: + if "Error" not in exc.response: + log.exception(f"Error downloading {remote_path}: {exc}") + ctx.exit(1) + if exc.response["Error"]["Code"] == "404": + ctx.error(f"Could not find {remote_path} in bucket.") + ctx.exit(1) + elif exc.response["Error"]["Code"].startswith("4"): + ctx.error(f"Could not download {remote_path} from bucket: {exc}") + ctx.exit(1) + else: + log.exception(f"Failed to download {remote_path}: {exc}") + ctx.exit(1) + + if not archive_path.exists(): + ctx.error(f"The {archive_path} does not exist") + ctx.exit(1) + if not archive_path.stat().st_size: + ctx.error(f"The {archive_path} size is zero!") + ctx.exit(1) + + @release.command( name="upload-virustotal", arguments={ From 8c93e294ca04af68d84571a3d6ef593b9e6b1e78 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Sun, 23 Apr 2023 08:16:52 +0100 Subject: [PATCH 030/121] Unique nox requirements uploads Signed-off-by: Pedro Algarvio --- .github/workflows/test-package-downloads-action-linux.yml | 2 +- .github/workflows/test-package-downloads-action-macos.yml | 2 +- .github/workflows/test-package-downloads-action-windows.yml | 2 +- .github/workflows/test-packages-action-macos.yml | 2 +- .github/workflows/test-packages-action.yml | 4 ++-- 5 files changed, 6 insertions(+), 6 deletions(-) diff --git a/.github/workflows/test-package-downloads-action-linux.yml b/.github/workflows/test-package-downloads-action-linux.yml index b922c2954e2..9dbc0eab33a 100644 --- a/.github/workflows/test-package-downloads-action-linux.yml +++ b/.github/workflows/test-package-downloads-action-linux.yml @@ -146,7 +146,7 @@ jobs: - name: Upload Nox Requirements Tarball uses: actions/upload-artifact@v3 with: - name: nox-${{ inputs.distro-slug }}-${{ inputs.nox-session }}-download + name: nox-${{ inputs.distro-slug }}-${{ inputs.nox-session }}-download-${{ inputs.arch }} path: nox.${{ inputs.distro-slug }}.tar.* test: diff --git a/.github/workflows/test-package-downloads-action-macos.yml b/.github/workflows/test-package-downloads-action-macos.yml index fdb321782f9..c5825a29d5b 100644 --- a/.github/workflows/test-package-downloads-action-macos.yml +++ b/.github/workflows/test-package-downloads-action-macos.yml @@ -130,7 +130,7 @@ jobs: - name: Upload Nox Requirements Tarball uses: actions/upload-artifact@v3 with: - name: nox-${{ inputs.distro-slug }}-${{ inputs.nox-session }}-download + name: nox-${{ inputs.distro-slug }}-${{ inputs.nox-session }}-download-${{ inputs.arch }} path: nox.${{ inputs.distro-slug }}.tar.* test: diff --git a/.github/workflows/test-package-downloads-action-windows.yml b/.github/workflows/test-package-downloads-action-windows.yml index d7525481b28..e518c299113 100644 --- a/.github/workflows/test-package-downloads-action-windows.yml +++ b/.github/workflows/test-package-downloads-action-windows.yml @@ -155,7 +155,7 @@ jobs: - name: Upload Nox Requirements Tarball uses: actions/upload-artifact@v3 with: - name: nox-${{ inputs.distro-slug }}-${{ inputs.nox-session }}-download + name: nox-${{ inputs.distro-slug }}-${{ inputs.pkg-type }}-${{ inputs.nox-session }}-download-${{ inputs.arch }} path: nox.${{ inputs.distro-slug }}.tar.* test: diff --git a/.github/workflows/test-packages-action-macos.yml 
b/.github/workflows/test-packages-action-macos.yml index f0231d256e5..b7de16fb5ac 100644 --- a/.github/workflows/test-packages-action-macos.yml +++ b/.github/workflows/test-packages-action-macos.yml @@ -154,7 +154,7 @@ jobs: - name: Upload Nox Requirements Tarball uses: actions/upload-artifact@v3 with: - name: nox-${{ inputs.distro-slug }}-${{ inputs.nox-session }} + name: nox-${{ inputs.distro-slug }}-${{ inputs.nox-session }}-${{ inputs.arch }} path: nox.${{ inputs.distro-slug }}.tar.* test: diff --git a/.github/workflows/test-packages-action.yml b/.github/workflows/test-packages-action.yml index ff19950bd46..71affc00877 100644 --- a/.github/workflows/test-packages-action.yml +++ b/.github/workflows/test-packages-action.yml @@ -170,9 +170,9 @@ jobs: id: nox-artifact-name run: | if [ "${{ contains(inputs.distro-slug, 'windows') }}" != "true" ]; then - echo "name=nox-${{ inputs.distro-slug }}-${{ inputs.nox-session }}" >> "${GITHUB_OUTPUT}" + echo "name=nox-${{ inputs.distro-slug }}-${{ inputs.nox-session }}-${{ inputs.arch }}" >> "${GITHUB_OUTPUT}" else - echo "name=nox-${{ inputs.distro-slug }}-${{ inputs.pkg-type }}-${{ inputs.nox-session }}" >> "${GITHUB_OUTPUT}" + echo "name=nox-${{ inputs.distro-slug }}-${{ inputs.pkg-type }}-${{ inputs.nox-session }}-${{ inputs.arch }}" >> "${GITHUB_OUTPUT}" fi - name: Upload Nox Requirements Tarball From 02661f81d2130a1561e50aa85c38cde0eeb416a6 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Sun, 23 Apr 2023 08:36:04 +0100 Subject: [PATCH 031/121] No automatic release bucket restores Signed-off-by: Pedro Algarvio --- .github/workflows/release.yml | 40 +++++-------------- .github/workflows/templates/layout.yml.jinja | 3 ++ .github/workflows/templates/release.yml.jinja | 18 ++++++++- 3 files changed, 28 insertions(+), 33 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index f706151a34e..a98be54dd1f 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -886,42 +886,12 @@ jobs: retention-days: 7 if-no-files-found: error - restore: - name: Restore Release Bucket From Backup - if: ${{ always() && needs.backup.outputs.backup-complete == 'true' && (failure() || cancelled()) }} - runs-on: - - self-hosted - - linux - - repo-release - needs: - - backup - - release - environment: release - steps: - - name: Clone The Salt Repository - uses: actions/checkout@v3 - with: - ssh-key: ${{ secrets.GHA_SSH_KEY }} - - - name: Setup Rclone - uses: AnimMouse/setup-rclone@v1 - with: - version: v1.61.1 - - - name: Setup Python Tools Scripts - uses: ./.github/actions/setup-python-tools-scripts - - - name: Restore Release Bucket - run: | - tools pkg repo restore-previous-releases - publish-pypi: name: Publish to PyPi if: ${{ always() && ! failure() && ! 
cancelled() && github.event.repository.fork != true }} needs: - prepare-workflow - release - - restore environment: release runs-on: - self-hosted @@ -980,13 +950,21 @@ jobs: - prepare-workflow - publish-repositories - release - - restore - publish-pypi steps: - name: Get workflow information id: get-workflow-info uses: technote-space/workflow-conclusion-action@v3 + - run: | + # shellcheck disable=SC2129 + if [ "${{ steps.get-workflow-info.outputs.conclusion }}" != "success" ]; then + echo 'To restore the release bucket run:' >> "${GITHUB_STEP_SUMMARY}" + echo '```' >> "${GITHUB_STEP_SUMMARY}" + echo 'tools pkg repo restore-previous-releases' >> "${GITHUB_STEP_SUMMARY}" + echo '```' >> "${GITHUB_STEP_SUMMARY}" + fi + - name: Set Pipeline Exit Status shell: bash run: | diff --git a/.github/workflows/templates/layout.yml.jinja b/.github/workflows/templates/layout.yml.jinja index bbbae394ff3..37b86d32da8 100644 --- a/.github/workflows/templates/layout.yml.jinja +++ b/.github/workflows/templates/layout.yml.jinja @@ -324,6 +324,9 @@ jobs: id: get-workflow-info uses: technote-space/workflow-conclusion-action@v3 + <%- block set_pipeline_exit_status_extra_steps %> + <%- endblock set_pipeline_exit_status_extra_steps %> + - name: Set Pipeline Exit Status shell: bash run: | diff --git a/.github/workflows/templates/release.yml.jinja b/.github/workflows/templates/release.yml.jinja index 95fc1812120..7d103b328d5 100644 --- a/.github/workflows/templates/release.yml.jinja +++ b/.github/workflows/templates/release.yml.jinja @@ -357,6 +357,7 @@ permissions: retention-days: 7 if-no-files-found: error + {#- Disable automatic backup restore restore: <%- do conclusion_needs.append('restore') %> name: Restore Release Bucket From Backup @@ -389,6 +390,7 @@ permissions: - name: Restore Release Bucket run: | tools pkg repo restore-previous-releases + #} publish-pypi: <%- do conclusion_needs.append('publish-pypi') %> @@ -396,8 +398,8 @@ permissions: if: ${{ always() && ! failure() && ! 
cancelled() && github.event.repository.fork != true }} needs: - prepare-workflow - - release - - restore + - release {#- Disable automatic backup restore + - restore #} environment: <{ gh_environment }> runs-on: - self-hosted @@ -446,3 +448,15 @@ permissions: tools pkg pypi-upload artifacts/release/salt-${{ needs.prepare-workflow.outputs.salt-version }}.tar.gz <%- endblock jobs %> + +<%- block set_pipeline_exit_status_extra_steps %> + + - run: | + # shellcheck disable=SC2129 + if [ "${{ steps.get-workflow-info.outputs.conclusion }}" != "success" ]; then + echo 'To restore the release bucket run:' >> "${GITHUB_STEP_SUMMARY}" + echo '```' >> "${GITHUB_STEP_SUMMARY}" + echo 'tools pkg repo restore-previous-releases' >> "${GITHUB_STEP_SUMMARY}" + echo '```' >> "${GITHUB_STEP_SUMMARY}" + fi +<%- endblock set_pipeline_exit_status_extra_steps %> From 9b8034221192559f5a320961199fa8173836df17 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Mon, 24 Apr 2023 11:53:21 +0100 Subject: [PATCH 032/121] Include `x86` in Windows package download tests Signed-off-by: Pedro Algarvio --- .github/workflows/release.yml | 52 ++++++++++++++++--- .github/workflows/staging.yml | 50 +++++++++++++++--- .../test-pkg-repo-downloads.yml.jinja | 9 ++-- 3 files changed, 96 insertions(+), 15 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index a98be54dd1f..37bfb331e9b 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -711,9 +711,9 @@ jobs: skip-code-coverage: true secrets: inherit - windows-2022-nsis-pkg-download-tests: + windows-2022-nsis-amd64-pkg-download-tests: if: ${{ inputs.skip-salt-pkg-download-test-suite == false }} - name: Test Windows 2022 NSIS Package Downloads + name: Test Windows 2022 amd64 NSIS Package Downloads needs: - prepare-workflow - publish-repositories @@ -730,9 +730,9 @@ jobs: skip-code-coverage: true secrets: inherit - windows-2022-msi-pkg-download-tests: + windows-2022-msi-amd64-pkg-download-tests: if: ${{ inputs.skip-salt-pkg-download-test-suite == false }} - name: Test Windows 2022 MSI Package Downloads + name: Test Windows 2022 amd64 MSI Package Downloads needs: - prepare-workflow - publish-repositories @@ -749,6 +749,44 @@ jobs: skip-code-coverage: true secrets: inherit + windows-2022-nsis-x86-pkg-download-tests: + if: ${{ inputs.skip-salt-pkg-download-test-suite == false }} + name: Test Windows 2022 x86 NSIS Package Downloads + needs: + - prepare-workflow + - publish-repositories + - download-onedir-artifact + uses: ./.github/workflows/test-package-downloads-action-windows.yml + with: + distro-slug: windows-2022 + platform: windows + arch: x86 + pkg-type: NSIS + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + environment: release + skip-code-coverage: true + secrets: inherit + + windows-2022-msi-x86-pkg-download-tests: + if: ${{ inputs.skip-salt-pkg-download-test-suite == false }} + name: Test Windows 2022 x86 MSI Package Downloads + needs: + - prepare-workflow + - publish-repositories + - download-onedir-artifact + uses: ./.github/workflows/test-package-downloads-action-windows.yml + with: + distro-slug: windows-2022 + platform: windows + arch: x86 + pkg-type: MSI + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + environment: release + skip-code-coverage: true + secrets: inherit + release: name: Release v${{ 
needs.prepare-workflow.outputs.salt-version }} if: ${{ always() && ! failure() && ! cancelled() }} @@ -788,8 +826,10 @@ jobs: - ubuntu-2204-pkg-download-tests - ubuntu-2204-arm64-pkg-download-tests - macos-12-pkg-download-tests - - windows-2022-nsis-pkg-download-tests - - windows-2022-msi-pkg-download-tests + - windows-2022-nsis-amd64-pkg-download-tests + - windows-2022-msi-amd64-pkg-download-tests + - windows-2022-nsis-x86-pkg-download-tests + - windows-2022-msi-x86-pkg-download-tests environment: release steps: - name: Clone The Salt Repository diff --git a/.github/workflows/staging.yml b/.github/workflows/staging.yml index ea1912e6e0f..0c26eb4a03a 100644 --- a/.github/workflows/staging.yml +++ b/.github/workflows/staging.yml @@ -2560,9 +2560,9 @@ jobs: skip-code-coverage: true secrets: inherit - windows-2022-nsis-pkg-download-tests: + windows-2022-nsis-amd64-pkg-download-tests: if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} - name: Test Windows 2022 NSIS Package Downloads + name: Test Windows 2022 amd64 NSIS Package Downloads needs: - prepare-workflow - publish-repositories @@ -2578,9 +2578,9 @@ jobs: skip-code-coverage: true secrets: inherit - windows-2022-msi-pkg-download-tests: + windows-2022-msi-amd64-pkg-download-tests: if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} - name: Test Windows 2022 MSI Package Downloads + name: Test Windows 2022 amd64 MSI Package Downloads needs: - prepare-workflow - publish-repositories @@ -2596,6 +2596,42 @@ jobs: skip-code-coverage: true secrets: inherit + windows-2022-nsis-x86-pkg-download-tests: + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + name: Test Windows 2022 x86 NSIS Package Downloads + needs: + - prepare-workflow + - publish-repositories + uses: ./.github/workflows/test-package-downloads-action-windows.yml + with: + distro-slug: windows-2022 + platform: windows + arch: x86 + pkg-type: NSIS + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + environment: staging + skip-code-coverage: true + secrets: inherit + + windows-2022-msi-x86-pkg-download-tests: + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + name: Test Windows 2022 x86 MSI Package Downloads + needs: + - prepare-workflow + - publish-repositories + uses: ./.github/workflows/test-package-downloads-action-windows.yml + with: + distro-slug: windows-2022 + platform: windows + arch: x86 + pkg-type: MSI + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + environment: staging + skip-code-coverage: true + secrets: inherit + publish-pypi: name: Publish to PyPi(test) if: ${{ github.event.repository.fork != true }} @@ -2674,8 +2710,10 @@ jobs: - ubuntu-2204-pkg-download-tests - ubuntu-2204-arm64-pkg-download-tests - macos-12-pkg-download-tests - - windows-2022-nsis-pkg-download-tests - - windows-2022-msi-pkg-download-tests + - windows-2022-nsis-amd64-pkg-download-tests + - windows-2022-msi-amd64-pkg-download-tests + - windows-2022-nsis-x86-pkg-download-tests + - windows-2022-msi-x86-pkg-download-tests environment: staging 
runs-on:
       - self-hosted
diff --git a/.github/workflows/templates/test-pkg-repo-downloads.yml.jinja b/.github/workflows/templates/test-pkg-repo-downloads.yml.jinja
index 25f3d91d5b4..80fb57794f1 100644
--- a/.github/workflows/templates/test-pkg-repo-downloads.yml.jinja
+++ b/.github/workflows/templates/test-pkg-repo-downloads.yml.jinja
@@ -61,7 +61,9 @@
   <%- endfor %>

-  <%- for slug, display_name, arch in (("macos-12", "macOS 12", "x86_64"),) %>
+  <%- for slug, display_name, arch in (
+    ("macos-12", "macOS 12", "x86_64"),
+  ) %>
   <%- set job_name = "{}-pkg-download-tests".format(slug.replace(".", "")) %>

   <{ job_name }>:
@@ -93,9 +95,10 @@

   <%- for slug, display_name, arch in (
     ("windows-2022", "Windows 2022", "amd64"),
+    ("windows-2022", "Windows 2022", "x86"),
   ) %>
   <%- for pkg_type in ("NSIS", "MSI") %>
-  <%- set job_name = "{}-{}-pkg-download-tests".format(slug.replace(".", ""), pkg_type.lower()) %>
+  <%- set job_name = "{}-{}-{}-pkg-download-tests".format(slug.replace(".", ""), pkg_type.lower(), arch.lower()) %>

   <{ job_name }>:
     <%- do test_repo_needs.append(job_name) %>
@@ -104,7 +107,7 @@
     <%- else %>
     if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
     <%- endif %>
-    name: Test <{ display_name }> <{ pkg_type }> Package Downloads
+    name: Test <{ display_name }> <{ arch }> <{ pkg_type }> Package Downloads
     needs:
       - prepare-workflow
       - publish-repositories

From 8caf5c94b15e071d6251df9b4e4c87d0382043c1 Mon Sep 17 00:00:00 2001
From: Pedro Algarvio
Date: Mon, 24 Apr 2023 15:34:24 +0100
Subject: [PATCH 033/121] Include Windows x86 package download tests in a
 separate PR

Signed-off-by: Pedro Algarvio

---
 .github/workflows/release.yml                 | 40 -------------------
 .github/workflows/staging.yml                 | 38 ------------------
 .../test-pkg-repo-downloads.yml.jinja         |  1 -
 3 files changed, 79 deletions(-)

diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
index 37bfb331e9b..be472904230 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -749,44 +749,6 @@ jobs:
       skip-code-coverage: true
     secrets: inherit

-  windows-2022-nsis-x86-pkg-download-tests:
-    if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
-    name: Test Windows 2022 x86 NSIS Package Downloads
-    needs:
-      - prepare-workflow
-      - publish-repositories
-      - download-onedir-artifact
-    uses: ./.github/workflows/test-package-downloads-action-windows.yml
-    with:
-      distro-slug: windows-2022
-      platform: windows
-      arch: x86
-      pkg-type: NSIS
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      environment: release
-      skip-code-coverage: true
-    secrets: inherit
-
-  windows-2022-msi-x86-pkg-download-tests:
-    if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
-    name: Test Windows 2022 x86 MSI Package Downloads
-    needs:
-      - prepare-workflow
-      - publish-repositories
-      - download-onedir-artifact
-    uses: ./.github/workflows/test-package-downloads-action-windows.yml
-    with:
-      distro-slug: windows-2022
-      platform: windows
-      arch: x86
-      pkg-type: MSI
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      environment: release
-      skip-code-coverage: true
-    secrets: inherit
-
   release:
     name: Release v${{ needs.prepare-workflow.outputs.salt-version }}
     if: ${{ always() && ! failure() && ! cancelled() }}
@@ -828,8 +790,6 @@ jobs:
       - macos-12-pkg-download-tests
       - windows-2022-nsis-amd64-pkg-download-tests
       - windows-2022-msi-amd64-pkg-download-tests
-      - windows-2022-nsis-x86-pkg-download-tests
-      - windows-2022-msi-x86-pkg-download-tests
     environment: release
     steps:
       - name: Clone The Salt Repository
diff --git a/.github/workflows/staging.yml b/.github/workflows/staging.yml
index 0c26eb4a03a..eee07f24328 100644
--- a/.github/workflows/staging.yml
+++ b/.github/workflows/staging.yml
@@ -2596,42 +2596,6 @@ jobs:
       skip-code-coverage: true
     secrets: inherit

-  windows-2022-nsis-x86-pkg-download-tests:
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    name: Test Windows 2022 x86 NSIS Package Downloads
-    needs:
-      - prepare-workflow
-      - publish-repositories
-    uses: ./.github/workflows/test-package-downloads-action-windows.yml
-    with:
-      distro-slug: windows-2022
-      platform: windows
-      arch: x86
-      pkg-type: NSIS
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      environment: staging
-      skip-code-coverage: true
-    secrets: inherit
-
-  windows-2022-msi-x86-pkg-download-tests:
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    name: Test Windows 2022 x86 MSI Package Downloads
-    needs:
-      - prepare-workflow
-      - publish-repositories
-    uses: ./.github/workflows/test-package-downloads-action-windows.yml
-    with:
-      distro-slug: windows-2022
-      platform: windows
-      arch: x86
-      pkg-type: MSI
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      environment: staging
-      skip-code-coverage: true
-    secrets: inherit
-
   publish-pypi:
     name: Publish to PyPi(test)
     if: ${{ github.event.repository.fork != true }}
@@ -2712,8 +2676,6 @@ jobs:
       - macos-12-pkg-download-tests
       - windows-2022-nsis-amd64-pkg-download-tests
       - windows-2022-msi-amd64-pkg-download-tests
-      - windows-2022-nsis-x86-pkg-download-tests
-      - windows-2022-msi-x86-pkg-download-tests
     environment: staging
     runs-on:
       - self-hosted
diff --git a/.github/workflows/templates/test-pkg-repo-downloads.yml.jinja b/.github/workflows/templates/test-pkg-repo-downloads.yml.jinja
index 80fb57794f1..cb62f445c7a 100644
--- a/.github/workflows/templates/test-pkg-repo-downloads.yml.jinja
+++ b/.github/workflows/templates/test-pkg-repo-downloads.yml.jinja
@@ -95,7 +95,6 @@

   <%- for slug, display_name, arch in (
     ("windows-2022", "Windows 2022", "amd64"),
-    ("windows-2022", "Windows 2022", "x86"),
   ) %>
   <%- for pkg_type in ("NSIS", "MSI") %>
   <%- set job_name = "{}-{}-{}-pkg-download-tests".format(slug.replace(".", ""), pkg_type.lower(), arch.lower()) %>

From aa9e0ab88d01244027038ad5ffea7c067786e97a Mon Sep 17 00:00:00 2001
From: Pedro Algarvio
Date: Tue, 25 Apr 2023 05:53:10 +0100
Subject: [PATCH 034/121] Reports run on self-hosted runners, with the
 exception of macOS (for now)

Signed-off-by: Pedro Algarvio

---
 .github/workflows/test-action-macos.yml            |  4 ++--
 .github/workflows/test-action.yml                  |  5 -----
 .../test-package-downloads-action-linux.yml        | 15 ++++-----------
 .../test-package-downloads-action-windows.yml      | 15 ++++-----------
 4 files changed, 10 insertions(+), 29 deletions(-)

diff --git a/.github/workflows/test-action-macos.yml b/.github/workflows/test-action-macos.yml
index 6da084d692e..b8088c64522 100644
--- a/.github/workflows/test-action-macos.yml +++ b/.github/workflows/test-action-macos.yml @@ -517,10 +517,10 @@ jobs: run: | tree -a artifacts - - name: Set up Python 3.9 + - name: Set up Python ${{ inputs.python-version }} uses: actions/setup-python@v4 with: - python-version: "3.9" + python-version: "${{ inputs.python-version }}" - name: Install Nox run: | diff --git a/.github/workflows/test-action.yml b/.github/workflows/test-action.yml index 99f6ba35cf6..0982e7446eb 100644 --- a/.github/workflows/test-action.yml +++ b/.github/workflows/test-action.yml @@ -492,11 +492,6 @@ jobs: run: | tree -a artifacts - - name: Set up Python 3.9 - uses: actions/setup-python@v4 - with: - python-version: "3.9" - - name: Install Nox run: | python3 -m pip install 'nox==${{ env.NOX_VERSION }}' diff --git a/.github/workflows/test-package-downloads-action-linux.yml b/.github/workflows/test-package-downloads-action-linux.yml index 9dbc0eab33a..f1475a1df69 100644 --- a/.github/workflows/test-package-downloads-action-linux.yml +++ b/.github/workflows/test-package-downloads-action-linux.yml @@ -27,11 +27,6 @@ on: required: true type: string description: The environment to run tests against - python-version: - required: false - type: string - description: The python version to run tests with - default: "3.10" package-name: required: false type: string @@ -279,7 +274,10 @@ jobs: report: name: Reports for ${{ inputs.distro-slug }}(${{ inputs.arch }}) - runs-on: ubuntu-latest + runs-on: + - self-hosted + - linux + - x86_64 environment: ${{ inputs.environment }} if: always() && needs.test.result != 'cancelled' && needs.test.result != 'skipped' needs: @@ -301,11 +299,6 @@ jobs: run: | tree -a artifacts - - name: Set up Python ${{ inputs.python-version }} - uses: actions/setup-python@v4 - with: - python-version: "${{ inputs.python-version }}" - - name: Install Nox run: | python3 -m pip install 'nox==${{ env.NOX_VERSION }}' diff --git a/.github/workflows/test-package-downloads-action-windows.yml b/.github/workflows/test-package-downloads-action-windows.yml index e518c299113..4c253410647 100644 --- a/.github/workflows/test-package-downloads-action-windows.yml +++ b/.github/workflows/test-package-downloads-action-windows.yml @@ -31,11 +31,6 @@ on: required: true type: string description: The environment to run tests against - python-version: - required: false - type: string - description: The python version to run tests with - default: "3.10" package-name: required: false type: string @@ -290,7 +285,10 @@ jobs: report: name: Reports for ${{ inputs.distro-slug }}(${{ inputs.arch }}) - runs-on: ubuntu-latest + runs-on: + - self-hosted + - linux + - x86_64 environment: ${{ inputs.environment }} if: always() && needs.test.result != 'cancelled' && needs.test.result != 'skipped' needs: @@ -312,11 +310,6 @@ jobs: run: | tree -a artifacts - - name: Set up Python ${{ inputs.python-version }} - uses: actions/setup-python@v4 - with: - python-version: "${{ inputs.python-version }}" - - name: Install Nox run: | python3 -m pip install 'nox==${{ env.NOX_VERSION }}' From 1b257a4f8d44e4ba452afee1a8e5d1a634d71f17 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Tue, 25 Apr 2023 05:54:12 +0100 Subject: [PATCH 035/121] Prefix `update_winrepo` and `upload-virustotal` workflows with `release-` Signed-off-by: Pedro Algarvio --- .../workflows/{update_winrepo.yml => release-update-winrepo.yml} | 0 .../{upload-virustotal.yml => release-upload-virustotal.yml} | 0 2 files changed, 0 insertions(+), 0 deletions(-) rename 
.github/workflows/{update_winrepo.yml => release-update-winrepo.yml} (100%) rename .github/workflows/{upload-virustotal.yml => release-upload-virustotal.yml} (100%) diff --git a/.github/workflows/update_winrepo.yml b/.github/workflows/release-update-winrepo.yml similarity index 100% rename from .github/workflows/update_winrepo.yml rename to .github/workflows/release-update-winrepo.yml diff --git a/.github/workflows/upload-virustotal.yml b/.github/workflows/release-upload-virustotal.yml similarity index 100% rename from .github/workflows/upload-virustotal.yml rename to .github/workflows/release-upload-virustotal.yml From 80954cbc1ec6377bac43d004aca043a5a7e529f8 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Tue, 25 Apr 2023 10:11:00 +0100 Subject: [PATCH 036/121] Fix source repository paths Signed-off-by: Pedro Algarvio --- .github/workflows/nightly.yml | 11 +++++-- .github/workflows/staging.yml | 15 ++++++++-- .../templates/build-src-repo.yml.jinja | 11 +++++-- .github/workflows/templates/staging.yml.jinja | 4 +++ tools/pkg/repo.py | 30 ++++++++----------- 5 files changed, 45 insertions(+), 26 deletions(-) diff --git a/.github/workflows/nightly.yml b/.github/workflows/nightly.yml index 4d7792d86c3..a73620b37fa 100644 --- a/.github/workflows/nightly.yml +++ b/.github/workflows/nightly.yml @@ -1413,14 +1413,19 @@ jobs: --salt-version=${{ needs.prepare-workflow.outputs.salt-version }} \ --incoming=artifacts/pkgs/incoming --repo-path=artifacts/pkgs/repo + - name: Copy Files For Source Only Artifact Uploads + run: | + mkdir artifacts/src + find artifacts/pkgs/repo -type f -print -exec cp {} artifacts/src \; + - name: Upload Standalone Repository As An Artifact uses: actions/upload-artifact@v3 with: name: salt-${{ needs.prepare-workflow.outputs.salt-version }}-nightly-src-repo path: | - artifacts/pkgs/repo/salt/py3/src/${{ needs.prepare-workflow.outputs.salt-version }}/salt-${{ needs.prepare-workflow.outputs.salt-version }}.tar.gz - artifacts/pkgs/repo/salt/py3/src/${{ needs.prepare-workflow.outputs.salt-version }}/salt-${{ needs.prepare-workflow.outputs.salt-version }}.tar.gz.* - artifacts/pkgs/repo/salt/py3/src/${{ needs.prepare-workflow.outputs.salt-version }}/*-GPG-* + artifacts/src/salt-${{ needs.prepare-workflow.outputs.salt-version }}.tar.gz + artifacts/src/salt-${{ needs.prepare-workflow.outputs.salt-version }}.tar.gz.* + artifacts/src/*-GPG-* retention-days: 7 if-no-files-found: error diff --git a/.github/workflows/staging.yml b/.github/workflows/staging.yml index eee07f24328..c22a3856f1e 100644 --- a/.github/workflows/staging.yml +++ b/.github/workflows/staging.yml @@ -1414,14 +1414,19 @@ jobs: --salt-version=${{ needs.prepare-workflow.outputs.salt-version }} \ --incoming=artifacts/pkgs/incoming --repo-path=artifacts/pkgs/repo + - name: Copy Files For Source Only Artifact Uploads + run: | + mkdir artifacts/src + find artifacts/pkgs/repo -type f -print -exec cp {} artifacts/src \; + - name: Upload Standalone Repository As An Artifact uses: actions/upload-artifact@v3 with: name: salt-${{ needs.prepare-workflow.outputs.salt-version }}-staging-src-repo path: | - artifacts/pkgs/repo/salt/py3/src/${{ needs.prepare-workflow.outputs.salt-version }}/salt-${{ needs.prepare-workflow.outputs.salt-version }}.tar.gz - artifacts/pkgs/repo/salt/py3/src/${{ needs.prepare-workflow.outputs.salt-version }}/salt-${{ needs.prepare-workflow.outputs.salt-version }}.tar.gz.* - artifacts/pkgs/repo/salt/py3/src/${{ needs.prepare-workflow.outputs.salt-version }}/*-GPG-* + artifacts/src/salt-${{ 
needs.prepare-workflow.outputs.salt-version }}.tar.gz + artifacts/src/salt-${{ needs.prepare-workflow.outputs.salt-version }}.tar.gz.* + artifacts/src/*-GPG-* retention-days: 7 if-no-files-found: error @@ -2070,6 +2075,10 @@ jobs: name: Salt-${{ needs.prepare-workflow.outputs.salt-version }}.epub path: artifacts/release + - name: Show Release Artifacts + run: | + tree -a artifacts/release + - name: Upload Release Artifacts run: | tools release upload-artifacts ${{ needs.prepare-workflow.outputs.salt-version }} artifacts/release diff --git a/.github/workflows/templates/build-src-repo.yml.jinja b/.github/workflows/templates/build-src-repo.yml.jinja index 8931af711fd..6a9c2634e96 100644 --- a/.github/workflows/templates/build-src-repo.yml.jinja +++ b/.github/workflows/templates/build-src-repo.yml.jinja @@ -56,14 +56,19 @@ --salt-version=${{ needs.prepare-workflow.outputs.salt-version }} \ --incoming=artifacts/pkgs/incoming --repo-path=artifacts/pkgs/repo + - name: Copy Files For Source Only Artifact Uploads + run: | + mkdir artifacts/src + find artifacts/pkgs/repo -type f -print -exec cp {} artifacts/src \; + - name: Upload Standalone Repository As An Artifact uses: actions/upload-artifact@v3 with: name: salt-${{ needs.prepare-workflow.outputs.salt-version }}-<{ gh_environment }>-src-repo path: | - artifacts/pkgs/repo/salt/py3/src/${{ needs.prepare-workflow.outputs.salt-version }}/salt-${{ needs.prepare-workflow.outputs.salt-version }}.tar.gz - artifacts/pkgs/repo/salt/py3/src/${{ needs.prepare-workflow.outputs.salt-version }}/salt-${{ needs.prepare-workflow.outputs.salt-version }}.tar.gz.* - artifacts/pkgs/repo/salt/py3/src/${{ needs.prepare-workflow.outputs.salt-version }}/*-GPG-* + artifacts/src/salt-${{ needs.prepare-workflow.outputs.salt-version }}.tar.gz + artifacts/src/salt-${{ needs.prepare-workflow.outputs.salt-version }}.tar.gz.* + artifacts/src/*-GPG-* retention-days: 7 if-no-files-found: error diff --git a/.github/workflows/templates/staging.yml.jinja b/.github/workflows/templates/staging.yml.jinja index c34607eeeca..d5b4893e8da 100644 --- a/.github/workflows/templates/staging.yml.jinja +++ b/.github/workflows/templates/staging.yml.jinja @@ -132,6 +132,10 @@ concurrency: name: Salt-${{ needs.prepare-workflow.outputs.salt-version }}.epub path: artifacts/release + - name: Show Release Artifacts + run: | + tree -a artifacts/release + {#- - name: Download Release Documentation (PDF) diff --git a/tools/pkg/repo.py b/tools/pkg/repo.py index a8c261f358a..8c4bb3e4fce 100644 --- a/tools/pkg/repo.py +++ b/tools/pkg/repo.py @@ -322,10 +322,6 @@ def debian( latest_link = create_repo_path.parent.parent / "latest" ctx.info(f"Creating '{latest_link.relative_to(repo_path)}' symlink ...") latest_link.symlink_to(f"minor/{salt_version}") - else: - latest_link = create_repo_path.parent / "latest" - ctx.info(f"Creating '{latest_link.relative_to(repo_path)}' symlink ...") - latest_link.symlink_to(create_repo_path.name) ctx.info("Done") @@ -568,12 +564,6 @@ def rpm( latest_link.symlink_to(f"minor/{salt_version}") repo_file_path = create_repo_path.parent.parent / "latest.repo" _create_repo_file(repo_file_path, "latest") - else: - latest_link = create_repo_path.parent / "latest" - ctx.info(f"Creating '{latest_link.relative_to(repo_path)}' symlink ...") - latest_link.symlink_to(create_repo_path.name) - repo_file_path = create_repo_path.parent.parent / "latest.repo" - _create_repo_file(repo_file_path, "latest") ctx.info("Done") @@ -791,7 +781,16 @@ def src( assert key_id is not None ctx.info("Creating 
repository directory structure ...") - create_repo_path = repo_path / "salt" / "py3" / "src" / salt_version + create_repo_path = _create_top_level_repo_path( + ctx, + repo_path, + salt_version, + distro="src", + nightly_build_from=nightly_build_from, + ) + # Export the GPG key in use + tools.utils.export_gpg_key(ctx, key_id, create_repo_path) + create_repo_path = create_repo_path / salt_version create_repo_path.mkdir(exist_ok=True, parents=True) hashes_base_path = create_repo_path / f"salt-{salt_version}" for fpath in incoming.iterdir(): @@ -810,6 +809,8 @@ def src( wfh.write(f"{hexdigest} {dpath.name}\n") for fpath in create_repo_path.iterdir(): + if fpath.suffix in (".pub", ".gpg"): + continue tools.utils.gpg_sign(ctx, key_id, fpath) # Export the GPG key in use @@ -1577,10 +1578,6 @@ def _create_onedir_based_repo( ctx, bucket_name=bucket_name, repo_path=repo_path, repo_json_path=repo_json_path ) if nightly_build_from: - latest_link = create_repo_path.parent / "latest" - ctx.info(f"Creating '{latest_link.relative_to(repo_path)}' symlink ...") - latest_link.symlink_to(create_repo_path.name) - ctx.info(f"Writing {repo_json_path} ...") repo_json_path.write_text(json.dumps(repo_json, sort_keys=True)) return @@ -1888,8 +1885,7 @@ def _create_repo_path( distro_arch, nightly_build_from=nightly_build_from, ) - if not nightly_build_from: - create_repo_path = create_repo_path / "minor" / salt_version + create_repo_path = create_repo_path / "minor" / salt_version create_repo_path.mkdir(exist_ok=True, parents=True) return create_repo_path From d98afc03ebffbf167592c455ce30715ff97c2a9f Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Tue, 25 Apr 2023 10:16:06 +0100 Subject: [PATCH 037/121] Allow skipping the salt test suite on manually triggered nightly builds Signed-off-by: Pedro Algarvio --- .github/workflows/nightly.yml | 14 ++++++++++++-- .github/workflows/staging.yml | 1 + .github/workflows/templates/nightly.yml.jinja | 14 +++++++++++++- 3 files changed, 26 insertions(+), 3 deletions(-) diff --git a/.github/workflows/nightly.yml b/.github/workflows/nightly.yml index a73620b37fa..1bb92872ca9 100644 --- a/.github/workflows/nightly.yml +++ b/.github/workflows/nightly.yml @@ -6,7 +6,16 @@ name: Nightly run-name: "Nightly (branch: ${{ github.ref_name }})" on: - workflow_dispatch: {} + workflow_dispatch: + inputs: + skip-salt-test-suite: + type: boolean + default: false + description: Skip running the Salt test suite. + skip-salt-pkg-test-suite: + type: boolean + default: false + description: Skip running the Salt packages test suite. schedule: # https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#onschedule - cron: '0 1 * * *' # Every day at 1AM @@ -242,7 +251,7 @@ jobs: - name: Define Jobs id: define-jobs run: | - tools ci define-jobs ${{ github.event_name }} changed-files.json + tools ci define-jobs${{ inputs.skip-salt-test-suite && ' --skip-tests' || '' }}${{ inputs.skip-salt-pkg-test-suite && ' --skip-pkg-tests' || '' }} ${{ github.event_name }} changed-files.json - name: Check Defined Jobs run: | @@ -1977,6 +1986,7 @@ jobs: publish-repositories: name: Publish Repositories + if: ${{ always() && ! failure() && ! cancelled() }} runs-on: - self-hosted - linux diff --git a/.github/workflows/staging.yml b/.github/workflows/staging.yml index c22a3856f1e..94acc31b232 100644 --- a/.github/workflows/staging.yml +++ b/.github/workflows/staging.yml @@ -1980,6 +1980,7 @@ jobs: publish-repositories: name: Publish Repositories + if: ${{ always() && ! failure() && ! 
cancelled() }} runs-on: - self-hosted - linux diff --git a/.github/workflows/templates/nightly.yml.jinja b/.github/workflows/templates/nightly.yml.jinja index 292e84fd77c..d78faa49c0a 100644 --- a/.github/workflows/templates/nightly.yml.jinja +++ b/.github/workflows/templates/nightly.yml.jinja @@ -1,6 +1,8 @@ <%- set gh_environment = gh_environment|default("nightly") %> <%- set skip_test_coverage_check = skip_test_coverage_check|default("false") %> <%- set skip_junit_reports_check = skip_junit_reports_check|default("false") %> +<%- set prepare_workflow_skip_test_suite = "${{ inputs.skip-salt-test-suite && ' --skip-tests' || '' }}" %> +<%- set prepare_workflow_skip_pkg_test_suite = "${{ inputs.skip-salt-pkg-test-suite && ' --skip-pkg-tests' || '' }}" %> <%- set prepare_workflow_if_check = prepare_workflow_if_check|default("${{ fromJSON(needs.workflow-requirements.outputs.requirements-met) }}") %> <%- extends 'ci.yml.jinja' %> @@ -14,7 +16,16 @@ run-name: "<{ workflow_name }> (branch: ${{ github.ref_name }})" <%- block on %> on: - workflow_dispatch: {} + workflow_dispatch: + inputs: + skip-salt-test-suite: + type: boolean + default: false + description: Skip running the Salt test suite. + skip-salt-pkg-test-suite: + type: boolean + default: false + description: Skip running the Salt packages test suite. schedule: # https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#onschedule - cron: '0 1 * * *' # Every day at 1AM @@ -53,6 +64,7 @@ concurrency: publish-repositories: <%- do conclusion_needs.append('publish-repositories') %> name: Publish Repositories + if: ${{ always() && ! failure() && ! cancelled() }} runs-on: - self-hosted - linux From 4ee630759118e5f7147e70988a848c34bbe3fe0c Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Wed, 26 Apr 2023 18:32:55 +0100 Subject: [PATCH 038/121] Log the bucket names Signed-off-by: Pedro Algarvio --- tools/pkg/repo.py | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/tools/pkg/repo.py b/tools/pkg/repo.py index 8c4bb3e4fce..8bfc77395f6 100644 --- a/tools/pkg/repo.py +++ b/tools/pkg/repo.py @@ -952,10 +952,13 @@ def release(ctx: Context, salt_version: str): with tempfile.TemporaryDirectory(prefix=f"{salt_version}_release_") as tsd: local_release_files_path = pathlib.Path(tsd) / repo_release_files_path.name try: + bucket_name = tools.utils.STAGING_BUCKET_NAME with local_release_files_path.open("wb") as wfh: - ctx.info(f"Downloading {repo_release_files_path} ...") + ctx.info( + f"Downloading {repo_release_files_path} from bucket {bucket_name} ..." + ) s3.download_fileobj( - Bucket=tools.utils.STAGING_BUCKET_NAME, + Bucket=bucket_name, Key=str(repo_release_files_path), Fileobj=wfh, ) @@ -979,9 +982,11 @@ def release(ctx: Context, salt_version: str): ) try: with local_release_symlinks_path.open("wb") as wfh: - ctx.info(f"Downloading {repo_release_symlinks_path} ...") + ctx.info( + f"Downloading {repo_release_symlinks_path} from bucket {bucket_name} ..." + ) s3.download_fileobj( - Bucket=tools.utils.STAGING_BUCKET_NAME, + Bucket=bucket_name, Key=str(repo_release_symlinks_path), Fileobj=wfh, ) From 48dc65ca3a8068ff4441eb711c44f510b35bacb9 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Wed, 26 Apr 2023 18:46:18 +0100 Subject: [PATCH 039/121] Package signing is now a checkbox on the staging workflow. Disabled by default. 
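
Signing is now opt-in: the staging workflow gains a `sign-packages` boolean
input (default `false`), and the macOS and Windows package build workflows
only attempt to sign when that input is set and the signing secrets are
available, writing the decision to the GitHub step summary either way. As a
rough illustration (assuming the GitHub CLI and a `3006.x` branch; the input
names are the ones added in this commit, and the version is only an example),
a maintainer could dispatch a signed staging build like so:

    gh workflow run staging.yml --ref 3006.x \
      -f salt-version=3006.0 -f sign-packages=true

If the input is left unchecked, or the secrets are missing, the packages are
still built, just unsigned.
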
Signed-off-by: Pedro Algarvio --- .github/workflows/build-macos-packages.yml | 25 +++++++++++++++---- .github/workflows/build-windows-packages.yml | 25 +++++++++++++++---- .github/workflows/nightly.yml | 2 ++ .github/workflows/staging.yml | 6 +++++ .../templates/build-packages.yml.jinja | 1 + .github/workflows/templates/staging.yml.jinja | 4 +++ 6 files changed, 53 insertions(+), 10 deletions(-) diff --git a/.github/workflows/build-macos-packages.yml b/.github/workflows/build-macos-packages.yml index 54a5af28bb8..9e07834fea0 100644 --- a/.github/workflows/build-macos-packages.yml +++ b/.github/workflows/build-macos-packages.yml @@ -8,6 +8,10 @@ on: type: string required: true description: The Salt version to set prior to building packages. + sign-packages: + type: boolean + default: false + description: Sign Packages environment: type: string description: The GitHub Environment where this workflow should run @@ -31,12 +35,23 @@ jobs: shell: bash id: check-pkg-sign run: | - if [ "${{ (secrets.MAC_SIGN_APPLE_ACCT != '' && contains(fromJSON('["nightly", "staging"]'), inputs.environment)) && 'true' || 'false' }}" != "true" ]; then - echo "The packages created will NOT be signed" - echo "sign-pkgs=false" >> "$GITHUB_OUTPUT" + if [ "${{ inputs.sign-packages }}" == "true" ]; then + if [ "${{ (secrets.MAC_SIGN_APPLE_ACCT != '' && contains(fromJSON('["nightly", "staging"]'), inputs.environment)) && 'true' || 'false' }}" != "true" ]; then + MSG="Secrets for signing packages are not available. The packages created will NOT be signed." + echo "${MSG}" + echo "${MSG}" >> "${GITHUB_STEP_SUMMARY}" + echo "sign-pkgs=false" >> "$GITHUB_OUTPUT" + else + MSG="The packages created WILL be signed." + echo "${MSG}" + echo "${MSG}" >> "${GITHUB_STEP_SUMMARY}" + echo "sign-pkgs=true" >> "$GITHUB_OUTPUT" + fi else - echo "The packages created WILL be signed" - echo "sign-pkgs=true" >> "$GITHUB_OUTPUT" + MSG="The sign-packages input is false. The packages created will NOT be signed." + echo "${MSG}" + echo "${MSG}" >> "${GITHUB_STEP_SUMMARY}" + echo "sign-pkgs=false" >> "$GITHUB_OUTPUT" fi - uses: actions/checkout@v3 diff --git a/.github/workflows/build-windows-packages.yml b/.github/workflows/build-windows-packages.yml index 4831a59e718..b50d7cdc618 100644 --- a/.github/workflows/build-windows-packages.yml +++ b/.github/workflows/build-windows-packages.yml @@ -8,6 +8,10 @@ on: type: string required: true description: The Salt version to set prior to building packages + sign-packages: + type: boolean + default: false + description: Sign Packages environment: type: string description: The GitHub Environment where this workflow should run @@ -40,12 +44,23 @@ jobs: shell: bash id: check-pkg-sign run: | - if [ "${{ (secrets.WIN_SIGN_API_KEY != '' && env.SM_HOST != '' && inputs.environment == 'staging') && 'true' || 'false' }}" != "true" ]; then - echo "The packages created will NOT be signed" - echo "sign-pkgs=false" >> "$GITHUB_OUTPUT" + if [ "${{ inputs.sign-packages }}" == "true" ]; then + if [ "${{ (secrets.WIN_SIGN_API_KEY != '' && env.SM_HOST != '' && inputs.environment == 'staging') && 'true' || 'false' }}" != "true" ]; then + MSG="Secrets for signing packages are not available. The packages created will NOT be signed." + echo "${MSG}" + echo "${MSG}" >> "${GITHUB_STEP_SUMMARY}" + echo "sign-pkgs=false" >> "$GITHUB_OUTPUT" + else + MSG="The packages created WILL be signed." 
+ echo "${MSG}" + echo "${MSG}" >> "${GITHUB_STEP_SUMMARY}" + echo "sign-pkgs=true" >> "$GITHUB_OUTPUT" + fi else - echo "The packages created WILL be signed" - echo "sign-pkgs=true" >> "$GITHUB_OUTPUT" + MSG="The sign-packages input is false. The packages created will NOT be signed." + echo "${MSG}" + echo "${MSG}" >> "${GITHUB_STEP_SUMMARY}" + echo "sign-pkgs=false" >> "$GITHUB_OUTPUT" fi - uses: actions/checkout@v3 diff --git a/.github/workflows/nightly.yml b/.github/workflows/nightly.yml index 1bb92872ca9..7d1fe6bcd13 100644 --- a/.github/workflows/nightly.yml +++ b/.github/workflows/nightly.yml @@ -545,6 +545,7 @@ jobs: with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: nightly + sign-packages: false secrets: inherit build-macos-pkgs: @@ -557,6 +558,7 @@ jobs: with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: nightly + sign-packages: false secrets: inherit amazonlinux-2-pkg-tests: diff --git a/.github/workflows/staging.yml b/.github/workflows/staging.yml index 94acc31b232..4e821731985 100644 --- a/.github/workflows/staging.yml +++ b/.github/workflows/staging.yml @@ -14,6 +14,10 @@ on: description: > The Salt version to set prior to building packages and staging the release. (DO NOT prefix the version with a v, ie, 3006.0 NOT v3006.0). + sign-packages: + type: boolean + default: false + description: Sign Windows and MacOS Packages skip-salt-test-suite: type: boolean default: false @@ -537,6 +541,7 @@ jobs: with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: staging + sign-packages: ${{ inputs.sign-packages }} secrets: inherit build-macos-pkgs: @@ -549,6 +554,7 @@ jobs: with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: staging + sign-packages: ${{ inputs.sign-packages }} secrets: inherit amazonlinux-2-pkg-tests: diff --git a/.github/workflows/templates/build-packages.yml.jinja b/.github/workflows/templates/build-packages.yml.jinja index 87a87d69b28..798a739b5fe 100644 --- a/.github/workflows/templates/build-packages.yml.jinja +++ b/.github/workflows/templates/build-packages.yml.jinja @@ -18,6 +18,7 @@ salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" <%- if pkg_type in ("macos", "windows") and gh_environment %> environment: <{ gh_environment }> + sign-packages: <% if gh_environment == 'nightly' -%> false <%- else -%> ${{ inputs.sign-packages }} <%- endif %> secrets: inherit <%- endif %> diff --git a/.github/workflows/templates/staging.yml.jinja b/.github/workflows/templates/staging.yml.jinja index d5b4893e8da..0ee22cb83d9 100644 --- a/.github/workflows/templates/staging.yml.jinja +++ b/.github/workflows/templates/staging.yml.jinja @@ -27,6 +27,10 @@ on: description: > The Salt version to set prior to building packages and staging the release. (DO NOT prefix the version with a v, ie, 3006.0 NOT v3006.0). + sign-packages: + type: boolean + default: false + description: Sign Windows and MacOS Packages skip-salt-test-suite: type: boolean default: false From f7aa91c29e3cda17c2713774eb681bbf3f656454 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Wed, 26 Apr 2023 18:56:14 +0100 Subject: [PATCH 040/121] Confirm that the release was staged before continuing with the release process. 
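
The release pipeline's `prepare-workflow` job now runs
`tools pkg repo confirm-staged`, which looks for the `.release-files.json`
and `.release-symlinks.json` artifacts that staging writes under
`release-artifacts/<salt-version>/` in the staging bucket, and aborts early
when either one is missing. A minimal sketch of running the same check by
hand (assuming AWS credentials with read access to the staging bucket; the
version number is only an example):

    tools pkg repo confirm-staged --repository saltstack/salt 3006.0

The command exits non-zero if the artifacts are not found, so a release can
no longer proceed against a version that was never staged.
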
Signed-off-by: Pedro Algarvio --- .github/workflows/release.yml | 12 ++++- .github/workflows/templates/release.yml.jinja | 12 ++++- tools/pkg/repo.py | 51 +++++++++++++++++++ 3 files changed, 73 insertions(+), 2 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index be472904230..e79b5c3bfad 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -57,7 +57,11 @@ jobs: prepare-workflow: name: Prepare Workflow Run - runs-on: ubuntu-latest + runs-on: + - self-hosted + - linux + - repo-release + environment: release needs: - check-requirements outputs: @@ -93,6 +97,12 @@ jobs: run: | tools pkg repo confirm-unreleased --repository ${{ github.repository }} ${{ steps.setup-salt-version.outputs.salt-version }} + - name: Check Release Staged + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + tools pkg repo confirm-staged --repository ${{ github.repository }} ${{ steps.setup-salt-version.outputs.salt-version }} + - name: Set Cache Seed Output id: set-cache-seed run: | diff --git a/.github/workflows/templates/release.yml.jinja b/.github/workflows/templates/release.yml.jinja index 7d103b328d5..92b123eeafe 100644 --- a/.github/workflows/templates/release.yml.jinja +++ b/.github/workflows/templates/release.yml.jinja @@ -81,7 +81,11 @@ permissions: prepare-workflow: name: Prepare Workflow Run - runs-on: ubuntu-latest + runs-on: + - self-hosted + - linux + - repo-<{ gh_environment }> + environment: <{ gh_environment }> <%- if prepare_workflow_needs %> needs: <%- for need in prepare_workflow_needs.iter(consume=False) %> @@ -121,6 +125,12 @@ permissions: run: | tools pkg repo confirm-unreleased --repository ${{ github.repository }} ${{ steps.setup-salt-version.outputs.salt-version }} + - name: Check Release Staged + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + tools pkg repo confirm-staged --repository ${{ github.repository }} ${{ steps.setup-salt-version.outputs.salt-version }} + - name: Set Cache Seed Output id: set-cache-seed run: | diff --git a/tools/pkg/repo.py b/tools/pkg/repo.py index 8bfc77395f6..7c27776385f 100644 --- a/tools/pkg/repo.py +++ b/tools/pkg/repo.py @@ -1351,6 +1351,57 @@ def confirm_unreleased( ctx.exit(0) +@repo.command( + name="confirm-staged", + arguments={ + "salt_version": { + "help": "The salt version to check", + }, + "repository": { + "help": ( + "The full repository name, ie, 'saltstack/salt' on GitHub " + "to run the checks against." + ) + }, + }, +) +def confirm_staged(ctx: Context, salt_version: str, repository: str = "saltstack/salt"): + """ + Confirm that the passed version has been staged for release. + """ + s3 = boto3.client("s3") + repo_release_files_path = pathlib.Path( + f"release-artifacts/{salt_version}/.release-files.json" + ) + repo_release_symlinks_path = pathlib.Path( + f"release-artifacts/{salt_version}/.release-symlinks.json" + ) + for remote_path in (repo_release_files_path, repo_release_symlinks_path): + try: + bucket_name = tools.utils.STAGING_BUCKET_NAME + ctx.info( + f"Checking for the presence of {remote_path} on bucket {bucket_name} ..." 
+            )
+            s3.head_object(
+                Bucket=bucket_name,
+                Key=str(remote_path),
+            )
+        except ClientError as exc:
+            if "Error" not in exc.response:
+                log.exception(f"Could not get information about {remote_path}: {exc}")
+                ctx.exit(1)
+            if exc.response["Error"]["Code"] == "404":
+                ctx.error(f"Could not find {remote_path} in bucket.")
+                ctx.exit(1)
+            if exc.response["Error"]["Code"] == "400":
+                ctx.error(f"Could not get information about {remote_path}: {exc}")
+                ctx.exit(1)
+            log.exception(f"Error getting information about {remote_path}: {exc}")
+            ctx.exit(1)
+    ctx.info(f"Version {salt_version} has been staged for release")
+    ctx.exit(0)
+
+
 def _get_salt_releases(ctx: Context, repository: str) -> list[Version]:
     """
     Return a list of salt versions

From e3ca4bfb19a53fd4fc8eaa20e42cff6ce82c47f9 Mon Sep 17 00:00:00 2001
From: Pedro Algarvio
Date: Thu, 27 Apr 2023 18:33:52 +0100
Subject: [PATCH 041/121] Always sign macOS packages. Leave only Windows package signing under a checkbox.

Signed-off-by: Pedro Algarvio
---
 .github/workflows/nightly.yml | 2 +-
 .github/workflows/staging.yml | 8 ++++----
 .github/workflows/templates/build-packages.yml.jinja | 6 +++++-
 .github/workflows/templates/staging.yml.jinja | 4 ++--
 4 files changed, 12 insertions(+), 8 deletions(-)

diff --git a/.github/workflows/nightly.yml b/.github/workflows/nightly.yml
index 7d1fe6bcd13..ca3f409acb1 100644
--- a/.github/workflows/nightly.yml
+++ b/.github/workflows/nightly.yml
@@ -558,7 +558,7 @@ jobs:
     with:
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
       environment: nightly
-      sign-packages: false
+      sign-packages: true
     secrets: inherit

diff --git a/.github/workflows/staging.yml b/.github/workflows/staging.yml
index 4e821731985..684b96183bd 100644
--- a/.github/workflows/staging.yml
+++ b/.github/workflows/staging.yml
@@ -14,10 +14,10 @@ on:
         description: >
           The Salt version to set prior to building packages and staging the release.
           (DO NOT prefix the version with a v, ie, 3006.0 NOT v3006.0). 
- sign-packages: + sign-windows-packages: type: boolean default: false - description: Sign Windows and MacOS Packages + description: Sign Windows Packages skip-salt-test-suite: type: boolean default: false @@ -541,7 +541,7 @@ jobs: with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: staging - sign-packages: ${{ inputs.sign-packages }} + sign-packages: ${{ inputs.sign-windows-packages }} secrets: inherit build-macos-pkgs: @@ -554,7 +554,7 @@ jobs: with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: staging - sign-packages: ${{ inputs.sign-packages }} + sign-packages: true secrets: inherit amazonlinux-2-pkg-tests: diff --git a/.github/workflows/templates/build-packages.yml.jinja b/.github/workflows/templates/build-packages.yml.jinja index 798a739b5fe..7646dd6e3e5 100644 --- a/.github/workflows/templates/build-packages.yml.jinja +++ b/.github/workflows/templates/build-packages.yml.jinja @@ -18,7 +18,11 @@ salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" <%- if pkg_type in ("macos", "windows") and gh_environment %> environment: <{ gh_environment }> - sign-packages: <% if gh_environment == 'nightly' -%> false <%- else -%> ${{ inputs.sign-packages }} <%- endif %> + <%- if pkg_type == "macos" %> + sign-packages: true + <%- else %> + sign-packages: <% if gh_environment == 'nightly' -%> false <%- else -%> ${{ inputs.sign-windows-packages }} <%- endif %> + <%- endif %> secrets: inherit <%- endif %> diff --git a/.github/workflows/templates/staging.yml.jinja b/.github/workflows/templates/staging.yml.jinja index 0ee22cb83d9..548faa7a5e2 100644 --- a/.github/workflows/templates/staging.yml.jinja +++ b/.github/workflows/templates/staging.yml.jinja @@ -27,10 +27,10 @@ on: description: > The Salt version to set prior to building packages and staging the release. (DO NOT prefix the version with a v, ie, 3006.0 NOT v3006.0). 
- sign-packages: + sign-windows-packages: type: boolean default: false - description: Sign Windows and MacOS Packages + description: Sign Windows Packages skip-salt-test-suite: type: boolean default: false From 8f52a3c1f75cc29750f2fd579155ee634898b32f Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Wed, 26 Apr 2023 10:32:16 +0100 Subject: [PATCH 042/121] Add backports support Signed-off-by: Pedro Algarvio (cherry picked from commit 99cb7c0e084f2b47ef3a6e9aa9c46b35c3676cc1) --- .backportrc.json | 15 +++++++++++ .github/workflows/backport.yml | 48 ++++++++++++++++++++++++++++++++++ 2 files changed, 63 insertions(+) create mode 100644 .backportrc.json create mode 100644 .github/workflows/backport.yml diff --git a/.backportrc.json b/.backportrc.json new file mode 100644 index 00000000000..b988c16660f --- /dev/null +++ b/.backportrc.json @@ -0,0 +1,15 @@ +{ + "repoOwner": "saltstack", + "repoName": "salt", + "targetBranchChoices": ["master", "3006.x", "3005.x"], + "autoMerge": false, + "autoMergeMethod": "rebase", + "branchLabelMapping": { + "^backport:(.+)$": "$1" + }, + "prTitle": "[BACKPORT] {commitMessages}", + "publishStatusCommentOnSuccess": true, + "sourcePRLabels": [ + "backport:complete" + ] +} diff --git a/.github/workflows/backport.yml b/.github/workflows/backport.yml new file mode 100644 index 00000000000..b70b84df5b3 --- /dev/null +++ b/.github/workflows/backport.yml @@ -0,0 +1,48 @@ +name: Backport PR +run-name: "Backport PR #${{ github.event.number }}" + +on: + pull_request_target: + types: + - "labeled" + - "closed" + +permissions: + contents: write + pull-requests: write + +jobs: + backport: + name: Backport PR + runs-on: + - ubuntu-latest + if: | + github.event.pull_request.merged == true + && ( + contains(github.event.pull_request.labels.*.name, 'backport:master') || + contains(github.event.pull_request.labels.*.name, 'backport:3006.x') || + contains(github.event.pull_request.labels.*.name, 'backport:3005.x') + ) + && ( + (github.event.action == 'labeled' && ( + contains(github.event.pull_request.labels.*.name, 'backport:master') || + contains(github.event.pull_request.labels.*.name, 'backport:3006.x') || + contains(github.event.pull_request.labels.*.name, 'backport:3005.x') + )) + || (github.event.action == 'closed') + ) + steps: + - name: Backport Action + uses: sqren/backport-github-action@v8.9.7 + with: + github_token: ${{ secrets.GITHUB_TOKEN }} + auto_backport_label_prefix: "backport:" + add_original_reviewers: true + + - name: Info log + if: ${{ success() }} + run: jq -C '.' ~/.backport/backport.info.log + + - name: Debug log + if: ${{ failure() }} + run: jq -C '.' 
~/.backport/backport.debug.log From 9dc95bec1910cb664b07166af998643072398db3 Mon Sep 17 00:00:00 2001 From: MKLeb Date: Mon, 1 May 2023 16:10:27 -0400 Subject: [PATCH 043/121] Default to a 0 timeout if none is given in the terraform roster --- salt/roster/terraform.py | 18 +++- .../pytests/integration/ssh/test_terraform.py | 92 +++++++++++++++++++ tests/pytests/unit/roster/test_terraform.py | 4 - 3 files changed, 107 insertions(+), 7 deletions(-) create mode 100644 tests/pytests/integration/ssh/test_terraform.py diff --git a/salt/roster/terraform.py b/salt/roster/terraform.py index 0c9f13df2cf..626f0f103c5 100644 --- a/salt/roster/terraform.py +++ b/salt/roster/terraform.py @@ -92,7 +92,9 @@ def _handle_old_salt_host_resource(resource): ret[MINION_ID] = attrs.get(MINION_ID) valid_attrs = set(attrs.keys()).intersection(TF_ROSTER_ATTRS.keys()) for attr in valid_attrs: - ret[attr] = _cast_output_to_type(attrs.get(attr), TF_ROSTER_ATTRS.get(attr)) + ret[attr] = _cast_output_to_type( + attr, attrs.get(attr), TF_ROSTER_ATTRS.get(attr) + ) return ret @@ -110,7 +112,9 @@ def _handle_new_salt_host_resource(resource): ret[MINION_ID] = attrs.get(MINION_ID) valid_attrs = set(attrs.keys()).intersection(TF_ROSTER_ATTRS.keys()) for attr in valid_attrs: - ret[attr] = _cast_output_to_type(attrs.get(attr), TF_ROSTER_ATTRS.get(attr)) + ret[attr] = _cast_output_to_type( + attr, attrs.get(attr), TF_ROSTER_ATTRS.get(attr) + ) log.info(ret) rets.append(ret) return rets @@ -134,8 +138,16 @@ def _add_ssh_key(ret): ret["priv"] = priv -def _cast_output_to_type(value, typ): +def _cast_output_to_type(attr, value, typ): """cast the value depending on the terraform type""" + if value is None: + # Timeout needs to default to 0 if the value is None + # The ssh command that is run cannot handle `-o ConnectTimeout=None` + if attr == "timeout": + return 0 + else: + return value + if value is None: return value if typ == "b": diff --git a/tests/pytests/integration/ssh/test_terraform.py b/tests/pytests/integration/ssh/test_terraform.py new file mode 100644 index 00000000000..12194a48bae --- /dev/null +++ b/tests/pytests/integration/ssh/test_terraform.py @@ -0,0 +1,92 @@ +import textwrap + +import pytest + +import salt.utils.platform +from tests.support.runtests import RUNTIME_VARS + +pytestmark = [ + pytest.mark.skip_on_windows(reason="salt-ssh not available on Windows"), + pytest.mark.slow_test, +] + + +@pytest.fixture(scope="module") +def minion_id(): + return "terraform_ssh_minion" + + +@pytest.fixture(scope="module") +def terraform_roster_file(sshd_server, salt_master, tmp_path_factory, minion_id): + darwin_addon = "" + if salt.utils.platform.is_darwin(): + darwin_addon = ',\n "set_path": "$PATH:/usr/local/bin/"\n' + roster_contents = textwrap.dedent( + """ {{ + "version": 4, + "terraform_version": "1.4.3", + "serial": 1, + "outputs": {{}}, + "resources": [ + {{ + "mode": "managed", + "type": "salt_host", + "name": "{minion}", + "instances": [ + {{ + "schema_version": 0, + "attributes": {{ + "cmd_umask": null, + "host": "localhost", + "id": "{minion}", + "minion_opts": null, + "passwd": "", + "port": {port}, + "priv": null, + "salt_id": "{minion}", + "sudo": null, + "sudo_user": null, + "thin_dir": null, + "timeout": null, + "tty": null, + "user": "{user}"{darwin_addon} + }} + }} + ] + }} + ], + "check_results": null + }} + """ + ).format( + minion=minion_id, + port=sshd_server.listen_port, + user=RUNTIME_VARS.RUNNING_TESTS_USER, + darwin_addon=darwin_addon, + ) + roster_file = tmp_path_factory.mktemp("terraform_roster") / 
"terraform.tfstate" + roster_file.write_text(roster_contents) + yield roster_file + roster_file.unlink() + + +@pytest.fixture(scope="module") +def salt_ssh_cli(salt_master, terraform_roster_file, sshd_config_dir): + """ + The ``salt-ssh`` CLI as a fixture against the running master + """ + assert salt_master.is_running() + return salt_master.salt_ssh_cli( + roster_file=terraform_roster_file, + target_host="*", + client_key=str(sshd_config_dir / "client_key"), + base_script_args=["--ignore-host-keys"], + ) + + +def test_terraform_roster(salt_ssh_cli, minion_id): + """ + Test that the terraform roster operates as intended + """ + ret = salt_ssh_cli.run("--roster=terraform", "test.ping") + assert ret.data.get(minion_id) is True diff --git a/tests/pytests/unit/roster/test_terraform.py b/tests/pytests/unit/roster/test_terraform.py index 730c640fab2..b79d7985461 100644 --- a/tests/pytests/unit/roster/test_terraform.py +++ b/tests/pytests/unit/roster/test_terraform.py @@ -27,10 +27,6 @@ def pki_dir(): @pytest.fixture def configure_loader_modules(roster_file, pki_dir): - # opts = salt.config.master_config( - # os.path.join(RUNTIME_VARS.TMP_CONF_DIR, "master") - # ) - # utils = salt.loader.utils(opts, whitelist=["roster_matcher"]) return { terraform: { "__utils__": { From 6b926b70e84ca92bbc95027dc5ccf9140eeb00f8 Mon Sep 17 00:00:00 2001 From: MKLeb Date: Mon, 1 May 2023 16:14:06 -0400 Subject: [PATCH 044/121] changelog --- changelog/64109.fixed.md | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog/64109.fixed.md diff --git a/changelog/64109.fixed.md b/changelog/64109.fixed.md new file mode 100644 index 00000000000..59c884cb869 --- /dev/null +++ b/changelog/64109.fixed.md @@ -0,0 +1 @@ +Default to a 0 timeout if none is given for the terraform roster to avoid `-o ConnectTimeout=None` when using `salt-ssh` From 7e89e3dd9dfa8b3ba1ea96e8b144ecd7fbe5b4ac Mon Sep 17 00:00:00 2001 From: Twangboy Date: Mon, 1 May 2023 17:15:49 -0600 Subject: [PATCH 045/121] Include seed in Windows packages --- pkg/windows/prep_salt.ps1 | 1 - 1 file changed, 1 deletion(-) diff --git a/pkg/windows/prep_salt.ps1 b/pkg/windows/prep_salt.ps1 index 85c5bc4c813..a3ee01a36d3 100644 --- a/pkg/windows/prep_salt.ps1 +++ b/pkg/windows/prep_salt.ps1 @@ -360,7 +360,6 @@ $modules = "acme", "runit", "s6", "scsi", - "seed", "sensors", "service", "shadow", From c2a871859c4ab2e4f41adcf4811fd32cdbb8cb7f Mon Sep 17 00:00:00 2001 From: Twangboy Date: Mon, 1 May 2023 17:18:09 -0600 Subject: [PATCH 046/121] Don't forget tools --- pkg/common/env-cleanup-rules.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/pkg/common/env-cleanup-rules.yml b/pkg/common/env-cleanup-rules.yml index c04e99fdc0a..6a0ff594a97 100644 --- a/pkg/common/env-cleanup-rules.yml +++ b/pkg/common/env-cleanup-rules.yml @@ -148,7 +148,6 @@ ci: - "**/site-packages/salt/modules/runit.py*" - "**/site-packages/salt/modules/s6.py*" - "**/site-packages/salt/modules/scsi.py*" - - "**/site-packages/salt/modules/seed.py*" - "**/site-packages/salt/modules/sensors.py*" - "**/site-packages/salt/modules/service.py*" - "**/site-packages/salt/modules/shadow.py*" From caaf5feff75bbb61fdc40dbb2a23d4f0a611ed66 Mon Sep 17 00:00:00 2001 From: "Daniel A. 
Wozniak" Date: Mon, 1 May 2023 14:41:58 -0700 Subject: [PATCH 047/121] Fix building package on amazon linux --- pkg/rpm/salt.spec | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/pkg/rpm/salt.spec b/pkg/rpm/salt.spec index 25a28ea59dd..0df3ec2e774 100644 --- a/pkg/rpm/salt.spec +++ b/pkg/rpm/salt.spec @@ -43,9 +43,13 @@ BuildRequires: python3 BuildRequires: python3-pip BuildRequires: openssl BuildRequires: git + +# rhel is not defined on all rpm based distros. +%if %{?rhel:1}%{!?rhel:0} %if %{rhel} >= 9 BuildRequires: libxcrypt-compat %endif +%endif %description Salt is a distributed remote execution system used to execute commands and From 13cd87b2450366f0ed4258be5ae5b019bc4e8690 Mon Sep 17 00:00:00 2001 From: Twangboy Date: Mon, 1 May 2023 14:55:15 -0600 Subject: [PATCH 048/121] Fix state for User policy --- salt/modules/win_lgpo_reg.py | 6 +- salt/states/win_lgpo_reg.py | 2 +- .../pytests/unit/modules/test_win_lgpo_reg.py | 443 ++++++++++++---- .../pytests/unit/states/test_win_lgpo_reg.py | 491 ++++++++++++++++-- 4 files changed, 793 insertions(+), 149 deletions(-) diff --git a/salt/modules/win_lgpo_reg.py b/salt/modules/win_lgpo_reg.py index 4052de62bd3..2fd04bd3c73 100644 --- a/salt/modules/win_lgpo_reg.py +++ b/salt/modules/win_lgpo_reg.py @@ -381,7 +381,7 @@ def set_value( else: pol_data[key] = {v_name: {"data": v_data, "type": v_type}} - write_reg_pol(pol_data) + write_reg_pol(pol_data, policy_class=policy_class) return salt.utils.win_reg.set_value( hive=hive, @@ -464,7 +464,7 @@ def disable_value(key, v_name, policy_class="machine"): else: pol_data[key] = {"**del.{}".format(v_name): {"data": " ", "type": "REG_SZ"}} - write_reg_pol(pol_data) + write_reg_pol(pol_data, policy_class=policy_class) return salt.utils.win_reg.delete_value(hive=hive, key=key, vname=v_name) @@ -534,7 +534,7 @@ def delete_value(key, v_name, policy_class="Machine"): else: return None - write_reg_pol(pol_data) + write_reg_pol(pol_data, policy_class=policy_class) return salt.utils.win_reg.delete_value(hive=hive, key=key, vname=v_name) diff --git a/salt/states/win_lgpo_reg.py b/salt/states/win_lgpo_reg.py index 7a514068acb..23120c6fe04 100644 --- a/salt/states/win_lgpo_reg.py +++ b/salt/states/win_lgpo_reg.py @@ -153,7 +153,7 @@ def value_present(name, key, v_data, v_type="REG_DWORD", policy_class="Machine") key=key, v_name=name, policy_class=policy_class ) - if str(new["data"]) == v_data and new["type"] == v_type: + if str(new["data"]) == str(v_data) and new["type"] == v_type: ret["comment"] = "Registry.pol value has been set" ret["result"] = True else: diff --git a/tests/pytests/unit/modules/test_win_lgpo_reg.py b/tests/pytests/unit/modules/test_win_lgpo_reg.py index 6d4a824b308..b2bd351018b 100644 --- a/tests/pytests/unit/modules/test_win_lgpo_reg.py +++ b/tests/pytests/unit/modules/test_win_lgpo_reg.py @@ -30,7 +30,7 @@ def configure_loader_modules(): @pytest.fixture -def empty_reg_pol(): +def empty_reg_pol_mach(): class_info = salt.utils.win_lgpo_reg.CLASS_INFO reg_pol_file = pathlib.Path(class_info["Machine"]["policy_path"]) if not reg_pol_file.parent.exists(): @@ -47,7 +47,24 @@ def empty_reg_pol(): @pytest.fixture -def reg_pol(): +def empty_reg_pol_user(): + class_info = salt.utils.win_lgpo_reg.CLASS_INFO + reg_pol_file = pathlib.Path(class_info["User"]["policy_path"]) + if not reg_pol_file.parent.exists(): + reg_pol_file.parent.mkdir(parents=True) + with salt.utils.files.fopen(str(reg_pol_file), "wb") as f: + f.write(salt.utils.win_lgpo_reg.REG_POL_HEADER.encode("utf-16-le")) + 
salt.utils.win_reg.delete_key_recursive(hive="HKCU", key="SOFTWARE\\MyKey1")
+    salt.utils.win_reg.delete_key_recursive(hive="HKCU", key="SOFTWARE\\MyKey2")
+    yield
+    salt.utils.win_reg.delete_key_recursive(hive="HKCU", key="SOFTWARE\\MyKey1")
+    salt.utils.win_reg.delete_key_recursive(hive="HKCU", key="SOFTWARE\\MyKey2")
+    with salt.utils.files.fopen(str(reg_pol_file), "wb") as f:
+        f.write(salt.utils.win_lgpo_reg.REG_POL_HEADER.encode("utf-16-le"))
+
+
 @pytest.fixture
-def reg_pol():
+def reg_pol_mach():
     data_to_write = {
         "SOFTWARE\\MyKey1": {
             "MyValue1": {
@@ -90,43 +107,75 @@ def reg_pol():
         f.write(salt.utils.win_lgpo_reg.REG_POL_HEADER.encode("utf-16-le"))
 
 
-def test_read_reg_pol(empty_reg_pol):
-    expected = {}
-    result = lgpo_reg.read_reg_pol()
-    assert result == expected
-
-
-def test_read_reg_pol_invalid_policy_class():
-    pytest.raises(SaltInvocationError, lgpo_reg.read_reg_pol, policy_class="Invalid")
-
-
-def test_write_reg_pol(empty_reg_pol):
+@pytest.fixture
+def reg_pol_user():
     data_to_write = {
-        r"SOFTWARE\MyKey": {
-            "MyValue": {
-                "data": "string",
+        "SOFTWARE\\MyKey1": {
+            "MyValue1": {
+                "data": "squidward",
+                "type": "REG_SZ",
+            },
+            "**del.MyValue2": {
+                "data": " ",
                 "type": "REG_SZ",
             },
         },
+        "SOFTWARE\\MyKey2": {
+            "MyValue3": {
+                "data": ["spongebob", "squarepants"],
+                "type": "REG_MULTI_SZ",
+            },
+        },
     }
-    lgpo_reg.write_reg_pol(data_to_write)
-    result = lgpo_reg.read_reg_pol()
-    assert result == data_to_write
+    lgpo_reg.write_reg_pol(data_to_write, policy_class="User")
+    salt.utils.win_reg.set_value(
+        hive="HKCU",
+        key="SOFTWARE\\MyKey1",
+        vname="MyValue1",
+        vdata="squidward",
+        vtype="REG_SZ",
+    )
+    salt.utils.win_reg.set_value(
+        hive="HKCU",
+        key="SOFTWARE\\MyKey2",
+        vname="MyValue3",
+        vdata=["spongebob", "squarepants"],
+        vtype="REG_MULTI_SZ",
+    )
+    yield
+    salt.utils.win_reg.delete_key_recursive(hive="HKCU", key="SOFTWARE\\MyKey1")
+    salt.utils.win_reg.delete_key_recursive(hive="HKCU", key="SOFTWARE\\MyKey2")
+    class_info = salt.utils.win_lgpo_reg.CLASS_INFO
+    reg_pol_file = class_info["User"]["policy_path"]
+    with salt.utils.files.fopen(reg_pol_file, "wb") as f:
+        f.write(salt.utils.win_lgpo_reg.REG_POL_HEADER.encode("utf-16-le"))
 
 
-def test_get_value(reg_pol):
-    expected = {"data": "squidward", "type": "REG_SZ"}
-    result = lgpo_reg.get_value(key="SOFTWARE\\MyKey1", v_name="MyValue1")
-    assert result == expected
+def test_invalid_policy_class_delete_value():
+    pytest.raises(
+        SaltInvocationError,
+        lgpo_reg.delete_value,
+        key="",
+        v_name="",
+        policy_class="Invalid",
+    )
 
 
-def test_get_value_invalid_policy_class():
+def test_invalid_policy_class_disable_value():
+    pytest.raises(
+        SaltInvocationError,
+        lgpo_reg.disable_value,
+        key="",
+        v_name="",
+        policy_class="Invalid",
+    )
+
+
+def test_invalid_policy_class_get_key():
+    pytest.raises(SaltInvocationError, lgpo_reg.get_key, key="", policy_class="Invalid")
+
+
+def test_invalid_policy_class_get_value():
     pytest.raises(
         SaltInvocationError,
        lgpo_reg.get_value,
@@ -136,73 +185,11 @@ def test_get_value_invalid_policy_class():
     )
 
 
-def test_get_key(reg_pol):
-    expected = {
-        "MyValue3": {
-            "data": ["spongebob", "squarepants"],
-            "type": "REG_MULTI_SZ",
-        },
-    }
-    result = lgpo_reg.get_key(key="SOFTWARE\\MyKey2")
-    assert result == expected
+def test_invalid_policy_class_read_reg_pol():
+    pytest.raises(SaltInvocationError, lgpo_reg.read_reg_pol, policy_class="Invalid")
 
 
-def 
test_get_key_invalid_policy_class(): - pytest.raises(SaltInvocationError, lgpo_reg.get_key, key="", policy_class="Invalid") - - -def test_set_value(empty_reg_pol): - key = "SOFTWARE\\MyKey" - v_name = "MyValue" - # Test command return - result = lgpo_reg.set_value(key=key, v_name=v_name, v_data="1") - assert result is True - # Test value actually set in Registry.pol - expected = {"data": 1, "type": "REG_DWORD"} - result = lgpo_reg.get_value(key=key, v_name=v_name) - assert result == expected - # Test that the registry value has been set - expected = { - "hive": "HKLM", - "key": key, - "vname": v_name, - "vdata": 1, - "vtype": "REG_DWORD", - "success": True, - } - result = salt.utils.win_reg.read_value(hive="HKLM", key=key, vname=v_name) - assert result == expected - - -def test_set_value_existing_change(reg_pol): - expected = {"data": 1, "type": "REG_DWORD"} - key = "SOFTWARE\\MyKey" - v_name = "MyValue1" - lgpo_reg.set_value(key=key, v_name=v_name, v_data="1") - result = lgpo_reg.get_value(key=key, v_name=v_name) - assert result == expected - expected = { - "hive": "HKLM", - "key": key, - "vname": v_name, - "vdata": 1, - "vtype": "REG_DWORD", - "success": True, - } - result = salt.utils.win_reg.read_value(hive="HKLM", key=key, vname=v_name) - assert result == expected - - -def test_set_value_existing_no_change(reg_pol): - expected = {"data": "squidward", "type": "REG_SZ"} - key = "SOFTWARE\\MyKey" - v_name = "MyValue1" - lgpo_reg.set_value(key=key, v_name=v_name, v_data="squidward", v_type="REG_SZ") - result = lgpo_reg.get_value(key=key, v_name=v_name) - assert result == expected - - -def test_set_value_invalid_policy_class(): +def test_invalid_policy_class_set_value(): pytest.raises( SaltInvocationError, lgpo_reg.set_value, @@ -213,6 +200,12 @@ def test_set_value_invalid_policy_class(): ) +def test_invalid_policy_class_write_reg_pol(): + pytest.raises( + SaltInvocationError, lgpo_reg.write_reg_pol, data={}, policy_class="Invalid" + ) + + def test_set_value_invalid_reg_type(): pytest.raises( SaltInvocationError, @@ -252,7 +245,95 @@ def test_set_value_invalid_reg_dword(): ) -def test_disable_value(reg_pol): +def test_mach_read_reg_pol(empty_reg_pol_mach): + expected = {} + result = lgpo_reg.read_reg_pol() + assert result == expected + + +def test_mach_write_reg_pol(empty_reg_pol_mach): + data_to_write = { + r"SOFTWARE\MyKey": { + "MyValue": { + "data": "string", + "type": "REG_SZ", + }, + }, + } + lgpo_reg.write_reg_pol(data_to_write) + result = lgpo_reg.read_reg_pol() + assert result == data_to_write + + +def test_mach_get_value(reg_pol_mach): + expected = {"data": "squidward", "type": "REG_SZ"} + result = lgpo_reg.get_value(key="SOFTWARE\\MyKey1", v_name="MyValue1") + assert result == expected + + +def test_mach_get_key(reg_pol_mach): + expected = { + "MyValue3": { + "data": ["spongebob", "squarepants"], + "type": "REG_MULTI_SZ", + }, + } + result = lgpo_reg.get_key(key="SOFTWARE\\MyKey2") + assert result == expected + + +def test_mach_set_value(empty_reg_pol_mach): + key = "SOFTWARE\\MyKey" + v_name = "MyValue" + # Test command return + result = lgpo_reg.set_value(key=key, v_name=v_name, v_data="1") + assert result is True + # Test value actually set in Registry.pol + expected = {"data": 1, "type": "REG_DWORD"} + result = lgpo_reg.get_value(key=key, v_name=v_name) + assert result == expected + # Test that the registry value has been set + expected = { + "hive": "HKLM", + "key": key, + "vname": v_name, + "vdata": 1, + "vtype": "REG_DWORD", + "success": True, + } + result = 
salt.utils.win_reg.read_value(hive="HKLM", key=key, vname=v_name) + assert result == expected + + +def test_mach_set_value_existing_change(reg_pol_mach): + expected = {"data": 1, "type": "REG_DWORD"} + key = "SOFTWARE\\MyKey" + v_name = "MyValue1" + lgpo_reg.set_value(key=key, v_name=v_name, v_data="1") + result = lgpo_reg.get_value(key=key, v_name=v_name) + assert result == expected + expected = { + "hive": "HKLM", + "key": key, + "vname": v_name, + "vdata": 1, + "vtype": "REG_DWORD", + "success": True, + } + result = salt.utils.win_reg.read_value(hive="HKLM", key=key, vname=v_name) + assert result == expected + + +def test_mach_set_value_existing_no_change(reg_pol_mach): + expected = {"data": "squidward", "type": "REG_SZ"} + key = "SOFTWARE\\MyKey" + v_name = "MyValue1" + lgpo_reg.set_value(key=key, v_name=v_name, v_data="squidward", v_type="REG_SZ") + result = lgpo_reg.get_value(key=key, v_name=v_name) + assert result == expected + + +def test_mach_disable_value(reg_pol_mach): key = "SOFTWARE\\MyKey1" # Test that the command completed successfully result = lgpo_reg.disable_value(key=key, v_name="MyValue1") @@ -269,7 +350,7 @@ def test_disable_value(reg_pol): assert result is False -def test_disable_value_no_change(reg_pol): +def test_mach_disable_value_no_change(reg_pol_mach): expected = { "MyValue1": {"data": "squidward", "type": "REG_SZ"}, "**del.MyValue2": {"data": " ", "type": "REG_SZ"}, @@ -280,17 +361,7 @@ def test_disable_value_no_change(reg_pol): assert result == expected -def test_disable_value_invalid_policy_class(): - pytest.raises( - SaltInvocationError, - lgpo_reg.disable_value, - key="", - v_name="", - policy_class="Invalid", - ) - - -def test_delete_value_existing(reg_pol): +def test_mach_delete_value_existing(reg_pol_mach): key = "SOFTWARE\\MyKey1" # Test that the command completes successfully result = lgpo_reg.delete_value(key=key, v_name="MyValue1") @@ -309,7 +380,7 @@ def test_delete_value_existing(reg_pol): assert result is False -def test_delete_value_no_change(empty_reg_pol): +def test_mach_delete_value_no_change(empty_reg_pol_mach): expected = {} key = "SOFTWARE\\MyKey1" lgpo_reg.delete_value(key=key, v_name="MyValue2") @@ -317,11 +388,159 @@ def test_delete_value_no_change(empty_reg_pol): assert result == expected -def test_delete_value_invalid_policy_class(): - pytest.raises( - SaltInvocationError, - lgpo_reg.delete_value, - key="", - v_name="", - policy_class="Invalid", +def test_user_read_reg_pol(empty_reg_pol_user): + expected = {} + result = lgpo_reg.read_reg_pol(policy_class="User") + assert result == expected + + +def test_user_write_reg_pol(empty_reg_pol_user): + data_to_write = { + r"SOFTWARE\MyKey": { + "MyValue": { + "data": "string", + "type": "REG_SZ", + }, + }, + } + lgpo_reg.write_reg_pol(data_to_write, policy_class="User") + result = lgpo_reg.read_reg_pol(policy_class="User") + assert result == data_to_write + + +def test_user_get_value(reg_pol_user): + expected = {"data": "squidward", "type": "REG_SZ"} + result = lgpo_reg.get_value( + key="SOFTWARE\\MyKey1", + v_name="MyValue1", + policy_class="User", ) + assert result == expected + + +def test_user_get_key(reg_pol_user): + expected = { + "MyValue3": { + "data": ["spongebob", "squarepants"], + "type": "REG_MULTI_SZ", + }, + } + result = lgpo_reg.get_key(key="SOFTWARE\\MyKey2", policy_class="User") + assert result == expected + + +def test_user_set_value(empty_reg_pol_user): + key = "SOFTWARE\\MyKey" + v_name = "MyValue" + # Test command return + result = lgpo_reg.set_value( + key=key, + 
v_name=v_name, + v_data="1", + policy_class="User", + ) + assert result is True + # Test value actually set in Registry.pol + expected = {"data": 1, "type": "REG_DWORD"} + result = lgpo_reg.get_value(key=key, v_name=v_name, policy_class="User") + assert result == expected + # Test that the registry value has been set + expected = { + "hive": "HKCU", + "key": key, + "vname": v_name, + "vdata": 1, + "vtype": "REG_DWORD", + "success": True, + } + result = salt.utils.win_reg.read_value(hive="HKCU", key=key, vname=v_name) + assert result == expected + + +def test_user_set_value_existing_change(reg_pol_user): + expected = {"data": 1, "type": "REG_DWORD"} + key = "SOFTWARE\\MyKey" + v_name = "MyValue1" + lgpo_reg.set_value(key=key, v_name=v_name, v_data="1", policy_class="User") + result = lgpo_reg.get_value(key=key, v_name=v_name, policy_class="User") + assert result == expected + expected = { + "hive": "HKCU", + "key": key, + "vname": v_name, + "vdata": 1, + "vtype": "REG_DWORD", + "success": True, + } + result = salt.utils.win_reg.read_value(hive="HKCU", key=key, vname=v_name) + assert result == expected + + +def test_user_set_value_existing_no_change(reg_pol_user): + expected = {"data": "squidward", "type": "REG_SZ"} + key = "SOFTWARE\\MyKey" + v_name = "MyValue1" + lgpo_reg.set_value( + key=key, + v_name=v_name, + v_data="squidward", + v_type="REG_SZ", + policy_class="User", + ) + result = lgpo_reg.get_value(key=key, v_name=v_name, policy_class="User") + assert result == expected + + +def test_user_disable_value(reg_pol_user): + key = "SOFTWARE\\MyKey1" + # Test that the command completed successfully + result = lgpo_reg.disable_value(key=key, v_name="MyValue1", policy_class="User") + assert result is True + # Test that the value was actually set in Registry.pol + expected = { + "**del.MyValue1": {"data": " ", "type": "REG_SZ"}, + "**del.MyValue2": {"data": " ", "type": "REG_SZ"}, + } + result = lgpo_reg.get_key(key=key, policy_class="User") + assert result == expected + # Test that the registry value has been removed + result = salt.utils.win_reg.value_exists(hive="HKCU", key=key, vname="MyValue1") + assert result is False + + +def test_user_disable_value_no_change(reg_pol_user): + expected = { + "MyValue1": {"data": "squidward", "type": "REG_SZ"}, + "**del.MyValue2": {"data": " ", "type": "REG_SZ"}, + } + key = "SOFTWARE\\MyKey1" + lgpo_reg.disable_value(key=key, v_name="MyValue2", policy_class="User") + result = lgpo_reg.get_key(key=key, policy_class="User") + assert result == expected + + +def test_user_delete_value_existing(reg_pol_user): + key = "SOFTWARE\\MyKey1" + # Test that the command completes successfully + result = lgpo_reg.delete_value(key=key, v_name="MyValue1", policy_class="User") + assert result is True + # Test that the value is actually removed from Registry.pol + expected = { + "**del.MyValue2": { + "data": " ", + "type": "REG_SZ", + }, + } + result = lgpo_reg.get_key(key=key, policy_class="User") + assert result == expected + # Test that the registry entry has been removed + result = salt.utils.win_reg.value_exists(hive="HKCU", key=key, vname="MyValue2") + assert result is False + + +def test_user_delete_value_no_change(empty_reg_pol_user): + expected = {} + key = "SOFTWARE\\MyKey1" + lgpo_reg.delete_value(key=key, v_name="MyValue2", policy_class="User") + result = lgpo_reg.get_key(key=key, policy_class="User") + assert result == expected diff --git a/tests/pytests/unit/states/test_win_lgpo_reg.py b/tests/pytests/unit/states/test_win_lgpo_reg.py index 
d2ca5cc7433..3a1003bf03e 100644 --- a/tests/pytests/unit/states/test_win_lgpo_reg.py +++ b/tests/pytests/unit/states/test_win_lgpo_reg.py @@ -1,9 +1,15 @@ +import os +import pathlib + import pytest +import salt.modules.win_file as file import salt.modules.win_lgpo_reg as win_lgpo_reg import salt.states.win_lgpo_reg as lgpo_reg import salt.utils.files +import salt.utils.win_dacl import salt.utils.win_lgpo_reg +import salt.utils.win_reg from tests.support.mock import patch pytestmark = [ @@ -25,22 +31,50 @@ def configure_loader_modules(): "lgpo_reg.delete_value": win_lgpo_reg.delete_value, }, }, + file: { + "__utils__": { + "dacl.set_perms": salt.utils.win_dacl.set_perms, + }, + }, } @pytest.fixture -def empty_reg_pol(): +def empty_reg_pol_mach(): class_info = salt.utils.win_lgpo_reg.CLASS_INFO - reg_pol_file = class_info["Machine"]["policy_path"] - with salt.utils.files.fopen(reg_pol_file, "wb") as f: - f.write(salt.utils.win_lgpo_reg.REG_POL_HEADER.encode("utf-16-le")) + reg_pol_file = pathlib.Path(class_info["Machine"]["policy_path"]) + reg_pol_file.parent.mkdir(parents=True, exist_ok=True) + reg_pol_file.write_bytes( + salt.utils.win_lgpo_reg.REG_POL_HEADER.encode("utf-16-le") + ) yield - with salt.utils.files.fopen(reg_pol_file, "wb") as f: - f.write(salt.utils.win_lgpo_reg.REG_POL_HEADER.encode("utf-16-le")) + salt.utils.win_reg.delete_key_recursive(hive="HKLM", key="SOFTWARE\\MyKey1") + salt.utils.win_reg.delete_key_recursive(hive="HKLM", key="SOFTWARE\\MyKey2") + reg_pol_file.write_bytes( + salt.utils.win_lgpo_reg.REG_POL_HEADER.encode("utf-16-le") + ) @pytest.fixture -def reg_pol(): +def empty_reg_pol_user(): + class_info = salt.utils.win_lgpo_reg.CLASS_INFO + reg_pol_file = pathlib.Path(class_info["User"]["policy_path"]) + reg_pol_file.parent.mkdir(parents=True, exist_ok=True) + reg_pol_file.write_bytes( + salt.utils.win_lgpo_reg.REG_POL_HEADER.encode("utf-16-le") + ) + with salt.utils.files.fopen(reg_pol_file, "wb") as f: + f.write(salt.utils.win_lgpo_reg.REG_POL_HEADER.encode("utf-16-le")) + yield + salt.utils.win_reg.delete_key_recursive(hive="HKCU", key="SOFTWARE\\MyKey1") + salt.utils.win_reg.delete_key_recursive(hive="HKCU", key="SOFTWARE\\MyKey2") + reg_pol_file.write_bytes( + salt.utils.win_lgpo_reg.REG_POL_HEADER.encode("utf-16-le") + ) + + +@pytest.fixture +def reg_pol_mach(): data_to_write = { r"SOFTWARE\MyKey1": { "MyValue1": { @@ -51,6 +85,10 @@ def reg_pol(): "data": " ", "type": "REG_SZ", }, + "MyValue3": { + "data": 0, + "type": "REG_DWORD", + }, }, r"SOFTWARE\MyKey2": { "MyValue3": { @@ -61,19 +99,55 @@ def reg_pol(): } win_lgpo_reg.write_reg_pol(data_to_write) yield + salt.utils.win_reg.delete_key_recursive(hive="HKLM", key="SOFTWARE\\MyKey1") + salt.utils.win_reg.delete_key_recursive(hive="HKLM", key="SOFTWARE\\MyKey2") class_info = salt.utils.win_lgpo_reg.CLASS_INFO reg_pol_file = class_info["Machine"]["policy_path"] with salt.utils.files.fopen(reg_pol_file, "wb") as f: f.write(salt.utils.win_lgpo_reg.REG_POL_HEADER.encode("utf-16-le")) +@pytest.fixture +def reg_pol_user(): + data_to_write = { + r"SOFTWARE\MyKey1": { + "MyValue1": { + "data": "squidward", + "type": "REG_SZ", + }, + "**del.MyValue2": { + "data": " ", + "type": "REG_SZ", + }, + "MyValue3": { + "data": 0, + "type": "REG_DWORD", + }, + }, + r"SOFTWARE\MyKey2": { + "MyValue3": { + "data": ["spongebob", "squarepants"], + "type": "REG_MULTI_SZ", + }, + }, + } + win_lgpo_reg.write_reg_pol(data_to_write, policy_class="User") + yield + salt.utils.win_reg.delete_key_recursive(hive="HKCU", 
key="SOFTWARE\\MyKey1") + salt.utils.win_reg.delete_key_recursive(hive="HKCU", key="SOFTWARE\\MyKey2") + class_info = salt.utils.win_lgpo_reg.CLASS_INFO + reg_pol_file = class_info["User"]["policy_path"] + with salt.utils.files.fopen(reg_pol_file, "wb") as f: + f.write(salt.utils.win_lgpo_reg.REG_POL_HEADER.encode("utf-16-le")) + + def test_virtual_name(): assert lgpo_reg.__virtual__() == "lgpo_reg" -def test_value_present(empty_reg_pol): +def test_machine_value_present(empty_reg_pol_mach): """ - Test value.present + Test value.present in Machine policy """ result = lgpo_reg.value_present( name="MyValue", @@ -96,9 +170,9 @@ def test_value_present(empty_reg_pol): assert result == expected -def test_value_present_existing_change(reg_pol): +def test_machine_value_present_existing_change(reg_pol_mach): """ - Test value.present with existing incorrect value + Test value.present with existing incorrect value in Machine policy """ result = lgpo_reg.value_present( name="MyValue1", @@ -124,9 +198,35 @@ def test_value_present_existing_change(reg_pol): assert result == expected -def test_value_present_existing_no_change(reg_pol): +def test_machine_value_present_existing_change_dword(reg_pol_mach): """ - Test value.present with existing correct value + Test value.present with existing incorrect value in Machine policy + """ + result = lgpo_reg.value_present( + name="MyValue3", + key="SOFTWARE\\MyKey1", + v_data=1, + v_type="REG_DWORD", + ) + expected = { + "changes": { + "new": { + "data": 1, + }, + "old": { + "data": 0, + }, + }, + "comment": "Registry.pol value has been set", + "name": "MyValue3", + "result": True, + } + assert result == expected + + +def test_machine_value_present_existing_no_change(reg_pol_mach): + """ + Test value.present with existing correct value in Machine policy """ result = lgpo_reg.value_present( name="MyValue1", @@ -143,9 +243,9 @@ def test_value_present_existing_no_change(reg_pol): assert result == expected -def test_value_present_test_true(empty_reg_pol): +def test_machine_value_present_test_true(empty_reg_pol_mach): """ - Test value.present with test=True + Test value.present with test=True in Machine policy """ with patch.dict(lgpo_reg.__opts__, {"test": True}): result = lgpo_reg.value_present( @@ -163,9 +263,9 @@ def test_value_present_test_true(empty_reg_pol): assert result == expected -def test_value_present_existing_disabled(reg_pol): +def test_machine_value_present_existing_disabled(reg_pol_mach): """ - Test value.present with existing value that is disabled + Test value.present with existing value that is disabled in Machine policy """ result = lgpo_reg.value_present( name="MyValue2", @@ -191,9 +291,9 @@ def test_value_present_existing_disabled(reg_pol): assert result == expected -def test_value_disabled(empty_reg_pol): +def test_machine_value_disabled(empty_reg_pol_mach): """ - Test value.disabled + Test value.disabled in Machine policy """ result = lgpo_reg.value_disabled( name="MyValue1", @@ -214,9 +314,10 @@ def test_value_disabled(empty_reg_pol): assert result == expected -def test_value_disabled_existing_change(reg_pol): +def test_machine_value_disabled_existing_change(reg_pol_mach): """ - Test value.disabled with an existing value that is not disabled + Test value.disabled with an existing value that is not disabled in Machine + policy """ result = lgpo_reg.value_disabled( name="MyValue1", @@ -238,9 +339,9 @@ def test_value_disabled_existing_change(reg_pol): assert result == expected -def test_value_disabled_existing_no_change(reg_pol): +def 
test_machine_value_disabled_existing_no_change(reg_pol_mach): """ - Test value.disabled with an existing disabled value + Test value.disabled with an existing disabled value in Machine policy """ result = lgpo_reg.value_disabled( name="MyValue2", @@ -255,9 +356,9 @@ def test_value_disabled_existing_no_change(reg_pol): assert result == expected -def test_value_disabled_test_true(empty_reg_pol): +def test_machine_value_disabled_test_true(empty_reg_pol_mach): """ - Test value.disabled when test=True + Test value.disabled when test=True in Machine policy """ with patch.dict(lgpo_reg.__opts__, {"test": True}): result = lgpo_reg.value_disabled( @@ -273,9 +374,9 @@ def test_value_disabled_test_true(empty_reg_pol): assert result == expected -def test_value_absent(reg_pol): +def test_machine_value_absent(reg_pol_mach): """ - Test value.absent + Test value.absent in Machine policy """ result = lgpo_reg.value_absent(name="MyValue1", key="SOFTWARE\\MyKey1") expected = { @@ -293,9 +394,9 @@ def test_value_absent(reg_pol): assert result == expected -def test_value_absent_no_change(empty_reg_pol): +def test_machine_value_absent_no_change(empty_reg_pol_mach): """ - Test value.absent when the value is already absent + Test value.absent when the value is already absent in Machine policy """ result = lgpo_reg.value_absent(name="MyValue1", key="SOFTWARE\\MyKey1") expected = { @@ -307,9 +408,9 @@ def test_value_absent_no_change(empty_reg_pol): assert result == expected -def test_value_absent_disabled(reg_pol): +def test_machine_value_absent_disabled(reg_pol_mach): """ - Test value.absent when the value is disabled + Test value.absent when the value is disabled in Machine policy """ result = lgpo_reg.value_absent(name="MyValue2", key="SOFTWARE\\MyKey1") expected = { @@ -327,9 +428,9 @@ def test_value_absent_disabled(reg_pol): assert result == expected -def test_value_absent_test_true(reg_pol): +def test_machine_value_absent_test_true(reg_pol_mach): """ - Test value.absent with test=True + Test value.absent with test=True in Machine policy """ with patch.dict(lgpo_reg.__opts__, {"test": True}): result = lgpo_reg.value_absent(name="MyValue1", key="SOFTWARE\\MyKey1") @@ -340,3 +441,327 @@ def test_value_absent_test_true(reg_pol): "result": None, } assert result == expected + + +def test_user_value_present(empty_reg_pol_user): + """ + Test value.present in User policy + """ + result = lgpo_reg.value_present( + name="MyValue", + key="SOFTWARE\\MyKey", + v_data="1", + v_type="REG_DWORD", + policy_class="User", + ) + expected = { + "changes": { + "new": { + "data": 1, + "type": "REG_DWORD", + }, + "old": {}, + }, + "comment": "Registry.pol value has been set", + "name": "MyValue", + "result": True, + } + assert result == expected + + +def test_user_value_present_existing_change(reg_pol_user): + """ + Test value.present with existing incorrect value in User policy + """ + result = lgpo_reg.value_present( + name="MyValue1", + key="SOFTWARE\\MyKey1", + v_data="2", + v_type="REG_DWORD", + policy_class="User", + ) + expected = { + "changes": { + "new": { + "data": 2, + "type": "REG_DWORD", + }, + "old": { + "data": "squidward", + "type": "REG_SZ", + }, + }, + "comment": "Registry.pol value has been set", + "name": "MyValue1", + "result": True, + } + assert result == expected + + +def test_user_value_present_existing_change_dword(reg_pol_user): + """ + Test value.present with existing incorrect value in User policy + """ + result = lgpo_reg.value_present( + name="MyValue3", + key="SOFTWARE\\MyKey1", + v_data=1, + 
v_type="REG_DWORD", + policy_class="User", + ) + expected = { + "changes": { + "new": { + "data": 1, + }, + "old": { + "data": 0, + }, + }, + "comment": "Registry.pol value has been set", + "name": "MyValue3", + "result": True, + } + assert result == expected + + +def test_user_value_present_existing_no_change(reg_pol_user): + """ + Test value.present with existing correct value in User policy + """ + result = lgpo_reg.value_present( + name="MyValue1", + key="SOFTWARE\\MyKey1", + v_data="squidward", + v_type="REG_SZ", + policy_class="User", + ) + expected = { + "changes": {}, + "comment": "Registry.pol value already present", + "name": "MyValue1", + "result": True, + } + assert result == expected + + +def test_user_value_present_test_true(empty_reg_pol_user): + """ + Test value.present with test=True in User policy + """ + with patch.dict(lgpo_reg.__opts__, {"test": True}): + result = lgpo_reg.value_present( + name="MyValue", + key="SOFTWARE\\MyKey", + v_data="1", + v_type="REG_DWORD", + policy_class="User", + ) + expected = { + "changes": {}, + "comment": "Registry.pol value will be set", + "name": "MyValue", + "result": None, + } + assert result == expected + + +def test_user_value_present_existing_disabled(reg_pol_user): + """ + Test value.present with existing value that is disabled in User policy + """ + result = lgpo_reg.value_present( + name="MyValue2", + key="SOFTWARE\\MyKey1", + v_data="2", + v_type="REG_DWORD", + policy_class="User", + ) + expected = { + "changes": { + "new": { + "data": 2, + "type": "REG_DWORD", + }, + "old": { + "data": "**del.MyValue2", + "type": "REG_SZ", + }, + }, + "comment": "Registry.pol value has been set", + "name": "MyValue2", + "result": True, + } + assert result == expected + + +def test_user_value_disabled(empty_reg_pol_user): + """ + Test value.disabled in User policy + """ + result = lgpo_reg.value_disabled( + name="MyValue1", + key="SOFTWARE\\MyKey1", + policy_class="User" + ) + expected = { + "changes": { + "new": { + "data": "**del.MyValue1", + "type": "REG_SZ", + }, + "old": {}, + }, + "comment": "Registry.pol value disabled", + "name": "MyValue1", + "result": True, + } + assert result == expected + + +def test_user_value_disabled_existing_change(reg_pol_user): + """ + Test value.disabled with an existing value that is not disabled in User + policy + """ + result = lgpo_reg.value_disabled( + name="MyValue1", + key="SOFTWARE\\MyKey1", + policy_class="User", + ) + expected = { + "changes": { + "new": { + "data": "**del.MyValue1", + }, + "old": { + "data": "squidward", + }, + }, + "comment": "Registry.pol value disabled", + "name": "MyValue1", + "result": True, + } + assert result == expected + + +def test_user_value_disabled_existing_no_change(reg_pol_user): + """ + Test value.disabled with an existing disabled value in User policy + """ + result = lgpo_reg.value_disabled( + name="MyValue2", + key="SOFTWARE\\MyKey1", + policy_class="User", + ) + expected = { + "changes": {}, + "comment": "Registry.pol value already disabled", + "name": "MyValue2", + "result": True, + } + assert result == expected + + +def test_user_value_disabled_test_true(empty_reg_pol_user): + """ + Test value.disabled when test=True in User policy + """ + with patch.dict(lgpo_reg.__opts__, {"test": True}): + result = lgpo_reg.value_disabled( + name="MyValue", + key="SOFTWARE\\MyKey", + policy_class="User", + ) + expected = { + "changes": {}, + "comment": "Registry.pol value will be disabled", + "name": "MyValue", + "result": None, + } + assert result == expected + + +def 
test_user_value_absent(reg_pol_user): + """ + Test value.absent in User policy + """ + result = lgpo_reg.value_absent( + name="MyValue1", + key="SOFTWARE\\MyKey1", + policy_class="User", + ) + expected = { + "changes": { + "new": {}, + "old": { + "data": "squidward", + "type": "REG_SZ", + }, + }, + "comment": "Registry.pol value deleted", + "name": "MyValue1", + "result": True, + } + assert result == expected + + +def test_user_value_absent_no_change(empty_reg_pol_user): + """ + Test value.absent when the value is already absent in User policy + """ + result = lgpo_reg.value_absent( + name="MyValue1", + key="SOFTWARE\\MyKey1", + policy_class="User", + ) + expected = { + "changes": {}, + "comment": "Registry.pol value already absent", + "name": "MyValue1", + "result": True, + } + assert result == expected + + +def test_user_value_absent_disabled(reg_pol_user): + """ + Test value.absent when the value is disabled in User policy + """ + result = lgpo_reg.value_absent( + name="MyValue2", + key="SOFTWARE\\MyKey1", + policy_class="User", + ) + expected = { + "changes": { + "new": {}, + "old": { + "data": "**del.MyValue2", + "type": "REG_SZ", + }, + }, + "comment": "Registry.pol value deleted", + "name": "MyValue2", + "result": True, + } + assert result == expected + + +def test_user_value_absent_test_true(reg_pol_user): + """ + Test value.absent with test=True in User policy + """ + with patch.dict(lgpo_reg.__opts__, {"test": True}): + result = lgpo_reg.value_absent( + name="MyValue1", + key="SOFTWARE\\MyKey1", + policy_class="User", + ) + expected = { + "changes": {}, + "comment": "Registry.pol value will be deleted", + "name": "MyValue1", + "result": None, + } + assert result == expected From 8a1a0577aacca969c289f0b251c01b4f91f0ed4b Mon Sep 17 00:00:00 2001 From: Twangboy Date: Mon, 1 May 2023 14:59:35 -0600 Subject: [PATCH 049/121] Add changelog --- changelog/64200.fixed.md | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog/64200.fixed.md diff --git a/changelog/64200.fixed.md b/changelog/64200.fixed.md new file mode 100644 index 00000000000..9c977309cb9 --- /dev/null +++ b/changelog/64200.fixed.md @@ -0,0 +1 @@ +Fix ``lgpo_reg`` state to work with User policy From 0e86853321b9b639d0e31696901feeae6112ce1c Mon Sep 17 00:00:00 2001 From: Twangboy Date: Mon, 1 May 2023 15:06:31 -0600 Subject: [PATCH 050/121] Fix pre-commit --- .../pytests/unit/states/test_win_lgpo_reg.py | 22 +++++-------------- 1 file changed, 5 insertions(+), 17 deletions(-) diff --git a/tests/pytests/unit/states/test_win_lgpo_reg.py b/tests/pytests/unit/states/test_win_lgpo_reg.py index 3a1003bf03e..181cfa1c888 100644 --- a/tests/pytests/unit/states/test_win_lgpo_reg.py +++ b/tests/pytests/unit/states/test_win_lgpo_reg.py @@ -44,15 +44,11 @@ def empty_reg_pol_mach(): class_info = salt.utils.win_lgpo_reg.CLASS_INFO reg_pol_file = pathlib.Path(class_info["Machine"]["policy_path"]) reg_pol_file.parent.mkdir(parents=True, exist_ok=True) - reg_pol_file.write_bytes( - salt.utils.win_lgpo_reg.REG_POL_HEADER.encode("utf-16-le") - ) + reg_pol_file.write_bytes(salt.utils.win_lgpo_reg.REG_POL_HEADER.encode("utf-16-le")) yield salt.utils.win_reg.delete_key_recursive(hive="HKLM", key="SOFTWARE\\MyKey1") salt.utils.win_reg.delete_key_recursive(hive="HKLM", key="SOFTWARE\\MyKey2") - reg_pol_file.write_bytes( - salt.utils.win_lgpo_reg.REG_POL_HEADER.encode("utf-16-le") - ) + reg_pol_file.write_bytes(salt.utils.win_lgpo_reg.REG_POL_HEADER.encode("utf-16-le")) @pytest.fixture @@ -60,17 +56,11 @@ def empty_reg_pol_user(): 
class_info = salt.utils.win_lgpo_reg.CLASS_INFO reg_pol_file = pathlib.Path(class_info["User"]["policy_path"]) reg_pol_file.parent.mkdir(parents=True, exist_ok=True) - reg_pol_file.write_bytes( - salt.utils.win_lgpo_reg.REG_POL_HEADER.encode("utf-16-le") - ) - with salt.utils.files.fopen(reg_pol_file, "wb") as f: - f.write(salt.utils.win_lgpo_reg.REG_POL_HEADER.encode("utf-16-le")) + reg_pol_file.write_bytes(salt.utils.win_lgpo_reg.REG_POL_HEADER.encode("utf-16-le")) yield salt.utils.win_reg.delete_key_recursive(hive="HKCU", key="SOFTWARE\\MyKey1") salt.utils.win_reg.delete_key_recursive(hive="HKCU", key="SOFTWARE\\MyKey2") - reg_pol_file.write_bytes( - salt.utils.win_lgpo_reg.REG_POL_HEADER.encode("utf-16-le") - ) + reg_pol_file.write_bytes(salt.utils.win_lgpo_reg.REG_POL_HEADER.encode("utf-16-le")) @pytest.fixture @@ -600,9 +590,7 @@ def test_user_value_disabled(empty_reg_pol_user): Test value.disabled in User policy """ result = lgpo_reg.value_disabled( - name="MyValue1", - key="SOFTWARE\\MyKey1", - policy_class="User" + name="MyValue1", key="SOFTWARE\\MyKey1", policy_class="User" ) expected = { "changes": { From 9ddc8d25eb95fd9f6af225929a0beec018dd9607 Mon Sep 17 00:00:00 2001 From: Twangboy Date: Mon, 1 May 2023 17:03:24 -0600 Subject: [PATCH 051/121] Fix test fixture to remove key in HKCU --- tests/pytests/unit/modules/test_win_lgpo_reg.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/tests/pytests/unit/modules/test_win_lgpo_reg.py b/tests/pytests/unit/modules/test_win_lgpo_reg.py index b2bd351018b..04284ee2727 100644 --- a/tests/pytests/unit/modules/test_win_lgpo_reg.py +++ b/tests/pytests/unit/modules/test_win_lgpo_reg.py @@ -54,11 +54,11 @@ def empty_reg_pol_user(): reg_pol_file.parent.mkdir(parents=True) with salt.utils.files.fopen(str(reg_pol_file), "wb") as f: f.write(salt.utils.win_lgpo_reg.REG_POL_HEADER.encode("utf-16-le")) - salt.utils.win_reg.delete_key_recursive(hive="HKLM", key="SOFTWARE\\MyKey1") - salt.utils.win_reg.delete_key_recursive(hive="HKLM", key="SOFTWARE\\MyKey2") + salt.utils.win_reg.delete_key_recursive(hive="HKCU", key="SOFTWARE\\MyKey1") + salt.utils.win_reg.delete_key_recursive(hive="HKCU", key="SOFTWARE\\MyKey2") yield - salt.utils.win_reg.delete_key_recursive(hive="HKLM", key="SOFTWARE\\MyKey1") - salt.utils.win_reg.delete_key_recursive(hive="HKLM", key="SOFTWARE\\MyKey2") + salt.utils.win_reg.delete_key_recursive(hive="HKCU", key="SOFTWARE\\MyKey1") + salt.utils.win_reg.delete_key_recursive(hive="HKCU", key="SOFTWARE\\MyKey2") with salt.utils.files.fopen(str(reg_pol_file), "wb") as f: f.write(salt.utils.win_lgpo_reg.REG_POL_HEADER.encode("utf-16-le")) From cc627c024be485547361530fa27b0c19987bb705 Mon Sep 17 00:00:00 2001 From: Twangboy Date: Mon, 1 May 2023 18:56:48 -0600 Subject: [PATCH 052/121] Fix some lint --- tests/pytests/unit/states/test_win_lgpo_reg.py | 1 - 1 file changed, 1 deletion(-) diff --git a/tests/pytests/unit/states/test_win_lgpo_reg.py b/tests/pytests/unit/states/test_win_lgpo_reg.py index 181cfa1c888..6ae4ef7f84d 100644 --- a/tests/pytests/unit/states/test_win_lgpo_reg.py +++ b/tests/pytests/unit/states/test_win_lgpo_reg.py @@ -1,4 +1,3 @@ -import os import pathlib import pytest From 580fb824f1c60f85fb1935eecf9a3c9614fa92aa Mon Sep 17 00:00:00 2001 From: "Gareth J. Greenaway" Date: Thu, 27 Apr 2023 11:22:23 -0700 Subject: [PATCH 053/121] Update all the scheduler functions to include a fire_event argument which will determine whether to fire the completion event onto the event bus. 
This event is only used when these functions are called via the schedule execution modules. Inside deltaproxy, then update all scheudle related functions to include fire_event=False, as the event bus is not available when these functions are called. --- salt/metaproxy/deltaproxy.py | 73 +++++++++---- salt/utils/schedule.py | 202 +++++++++++++++++++++-------------- 2 files changed, 175 insertions(+), 100 deletions(-) diff --git a/salt/metaproxy/deltaproxy.py b/salt/metaproxy/deltaproxy.py index c3003b368f7..2d94a6af70e 100644 --- a/salt/metaproxy/deltaproxy.py +++ b/salt/metaproxy/deltaproxy.py @@ -231,10 +231,11 @@ def post_master_init(self, master): } }, persist=True, + fire_event=False, ) log.info("Added mine.update to scheduler") else: - self.schedule.delete_job("__mine_interval", persist=True) + self.schedule.delete_job("__mine_interval", persist=True, fire_event=False) # add master_alive job if enabled if self.opts["transport"] != "tcp" and self.opts["master_alive_interval"] > 0: @@ -250,6 +251,7 @@ def post_master_init(self, master): } }, persist=True, + fire_event=False, ) if ( self.opts["master_failback"] @@ -268,18 +270,24 @@ def post_master_init(self, master): } }, persist=True, + fire_event=False, ) else: self.schedule.delete_job( - salt.minion.master_event(type="failback"), persist=True + salt.minion.master_event(type="failback"), + persist=True, + fire_event=False, ) else: self.schedule.delete_job( salt.minion.master_event(type="alive", master=self.opts["master"]), persist=True, + fire_event=False, ) self.schedule.delete_job( - salt.minion.master_event(type="failback"), persist=True + salt.minion.master_event(type="failback"), + persist=True, + fire_event=False, ) # proxy keepalive @@ -304,10 +312,15 @@ def post_master_init(self, master): } }, persist=True, + fire_event=False, ) - self.schedule.enable_schedule() + self.schedule.enable_schedule(fire_event=False) else: - self.schedule.delete_job("__proxy_keepalive", persist=True) + self.schedule.delete_job( + "__proxy_keepalive", + persist=True, + fire_event=False, + ) # Sync the grains here so the proxy can communicate them to the master self.functions["saltutil.sync_grains"](saltenv="base") @@ -321,10 +334,11 @@ def post_master_init(self, master): self.proxy_context = {} self.add_periodic_callback("cleanup", self.cleanup_subprocesses) + _failed = list() if self.opts["proxy"].get("parallel_startup"): log.debug("Initiating parallel startup for proxies") with concurrent.futures.ThreadPoolExecutor() as executor: - futures = [ + futures = { executor.submit( subproxy_post_master_init, _id, @@ -332,12 +346,22 @@ def post_master_init(self, master): self.opts, self.proxy, self.utils, - ) + ): _id for _id in self.opts["proxy"].get("ids", []) - ] + } - for f in concurrent.futures.as_completed(futures): - sub_proxy_data = f.result() + for future in concurrent.futures.as_completed(futures): + try: + sub_proxy_data = future.result() + except Exception as exc: # pylint: disable=broad-except + _id = futures[future] + log.info( + "An exception %s occured during initialization for %s, skipping.", + exc, + _id, + ) + _failed.append(_id) + continue minion_id = sub_proxy_data["proxy_opts"].get("id") if sub_proxy_data["proxy_minion"]: @@ -347,16 +371,24 @@ def post_master_init(self, master): if self.deltaproxy_opts[minion_id] and self.deltaproxy_objs[minion_id]: self.deltaproxy_objs[ minion_id - ].req_channel = salt.transport.client.AsyncReqChannel.factory( + ].req_channel = salt.channel.client.AsyncReqChannel.factory( sub_proxy_data["proxy_opts"], 
io_loop=self.io_loop ) else: log.debug("Initiating non-parallel startup for proxies") for _id in self.opts["proxy"].get("ids", []): - sub_proxy_data = subproxy_post_master_init( - _id, uid, self.opts, self.proxy, self.utils - ) - + try: + sub_proxy_data = subproxy_post_master_init( + _id, uid, self.opts, self.proxy, self.utils + ) + except Exception as exc: # pylint: disable=broad-except + log.info( + "An exception %s occured during initialization for %s, skipping.", + exc, + _id, + ) + _failed.append(_id) + continue minion_id = sub_proxy_data["proxy_opts"].get("id") if sub_proxy_data["proxy_minion"]: @@ -366,10 +398,12 @@ def post_master_init(self, master): if self.deltaproxy_opts[minion_id] and self.deltaproxy_objs[minion_id]: self.deltaproxy_objs[ minion_id - ].req_channel = salt.transport.client.AsyncReqChannel.factory( + ].req_channel = salt.channel.client.AsyncReqChannel.factory( sub_proxy_data["proxy_opts"], io_loop=self.io_loop ) + if _failed: + log.info("Following sub proxies failed %s", _failed) self.ready = True @@ -535,10 +569,13 @@ def subproxy_post_master_init(minion_id, uid, opts, main_proxy, main_utils): } }, persist=True, + fire_event=False, ) - _proxy_minion.schedule.enable_schedule() + _proxy_minion.schedule.enable_schedule(fire_event=False) else: - _proxy_minion.schedule.delete_job("__proxy_keepalive", persist=True) + _proxy_minion.schedule.delete_job( + "__proxy_keepalive", persist=True, fire_event=False + ) return {"proxy_minion": _proxy_minion, "proxy_opts": proxyopts} diff --git a/salt/utils/schedule.py b/salt/utils/schedule.py index 814c2980d4a..6565dda59e6 100644 --- a/salt/utils/schedule.py +++ b/salt/utils/schedule.py @@ -315,7 +315,7 @@ class Schedule: exc_info_on_loglevel=logging.DEBUG, ) - def delete_job(self, name, persist=True): + def delete_job(self, name, persist=True, fire_event=True): """ Deletes a job from the scheduler. Ignore jobs from pillar """ @@ -325,12 +325,15 @@ class Schedule: elif name in self._get_schedule(include_opts=False): log.warning("Cannot delete job %s, it's in the pillar!", name) - # Fire the complete event back along with updated list of schedule - with salt.utils.event.get_event("minion", opts=self.opts, listen=False) as evt: - evt.fire_event( - {"complete": True, "schedule": self._get_schedule()}, - tag="/salt/minion/minion_schedule_delete_complete", - ) + if fire_event: + # Fire the complete event back along with updated list of schedule + with salt.utils.event.get_event( + "minion", opts=self.opts, listen=False + ) as evt: + evt.fire_event( + {"complete": True, "schedule": self._get_schedule()}, + tag="/salt/minion/minion_schedule_delete_complete", + ) # remove from self.intervals if name in self.intervals: @@ -349,7 +352,7 @@ class Schedule: self.splay = None self.opts["schedule"] = {} - def delete_job_prefix(self, name, persist=True): + def delete_job_prefix(self, name, persist=True, fire_event=True): """ Deletes a job from the scheduler. 
Ignores jobs from pillar """ @@ -361,12 +364,15 @@ class Schedule: if job.startswith(name): log.warning("Cannot delete job %s, it's in the pillar!", job) - # Fire the complete event back along with updated list of schedule - with salt.utils.event.get_event("minion", opts=self.opts, listen=False) as evt: - evt.fire_event( - {"complete": True, "schedule": self._get_schedule()}, - tag="/salt/minion/minion_schedule_delete_complete", - ) + if fire_event: + # Fire the complete event back along with updated list of schedule + with salt.utils.event.get_event( + "minion", opts=self.opts, listen=False + ) as evt: + evt.fire_event( + {"complete": True, "schedule": self._get_schedule()}, + tag="/salt/minion/minion_schedule_delete_complete", + ) # remove from self.intervals for job in list(self.intervals.keys()): @@ -376,7 +382,7 @@ class Schedule: if persist: self.persist() - def add_job(self, data, persist=True): + def add_job(self, data, persist=True, fire_event=True): """ Adds a new job to the scheduler. The format is the same as required in the configuration file. See the docs on how YAML is interpreted into @@ -410,16 +416,19 @@ class Schedule: self.opts["schedule"].update(data) # Fire the complete event back along with updated list of schedule - with salt.utils.event.get_event("minion", opts=self.opts, listen=False) as evt: - evt.fire_event( - {"complete": True, "schedule": self._get_schedule()}, - tag="/salt/minion/minion_schedule_add_complete", - ) + if fire_event: + with salt.utils.event.get_event( + "minion", opts=self.opts, listen=False + ) as evt: + evt.fire_event( + {"complete": True, "schedule": self._get_schedule()}, + tag="/salt/minion/minion_schedule_add_complete", + ) if persist: self.persist() - def enable_job(self, name, persist=True): + def enable_job(self, name, persist=True, fire_event=True): """ Enable a job in the scheduler. Ignores jobs from pillar """ @@ -430,17 +439,20 @@ class Schedule: elif name in self._get_schedule(include_opts=False): log.warning("Cannot modify job %s, it's in the pillar!", name) - # Fire the complete event back along with updated list of schedule - with salt.utils.event.get_event("minion", opts=self.opts, listen=False) as evt: - evt.fire_event( - {"complete": True, "schedule": self._get_schedule()}, - tag="/salt/minion/minion_schedule_enabled_job_complete", - ) + if fire_event: + # Fire the complete event back along with updated list of schedule + with salt.utils.event.get_event( + "minion", opts=self.opts, listen=False + ) as evt: + evt.fire_event( + {"complete": True, "schedule": self._get_schedule()}, + tag="/salt/minion/minion_schedule_enabled_job_complete", + ) if persist: self.persist() - def disable_job(self, name, persist=True): + def disable_job(self, name, persist=True, fire_event=True): """ Disable a job in the scheduler. 
Ignores jobs from pillar """ @@ -451,23 +463,26 @@ class Schedule: elif name in self._get_schedule(include_opts=False): log.warning("Cannot modify job %s, it's in the pillar!", name) - with salt.utils.event.get_event("minion", opts=self.opts, listen=False) as evt: - # Fire the complete event back along with updated list of schedule - evt.fire_event( - {"complete": True, "schedule": self._get_schedule()}, - tag="/salt/minion/minion_schedule_disabled_job_complete", - ) + if fire_event: + with salt.utils.event.get_event( + "minion", opts=self.opts, listen=False + ) as evt: + # Fire the complete event back along with updated list of schedule + evt.fire_event( + {"complete": True, "schedule": self._get_schedule()}, + tag="/salt/minion/minion_schedule_disabled_job_complete", + ) if persist: self.persist() - def modify_job(self, name, schedule, persist=True): + def modify_job(self, name, schedule, persist=True, fire_event=True): """ Modify a job in the scheduler. Ignores jobs from pillar """ # ensure job exists, then replace it if name in self.opts["schedule"]: - self.delete_job(name, persist) + self.delete_job(name, persist, fire_event) elif name in self._get_schedule(include_opts=False): log.warning("Cannot modify job %s, it's in the pillar!", name) return @@ -511,34 +526,40 @@ class Schedule: log.info("Running Job: %s", name) self._run_job(func, data) - def enable_schedule(self, persist=True): + def enable_schedule(self, persist=True, fire_event=True): """ Enable the scheduler. """ self.opts["schedule"]["enabled"] = True - # Fire the complete event back along with updated list of schedule - with salt.utils.event.get_event("minion", opts=self.opts, listen=False) as evt: - evt.fire_event( - {"complete": True, "schedule": self._get_schedule()}, - tag="/salt/minion/minion_schedule_enabled_complete", - ) + if fire_event: + # Fire the complete event back along with updated list of schedule + with salt.utils.event.get_event( + "minion", opts=self.opts, listen=False + ) as evt: + evt.fire_event( + {"complete": True, "schedule": self._get_schedule()}, + tag="/salt/minion/minion_schedule_enabled_complete", + ) if persist: self.persist() - def disable_schedule(self, persist=True): + def disable_schedule(self, persist=True, fire_event=True): """ Disable the scheduler. 
""" self.opts["schedule"]["enabled"] = False - # Fire the complete event back along with updated list of schedule - with salt.utils.event.get_event("minion", opts=self.opts, listen=False) as evt: - evt.fire_event( - {"complete": True, "schedule": self._get_schedule()}, - tag="/salt/minion/minion_schedule_disabled_complete", - ) + if fire_event: + # Fire the complete event back along with updated list of schedule + with salt.utils.event.get_event( + "minion", opts=self.opts, listen=False + ) as evt: + evt.fire_event( + {"complete": True, "schedule": self._get_schedule()}, + tag="/salt/minion/minion_schedule_disabled_complete", + ) if persist: self.persist() @@ -554,7 +575,7 @@ class Schedule: schedule = schedule["schedule"] self.opts.setdefault("schedule", {}).update(schedule) - def list(self, where): + def list(self, where, fire_event=True): """ List the current schedule items """ @@ -565,24 +586,32 @@ class Schedule: else: schedule = self._get_schedule() - # Fire the complete event back along with the list of schedule - with salt.utils.event.get_event("minion", opts=self.opts, listen=False) as evt: - evt.fire_event( - {"complete": True, "schedule": schedule}, - tag="/salt/minion/minion_schedule_list_complete", - ) + if fire_event: + # Fire the complete event back along with the list of schedule + with salt.utils.event.get_event( + "minion", opts=self.opts, listen=False + ) as evt: + evt.fire_event( + {"complete": True, "schedule": schedule}, + tag="/salt/minion/minion_schedule_list_complete", + ) - def save_schedule(self): + def save_schedule(self, fire_event=True): """ Save the current schedule """ self.persist() - # Fire the complete event back along with the list of schedule - with salt.utils.event.get_event("minion", opts=self.opts, listen=False) as evt: - evt.fire_event({"complete": True}, tag="/salt/minion/minion_schedule_saved") + if fire_event: + # Fire the complete event back along with the list of schedule + with salt.utils.event.get_event( + "minion", opts=self.opts, listen=False + ) as evt: + evt.fire_event( + {"complete": True}, tag="/salt/minion/minion_schedule_saved" + ) - def postpone_job(self, name, data): + def postpone_job(self, name, data, fire_event=True): """ Postpone a job in the scheduler. Ignores jobs from pillar @@ -608,14 +637,17 @@ class Schedule: elif name in self._get_schedule(include_opts=False): log.warning("Cannot modify job %s, it's in the pillar!", name) - # Fire the complete event back along with updated list of schedule - with salt.utils.event.get_event("minion", opts=self.opts, listen=False) as evt: - evt.fire_event( - {"complete": True, "schedule": self._get_schedule()}, - tag="/salt/minion/minion_schedule_postpone_job_complete", - ) + if fire_event: + # Fire the complete event back along with updated list of schedule + with salt.utils.event.get_event( + "minion", opts=self.opts, listen=False + ) as evt: + evt.fire_event( + {"complete": True, "schedule": self._get_schedule()}, + tag="/salt/minion/minion_schedule_postpone_job_complete", + ) - def skip_job(self, name, data): + def skip_job(self, name, data, fire_event=True): """ Skip a job at a specific time in the scheduler. 
Ignores jobs from pillar @@ -634,14 +666,17 @@ class Schedule: elif name in self._get_schedule(include_opts=False): log.warning("Cannot modify job %s, it's in the pillar!", name) - # Fire the complete event back along with updated list of schedule - with salt.utils.event.get_event("minion", opts=self.opts, listen=False) as evt: - evt.fire_event( - {"complete": True, "schedule": self._get_schedule()}, - tag="/salt/minion/minion_schedule_skip_job_complete", - ) + if fire_event: + # Fire the complete event back along with updated list of schedule + with salt.utils.event.get_event( + "minion", opts=self.opts, listen=False + ) as evt: + evt.fire_event( + {"complete": True, "schedule": self._get_schedule()}, + tag="/salt/minion/minion_schedule_skip_job_complete", + ) - def get_next_fire_time(self, name, fmt="%Y-%m-%dT%H:%M:%S"): + def get_next_fire_time(self, name, fmt="%Y-%m-%dT%H:%M:%S", fire_event=True): """ Return the next fire time for the specified job """ @@ -653,12 +688,15 @@ class Schedule: if _next_fire_time: _next_fire_time = _next_fire_time.strftime(fmt) - # Fire the complete event back along with updated list of schedule - with salt.utils.event.get_event("minion", opts=self.opts, listen=False) as evt: - evt.fire_event( - {"complete": True, "next_fire_time": _next_fire_time}, - tag="/salt/minion/minion_schedule_next_fire_time_complete", - ) + if fire_event: + # Fire the complete event back along with updated list of schedule + with salt.utils.event.get_event( + "minion", opts=self.opts, listen=False + ) as evt: + evt.fire_event( + {"complete": True, "next_fire_time": _next_fire_time}, + tag="/salt/minion/minion_schedule_next_fire_time_complete", + ) def job_status(self, name, fire_event=False): """ From de7c2f05155f1119335980db2ff82e1b7900e010 Mon Sep 17 00:00:00 2001 From: "Gareth J. Greenaway" Date: Thu, 27 Apr 2023 19:03:12 -0700 Subject: [PATCH 054/121] Updating integration tests to launch more sub proxies. Update single target tests to use a sample of 4 sub proxies for the target. 
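
The expanded helper behaves roughly as sketched below; this is a condensed,
illustrative version of the delta_proxy_minion_ids() change in the
tests/pytests/conftest.py diff that follows (the number-to-word mapping is
abbreviated here, the real one runs from 1 through 31):

    # Condensed sketch of the expanded helper; mapping abbreviated for brevity.
    number_words = {1: "one", 2: "two", 3: "three"}  # ...continues to 31: "thirty_one"

    def delta_proxy_minion_ids():
        # Builds ids such as "dummy_proxy_one", "dummy_proxy_two", ...
        return [
            "dummy_proxy_{}".format(number_words[i])
            for i in range(1, len(number_words) + 1)
        ]

    print(delta_proxy_minion_ids())
    # ['dummy_proxy_one', 'dummy_proxy_two', 'dummy_proxy_three']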
--- tests/pytests/conftest.py | 46 +++++++++-- tests/pytests/integration/proxy/conftest.py | 80 +++++++------------ .../integration/proxy/test_deltaproxy.py | 3 +- 3 files changed, 73 insertions(+), 56 deletions(-) diff --git a/tests/pytests/conftest.py b/tests/pytests/conftest.py index 49181b9ce56..46aa89b0012 100644 --- a/tests/pytests/conftest.py +++ b/tests/pytests/conftest.py @@ -631,12 +631,46 @@ def io_loop(): # ----- Helpers -----------------------------------------------------------------------------------------------------> @pytest.helpers.proxy.register def delta_proxy_minion_ids(): - return [ - "dummy_proxy_one", - "dummy_proxy_two", - "dummy_proxy_three", - "dummy_proxy_four", - ] + + number_words = { + 1: "one", + 2: "two", + 3: "three", + 4: "four", + 5: "five", + 6: "six", + 7: "seven", + 8: "eight", + 9: "nine", + 10: "ten", + 11: "eleven", + 12: "twelve", + 13: "thirteen", + 14: "fourteen", + 15: "fifteen", + 16: "sixteen", + 17: "seventeen", + 18: "eighteen", + 19: "nineteen", + 20: "twenty", + 21: "twenty_one", + 22: "twenty_two", + 23: "twenty_three", + 24: "twenty_four", + 25: "twenty_five", + 26: "twenty_six", + 27: "twenty_seven", + 28: "twenty_eight", + 29: "twenty_nine", + 30: "thirty", + 31: "thirty_one", + } + + sub_proxies = [] + for i in range(1, 32): + sub_proxies.append("dummy_proxy_{}".format(number_words[i])) + + return sub_proxies # <---- Helpers ------------------------------------------------------------------------------------------------------ diff --git a/tests/pytests/integration/proxy/conftest.py b/tests/pytests/integration/proxy/conftest.py index d924f4eba8a..b44fe40304a 100644 --- a/tests/pytests/integration/proxy/conftest.py +++ b/tests/pytests/integration/proxy/conftest.py @@ -25,71 +25,53 @@ def deltaproxy_pillar_tree(request, salt_master, salt_delta_proxy_factory): """ Create the pillar files for controlproxy and two dummy proxy minions """ - ( - proxy_one, - proxy_two, - proxy_three, - proxy_four, - ) = pytest.helpers.proxy.delta_proxy_minion_ids() + minion_ids = pytest.helpers.proxy.delta_proxy_minion_ids() + + dummy_proxy_pillar_file = """ + proxy: + proxytype: dummy""" - top_file = """ - base: - {control}: - - controlproxy - {one}: - - {one} - {two}: - - {two} - {three}: - - {three} - {four}: - - {four} - """.format( - control=salt_delta_proxy_factory.id, - one=proxy_one, - two=proxy_two, - three=proxy_three, - four=proxy_four, - ) controlproxy_pillar_file = """ proxy: proxytype: deltaproxy parallel_startup: {} ids: - - {} - - {} - - {} - - {} """.format( request.param, - proxy_one, - proxy_two, - proxy_three, - proxy_four, ) - dummy_proxy_pillar_file = """ - proxy: - proxytype: dummy - """ + top_file = """ + base: + {control}: + - controlproxy""".format( + control=salt_delta_proxy_factory.id, + ) + for minion_id in minion_ids: + top_file += """ + {minion_id}: + - dummy""".format( + minion_id=minion_id, + ) + + controlproxy_pillar_file += """ + - {} + """.format( + minion_id, + ) + + tempfiles = [] top_tempfile = salt_master.pillar_tree.base.temp_file("top.sls", top_file) controlproxy_tempfile = salt_master.pillar_tree.base.temp_file( "controlproxy.sls", controlproxy_pillar_file ) - dummy_proxy_one_tempfile = salt_master.pillar_tree.base.temp_file( - "{}.sls".format(proxy_one), dummy_proxy_pillar_file + tempfiles = [top_tempfile, controlproxy_tempfile] + + dummy_proxy_tempfile = salt_master.pillar_tree.base.temp_file( + "dummy.sls", dummy_proxy_pillar_file ) - dummy_proxy_two_tempfile = salt_master.pillar_tree.base.temp_file( - 
"{}.sls".format(proxy_two), dummy_proxy_pillar_file - ) - dummy_proxy_three_tempfile = salt_master.pillar_tree.base.temp_file( - "{}.sls".format(proxy_three), dummy_proxy_pillar_file - ) - dummy_proxy_four_tempfile = salt_master.pillar_tree.base.temp_file( - "{}.sls".format(proxy_four), dummy_proxy_pillar_file - ) - with top_tempfile, controlproxy_tempfile, dummy_proxy_one_tempfile, dummy_proxy_two_tempfile, dummy_proxy_three_tempfile, dummy_proxy_four_tempfile: + + with top_tempfile, controlproxy_tempfile, dummy_proxy_tempfile: yield diff --git a/tests/pytests/integration/proxy/test_deltaproxy.py b/tests/pytests/integration/proxy/test_deltaproxy.py index 48f23b18d68..6b6e7a5f0d1 100644 --- a/tests/pytests/integration/proxy/test_deltaproxy.py +++ b/tests/pytests/integration/proxy/test_deltaproxy.py @@ -2,6 +2,7 @@ Simple Smoke Tests for Connected Proxy Minion """ import logging +import random import pytest @@ -21,7 +22,7 @@ def skip_on_tcp_transport(request): pytest.skip("Deltaproxy under the TPC transport is not working. See #61367") -@pytest.fixture(params=pytest.helpers.proxy.delta_proxy_minion_ids()) +@pytest.fixture(params=random.sample(pytest.helpers.proxy.delta_proxy_minion_ids(), 4)) def proxy_id(request, salt_delta_proxy, skip_on_tcp_transport): return request.param From 8d212131971d5f79ec3f854b0b47ed6d170e5469 Mon Sep 17 00:00:00 2001 From: "Gareth J. Greenaway" Date: Fri, 28 Apr 2023 07:37:48 -0700 Subject: [PATCH 055/121] swapping around the logging if an exception occurs. --- salt/metaproxy/deltaproxy.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/salt/metaproxy/deltaproxy.py b/salt/metaproxy/deltaproxy.py index 2d94a6af70e..d866d6f4c1d 100644 --- a/salt/metaproxy/deltaproxy.py +++ b/salt/metaproxy/deltaproxy.py @@ -356,9 +356,9 @@ def post_master_init(self, master): except Exception as exc: # pylint: disable=broad-except _id = futures[future] log.info( - "An exception %s occured during initialization for %s, skipping.", - exc, + "An exception occured during initialization for %s, skipping: %s", _id, + exc, ) _failed.append(_id) continue @@ -383,9 +383,9 @@ def post_master_init(self, master): ) except Exception as exc: # pylint: disable=broad-except log.info( - "An exception %s occured during initialization for %s, skipping.", - exc, + "An exception occured during initialization for %s, skipping: %s", _id, + exc, ) _failed.append(_id) continue From abac2e04fa8fb54a3e18800d45206c58017fd486 Mon Sep 17 00:00:00 2001 From: "Gareth J. Greenaway" Date: Sat, 29 Apr 2023 17:44:57 -0700 Subject: [PATCH 056/121] Revert "Updating integration tests to launch more sub proxies. Update single target tests to use a sample of 4 sub proxies for the target." This reverts commit 8a08200d0669450b069a828ab777bbef65706855. 
--- tests/pytests/conftest.py | 46 ++--------- tests/pytests/integration/proxy/conftest.py | 80 ++++++++++++------- .../integration/proxy/test_deltaproxy.py | 3 +- 3 files changed, 56 insertions(+), 73 deletions(-) diff --git a/tests/pytests/conftest.py b/tests/pytests/conftest.py index 46aa89b0012..49181b9ce56 100644 --- a/tests/pytests/conftest.py +++ b/tests/pytests/conftest.py @@ -631,46 +631,12 @@ def io_loop(): # ----- Helpers -----------------------------------------------------------------------------------------------------> @pytest.helpers.proxy.register def delta_proxy_minion_ids(): - - number_words = { - 1: "one", - 2: "two", - 3: "three", - 4: "four", - 5: "five", - 6: "six", - 7: "seven", - 8: "eight", - 9: "nine", - 10: "ten", - 11: "eleven", - 12: "twelve", - 13: "thirteen", - 14: "fourteen", - 15: "fifteen", - 16: "sixteen", - 17: "seventeen", - 18: "eighteen", - 19: "nineteen", - 20: "twenty", - 21: "twenty_one", - 22: "twenty_two", - 23: "twenty_three", - 24: "twenty_four", - 25: "twenty_five", - 26: "twenty_six", - 27: "twenty_seven", - 28: "twenty_eight", - 29: "twenty_nine", - 30: "thirty", - 31: "thirty_one", - } - - sub_proxies = [] - for i in range(1, 32): - sub_proxies.append("dummy_proxy_{}".format(number_words[i])) - - return sub_proxies + return [ + "dummy_proxy_one", + "dummy_proxy_two", + "dummy_proxy_three", + "dummy_proxy_four", + ] # <---- Helpers ------------------------------------------------------------------------------------------------------ diff --git a/tests/pytests/integration/proxy/conftest.py b/tests/pytests/integration/proxy/conftest.py index b44fe40304a..d924f4eba8a 100644 --- a/tests/pytests/integration/proxy/conftest.py +++ b/tests/pytests/integration/proxy/conftest.py @@ -25,53 +25,71 @@ def deltaproxy_pillar_tree(request, salt_master, salt_delta_proxy_factory): """ Create the pillar files for controlproxy and two dummy proxy minions """ - minion_ids = pytest.helpers.proxy.delta_proxy_minion_ids() - - dummy_proxy_pillar_file = """ - proxy: - proxytype: dummy""" + ( + proxy_one, + proxy_two, + proxy_three, + proxy_four, + ) = pytest.helpers.proxy.delta_proxy_minion_ids() + top_file = """ + base: + {control}: + - controlproxy + {one}: + - {one} + {two}: + - {two} + {three}: + - {three} + {four}: + - {four} + """.format( + control=salt_delta_proxy_factory.id, + one=proxy_one, + two=proxy_two, + three=proxy_three, + four=proxy_four, + ) controlproxy_pillar_file = """ proxy: proxytype: deltaproxy parallel_startup: {} ids: + - {} + - {} + - {} + - {} """.format( request.param, + proxy_one, + proxy_two, + proxy_three, + proxy_four, ) - top_file = """ - base: - {control}: - - controlproxy""".format( - control=salt_delta_proxy_factory.id, - ) + dummy_proxy_pillar_file = """ + proxy: + proxytype: dummy + """ - for minion_id in minion_ids: - top_file += """ - {minion_id}: - - dummy""".format( - minion_id=minion_id, - ) - - controlproxy_pillar_file += """ - - {} - """.format( - minion_id, - ) - - tempfiles = [] top_tempfile = salt_master.pillar_tree.base.temp_file("top.sls", top_file) controlproxy_tempfile = salt_master.pillar_tree.base.temp_file( "controlproxy.sls", controlproxy_pillar_file ) - tempfiles = [top_tempfile, controlproxy_tempfile] - - dummy_proxy_tempfile = salt_master.pillar_tree.base.temp_file( - "dummy.sls", dummy_proxy_pillar_file + dummy_proxy_one_tempfile = salt_master.pillar_tree.base.temp_file( + "{}.sls".format(proxy_one), dummy_proxy_pillar_file ) - - with top_tempfile, controlproxy_tempfile, dummy_proxy_tempfile: + 
dummy_proxy_two_tempfile = salt_master.pillar_tree.base.temp_file( + "{}.sls".format(proxy_two), dummy_proxy_pillar_file + ) + dummy_proxy_three_tempfile = salt_master.pillar_tree.base.temp_file( + "{}.sls".format(proxy_three), dummy_proxy_pillar_file + ) + dummy_proxy_four_tempfile = salt_master.pillar_tree.base.temp_file( + "{}.sls".format(proxy_four), dummy_proxy_pillar_file + ) + with top_tempfile, controlproxy_tempfile, dummy_proxy_one_tempfile, dummy_proxy_two_tempfile, dummy_proxy_three_tempfile, dummy_proxy_four_tempfile: yield diff --git a/tests/pytests/integration/proxy/test_deltaproxy.py b/tests/pytests/integration/proxy/test_deltaproxy.py index 6b6e7a5f0d1..48f23b18d68 100644 --- a/tests/pytests/integration/proxy/test_deltaproxy.py +++ b/tests/pytests/integration/proxy/test_deltaproxy.py @@ -2,7 +2,6 @@ Simple Smoke Tests for Connected Proxy Minion """ import logging -import random import pytest @@ -22,7 +21,7 @@ def skip_on_tcp_transport(request): pytest.skip("Deltaproxy under the TPC transport is not working. See #61367") -@pytest.fixture(params=random.sample(pytest.helpers.proxy.delta_proxy_minion_ids(), 4)) +@pytest.fixture(params=pytest.helpers.proxy.delta_proxy_minion_ids()) def proxy_id(request, salt_delta_proxy, skip_on_tcp_transport): return request.param From d051a6b9d6dad0a7725a5e0a1cc5c08286224aa4 Mon Sep 17 00:00:00 2001 From: "Gareth J. Greenaway" Date: Sun, 30 Apr 2023 10:39:57 -0700 Subject: [PATCH 057/121] Adding test_exit_status_correct_usage_large_number_of_minions to test_salt_deltaproxy.py to test starting deltaproxy with larger number of sub proxies. --- .../integration/cli/test_salt_deltaproxy.py | 142 ++++++++++++++++++ 1 file changed, 142 insertions(+) diff --git a/tests/pytests/integration/cli/test_salt_deltaproxy.py b/tests/pytests/integration/cli/test_salt_deltaproxy.py index 85c7aeb7dcc..928239f57d0 100644 --- a/tests/pytests/integration/cli/test_salt_deltaproxy.py +++ b/tests/pytests/integration/cli/test_salt_deltaproxy.py @@ -4,6 +4,7 @@ import logging import os +import random import pytest from pytestshellutils.exceptions import FactoryNotStarted @@ -721,3 +722,144 @@ def ping(): # Terminate the proxy minion ret = factory.terminate() assert ret.returncode == salt.defaults.exitcodes.EX_OK, ret + + +# Hangs on Windows. You can add a timeout to the proxy.run command, but then +# it just times out. +@pytest.mark.skip_on_windows(reason=PRE_PYTEST_SKIP_REASON) +@pytest.mark.parametrize( + "parallel_startup", + [True, False], + ids=["parallel_startup=True", "parallel_startup=False"], +) +def test_exit_status_correct_usage_large_number_of_minions( + salt_master, + salt_cli, + proxy_minion_id, + parallel_startup, +): + """ + Ensure the salt-proxy control proxy starts and + is able to respond to test.ping, additionally ensure that + the proxies being controlled also respond to test.ping. + + Finally ensure correct exit status when salt-proxy exits correctly. 
+ + Skip on Windows because daemonization not supported + """ + + config_defaults = { + "metaproxy": "deltaproxy", + } + sub_proxies = [ + "proxy_one", + "proxy_two", + "proxy_three", + "proxy_four", + "proxy_five", + "proxy_six", + "proxy_seven", + "proxy_eight", + "proxy_nine", + "proxy_ten", + "proxy_eleven", + "proxy_twelve", + "proxy_thirteen", + "proxy_fourteen", + "proxy_fifteen", + "proxy_sixteen", + "proxy_seventeen", + "proxy_eighteen", + "proxy_nineteen", + "proxy_twenty", + "proxy_twenty_one", + "proxy_twenty_two", + "proxy_twenty_three", + "proxy_twenty_four", + "proxy_twenty_five", + "proxy_twenty_six", + "proxy_twenty_seven", + "proxy_twenty_eight", + "proxy_twenty_nine", + "proxy_thirty", + "proxy_thirty_one", + ] + + top_file = """ + base: + {control}: + - controlproxy + """.format( + control=proxy_minion_id, + ) + controlproxy_pillar_file = """ + proxy: + proxytype: deltaproxy + parallel_startup: {} + ids: + """.format( + parallel_startup + ) + + dummy_proxy_pillar_file = """ + proxy: + proxytype: dummy + """ + + for minion_id in sub_proxies: + top_file += """ + {minion_id}: + - dummy""".format( + minion_id=minion_id, + ) + + controlproxy_pillar_file += """ + - {} + """.format( + minion_id, + ) + + top_tempfile = salt_master.pillar_tree.base.temp_file("top.sls", top_file) + controlproxy_tempfile = salt_master.pillar_tree.base.temp_file( + "controlproxy.sls", controlproxy_pillar_file + ) + dummy_proxy_tempfile = salt_master.pillar_tree.base.temp_file( + "dummy.sls", + dummy_proxy_pillar_file, + ) + with top_tempfile, controlproxy_tempfile, dummy_proxy_tempfile: + factory = salt_master.salt_proxy_minion_daemon( + proxy_minion_id, + defaults=config_defaults, + extra_cli_arguments_after_first_start_failure=["--log-level=info"], + start_timeout=240, + ) + + for minion_id in [proxy_minion_id] + sub_proxies: + factory.before_start( + pytest.helpers.remove_stale_proxy_minion_cache_file, factory, minion_id + ) + factory.after_terminate( + pytest.helpers.remove_stale_minion_key, salt_master, minion_id + ) + factory.after_terminate( + pytest.helpers.remove_stale_proxy_minion_cache_file, factory, minion_id + ) + + with factory.started(): + assert factory.is_running() + + # Let's issue a ping the control proxy + ret = salt_cli.run("test.ping", minion_tgt=proxy_minion_id) + assert ret.returncode == 0 + assert ret.data is True + + for minion_id in random.sample(sub_proxies, 4): + # Let's issue a ping to one of the controlled proxies + ret = salt_cli.run("test.ping", minion_tgt=minion_id) + assert ret.returncode == 0 + assert ret.data is True + + # Terminate the proxy minion + ret = factory.terminate() + assert ret.returncode == salt.defaults.exitcodes.EX_OK, ret From eff6515760b8c0d41ab8cb96a7d740cae6488a33 Mon Sep 17 00:00:00 2001 From: "Gareth J. Greenaway" Date: Sun, 30 Apr 2023 22:03:13 -0700 Subject: [PATCH 058/121] temporarily removing the ping to the control proxy. 
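
For reference, the block being commented out below is the standard ping
assertion used throughout these tests; a minimal sketch, assuming the
salt_cli and proxy_minion_id fixtures defined earlier in the test module:

    def test_control_proxy_responds(salt_cli, proxy_minion_id):
        # Ping a single minion through the salt CLI wrapper and verify the reply.
        ret = salt_cli.run("test.ping", minion_tgt=proxy_minion_id)
        assert ret.returncode == 0  # the CLI exited cleanly
        assert ret.data is True  # the minion actually answered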
---
 tests/pytests/integration/cli/test_salt_deltaproxy.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/tests/pytests/integration/cli/test_salt_deltaproxy.py b/tests/pytests/integration/cli/test_salt_deltaproxy.py
index 928239f57d0..cca2fdcafd3 100644
--- a/tests/pytests/integration/cli/test_salt_deltaproxy.py
+++ b/tests/pytests/integration/cli/test_salt_deltaproxy.py
@@ -850,9 +850,9 @@ def test_exit_status_correct_usage_large_number_of_minions(
         assert factory.is_running()

         # Let's issue a ping the control proxy
-        ret = salt_cli.run("test.ping", minion_tgt=proxy_minion_id)
-        assert ret.returncode == 0
-        assert ret.data is True
+        # ret = salt_cli.run("test.ping", minion_tgt=proxy_minion_id)
+        # assert ret.returncode == 0
+        # assert ret.data is True

         for minion_id in random.sample(sub_proxies, 4):
             # Let's issue a ping to one of the controlled proxies

From 489f956a3c01560513bd9ef2e25fd975a80c9a83 Mon Sep 17 00:00:00 2001
From: "Gareth J. Greenaway"
Date: Mon, 1 May 2023 08:55:00 -0700
Subject: [PATCH 059/121] reduce the number of sub proxies.

---
 .../integration/cli/test_salt_deltaproxy.py | 21 +++----------------
 1 file changed, 3 insertions(+), 18 deletions(-)

diff --git a/tests/pytests/integration/cli/test_salt_deltaproxy.py b/tests/pytests/integration/cli/test_salt_deltaproxy.py
index cca2fdcafd3..2de144cb848 100644
--- a/tests/pytests/integration/cli/test_salt_deltaproxy.py
+++ b/tests/pytests/integration/cli/test_salt_deltaproxy.py
@@ -768,21 +768,6 @@ def test_exit_status_correct_usage_large_number_of_minions(
         "proxy_fourteen",
         "proxy_fifteen",
         "proxy_sixteen",
-        "proxy_seventeen",
-        "proxy_eighteen",
-        "proxy_nineteen",
-        "proxy_twenty",
-        "proxy_twenty_one",
-        "proxy_twenty_two",
-        "proxy_twenty_three",
-        "proxy_twenty_four",
-        "proxy_twenty_five",
-        "proxy_twenty_six",
-        "proxy_twenty_seven",
-        "proxy_twenty_eight",
-        "proxy_twenty_nine",
-        "proxy_thirty",
-        "proxy_thirty_one",
     ]

     top_file = """
@@ -835,9 +835,9 @@ def test_exit_status_correct_usage_large_number_of_minions(
         assert factory.is_running()

         # Let's issue a ping the control proxy
-        # ret = salt_cli.run("test.ping", minion_tgt=proxy_minion_id)
-        # assert ret.returncode == 0
-        # assert ret.data is True
+        ret = salt_cli.run("test.ping", minion_tgt=proxy_minion_id)
+        assert ret.returncode == 0
+        assert ret.data is True

         for minion_id in random.sample(sub_proxies, 4):
             # Let's issue a ping to one of the controlled proxies

From dae7ccb7df49d8a68bd74904e6caf8177c747072 Mon Sep 17 00:00:00 2001
From: "Gareth J. Greenaway"
Date: Mon, 1 May 2023 14:15:15 -0700
Subject: [PATCH 060/121] Adding changelog files

---
 changelog/64102.fixed.md |  4 ++++
 changelog/64103.fixed.md |  4 ++++
 salt/scripts.py          | 15 +++++++++++++++
 3 files changed, 23 insertions(+)
 create mode 100644 changelog/64102.fixed.md
 create mode 100644 changelog/64103.fixed.md

diff --git a/changelog/64102.fixed.md b/changelog/64102.fixed.md
new file mode 100644
index 00000000000..64988c2f2e0
--- /dev/null
+++ b/changelog/64102.fixed.md
@@ -0,0 +1,4 @@
+Update all the scheduler functions to include a fire_event argument which will determine whether to fire the completion event onto the event bus.
+This event is only used when these functions are called via the schedule execution modules.
+Inside deltaproxy, update all schedule related functions to include fire_event=False,
+as the event bus is not available when these functions are called.
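
As a reference for the mechanism both changelog entries describe, below is a
condensed sketch (not the verbatim salt.utils.schedule code, which also
handles pillar-defined jobs) of the gating pattern the schedule.py changes
introduce:

    import salt.utils.event

    def delete_job(self, name, persist=True, fire_event=True):
        # Drop the job from the in-memory schedule (pillar checks elided).
        self.opts["schedule"].pop(name, None)
        if fire_event:
            # Fire the completion event only when a consumer expects it; the
            # schedule execution modules do, while deltaproxy passes
            # fire_event=False because no event bus is available at that point.
            with salt.utils.event.get_event(
                "minion", opts=self.opts, listen=False
            ) as evt:
                evt.fire_event(
                    {"complete": True, "schedule": self._get_schedule()},
                    tag="/salt/minion/minion_schedule_delete_complete",
                )
        if persist:
            self.persist()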
diff --git a/changelog/64103.fixed.md b/changelog/64103.fixed.md new file mode 100644 index 00000000000..64988c2f2e0 --- /dev/null +++ b/changelog/64103.fixed.md @@ -0,0 +1,4 @@ +Update all the scheduler functions to include a fire_event argument which will determine whether to fire the completion event onto the event bus. +This event is only used when these functions are called via the schedule execution modules. +Inside deltaproxy, then update all scheudle related functions to include fire_event=False, +as the event bus is not available when these functions are called. diff --git a/salt/scripts.py b/salt/scripts.py index 07393373c9d..3f68c02ea98 100644 --- a/salt/scripts.py +++ b/salt/scripts.py @@ -444,6 +444,21 @@ def salt_call(): client.run() +def salt_call_local(): + """ + Directly call a salt command in the modules, does not require a running + salt minion to run. + """ + import salt.cli.call + + if "" in sys.path: + sys.path.remove("") + client = salt.cli.call.SaltCall() + client.set_default("local", True) + _install_signal_handlers(client) + client.run() + + def salt_run(): """ Execute a salt convenience routine. From 418d575bb43041f76a1324e77d3593597699c58e Mon Sep 17 00:00:00 2001 From: "Gareth J. Greenaway" Date: Mon, 1 May 2023 15:28:08 -0700 Subject: [PATCH 061/121] trying the large sub proxy as a functional test. --- .../functional/cli/test_salt_deltaproxy.py | 224 ++++++++++++++++++ 1 file changed, 224 insertions(+) create mode 100644 tests/pytests/functional/cli/test_salt_deltaproxy.py diff --git a/tests/pytests/functional/cli/test_salt_deltaproxy.py b/tests/pytests/functional/cli/test_salt_deltaproxy.py new file mode 100644 index 00000000000..c4e84088607 --- /dev/null +++ b/tests/pytests/functional/cli/test_salt_deltaproxy.py @@ -0,0 +1,224 @@ +""" +:codeauthor: Gareth J. Greenaway (ggreenaway@vmware.com) +""" + +import logging +import os +import random + +import pytest +from saltfactories.utils import random_string + +import salt.defaults.exitcodes +from tests.support.helpers import PRE_PYTEST_SKIP_REASON + +log = logging.getLogger(__name__) + + +pytestmark = [ + pytest.mark.skip_on_spawning_platform( + reason="Deltaproxy minions do not currently work on spawning platforms.", + ), + pytest.mark.core_test, +] + + +@pytest.fixture(scope="package") +def salt_master(salt_factories): + config_defaults = { + "open_mode": True, + } + salt_master = salt_factories.salt_master_daemon( + "deltaproxy-functional-master", defaults=config_defaults + ) + with salt_master.started(): + yield salt_master + + +@pytest.fixture(scope="package") +def salt_cli(salt_master): + """ + The ``salt`` CLI as a fixture against the running master + """ + assert salt_master.is_running() + return salt_master.salt_cli(timeout=30) + + +@pytest.fixture(scope="package", autouse=True) +def skip_on_tcp_transport(request): + if request.config.getoption("--transport") == "tcp": + pytest.skip("Deltaproxy under the TPC transport is not working. 
See #61367") + + +@pytest.fixture +def proxy_minion_id(salt_master): + _proxy_minion_id = random_string("proxy-minion-") + + try: + yield _proxy_minion_id + finally: + # Remove stale key if it exists + pytest.helpers.remove_stale_minion_key(salt_master, _proxy_minion_id) + + +def clear_proxy_minions(salt_master, proxy_minion_id): + for proxy in [proxy_minion_id, "dummy_proxy_one", "dummy_proxy_two"]: + pytest.helpers.remove_stale_minion_key(salt_master, proxy) + + cachefile = os.path.join( + salt_master.config["cachedir"], "{}.cache".format(proxy) + ) + if os.path.exists(cachefile): + os.unlink(cachefile) + + +# Hangs on Windows. You can add a timeout to the proxy.run command, but then +# it just times out. +@pytest.mark.skip_on_windows(reason=PRE_PYTEST_SKIP_REASON) +@pytest.mark.parametrize( + "parallel_startup", + [True, False], + ids=["parallel_startup=True", "parallel_startup=False"], +) +def test_exit_status_correct_usage_large_number_of_minions( + salt_master, + salt_cli, + proxy_minion_id, + parallel_startup, +): + """ + Ensure the salt-proxy control proxy starts and + is able to respond to test.ping, additionally ensure that + the proxies being controlled also respond to test.ping. + + Finally ensure correct exit status when salt-proxy exits correctly. + + Skip on Windows because daemonization not supported + """ + + config_defaults = { + "metaproxy": "deltaproxy", + } + sub_proxies = [ + "proxy_one", + "proxy_two", + "proxy_three", + "proxy_four", + "proxy_five", + "proxy_six", + "proxy_seven", + "proxy_eight", + "proxy_nine", + "proxy_ten", + "proxy_eleven", + "proxy_twelve", + "proxy_thirteen", + "proxy_fourteen", + "proxy_fifteen", + "proxy_sixteen", + "proxy_seventeen", + "proxy_eighteen", + "proxy_nineteen", + "proxy_twenty", + "proxy_twenty_one", + "proxy_twenty_two", + "proxy_twenty_three", + "proxy_twenty_four", + "proxy_twenty_five", + "proxy_twenty_six", + "proxy_twenty_seven", + "proxy_twenty_eight", + "proxy_twenty_nine", + "proxy_thirty", + "proxy_thirty_one", + ] + + top_file = """ + base: + {control}: + - controlproxy + """.format( + control=proxy_minion_id, + ) + controlproxy_pillar_file = """ + proxy: + proxytype: deltaproxy + parallel_startup: {} + ids: + """.format( + parallel_startup + ) + + dummy_proxy_pillar_file = """ + proxy: + proxytype: dummy + """ + + for minion_id in sub_proxies: + top_file += """ + {minion_id}: + - dummy""".format( + minion_id=minion_id, + ) + + controlproxy_pillar_file += """ + - {} + """.format( + minion_id, + ) + + top_tempfile = salt_master.pillar_tree.base.temp_file("top.sls", top_file) + controlproxy_tempfile = salt_master.pillar_tree.base.temp_file( + "controlproxy.sls", controlproxy_pillar_file + ) + dummy_proxy_tempfile = salt_master.pillar_tree.base.temp_file( + "dummy.sls", + dummy_proxy_pillar_file, + ) + with top_tempfile, controlproxy_tempfile, dummy_proxy_tempfile: + with salt_master.started(): + assert salt_master.is_running() + + factory = salt_master.salt_proxy_minion_daemon( + proxy_minion_id, + defaults=config_defaults, + extra_cli_arguments_after_first_start_failure=["--log-level=info"], + start_timeout=240, + ) + + for minion_id in [proxy_minion_id] + sub_proxies: + factory.before_start( + pytest.helpers.remove_stale_proxy_minion_cache_file, + factory, + minion_id, + ) + factory.after_terminate( + pytest.helpers.remove_stale_minion_key, salt_master, minion_id + ) + factory.after_terminate( + pytest.helpers.remove_stale_proxy_minion_cache_file, + factory, + minion_id, + ) + + with factory.started(): + assert 
factory.is_running() + + # Let's issue a ping the control proxy + ret = salt_cli.run("test.ping", minion_tgt=proxy_minion_id) + assert ret.returncode == 0 + assert ret.data is True + + for minion_id in random.sample(sub_proxies, 4): + # Let's issue a ping to one of the controlled proxies + ret = salt_cli.run("test.ping", minion_tgt=minion_id) + assert ret.returncode == 0 + assert ret.data is True + + # Terminate the proxy minion + ret = factory.terminate() + assert ret.returncode == salt.defaults.exitcodes.EX_OK, ret + + # Terminate the salt master + ret = salt_master.terminate() + assert ret.returncode == salt.defaults.exitcodes.EX_OK, ret From beaa9ee3a59892cedc50022f9bf69048298d5a7b Mon Sep 17 00:00:00 2001 From: "Gareth J. Greenaway" Date: Mon, 1 May 2023 20:47:43 -0700 Subject: [PATCH 062/121] removing from integration in favor of the in functional tests --- .../integration/cli/test_salt_deltaproxy.py | 127 ------------------ 1 file changed, 127 deletions(-) diff --git a/tests/pytests/integration/cli/test_salt_deltaproxy.py b/tests/pytests/integration/cli/test_salt_deltaproxy.py index 2de144cb848..85c7aeb7dcc 100644 --- a/tests/pytests/integration/cli/test_salt_deltaproxy.py +++ b/tests/pytests/integration/cli/test_salt_deltaproxy.py @@ -4,7 +4,6 @@ import logging import os -import random import pytest from pytestshellutils.exceptions import FactoryNotStarted @@ -722,129 +721,3 @@ def ping(): # Terminate the proxy minion ret = factory.terminate() assert ret.returncode == salt.defaults.exitcodes.EX_OK, ret - - -# Hangs on Windows. You can add a timeout to the proxy.run command, but then -# it just times out. -@pytest.mark.skip_on_windows(reason=PRE_PYTEST_SKIP_REASON) -@pytest.mark.parametrize( - "parallel_startup", - [True, False], - ids=["parallel_startup=True", "parallel_startup=False"], -) -def test_exit_status_correct_usage_large_number_of_minions( - salt_master, - salt_cli, - proxy_minion_id, - parallel_startup, -): - """ - Ensure the salt-proxy control proxy starts and - is able to respond to test.ping, additionally ensure that - the proxies being controlled also respond to test.ping. - - Finally ensure correct exit status when salt-proxy exits correctly. 
- - Skip on Windows because daemonization not supported - """ - - config_defaults = { - "metaproxy": "deltaproxy", - } - sub_proxies = [ - "proxy_one", - "proxy_two", - "proxy_three", - "proxy_four", - "proxy_five", - "proxy_six", - "proxy_seven", - "proxy_eight", - "proxy_nine", - "proxy_ten", - "proxy_eleven", - "proxy_twelve", - "proxy_thirteen", - "proxy_fourteen", - "proxy_fifteen", - "proxy_sixteen", - ] - - top_file = """ - base: - {control}: - - controlproxy - """.format( - control=proxy_minion_id, - ) - controlproxy_pillar_file = """ - proxy: - proxytype: deltaproxy - parallel_startup: {} - ids: - """.format( - parallel_startup - ) - - dummy_proxy_pillar_file = """ - proxy: - proxytype: dummy - """ - - for minion_id in sub_proxies: - top_file += """ - {minion_id}: - - dummy""".format( - minion_id=minion_id, - ) - - controlproxy_pillar_file += """ - - {} - """.format( - minion_id, - ) - - top_tempfile = salt_master.pillar_tree.base.temp_file("top.sls", top_file) - controlproxy_tempfile = salt_master.pillar_tree.base.temp_file( - "controlproxy.sls", controlproxy_pillar_file - ) - dummy_proxy_tempfile = salt_master.pillar_tree.base.temp_file( - "dummy.sls", - dummy_proxy_pillar_file, - ) - with top_tempfile, controlproxy_tempfile, dummy_proxy_tempfile: - factory = salt_master.salt_proxy_minion_daemon( - proxy_minion_id, - defaults=config_defaults, - extra_cli_arguments_after_first_start_failure=["--log-level=info"], - start_timeout=240, - ) - - for minion_id in [proxy_minion_id] + sub_proxies: - factory.before_start( - pytest.helpers.remove_stale_proxy_minion_cache_file, factory, minion_id - ) - factory.after_terminate( - pytest.helpers.remove_stale_minion_key, salt_master, minion_id - ) - factory.after_terminate( - pytest.helpers.remove_stale_proxy_minion_cache_file, factory, minion_id - ) - - with factory.started(): - assert factory.is_running() - - # Let's issue a ping the control proxy - ret = salt_cli.run("test.ping", minion_tgt=proxy_minion_id) - assert ret.returncode == 0 - assert ret.data is True - - for minion_id in random.sample(sub_proxies, 4): - # Let's issue a ping to one of the controlled proxies - ret = salt_cli.run("test.ping", minion_tgt=minion_id) - assert ret.returncode == 0 - assert ret.data is True - - # Terminate the proxy minion - ret = factory.terminate() - assert ret.returncode == salt.defaults.exitcodes.EX_OK, ret From 924b1db68d6ee1085246bbe68dfef17ee8b559de Mon Sep 17 00:00:00 2001 From: "Gareth J. Greenaway" Date: Tue, 2 May 2023 08:13:51 -0700 Subject: [PATCH 063/121] removing something that snuck in. --- salt/scripts.py | 15 --------------- 1 file changed, 15 deletions(-) diff --git a/salt/scripts.py b/salt/scripts.py index 3f68c02ea98..07393373c9d 100644 --- a/salt/scripts.py +++ b/salt/scripts.py @@ -444,21 +444,6 @@ def salt_call(): client.run() -def salt_call_local(): - """ - Directly call a salt command in the modules, does not require a running - salt minion to run. - """ - import salt.cli.call - - if "" in sys.path: - sys.path.remove("") - client = salt.cli.call.SaltCall() - client.set_default("local", True) - _install_signal_handlers(client) - client.run() - - def salt_run(): """ Execute a salt convenience routine. From c673bc454243ca349bfb5e8dcc0eaa247d1ed51e Mon Sep 17 00:00:00 2001 From: "Gareth J. Greenaway" Date: Tue, 2 May 2023 08:14:46 -0700 Subject: [PATCH 064/121] Adding one more sub proxy. 
---
 tests/pytests/functional/cli/test_salt_deltaproxy.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/tests/pytests/functional/cli/test_salt_deltaproxy.py b/tests/pytests/functional/cli/test_salt_deltaproxy.py
index c4e84088607..5bc7604c84a 100644
--- a/tests/pytests/functional/cli/test_salt_deltaproxy.py
+++ b/tests/pytests/functional/cli/test_salt_deltaproxy.py
@@ -131,6 +131,7 @@ def test_exit_status_correct_usage_large_number_of_minions(
         "proxy_twenty_nine",
         "proxy_thirty",
         "proxy_thirty_one",
+        "proxy_thirty_two",
     ]
 
     top_file = """

From 6a859de92a077100b96424bdd88180ffa15f1230 Mon Sep 17 00:00:00 2001
From: "Gareth J. Greenaway"
Date: Tue, 2 May 2023 09:24:06 -0700
Subject: [PATCH 065/121] fixing the changelog files.

---
 changelog/64102.fixed.md | 3 +--
 changelog/64103.fixed.md | 3 +--
 2 files changed, 2 insertions(+), 4 deletions(-)

diff --git a/changelog/64102.fixed.md b/changelog/64102.fixed.md
index 64988c2f2e0..09d14ab16cb 100644
--- a/changelog/64102.fixed.md
+++ b/changelog/64102.fixed.md
@@ -1,4 +1,3 @@
 Update all the scheduler functions to include a fire_event argument which will determine whether to fire the completion event onto the event bus.
 This event is only used when these functions are called via the schedule execution modules.
-Inside deltaproxy, then update all scheudle related functions to include fire_event=False,
-as the event bus is not available when these functions are called.
+Update all the calls to the schedule related functions in the deltaproxy proxy minion to include fire_event=False, as the event bus is not available when these functions are called.
diff --git a/changelog/64103.fixed.md b/changelog/64103.fixed.md
index 64988c2f2e0..09d14ab16cb 100644
--- a/changelog/64103.fixed.md
+++ b/changelog/64103.fixed.md
@@ -1,4 +1,3 @@
 Update all the scheduler functions to include a fire_event argument which will determine whether to fire the completion event onto the event bus.
 This event is only used when these functions are called via the schedule execution modules.
-Inside deltaproxy, then update all scheudle related functions to include fire_event=False,
-as the event bus is not available when these functions are called.
+Update all the calls to the schedule related functions in the deltaproxy proxy minion to include fire_event=False, as the event bus is not available when these functions are called.
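
From the deltaproxy side, the corrected changelog wording translates to a single call-site pattern: every call into the scheduler made while the sub-proxies are being brought up, before any event bus exists, passes fire_event=False. A hedged sketch of that pattern, reusing the add_job sketch shown earlier (the job name, function, and interval below are assumptions for illustration, not the deltaproxy metaproxy's real internals):

    def register_subproxy_jobs(sub_proxy_ids):
        # Illustrative only: schedule one keepalive job per sub-proxy while
        # the control proxy is still starting up.
        for minion_id in sub_proxy_ids:
            add_job(
                "__proxy_keepalive_" + minion_id,
                # No event bus is available at this point in deltaproxy
                # startup, so suppress the completion event that the
                # schedule execution modules would otherwise consume.
                fire_event=False,
                function="status.proxy_reconnect",
                seconds=60,
            )
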
From f601bd078cd902d2754200459282ab4a5bf40e93 Mon Sep 17 00:00:00 2001 From: MKLeb Date: Tue, 18 Apr 2023 19:01:07 -0400 Subject: [PATCH 066/121] Run the package download tests for minor, latest, and the version being staged/released --- pkg/tests/download/test_pkg_download.py | 38 ++++++++++++++++++------- 1 file changed, 28 insertions(+), 10 deletions(-) diff --git a/pkg/tests/download/test_pkg_download.py b/pkg/tests/download/test_pkg_download.py index bd6e0454215..3caeddc00f8 100644 --- a/pkg/tests/download/test_pkg_download.py +++ b/pkg/tests/download/test_pkg_download.py @@ -14,7 +14,6 @@ log = logging.getLogger(__name__) def get_salt_test_commands(): - salt_release = get_salt_release() if platform.is_windows(): if packaging.version.parse(salt_release) > packaging.version.parse("3005"): @@ -96,6 +95,14 @@ def get_salt_release(): return salt_release +@pytest.fixture( + scope="module", + params=["latest", "minor", packaging.version.parse(get_salt_release()).major], +) +def repo_subpath(request): + return request.param + + @pytest.fixture(scope="module") def gpg_key_name(salt_release): if packaging.version.parse(salt_release) > packaging.version.parse("3005"): @@ -110,7 +117,7 @@ def salt_release(): @pytest.fixture(scope="module") def _setup_system( - tmp_path_factory, grains, shell, root_url, salt_release, gpg_key_name + tmp_path_factory, grains, shell, root_url, salt_release, gpg_key_name, repo_subpath ): downloads_path = tmp_path_factory.mktemp("downloads") try: @@ -120,6 +127,7 @@ def _setup_system( root_url=root_url, salt_release=salt_release, downloads_path=downloads_path, + repo_subpath=repo_subpath, ) elif grains["os_family"] == "MacOS": setup_macos( @@ -127,6 +135,7 @@ def _setup_system( root_url=root_url, salt_release=salt_release, downloads_path=downloads_path, + repo_subpath=repo_subpath, ) elif grains["os"] == "Amazon": setup_redhat_family( @@ -137,6 +146,7 @@ def _setup_system( salt_release=salt_release, downloads_path=downloads_path, gpg_key_name=gpg_key_name, + repo_subpath=repo_subpath, ) elif grains["os"] == "Fedora": setup_redhat_family( @@ -147,6 +157,7 @@ def _setup_system( salt_release=salt_release, downloads_path=downloads_path, gpg_key_name=gpg_key_name, + repo_subpath=repo_subpath, ) elif grains["os"] == "VMware Photon OS": setup_redhat_family( @@ -157,6 +168,7 @@ def _setup_system( salt_release=salt_release, downloads_path=downloads_path, gpg_key_name=gpg_key_name, + repo_subpath=repo_subpath, ) elif grains["os_family"] == "RedHat": setup_redhat_family( @@ -167,6 +179,7 @@ def _setup_system( salt_release=salt_release, downloads_path=downloads_path, gpg_key_name=gpg_key_name, + repo_subpath=repo_subpath, ) elif grains["os_family"] == "Debian": setup_debian_family( @@ -178,6 +191,7 @@ def _setup_system( salt_release=salt_release, downloads_path=downloads_path, gpg_key_name=gpg_key_name, + repo_subpath=repo_subpath, ) else: pytest.fail("Don't know how to handle %s", grains["osfinger"]) @@ -194,12 +208,15 @@ def setup_redhat_family( salt_release, downloads_path, gpg_key_name, + repo_subpath, ): arch = os.environ.get("SALT_REPO_ARCH") or "x86_64" if arch == "aarch64": arch = "arm64" - repo_url_base = f"{root_url}/{os_name}/{os_version}/{arch}/minor/{salt_release}" + repo_url_base = ( + f"{root_url}/{os_name}/{os_version}/{arch}/{repo_subpath}/{salt_release}" + ) gpg_file_url = f"{root_url}/{os_name}/{os_version}/{arch}/{gpg_key_name}" try: pytest.helpers.download_file(gpg_file_url, downloads_path / gpg_key_name) @@ -249,6 +266,7 @@ def setup_debian_family( 
salt_release, downloads_path, gpg_key_name, + repo_subpath, ): arch = os.environ.get("SALT_REPO_ARCH") or "amd64" if arch == "aarch64": @@ -260,7 +278,9 @@ def setup_debian_family( if ret.returncode != 0: pytest.fail(str(ret)) - repo_url_base = f"{root_url}/{os_name}/{os_version}/{arch}/minor/{salt_release}" + repo_url_base = ( + f"{root_url}/{os_name}/{os_version}/{arch}/{repo_subpath}/{salt_release}" + ) gpg_file_url = f"{root_url}/{os_name}/{os_version}/{arch}/{gpg_key_name}" try: pytest.helpers.download_file(gpg_file_url, downloads_path / gpg_key_name) @@ -303,15 +323,14 @@ def setup_debian_family( pytest.fail(str(ret)) -def setup_macos(shell, root_url, salt_release, downloads_path): - +def setup_macos(shell, root_url, salt_release, downloads_path, repo_subpath): arch = os.environ.get("SALT_REPO_ARCH") or "x86_64" if arch == "aarch64": arch = "arm64" if packaging.version.parse(salt_release) > packaging.version.parse("3005"): mac_pkg = f"salt-{salt_release}-py3-{arch}.pkg" - mac_pkg_url = f"{root_url}/macos/minor/{salt_release}/{mac_pkg}" + mac_pkg_url = f"{root_url}/macos/{repo_subpath}/{salt_release}/{mac_pkg}" else: mac_pkg_url = f"{root_url}/macos/{salt_release}/{mac_pkg}" mac_pkg = f"salt-{salt_release}-macos-{arch}.pkg" @@ -330,8 +349,7 @@ def setup_macos(shell, root_url, salt_release, downloads_path): assert ret.returncode == 0, ret -def setup_windows(shell, root_url, salt_release, downloads_path): - +def setup_windows(shell, root_url, salt_release, downloads_path, repo_subpath): root_dir = pathlib.Path(r"C:\Program Files\Salt Project\Salt") arch = os.environ.get("SALT_REPO_ARCH") or "amd64" @@ -345,7 +363,7 @@ def setup_windows(shell, root_url, salt_release, downloads_path): if arch.lower() != "x86": arch = arch.upper() win_pkg = f"Salt-Minion-{salt_release}-Py3-{arch}.msi" - win_pkg_url = f"{root_url}/windows/minor/{salt_release}/{win_pkg}" + win_pkg_url = f"{root_url}/windows/{repo_subpath}/{salt_release}/{win_pkg}" ssm_bin = root_dir / "ssm.exe" else: win_pkg = f"salt-{salt_release}-windows-{arch}.exe" From 2024eb3d711a74495a86e9989f8259db8462319d Mon Sep 17 00:00:00 2001 From: MKLeb Date: Tue, 18 Apr 2023 19:13:10 -0400 Subject: [PATCH 067/121] Only minor will have the `salt_release` subpath --- pkg/tests/download/test_pkg_download.py | 31 ++++++++++++++++++------- 1 file changed, 23 insertions(+), 8 deletions(-) diff --git a/pkg/tests/download/test_pkg_download.py b/pkg/tests/download/test_pkg_download.py index 3caeddc00f8..92205070b57 100644 --- a/pkg/tests/download/test_pkg_download.py +++ b/pkg/tests/download/test_pkg_download.py @@ -214,10 +214,15 @@ def setup_redhat_family( if arch == "aarch64": arch = "arm64" - repo_url_base = ( - f"{root_url}/{os_name}/{os_version}/{arch}/{repo_subpath}/{salt_release}" - ) + if repo_subpath == "minor": + repo_url_base = ( + f"{root_url}/{os_name}/{os_version}/{arch}/{repo_subpath}/{salt_release}" + ) + else: + repo_url_base = f"{root_url}/{os_name}/{os_version}/{arch}/{repo_subpath}" + gpg_file_url = f"{root_url}/{os_name}/{os_version}/{arch}/{gpg_key_name}" + try: pytest.helpers.download_file(gpg_file_url, downloads_path / gpg_key_name) except Exception as exc: @@ -278,10 +283,14 @@ def setup_debian_family( if ret.returncode != 0: pytest.fail(str(ret)) - repo_url_base = ( - f"{root_url}/{os_name}/{os_version}/{arch}/{repo_subpath}/{salt_release}" - ) + if repo_subpath == "minor": + repo_url_base = ( + f"{root_url}/{os_name}/{os_version}/{arch}/{repo_subpath}/{salt_release}" + ) + else: + repo_url_base = 
f"{root_url}/{os_name}/{os_version}/{arch}/{repo_subpath}" gpg_file_url = f"{root_url}/{os_name}/{os_version}/{arch}/{gpg_key_name}" + try: pytest.helpers.download_file(gpg_file_url, downloads_path / gpg_key_name) except Exception as exc: @@ -330,7 +339,10 @@ def setup_macos(shell, root_url, salt_release, downloads_path, repo_subpath): if packaging.version.parse(salt_release) > packaging.version.parse("3005"): mac_pkg = f"salt-{salt_release}-py3-{arch}.pkg" - mac_pkg_url = f"{root_url}/macos/{repo_subpath}/{salt_release}/{mac_pkg}" + if repo_subpath == "minor": + mac_pkg_url = f"{root_url}/macos/{repo_subpath}/{salt_release}/{mac_pkg}" + else: + mac_pkg_url = f"{root_url}/macos/{repo_subpath}/{mac_pkg}" else: mac_pkg_url = f"{root_url}/macos/{salt_release}/{mac_pkg}" mac_pkg = f"salt-{salt_release}-macos-{arch}.pkg" @@ -363,7 +375,10 @@ def setup_windows(shell, root_url, salt_release, downloads_path, repo_subpath): if arch.lower() != "x86": arch = arch.upper() win_pkg = f"Salt-Minion-{salt_release}-Py3-{arch}.msi" - win_pkg_url = f"{root_url}/windows/{repo_subpath}/{salt_release}/{win_pkg}" + if repo_subpath == "minor": + win_pkg_url = f"{root_url}/windows/{repo_subpath}/{salt_release}/{win_pkg}" + else: + win_pkg_url = f"{root_url}/windows/{repo_subpath}/{win_pkg}" ssm_bin = root_dir / "ssm.exe" else: win_pkg = f"salt-{salt_release}-windows-{arch}.exe" From 353cf57470fcebbd5d7af9174301ec841a93383c Mon Sep 17 00:00:00 2001 From: MKLeb Date: Wed, 19 Apr 2023 12:15:37 -0400 Subject: [PATCH 068/121] `dmesg` should now already be installed on centosstream 9 --- pkg/tests/download/test_pkg_download.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/pkg/tests/download/test_pkg_download.py b/pkg/tests/download/test_pkg_download.py index 92205070b57..271abe123d0 100644 --- a/pkg/tests/download/test_pkg_download.py +++ b/pkg/tests/download/test_pkg_download.py @@ -252,10 +252,6 @@ def setup_redhat_family( ), ] - # For some reason, the centosstream9 container doesn't have dmesg installed - if os_version == 9 and os_name == "redhat": - commands.insert(2, ("yum", "install", "-y", "util-linux")) - for cmd in commands: ret = shell.run(*cmd, check=False) if ret.returncode != 0: From 201fc53e14d500ca8986fcee6d4eab42832d9071 Mon Sep 17 00:00:00 2001 From: MKLeb Date: Wed, 19 Apr 2023 12:19:55 -0400 Subject: [PATCH 069/121] TO REVERT: Allow testing staging runs from hotfix/3006.x/run-pkg-download-tests-for-all --- .github/workflows/release.yml | 4 ++-- .github/workflows/staging.yml | 4 ++-- .github/workflows/templates/layout.yml.jinja | 2 +- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index e79b5c3bfad..a2b3f86f875 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -47,9 +47,9 @@ jobs: - name: Check Branch run: | echo "Trying to run the staging workflow from branch ${{ github.ref_name }}" - if [ "${{ contains(fromJSON('["master", "3006.x"]'), github.ref_name) }}" != "true" ]; then + if [ "${{ contains(fromJSON('["master", "3006.x", "hotfix/3006.x/run-pkg-download-tests-for-all"]'), github.ref_name) }}" != "true" ]; then echo "Running the staging workflow from the ${{ github.ref_name }} branch is not allowed" - echo "Allowed branches: master, 3006.x" + echo "Allowed branches: master, 3006.x, hotfix/3006.x/run-pkg-download-tests-for-all" exit 1 else echo "Allowed to release from branch ${{ github.ref_name }}" diff --git a/.github/workflows/staging.yml b/.github/workflows/staging.yml index 
684b96183bd..7e64607354b 100644 --- a/.github/workflows/staging.yml +++ b/.github/workflows/staging.yml @@ -60,9 +60,9 @@ jobs: - name: Check Branch run: | echo "Trying to run the staging workflow from branch ${{ github.ref_name }}" - if [ "${{ contains(fromJSON('["master", "3006.x"]'), github.ref_name) }}" != "true" ]; then + if [ "${{ contains(fromJSON('["master", "3006.x", "hotfix/3006.x/run-pkg-download-tests-for-all"]'), github.ref_name) }}" != "true" ]; then echo "Running the staging workflow from the ${{ github.ref_name }} branch is not allowed" - echo "Allowed branches: master, 3006.x" + echo "Allowed branches: master, 3006.x, hotfix/3006.x/run-pkg-download-tests-for-all" exit 1 else echo "Allowed to release from branch ${{ github.ref_name }}" diff --git a/.github/workflows/templates/layout.yml.jinja b/.github/workflows/templates/layout.yml.jinja index 37b86d32da8..1f1f49d1f86 100644 --- a/.github/workflows/templates/layout.yml.jinja +++ b/.github/workflows/templates/layout.yml.jinja @@ -9,7 +9,7 @@ <%- set skip_junit_reports_check = skip_junit_reports_check|default("${{ github.event_name == 'pull_request' }}") %> <%- set gpg_key_id = "64CBBC8173D76B3F" %> <%- set prepare_actual_release = prepare_actual_release | default(False) %> -<%- set release_branches = ["master", "3006.x"] %> +<%- set release_branches = ["master", "3006.x", "hotfix/3006.x/run-pkg-download-tests-for-all"] %> --- <%- block name %> name: <{ workflow_name }> From 28861443fe8982aa8851678f02e089efd6c0e5cd Mon Sep 17 00:00:00 2001 From: MKLeb Date: Tue, 25 Apr 2023 13:44:48 -0400 Subject: [PATCH 070/121] Only run the download tests for the `latest` subpath if the current version being released is greater than or equal to the latest salt release --- .github/workflows/ci.yml | 14 +++++ .github/workflows/nightly.yml | 14 +++++ .github/workflows/release.yml | 14 +++++ .github/workflows/scheduled.yml | 14 +++++ .github/workflows/staging.yml | 17 ++++++ .github/workflows/templates/layout.yml.jinja | 14 +++++ .github/workflows/templates/release.yml.jinja | 14 +++++ .../test-pkg-repo-downloads.yml.jinja | 3 + .../test-package-downloads-action-linux.yml | 6 ++ .../test-package-downloads-action-macos.yml | 6 ++ .../test-package-downloads-action-windows.yml | 6 ++ pkg/tests/download/test_pkg_download.py | 16 +++++- tools/ci.py | 26 +++++++++ tools/pkg/repo.py | 57 +------------------ tools/utils.py | 51 +++++++++++++++++ 15 files changed, 217 insertions(+), 55 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 49f7240dcc5..19508cfcfe7 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -37,6 +37,8 @@ jobs: testrun: ${{ steps.define-testrun.outputs.testrun }} salt-version: ${{ steps.setup-salt-version.outputs.salt-version }} cache-seed: ${{ steps.set-cache-seed.outputs.cache-seed }} + latest-release: ${{ steps.get-salt-releases.outputs.latest-release }} + releases: ${{ steps.get-salt-releases.outputs.releases }} steps: - uses: actions/checkout@v3 with: @@ -204,6 +206,18 @@ jobs: run: | echo '${{ steps.define-jobs.outputs.jobs }}' | jq -C '.' + - name: Get Salt Releases + id: get-salt-releases + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + tools ci get-releases + + - name: Check Salt Releases + run: | + echo '${{ steps.get-salt-releases.outputs.latest-release }}' | jq -C '.' + echo '${{ steps.get-salt-releases.outputs.releases }}' | jq -C '.' 
+ - name: Define Testrun id: define-testrun run: | diff --git a/.github/workflows/nightly.yml b/.github/workflows/nightly.yml index ca3f409acb1..12a90122d2a 100644 --- a/.github/workflows/nightly.yml +++ b/.github/workflows/nightly.yml @@ -90,6 +90,8 @@ jobs: testrun: ${{ steps.define-testrun.outputs.testrun }} salt-version: ${{ steps.setup-salt-version.outputs.salt-version }} cache-seed: ${{ steps.set-cache-seed.outputs.cache-seed }} + latest-release: ${{ steps.get-salt-releases.outputs.latest-release }} + releases: ${{ steps.get-salt-releases.outputs.releases }} steps: - uses: actions/checkout@v3 with: @@ -257,6 +259,18 @@ jobs: run: | echo '${{ steps.define-jobs.outputs.jobs }}' | jq -C '.' + - name: Get Salt Releases + id: get-salt-releases + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + tools ci get-releases + + - name: Check Salt Releases + run: | + echo '${{ steps.get-salt-releases.outputs.latest-release }}' | jq -C '.' + echo '${{ steps.get-salt-releases.outputs.releases }}' | jq -C '.' + - name: Define Testrun id: define-testrun run: | diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index a2b3f86f875..6ab8145e2bc 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -67,6 +67,8 @@ jobs: outputs: salt-version: ${{ steps.setup-salt-version.outputs.salt-version }} cache-seed: ${{ steps.set-cache-seed.outputs.cache-seed }} + latest-release: ${{ steps.get-salt-releases.outputs.latest-release }} + releases: ${{ steps.get-salt-releases.outputs.releases }} steps: - uses: actions/checkout@v3 with: @@ -103,6 +105,18 @@ jobs: run: | tools pkg repo confirm-staged --repository ${{ github.repository }} ${{ steps.setup-salt-version.outputs.salt-version }} + - name: Get Salt Releases + id: get-salt-releases + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + tools ci get-releases + + - name: Check Salt Releases + run: | + echo '${{ steps.get-salt-releases.outputs.latest-release }}' | jq -C '.' + echo '${{ steps.get-salt-releases.outputs.releases }}' | jq -C '.' + - name: Set Cache Seed Output id: set-cache-seed run: | diff --git a/.github/workflows/scheduled.yml b/.github/workflows/scheduled.yml index b00b7e8d1e6..d23d6b50a4b 100644 --- a/.github/workflows/scheduled.yml +++ b/.github/workflows/scheduled.yml @@ -80,6 +80,8 @@ jobs: testrun: ${{ steps.define-testrun.outputs.testrun }} salt-version: ${{ steps.setup-salt-version.outputs.salt-version }} cache-seed: ${{ steps.set-cache-seed.outputs.cache-seed }} + latest-release: ${{ steps.get-salt-releases.outputs.latest-release }} + releases: ${{ steps.get-salt-releases.outputs.releases }} steps: - uses: actions/checkout@v3 with: @@ -247,6 +249,18 @@ jobs: run: | echo '${{ steps.define-jobs.outputs.jobs }}' | jq -C '.' + - name: Get Salt Releases + id: get-salt-releases + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + tools ci get-releases + + - name: Check Salt Releases + run: | + echo '${{ steps.get-salt-releases.outputs.latest-release }}' | jq -C '.' + echo '${{ steps.get-salt-releases.outputs.releases }}' | jq -C '.' 
+ - name: Define Testrun id: define-testrun run: | diff --git a/.github/workflows/staging.yml b/.github/workflows/staging.yml index 7e64607354b..f60bd9e4073 100644 --- a/.github/workflows/staging.yml +++ b/.github/workflows/staging.yml @@ -81,6 +81,8 @@ jobs: testrun: ${{ steps.define-testrun.outputs.testrun }} salt-version: ${{ steps.setup-salt-version.outputs.salt-version }} cache-seed: ${{ steps.set-cache-seed.outputs.cache-seed }} + latest-release: ${{ steps.get-salt-releases.outputs.latest-release }} + releases: ${{ steps.get-salt-releases.outputs.releases }} steps: - uses: actions/checkout@v3 with: @@ -254,6 +256,18 @@ jobs: run: | echo '${{ steps.define-jobs.outputs.jobs }}' | jq -C '.' + - name: Get Salt Releases + id: get-salt-releases + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + tools ci get-releases + + - name: Check Salt Releases + run: | + echo '${{ steps.get-salt-releases.outputs.latest-release }}' | jq -C '.' + echo '${{ steps.get-salt-releases.outputs.releases }}' | jq -C '.' + - name: Define Testrun id: define-testrun run: | @@ -2115,6 +2129,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: staging skip-code-coverage: true + latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" secrets: inherit almalinux-8-arm64-pkg-download-tests: @@ -2574,6 +2589,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: staging skip-code-coverage: true + latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" secrets: inherit windows-2022-nsis-amd64-pkg-download-tests: @@ -2610,6 +2626,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: staging skip-code-coverage: true + latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" secrets: inherit publish-pypi: diff --git a/.github/workflows/templates/layout.yml.jinja b/.github/workflows/templates/layout.yml.jinja index 1f1f49d1f86..96a1b5b6e42 100644 --- a/.github/workflows/templates/layout.yml.jinja +++ b/.github/workflows/templates/layout.yml.jinja @@ -90,6 +90,8 @@ jobs: testrun: ${{ steps.define-testrun.outputs.testrun }} salt-version: ${{ steps.setup-salt-version.outputs.salt-version }} cache-seed: ${{ steps.set-cache-seed.outputs.cache-seed }} + latest-release: ${{ steps.get-salt-releases.outputs.latest-release }} + releases: ${{ steps.get-salt-releases.outputs.releases }} steps: - uses: actions/checkout@v3 with: @@ -267,6 +269,18 @@ jobs: run: | echo '${{ steps.define-jobs.outputs.jobs }}' | jq -C '.' + - name: Get Salt Releases + id: get-salt-releases + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + tools ci get-releases + + - name: Check Salt Releases + run: | + echo '${{ steps.get-salt-releases.outputs.latest-release }}' | jq -C '.' + echo '${{ steps.get-salt-releases.outputs.releases }}' | jq -C '.' 
+ - name: Define Testrun id: define-testrun run: | diff --git a/.github/workflows/templates/release.yml.jinja b/.github/workflows/templates/release.yml.jinja index 92b123eeafe..ad651fcfaae 100644 --- a/.github/workflows/templates/release.yml.jinja +++ b/.github/workflows/templates/release.yml.jinja @@ -95,6 +95,8 @@ permissions: outputs: salt-version: ${{ steps.setup-salt-version.outputs.salt-version }} cache-seed: ${{ steps.set-cache-seed.outputs.cache-seed }} + latest-release: ${{ steps.get-salt-releases.outputs.latest-release }} + releases: ${{ steps.get-salt-releases.outputs.releases }} steps: - uses: actions/checkout@v3 with: @@ -131,6 +133,18 @@ permissions: run: | tools pkg repo confirm-staged --repository ${{ github.repository }} ${{ steps.setup-salt-version.outputs.salt-version }} + - name: Get Salt Releases + id: get-salt-releases + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + tools ci get-releases + + - name: Check Salt Releases + run: | + echo '${{ steps.get-salt-releases.outputs.latest-release }}' | jq -C '.' + echo '${{ steps.get-salt-releases.outputs.releases }}' | jq -C '.' + - name: Set Cache Seed Output id: set-cache-seed run: | diff --git a/.github/workflows/templates/test-pkg-repo-downloads.yml.jinja b/.github/workflows/templates/test-pkg-repo-downloads.yml.jinja index cb62f445c7a..ac826f6e9fe 100644 --- a/.github/workflows/templates/test-pkg-repo-downloads.yml.jinja +++ b/.github/workflows/templates/test-pkg-repo-downloads.yml.jinja @@ -56,6 +56,7 @@ salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: <{ gh_environment }> skip-code-coverage: true + latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" secrets: inherit <%- endfor %> @@ -89,6 +90,7 @@ salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: <{ gh_environment }> skip-code-coverage: true + latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" secrets: inherit <%- endfor %> @@ -123,6 +125,7 @@ salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: <{ gh_environment }> skip-code-coverage: true + latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" secrets: inherit <%- endfor %> <%- endfor %> diff --git a/.github/workflows/test-package-downloads-action-linux.yml b/.github/workflows/test-package-downloads-action-linux.yml index f1475a1df69..ee67c4d4020 100644 --- a/.github/workflows/test-package-downloads-action-linux.yml +++ b/.github/workflows/test-package-downloads-action-linux.yml @@ -27,6 +27,10 @@ on: required: true type: string description: The environment to run tests against + latest-release: + required: true + type: string + description: The latest salt release package-name: required: false type: string @@ -216,6 +220,7 @@ jobs: SALT_REPO_DOMAIN_RELEASE: ${{ vars.SALT_REPO_DOMAIN_RELEASE || 'repo.saltproject.io' }} SALT_REPO_DOMAIN_STAGING: ${{ vars.SALT_REPO_DOMAIN_STAGING || 'staging.repo.saltproject.io' }} SKIP_CODE_COVERAGE: "${{ inputs.skip-code-coverage && '1' || '0' }}" + LATEST_SALT_RELEASE: "${{ inputs.latest-release }}" run: | tools --timestamps --timeout-secs=1800 vm testplan --skip-requirements-install \ -E INSTALL_TYPE -E SALT_RELEASE -E SALT_REPO_ARCH -E SALT_REPO_TYPE -E SALT_REPO_USER -E SALT_REPO_PASS \ @@ -232,6 +237,7 @@ jobs: SALT_REPO_DOMAIN_RELEASE: ${{ vars.SALT_REPO_DOMAIN_RELEASE || 'repo.saltproject.io' }} SALT_REPO_DOMAIN_STAGING: ${{ vars.SALT_REPO_DOMAIN_STAGING || 'staging.repo.saltproject.io' }} SKIP_CODE_COVERAGE: "${{ 
inputs.skip-code-coverage && '1' || '0' }}" + LATEST_SALT_RELEASE: "${{ inputs.latest-release }}" run: | tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \ -E INSTALL_TYPE -E SALT_RELEASE -E SALT_REPO_ARCH -E SALT_REPO_TYPE -E SALT_REPO_USER -E SALT_REPO_PASS \ diff --git a/.github/workflows/test-package-downloads-action-macos.yml b/.github/workflows/test-package-downloads-action-macos.yml index c5825a29d5b..ec985efbcee 100644 --- a/.github/workflows/test-package-downloads-action-macos.yml +++ b/.github/workflows/test-package-downloads-action-macos.yml @@ -27,6 +27,10 @@ on: required: true type: string description: The environment to run tests against + latest-release: + required: true + type: string + description: The latest salt release python-version: required: false type: string @@ -194,6 +198,7 @@ jobs: GITHUB_ACTIONS_PIPELINE: "1" SKIP_INITIAL_GH_ACTIONS_FAILURES: "1" SKIP_CODE_COVERAGE: "${{ inputs.skip-code-coverage && '1' || '0' }}" + LATEST_SALT_RELEASE: "${{ inputs.latest-release }}" run: | sudo -E nox --force-color -e ${{ inputs.nox-session }} -- download-pkgs @@ -209,6 +214,7 @@ jobs: SKIP_CODE_COVERAGE: "${{ inputs.skip-code-coverage && '1' || '0' }}" SALT_RELEASE: "${{ inputs.salt-version }}" SALT_REPO_ARCH: ${{ inputs.arch }} + LATEST_SALT_RELEASE: "${{ inputs.latest-release }}" SALT_REPO_TYPE: ${{ inputs.environment }} SALT_REPO_USER: ${{ secrets.SALT_REPO_USER }} SALT_REPO_PASS: ${{ secrets.SALT_REPO_PASS }} diff --git a/.github/workflows/test-package-downloads-action-windows.yml b/.github/workflows/test-package-downloads-action-windows.yml index 4c253410647..10d4462e451 100644 --- a/.github/workflows/test-package-downloads-action-windows.yml +++ b/.github/workflows/test-package-downloads-action-windows.yml @@ -31,6 +31,10 @@ on: required: true type: string description: The environment to run tests against + latest-release: + required: true + type: string + description: The latest salt release package-name: required: false type: string @@ -220,6 +224,7 @@ jobs: INSTALL_TYPE: ${{ inputs.pkg-type }} SALT_RELEASE: "${{ inputs.salt-version }}" SALT_REPO_ARCH: ${{ inputs.arch }} + LATEST_SALT_RELEASE: "${{ inputs.latest-release }}" SALT_REPO_TYPE: ${{ inputs.environment }} SALT_REPO_USER: ${{ secrets.SALT_REPO_USER }} SALT_REPO_PASS: ${{ secrets.SALT_REPO_PASS }} @@ -237,6 +242,7 @@ jobs: INSTALL_TYPE: ${{ inputs.pkg-type }} SALT_RELEASE: "${{ inputs.salt-version }}" SALT_REPO_ARCH: ${{ inputs.arch }} + LATEST_SALT_RELEASE: "${{ inputs.latest-release }}" SALT_REPO_TYPE: ${{ inputs.environment }} SALT_REPO_USER: ${{ secrets.SALT_REPO_USER }} SALT_REPO_PASS: ${{ secrets.SALT_REPO_PASS }} diff --git a/pkg/tests/download/test_pkg_download.py b/pkg/tests/download/test_pkg_download.py index 271abe123d0..136325c94a7 100644 --- a/pkg/tests/download/test_pkg_download.py +++ b/pkg/tests/download/test_pkg_download.py @@ -95,9 +95,23 @@ def get_salt_release(): return salt_release +def get_repo_subpath_params(): + current_release = packaging.version.parse(get_salt_release()) + params = ["minor", current_release.major] + latest_env_var = os.environ.get("LATEST_SALT_RELEASE") + if latest_env_var is not None: + latest_release = packaging.version.parse(get_salt_release()) + if current_release >= latest_release: + log.debug( + f"Running the tests for the latest release since {str(current_release)} >= {str(latest_release)}" + ) + params.append("latest") + return params + + @pytest.fixture( scope="module", - params=["latest", "minor", 
packaging.version.parse(get_salt_release()).major], + params=[get_repo_subpath_params()], ) def repo_subpath(request): return request.param diff --git a/tools/ci.py b/tools/ci.py index a3904b81693..444fc544312 100644 --- a/tools/ci.py +++ b/tools/ci.py @@ -645,3 +645,29 @@ def pkg_matrix(ctx: Context, distro_slug: str, pkg_type: str): with open(github_output, "a", encoding="utf-8") as wfh: wfh.write(f"matrix={json.dumps(matrix)}\n") ctx.exit(0) + + +@ci.command( + name="get-releases", + arguments={ + "repository": { + "help": "The repository to query for releases, e.g. saltstack/salt", + }, + }, +) +def get_releases(ctx: Context, repository: str = "saltstack/salt"): + """ + Generate the latest salt release. + """ + github_output = os.environ.get("GITHUB_OUTPUT") + + if github_output is None: + ctx.exit(1, "The 'GITHUB_OUTPUT' variable is not set.") + else: + releases = tools.utils.get_salt_releases(ctx, repository) + latest = releases[-1] + + with open(github_output, "a", encoding="utf-8") as wfh: + wfh.write(f"latest-release={latest}\n") + wfh.write(f"releases={json.dumps(releases)}\n") + ctx.exit(0) diff --git a/tools/pkg/repo.py b/tools/pkg/repo.py index 7c27776385f..d781cf3c8ff 100644 --- a/tools/pkg/repo.py +++ b/tools/pkg/repo.py @@ -23,7 +23,7 @@ from ptscripts import Context, command_group import tools.pkg import tools.utils -from tools.utils import Version +from tools.utils import Version, get_salt_releases try: import boto3 @@ -1302,7 +1302,7 @@ def github( with open(github_output, "a", encoding="utf-8") as wfh: wfh.write(f"release-messsage-file={release_message_path.resolve()}\n") - releases = _get_salt_releases(ctx, repository) + releases = get_salt_releases(ctx, repository) if Version(salt_version) >= releases[-1]: make_latest = True else: @@ -1343,7 +1343,7 @@ def confirm_unreleased( """ Confirm that the passed version is not yet tagged and/or released. 
""" - releases = _get_salt_releases(ctx, repository) + releases = get_salt_releases(ctx, repository) if Version(salt_version) in releases: ctx.error(f"There's already a '{salt_version}' tag or github release.") ctx.exit(1) @@ -1402,57 +1402,6 @@ def confirm_staged(ctx: Context, salt_version: str, repository: str = "saltstack ctx.exit(0) -def _get_salt_releases(ctx: Context, repository: str) -> list[Version]: - """ - Return a list of salt versions - """ - versions = set() - with ctx.web as web: - headers = { - "Accept": "application/vnd.github+json", - } - if "GITHUB_TOKEN" in os.environ: - headers["Authorization"] = f"Bearer {os.environ['GITHUB_TOKEN']}" - web.headers.update(headers) - ret = web.get(f"https://api.github.com/repos/{repository}/tags") - if ret.status_code != 200: - ctx.error( - f"Failed to get the tags for repository {repository!r}: {ret.reason}" - ) - ctx.exit(1) - for tag in ret.json(): - name = tag["name"] - if name.startswith("v"): - name = name[1:] - if "-" in name: - # We're not going to parse dash tags - continue - if "docs" in name: - # We're not going to consider doc tags - continue - versions.add(Version(name)) - - # Now let's go through the github releases - ret = web.get(f"https://api.github.com/repos/{repository}/releases") - if ret.status_code != 200: - ctx.error( - f"Failed to get the releases for repository {repository!r}: {ret.reason}" - ) - ctx.exit(1) - for release in ret.json(): - name = release["name"] - if name.startswith("v"): - name = name[1:] - if name and "-" not in name and "docs" not in name: - # We're not going to parse dash or docs releases - versions.add(Version(name)) - name = release["tag_name"] - if "-" not in name and "docs" not in name: - # We're not going to parse dash or docs releases - versions.add(Version(name)) - return sorted(versions) - - def _get_repo_detailed_file_list( bucket_name: str, bucket_folder: str = "", diff --git a/tools/utils.py b/tools/utils.py index bf4bfb4d1a5..cb4379c61e0 100644 --- a/tools/utils.py +++ b/tools/utils.py @@ -118,3 +118,54 @@ class Version(packaging.version.Version): def __hash__(self): return hash(str(self)) + + +def get_salt_releases(ctx: Context, repository: str) -> list[Version]: + """ + Return a list of salt versions + """ + versions = set() + with ctx.web as web: + headers = { + "Accept": "application/vnd.github+json", + } + if "GITHUB_TOKEN" in os.environ: + headers["Authorization"] = f"Bearer {os.environ['GITHUB_TOKEN']}" + web.headers.update(headers) + ret = web.get(f"https://api.github.com/repos/{repository}/tags") + if ret.status_code != 200: + ctx.error( + f"Failed to get the tags for repository {repository!r}: {ret.reason}" + ) + ctx.exit(1) + for tag in ret.json(): + name = tag["name"] + if name.startswith("v"): + name = name[1:] + if "-" in name: + # We're not going to parse dash tags + continue + if "docs" in name: + # We're not going to consider doc tags + continue + versions.add(Version(name)) + + # Now let's go through the github releases + ret = web.get(f"https://api.github.com/repos/{repository}/releases") + if ret.status_code != 200: + ctx.error( + f"Failed to get the releases for repository {repository!r}: {ret.reason}" + ) + ctx.exit(1) + for release in ret.json(): + name = release["name"] + if name.startswith("v"): + name = name[1:] + if name and "-" not in name and "docs" not in name: + # We're not going to parse dash or docs releases + versions.add(Version(name)) + name = release["tag_name"] + if "-" not in name and "docs" not in name: + # We're not going to parse dash or 
docs releases + versions.add(Version(name)) + return sorted(versions) From 759dbf2fe6503976eebcb949ab5143fcc92f220f Mon Sep 17 00:00:00 2001 From: MKLeb Date: Tue, 25 Apr 2023 13:56:37 -0400 Subject: [PATCH 071/121] Convert the versions to strings before dumping them as json --- tools/ci.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/tools/ci.py b/tools/ci.py index 444fc544312..ba7a7c2f849 100644 --- a/tools/ci.py +++ b/tools/ci.py @@ -665,9 +665,10 @@ def get_releases(ctx: Context, repository: str = "saltstack/salt"): ctx.exit(1, "The 'GITHUB_OUTPUT' variable is not set.") else: releases = tools.utils.get_salt_releases(ctx, repository) - latest = releases[-1] + str_releases = [str(version) for version in releases] + latest = str_releases[-1] with open(github_output, "a", encoding="utf-8") as wfh: wfh.write(f"latest-release={latest}\n") - wfh.write(f"releases={json.dumps(releases)}\n") + wfh.write(f"releases={json.dumps(str_releases)}\n") ctx.exit(0) From 8008417de9f99008e5b41970beb145bac6bb3627 Mon Sep 17 00:00:00 2001 From: MKLeb Date: Wed, 26 Apr 2023 09:23:14 -0400 Subject: [PATCH 072/121] `get_repo_subpath_params` already returns a list --- pkg/tests/download/test_pkg_download.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pkg/tests/download/test_pkg_download.py b/pkg/tests/download/test_pkg_download.py index 136325c94a7..21bf1856172 100644 --- a/pkg/tests/download/test_pkg_download.py +++ b/pkg/tests/download/test_pkg_download.py @@ -111,7 +111,7 @@ def get_repo_subpath_params(): @pytest.fixture( scope="module", - params=[get_repo_subpath_params()], + params=get_repo_subpath_params(), ) def repo_subpath(request): return request.param From 9c349a8bc9951b8baa5885d191c1c02529bb98ae Mon Sep 17 00:00:00 2001 From: MKLeb Date: Wed, 26 Apr 2023 11:36:10 -0400 Subject: [PATCH 073/121] get the latest release from the environment variable we propogate --- pkg/tests/download/test_pkg_download.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pkg/tests/download/test_pkg_download.py b/pkg/tests/download/test_pkg_download.py index 21bf1856172..c7c768c8503 100644 --- a/pkg/tests/download/test_pkg_download.py +++ b/pkg/tests/download/test_pkg_download.py @@ -100,7 +100,7 @@ def get_repo_subpath_params(): params = ["minor", current_release.major] latest_env_var = os.environ.get("LATEST_SALT_RELEASE") if latest_env_var is not None: - latest_release = packaging.version.parse(get_salt_release()) + latest_release = packaging.version.parse(latest_env_var) if current_release >= latest_release: log.debug( f"Running the tests for the latest release since {str(current_release)} >= {str(latest_release)}" From 88bcab29ca1936b0022c1abf79586c5cd260d647 Mon Sep 17 00:00:00 2001 From: MKLeb Date: Mon, 1 May 2023 17:28:25 -0400 Subject: [PATCH 074/121] Run pre-commit after the rebase --- .github/workflows/release.yml | 30 ++++++++++++++++++++++++++++++ .github/workflows/staging.yml | 27 +++++++++++++++++++++++++++ 2 files changed, 57 insertions(+) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 6ab8145e2bc..4480f999007 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -247,6 +247,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: release skip-code-coverage: true + latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" secrets: inherit almalinux-8-arm64-pkg-download-tests: @@ -265,6 +266,7 @@ jobs: salt-version: "${{ 
needs.prepare-workflow.outputs.salt-version }}" environment: release skip-code-coverage: true + latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" secrets: inherit almalinux-9-pkg-download-tests: @@ -283,6 +285,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: release skip-code-coverage: true + latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" secrets: inherit almalinux-9-arm64-pkg-download-tests: @@ -301,6 +304,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: release skip-code-coverage: true + latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" secrets: inherit amazonlinux-2-pkg-download-tests: @@ -319,6 +323,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: release skip-code-coverage: true + latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" secrets: inherit amazonlinux-2-arm64-pkg-download-tests: @@ -337,6 +342,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: release skip-code-coverage: true + latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" secrets: inherit centos-7-pkg-download-tests: @@ -355,6 +361,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: release skip-code-coverage: true + latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" secrets: inherit centos-7-arm64-pkg-download-tests: @@ -373,6 +380,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: release skip-code-coverage: true + latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" secrets: inherit centosstream-8-pkg-download-tests: @@ -391,6 +399,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: release skip-code-coverage: true + latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" secrets: inherit centosstream-8-arm64-pkg-download-tests: @@ -409,6 +418,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: release skip-code-coverage: true + latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" secrets: inherit centosstream-9-pkg-download-tests: @@ -427,6 +437,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: release skip-code-coverage: true + latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" secrets: inherit centosstream-9-arm64-pkg-download-tests: @@ -445,6 +456,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: release skip-code-coverage: true + latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" secrets: inherit debian-10-pkg-download-tests: @@ -463,6 +475,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: release skip-code-coverage: true + latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" secrets: inherit debian-11-pkg-download-tests: @@ -481,6 +494,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: release skip-code-coverage: true + latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" secrets: inherit debian-11-arm64-pkg-download-tests: @@ -499,6 +513,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: release skip-code-coverage: true + latest-release: "${{ 
needs.prepare-workflow.outputs.latest-release }}" secrets: inherit fedora-36-pkg-download-tests: @@ -517,6 +532,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: release skip-code-coverage: true + latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" secrets: inherit fedora-36-arm64-pkg-download-tests: @@ -535,6 +551,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: release skip-code-coverage: true + latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" secrets: inherit fedora-37-pkg-download-tests: @@ -553,6 +570,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: release skip-code-coverage: true + latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" secrets: inherit fedora-37-arm64-pkg-download-tests: @@ -571,6 +589,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: release skip-code-coverage: true + latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" secrets: inherit fedora-38-pkg-download-tests: @@ -589,6 +608,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: release skip-code-coverage: true + latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" secrets: inherit fedora-38-arm64-pkg-download-tests: @@ -607,6 +627,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: release skip-code-coverage: true + latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" secrets: inherit photonos-3-pkg-download-tests: @@ -625,6 +646,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: release skip-code-coverage: true + latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" secrets: inherit photonos-4-pkg-download-tests: @@ -643,6 +665,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: release skip-code-coverage: true + latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" secrets: inherit ubuntu-2004-pkg-download-tests: @@ -661,6 +684,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: release skip-code-coverage: true + latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" secrets: inherit ubuntu-2004-arm64-pkg-download-tests: @@ -679,6 +703,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: release skip-code-coverage: true + latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" secrets: inherit ubuntu-2204-pkg-download-tests: @@ -697,6 +722,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: release skip-code-coverage: true + latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" secrets: inherit ubuntu-2204-arm64-pkg-download-tests: @@ -715,6 +741,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: release skip-code-coverage: true + latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" secrets: inherit macos-12-pkg-download-tests: @@ -733,6 +760,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: release skip-code-coverage: true + latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" secrets: inherit windows-2022-nsis-amd64-pkg-download-tests: @@ -752,6 +780,7 @@ jobs: 
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: release skip-code-coverage: true + latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" secrets: inherit windows-2022-msi-amd64-pkg-download-tests: @@ -771,6 +800,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: release skip-code-coverage: true + latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" secrets: inherit release: diff --git a/.github/workflows/staging.yml b/.github/workflows/staging.yml index f60bd9e4073..6e52371c4de 100644 --- a/.github/workflows/staging.yml +++ b/.github/workflows/staging.yml @@ -2147,6 +2147,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: staging skip-code-coverage: true + latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" secrets: inherit almalinux-9-pkg-download-tests: @@ -2164,6 +2165,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: staging skip-code-coverage: true + latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" secrets: inherit almalinux-9-arm64-pkg-download-tests: @@ -2181,6 +2183,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: staging skip-code-coverage: true + latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" secrets: inherit amazonlinux-2-pkg-download-tests: @@ -2198,6 +2201,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: staging skip-code-coverage: true + latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" secrets: inherit amazonlinux-2-arm64-pkg-download-tests: @@ -2215,6 +2219,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: staging skip-code-coverage: true + latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" secrets: inherit centos-7-pkg-download-tests: @@ -2232,6 +2237,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: staging skip-code-coverage: true + latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" secrets: inherit centos-7-arm64-pkg-download-tests: @@ -2249,6 +2255,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: staging skip-code-coverage: true + latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" secrets: inherit centosstream-8-pkg-download-tests: @@ -2266,6 +2273,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: staging skip-code-coverage: true + latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" secrets: inherit centosstream-8-arm64-pkg-download-tests: @@ -2283,6 +2291,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: staging skip-code-coverage: true + latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" secrets: inherit centosstream-9-pkg-download-tests: @@ -2300,6 +2309,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: staging skip-code-coverage: true + latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" secrets: inherit centosstream-9-arm64-pkg-download-tests: @@ -2317,6 +2327,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: staging skip-code-coverage: true + latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" 
secrets: inherit debian-10-pkg-download-tests: @@ -2334,6 +2345,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: staging skip-code-coverage: true + latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" secrets: inherit debian-11-pkg-download-tests: @@ -2351,6 +2363,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: staging skip-code-coverage: true + latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" secrets: inherit debian-11-arm64-pkg-download-tests: @@ -2368,6 +2381,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: staging skip-code-coverage: true + latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" secrets: inherit fedora-36-pkg-download-tests: @@ -2385,6 +2399,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: staging skip-code-coverage: true + latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" secrets: inherit fedora-36-arm64-pkg-download-tests: @@ -2402,6 +2417,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: staging skip-code-coverage: true + latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" secrets: inherit fedora-37-pkg-download-tests: @@ -2419,6 +2435,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: staging skip-code-coverage: true + latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" secrets: inherit fedora-37-arm64-pkg-download-tests: @@ -2436,6 +2453,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: staging skip-code-coverage: true + latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" secrets: inherit fedora-38-pkg-download-tests: @@ -2453,6 +2471,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: staging skip-code-coverage: true + latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" secrets: inherit fedora-38-arm64-pkg-download-tests: @@ -2470,6 +2489,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: staging skip-code-coverage: true + latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" secrets: inherit photonos-3-pkg-download-tests: @@ -2487,6 +2507,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: staging skip-code-coverage: true + latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" secrets: inherit photonos-4-pkg-download-tests: @@ -2504,6 +2525,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: staging skip-code-coverage: true + latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" secrets: inherit ubuntu-2004-pkg-download-tests: @@ -2521,6 +2543,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: staging skip-code-coverage: true + latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" secrets: inherit ubuntu-2004-arm64-pkg-download-tests: @@ -2538,6 +2561,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: staging skip-code-coverage: true + latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" secrets: inherit ubuntu-2204-pkg-download-tests: @@ -2555,6 +2579,7 @@ jobs: salt-version: "${{ 
needs.prepare-workflow.outputs.salt-version }}" environment: staging skip-code-coverage: true + latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" secrets: inherit ubuntu-2204-arm64-pkg-download-tests: @@ -2572,6 +2597,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: staging skip-code-coverage: true + latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" secrets: inherit macos-12-pkg-download-tests: @@ -2608,6 +2634,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: staging skip-code-coverage: true + latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" secrets: inherit windows-2022-msi-amd64-pkg-download-tests: From 12d41db5d2a7f45afa9c55106bb158e71f56e362 Mon Sep 17 00:00:00 2001 From: MKLeb Date: Tue, 2 May 2023 12:02:16 -0400 Subject: [PATCH 075/121] Turn `setup_windows` into a context manager so we can uninstall the MSI packages after the tests are done --- pkg/tests/download/test_pkg_download.py | 216 +++++++++++++----------- 1 file changed, 115 insertions(+), 101 deletions(-) diff --git a/pkg/tests/download/test_pkg_download.py b/pkg/tests/download/test_pkg_download.py index c7c768c8503..5bb0e3a96a4 100644 --- a/pkg/tests/download/test_pkg_download.py +++ b/pkg/tests/download/test_pkg_download.py @@ -1,6 +1,7 @@ """ Test Salt Pkg Downloads """ +import contextlib import logging import os import pathlib @@ -135,81 +136,84 @@ def _setup_system( ): downloads_path = tmp_path_factory.mktemp("downloads") try: + # Windows is a special case, because sometimes we need to uninstall the packages if grains["os_family"] == "Windows": - setup_windows( + with setup_windows( shell, root_url=root_url, salt_release=salt_release, downloads_path=downloads_path, repo_subpath=repo_subpath, - ) - elif grains["os_family"] == "MacOS": - setup_macos( - shell, - root_url=root_url, - salt_release=salt_release, - downloads_path=downloads_path, - repo_subpath=repo_subpath, - ) - elif grains["os"] == "Amazon": - setup_redhat_family( - shell, - os_name=grains["os"].lower(), - os_version=grains["osmajorrelease"], - root_url=root_url, - salt_release=salt_release, - downloads_path=downloads_path, - gpg_key_name=gpg_key_name, - repo_subpath=repo_subpath, - ) - elif grains["os"] == "Fedora": - setup_redhat_family( - shell, - os_name=grains["os"].lower(), - os_version=grains["osmajorrelease"], - root_url=root_url, - salt_release=salt_release, - downloads_path=downloads_path, - gpg_key_name=gpg_key_name, - repo_subpath=repo_subpath, - ) - elif grains["os"] == "VMware Photon OS": - setup_redhat_family( - shell, - os_name="photon", - os_version=grains["osmajorrelease"], - root_url=root_url, - salt_release=salt_release, - downloads_path=downloads_path, - gpg_key_name=gpg_key_name, - repo_subpath=repo_subpath, - ) - elif grains["os_family"] == "RedHat": - setup_redhat_family( - shell, - os_name="redhat", - os_version=grains["osmajorrelease"], - root_url=root_url, - salt_release=salt_release, - downloads_path=downloads_path, - gpg_key_name=gpg_key_name, - repo_subpath=repo_subpath, - ) - elif grains["os_family"] == "Debian": - setup_debian_family( - shell, - os_name=grains["os"].lower(), - os_version=grains["osrelease"], - os_codename=grains["oscodename"], - root_url=root_url, - salt_release=salt_release, - downloads_path=downloads_path, - gpg_key_name=gpg_key_name, - repo_subpath=repo_subpath, - ) + ): + yield else: - pytest.fail("Don't know how to handle %s", grains["osfinger"]) - yield + if 
grains["os_family"] == "MacOS": + setup_macos( + shell, + root_url=root_url, + salt_release=salt_release, + downloads_path=downloads_path, + repo_subpath=repo_subpath, + ) + elif grains["os"] == "Amazon": + setup_redhat_family( + shell, + os_name=grains["os"].lower(), + os_version=grains["osmajorrelease"], + root_url=root_url, + salt_release=salt_release, + downloads_path=downloads_path, + gpg_key_name=gpg_key_name, + repo_subpath=repo_subpath, + ) + elif grains["os"] == "Fedora": + setup_redhat_family( + shell, + os_name=grains["os"].lower(), + os_version=grains["osmajorrelease"], + root_url=root_url, + salt_release=salt_release, + downloads_path=downloads_path, + gpg_key_name=gpg_key_name, + repo_subpath=repo_subpath, + ) + elif grains["os"] == "VMware Photon OS": + setup_redhat_family( + shell, + os_name="photon", + os_version=grains["osmajorrelease"], + root_url=root_url, + salt_release=salt_release, + downloads_path=downloads_path, + gpg_key_name=gpg_key_name, + repo_subpath=repo_subpath, + ) + elif grains["os_family"] == "RedHat": + setup_redhat_family( + shell, + os_name="redhat", + os_version=grains["osmajorrelease"], + root_url=root_url, + salt_release=salt_release, + downloads_path=downloads_path, + gpg_key_name=gpg_key_name, + repo_subpath=repo_subpath, + ) + elif grains["os_family"] == "Debian": + setup_debian_family( + shell, + os_name=grains["os"].lower(), + os_version=grains["osrelease"], + os_codename=grains["oscodename"], + root_url=root_url, + salt_release=salt_release, + downloads_path=downloads_path, + gpg_key_name=gpg_key_name, + repo_subpath=repo_subpath, + ) + else: + pytest.fail("Don't know how to handle %s", grains["osfinger"]) + yield finally: shutil.rmtree(downloads_path, ignore_errors=True) @@ -371,44 +375,54 @@ def setup_macos(shell, root_url, salt_release, downloads_path, repo_subpath): assert ret.returncode == 0, ret +@contextlib.contextmanager def setup_windows(shell, root_url, salt_release, downloads_path, repo_subpath): - root_dir = pathlib.Path(r"C:\Program Files\Salt Project\Salt") + try: + root_dir = pathlib.Path(r"C:\Program Files\Salt Project\Salt") - arch = os.environ.get("SALT_REPO_ARCH") or "amd64" - install_type = os.environ.get("INSTALL_TYPE") or "msi" - if packaging.version.parse(salt_release) > packaging.version.parse("3005"): + arch = os.environ.get("SALT_REPO_ARCH") or "amd64" + install_type = os.environ.get("INSTALL_TYPE") or "msi" + if packaging.version.parse(salt_release) > packaging.version.parse("3005"): + if install_type.lower() == "nsis": + if arch.lower() != "x86": + arch = arch.upper() + win_pkg = f"Salt-Minion-{salt_release}-Py3-{arch}-Setup.exe" + else: + if arch.lower() != "x86": + arch = arch.upper() + win_pkg = f"Salt-Minion-{salt_release}-Py3-{arch}.msi" + if repo_subpath == "minor": + win_pkg_url = ( + f"{root_url}/windows/{repo_subpath}/{salt_release}/{win_pkg}" + ) + else: + win_pkg_url = f"{root_url}/windows/{repo_subpath}/{win_pkg}" + ssm_bin = root_dir / "ssm.exe" + else: + win_pkg = f"salt-{salt_release}-windows-{arch}.exe" + win_pkg_url = f"{root_url}/windows/{salt_release}/{win_pkg}" + ssm_bin = root_dir / "bin" / "ssm_bin" + + pkg_path = downloads_path / win_pkg + + pytest.helpers.download_file(win_pkg_url, pkg_path) if install_type.lower() == "nsis": - if arch.lower() != "x86": - arch = arch.upper() - win_pkg = f"Salt-Minion-{salt_release}-Py3-{arch}-Setup.exe" + ret = shell.run(str(pkg_path), "/start-minion=0", "/S", check=False) else: - if arch.lower() != "x86": - arch = arch.upper() - win_pkg = 
f"Salt-Minion-{salt_release}-Py3-{arch}.msi" - if repo_subpath == "minor": - win_pkg_url = f"{root_url}/windows/{repo_subpath}/{salt_release}/{win_pkg}" - else: - win_pkg_url = f"{root_url}/windows/{repo_subpath}/{win_pkg}" - ssm_bin = root_dir / "ssm.exe" - else: - win_pkg = f"salt-{salt_release}-windows-{arch}.exe" - win_pkg_url = f"{root_url}/windows/{salt_release}/{win_pkg}" - ssm_bin = root_dir / "bin" / "ssm_bin" + ret = shell.run("msiexec", "/qn", "/i", str(pkg_path), 'START_MINION=""') + assert ret.returncode == 0, ret - pkg_path = downloads_path / win_pkg - - pytest.helpers.download_file(win_pkg_url, pkg_path) - if install_type.lower() == "nsis": - ret = shell.run(str(pkg_path), "/start-minion=0", "/S", check=False) - else: - ret = shell.run("msiexec", "/qn", "/i", str(pkg_path), 'START_MINION=""') - assert ret.returncode == 0, ret - - log.debug("Removing installed salt-minion service") - ret = shell.run( - "cmd", "/c", str(ssm_bin), "remove", "salt-minion", "confirm", check=False - ) - assert ret.returncode == 0, ret + log.debug("Removing installed salt-minion service") + ret = shell.run( + "cmd", "/c", str(ssm_bin), "remove", "salt-minion", "confirm", check=False + ) + assert ret.returncode == 0, ret + yield + finally: + # We need to uninstall the MSI packages, otherwise they will not install correctly + if install_type.lower() == "msi": + ret = shell.run("msiexec", "/qn", "/x", str(pkg_path)) + assert ret.returncode == 0, ret @pytest.fixture(scope="module") From 481c8ed63219196f347eab2a17413d3c65c7d4f0 Mon Sep 17 00:00:00 2001 From: MKLeb Date: Tue, 2 May 2023 15:51:03 -0400 Subject: [PATCH 076/121] Revert "TO REVERT: Allow testing staging runs from hotfix/3006.x/run-pkg-download-tests-for-all" This reverts commit b0d7cc0dd55251e651c7ae4aa023677f394c4124. 
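The `setup_windows` rework above leans on a single `contextlib` idiom: everything before the `yield` is setup, and the `finally` clause is teardown that runs even when the tests using the context fail. A minimal sketch of that shape, where `install`/`uninstall` are illustrative placeholder callables rather than anything from the patch:

```python
import contextlib


@contextlib.contextmanager
def installed_package(install, uninstall):
    # Mirrors the shape setup_windows() now has: setup happens inside the
    # try block, and the finally clause guarantees cleanup runs even if
    # setup or the tests inside the context raise part-way through.
    try:
        install()
        yield
    finally:
        uninstall()


# Usage sketch:
# with installed_package(run_installer, run_uninstaller):
#     run_download_tests()
```

That guarantee is what lets the MSI uninstall step run after the download tests whether or not they passed, instead of depending on every test completing cleanly.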
--- .github/workflows/release.yml | 4 ++-- .github/workflows/staging.yml | 4 ++-- .github/workflows/templates/layout.yml.jinja | 2 +- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 4480f999007..0dd338bdd8a 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -47,9 +47,9 @@ jobs: - name: Check Branch run: | echo "Trying to run the staging workflow from branch ${{ github.ref_name }}" - if [ "${{ contains(fromJSON('["master", "3006.x", "hotfix/3006.x/run-pkg-download-tests-for-all"]'), github.ref_name) }}" != "true" ]; then + if [ "${{ contains(fromJSON('["master", "3006.x"]'), github.ref_name) }}" != "true" ]; then echo "Running the staging workflow from the ${{ github.ref_name }} branch is not allowed" - echo "Allowed branches: master, 3006.x, hotfix/3006.x/run-pkg-download-tests-for-all" + echo "Allowed branches: master, 3006.x" exit 1 else echo "Allowed to release from branch ${{ github.ref_name }}" diff --git a/.github/workflows/staging.yml b/.github/workflows/staging.yml index 6e52371c4de..9c6b7696f5b 100644 --- a/.github/workflows/staging.yml +++ b/.github/workflows/staging.yml @@ -60,9 +60,9 @@ jobs: - name: Check Branch run: | echo "Trying to run the staging workflow from branch ${{ github.ref_name }}" - if [ "${{ contains(fromJSON('["master", "3006.x", "hotfix/3006.x/run-pkg-download-tests-for-all"]'), github.ref_name) }}" != "true" ]; then + if [ "${{ contains(fromJSON('["master", "3006.x"]'), github.ref_name) }}" != "true" ]; then echo "Running the staging workflow from the ${{ github.ref_name }} branch is not allowed" - echo "Allowed branches: master, 3006.x, hotfix/3006.x/run-pkg-download-tests-for-all" + echo "Allowed branches: master, 3006.x" exit 1 else echo "Allowed to release from branch ${{ github.ref_name }}" diff --git a/.github/workflows/templates/layout.yml.jinja b/.github/workflows/templates/layout.yml.jinja index 96a1b5b6e42..4e0fa686e3e 100644 --- a/.github/workflows/templates/layout.yml.jinja +++ b/.github/workflows/templates/layout.yml.jinja @@ -9,7 +9,7 @@ <%- set skip_junit_reports_check = skip_junit_reports_check|default("${{ github.event_name == 'pull_request' }}") %> <%- set gpg_key_id = "64CBBC8173D76B3F" %> <%- set prepare_actual_release = prepare_actual_release | default(False) %> -<%- set release_branches = ["master", "3006.x", "hotfix/3006.x/run-pkg-download-tests-for-all"] %> +<%- set release_branches = ["master", "3006.x"] %> --- <%- block name %> name: <{ workflow_name }> From bae0321af9236601cd40a2850d794173546dcc97 Mon Sep 17 00:00:00 2001 From: Charles McMarrow Date: Tue, 2 May 2023 20:18:48 -0500 Subject: [PATCH 077/121] [3006.x] saltutil.refresh_grains(clean_pillar_cache=False) (#64176) * flaky jail must have label * add clean_pillar_cache arg * add docs * Revert "flaky jail must have label" This reverts commit 0888b84fd0bd5e7ed678599633f60aa32dc6b63e.
* add clean pillar cache refresh util * don't clear unless refresh_pillar * tests * changelog --- changelog/64081.fixed.md | 1 + conf/master | 1 + conf/suse/master | 1 + doc/ref/configuration/master.rst | 1 + pkg/common/conf/master | 1 + salt/modules/saltutil.py | 41 ++++++++++++--- tests/pytests/unit/modules/test_saltutil.py | 58 ++++++++++++++++++++- 7 files changed, 95 insertions(+), 9 deletions(-) create mode 100644 changelog/64081.fixed.md diff --git a/changelog/64081.fixed.md b/changelog/64081.fixed.md new file mode 100644 index 00000000000..ed0720ac9a3 --- /dev/null +++ b/changelog/64081.fixed.md @@ -0,0 +1 @@ +Fixed clear pillar cache on every highstate and added clean_pillar_cache=False to saltutil functions. diff --git a/conf/master b/conf/master index f542051d762..2c0a5c9cb87 100644 --- a/conf/master +++ b/conf/master @@ -1025,6 +1025,7 @@ # If and only if a master has set ``pillar_cache: True``, the cache TTL controls the amount # of time, in seconds, before the cache is considered invalid by a master and a fresh # pillar is recompiled and stored. +# The cache TTL does not prevent pillar cache from being refreshed before its TTL expires. #pillar_cache_ttl: 3600 # If and only if a master has set `pillar_cache: True`, one of several storage providers diff --git a/conf/suse/master b/conf/suse/master index 7168441dc41..863d8790240 100644 --- a/conf/suse/master +++ b/conf/suse/master @@ -950,6 +950,7 @@ syndic_user: salt # If and only if a master has set ``pillar_cache: True``, the cache TTL controls the amount # of time, in seconds, before the cache is considered invalid by a master and a fresh # pillar is recompiled and stored. +# The cache TTL does not prevent pillar cache from being refreshed before its TTL expires. #pillar_cache_ttl: 3600 # If and only if a master has set `pillar_cache: True`, one of several storage providers diff --git a/doc/ref/configuration/master.rst b/doc/ref/configuration/master.rst index a6022c94ee1..74d4b58b084 100644 --- a/doc/ref/configuration/master.rst +++ b/doc/ref/configuration/master.rst @@ -5013,6 +5013,7 @@ Default: ``3600`` If and only if a master has set ``pillar_cache: True``, the cache TTL controls the amount of time, in seconds, before the cache is considered invalid by a master and a fresh pillar is recompiled and stored. +The cache TTL does not prevent pillar cache from being refreshed before its TTL expires. .. conf_master:: pillar_cache_backend diff --git a/pkg/common/conf/master b/pkg/common/conf/master index fcad1961c10..4f0fa646d49 100644 --- a/pkg/common/conf/master +++ b/pkg/common/conf/master @@ -1025,6 +1025,7 @@ user: salt # If and only if a master has set ``pillar_cache: True``, the cache TTL controls the amount # of time, in seconds, before the cache is considered invalid by a master and a fresh # pillar is recompiled and stored. +# The cache TTL does not prevent pillar cache from being refreshed before its TTL expires. #pillar_cache_ttl: 3600 # If and only if a master has set `pillar_cache: True`, one of several storage providers diff --git a/salt/modules/saltutil.py b/salt/modules/saltutil.py index a692c3f34d4..ecf467046aa 100644 --- a/salt/modules/saltutil.py +++ b/salt/modules/saltutil.py @@ -381,6 +381,9 @@ def refresh_grains(**kwargs): refresh_pillar : True Set to ``False`` to keep pillar data from being refreshed. + clean_pillar_cache : False + Set to ``True`` to refresh pillar cache. + CLI Examples: .. 
code-block:: bash @@ -389,6 +392,7 @@ def refresh_grains(**kwargs): """ kwargs = salt.utils.args.clean_kwargs(**kwargs) _refresh_pillar = kwargs.pop("refresh_pillar", True) + clean_pillar_cache = kwargs.pop("clean_pillar_cache", False) if kwargs: salt.utils.args.invalid_kwargs(kwargs) # Modules and pillar need to be refreshed in case grains changes affected @@ -396,14 +400,18 @@ def refresh_grains(**kwargs): # newly-reloaded grains to each execution module's __grains__ dunder. if _refresh_pillar: # we don't need to call refresh_modules here because it's done by refresh_pillar - refresh_pillar() + refresh_pillar(clean_cache=clean_pillar_cache) else: refresh_modules() return True def sync_grains( - saltenv=None, refresh=True, extmod_whitelist=None, extmod_blacklist=None + saltenv=None, + refresh=True, + extmod_whitelist=None, + extmod_blacklist=None, + clean_pillar_cache=False, ): """ .. versionadded:: 0.10.0 @@ -430,6 +438,9 @@ def sync_grains( extmod_blacklist : None comma-separated list of modules to blacklist based on type + clean_pillar_cache : False + Set to ``True`` to refresh pillar cache. + CLI Examples: .. code-block:: bash @@ -441,7 +452,7 @@ def sync_grains( ret = _sync("grains", saltenv, extmod_whitelist, extmod_blacklist) if refresh: # we don't need to call refresh_modules here because it's done by refresh_pillar - refresh_pillar() + refresh_pillar(clean_cache=clean_pillar_cache) return ret @@ -915,7 +926,11 @@ def sync_log_handlers( def sync_pillar( - saltenv=None, refresh=True, extmod_whitelist=None, extmod_blacklist=None + saltenv=None, + refresh=True, + extmod_whitelist=None, + extmod_blacklist=None, + clean_pillar_cache=False, ): """ .. versionadded:: 2015.8.11,2016.3.2 @@ -935,6 +950,9 @@ def sync_pillar( extmod_blacklist : None comma-separated list of modules to blacklist based on type + clean_pillar_cache : False + Set to ``True`` to refresh pillar cache. + .. note:: This function will raise an error if executed on a traditional (i.e. not masterless) minion @@ -953,7 +971,7 @@ def sync_pillar( ret = _sync("pillar", saltenv, extmod_whitelist, extmod_blacklist) if refresh: # we don't need to call refresh_modules here because it's done by refresh_pillar - refresh_pillar() + refresh_pillar(clean_cache=clean_pillar_cache) return ret @@ -998,7 +1016,13 @@ def sync_executors( return ret -def sync_all(saltenv=None, refresh=True, extmod_whitelist=None, extmod_blacklist=None): +def sync_all( + saltenv=None, + refresh=True, + extmod_whitelist=None, + extmod_blacklist=None, + clean_pillar_cache=False, +): """ .. versionchanged:: 2015.8.11,2016.3.2 On masterless minions, pillar modules are now synced, and refreshed @@ -1036,6 +1060,9 @@ def sync_all(saltenv=None, refresh=True, extmod_whitelist=None, extmod_blacklist extmod_blacklist : None dictionary of modules to blacklist based on type + clean_pillar_cache : False + Set to ``True`` to refresh pillar cache. + CLI Examples: .. 
code-block:: bash @@ -1080,7 +1107,7 @@ def sync_all(saltenv=None, refresh=True, extmod_whitelist=None, extmod_blacklist ret["pillar"] = sync_pillar(saltenv, False, extmod_whitelist, extmod_blacklist) if refresh: # we don't need to call refresh_modules here because it's done by refresh_pillar - refresh_pillar() + refresh_pillar(clean_cache=clean_pillar_cache) return ret diff --git a/tests/pytests/unit/modules/test_saltutil.py b/tests/pytests/unit/modules/test_saltutil.py index 889543c9454..97527d3dc24 100644 --- a/tests/pytests/unit/modules/test_saltutil.py +++ b/tests/pytests/unit/modules/test_saltutil.py @@ -2,13 +2,13 @@ import pytest import salt.modules.saltutil as saltutil from salt.client import LocalClient -from tests.support.mock import create_autospec +from tests.support.mock import create_autospec, patch from tests.support.mock import sentinel as s @pytest.fixture def configure_loader_modules(): - return {saltutil: {}} + return {saltutil: {"__opts__": {"file_client": "local"}}} def test_exec_kwargs(): @@ -82,3 +82,57 @@ def test_exec_kwargs(): **{"subset": s.subset, "batch": s.batch} ) client.cmd_batch.assert_called_with(batch=s.batch, **_cmd_expected_kwargs) + + +def test_refresh_grains_default_clean_pillar_cache(): + with patch("salt.modules.saltutil.refresh_pillar") as refresh_pillar: + saltutil.refresh_grains() + refresh_pillar.assert_called_with(clean_cache=False) + + +def test_refresh_grains_clean_pillar_cache(): + with patch("salt.modules.saltutil.refresh_pillar") as refresh_pillar: + saltutil.refresh_grains(clean_pillar_cache=True) + refresh_pillar.assert_called_with(clean_cache=True) + + +def test_sync_grains_default_clean_pillar_cache(): + with patch("salt.modules.saltutil._sync"): + with patch("salt.modules.saltutil.refresh_pillar") as refresh_pillar: + saltutil.sync_grains() + refresh_pillar.assert_called_with(clean_cache=False) + + +def test_sync_grains_clean_pillar_cache(): + with patch("salt.modules.saltutil._sync"): + with patch("salt.modules.saltutil.refresh_pillar") as refresh_pillar: + saltutil.sync_grains(clean_pillar_cache=True) + refresh_pillar.assert_called_with(clean_cache=True) + + +def test_sync_pillar_default_clean_pillar_cache(): + with patch("salt.modules.saltutil._sync"): + with patch("salt.modules.saltutil.refresh_pillar") as refresh_pillar: + saltutil.sync_pillar() + refresh_pillar.assert_called_with(clean_cache=False) + + +def test_sync_pillar_clean_pillar_cache(): + with patch("salt.modules.saltutil._sync"): + with patch("salt.modules.saltutil.refresh_pillar") as refresh_pillar: + saltutil.sync_pillar(clean_pillar_cache=True) + refresh_pillar.assert_called_with(clean_cache=True) + + +def test_sync_all_default_clean_pillar_cache(): + with patch("salt.modules.saltutil._sync"): + with patch("salt.modules.saltutil.refresh_pillar") as refresh_pillar: + saltutil.sync_all() + refresh_pillar.assert_called_with(clean_cache=False) + + +def test_sync_all_clean_pillar_cache(): + with patch("salt.modules.saltutil._sync"): + with patch("salt.modules.saltutil.refresh_pillar") as refresh_pillar: + saltutil.sync_all(clean_pillar_cache=True) + refresh_pillar.assert_called_with(clean_cache=True) From fc54cafc48fa9444080353cd46fe27ecea779e96 Mon Sep 17 00:00:00 2001 From: cmcmarrow Date: Mon, 10 Apr 2023 12:21:13 -0500 Subject: [PATCH 078/121] remove dead line (cherry picked from commit 9a6ab55c3bd26c496efec51e269b866bf0273324) --- salt/matchers/compound_match.py | 1 - salt/matchers/nodegroup_match.py | 1 - salt/modules/match.py | 1 - 3 files changed, 3 deletions(-) 
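The three deletions that follow all remove the same dead store: a fresh dict assigned to `__context__["matchers"]` and immediately overwritten by the loader's return value, so the first assignment can never be observed. A stripped-down illustration of the pattern, with `load_matchers` standing in for `salt.loader.matchers`:

```python
def load_matchers(opts):
    # Stand-in for salt.loader.matchers(opts); returns the matcher mapping.
    return {"glob_match.match": lambda tgt, opts=opts: True}


context = {}
context["matchers"] = {}                 # dead store: nothing can read it before...
context["matchers"] = load_matchers({})  # ...this line replaces it entirely
```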
diff --git a/salt/matchers/compound_match.py b/salt/matchers/compound_match.py index 538d2f92a37..2bce58f117a 100644 --- a/salt/matchers/compound_match.py +++ b/salt/matchers/compound_match.py @@ -22,7 +22,6 @@ def _load_matchers(opts): """ Store matchers in __context__ so they're only loaded once """ - __context__["matchers"] = {} __context__["matchers"] = salt.loader.matchers(opts) diff --git a/salt/matchers/nodegroup_match.py b/salt/matchers/nodegroup_match.py index 1ce621510fb..c2b57dc612f 100644 --- a/salt/matchers/nodegroup_match.py +++ b/salt/matchers/nodegroup_match.py @@ -14,7 +14,6 @@ def _load_matchers(opts): """ Store matchers in __context__ so they're only loaded once """ - __context__["matchers"] = {} __context__["matchers"] = salt.loader.matchers(opts) diff --git a/salt/modules/match.py b/salt/modules/match.py index a6775a4916d..7c7f6d933ea 100644 --- a/salt/modules/match.py +++ b/salt/modules/match.py @@ -22,7 +22,6 @@ def _load_matchers(): """ Store matchers in __context__ so they're only loaded once """ - __context__["matchers"] = {} __context__["matchers"] = salt.loader.matchers(__opts__) From 87074deea08fd9178a953956084dd82d16237699 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Tue, 2 May 2023 17:26:51 +0100 Subject: [PATCH 079/121] Stop using the deprecated `salt.transport.client` imports. Fixes #64186 Signed-off-by: Pedro Algarvio --- changelog/64186.fixed.md | 1 + salt/minion.py | 8 +++----- salt/transport/client.py | 2 -- salt/transport/ipc.py | 1 - salt/transport/local.py | 2 +- salt/transport/tcp.py | 2 -- .../functional/transport/server/test_req_channel.py | 2 -- 7 files changed, 5 insertions(+), 13 deletions(-) create mode 100644 changelog/64186.fixed.md diff --git a/changelog/64186.fixed.md b/changelog/64186.fixed.md new file mode 100644 index 00000000000..64c2c27f8a7 --- /dev/null +++ b/changelog/64186.fixed.md @@ -0,0 +1 @@ +Stop using the deprecated `salt.transport.client` imports. 
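The changelog entry above covers a mechanical migration in the hunks below: the `AsyncReqChannel` factory is now reached through `salt.channel.client` instead of the deprecated `salt.transport.client` shim, with an unchanged call signature. A hedged sketch of the call as the minion code now makes it (the wrapper function is illustrative, not part of the patch):

```python
import salt.channel.client  # supported home of the channel factories


def make_req_channel(opts, io_loop=None):
    # Same factory call the salt/minion.py hunks below switch to; opts is a
    # minion opts dict, io_loop an optional tornado IOLoop instance.
    return salt.channel.client.AsyncReqChannel.factory(opts, io_loop=io_loop)
```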
diff --git a/salt/minion.py b/salt/minion.py index 6237fcc4b7f..3a7c26366fc 100644 --- a/salt/minion.py +++ b/salt/minion.py @@ -1363,7 +1363,7 @@ class Minion(MinionBase): ) # a long-running req channel - self.req_channel = salt.transport.client.AsyncReqChannel.factory( + self.req_channel = salt.channel.client.AsyncReqChannel.factory( self.opts, io_loop=self.io_loop ) @@ -2817,10 +2817,8 @@ class Minion(MinionBase): self.opts["master"], ) - self.req_channel = ( - salt.transport.client.AsyncReqChannel.factory( - self.opts, io_loop=self.io_loop - ) + self.req_channel = salt.channel.client.AsyncReqChannel.factory( + self.opts, io_loop=self.io_loop ) # put the current schedule into the new loaders diff --git a/salt/transport/client.py b/salt/transport/client.py index 7ffc97fe8e7..bd79ac357b4 100644 --- a/salt/transport/client.py +++ b/salt/transport/client.py @@ -13,8 +13,6 @@ from salt.utils.versions import warn_until log = logging.getLogger(__name__) -# XXX: Add depreication warnings to start using salt.channel.client - class ReqChannel: """ diff --git a/salt/transport/ipc.py b/salt/transport/ipc.py index ca13a498e3e..453afaaad78 100644 --- a/salt/transport/ipc.py +++ b/salt/transport/ipc.py @@ -13,7 +13,6 @@ import salt.ext.tornado.concurrent import salt.ext.tornado.gen import salt.ext.tornado.ioloop import salt.ext.tornado.netutil -import salt.transport.client import salt.transport.frame import salt.utils.msgpack from salt.ext.tornado.ioloop import IOLoop diff --git a/salt/transport/local.py b/salt/transport/local.py index 49fb1e0b588..e0a22b78cb1 100644 --- a/salt/transport/local.py +++ b/salt/transport/local.py @@ -1,7 +1,7 @@ import logging import salt.utils.files -from salt.transport.client import ReqChannel +from salt.channel.client import ReqChannel log = logging.getLogger(__name__) diff --git a/salt/transport/tcp.py b/salt/transport/tcp.py index 6a9e1138940..ddde882e764 100644 --- a/salt/transport/tcp.py +++ b/salt/transport/tcp.py @@ -25,10 +25,8 @@ import salt.ext.tornado.tcpclient import salt.ext.tornado.tcpserver import salt.master import salt.payload -import salt.transport.client import salt.transport.frame import salt.transport.ipc -import salt.transport.server import salt.utils.asynchronous import salt.utils.files import salt.utils.msgpack diff --git a/tests/pytests/functional/transport/server/test_req_channel.py b/tests/pytests/functional/transport/server/test_req_channel.py index 4a74802a0d0..46a3b2fe0e5 100644 --- a/tests/pytests/functional/transport/server/test_req_channel.py +++ b/tests/pytests/functional/transport/server/test_req_channel.py @@ -11,8 +11,6 @@ import salt.config import salt.exceptions import salt.ext.tornado.gen import salt.master -import salt.transport.client -import salt.transport.server import salt.utils.platform import salt.utils.process import salt.utils.stringutils From e4c5ce36926aa5e7ed2dce6104125a9dbc6beeb6 Mon Sep 17 00:00:00 2001 From: Twangboy Date: Tue, 2 May 2023 15:56:55 -0600 Subject: [PATCH 080/121] Add test to ensure ssm.exe present --- pkg/tests/integration/test_ssm.py | 16 ++++++++++++++++ pkg/tests/support/helpers.py | 3 +++ 2 files changed, 19 insertions(+) create mode 100644 pkg/tests/integration/test_ssm.py diff --git a/pkg/tests/integration/test_ssm.py b/pkg/tests/integration/test_ssm.py new file mode 100644 index 00000000000..1296581115f --- /dev/null +++ b/pkg/tests/integration/test_ssm.py @@ -0,0 +1,16 @@ +import os + +import pytest + +pytestmark = [ + pytest.mark.skip_unless_on_windows, +] + + +def 
test_ssm_present(install_salt): + """ + The ssm.exe binary needs to be present in both the zip and the exe/msi + builds + """ + ssm_path = os.path.join(*install_salt.binary_paths["ssm"]) + assert os.path.exists(ssm_path) diff --git a/pkg/tests/support/helpers.py b/pkg/tests/support/helpers.py index 57b6ccd4d00..b465bbe3df4 100644 --- a/pkg/tests/support/helpers.py +++ b/pkg/tests/support/helpers.py @@ -380,6 +380,7 @@ class SaltPkgInstall: "minion": ["salt-minion.exe"], "pip": ["salt-pip.exe"], "python": [python_bin], + "ssm": [self.ssm_bin], } else: if os.path.exists(self.install_dir / "bin" / "salt"): @@ -412,6 +413,7 @@ class SaltPkgInstall: "minion": [str(self.run_root), "minion"], "pip": [str(self.run_root), "pip"], "python": [python_bin], + "ssm": [self.ssm_bin], } else: self.binary_paths = { @@ -439,6 +441,7 @@ class SaltPkgInstall: "minion": [self.install_dir / "salt-minion.exe"], "pip": [self.install_dir / "salt-pip.exe"], "python": [python_bin], + "ssm": [self.ssm_bin], } else: self.binary_paths = { From ba29a27ad78228c15319bb7ab81dcc815e6c8b9d Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Wed, 3 May 2023 07:52:39 +0100 Subject: [PATCH 081/121] Fix test Signed-off-by: Pedro Algarvio --- pkg/tests/integration/test_ssm.py | 3 +-- pkg/tests/support/helpers.py | 3 --- 2 files changed, 1 insertion(+), 5 deletions(-) diff --git a/pkg/tests/integration/test_ssm.py b/pkg/tests/integration/test_ssm.py index 1296581115f..059766caf17 100644 --- a/pkg/tests/integration/test_ssm.py +++ b/pkg/tests/integration/test_ssm.py @@ -12,5 +12,4 @@ def test_ssm_present(install_salt): The ssm.exe binary needs to be present in both the zip and the exe/msi builds """ - ssm_path = os.path.join(*install_salt.binary_paths["ssm"]) - assert os.path.exists(ssm_path) + assert os.path.exists(install_salt.ssm_bin) diff --git a/pkg/tests/support/helpers.py b/pkg/tests/support/helpers.py index b465bbe3df4..57b6ccd4d00 100644 --- a/pkg/tests/support/helpers.py +++ b/pkg/tests/support/helpers.py @@ -380,7 +380,6 @@ class SaltPkgInstall: "minion": ["salt-minion.exe"], "pip": ["salt-pip.exe"], "python": [python_bin], - "ssm": [self.ssm_bin], } else: if os.path.exists(self.install_dir / "bin" / "salt"): @@ -413,7 +412,6 @@ class SaltPkgInstall: "minion": [str(self.run_root), "minion"], "pip": [str(self.run_root), "pip"], "python": [python_bin], - "ssm": [self.ssm_bin], } else: self.binary_paths = { @@ -441,7 +439,6 @@ class SaltPkgInstall: "minion": [self.install_dir / "salt-minion.exe"], "pip": [self.install_dir / "salt-pip.exe"], "python": [python_bin], - "ssm": [self.ssm_bin], } else: self.binary_paths = { From b7fbec8158de5a973affb200022849d2c7e78434 Mon Sep 17 00:00:00 2001 From: "Daniel A. Wozniak" Date: Wed, 26 Apr 2023 14:17:36 -0700 Subject: [PATCH 082/121] Move salt user creation to common package Move the salt user creation to the common package shared by all other salt packages.
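The Debian maintainer scripts and RPM `%pre` scriptlet below converge on one account layout: system user `salt`, primary group `salt`, home directory `/opt/saltstack/salt`. A small hypothetical verification helper (standard library only, not part of the patch) expressing the invariants those scripts establish:

```python
import grp
import pwd


def salt_user_ok(user="salt", group="salt", home="/opt/saltstack/salt"):
    # Hypothetical check, not shipped by the packages: confirms the account
    # the packaging scripts below create exists, has the salt group as its
    # primary group, and uses the onedir install tree as its home.
    try:
        entry = pwd.getpwnam(user)
    except KeyError:
        return False
    return entry.pw_dir == home and grp.getgrgid(entry.pw_gid).gr_name == group
```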
--- pkg/debian/salt-common.postinst | 40 ++++++++++++++++++++ pkg/debian/salt-master.postinst | 12 ++++-- pkg/rpm/salt.spec | 50 +++++++++++++++++++++---- pkg/tests/integration/test_salt_user.py | 32 ++++++++++++++++ 4 files changed, 123 insertions(+), 11 deletions(-) create mode 100644 pkg/debian/salt-common.postinst diff --git a/pkg/debian/salt-common.postinst b/pkg/debian/salt-common.postinst new file mode 100644 index 00000000000..9623fdece6b --- /dev/null +++ b/pkg/debian/salt-common.postinst @@ -0,0 +1,40 @@ +case "$1" in + install|upgrade) + [ -z "$SALT_HOME" ] && SALT_HOME=/opt/saltstack/salt + [ -z "$SALT_USER" ] && SALT_USER=salt + [ -z "$SALT_NAME" ] && SALT_NAME="Salt" + [ -z "$SALT_GROUP" ] && SALT_GROUP=salt + + # create user to avoid running server as root + # 1. create group if not existing + if ! getent group | grep -q "^$SALT_GROUP:" ; then + echo -n "Adding group $SALT_GROUP.." + addgroup --quiet --system $SALT_GROUP 2>/dev/null ||true + echo "..done" + fi + # 2. create homedir if not existing + test -d $SALT_HOME || mkdir $SALT_HOME + # 3. create user if not existing + if ! getent passwd | grep -q "^$SALT_USER:"; then + echo -n "Adding system user $SALT_USER.." + adduser --quiet \ + --system \ + --ingroup $SALT_GROUP \ + --no-create-home \ + --disabled-password \ + $SALT_USER 2>/dev/null || true + echo "..done" + fi + # 4. adjust passwd entry + usermod -c "$SALT_NAME" \ + -d $SALT_HOME \ + -g $SALT_GROUP \ + $SALT_USER + # 5. adjust file and directory permissions + if ! dpkg-statoverride --list $SALT_HOME >/dev/null + then + chown -R $SALT_USER:$SALT_GROUP $SALT_HOME + chmod u=rwx,g=rxs,o= $SALT_HOME + fi + ;; +esac diff --git a/pkg/debian/salt-master.postinst b/pkg/debian/salt-master.postinst index 6ac58f198f9..986ad1f8e1a 100644 --- a/pkg/debian/salt-master.postinst +++ b/pkg/debian/salt-master.postinst @@ -1,3 +1,9 @@ -adduser --system salt --group -chown -R salt:salt /etc/salt /var/log/salt /opt/saltstack/salt/ /var/cache/salt/ /var/run/salt -if command -v systemctl; then systemctl enable salt-master; fi +case "$1" in + install) + if command -v systemctl; then systemctl enable salt-master; fi + chown -R salt:salt /etc/salt /var/log/salt /opt/saltstack/salt/ /var/cache/salt/ /var/run/salt + ;; + upgrade) + chown -R salt:salt /etc/salt /var/log/salt /opt/saltstack/salt/ /var/cache/salt/ /var/run/salt + ;; +esac diff --git a/pkg/rpm/salt.spec b/pkg/rpm/salt.spec index 0df3ec2e774..2239b791c46 100644 --- a/pkg/rpm/salt.spec +++ b/pkg/rpm/salt.spec @@ -14,6 +14,10 @@ %global __requires_exclude_from ^.*\\.so.*$ %define _source_payload w2.gzdio %define _binary_payload w2.gzdio +%define _SALT_GROUP salt +%define _SALT_USER salt +%define _SALT_NAME Salt +%define _SALT_HOME /opt/saltstack/salt # Disable python bytecompile for MANY reasons %global __os_install_post %(echo '%{__os_install_post}' | sed -e 's!/usr/lib[^[:space:]]*/brp-python-bytecompile[[:space:]].*$!!g') @@ -278,8 +282,6 @@ rm -rf %{buildroot} %dir %{_sysconfdir}/salt/pki - - %files master %defattr(-,root,root) %doc %{_mandir}/man7/salt.7* @@ -311,6 +313,7 @@ rm -rf %{buildroot} %dir %attr(0750, salt, salt) %{_var}/cache/salt/master/syndics/ %dir %attr(0750, salt, salt) %{_var}/cache/salt/master/tokens/ + %files minion %defattr(-,root,root) %doc %{_mandir}/man1/salt-call.1* @@ -327,17 +330,20 @@ rm -rf %{buildroot} %dir %{_sysconfdir}/salt/minion.d %dir %attr(0750, root, root) %{_var}/cache/salt/minion/ + %files syndic %doc %{_mandir}/man1/salt-syndic.1* %{_bindir}/salt-syndic %{_unitdir}/salt-syndic.service + 
%files api %defattr(-,root,root) %doc %{_mandir}/man1/salt-api.1* %{_bindir}/salt-api %{_unitdir}/salt-api.service + %files cloud %doc %{_mandir}/man1/salt-cloud.1* %{_bindir}/salt-cloud @@ -348,36 +354,64 @@ rm -rf %{buildroot} %{_sysconfdir}/salt/cloud.providers.d %config(noreplace) %{_sysconfdir}/salt/cloud + %files ssh %doc %{_mandir}/man1/salt-ssh.1* %{_bindir}/salt-ssh %config(noreplace) %{_sysconfdir}/salt/roster -# Add salt user/group for Salt Master -%pre master -getent group salt >/dev/null || groupadd -r salt -getent passwd salt >/dev/null || \ - useradd -r -g salt -s /sbin/nologin \ - -c "Salt user for Salt Master" salt + +%pre +# create user to avoid running server as root +# 1. create group if not existing +if ! getent group | grep -q "^%{_SALT_GROUP}:" ; then + addgroup --quiet --system %{_SALT_GROUP} 2>/dev/null ||true +fi +# 2. create homedir if not existing +test -d %{_SALT_HOME} || mkdir %{_SALT_HOME} +# 3. create user if not existing +if ! getent passwd | grep -q "^%{_SALT_USER}:"; then + adduser --quiet \ + --system \ + --ingroup %{_SALT_USER} \ + --no-create-home \ + --disabled-password \ + -s /sbin/nlogin \ + %{_SALT_USER} 2>/dev/null || true +fi +# 4. adjust passwd entry +usermod -c "%{_SALT_NAME}" \ + -d %{_SALT_HOME} \ + -g %{_SALT_GROUP} \ + %{_SALT_USER} +# 5. adjust file and directory permissions +chown -R %{_SALT_USER}:%{_SALT_GROUP} %{_SALT_HOME} +chmod u=rwx,g=rxs,o= %{_SALT_HOME} + # assumes systemd for RHEL 7 & 8 & 9 %preun master # RHEL 9 is giving warning msg if syndic is not installed, supress it %systemd_preun salt-syndic.service > /dev/null 2>&1 + %preun minion %systemd_preun salt-minion.service + %preun api %systemd_preun salt-api.service + %post ln -s -f /opt/saltstack/salt/spm %{_bindir}/spm ln -s -f /opt/saltstack/salt/salt-pip %{_bindir}/salt-pip + %post cloud ln -s -f /opt/saltstack/salt/salt-cloud %{_bindir}/salt-cloud + %post master %systemd_post salt-master.service ln -s -f /opt/saltstack/salt/salt %{_bindir}/salt diff --git a/pkg/tests/integration/test_salt_user.py b/pkg/tests/integration/test_salt_user.py index 4e7ddfda0a0..ff18e3b4bdc 100644 --- a/pkg/tests/integration/test_salt_user.py +++ b/pkg/tests/integration/test_salt_user.py @@ -1,3 +1,5 @@ +import subprocess + import psutil import pytest import yaml @@ -20,3 +22,33 @@ def test_salt_user_master(salt_master, install_salt): match = True assert match + + +def test_salt_user_home(install_salt): + """ + Test the correct user is running the Salt Master + """ + proc = subprocess.run(["getent", "salt"], check=False, capture=True) + assert proc.exitcode() == 0 + home = "" + try: + home = proc.stdout.decode().split(":")[5] + except: + pass + assert home == "/opt/saltstack/salt" + + +def test_salt_user_group(install_salt): + """ + Test the salt user is the salt group + """ + proc = subprocess.run(["id", "salt"], check=False, capture=True) + assert proc.exitcode() == 0 + in_group = False + try: + for group in proc.stdout.decode().split(" "): + if group == "salt": + in_group = True + except: + pass + assert in_group is True From 9a3ce4630c5aa48b235e9c3b6470674ab8c947a9 Mon Sep 17 00:00:00 2001 From: "Daniel A. 
Wozniak" Date: Sat, 29 Apr 2023 01:17:22 -0700 Subject: [PATCH 083/121] Fix warts in setting up user in spec file --- pkg/debian/salt-common.postinst | 2 +- pkg/rpm/salt.spec | 16 +++++++--------- 2 files changed, 8 insertions(+), 10 deletions(-) diff --git a/pkg/debian/salt-common.postinst b/pkg/debian/salt-common.postinst index 9623fdece6b..4e945f2e0c5 100644 --- a/pkg/debian/salt-common.postinst +++ b/pkg/debian/salt-common.postinst @@ -13,7 +13,7 @@ case "$1" in echo "..done" fi # 2. create homedir if not existing - test -d $SALT_HOME || mkdir $SALT_HOME + test -d $SALT_HOME || mkdir -p $SALT_HOME # 3. create user if not existing if ! getent passwd | grep -q "^$SALT_USER:"; then echo -n "Adding system user $SALT_USER.." diff --git a/pkg/rpm/salt.spec b/pkg/rpm/salt.spec index 2239b791c46..9f547735285 100644 --- a/pkg/rpm/salt.spec +++ b/pkg/rpm/salt.spec @@ -364,19 +364,18 @@ rm -rf %{buildroot} %pre # create user to avoid running server as root # 1. create group if not existing -if ! getent group | grep -q "^%{_SALT_GROUP}:" ; then - addgroup --quiet --system %{_SALT_GROUP} 2>/dev/null ||true +if ! getent group %{_SALT_GROUP}; then + groupadd --system %{_SALT_GROUP} 2>/dev/null ||true fi # 2. create homedir if not existing -test -d %{_SALT_HOME} || mkdir %{_SALT_HOME} +test -d %{_SALT_HOME} || mkdir -p %{_SALT_HOME} # 3. create user if not existing +# -g %{_SALT_GROUP} \ if ! getent passwd | grep -q "^%{_SALT_USER}:"; then - adduser --quiet \ - --system \ - --ingroup %{_SALT_USER} \ + adduser --system \ --no-create-home \ - --disabled-password \ -s /sbin/nlogin \ + -g %{_SALT_GROUP} \ %{_SALT_USER} 2>/dev/null || true fi # 4. adjust passwd entry @@ -386,8 +385,6 @@ usermod -c "%{_SALT_NAME}" \ %{_SALT_USER} # 5. adjust file and directory permissions chown -R %{_SALT_USER}:%{_SALT_GROUP} %{_SALT_HOME} -chmod u=rwx,g=rxs,o= %{_SALT_HOME} - # assumes systemd for RHEL 7 & 8 & 9 %preun master @@ -404,6 +401,7 @@ chmod u=rwx,g=rxs,o= %{_SALT_HOME} %post +chown -R %{_SALT_USER}:%{_SALT_GROUP} %{_SALT_HOME} ln -s -f /opt/saltstack/salt/spm %{_bindir}/spm ln -s -f /opt/saltstack/salt/salt-pip %{_bindir}/salt-pip From fb6906f9d2c9646bffa404e56c71309ba338644d Mon Sep 17 00:00:00 2001 From: "Daniel A. Wozniak" Date: Sat, 29 Apr 2023 13:20:08 -0700 Subject: [PATCH 084/121] use preinst for salt user --- pkg/debian/{salt-common.postinst => salt-common.preinst} | 2 +- pkg/debian/salt-master.postinst | 7 ++----- 2 files changed, 3 insertions(+), 6 deletions(-) rename pkg/debian/{salt-common.postinst => salt-common.preinst} (96%) diff --git a/pkg/debian/salt-common.postinst b/pkg/debian/salt-common.preinst similarity index 96% rename from pkg/debian/salt-common.postinst rename to pkg/debian/salt-common.preinst index 4e945f2e0c5..dbfe7fbfd37 100644 --- a/pkg/debian/salt-common.postinst +++ b/pkg/debian/salt-common.preinst @@ -34,7 +34,7 @@ case "$1" in if ! 
dpkg-statoverride --list $SALT_HOME >/dev/null then chown -R $SALT_USER:$SALT_GROUP $SALT_HOME - chmod u=rwx,g=rxs,o= $SALT_HOME + chmod u=rwx,g=rwx,o= $SALT_HOME fi ;; esac diff --git a/pkg/debian/salt-master.postinst b/pkg/debian/salt-master.postinst index 986ad1f8e1a..1c78ee73478 100644 --- a/pkg/debian/salt-master.postinst +++ b/pkg/debian/salt-master.postinst @@ -1,9 +1,6 @@ case "$1" in - install) + configure) + chown -R salt:salt /etc/salt /var/log/salt /opt/saltstack/salt/ /var/cache/salt/ /var/run/salt if command -v systemctl; then systemctl enable salt-master; fi - chown -R salt:salt /etc/salt /var/log/salt /opt/saltstack/salt/ /var/cache/salt/ /var/run/salt - ;; - upgrade) - chown -R salt:salt /etc/salt /var/log/salt /opt/saltstack/salt/ /var/cache/salt/ /var/run/salt ;; esac From 6c66467269148428e17ee9a68953c78c89cd6dad Mon Sep 17 00:00:00 2001 From: "Daniel A. Wozniak" Date: Sun, 30 Apr 2023 16:23:08 -0700 Subject: [PATCH 085/121] Leave salt dir world read/execute --- pkg/debian/salt-common.preinst | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/pkg/debian/salt-common.preinst b/pkg/debian/salt-common.preinst index dbfe7fbfd37..9a86b042238 100644 --- a/pkg/debian/salt-common.preinst +++ b/pkg/debian/salt-common.preinst @@ -34,7 +34,8 @@ case "$1" in if ! dpkg-statoverride --list $SALT_HOME >/dev/null then chown -R $SALT_USER:$SALT_GROUP $SALT_HOME - chmod u=rwx,g=rwx,o= $SALT_HOME + # Tests fail when we remove world execute + # chmod u=rwx,g=rwx,o= $SALT_HOME fi ;; esac From e00031c5e52e44e9a2fee35eb0a08c3c5ba85ec6 Mon Sep 17 00:00:00 2001 From: "Daniel A. Wozniak" Date: Sun, 30 Apr 2023 16:25:03 -0700 Subject: [PATCH 086/121] Salt home has world read and execute perms --- pkg/debian/salt-common.preinst | 3 +-- pkg/rpm/salt.spec | 1 + pkg/tests/integration/test_salt_user.py | 8 ++++---- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/pkg/debian/salt-common.preinst b/pkg/debian/salt-common.preinst index 9a86b042238..dddca49c7bf 100644 --- a/pkg/debian/salt-common.preinst +++ b/pkg/debian/salt-common.preinst @@ -34,8 +34,7 @@ case "$1" in if ! 
dpkg-statoverride --list $SALT_HOME >/dev/null then chown -R $SALT_USER:$SALT_GROUP $SALT_HOME - # Tests fail when we remove world execute - # chmod u=rwx,g=rwx,o= $SALT_HOME + chmod u=rwx,g=rwx,o=rx $SALT_HOME fi ;; esac diff --git a/pkg/rpm/salt.spec b/pkg/rpm/salt.spec index 9f547735285..e6caa4c3e99 100644 --- a/pkg/rpm/salt.spec +++ b/pkg/rpm/salt.spec @@ -402,6 +402,7 @@ chown -R %{_SALT_USER}:%{_SALT_GROUP} %{_SALT_HOME} %post chown -R %{_SALT_USER}:%{_SALT_GROUP} %{_SALT_HOME} +chmod u=rwx,g=rwx,o=rx %{_SALT_HOME} ln -s -f /opt/saltstack/salt/spm %{_bindir}/spm ln -s -f /opt/saltstack/salt/salt-pip %{_bindir}/salt-pip diff --git a/pkg/tests/integration/test_salt_user.py b/pkg/tests/integration/test_salt_user.py index ff18e3b4bdc..374004178f3 100644 --- a/pkg/tests/integration/test_salt_user.py +++ b/pkg/tests/integration/test_salt_user.py @@ -28,8 +28,8 @@ def test_salt_user_home(install_salt): """ Test the correct user is running the Salt Master """ - proc = subprocess.run(["getent", "salt"], check=False, capture=True) - assert proc.exitcode() == 0 + proc = subprocess.run(["getent", "salt"], check=False, capture_output=True) + assert proc.returncode == 0 home = "" try: home = proc.stdout.decode().split(":")[5] @@ -42,8 +42,8 @@ def test_salt_user_group(install_salt): """ Test the salt user is the salt group """ - proc = subprocess.run(["id", "salt"], check=False, capture=True) - assert proc.exitcode() == 0 + proc = subprocess.run(["id", "salt"], check=False, capture_output=True) + assert proc.returncode == 0 in_group = False try: for group in proc.stdout.decode().split(" "): From 6b599b0e12aa4e18dab9978dbfc89d0c1f978a7a Mon Sep 17 00:00:00 2001 From: "Daniel A. Wozniak" Date: Mon, 1 May 2023 14:14:02 -0700 Subject: [PATCH 087/121] salt user in rpm --- pkg/rpm/salt.spec | 15 ++++++++++++--- 1 file changed, 12 insertions(+), 3 deletions(-) diff --git a/pkg/rpm/salt.spec b/pkg/rpm/salt.spec index e6caa4c3e99..3d470cc314f 100644 --- a/pkg/rpm/salt.spec +++ b/pkg/rpm/salt.spec @@ -148,8 +148,13 @@ cd $RPM_BUILD_DIR # the /bin directory find $RPM_BUILD_DIR/build/salt/bin/ -type f -exec sed -i 's:#!/\(.*\)salt/bin/python3:#!/bin/sh\n"exec" "$(dirname $(readlink -f $0))/python3" "$0" "$@":g' {} \; - $RPM_BUILD_DIR/build/venv/bin/tools pkg build salt-onedir . --package-name $RPM_BUILD_DIR/build/salt --platform linux + $RPM_BUILD_DIR/build/venv/bin/tools pkg build salt-onedir . 
--package-name $RPM_BUILD_DIR/build/salt --platform linux $RPM_BUILD_DIR/build/venv/bin/tools pkg pre-archive-cleanup --pkg $RPM_BUILD_DIR/build/salt + + # Generate master and minion configs + sed 's/#user: root/user: salt/g' %{_salt_src}/conf/master > $RPM_BUILD_DIR/build/master + sed 's/#group: root/group: salt/g' %{_salt_src}/conf/minion > $RPM_BUILD_DIR/build/minion + %else # The relenv onedir is being provided, all setup up until Salt is installed # is expected to be done @@ -159,6 +164,10 @@ cd $RPM_BUILD_DIR # Fix any hardcoded paths to the relenv python binary on any of the scripts installed in the /bin directory find salt/bin/ -type f -exec sed -i 's:#!/\(.*\)salt/bin/python3:#!/bin/sh\n"exec" "$$(dirname $$(readlink -f $$0))/python3" "$$0" "$$@":g' {} \; + # Generate master and minion configs + sed 's/#user: root/user: salt/g' %{_salt_src}/conf/master > $RPM_BUILD_DIR/build/master + sed 's/#group: root/group: salt/g' %{_salt_src}/conf/minion > $RPM_BUILD_DIR/build/minion + cd $RPM_BUILD_DIR %endif @@ -215,8 +224,8 @@ install -m 0755 %{buildroot}/opt/saltstack/salt/spm %{buildroot}%{_bindir}/spm install -m 0755 %{buildroot}/opt/saltstack/salt/salt-pip %{buildroot}%{_bindir}/salt-pip # Add the config files -install -p -m 0640 %{_salt_src}/conf/minion %{buildroot}%{_sysconfdir}/salt/minion -install -p -m 0640 %{_salt_src}/pkg/common/conf/master %{buildroot}%{_sysconfdir}/salt/master +install -p -m 0640 $RPM_BUILD_DIR/build/minion %{buildroot}%{_sysconfdir}/salt/minion +install -p -m 0640 $RPM_BUILD_DIR/build/master %{buildroot}%{_sysconfdir}/salt/master install -p -m 0640 %{_salt_src}/conf/cloud %{buildroot}%{_sysconfdir}/salt/cloud install -p -m 0640 %{_salt_src}/conf/roster %{buildroot}%{_sysconfdir}/salt/roster install -p -m 0640 %{_salt_src}/conf/proxy %{buildroot}%{_sysconfdir}/salt/proxy From 64a0bddd13b5ead11849e3fb000fdb01d3b0f1a3 Mon Sep 17 00:00:00 2001 From: "Daniel A. Wozniak" Date: Mon, 1 May 2023 17:29:56 -0700 Subject: [PATCH 088/121] Generate master and minion configs --- pkg/debian/rules | 9 +++++++++ pkg/debian/salt-master.install | 1 - pkg/debian/salt-minion.install | 1 - pkg/rpm/salt.spec | 4 ++-- 4 files changed, 11 insertions(+), 4 deletions(-) diff --git a/pkg/debian/rules b/pkg/debian/rules index ebc6bdff52d..1a7556ab699 100755 --- a/pkg/debian/rules +++ b/pkg/debian/rules @@ -29,6 +29,7 @@ override_dh_auto_build: build/onedir/venv/bin/tools pkg build salt-onedir . --package-name build/onedir/salt --platform linux build/onedir/venv/bin/tools pkg pre-archive-cleanup --pkg build/onedir/salt + else override_dh_auto_build: # The relenv onedir is being provided, all setup up until Salt is installed @@ -38,6 +39,7 @@ override_dh_auto_build: # Fix any hardcoded paths to the relenv python binary on any of the scripts installed in the /bin directory find build/onedir/salt/bin/ -type f -exec sed -i 's:#!/\(.*\)salt/bin/python3:#!/bin/sh\n"exec" "$$(dirname $$(readlink -f $$0))/python3" "$$0" "$$@":g' {} \; + endif # dh_auto_install tries to invoke distutils causing failures. 
@@ -47,4 +49,11 @@ override_dh_auto_install: override_dh_install: mkdir -p debian/salt-common/opt/saltstack cp -R build/onedir/salt debian/salt-common/opt/saltstack/ + + # Generate master and minion configs + mkdir -p debian/salt-master/etc/salt + sed 's/#user: root/user: salt/g' conf/master > debian/salt-master/etc/salt/master + mkdir -p debian/salt-minion/etc/salt + sed 's/#user: root/#user: root\ngroup: salt/g' conf/minion > debian/salt-minion/etc/salt/minion + dh_install diff --git a/pkg/debian/salt-master.install b/pkg/debian/salt-master.install index 35ea3571d08..3d665d5b164 100644 --- a/pkg/debian/salt-master.install +++ b/pkg/debian/salt-master.install @@ -1,2 +1 @@ -pkg/common/conf/master /etc/salt pkg/common/salt-master.service /lib/systemd/system diff --git a/pkg/debian/salt-minion.install b/pkg/debian/salt-minion.install index d7a23a423bd..3132ad7d128 100644 --- a/pkg/debian/salt-minion.install +++ b/pkg/debian/salt-minion.install @@ -1,4 +1,3 @@ -conf/minion /etc/salt conf/proxy /etc/salt pkg/common/salt-minion.service /lib/systemd/system pkg/common/salt-proxy@.service /lib/systemd/system diff --git a/pkg/rpm/salt.spec b/pkg/rpm/salt.spec index 3d470cc314f..769ecd7180e 100644 --- a/pkg/rpm/salt.spec +++ b/pkg/rpm/salt.spec @@ -153,7 +153,7 @@ cd $RPM_BUILD_DIR # Generate master and minion configs sed 's/#user: root/user: salt/g' %{_salt_src}/conf/master > $RPM_BUILD_DIR/build/master - sed 's/#group: root/group: salt/g' %{_salt_src}/conf/minion > $RPM_BUILD_DIR/build/minion + sed 's/#group: root/#user: root\ngroup: salt/g' %{_salt_src}/conf/minion > $RPM_BUILD_DIR/build/minion @@ -166,7 +166,7 @@ cd $RPM_BUILD_DIR # Generate master and minion configs sed 's/#user: root/user: salt/g' %{_salt_src}/conf/master > $RPM_BUILD_DIR/build/master - sed 's/#group: root/group: salt/g' %{_salt_src}/conf/minion > $RPM_BUILD_DIR/build/minion + sed 's/#group: root/#user: root\ngroup: salt/g' %{_salt_src}/conf/minion > $RPM_BUILD_DIR/build/minion cd $RPM_BUILD_DIR %endif From 28d87c1333b8510457dc63dd754503ad11f2f9d4 Mon Sep 17 00:00:00 2001 From: "Daniel A. Wozniak" Date: Mon, 1 May 2023 20:00:55 -0700 Subject: [PATCH 089/121] Add changelogs for #64141 and #64158 --- changelog/64141.fixed.md | 1 + changelog/64158.fixed.md | 1 + pkg/debian/salt-common.preinst | 6 +++--- pkg/rpm/salt.spec | 4 ++-- pkg/tests/integration/test_salt_user.py | 6 ++++-- 5 files changed, 11 insertions(+), 7 deletions(-) create mode 100644 changelog/64141.fixed.md create mode 100644 changelog/64158.fixed.md diff --git a/changelog/64141.fixed.md b/changelog/64141.fixed.md new file mode 100644 index 00000000000..62c3e8f90c1 --- /dev/null +++ b/changelog/64141.fixed.md @@ -0,0 +1 @@ +Make salt user's home /opt/saltstack/salt diff --git a/changelog/64158.fixed.md b/changelog/64158.fixed.md new file mode 100644 index 00000000000..a31abbfe023 --- /dev/null +++ b/changelog/64158.fixed.md @@ -0,0 +1 @@ +Salt minion runs with salt group permissions diff --git a/pkg/debian/salt-common.preinst b/pkg/debian/salt-common.preinst index dddca49c7bf..967060bc0ee 100644 --- a/pkg/debian/salt-common.preinst +++ b/pkg/debian/salt-common.preinst @@ -17,11 +17,11 @@ case "$1" in # 3. create user if not existing if ! getent passwd | grep -q "^$SALT_USER:"; then echo -n "Adding system user $SALT_USER.." 
- adduser --quiet \ + useradd --quiet \ --system \ - --ingroup $SALT_GROUP \ --no-create-home \ - --disabled-password \ + -s /sbin/nologin + -g $SALT_GROUP \ $SALT_USER 2>/dev/null || true echo "..done" fi diff --git a/pkg/rpm/salt.spec b/pkg/rpm/salt.spec index 769ecd7180e..ce32d47ed66 100644 --- a/pkg/rpm/salt.spec +++ b/pkg/rpm/salt.spec @@ -381,9 +381,9 @@ test -d %{_SALT_HOME} || mkdir -p %{_SALT_HOME} # 3. create user if not existing # -g %{_SALT_GROUP} \ if ! getent passwd | grep -q "^%{_SALT_USER}:"; then - adduser --system \ + useradd --system \ --no-create-home \ - -s /sbin/nlogin \ + -s /sbin/nologin \ -g %{_SALT_GROUP} \ %{_SALT_USER} 2>/dev/null || true fi diff --git a/pkg/tests/integration/test_salt_user.py b/pkg/tests/integration/test_salt_user.py index 374004178f3..4c8d1af664d 100644 --- a/pkg/tests/integration/test_salt_user.py +++ b/pkg/tests/integration/test_salt_user.py @@ -28,7 +28,9 @@ def test_salt_user_home(install_salt): """ Test the correct user is running the Salt Master """ - proc = subprocess.run(["getent", "salt"], check=False, capture_output=True) + proc = subprocess.run( + ["getent", "passwd", "salt"], check=False, capture_output=True + ) assert proc.returncode == 0 home = "" try: @@ -47,7 +49,7 @@ def test_salt_user_group(install_salt): in_group = False try: for group in proc.stdout.decode().split(" "): - if group == "salt": + if "salt" in group: in_group = True except: pass From e36aa3cfcb4dfbfa6b229e32e92dd1cd199ee5d6 Mon Sep 17 00:00:00 2001 From: "Daniel A. Wozniak" Date: Wed, 3 May 2023 00:33:16 -0700 Subject: [PATCH 090/121] Skip all salt user tests on mac --- pkg/debian/rules | 4 +--- pkg/debian/salt-common.preinst | 5 ++--- pkg/debian/salt-minion.install | 1 + pkg/rpm/salt.spec | 8 +++----- pkg/tests/integration/test_salt_user.py | 3 +-- 5 files changed, 8 insertions(+), 13 deletions(-) diff --git a/pkg/debian/rules b/pkg/debian/rules index 1a7556ab699..a73b38b4041 100755 --- a/pkg/debian/rules +++ b/pkg/debian/rules @@ -50,10 +50,8 @@ override_dh_install: mkdir -p debian/salt-common/opt/saltstack cp -R build/onedir/salt debian/salt-common/opt/saltstack/ - # Generate master and minion configs + # Generate master config mkdir -p debian/salt-master/etc/salt sed 's/#user: root/user: salt/g' conf/master > debian/salt-master/etc/salt/master - mkdir -p debian/salt-minion/etc/salt - sed 's/#user: root/#user: root\ngroup: salt/g' conf/minion > debian/salt-minion/etc/salt/minion dh_install diff --git a/pkg/debian/salt-common.preinst b/pkg/debian/salt-common.preinst index 967060bc0ee..48816330357 100644 --- a/pkg/debian/salt-common.preinst +++ b/pkg/debian/salt-common.preinst @@ -17,10 +17,9 @@ case "$1" in # 3. create user if not existing if ! getent passwd | grep -q "^$SALT_USER:"; then echo -n "Adding system user $SALT_USER.." 
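# A minimal sketch of the corrected user-creation logic this hunk arrives
# at (same flags as below; assumes SALT_USER and SALT_GROUP are both set
# to "salt" earlier in the script):
if ! getent passwd | grep -q "^$SALT_USER:"; then
    useradd --system \
        --no-create-home \
        -s /sbin/nologin \
        -g "$SALT_GROUP" \
        "$SALT_USER" 2>/dev/null || true
fi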
- useradd --quiet \ - --system \ + useradd --system \ --no-create-home \ - -s /sbin/nologin + -s /sbin/nologin \ -g $SALT_GROUP \ $SALT_USER 2>/dev/null || true echo "..done" diff --git a/pkg/debian/salt-minion.install b/pkg/debian/salt-minion.install index 3132ad7d128..d7a23a423bd 100644 --- a/pkg/debian/salt-minion.install +++ b/pkg/debian/salt-minion.install @@ -1,3 +1,4 @@ +conf/minion /etc/salt conf/proxy /etc/salt pkg/common/salt-minion.service /lib/systemd/system pkg/common/salt-proxy@.service /lib/systemd/system diff --git a/pkg/rpm/salt.spec b/pkg/rpm/salt.spec index ce32d47ed66..0c769cd369b 100644 --- a/pkg/rpm/salt.spec +++ b/pkg/rpm/salt.spec @@ -151,9 +151,8 @@ cd $RPM_BUILD_DIR $RPM_BUILD_DIR/build/venv/bin/tools pkg build salt-onedir . --package-name $RPM_BUILD_DIR/build/salt --platform linux $RPM_BUILD_DIR/build/venv/bin/tools pkg pre-archive-cleanup --pkg $RPM_BUILD_DIR/build/salt - # Generate master and minion configs + # Generate master config sed 's/#user: root/user: salt/g' %{_salt_src}/conf/master > $RPM_BUILD_DIR/build/master - sed 's/#group: root/#user: root\ngroup: salt/g' %{_salt_src}/conf/minion > $RPM_BUILD_DIR/build/minion %else # The relenv onedir is being provided, all setup up until Salt is installed @@ -164,9 +163,8 @@ cd $RPM_BUILD_DIR # Fix any hardcoded paths to the relenv python binary on any of the scripts installed in the /bin directory find salt/bin/ -type f -exec sed -i 's:#!/\(.*\)salt/bin/python3:#!/bin/sh\n"exec" "$$(dirname $$(readlink -f $$0))/python3" "$$0" "$$@":g' {} \; - # Generate master and minion configs + # Generate master config sed 's/#user: root/user: salt/g' %{_salt_src}/conf/master > $RPM_BUILD_DIR/build/master - sed 's/#group: root/#user: root\ngroup: salt/g' %{_salt_src}/conf/minion > $RPM_BUILD_DIR/build/minion cd $RPM_BUILD_DIR %endif @@ -224,7 +222,7 @@ install -m 0755 %{buildroot}/opt/saltstack/salt/spm %{buildroot}%{_bindir}/spm install -m 0755 %{buildroot}/opt/saltstack/salt/salt-pip %{buildroot}%{_bindir}/salt-pip # Add the config files -install -p -m 0640 $RPM_BUILD_DIR/build/minion %{buildroot}%{_sysconfdir}/salt/minion +install -p -m 0640 %{_salt_src}/conf/minion %{buildroot}%{_sysconfdir}/salt/minion install -p -m 0640 $RPM_BUILD_DIR/build/master %{buildroot}%{_sysconfdir}/salt/master install -p -m 0640 %{_salt_src}/conf/cloud %{buildroot}%{_sysconfdir}/salt/cloud install -p -m 0640 %{_salt_src}/conf/roster %{buildroot}%{_sysconfdir}/salt/roster diff --git a/pkg/tests/integration/test_salt_user.py b/pkg/tests/integration/test_salt_user.py index 4c8d1af664d..7e87f68f5c1 100644 --- a/pkg/tests/integration/test_salt_user.py +++ b/pkg/tests/integration/test_salt_user.py @@ -7,6 +7,7 @@ from pytestskipmarkers.utils import platform pytestmark = [ pytest.mark.skip_on_windows, + pytest.mark.skip_on_darwin, ] @@ -14,8 +15,6 @@ def test_salt_user_master(salt_master, install_salt): """ Test the correct user is running the Salt Master """ - if platform.is_windows() or platform.is_darwin(): - pytest.skip("Package does not have user set. Not testing user") match = False for proc in psutil.Process(salt_master.pid).children(): assert proc.username() == "salt" From 3365a55c56f0e462562169629e749caa13d718ee Mon Sep 17 00:00:00 2001 From: "Daniel A. 
Wozniak" Date: Wed, 3 May 2023 10:59:27 -0700 Subject: [PATCH 091/121] Update changelog entry --- changelog/64158.fixed.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/changelog/64158.fixed.md b/changelog/64158.fixed.md index a31abbfe023..e34fc72b4d8 100644 --- a/changelog/64158.fixed.md +++ b/changelog/64158.fixed.md @@ -1 +1 @@ -Salt minion runs with salt group permissions +Move salt user and group creation to common package From c4ee9ee9670f1c57f248b1e47f3b4ddf6c8f5087 Mon Sep 17 00:00:00 2001 From: "Daniel A. Wozniak" Date: Wed, 3 May 2023 00:04:00 -0700 Subject: [PATCH 092/121] Fix perms on could deployment directories --- pkg/debian/salt-cloud.postinst | 5 +++++ pkg/rpm/salt.spec | 2 ++ pkg/tests/integration/test_salt_user.py | 18 ++++++++++++++++++ 3 files changed, 25 insertions(+) create mode 100644 pkg/debian/salt-cloud.postinst diff --git a/pkg/debian/salt-cloud.postinst b/pkg/debian/salt-cloud.postinst new file mode 100644 index 00000000000..12a955b9349 --- /dev/null +++ b/pkg/debian/salt-cloud.postinst @@ -0,0 +1,5 @@ +case "$1" in + configure) + chown -R salt:salt /etc/salt/cloud.deploy.d /opt/saltstack/salt/lib/python3.10/site-packages/salt/cloud/deploy + ;; +esac diff --git a/pkg/rpm/salt.spec b/pkg/rpm/salt.spec index 0c769cd369b..3cf777f20bc 100644 --- a/pkg/rpm/salt.spec +++ b/pkg/rpm/salt.spec @@ -415,6 +415,8 @@ ln -s -f /opt/saltstack/salt/salt-pip %{_bindir}/salt-pip %post cloud +chown -R salt:salt /etc/salt/cloud.deploy.d +chown -R salt:salt /opt/saltstack/salt/lib/python3.10/site-packages/salt/cloud/deploy ln -s -f /opt/saltstack/salt/salt-cloud %{_bindir}/salt-cloud diff --git a/pkg/tests/integration/test_salt_user.py b/pkg/tests/integration/test_salt_user.py index 7e87f68f5c1..c8e7afa6d9c 100644 --- a/pkg/tests/integration/test_salt_user.py +++ b/pkg/tests/integration/test_salt_user.py @@ -1,3 +1,4 @@ +import pathlib import subprocess import psutil @@ -53,3 +54,20 @@ def test_salt_user_group(install_salt): except: pass assert in_group is True + + +def test_salt_cloud_dirs(install_salt): + """ + Test the correct user is running the Salt Master + """ + if platform.is_windows() or platform.is_darwin(): + pytest.skip("Package does not have user set. Not testing user") + paths = [ + "/opt/saltstack/salt/lib/python3.10/site-packages/salt/cloud/deploy", + "/etc/salt/cloud.deploy.d", + ] + for name in paths: + path = pathlib.Path(name) + assert path.exists() + assert path.owner() == "salt" + assert path.group() == "salt" From 58b6f8c36914a03a7730edf14af9176a290f6f94 Mon Sep 17 00:00:00 2001 From: "Daniel A. Wozniak" Date: Wed, 3 May 2023 00:06:07 -0700 Subject: [PATCH 093/121] Add changelog for #64204 --- changelog/64204.fixed.md | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog/64204.fixed.md diff --git a/changelog/64204.fixed.md b/changelog/64204.fixed.md new file mode 100644 index 00000000000..bc979379c9d --- /dev/null +++ b/changelog/64204.fixed.md @@ -0,0 +1 @@ +Cloud deployment directories are owned by salt user and group From 8e4907a057599e1a9aabb940c5c90277ebee1e24 Mon Sep 17 00:00:00 2001 From: "Daniel A. 
Wozniak" Date: Wed, 3 May 2023 00:35:26 -0700 Subject: [PATCH 094/121] Skip salt user tests on macos --- pkg/tests/integration/test_salt_user.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/pkg/tests/integration/test_salt_user.py b/pkg/tests/integration/test_salt_user.py index c8e7afa6d9c..d1c8d504fa0 100644 --- a/pkg/tests/integration/test_salt_user.py +++ b/pkg/tests/integration/test_salt_user.py @@ -60,8 +60,6 @@ def test_salt_cloud_dirs(install_salt): """ Test the correct user is running the Salt Master """ - if platform.is_windows() or platform.is_darwin(): - pytest.skip("Package does not have user set. Not testing user") paths = [ "/opt/saltstack/salt/lib/python3.10/site-packages/salt/cloud/deploy", "/etc/salt/cloud.deploy.d", From 66e085740cba30e6a5bf0f14421dd3a784f7e2c9 Mon Sep 17 00:00:00 2001 From: Twangboy Date: Wed, 3 May 2023 14:38:37 -0600 Subject: [PATCH 095/121] make lgpo_reg enforce registry changes --- changelog/64222.fixed.md | 1 + salt/states/win_lgpo_reg.py | 143 +++++-- salt/utils/win_reg.py | 1 + .../pytests/unit/states/test_win_lgpo_reg.py | 373 ++++++++++++++---- tests/pytests/unit/utils/test_win_reg.py | 3 + 5 files changed, 400 insertions(+), 121 deletions(-) create mode 100644 changelog/64222.fixed.md diff --git a/changelog/64222.fixed.md b/changelog/64222.fixed.md new file mode 100644 index 00000000000..ce7b5a54d8c --- /dev/null +++ b/changelog/64222.fixed.md @@ -0,0 +1 @@ +``lgpo_reg`` state now enforces and reports changes to the registry diff --git a/salt/states/win_lgpo_reg.py b/salt/states/win_lgpo_reg.py index 23120c6fe04..8377817a198 100644 --- a/salt/states/win_lgpo_reg.py +++ b/salt/states/win_lgpo_reg.py @@ -72,6 +72,27 @@ def __virtual__(): return __virtualname__ +def _get_current(key, name, policy_class): + """ + Helper function to get the current state of the policy + """ + hive = "HKLM" + if policy_class == "User": + hive = "HKCU" + pol = __salt__["lgpo_reg.get_value"]( + key=key, v_name=name, policy_class=policy_class + ) + reg_raw = __utils__["reg.read_value"](hive=hive, key=key, vname=name) + + reg = {} + if reg_raw["vdata"] is not None: + reg["data"] = reg_raw["vdata"] + if reg_raw["vtype"] is not None: + reg["type"] = reg_raw["vtype"] + + return {"pol": pol, "reg": reg} + + def value_present(name, key, v_data, v_type="REG_DWORD", policy_class="Machine"): r""" Ensure a registry setting is present in the Registry.pol file. 
@@ -128,16 +149,29 @@ def value_present(name, key, v_data, v_type="REG_DWORD", policy_class="Machine") """ ret = {"name": name, "changes": {}, "result": False, "comment": ""} - old = __salt__["lgpo_reg.get_value"]( - key=key, v_name=name, policy_class=policy_class + old = _get_current(key=key, name=name, policy_class=policy_class) + + pol_correct = ( + str(old["pol"].get("data", "")) == str(v_data) + and old["pol"].get("type", "") == v_type ) - if old.get("data", "") == v_data and old.get("type", "") == v_type: - ret["comment"] = "Registry.pol value already present" + reg_correct = ( + str(old["reg"].get("data", "")) == str(v_data) + and old["reg"].get("type", "") == v_type + ) + + if pol_correct and reg_correct: + ret["comment"] = "Policy value already present\nRegistry value already present" ret["result"] = True return ret if __opts__["test"]: - ret["comment"] = "Registry.pol value will be set" + if not pol_correct: + ret["comment"] = "Policy value will be set" + if not reg_correct: + if ret["comment"]: + ret["comment"] += "\n" + ret["comment"] += "Registry value will be set" ret["result"] = None return ret @@ -149,15 +183,24 @@ def value_present(name, key, v_data, v_type="REG_DWORD", policy_class="Machine") policy_class=policy_class, ) - new = __salt__["lgpo_reg.get_value"]( - key=key, v_name=name, policy_class=policy_class + new = _get_current(key=key, name=name, policy_class=policy_class) + + pol_correct = ( + str(new["pol"]["data"]) == str(v_data) and new["pol"]["type"] == v_type + ) + reg_correct = ( + str(new["reg"]["data"]) == str(v_data) and new["reg"]["type"] == v_type ) - if str(new["data"]) == str(v_data) and new["type"] == v_type: - ret["comment"] = "Registry.pol value has been set" + if pol_correct and reg_correct: + ret["comment"] = "Registry policy value has been set" ret["result"] = True - else: - ret["comment"] = "Failed to set Registry.pol value" + elif not pol_correct: + ret["comment"] = "Failed to set policy value" + elif not reg_correct: + if ret["comment"]: + ret["comment"] += "\n" + ret["comment"] += "Failed to set registry value" changes = salt.utils.data.recursive_diff(old, new) @@ -206,30 +249,42 @@ def value_disabled(name, key, policy_class="Machine"): """ ret = {"name": name, "changes": {}, "result": False, "comment": ""} - old = __salt__["lgpo_reg.get_value"]( - key=key, v_name=name, policy_class=policy_class - ) - if old.get("data", "") == "**del.{}".format(name): - ret["comment"] = "Registry.pol value already disabled" + old = _get_current(key=key, name=name, policy_class=policy_class) + + pol_correct = old["pol"].get("data", "") == "**del.{}".format(name) + reg_correct = old["reg"] == {} + + if pol_correct and reg_correct: + ret["comment"] = "Registry policy value already disabled" ret["result"] = True return ret if __opts__["test"]: - ret["comment"] = "Registry.pol value will be disabled" + if not pol_correct: + ret["comment"] = "Policy value will be disabled" + if not reg_correct: + if ret["comment"]: + ret["comment"] += "\n" + ret["comment"] += "Registry value will be removed" ret["result"] = None return ret __salt__["lgpo_reg.disable_value"](key=key, v_name=name, policy_class=policy_class) - new = __salt__["lgpo_reg.get_value"]( - key=key, v_name=name, policy_class=policy_class - ) + new = _get_current(key=key, name=name, policy_class=policy_class) - if "**del." 
in str(new["data"]) and new["type"] == "REG_SZ": - ret["comment"] = "Registry.pol value disabled" + pol_correct = new["pol"].get("data", "") == "**del.{}".format(name) + reg_correct = new["reg"] == {} + + if pol_correct and reg_correct: + ret["comment"] = "Registry policy value disabled" ret["result"] = True - else: - ret["comment"] = "Failed to disable Registry.pol value" + elif not pol_correct: + ret["comment"] = "Failed to disable policy value" + elif not reg_correct: + if ret["comment"]: + ret["comment"] += "\n" + ret["comment"] += "Failed to remove registry value" changes = salt.utils.data.recursive_diff(old, new) @@ -278,32 +333,42 @@ def value_absent(name, key, policy_class="Machine"): """ ret = {"name": name, "changes": {}, "result": False, "comment": ""} - old = __salt__["lgpo_reg.get_value"]( - key=key, v_name=name, policy_class=policy_class - ) - if not old: - ret["comment"] = "Registry.pol value already absent" + old = _get_current(key=key, name=name, policy_class=policy_class) + + pol_correct = old["pol"] == {} + reg_correct = old["reg"] == {} + + if pol_correct and reg_correct: + ret["comment"] = "Registry policy value already deleted" ret["result"] = True return ret if __opts__["test"]: - ret["comment"] = "Registry.pol value will be deleted" + if not pol_correct: + ret["comment"] = "Policy value will be deleted" + if not reg_correct: + if ret["comment"]: + ret["comment"] += "\n" + ret["comment"] += "Registry value will be deleted" ret["result"] = None return ret __salt__["lgpo_reg.delete_value"](key=key, v_name=name, policy_class=policy_class) - new = __salt__["lgpo_reg.get_value"]( - key=key, v_name=name, policy_class=policy_class - ) + new = _get_current(key=key, name=name, policy_class=policy_class) - if not new: - ret["comment"] = "Registry.pol value deleted" + pol_correct = new["pol"] == {} + reg_correct = new["reg"] == {} + + if pol_correct and reg_correct: + ret["comment"] = "Registry policy value deleted" ret["result"] = True - # We're setting this here in case new is None - new = {} - else: - ret["comment"] = "Failed to delete Registry.pol value" + elif not pol_correct: + ret["comment"] = "Failed to delete policy value" + elif not reg_correct: + if ret["comment"]: + ret["comment"] += "\n" + ret["comment"] += "Failed to delete registry value" changes = salt.utils.data.recursive_diff(old, new) diff --git a/salt/utils/win_reg.py b/salt/utils/win_reg.py index cde01a9556a..74aa17b5d81 100644 --- a/salt/utils/win_reg.py +++ b/salt/utils/win_reg.py @@ -527,6 +527,7 @@ def read_value(hive, key, vname=None, use_32bit_registry=False): "key": local_key, "vname": local_vname, "vdata": None, + "vtype": None, "success": True, } diff --git a/tests/pytests/unit/states/test_win_lgpo_reg.py b/tests/pytests/unit/states/test_win_lgpo_reg.py index 6ae4ef7f84d..ea345deae23 100644 --- a/tests/pytests/unit/states/test_win_lgpo_reg.py +++ b/tests/pytests/unit/states/test_win_lgpo_reg.py @@ -29,6 +29,9 @@ def configure_loader_modules(): "lgpo_reg.disable_value": win_lgpo_reg.disable_value, "lgpo_reg.delete_value": win_lgpo_reg.delete_value, }, + "__utils__": { + "reg.read_value": salt.utils.win_reg.read_value, + }, }, file: { "__utils__": { @@ -44,6 +47,8 @@ def empty_reg_pol_mach(): reg_pol_file = pathlib.Path(class_info["Machine"]["policy_path"]) reg_pol_file.parent.mkdir(parents=True, exist_ok=True) reg_pol_file.write_bytes(salt.utils.win_lgpo_reg.REG_POL_HEADER.encode("utf-16-le")) + salt.utils.win_reg.delete_key_recursive(hive="HKLM", key="SOFTWARE\\MyKey1") + 
salt.utils.win_reg.delete_key_recursive(hive="HKLM", key="SOFTWARE\\MyKey2") yield salt.utils.win_reg.delete_key_recursive(hive="HKLM", key="SOFTWARE\\MyKey1") salt.utils.win_reg.delete_key_recursive(hive="HKLM", key="SOFTWARE\\MyKey2") @@ -56,6 +61,8 @@ def empty_reg_pol_user(): reg_pol_file = pathlib.Path(class_info["User"]["policy_path"]) reg_pol_file.parent.mkdir(parents=True, exist_ok=True) reg_pol_file.write_bytes(salt.utils.win_lgpo_reg.REG_POL_HEADER.encode("utf-16-le")) + salt.utils.win_reg.delete_key_recursive(hive="HKCU", key="SOFTWARE\\MyKey1") + salt.utils.win_reg.delete_key_recursive(hive="HKCU", key="SOFTWARE\\MyKey2") yield salt.utils.win_reg.delete_key_recursive(hive="HKCU", key="SOFTWARE\\MyKey1") salt.utils.win_reg.delete_key_recursive(hive="HKCU", key="SOFTWARE\\MyKey2") @@ -87,6 +94,27 @@ def reg_pol_mach(): }, } win_lgpo_reg.write_reg_pol(data_to_write) + salt.utils.win_reg.set_value( + hive="HKLM", + key="SOFTWARE\\MyKey1", + vname="MyValue1", + vdata="squidward", + vtype="REG_SZ", + ) + salt.utils.win_reg.set_value( + hive="HKLM", + key="SOFTWARE\\MyKey1", + vname="MyValue3", + vdata=0, + vtype="REG_DWORD", + ) + salt.utils.win_reg.set_value( + hive="HKLM", + key="SOFTWARE\\MyKey2", + vname="MyValue3", + vdata=["spongebob", "squarepants"], + vtype="REG_MULTI_SZ", + ) yield salt.utils.win_reg.delete_key_recursive(hive="HKLM", key="SOFTWARE\\MyKey1") salt.utils.win_reg.delete_key_recursive(hive="HKLM", key="SOFTWARE\\MyKey2") @@ -121,6 +149,27 @@ def reg_pol_user(): }, } win_lgpo_reg.write_reg_pol(data_to_write, policy_class="User") + salt.utils.win_reg.set_value( + hive="HKCU", + key="SOFTWARE\\MyKey1", + vname="MyValue1", + vdata="squidward", + vtype="REG_SZ", + ) + salt.utils.win_reg.set_value( + hive="HKCU", + key="SOFTWARE\\MyKey1", + vname="MyValue3", + vdata=0, + vtype="REG_DWORD", + ) + salt.utils.win_reg.set_value( + hive="HKCU", + key="SOFTWARE\\MyKey2", + vname="MyValue3", + vdata=["spongebob", "squarepants"], + vtype="REG_MULTI_SZ", + ) yield salt.utils.win_reg.delete_key_recursive(hive="HKCU", key="SOFTWARE\\MyKey1") salt.utils.win_reg.delete_key_recursive(hive="HKCU", key="SOFTWARE\\MyKey2") @@ -140,25 +189,76 @@ def test_machine_value_present(empty_reg_pol_mach): """ result = lgpo_reg.value_present( name="MyValue", - key="SOFTWARE\\MyKey", + key="SOFTWARE\\MyKey1", v_data="1", v_type="REG_DWORD", ) expected = { "changes": { "new": { - "data": 1, - "type": "REG_DWORD", + "pol": { + "data": 1, + "type": "REG_DWORD", + }, + "reg": { + "data": 1, + "type": "REG_DWORD", + }, + }, + "old": { + "pol": {}, + "reg": {}, }, - "old": {}, }, - "comment": "Registry.pol value has been set", + "comment": "Registry policy value has been set", "name": "MyValue", "result": True, } assert result == expected +def test_machine_value_present_enforce(reg_pol_mach): + """ + Issue #64222 + Test value.present in Machine policy when the registry changes after the + state is applied. 
This would cause a discrepancy between the registry + setting and the value in the registry.pol file + """ + # reg_pol_mach has MyValue3 with REG_DWORD value of 0, let's set it to 1 + salt.utils.win_reg.set_value( + hive="HKLM", + key="SOFTWARE\\MyKey1", + vname="MyValue3", + vdata="1", + vtype="REG_DWORD", + ) + # Now the registry and Registry.pol file are out of sync + result = lgpo_reg.value_present( + name="MyValue3", + key="SOFTWARE\\MyKey1", + v_data="0", + v_type="REG_DWORD", + ) + expected = { + "changes": { + "new": { + "reg": { + "data": 0, + } + }, + "old": { + "reg": { + "data": 1, + } + }, + }, + "comment": "Registry policy value has been set", + "name": "MyValue3", + "result": True, + } + assert result == expected + + def test_machine_value_present_existing_change(reg_pol_mach): """ Test value.present with existing incorrect value in Machine policy @@ -172,15 +272,27 @@ def test_machine_value_present_existing_change(reg_pol_mach): expected = { "changes": { "new": { - "data": 2, - "type": "REG_DWORD", + "pol": { + "data": 2, + "type": "REG_DWORD", + }, + "reg": { + "data": 2, + "type": "REG_DWORD", + }, }, "old": { - "data": "squidward", - "type": "REG_SZ", + "pol": { + "data": "squidward", + "type": "REG_SZ", + }, + "reg": { + "data": "squidward", + "type": "REG_SZ", + }, }, }, - "comment": "Registry.pol value has been set", + "comment": "Registry policy value has been set", "name": "MyValue1", "result": True, } @@ -200,13 +312,23 @@ def test_machine_value_present_existing_change_dword(reg_pol_mach): expected = { "changes": { "new": { - "data": 1, + "pol": { + "data": 1, + }, + "reg": { + "data": 1, + }, }, "old": { - "data": 0, + "pol": { + "data": 0, + }, + "reg": { + "data": 0, + }, }, }, - "comment": "Registry.pol value has been set", + "comment": "Registry policy value has been set", "name": "MyValue3", "result": True, } @@ -225,7 +347,7 @@ def test_machine_value_present_existing_no_change(reg_pol_mach): ) expected = { "changes": {}, - "comment": "Registry.pol value already present", + "comment": "Policy value already present\nRegistry value already present", "name": "MyValue1", "result": True, } @@ -239,13 +361,13 @@ def test_machine_value_present_test_true(empty_reg_pol_mach): with patch.dict(lgpo_reg.__opts__, {"test": True}): result = lgpo_reg.value_present( name="MyValue", - key="SOFTWARE\\MyKey", + key="SOFTWARE\\MyKey1", v_data="1", v_type="REG_DWORD", ) expected = { "changes": {}, - "comment": "Registry.pol value will be set", + "comment": "Policy value will be set\nRegistry value will be set", "name": "MyValue", "result": None, } @@ -265,15 +387,24 @@ def test_machine_value_present_existing_disabled(reg_pol_mach): expected = { "changes": { "new": { - "data": 2, - "type": "REG_DWORD", + "pol": { + "data": 2, + "type": "REG_DWORD", + }, + "reg": { + "data": 2, + "type": "REG_DWORD", + }, }, "old": { - "data": "**del.MyValue2", - "type": "REG_SZ", + "pol": { + "data": "**del.MyValue2", + "type": "REG_SZ", + }, + "reg": {}, }, }, - "comment": "Registry.pol value has been set", + "comment": "Registry policy value has been set", "name": "MyValue2", "result": True, } @@ -291,12 +422,14 @@ def test_machine_value_disabled(empty_reg_pol_mach): expected = { "changes": { "new": { - "data": "**del.MyValue1", - "type": "REG_SZ", + "pol": { + "data": "**del.MyValue1", + "type": "REG_SZ", + }, }, - "old": {}, + "old": {"pol": {}}, }, - "comment": "Registry.pol value disabled", + "comment": "Registry policy value disabled", "name": "MyValue1", "result": True, } @@ -315,13 
+448,19 @@ def test_machine_value_disabled_existing_change(reg_pol_mach): expected = { "changes": { "new": { - "data": "**del.MyValue1", + "pol": { + "data": "**del.MyValue1", + }, + "reg": {}, }, "old": { - "data": "squidward", + "pol": { + "data": "squidward", + }, + "reg": {"data": "squidward", "type": "REG_SZ"}, }, }, - "comment": "Registry.pol value disabled", + "comment": "Registry policy value disabled", "name": "MyValue1", "result": True, } @@ -338,7 +477,7 @@ def test_machine_value_disabled_existing_no_change(reg_pol_mach): ) expected = { "changes": {}, - "comment": "Registry.pol value already disabled", + "comment": "Registry policy value already disabled", "name": "MyValue2", "result": True, } @@ -352,11 +491,11 @@ def test_machine_value_disabled_test_true(empty_reg_pol_mach): with patch.dict(lgpo_reg.__opts__, {"test": True}): result = lgpo_reg.value_disabled( name="MyValue", - key="SOFTWARE\\MyKey", + key="SOFTWARE\\MyKey1", ) expected = { "changes": {}, - "comment": "Registry.pol value will be disabled", + "comment": "Policy value will be disabled", "name": "MyValue", "result": None, } @@ -370,13 +509,19 @@ def test_machine_value_absent(reg_pol_mach): result = lgpo_reg.value_absent(name="MyValue1", key="SOFTWARE\\MyKey1") expected = { "changes": { - "new": {}, + "new": {"pol": {}, "reg": {}}, "old": { - "data": "squidward", - "type": "REG_SZ", + "pol": { + "data": "squidward", + "type": "REG_SZ", + }, + "reg": { + "data": "squidward", + "type": "REG_SZ", + }, }, }, - "comment": "Registry.pol value deleted", + "comment": "Registry policy value deleted", "name": "MyValue1", "result": True, } @@ -390,7 +535,7 @@ def test_machine_value_absent_no_change(empty_reg_pol_mach): result = lgpo_reg.value_absent(name="MyValue1", key="SOFTWARE\\MyKey1") expected = { "changes": {}, - "comment": "Registry.pol value already absent", + "comment": "Registry policy value already deleted", "name": "MyValue1", "result": True, } @@ -404,13 +549,15 @@ def test_machine_value_absent_disabled(reg_pol_mach): result = lgpo_reg.value_absent(name="MyValue2", key="SOFTWARE\\MyKey1") expected = { "changes": { - "new": {}, + "new": {"pol": {}}, "old": { - "data": "**del.MyValue2", - "type": "REG_SZ", + "pol": { + "data": "**del.MyValue2", + "type": "REG_SZ", + }, }, }, - "comment": "Registry.pol value deleted", + "comment": "Registry policy value deleted", "name": "MyValue2", "result": True, } @@ -425,7 +572,7 @@ def test_machine_value_absent_test_true(reg_pol_mach): result = lgpo_reg.value_absent(name="MyValue1", key="SOFTWARE\\MyKey1") expected = { "changes": {}, - "comment": "Registry.pol value will be deleted", + "comment": "Policy value will be deleted\nRegistry value will be deleted", "name": "MyValue1", "result": None, } @@ -438,7 +585,7 @@ def test_user_value_present(empty_reg_pol_user): """ result = lgpo_reg.value_present( name="MyValue", - key="SOFTWARE\\MyKey", + key="SOFTWARE\\MyKey1", v_data="1", v_type="REG_DWORD", policy_class="User", @@ -446,12 +593,21 @@ def test_user_value_present(empty_reg_pol_user): expected = { "changes": { "new": { - "data": 1, - "type": "REG_DWORD", + "pol": { + "data": 1, + "type": "REG_DWORD", + }, + "reg": { + "data": 1, + "type": "REG_DWORD", + }, + }, + "old": { + "pol": {}, + "reg": {}, }, - "old": {}, }, - "comment": "Registry.pol value has been set", + "comment": "Registry policy value has been set", "name": "MyValue", "result": True, } @@ -472,15 +628,27 @@ def test_user_value_present_existing_change(reg_pol_user): expected = { "changes": { "new": { - "data": 
2, - "type": "REG_DWORD", + "pol": { + "data": 2, + "type": "REG_DWORD", + }, + "reg": { + "data": 2, + "type": "REG_DWORD", + }, }, "old": { - "data": "squidward", - "type": "REG_SZ", + "pol": { + "data": "squidward", + "type": "REG_SZ", + }, + "reg": { + "data": "squidward", + "type": "REG_SZ", + }, }, }, - "comment": "Registry.pol value has been set", + "comment": "Registry policy value has been set", "name": "MyValue1", "result": True, } @@ -501,13 +669,23 @@ def test_user_value_present_existing_change_dword(reg_pol_user): expected = { "changes": { "new": { - "data": 1, + "pol": { + "data": 1, + }, + "reg": { + "data": 1, + }, }, "old": { - "data": 0, + "pol": { + "data": 0, + }, + "reg": { + "data": 0, + }, }, }, - "comment": "Registry.pol value has been set", + "comment": "Registry policy value has been set", "name": "MyValue3", "result": True, } @@ -527,7 +705,7 @@ def test_user_value_present_existing_no_change(reg_pol_user): ) expected = { "changes": {}, - "comment": "Registry.pol value already present", + "comment": "Policy value already present\nRegistry value already present", "name": "MyValue1", "result": True, } @@ -541,14 +719,14 @@ def test_user_value_present_test_true(empty_reg_pol_user): with patch.dict(lgpo_reg.__opts__, {"test": True}): result = lgpo_reg.value_present( name="MyValue", - key="SOFTWARE\\MyKey", + key="SOFTWARE\\MyKey1", v_data="1", v_type="REG_DWORD", policy_class="User", ) expected = { "changes": {}, - "comment": "Registry.pol value will be set", + "comment": "Policy value will be set\nRegistry value will be set", "name": "MyValue", "result": None, } @@ -569,15 +747,24 @@ def test_user_value_present_existing_disabled(reg_pol_user): expected = { "changes": { "new": { - "data": 2, - "type": "REG_DWORD", + "pol": { + "data": 2, + "type": "REG_DWORD", + }, + "reg": { + "data": 2, + "type": "REG_DWORD", + }, }, "old": { - "data": "**del.MyValue2", - "type": "REG_SZ", + "pol": { + "data": "**del.MyValue2", + "type": "REG_SZ", + }, + "reg": {}, }, }, - "comment": "Registry.pol value has been set", + "comment": "Registry policy value has been set", "name": "MyValue2", "result": True, } @@ -594,12 +781,14 @@ def test_user_value_disabled(empty_reg_pol_user): expected = { "changes": { "new": { - "data": "**del.MyValue1", - "type": "REG_SZ", + "pol": { + "data": "**del.MyValue1", + "type": "REG_SZ", + }, }, - "old": {}, + "old": {"pol": {}}, }, - "comment": "Registry.pol value disabled", + "comment": "Registry policy value disabled", "name": "MyValue1", "result": True, } @@ -619,13 +808,22 @@ def test_user_value_disabled_existing_change(reg_pol_user): expected = { "changes": { "new": { - "data": "**del.MyValue1", + "pol": { + "data": "**del.MyValue1", + }, + "reg": {}, }, "old": { - "data": "squidward", + "pol": { + "data": "squidward", + }, + "reg": { + "data": "squidward", + "type": "REG_SZ", + }, }, }, - "comment": "Registry.pol value disabled", + "comment": "Registry policy value disabled", "name": "MyValue1", "result": True, } @@ -643,7 +841,7 @@ def test_user_value_disabled_existing_no_change(reg_pol_user): ) expected = { "changes": {}, - "comment": "Registry.pol value already disabled", + "comment": "Registry policy value already disabled", "name": "MyValue2", "result": True, } @@ -657,12 +855,12 @@ def test_user_value_disabled_test_true(empty_reg_pol_user): with patch.dict(lgpo_reg.__opts__, {"test": True}): result = lgpo_reg.value_disabled( name="MyValue", - key="SOFTWARE\\MyKey", + key="SOFTWARE\\MyKey1", policy_class="User", ) expected = { "changes": {}, - 
"comment": "Registry.pol value will be disabled", + "comment": "Policy value will be disabled", "name": "MyValue", "result": None, } @@ -680,13 +878,22 @@ def test_user_value_absent(reg_pol_user): ) expected = { "changes": { - "new": {}, + "new": { + "pol": {}, + "reg": {}, + }, "old": { - "data": "squidward", - "type": "REG_SZ", + "pol": { + "data": "squidward", + "type": "REG_SZ", + }, + "reg": { + "data": "squidward", + "type": "REG_SZ", + }, }, }, - "comment": "Registry.pol value deleted", + "comment": "Registry policy value deleted", "name": "MyValue1", "result": True, } @@ -704,7 +911,7 @@ def test_user_value_absent_no_change(empty_reg_pol_user): ) expected = { "changes": {}, - "comment": "Registry.pol value already absent", + "comment": "Registry policy value already deleted", "name": "MyValue1", "result": True, } @@ -722,13 +929,15 @@ def test_user_value_absent_disabled(reg_pol_user): ) expected = { "changes": { - "new": {}, + "new": {"pol": {}}, "old": { - "data": "**del.MyValue2", - "type": "REG_SZ", + "pol": { + "data": "**del.MyValue2", + "type": "REG_SZ", + }, }, }, - "comment": "Registry.pol value deleted", + "comment": "Registry policy value deleted", "name": "MyValue2", "result": True, } @@ -747,7 +956,7 @@ def test_user_value_absent_test_true(reg_pol_user): ) expected = { "changes": {}, - "comment": "Registry.pol value will be deleted", + "comment": "Policy value will be deleted\nRegistry value will be deleted", "name": "MyValue1", "result": None, } diff --git a/tests/pytests/unit/utils/test_win_reg.py b/tests/pytests/unit/utils/test_win_reg.py index fa7c0186553..206c40b3089 100644 --- a/tests/pytests/unit/utils/test_win_reg.py +++ b/tests/pytests/unit/utils/test_win_reg.py @@ -338,6 +338,7 @@ def test_read_value_non_existing(): "Windows\\CurrentVersion" ), "vdata": None, + "vtype": None, "vname": "fake_name", "success": False, "hive": "HKLM", @@ -360,6 +361,7 @@ def test_read_value_non_existing_key(fake_key): expected = { "comment": "Cannot find key: HKLM\\{}".format(fake_key), "vdata": None, + "vtype": None, "vname": "fake_name", "success": False, "hive": "HKLM", @@ -375,6 +377,7 @@ def test_read_value_access_denied(fake_key): expected = { "comment": "Access is denied: HKLM\\{}".format(fake_key), "vdata": None, + "vtype": None, "vname": "fake_name", "success": False, "hive": "HKLM", From 88f223e1ca7867fde9533546667c2a9f8fb1feaa Mon Sep 17 00:00:00 2001 From: Twangboy Date: Wed, 3 May 2023 16:48:54 -0600 Subject: [PATCH 096/121] Fix test_reg, migrate to pytests --- tests/pytests/unit/modules/test_reg.py | 827 +++++++++++++++++++++++ tests/unit/modules/test_reg.py | 872 ------------------------- 2 files changed, 827 insertions(+), 872 deletions(-) create mode 100644 tests/pytests/unit/modules/test_reg.py delete mode 100644 tests/unit/modules/test_reg.py diff --git a/tests/pytests/unit/modules/test_reg.py b/tests/pytests/unit/modules/test_reg.py new file mode 100644 index 00000000000..480af192086 --- /dev/null +++ b/tests/pytests/unit/modules/test_reg.py @@ -0,0 +1,827 @@ +import pytest +from saltfactories.utils import random_string + +import salt.modules.reg as reg +import salt.utils.stringutils +import salt.utils.win_reg +from salt.exceptions import CommandExecutionError +from tests.support.mock import MagicMock, patch + +try: + import win32api + + HAS_WIN32 = True +except ImportError: + HAS_WIN32 = False + +pytestmark = [ + pytest.mark.windows_whitelisted, + pytest.mark.skip_unless_on_windows, + pytest.mark.destructive_test, + pytest.mark.skipif(HAS_WIN32 is False, 
reason="Tests require win32 libraries"), +] + + +UNICODE_KEY = "Unicode Key \N{TRADE MARK SIGN}" +UNICODE_VALUE = ( + "Unicode Value \N{COPYRIGHT SIGN},\N{TRADE MARK SIGN},\N{REGISTERED SIGN}" +) +FAKE_KEY = "SOFTWARE\\{}".format(random_string("SaltTesting-", lowercase=False)) + + +@pytest.fixture +def configure_loader_modules(): + return { + reg: { + "__utils__": { + "reg.delete_value": salt.utils.win_reg.delete_value, + "reg.delete_key_recursive": salt.utils.win_reg.delete_key_recursive, + "reg.key_exists": salt.utils.win_reg.key_exists, + "reg.list_keys": salt.utils.win_reg.list_keys, + "reg.list_values": salt.utils.win_reg.list_values, + "reg.read_value": salt.utils.win_reg.read_value, + "reg.set_value": salt.utils.win_reg.set_value, + "reg.value_exists": salt.utils.win_reg.value_exists, + } + } + } + + +def test_key_exists_existing(): + """ + Tests the key_exists function using a well known registry key + """ + assert reg.key_exists(hive="HKLM", key="SOFTWARE\\Microsoft") + + +def test_key_exists_non_existing(): + """ + Tests the key_exists function using a non existing registry key + """ + assert not reg.key_exists(hive="HKLM", key=FAKE_KEY) + + +def test_key_exists_invalid_hive(): + """ + Tests the key_exists function using an invalid hive + """ + with pytest.raises(CommandExecutionError): + reg.key_exists(hive="BADHIVE", key="SOFTWARE\\Microsoft") + + +def test_key_exists_unknown_key_error(): + """ + Tests the key_exists function with an unknown key error + """ + mock_error = MagicMock( + side_effect=win32api.error(123, "RegOpenKeyEx", "Unknown error") + ) + with patch("salt.utils.win_reg.win32api.RegOpenKeyEx", mock_error): + with pytest.raises(win32api.error): + reg.key_exists(hive="HKLM", key="SOFTWARE\\Microsoft") + + +def test_value_exists_existing(): + """ + Tests the value_exists function using a well known registry key + """ + result = reg.value_exists( + hive="HKLM", + key="SOFTWARE\\Microsoft\\Windows\\CurrentVersion", + vname="CommonFilesDir", + ) + assert result + + +def test_value_exists_non_existing(): + """ + Tests the value_exists function using a non existing registry key + """ + result = reg.value_exists( + hive="HKLM", + key="SOFTWARE\\Microsoft\\Windows\\CurrentVersion", + vname="NonExistingValueName", + ) + assert not result + + +def test_value_exists_invalid_hive(): + """ + Tests the value_exists function using an invalid hive + """ + with pytest.raises(CommandExecutionError): + reg.value_exists( + hive="BADHIVE", + key="SOFTWARE\\Microsoft\\Windows\\CurrentVersion", + vname="CommonFilesDir", + ) + + +def test_value_exists_key_not_exist(): + """ + Tests the value_exists function when the key does not exist + """ + mock_error = MagicMock( + side_effect=win32api.error(2, "RegOpenKeyEx", "Unknown error") + ) + with patch("salt.utils.win_reg.win32api.RegOpenKeyEx", mock_error): + result = reg.value_exists( + hive="HKLM", + key="SOFTWARE\\Microsoft\\Windows\\CurrentVersion", + vname="CommonFilesDir", + ) + assert not result + + +def test_value_exists_unknown_key_error(): + """ + Tests the value_exists function with an unknown error when opening the + key + """ + mock_error = MagicMock( + side_effect=win32api.error(123, "RegOpenKeyEx", "Unknown error") + ) + with patch("salt.utils.win_reg.win32api.RegOpenKeyEx", mock_error): + with pytest.raises(win32api.error): + reg.value_exists( + hive="HKLM", + key="SOFTWARE\\Microsoft\\Windows\\CurrentVersion", + vname="CommonFilesDir", + ) + + +def test_value_exists_empty_default_value(): + """ + Tests the value_exists 
function when querying the default value + """ + mock_error = MagicMock( + side_effect=win32api.error(2, "RegQueryValueEx", "Empty Value") + ) + with patch("salt.utils.win_reg.win32api.RegQueryValueEx", mock_error): + result = reg.value_exists( + hive="HKLM", + key="SOFTWARE\\Microsoft\\Windows\\CurrentVersion", + vname=None, + ) + assert result + + +def test_value_exists_no_vname(): + """ + Tests the value_exists function when the vname does not exist + """ + mock_error = MagicMock( + side_effect=win32api.error(123, "RegQueryValueEx", "Empty Value") + ) + with patch("salt.utils.win_reg.win32api.RegQueryValueEx", mock_error): + result = reg.value_exists( + hive="HKLM", + key="SOFTWARE\\Microsoft\\Windows\\CurrentVersion", + vname="NonExistingValuePair", + ) + assert not result + + +def test_list_keys_existing(): + """ + Test the list_keys function using a well known registry key + """ + assert "Microsoft" in reg.list_keys(hive="HKLM", key="SOFTWARE") + + +def test_list_keys_non_existing(): + """ + Test the list_keys function using a non existing registry key + """ + expected = (False, "Cannot find key: HKLM\\{}".format(FAKE_KEY)) + result = reg.list_keys(hive="HKLM", key=FAKE_KEY) + assert result == expected + + +def test_list_keys_invalid_hive(): + """ + Test the list_keys function when passing an invalid hive + """ + with pytest.raises(CommandExecutionError): + reg.list_keys(hive="BADHIVE", key="SOFTWARE\\Microsoft") + + +def test_list_keys_unknown_key_error(): + """ + Tests the list_keys function with an unknown key error + """ + mock_error = MagicMock( + side_effect=win32api.error(123, "RegOpenKeyEx", "Unknown error") + ) + with patch("salt.utils.win_reg.win32api.RegOpenKeyEx", mock_error): + with pytest.raises(win32api.error): + reg.list_keys(hive="HKLM", key="SOFTWARE\\Microsoft") + + +def test_list_values_existing(): + """ + Test the list_values function using a well known registry key + """ + values = reg.list_values( + hive="HKLM", key="SOFTWARE\\Microsoft\\Windows\\CurrentVersion" + ) + keys = [] + for value in values: + keys.append(value["vname"]) + assert "ProgramFilesDir" in keys + + +def test_list_values_non_existing(): + """ + Test the list_values function using a non existing registry key + """ + expected = (False, "Cannot find key: HKLM\\{}".format(FAKE_KEY)) + result = reg.list_values(hive="HKLM", key=FAKE_KEY) + assert result == expected + + +def test_list_values_invalid_hive(): + """ + Test the list_values function when passing an invalid hive + """ + with pytest.raises(CommandExecutionError): + reg.list_values(hive="BADHIVE", key="SOFTWARE\\Microsoft") + + +def test_list_values_unknown_key_error(): + """ + Tests the list_values function with an unknown key error + """ + mock_error = MagicMock( + side_effect=win32api.error(123, "RegOpenKeyEx", "Unknown error") + ) + with patch("salt.utils.win_reg.win32api.RegOpenKeyEx", mock_error): + with pytest.raises(win32api.error): + reg.list_values(hive="HKLM", key="SOFTWARE\\Microsoft") + + +def test_read_value_existing(): + """ + Test the read_value function using a well known registry value + """ + ret = reg.read_value( + hive="HKLM", + key="SOFTWARE\\Microsoft\\Windows\\CurrentVersion", + vname="ProgramFilesPath", + ) + assert ret["vdata"] == "%ProgramFiles%" + + +def test_read_value_default(): + """ + Test the read_value function reading the default value using a well + known registry key + """ + ret = reg.read_value( + hive="HKLM", key="SOFTWARE\\Microsoft\\Windows\\CurrentVersion" + ) + assert ret["vdata"] == "(value not 
set)" + + +def test_read_value_non_existing(): + """ + Test the read_value function using a non existing value pair + """ + expected = { + "comment": ( + "Cannot find fake_name in HKLM\\SOFTWARE\\Microsoft\\" + "Windows\\CurrentVersion" + ), + "vdata": None, + "vtype": None, + "vname": "fake_name", + "success": False, + "hive": "HKLM", + "key": "SOFTWARE\\Microsoft\\Windows\\CurrentVersion", + } + result = reg.read_value( + hive="HKLM", + key="SOFTWARE\\Microsoft\\Windows\\CurrentVersion", + vname="fake_name", + ) + assert result == expected + + +def test_read_value_non_existing_key(): + """ + Test the read_value function using a non existing registry key + """ + expected = { + "comment": "Cannot find key: HKLM\\{}".format(FAKE_KEY), + "vdata": None, + "vtype": None, + "vname": "fake_name", + "success": False, + "hive": "HKLM", + "key": FAKE_KEY, + } + result = reg.read_value(hive="HKLM", key=FAKE_KEY, vname="fake_name") + assert result == expected + + +def test_read_value_invalid_hive(): + """ + Test the read_value function when passing an invalid hive + """ + with pytest.raises(CommandExecutionError): + reg.read_value( + hive="BADHIVE", + key="SOFTWARE\\Microsoft", + vname="ProgramFilesPath", + ) + + +def test_read_value_unknown_key_error(): + """ + Tests the read_value function with an unknown key error + """ + mock_error = MagicMock( + side_effect=win32api.error(123, "RegOpenKeyEx", "Unknown error") + ) + with patch("salt.utils.win_reg.win32api.RegOpenKeyEx", mock_error): + with pytest.raises(win32api.error): + reg.read_value( + hive="HKLM", + key="SOFTWARE\\Microsoft\\Windows\\CurrentVersion", + vname="ProgramFilesPath", + ) + + +def test_read_value_unknown_value_error(): + """ + Tests the read_value function with an unknown value error + """ + mock_error = MagicMock( + side_effect=win32api.error(123, "RegQueryValueEx", "Unknown error") + ) + with patch("salt.utils.win_reg.win32api.RegQueryValueEx", mock_error): + with pytest.raises(win32api.error): + reg.read_value( + hive="HKLM", + key="SOFTWARE\\Microsoft\\Windows\\CurrentVersion", + vname="ProgramFilesPath", + ) + + +@pytest.mark.destructive_test +def test_read_value_multi_sz_empty_list(): + """ + An empty REG_MULTI_SZ value should return an empty list, not None + """ + try: + assert reg.set_value( + hive="HKLM", + key=FAKE_KEY, + vname="empty_list", + vdata=[], + vtype="REG_MULTI_SZ", + ) + expected = { + "hive": "HKLM", + "key": FAKE_KEY, + "success": True, + "vdata": [], + "vname": "empty_list", + "vtype": "REG_MULTI_SZ", + } + result = reg.read_value(hive="HKLM", key=FAKE_KEY, vname="empty_list") + assert result == expected + finally: + reg.delete_key_recursive(hive="HKLM", key=FAKE_KEY) + + +@pytest.mark.destructive_test +def test_set_value(): + """ + Test the set_value function + """ + try: + assert reg.set_value( + hive="HKLM", key=FAKE_KEY, vname="fake_name", vdata="fake_data" + ) + expected = { + "hive": "HKLM", + "key": FAKE_KEY, + "success": True, + "vdata": "fake_data", + "vname": "fake_name", + "vtype": "REG_SZ", + } + result = reg.read_value(hive="HKLM", key=FAKE_KEY, vname="fake_name") + assert result == expected + finally: + reg.delete_key_recursive(hive="HKLM", key=FAKE_KEY) + + +@pytest.mark.destructive_test +def test_set_value_default(): + """ + Test the set_value function on the default value + """ + try: + assert reg.set_value(hive="HKLM", key=FAKE_KEY, vdata="fake_default_data") + expected = { + "hive": "HKLM", + "key": FAKE_KEY, + "success": True, + "vdata": "fake_default_data", + "vname": "(Default)", + 
"vtype": "REG_SZ", + } + result = reg.read_value(hive="HKLM", key=FAKE_KEY) + assert result == expected + finally: + reg.delete_key_recursive(hive="HKLM", key=FAKE_KEY) + + +@pytest.mark.destructive_test +def test_set_value_unicode_key(): + """ + Test the set_value function on a unicode key + """ + try: + assert reg.set_value( + hive="HKLM", + key="\\".join([FAKE_KEY, UNICODE_KEY]), + vname="fake_name", + vdata="fake_value", + ) + expected = { + "hive": "HKLM", + "key": "\\".join([FAKE_KEY, UNICODE_KEY]), + "success": True, + "vdata": "fake_value", + "vname": "fake_name", + "vtype": "REG_SZ", + } + result = reg.read_value( + hive="HKLM", + key="\\".join([FAKE_KEY, UNICODE_KEY]), + vname="fake_name", + ) + assert result == expected + finally: + reg.delete_key_recursive(hive="HKLM", key=FAKE_KEY) + + +@pytest.mark.destructive_test +def test_set_value_unicode_value(): + """ + Test the set_value function on a unicode value + """ + try: + assert reg.set_value( + hive="HKLM", key=FAKE_KEY, vname="fake_unicode", vdata=UNICODE_VALUE + ) + expected = { + "hive": "HKLM", + "key": FAKE_KEY, + "success": True, + "vdata": UNICODE_VALUE, + "vname": "fake_unicode", + "vtype": "REG_SZ", + } + result = reg.read_value(hive="HKLM", key=FAKE_KEY, vname="fake_unicode") + assert result == expected + finally: + reg.delete_key_recursive(hive="HKLM", key=FAKE_KEY) + + +@pytest.mark.destructive_test +def test_set_value_reg_dword(): + """ + Test the set_value function on a REG_DWORD value + """ + try: + assert reg.set_value( + hive="HKLM", + key=FAKE_KEY, + vname="dword_value", + vdata=123, + vtype="REG_DWORD", + ) + expected = { + "hive": "HKLM", + "key": FAKE_KEY, + "success": True, + "vdata": 123, + "vname": "dword_value", + "vtype": "REG_DWORD", + } + result = reg.read_value(hive="HKLM", key=FAKE_KEY, vname="dword_value") + assert result == expected + finally: + reg.delete_key_recursive(hive="HKLM", key=FAKE_KEY) + + +@pytest.mark.destructive_test +def test_set_value_reg_qword(): + """ + Test the set_value function on a REG_QWORD value + """ + try: + assert reg.set_value( + hive="HKLM", + key=FAKE_KEY, + vname="qword_value", + vdata=123, + vtype="REG_QWORD", + ) + expected = { + "hive": "HKLM", + "key": FAKE_KEY, + "success": True, + "vdata": 123, + "vname": "qword_value", + "vtype": "REG_QWORD", + } + result = reg.read_value(hive="HKLM", key=FAKE_KEY, vname="qword_value") + assert result == expected + finally: + reg.delete_key_recursive(hive="HKLM", key=FAKE_KEY) + + +def test_set_value_invalid_hive(): + """ + Test the set_value function when passing an invalid hive + """ + with pytest.raises(CommandExecutionError): + reg.set_value( + hive="BADHIVE", + key=FAKE_KEY, + vname="fake_name", + vdata="fake_data", + ) + + +def test_set_value_open_create_failure(): + """ + Test the set_value function when there is a problem opening/creating + the key + """ + mock_error = MagicMock( + side_effect=win32api.error(123, "RegCreateKeyEx", "Unknown error") + ) + with patch("salt.utils.win_reg.win32api.RegCreateKeyEx", mock_error): + result = reg.set_value( + hive="HKLM", key=FAKE_KEY, vname="fake_name", vdata="fake_data" + ) + assert not result + + +def test_set_value_type_error(): + """ + Test the set_value function when the wrong type of data is passed + """ + mock_error = MagicMock(side_effect=TypeError("Mocked TypeError")) + with patch("salt.utils.win_reg.win32api.RegSetValueEx", mock_error): + assert not reg.set_value( + hive="HKLM", key=FAKE_KEY, vname="fake_name", vdata="fake_data" + ) + + +def 
test_set_value_system_error(): + """ + Test the set_value function when a SystemError occurs while setting the + value + """ + mock_error = MagicMock(side_effect=SystemError("Mocked SystemError")) + with patch("salt.utils.win_reg.win32api.RegSetValueEx", mock_error): + assert not reg.set_value( + hive="HKLM", key=FAKE_KEY, vname="fake_name", vdata="fake_data" + ) + + +def test_set_value_value_error(): + """ + Test the set_value function when a ValueError occurs while setting the + value + """ + mock_error = MagicMock(side_effect=ValueError("Mocked ValueError")) + with patch("salt.utils.win_reg.win32api.RegSetValueEx", mock_error): + assert not reg.set_value( + hive="HKLM", key=FAKE_KEY, vname="fake_name", vdata="fake_data" + ) + + +@pytest.mark.destructive_test +def test_delete_value(): + """ + Test the delete_value function + """ + try: + assert reg.set_value( + hive="HKLM", key=FAKE_KEY, vname="fake_name", vdata="fake_data" + ) + assert reg.delete_value(hive="HKLM", key=FAKE_KEY, vname="fake_name") + finally: + reg.delete_key_recursive(hive="HKLM", key=FAKE_KEY) + + +def test_delete_value_non_existing(): + """ + Test the delete_value function on non existing value + """ + mock_error = MagicMock( + side_effect=win32api.error(2, "RegOpenKeyEx", "Unknown error") + ) + with patch("salt.utils.win_reg.win32api.RegOpenKeyEx", mock_error): + result = reg.delete_value(hive="HKLM", key=FAKE_KEY, vname="fake_name") + assert result is None + + +def test_delete_value_invalid_hive(): + """ + Test the delete_value function when passing an invalid hive + """ + with pytest.raises(CommandExecutionError): + reg.delete_value(hive="BADHIVE", key=FAKE_KEY, vname="fake_name") + + +def test_delete_value_unknown_error(): + """ + Test the delete_value function when there is a problem opening the key + """ + mock_error = MagicMock( + side_effect=win32api.error(123, "RegOpenKeyEx", "Unknown error") + ) + with patch("salt.utils.win_reg.win32api.RegOpenKeyEx", mock_error): + with pytest.raises(win32api.error): + reg.delete_value( + hive="HKLM", + key=FAKE_KEY, + vname="fake_name", + ) + + +@pytest.mark.destructive_test +def test_delete_value_unicode(): + """ + Test the delete_value function on a unicode value + """ + try: + assert reg.set_value( + hive="HKLM", key=FAKE_KEY, vname="fake_unicode", vdata=UNICODE_VALUE + ) + assert reg.delete_value(hive="HKLM", key=FAKE_KEY, vname="fake_unicode") + finally: + reg.delete_key_recursive(hive="HKLM", key=FAKE_KEY) + + +@pytest.mark.destructive_test +def test_delete_value_unicode_vname(): + """ + Test the delete_value function on a unicode vname + """ + try: + assert reg.set_value( + hive="HKLM", key=FAKE_KEY, vname=UNICODE_KEY, vdata="junk data" + ) + assert reg.delete_value(hive="HKLM", key=FAKE_KEY, vname=UNICODE_KEY) + finally: + reg.delete_key_recursive(hive="HKLM", key=FAKE_KEY) + + +@pytest.mark.destructive_test +def test_delete_value_unicode_key(): + """ + Test the delete_value function on a unicode key + """ + try: + assert reg.set_value( + hive="HKLM", + key="\\".join([FAKE_KEY, UNICODE_KEY]), + vname="fake_name", + vdata="junk data", + ) + assert reg.delete_value( + hive="HKLM", + key="\\".join([FAKE_KEY, UNICODE_KEY]), + vname="fake_name", + ) + finally: + reg.delete_key_recursive(hive="HKLM", key=FAKE_KEY) + + +def test_delete_key_recursive_invalid_hive(): + """ + Test the delete_key_recursive function when passing an invalid hive + """ + with pytest.raises(CommandExecutionError): + reg.delete_key_recursive(hive="BADHIVE", key=FAKE_KEY) + + +def 
test_delete_key_recursive_key_not_found(): + """ + Test the delete_key_recursive function when the passed key to delete is + not found. + """ + assert not reg.key_exists(hive="HKLM", key=FAKE_KEY) + assert not reg.delete_key_recursive(hive="HKLM", key=FAKE_KEY) + + +def test_delete_key_recursive_too_close(): + """ + Test the delete_key_recursive function when the passed key to delete is + too close to root, such as + """ + mock_true = MagicMock(return_value=True) + with patch("salt.utils.win_reg.key_exists", mock_true): + assert not reg.delete_key_recursive(hive="HKLM", key="FAKE_KEY") + + +@pytest.mark.destructive_test +def test_delete_key_recursive(): + """ + Test the delete_key_recursive function + """ + try: + assert reg.set_value( + hive="HKLM", key=FAKE_KEY, vname="fake_name", vdata="fake_value" + ) + expected = {"Deleted": ["\\".join(["HKLM", FAKE_KEY])], "Failed": []} + result = reg.delete_key_recursive(hive="HKLM", key=FAKE_KEY) + assert result == expected + finally: + reg.delete_key_recursive(hive="HKLM", key=FAKE_KEY) + + +@pytest.mark.destructive_test +def test_delete_key_recursive_failed_to_open_key(): + """ + Test the delete_key_recursive function on failure to open the key + """ + try: + assert reg.set_value( + hive="HKLM", key=FAKE_KEY, vname="fake_name", vdata="fake_value" + ) + expected = { + "Deleted": [], + "Failed": ["\\".join(["HKLM", FAKE_KEY]) + " Failed to connect to key"], + } + mock_true = MagicMock(return_value=True) + mock_error = MagicMock( + side_effect=[ + 1, + win32api.error(3, "RegOpenKeyEx", "Failed to connect to key"), + ] + ) + with patch("salt.utils.win_reg.key_exists", mock_true), patch( + "salt.utils.win_reg.win32api.RegOpenKeyEx", mock_error + ): + result = reg.delete_key_recursive(hive="HKLM", key=FAKE_KEY) + assert result == expected + finally: + reg.delete_key_recursive(hive="HKLM", key=FAKE_KEY) + + +@pytest.mark.destructive_test +def test_delete_key_recursive_failed_to_delete(): + """ + Test the delete_key_recursive function on failure to delete a key + """ + try: + assert reg.set_value( + hive="HKLM", key=FAKE_KEY, vname="fake_name", vdata="fake_value" + ) + expected = { + "Deleted": [], + "Failed": ["\\".join(["HKLM", FAKE_KEY]) + " Unknown error"], + } + # pylint: disable=undefined-variable + mock_error = MagicMock(side_effect=WindowsError("Unknown error")) + # pylint: enable=undefined-variable + with patch("salt.utils.win_reg.win32api.RegDeleteKey", mock_error): + result = reg.delete_key_recursive(hive="HKLM", key=FAKE_KEY) + assert result == expected + finally: + reg.delete_key_recursive(hive="HKLM", key=FAKE_KEY) + + +@pytest.mark.destructive_test +def test_delete_key_recursive_unicode(): + """ + Test the delete_key_recursive function on value within a unicode key + """ + try: + assert reg.set_value( + hive="HKLM", + key="\\".join([FAKE_KEY, UNICODE_KEY]), + vname="fake_name", + vdata="fake_value", + ) + expected = { + "Deleted": ["\\".join(["HKLM", FAKE_KEY, UNICODE_KEY])], + "Failed": [], + } + result = reg.delete_key_recursive( + hive="HKLM", key="\\".join([FAKE_KEY, UNICODE_KEY]) + ) + assert result == expected + finally: + reg.delete_key_recursive(hive="HKLM", key=FAKE_KEY) diff --git a/tests/unit/modules/test_reg.py b/tests/unit/modules/test_reg.py deleted file mode 100644 index 3afe79af223..00000000000 --- a/tests/unit/modules/test_reg.py +++ /dev/null @@ -1,872 +0,0 @@ -import pytest -from saltfactories.utils import random_string - -import salt.modules.reg as reg -import salt.utils.stringutils -import salt.utils.win_reg -from 
salt.exceptions import CommandExecutionError -from tests.support.mixins import LoaderModuleMockMixin -from tests.support.mock import MagicMock, patch -from tests.support.unit import TestCase - -try: - import win32api - - HAS_WIN32 = True -except ImportError: - HAS_WIN32 = False - -UNICODE_KEY = "Unicode Key \N{TRADE MARK SIGN}" -UNICODE_VALUE = ( - "Unicode Value \N{COPYRIGHT SIGN},\N{TRADE MARK SIGN},\N{REGISTERED SIGN}" -) -FAKE_KEY = "SOFTWARE\\{}".format(random_string("SaltTesting-", lowercase=False)) - - -@pytest.mark.skipif(not HAS_WIN32, reason="Tests require win32 libraries") -class WinFunctionsTestCase(TestCase, LoaderModuleMockMixin): - """ - Test cases for salt.modules.reg - """ - - def setup_loader_modules(self): - return { - reg: { - "__utils__": { - "reg.delete_value": salt.utils.win_reg.delete_value, - "reg.delete_key_recursive": salt.utils.win_reg.delete_key_recursive, - "reg.key_exists": salt.utils.win_reg.key_exists, - "reg.list_keys": salt.utils.win_reg.list_keys, - "reg.list_values": salt.utils.win_reg.list_values, - "reg.read_value": salt.utils.win_reg.read_value, - "reg.set_value": salt.utils.win_reg.set_value, - "reg.value_exists": salt.utils.win_reg.value_exists, - } - } - } - - def test_key_exists_existing(self): - """ - Tests the key_exists function using a well known registry key - """ - self.assertTrue(reg.key_exists(hive="HKLM", key="SOFTWARE\\Microsoft")) - - def test_key_exists_non_existing(self): - """ - Tests the key_exists function using a non existing registry key - """ - self.assertFalse(reg.key_exists(hive="HKLM", key=FAKE_KEY)) - - def test_key_exists_invalid_hive(self): - """ - Tests the key_exists function using an invalid hive - """ - self.assertRaises( - CommandExecutionError, - reg.key_exists, - hive="BADHIVE", - key="SOFTWARE\\Microsoft", - ) - - def test_key_exists_unknown_key_error(self): - """ - Tests the key_exists function with an unknown key error - """ - mock_error = MagicMock( - side_effect=win32api.error(123, "RegOpenKeyEx", "Unknown error") - ) - with patch("salt.utils.win_reg.win32api.RegOpenKeyEx", mock_error): - self.assertRaises( - win32api.error, reg.key_exists, hive="HKLM", key="SOFTWARE\\Microsoft" - ) - - def test_value_exists_existing(self): - """ - Tests the value_exists function using a well known registry key - """ - self.assertTrue( - reg.value_exists( - hive="HKLM", - key="SOFTWARE\\Microsoft\\Windows\\CurrentVersion", - vname="CommonFilesDir", - ) - ) - - def test_value_exists_non_existing(self): - """ - Tests the value_exists function using a non existing registry key - """ - self.assertFalse( - reg.value_exists( - hive="HKLM", - key="SOFTWARE\\Microsoft\\Windows\\CurrentVersion", - vname="NonExistingValueName", - ) - ) - - def test_value_exists_invalid_hive(self): - """ - Tests the value_exists function using an invalid hive - """ - self.assertRaises( - CommandExecutionError, - reg.value_exists, - hive="BADHIVE", - key="SOFTWARE\\Microsoft\\Windows\\CurrentVersion", - vname="CommonFilesDir", - ) - - def test_value_exists_key_not_exist(self): - """ - Tests the value_exists function when the key does not exist - """ - mock_error = MagicMock( - side_effect=win32api.error(2, "RegOpenKeyEx", "Unknown error") - ) - with patch("salt.utils.win_reg.win32api.RegOpenKeyEx", mock_error): - self.assertFalse( - reg.value_exists( - hive="HKLM", - key="SOFTWARE\\Microsoft\\Windows\\CurrentVersion", - vname="CommonFilesDir", - ) - ) - - def test_value_exists_unknown_key_error(self): - """ - Tests the value_exists function with an 
unknown error when opening the - key - """ - mock_error = MagicMock( - side_effect=win32api.error(123, "RegOpenKeyEx", "Unknown error") - ) - with patch("salt.utils.win_reg.win32api.RegOpenKeyEx", mock_error): - self.assertRaises( - win32api.error, - reg.value_exists, - hive="HKLM", - key="SOFTWARE\\Microsoft\\Windows\\CurrentVersion", - vname="CommonFilesDir", - ) - - def test_value_exists_empty_default_value(self): - """ - Tests the value_exists function when querying the default value - """ - mock_error = MagicMock( - side_effect=win32api.error(2, "RegQueryValueEx", "Empty Value") - ) - with patch("salt.utils.win_reg.win32api.RegQueryValueEx", mock_error): - self.assertTrue( - reg.value_exists( - hive="HKLM", - key="SOFTWARE\\Microsoft\\Windows\\CurrentVersion", - vname=None, - ) - ) - - def test_value_exists_no_vname(self): - """ - Tests the value_exists function when the vname does not exist - """ - mock_error = MagicMock( - side_effect=win32api.error(123, "RegQueryValueEx", "Empty Value") - ) - with patch("salt.utils.win_reg.win32api.RegQueryValueEx", mock_error): - self.assertFalse( - reg.value_exists( - hive="HKLM", - key="SOFTWARE\\Microsoft\\Windows\\CurrentVersion", - vname="NonExistingValuePair", - ) - ) - - def test_list_keys_existing(self): - """ - Test the list_keys function using a well known registry key - """ - self.assertIn("Microsoft", reg.list_keys(hive="HKLM", key="SOFTWARE")) - - def test_list_keys_non_existing(self): - """ - Test the list_keys function using a non existing registry key - """ - expected = (False, "Cannot find key: HKLM\\{}".format(FAKE_KEY)) - self.assertEqual(reg.list_keys(hive="HKLM", key=FAKE_KEY), expected) - - def test_list_keys_invalid_hive(self): - """ - Test the list_keys function when passing an invalid hive - """ - self.assertRaises( - CommandExecutionError, - reg.list_keys, - hive="BADHIVE", - key="SOFTWARE\\Microsoft", - ) - - def test_list_keys_unknown_key_error(self): - """ - Tests the list_keys function with an unknown key error - """ - mock_error = MagicMock( - side_effect=win32api.error(123, "RegOpenKeyEx", "Unknown error") - ) - with patch("salt.utils.win_reg.win32api.RegOpenKeyEx", mock_error): - self.assertRaises( - win32api.error, reg.list_keys, hive="HKLM", key="SOFTWARE\\Microsoft" - ) - - def test_list_values_existing(self): - """ - Test the list_values function using a well known registry key - """ - values = reg.list_values( - hive="HKLM", key="SOFTWARE\\Microsoft\\Windows\\CurrentVersion" - ) - keys = [] - for value in values: - keys.append(value["vname"]) - self.assertIn("ProgramFilesDir", keys) - - def test_list_values_non_existing(self): - """ - Test the list_values function using a non existing registry key - """ - expected = (False, "Cannot find key: HKLM\\{}".format(FAKE_KEY)) - self.assertEqual(reg.list_values(hive="HKLM", key=FAKE_KEY), expected) - - def test_list_values_invalid_hive(self): - """ - Test the list_values function when passing an invalid hive - """ - self.assertRaises( - CommandExecutionError, - reg.list_values, - hive="BADHIVE", - key="SOFTWARE\\Microsoft", - ) - - def test_list_values_unknown_key_error(self): - """ - Tests the list_values function with an unknown key error - """ - mock_error = MagicMock( - side_effect=win32api.error(123, "RegOpenKeyEx", "Unknown error") - ) - with patch("salt.utils.win_reg.win32api.RegOpenKeyEx", mock_error): - self.assertRaises( - win32api.error, reg.list_values, hive="HKLM", key="SOFTWARE\\Microsoft" - ) - - def test_read_value_existing(self): - """ - Test the 
read_value function using a well known registry value - """ - ret = reg.read_value( - hive="HKLM", - key="SOFTWARE\\Microsoft\\Windows\\CurrentVersion", - vname="ProgramFilesPath", - ) - self.assertEqual(ret["vdata"], "%ProgramFiles%") - - def test_read_value_default(self): - """ - Test the read_value function reading the default value using a well - known registry key - """ - ret = reg.read_value( - hive="HKLM", key="SOFTWARE\\Microsoft\\Windows\\CurrentVersion" - ) - self.assertEqual(ret["vdata"], "(value not set)") - - def test_read_value_non_existing(self): - """ - Test the read_value function using a non existing value pair - """ - expected = { - "comment": ( - "Cannot find fake_name in HKLM\\SOFTWARE\\Microsoft\\" - "Windows\\CurrentVersion" - ), - "vdata": None, - "vname": "fake_name", - "success": False, - "hive": "HKLM", - "key": "SOFTWARE\\Microsoft\\Windows\\CurrentVersion", - } - self.assertDictEqual( - reg.read_value( - hive="HKLM", - key="SOFTWARE\\Microsoft\\Windows\\CurrentVersion", - vname="fake_name", - ), - expected, - ) - - def test_read_value_non_existing_key(self): - """ - Test the read_value function using a non existing registry key - """ - expected = { - "comment": "Cannot find key: HKLM\\{}".format(FAKE_KEY), - "vdata": None, - "vname": "fake_name", - "success": False, - "hive": "HKLM", - "key": FAKE_KEY, - } - self.assertDictEqual( - reg.read_value(hive="HKLM", key=FAKE_KEY, vname="fake_name"), expected - ) - - def test_read_value_invalid_hive(self): - """ - Test the read_value function when passing an invalid hive - """ - self.assertRaises( - CommandExecutionError, - reg.read_value, - hive="BADHIVE", - key="SOFTWARE\\Microsoft", - vname="ProgramFilesPath", - ) - - def test_read_value_unknown_key_error(self): - """ - Tests the read_value function with an unknown key error - """ - mock_error = MagicMock( - side_effect=win32api.error(123, "RegOpenKeyEx", "Unknown error") - ) - with patch("salt.utils.win_reg.win32api.RegOpenKeyEx", mock_error): - self.assertRaises( - win32api.error, - reg.read_value, - hive="HKLM", - key="SOFTWARE\\Microsoft\\Windows\\CurrentVersion", - vname="ProgramFilesPath", - ) - - def test_read_value_unknown_value_error(self): - """ - Tests the read_value function with an unknown value error - """ - mock_error = MagicMock( - side_effect=win32api.error(123, "RegQueryValueEx", "Unknown error") - ) - with patch("salt.utils.win_reg.win32api.RegQueryValueEx", mock_error): - self.assertRaises( - win32api.error, - reg.read_value, - hive="HKLM", - key="SOFTWARE\\Microsoft\\Windows\\CurrentVersion", - vname="ProgramFilesPath", - ) - - @pytest.mark.destructive_test - def test_read_value_multi_sz_empty_list(self): - """ - An empty REG_MULTI_SZ value should return an empty list, not None - """ - try: - self.assertTrue( - reg.set_value( - hive="HKLM", - key=FAKE_KEY, - vname="empty_list", - vdata=[], - vtype="REG_MULTI_SZ", - ) - ) - expected = { - "hive": "HKLM", - "key": FAKE_KEY, - "success": True, - "vdata": [], - "vname": "empty_list", - "vtype": "REG_MULTI_SZ", - } - self.assertEqual( - reg.read_value( - hive="HKLM", - key=FAKE_KEY, - vname="empty_list", - ), - expected, - ) - finally: - reg.delete_key_recursive(hive="HKLM", key=FAKE_KEY) - - @pytest.mark.destructive_test - def test_set_value(self): - """ - Test the set_value function - """ - try: - self.assertTrue( - reg.set_value( - hive="HKLM", key=FAKE_KEY, vname="fake_name", vdata="fake_data" - ) - ) - expected = { - "hive": "HKLM", - "key": FAKE_KEY, - "success": True, - "vdata": "fake_data", - 
"vname": "fake_name", - "vtype": "REG_SZ", - } - self.assertEqual( - reg.read_value(hive="HKLM", key=FAKE_KEY, vname="fake_name"), expected - ) - finally: - reg.delete_key_recursive(hive="HKLM", key=FAKE_KEY) - - @pytest.mark.destructive_test - def test_set_value_default(self): - """ - Test the set_value function on the default value - """ - try: - self.assertTrue( - reg.set_value(hive="HKLM", key=FAKE_KEY, vdata="fake_default_data") - ) - expected = { - "hive": "HKLM", - "key": FAKE_KEY, - "success": True, - "vdata": "fake_default_data", - "vname": "(Default)", - "vtype": "REG_SZ", - } - self.assertEqual(reg.read_value(hive="HKLM", key=FAKE_KEY), expected) - finally: - reg.delete_key_recursive(hive="HKLM", key=FAKE_KEY) - - @pytest.mark.destructive_test - def test_set_value_unicode_key(self): - """ - Test the set_value function on a unicode key - """ - try: - self.assertTrue( - reg.set_value( - hive="HKLM", - key="\\".join([FAKE_KEY, UNICODE_KEY]), - vname="fake_name", - vdata="fake_value", - ) - ) - expected = { - "hive": "HKLM", - "key": "\\".join([FAKE_KEY, UNICODE_KEY]), - "success": True, - "vdata": "fake_value", - "vname": "fake_name", - "vtype": "REG_SZ", - } - self.assertEqual( - reg.read_value( - hive="HKLM", - key="\\".join([FAKE_KEY, UNICODE_KEY]), - vname="fake_name", - ), - expected, - ) - finally: - reg.delete_key_recursive(hive="HKLM", key=FAKE_KEY) - - @pytest.mark.destructive_test - def test_set_value_unicode_value(self): - """ - Test the set_value function on a unicode value - """ - try: - self.assertTrue( - reg.set_value( - hive="HKLM", key=FAKE_KEY, vname="fake_unicode", vdata=UNICODE_VALUE - ) - ) - expected = { - "hive": "HKLM", - "key": FAKE_KEY, - "success": True, - "vdata": UNICODE_VALUE, - "vname": "fake_unicode", - "vtype": "REG_SZ", - } - self.assertEqual( - reg.read_value(hive="HKLM", key=FAKE_KEY, vname="fake_unicode"), - expected, - ) - finally: - reg.delete_key_recursive(hive="HKLM", key=FAKE_KEY) - - @pytest.mark.destructive_test - def test_set_value_reg_dword(self): - """ - Test the set_value function on a REG_DWORD value - """ - try: - self.assertTrue( - reg.set_value( - hive="HKLM", - key=FAKE_KEY, - vname="dword_value", - vdata=123, - vtype="REG_DWORD", - ) - ) - expected = { - "hive": "HKLM", - "key": FAKE_KEY, - "success": True, - "vdata": 123, - "vname": "dword_value", - "vtype": "REG_DWORD", - } - self.assertEqual( - reg.read_value(hive="HKLM", key=FAKE_KEY, vname="dword_value"), expected - ) - finally: - reg.delete_key_recursive(hive="HKLM", key=FAKE_KEY) - - @pytest.mark.destructive_test - def test_set_value_reg_qword(self): - """ - Test the set_value function on a REG_QWORD value - """ - try: - self.assertTrue( - reg.set_value( - hive="HKLM", - key=FAKE_KEY, - vname="qword_value", - vdata=123, - vtype="REG_QWORD", - ) - ) - expected = { - "hive": "HKLM", - "key": FAKE_KEY, - "success": True, - "vdata": 123, - "vname": "qword_value", - "vtype": "REG_QWORD", - } - self.assertEqual( - reg.read_value(hive="HKLM", key=FAKE_KEY, vname="qword_value"), expected - ) - finally: - reg.delete_key_recursive(hive="HKLM", key=FAKE_KEY) - - def test_set_value_invalid_hive(self): - """ - Test the set_value function when passing an invalid hive - """ - self.assertRaises( - CommandExecutionError, - reg.set_value, - hive="BADHIVE", - key=FAKE_KEY, - vname="fake_name", - vdata="fake_data", - ) - - def test_set_value_open_create_failure(self): - """ - Test the set_value function when there is a problem opening/creating - the key - """ - mock_error = MagicMock( - 
side_effect=win32api.error(123, "RegCreateKeyEx", "Unknown error") - ) - with patch("salt.utils.win_reg.win32api.RegCreateKeyEx", mock_error): - self.assertFalse( - reg.set_value( - hive="HKLM", key=FAKE_KEY, vname="fake_name", vdata="fake_data" - ) - ) - - def test_set_value_type_error(self): - """ - Test the set_value function when the wrong type of data is passed - """ - mock_error = MagicMock(side_effect=TypeError("Mocked TypeError")) - with patch("salt.utils.win_reg.win32api.RegSetValueEx", mock_error): - self.assertFalse( - reg.set_value( - hive="HKLM", key=FAKE_KEY, vname="fake_name", vdata="fake_data" - ) - ) - - def test_set_value_system_error(self): - """ - Test the set_value function when a SystemError occurs while setting the - value - """ - mock_error = MagicMock(side_effect=SystemError("Mocked SystemError")) - with patch("salt.utils.win_reg.win32api.RegSetValueEx", mock_error): - self.assertFalse( - reg.set_value( - hive="HKLM", key=FAKE_KEY, vname="fake_name", vdata="fake_data" - ) - ) - - def test_set_value_value_error(self): - """ - Test the set_value function when a ValueError occurs while setting the - value - """ - mock_error = MagicMock(side_effect=ValueError("Mocked ValueError")) - with patch("salt.utils.win_reg.win32api.RegSetValueEx", mock_error): - self.assertFalse( - reg.set_value( - hive="HKLM", key=FAKE_KEY, vname="fake_name", vdata="fake_data" - ) - ) - - @pytest.mark.destructive_test - def test_delete_value(self): - """ - Test the delete_value function - """ - try: - self.assertTrue( - reg.set_value( - hive="HKLM", key=FAKE_KEY, vname="fake_name", vdata="fake_data" - ) - ) - self.assertTrue( - reg.delete_value(hive="HKLM", key=FAKE_KEY, vname="fake_name") - ) - finally: - reg.delete_key_recursive(hive="HKLM", key=FAKE_KEY) - - def test_delete_value_non_existing(self): - """ - Test the delete_value function on non existing value - """ - mock_error = MagicMock( - side_effect=win32api.error(2, "RegOpenKeyEx", "Unknown error") - ) - with patch("salt.utils.win_reg.win32api.RegOpenKeyEx", mock_error): - self.assertIsNone( - reg.delete_value(hive="HKLM", key=FAKE_KEY, vname="fake_name") - ) - - def test_delete_value_invalid_hive(self): - """ - Test the delete_value function when passing an invalid hive - """ - self.assertRaises( - CommandExecutionError, - reg.delete_value, - hive="BADHIVE", - key=FAKE_KEY, - vname="fake_name", - ) - - def test_delete_value_unknown_error(self): - """ - Test the delete_value function when there is a problem opening the key - """ - mock_error = MagicMock( - side_effect=win32api.error(123, "RegOpenKeyEx", "Unknown error") - ) - with patch("salt.utils.win_reg.win32api.RegOpenKeyEx", mock_error): - self.assertRaises( - win32api.error, - reg.delete_value, - hive="HKLM", - key=FAKE_KEY, - vname="fake_name", - ) - - @pytest.mark.destructive_test - def test_delete_value_unicode(self): - """ - Test the delete_value function on a unicode value - """ - try: - self.assertTrue( - reg.set_value( - hive="HKLM", key=FAKE_KEY, vname="fake_unicode", vdata=UNICODE_VALUE - ) - ) - self.assertTrue( - reg.delete_value(hive="HKLM", key=FAKE_KEY, vname="fake_unicode") - ) - finally: - reg.delete_key_recursive(hive="HKLM", key=FAKE_KEY) - - @pytest.mark.destructive_test - def test_delete_value_unicode_vname(self): - """ - Test the delete_value function on a unicode vname - """ - try: - self.assertTrue( - reg.set_value( - hive="HKLM", key=FAKE_KEY, vname=UNICODE_KEY, vdata="junk data" - ) - ) - self.assertTrue( - reg.delete_value(hive="HKLM", key=FAKE_KEY, 
vname=UNICODE_KEY) - ) - finally: - reg.delete_key_recursive(hive="HKLM", key=FAKE_KEY) - - @pytest.mark.destructive_test - def test_delete_value_unicode_key(self): - """ - Test the delete_value function on a unicode key - """ - try: - self.assertTrue( - reg.set_value( - hive="HKLM", - key="\\".join([FAKE_KEY, UNICODE_KEY]), - vname="fake_name", - vdata="junk data", - ) - ) - self.assertTrue( - reg.delete_value( - hive="HKLM", - key="\\".join([FAKE_KEY, UNICODE_KEY]), - vname="fake_name", - ) - ) - finally: - reg.delete_key_recursive(hive="HKLM", key=FAKE_KEY) - - def test_delete_key_recursive_invalid_hive(self): - """ - Test the delete_key_recursive function when passing an invalid hive - """ - self.assertRaises( - CommandExecutionError, - reg.delete_key_recursive, - hive="BADHIVE", - key=FAKE_KEY, - ) - - def test_delete_key_recursive_key_not_found(self): - """ - Test the delete_key_recursive function when the passed key to delete is - not found. - """ - self.assertFalse(reg.key_exists(hive="HKLM", key=FAKE_KEY)) - self.assertFalse(reg.delete_key_recursive(hive="HKLM", key=FAKE_KEY)) - - def test_delete_key_recursive_too_close(self): - """ - Test the delete_key_recursive function when the passed key to delete is - too close to root, such as - """ - mock_true = MagicMock(return_value=True) - with patch("salt.utils.win_reg.key_exists", mock_true): - self.assertFalse(reg.delete_key_recursive(hive="HKLM", key="FAKE_KEY")) - - @pytest.mark.destructive_test - def test_delete_key_recursive(self): - """ - Test the delete_key_recursive function - """ - try: - self.assertTrue( - reg.set_value( - hive="HKLM", key=FAKE_KEY, vname="fake_name", vdata="fake_value" - ) - ) - expected = {"Deleted": ["\\".join(["HKLM", FAKE_KEY])], "Failed": []} - self.assertDictEqual( - reg.delete_key_recursive(hive="HKLM", key=FAKE_KEY), expected - ) - finally: - reg.delete_key_recursive(hive="HKLM", key=FAKE_KEY) - - @pytest.mark.destructive_test - def test_delete_key_recursive_failed_to_open_key(self): - """ - Test the delete_key_recursive function on failure to open the key - """ - try: - self.assertTrue( - reg.set_value( - hive="HKLM", key=FAKE_KEY, vname="fake_name", vdata="fake_value" - ) - ) - expected = { - "Deleted": [], - "Failed": ["\\".join(["HKLM", FAKE_KEY]) + " Failed to connect to key"], - } - mock_true = MagicMock(return_value=True) - mock_error = MagicMock( - side_effect=[ - 1, - win32api.error(3, "RegOpenKeyEx", "Failed to connect to key"), - ] - ) - with patch("salt.utils.win_reg.key_exists", mock_true), patch( - "salt.utils.win_reg.win32api.RegOpenKeyEx", mock_error - ): - self.assertDictEqual( - reg.delete_key_recursive(hive="HKLM", key=FAKE_KEY), expected - ) - finally: - reg.delete_key_recursive(hive="HKLM", key=FAKE_KEY) - - @pytest.mark.destructive_test - def test_delete_key_recursive_failed_to_delete(self): - """ - Test the delete_key_recursive function on failure to delete a key - """ - try: - self.assertTrue( - reg.set_value( - hive="HKLM", key=FAKE_KEY, vname="fake_name", vdata="fake_value" - ) - ) - expected = { - "Deleted": [], - "Failed": ["\\".join(["HKLM", FAKE_KEY]) + " Unknown error"], - } - # pylint: disable=undefined-variable - mock_error = MagicMock(side_effect=WindowsError("Unknown error")) - # pylint: enable=undefined-variable - with patch("salt.utils.win_reg.win32api.RegDeleteKey", mock_error): - self.assertDictEqual( - reg.delete_key_recursive(hive="HKLM", key=FAKE_KEY), expected - ) - finally: - reg.delete_key_recursive(hive="HKLM", key=FAKE_KEY) - - 
@pytest.mark.destructive_test - def test_delete_key_recursive_unicode(self): - """ - Test the delete_key_recursive function on value within a unicode key - """ - try: - self.assertTrue( - reg.set_value( - hive="HKLM", - key="\\".join([FAKE_KEY, UNICODE_KEY]), - vname="fake_name", - vdata="fake_value", - ) - ) - expected = { - "Deleted": ["\\".join(["HKLM", FAKE_KEY, UNICODE_KEY])], - "Failed": [], - } - self.assertDictEqual( - reg.delete_key_recursive( - hive="HKLM", key="\\".join([FAKE_KEY, UNICODE_KEY]) - ), - expected, - ) - finally: - reg.delete_key_recursive(hive="HKLM", key=FAKE_KEY) From 5867b2d1df4cc03681c95fb206b9540fbdeaf64c Mon Sep 17 00:00:00 2001 From: cmcmarrow Date: Wed, 3 May 2023 13:21:47 -0500 Subject: [PATCH 097/121] add errors and docs --- salt/states/file.py | 30 ++++++++++++++++++++++++++++++ 1 file changed, 30 insertions(+) diff --git a/salt/states/file.py b/salt/states/file.py index a7b9e896234..05ba021dddb 100644 --- a/salt/states/file.py +++ b/salt/states/file.py @@ -2414,6 +2414,8 @@ def managed( - source: https://launchpad.net/tomdroid/beta/0.7.3/+download/tomdroid-src-0.7.3.tar.gz - source_hash: md5=79eef25f9b0b2c642c62b7f737d4f53f + source_hash is ignored if the file hosted is not on a HTTP or FTP server. + Known issues: If the remote server URL has the hash file as an apparent sub-directory of the source file, the module will discover that it @@ -2946,6 +2948,13 @@ def managed( "'contents_grains' is permitted", ) + if ( + source is not None + and not source.lower().startswith(("http:", "ftp:")) + and source_hash + ): + return _error(ret, "source_hash can only be used with 'http' or 'ftp'") + # If no source is specified, set replace to False, as there is nothing # with which to replace the file. if not source and contents_count == 0 and replace: @@ -5998,6 +6007,13 @@ def blockreplace( if not name: return _error(ret, "Must provide name to file.blockreplace") + if ( + source is not None + and not source.lower().startswith(("http:", "ftp:")) + and source_hash + ): + return _error(ret, "source_hash can only be used with 'http' or 'ftp'") + if sources is None: sources = [] if source_hashes is None: @@ -6434,6 +6450,13 @@ def append( if not name: return _error(ret, "Must provide name to file.append") + if ( + source is not None + and not source.lower().startswith(("http:", "ftp:")) + and source_hash + ): + return _error(ret, "source_hash can only be used with 'http' or 'ftp'") + name = os.path.expanduser(name) if sources is None: @@ -6718,6 +6741,13 @@ def prepend( if not name: return _error(ret, "Must provide name to file.prepend") + if ( + source is not None + and not source.lower().startswith(("http:", "ftp:")) + and source_hash + ): + return _error(ret, "source_hash can only be used with 'http' or 'ftp'") + if sources is None: sources = [] From f122899f2b376c3fa1f9b3394129c42bd9a1cc22 Mon Sep 17 00:00:00 2001 From: cmcmarrow Date: Wed, 3 May 2023 14:53:53 -0500 Subject: [PATCH 098/121] got to log warning --- salt/states/file.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/salt/states/file.py b/salt/states/file.py index 05ba021dddb..e3384b846d6 100644 --- a/salt/states/file.py +++ b/salt/states/file.py @@ -2953,7 +2953,7 @@ def managed( and not source.lower().startswith(("http:", "ftp:")) and source_hash ): - return _error(ret, "source_hash can only be used with 'http' or 'ftp'") + log.warning("source_hash is only used with 'http' or 'ftp'") # If no source is specified, set replace to False, as there is nothing # with which to 
replace the file. @@ -6012,7 +6012,7 @@ def blockreplace( and not source.lower().startswith(("http:", "ftp:")) and source_hash ): - return _error(ret, "source_hash can only be used with 'http' or 'ftp'") + log.warning("source_hash is only used with 'http' or 'ftp'") if sources is None: sources = [] @@ -6455,7 +6455,7 @@ def append( and not source.lower().startswith(("http:", "ftp:")) and source_hash ): - return _error(ret, "source_hash can only be used with 'http' or 'ftp'") + log.warning("source_hash is only used with 'http' or 'ftp'") name = os.path.expanduser(name) @@ -6746,7 +6746,7 @@ def prepend( and not source.lower().startswith(("http:", "ftp:")) and source_hash ): - return _error(ret, "source_hash can only be used with 'http' or 'ftp'") + log.warning("source_hash is only used with 'http' or 'ftp'") if sources is None: sources = [] From 370d6e4f0f4b896aabcc755bd4ddf14c61da1b44 Mon Sep 17 00:00:00 2001 From: cmcmarrow Date: Wed, 3 May 2023 15:12:26 -0500 Subject: [PATCH 099/121] add tests and add support for an iter --- salt/states/file.py | 33 ++++++++----------- .../pytests/unit/states/file/test_managed.py | 28 ++++++++++++++++ 2 files changed, 41 insertions(+), 20 deletions(-) diff --git a/salt/states/file.py b/salt/states/file.py index e3384b846d6..e749e4970dd 100644 --- a/salt/states/file.py +++ b/salt/states/file.py @@ -334,6 +334,15 @@ __func_alias__ = { } +def _http_ftp_check(source): + """ + Check if source or sources + """ + if isinstance(source, str): + return source.lower().startswith(("http:", "ftp:")) + return all([s.lower().startswith(("http:", "ftp:")) for s in source]) + + def _get_accumulator_filepath(): """ Return accumulator data path. @@ -2948,11 +2957,7 @@ def managed( "'contents_grains' is permitted", ) - if ( - source is not None - and not source.lower().startswith(("http:", "ftp:")) - and source_hash - ): + if source is not None and not _http_ftp_check(source) and source_hash: log.warning("source_hash is only used with 'http' or 'ftp'") # If no source is specified, set replace to False, as there is nothing @@ -6007,11 +6012,7 @@ def blockreplace( if not name: return _error(ret, "Must provide name to file.blockreplace") - if ( - source is not None - and not source.lower().startswith(("http:", "ftp:")) - and source_hash - ): + if source is not None and not _http_ftp_check(source) and source_hash: log.warning("source_hash is only used with 'http' or 'ftp'") if sources is None: @@ -6450,11 +6451,7 @@ def append( if not name: return _error(ret, "Must provide name to file.append") - if ( - source is not None - and not source.lower().startswith(("http:", "ftp:")) - and source_hash - ): + if source is not None and not _http_ftp_check(source) and source_hash: log.warning("source_hash is only used with 'http' or 'ftp'") name = os.path.expanduser(name) @@ -6741,11 +6738,7 @@ def prepend( if not name: return _error(ret, "Must provide name to file.prepend") - if ( - source is not None - and not source.lower().startswith(("http:", "ftp:")) - and source_hash - ): + if source is not None and not _http_ftp_check(source) and source_hash: log.warning("source_hash is only used with 'http' or 'ftp'") if sources is None: diff --git a/tests/pytests/unit/states/file/test_managed.py b/tests/pytests/unit/states/file/test_managed.py index 0f5da2dac27..a55f7c2b0c6 100644 --- a/tests/pytests/unit/states/file/test_managed.py +++ b/tests/pytests/unit/states/file/test_managed.py @@ -405,3 +405,31 @@ def test_managed_test_mode_user_group_not_present(): ) assert ret["result"] is not False 
assert "is not available" not in ret["comment"] + + +def test_http_ftp_check_pass(): + assert filestate._http_ftp_check("http://@$@dead_link@$@/src.tar.gz") is True + assert filestate._http_ftp_check("ftp://@$@dead_link@$@/src.tar.gz") is True + + +def test_http_ftp_check_fail(): + assert filestate._http_ftp_check("salt://@$@dead_link@$@/src.tar.gz") is False + assert filestate._http_ftp_check("https://@$@dead_link@$@/src.tar.gz") is False + + +def test_http_ftp_check_list_pass(): + assert ( + filestate._http_ftp_check( + ["http://@$@dead_link@$@/src.tar.gz", "ftp://@$@dead_link@$@/src.tar.gz"] + ) + is True + ) + + +def test_http_ftp_check_list_fail(): + assert ( + filestate._http_ftp_check( + ["salt://@$@dead_link@$@/src.tar.gz", "https://@$@dead_link@$@/src.tar.gz"] + ) + is False + ) From a303fdded7622f0f59ea975bacd262018b529c58 Mon Sep 17 00:00:00 2001 From: cmcmarrow Date: Wed, 3 May 2023 15:15:44 -0500 Subject: [PATCH 100/121] fix doc --- salt/states/file.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/salt/states/file.py b/salt/states/file.py index e749e4970dd..d663fd7aae1 100644 --- a/salt/states/file.py +++ b/salt/states/file.py @@ -336,7 +336,7 @@ __func_alias__ = { def _http_ftp_check(source): """ - Check if source or sources + Check if source or sources is http or ftp. """ if isinstance(source, str): return source.lower().startswith(("http:", "ftp:")) From 841f5ca18aeed3083825c7f60334cda8c8d38a96 Mon Sep 17 00:00:00 2001 From: cmcmarrow Date: Wed, 3 May 2023 21:40:21 -0500 Subject: [PATCH 101/121] change log --- changelog/63810.fixed.md | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog/63810.fixed.md diff --git a/changelog/63810.fixed.md b/changelog/63810.fixed.md new file mode 100644 index 00000000000..4701465c0ef --- /dev/null +++ b/changelog/63810.fixed.md @@ -0,0 +1 @@ +Updated source_hash documentation and add it log warning when source_hash is used with non http or ftp source. From 42d3eaa6e6ae94189f9eb222503c6a660a032461 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Thu, 4 May 2023 07:44:03 +0100 Subject: [PATCH 102/121] HTTPS is also valid. Switch to parametrized test. Signed-off-by: Pedro Algarvio --- changelog/63810.fixed.md | 2 +- salt/states/file.py | 16 +++--- .../pytests/unit/states/file/test_managed.py | 50 +++++++++---------- 3 files changed, 33 insertions(+), 35 deletions(-) diff --git a/changelog/63810.fixed.md b/changelog/63810.fixed.md index 4701465c0ef..241907dbc00 100644 --- a/changelog/63810.fixed.md +++ b/changelog/63810.fixed.md @@ -1 +1 @@ -Updated source_hash documentation and add it log warning when source_hash is used with non http or ftp source. +Updated `source_hash` documentation and added a log warning when `source_hash` is used with a source other than `http`, `https` and `ftp`. diff --git a/salt/states/file.py b/salt/states/file.py index d663fd7aae1..f481537d529 100644 --- a/salt/states/file.py +++ b/salt/states/file.py @@ -336,11 +336,11 @@ __func_alias__ = { def _http_ftp_check(source): """ - Check if source or sources is http or ftp. + Check if source or sources is http, https or ftp. 
""" if isinstance(source, str): - return source.lower().startswith(("http:", "ftp:")) - return all([s.lower().startswith(("http:", "ftp:")) for s in source]) + return source.lower().startswith(("http:", "https:", "ftp:")) + return any([s.lower().startswith(("http:", "https:", "ftp:")) for s in source]) def _get_accumulator_filepath(): @@ -2423,7 +2423,7 @@ def managed( - source: https://launchpad.net/tomdroid/beta/0.7.3/+download/tomdroid-src-0.7.3.tar.gz - source_hash: md5=79eef25f9b0b2c642c62b7f737d4f53f - source_hash is ignored if the file hosted is not on a HTTP or FTP server. + source_hash is ignored if the file hosted is not on a HTTP, HTTPS or FTP server. Known issues: If the remote server URL has the hash file as an apparent @@ -2958,7 +2958,7 @@ def managed( ) if source is not None and not _http_ftp_check(source) and source_hash: - log.warning("source_hash is only used with 'http' or 'ftp'") + log.warning("source_hash is only used with 'http', 'https' or 'ftp'") # If no source is specified, set replace to False, as there is nothing # with which to replace the file. @@ -6013,7 +6013,7 @@ def blockreplace( return _error(ret, "Must provide name to file.blockreplace") if source is not None and not _http_ftp_check(source) and source_hash: - log.warning("source_hash is only used with 'http' or 'ftp'") + log.warning("source_hash is only used with 'http', 'https' or 'ftp'") if sources is None: sources = [] @@ -6452,7 +6452,7 @@ def append( return _error(ret, "Must provide name to file.append") if source is not None and not _http_ftp_check(source) and source_hash: - log.warning("source_hash is only used with 'http' or 'ftp'") + log.warning("source_hash is only used with 'http', 'https' or 'ftp'") name = os.path.expanduser(name) @@ -6739,7 +6739,7 @@ def prepend( return _error(ret, "Must provide name to file.prepend") if source is not None and not _http_ftp_check(source) and source_hash: - log.warning("source_hash is only used with 'http' or 'ftp'") + log.warning("source_hash is only used with 'http', 'https' or 'ftp'") if sources is None: sources = [] diff --git a/tests/pytests/unit/states/file/test_managed.py b/tests/pytests/unit/states/file/test_managed.py index a55f7c2b0c6..4a826c26869 100644 --- a/tests/pytests/unit/states/file/test_managed.py +++ b/tests/pytests/unit/states/file/test_managed.py @@ -407,29 +407,27 @@ def test_managed_test_mode_user_group_not_present(): assert "is not available" not in ret["comment"] -def test_http_ftp_check_pass(): - assert filestate._http_ftp_check("http://@$@dead_link@$@/src.tar.gz") is True - assert filestate._http_ftp_check("ftp://@$@dead_link@$@/src.tar.gz") is True - - -def test_http_ftp_check_fail(): - assert filestate._http_ftp_check("salt://@$@dead_link@$@/src.tar.gz") is False - assert filestate._http_ftp_check("https://@$@dead_link@$@/src.tar.gz") is False - - -def test_http_ftp_check_list_pass(): - assert ( - filestate._http_ftp_check( - ["http://@$@dead_link@$@/src.tar.gz", "ftp://@$@dead_link@$@/src.tar.gz"] - ) - is True - ) - - -def test_http_ftp_check_list_fail(): - assert ( - filestate._http_ftp_check( - ["salt://@$@dead_link@$@/src.tar.gz", "https://@$@dead_link@$@/src.tar.gz"] - ) - is False - ) +@pytest.mark.parametrize( + "source,check_result", + [ + ("http://@$@dead_link@$@/src.tar.gz", True), + ("https://@$@dead_link@$@/src.tar.gz", True), + ("ftp://@$@dead_link@$@/src.tar.gz", True), + ("salt://@$@dead_link@$@/src.tar.gz", False), + ("file://@$@dead_link@$@/src.tar.gz", False), + ( + ["http://@$@dead_link@$@/src.tar.gz", 
"https://@$@dead_link@$@/src.tar.gz"], + True, + ), + ( + ["salt://@$@dead_link@$@/src.tar.gz", "file://@$@dead_link@$@/src.tar.gz"], + False, + ), + ( + ["http://@$@dead_link@$@/src.tar.gz", "file://@$@dead_link@$@/src.tar.gz"], + True, + ), + ], +) +def test_sources_source_hash_check(source, check_result): + assert filestate._http_ftp_check(source) is check_result From 3584921fc663e74a7b82ad34968bfaa46f4a8898 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Tue, 2 May 2023 22:02:12 +0100 Subject: [PATCH 103/121] Add a `.pth` to the onedir env to ensure packages in extras are importable Signed-off-by: Pedro Algarvio --- changelog/64192.fixed.md | 1 + tools/pkg/build.py | 8 ++++++++ 2 files changed, 9 insertions(+) create mode 100644 changelog/64192.fixed.md diff --git a/changelog/64192.fixed.md b/changelog/64192.fixed.md new file mode 100644 index 00000000000..00f9209e1c3 --- /dev/null +++ b/changelog/64192.fixed.md @@ -0,0 +1 @@ +Add a `.pth` to the onedir env to ensure packages in extras are importable. diff --git a/tools/pkg/build.py b/tools/pkg/build.py index b3f92ef615c..119df65241e 100644 --- a/tools/pkg/build.py +++ b/tools/pkg/build.py @@ -538,6 +538,14 @@ def salt_onedir( shutil.rmtree(onedir_env / "etc", onerror=errfn) shutil.rmtree(onedir_env / "Library", onerror=errfn) + # TODO: Fix hardcoded 3.10 + dest_path = onedir_env / "lib" / "python3.10" / "site-packages" / "extras.pth" + ctx.info(f"Writing '{dest_path}' ...") + dest_path.write_text( + 'import sys, pathlib; extras = str(pathlib.Path(__file__).parent.parent.parent / "extras-{}.{}".format(*sys.version_info)); ' + "extras not in sys.path and sys.path.insert(0, extras)\n" + ) + def _check_pkg_build_files_exist(ctx: Context, **kwargs): for name, path in kwargs.items(): From a59929ad20f1fe7dca5096ca4f6bdd0b128d5209 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Tue, 2 May 2023 22:07:00 +0100 Subject: [PATCH 104/121] Be sure to run the installed script to confirm it works Signed-off-by: Pedro Algarvio --- pkg/tests/conftest.py | 13 +++++--- pkg/tests/integration/test_pip.py | 53 ++++++++++++------------------- 2 files changed, 29 insertions(+), 37 deletions(-) diff --git a/pkg/tests/conftest.py b/pkg/tests/conftest.py index 9e6ea6fad51..89f54fe2f72 100644 --- a/pkg/tests/conftest.py +++ b/pkg/tests/conftest.py @@ -474,12 +474,17 @@ def extras_pypath(): extras_dir = "extras-{}.{}".format(*sys.version_info) if platform.is_windows(): return pathlib.Path( - os.getenv("ProgramFiles"), "Salt Project", "Salt", extras_dir, "bin" + os.getenv("ProgramFiles"), "Salt Project", "Salt", extras_dir ) elif platform.is_darwin(): - return pathlib.Path(f"/opt", "salt", extras_dir, "bin") + return pathlib.Path("/opt", "salt", extras_dir) else: - return pathlib.Path(f"/opt", "saltstack", "salt", extras_dir, "bin") + return pathlib.Path("/opt", "saltstack", "salt", extras_dir) + + +@pytest.fixture(scope="module") +def extras_pypath_bin(extras_pypath): + return extras_pypath / "bin" @pytest.fixture(scope="module") @@ -487,7 +492,7 @@ def salt_api(salt_master, install_salt, extras_pypath): """ start up and configure salt_api """ - shutil.rmtree(str(extras_pypath.parent), ignore_errors=True) + shutil.rmtree(str(extras_pypath), ignore_errors=True) start_timeout = None if platform.is_windows() and install_salt.singlebin: start_timeout = 240 diff --git a/pkg/tests/integration/test_pip.py b/pkg/tests/integration/test_pip.py index ea9e6a81b84..7b4ccc0d04f 100644 --- a/pkg/tests/integration/test_pip.py +++ b/pkg/tests/integration/test_pip.py @@ 
-13,25 +13,21 @@ def pypath():
     if platform.is_windows():
         return pathlib.Path(os.getenv("ProgramFiles"), "Salt Project", "Salt")
     elif platform.is_darwin():
-        return pathlib.Path(f"{os.sep}opt", "salt", "bin")
+        return pathlib.Path("/opt", "salt", "bin")
     else:
-        return pathlib.Path(f"{os.sep}opt", "saltstack", "salt", "bin")
+        return pathlib.Path("/opt", "saltstack", "salt", "bin")
 
 
 @pytest.fixture(autouse=True)
-def wipe_pydeps(install_salt, extras_pypath):
+def wipe_pydeps(shell, install_salt, extras_pypath):
     try:
         yield
     finally:
         # Note, uninstalling anything with an associated script will leave the script.
         # This is due to a bug in pip.
         for dep in ["pep8", "PyGithub"]:
-            subprocess.run(
-                install_salt.binary_paths["pip"] + ["uninstall", "-y", dep],
-                stdout=subprocess.PIPE,
-                stderr=subprocess.PIPE,
-                check=False,
-                universal_newlines=True,
+            shell.run(
+                *(install_salt.binary_paths["pip"] + ["uninstall", "-y", dep]),
             )
         shutil.rmtree(extras_pypath, ignore_errors=True)
 
@@ -56,32 +52,24 @@ def test_pip_install(salt_call_cli):
     assert "The github execution module cannot be loaded" in use_lib.stderr
 
 
-def test_pip_install_extras(install_salt, extras_pypath):
+def test_pip_install_extras(shell, install_salt, extras_pypath_bin):
     """
     Test salt-pip installs into the correct directory
     """
     dep = "pep8"
-    extras_keyword = "extras"
+    extras_keyword = "extras-3"
     if platform.is_windows():
-        check_path = extras_pypath / f"{dep}.exe"
+        check_path = extras_pypath_bin / f"{dep}.exe"
     else:
-        check_path = extras_pypath / dep
+        check_path = extras_pypath_bin / dep
 
-    install_ret = subprocess.run(
-        install_salt.binary_paths["pip"] + ["install", dep],
-        stdout=subprocess.PIPE,
-        stderr=subprocess.PIPE,
-    )
+    install_ret = shell.run(*(install_salt.binary_paths["pip"] + ["install", dep]))
     assert install_ret.returncode == 0
 
-    ret = subprocess.run(
-        install_salt.binary_paths["pip"] + ["list", "--format=json"],
-        stdout=subprocess.PIPE,
-        stderr=subprocess.PIPE,
-    )
+    ret = shell.run(*(install_salt.binary_paths["pip"] + ["list", "--format=json"]))
     assert ret.returncode == 0
-    pkgs_installed = json.loads(ret.stdout.strip().decode())
-    for pkg in pkgs_installed:
+    assert ret.data  # We can parse the JSON output
+    for pkg in ret.data:
         if pkg["name"] == dep:
             break
     else:
         pytest.fail(
-            f"The {dep!r} package was not found installed. Packages Installed: {pkgs_installed}"
+            f"The {dep!r} package was not found installed. Packages Installed: {ret.data}"
         )
 
-    show_ret = subprocess.run(
-        install_salt.binary_paths["pip"] + ["show", dep],
-        stdout=subprocess.PIPE,
-        stderr=subprocess.PIPE,
-    )
+    show_ret = shell.run(*(install_salt.binary_paths["pip"] + ["show", dep]))
     assert show_ret.returncode == 0
-    assert extras_keyword in show_ret.stdout.decode()
+    assert extras_keyword in show_ret.stdout
     assert check_path.exists()
 
+    ret = shell.run(str(check_path), "--version")
+    assert ret.returncode == 0
+
 
 def demote(user_uid, user_gid):
     def result():
@@ -108,8 +95,8 @@ def demote(user_uid, user_gid):
 
 
 @pytest.mark.skip_on_windows(reason="We can't easily demote users on Windows")
-def test_pip_non_root(install_salt, test_account, extras_pypath):
-    check_path = extras_pypath / "pep8"
+def test_pip_non_root(shell, install_salt, test_account, extras_pypath_bin):
+    check_path = extras_pypath_bin / "pep8"
     # We should be able to issue a --help without being root
     ret = subprocess.run(
        install_salt.binary_paths["salt"] + ["--help"],

From 9a94ec6d6abf3b76dbb318dfaa5a4afc2a39aa38 Mon Sep 17 00:00:00 2001
From: Pedro Algarvio
Date: Tue, 2 May 2023 22:07:47 +0100
Subject: [PATCH 105/121] Confirm salt extensions are discoverable by salt when
 `salt-pip` installed

Signed-off-by: Pedro Algarvio

---
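A quick note on the `shell` fixture these package-test commits switch to: it
comes from pytest-shell-utilities, and its result object decodes stdout for
you, exposing `.data` as the JSON-parsed stdout when that parse succeeds.
That is what lets the previous commit replace the explicit `json.loads()`
with a bare `assert ret.data  # We can parse the JSON output`. A minimal
sketch of the pattern, assuming the pytest-shell-utilities behavior just
described (the test name and asserted package are illustrative only):

    def test_list_packages(shell, install_salt):
        # shell.run() takes the argv as separate arguments, hence the
        # unpacking of install_salt.binary_paths["pip"] seen in the diffs.
        ret = shell.run(*(install_salt.binary_paths["pip"] + ["list", "--format=json"]))
        assert ret.returncode == 0
        # ret.data holds the JSON-decoded stdout; it is None/falsy when
        # stdout is not valid JSON, so this doubles as a JSON sanity check.
        assert ret.data
        names = [pkg["name"] for pkg in ret.data]
        assert "pip" in names
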
 pkg/tests/integration/test_pip.py         | 41 +++++++++++++++++++++++
 tests/pytests/functional/cli/test_salt.py | 36 ++++++++++++++++++--
 2 files changed, 75 insertions(+), 2 deletions(-)

diff --git a/pkg/tests/integration/test_pip.py b/pkg/tests/integration/test_pip.py
index 7b4ccc0d04f..7037763064e 100644
--- a/pkg/tests/integration/test_pip.py
+++ b/pkg/tests/integration/test_pip.py
@@ -143,3 +143,44 @@ def test_pip_non_root(shell, install_salt, test_account, extras_pypath_bin):
 
     assert check_path.exists()
     assert ret.returncode == 0, ret.stderr
+
+
+def test_pip_install_salt_extension_in_extras(install_salt, extras_pypath, shell):
+    """
+    Test salt-pip installs into the correct directory and the salt extension
+    is properly loaded.
+    """
+    dep = "salt-analytics-framework"
+    dep_version = "0.1.0"
+
+    install_ret = shell.run(
+        *(install_salt.binary_paths["pip"] + ["install", f"{dep}=={dep_version}"]),
+    )
+    assert install_ret.returncode == 0
+
+    ret = shell.run(
+        *(install_salt.binary_paths["pip"] + ["list", "--format=json"]),
+    )
+    assert ret.returncode == 0
+    pkgs_installed = json.loads(ret.stdout.strip())
+    for pkg in pkgs_installed:
+        if pkg["name"] == dep:
+            break
+    else:
+        pytest.fail(
+            f"The {dep!r} package was not found installed. Packages Installed: {pkgs_installed}"
+        )
+
+    show_ret = shell.run(
+        *(install_salt.binary_paths["pip"] + ["show", dep]),
+    )
+    assert show_ret.returncode == 0
+
+    assert extras_pypath.joinpath("saf").is_dir()
+
+    ret = shell.run(
+        *(install_salt.binary_paths["minion"] + ["--versions-report"]),
+    )
+    assert ret.returncode == 0
+    assert "Salt Extensions" in ret.stdout
+    assert f"{dep}: {dep_version}" in ret.stdout
diff --git a/tests/pytests/functional/cli/test_salt.py b/tests/pytests/functional/cli/test_salt.py
index cc7fa703859..8b9468ff068 100644
--- a/tests/pytests/functional/cli/test_salt.py
+++ b/tests/pytests/functional/cli/test_salt.py
@@ -1,8 +1,38 @@
+import logging
 import os
+import shutil
 
 import pytest
 
+import salt.utils.platform
 import salt.version
+from tests.conftest import CODE_DIR
+
+log = logging.getLogger(__name__)
+
+
+@pytest.fixture(autouse=True)
+def _install_salt_extension(shell):
+    if os.environ.get("ONEDIR_TESTRUN", "0") == "0":
+        # Nothing to install, but this generator fixture must still yield
+        # exactly once or pytest will error out.
+        yield
+        return
+
+    script_name = "salt-pip"
+    if salt.utils.platform.is_windows():
+        script_name += ".exe"
+
+    script_path = CODE_DIR / "artifacts" / "salt" / script_name
+    assert script_path.exists()
+    try:
+        ret = shell.run(str(script_path), "install", "salt-analytics-framework==0.1.0")
+        assert ret.returncode == 0
+        log.info(ret)
+        yield
+    finally:
+        ret = shell.run(str(script_path), "uninstall", "-y", "salt-analytics-framework")
+        log.info(ret)
+        shutil.rmtree(script_path.parent / "extras-3.10", ignore_errors=True)
 
 
 @pytest.mark.windows_whitelisted
@@ -52,5 +79,10 @@ def test_versions_report(salt_cli):
         assert key in expected_keys
         expected_keys.remove(key)
     assert not expected_keys
-    if os.environ.get("ONEDIR_TESTRUN", "0") == "1":
-        assert "relenv" in ret_dict["Dependency Versions"]
+    if os.environ.get("ONEDIR_TESTRUN", "0") == "0":
+        # Stop any more testing
+        return
+
+    assert "relenv" in ret_dict["Dependency Versions"]
+    assert "Salt Extensions" in ret_dict
+    assert "salt-analytics-framework" in ret_dict["Salt Extensions"]

From 370d6e4f0f4b896aabcc755bd4ddf14c61da1b44 Mon Sep 17 00:00:00 2001
From: Pedro Algarvio
Date: Wed, 3 May 2023 07:13:23 +0100
Subject: [PATCH 106/121] Only delete existing paths on macOS onedir

Signed-off-by: Pedro Algarvio

---
 tools/pkg/build.py | 7 ++++---
 1 file changed, 4 insertions(+), 3 deletions(-)

diff --git a/tools/pkg/build.py b/tools/pkg/build.py
index 119df65241e..8c7e43ae209 100644
--- a/tools/pkg/build.py
+++ b/tools/pkg/build.py
@@ -534,9 +534,10 @@ def salt_onedir(
 
             def errfn(fn, path, err):
                 ctx.info(f"Removing {path} failed: {err}")
 
-            shutil.rmtree(onedir_env / "opt", onerror=errfn)
-            shutil.rmtree(onedir_env / "etc", onerror=errfn)
-            shutil.rmtree(onedir_env / "Library", onerror=errfn)
+            for subdir in ("opt", "etc", "Library"):
+                path = onedir_env / subdir
+                if path.exists():
+                    shutil.rmtree(path, onerror=errfn)

From 5c1ac329c47a7412f0501708ceb238ce9bf22844 Mon Sep 17 00:00:00 2001
From: Pedro Algarvio
Date: Tue, 2 May 2023 22:28:09 +0100
Subject: [PATCH 107/121] Get `site-packages` from python

Signed-off-by: Pedro Algarvio

---
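A note on the `salt-extras.pth` shim this commit writes into `site-packages`
(first introduced as `extras.pth` in [PATCH 103/121]): CPython's `site.py`
executes any line of a `.pth` file that starts with `import`, which is why
the whole shim must fit on a single physical line. Expanded into ordinary
Python, the one-liner is roughly equivalent to the sketch below. The sketch
assumes, as the shim itself appears to, that the line is exec'd with
`site.py`'s globals in scope, so `__file__` points at
`<onedir>/lib/pythonX.Y/site.py` and three `.parent` hops land on the onedir
root; the variable names are illustrative only.

    import pathlib
    import sys

    # Three parents up from <onedir>/lib/pythonX.Y/site.py is the onedir root.
    onedir_root = pathlib.Path(__file__).parent.parent.parent

    # The directory salt-pip installs into, e.g. "extras-3.10" on Python 3.10.
    extras = str(onedir_root / "extras-{}.{}".format(*sys.version_info))

    # Prepend so packages installed into extras win over the bundled ones.
    if extras not in sys.path:
        sys.path.insert(0, extras)
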
 tools/pkg/build.py | 50 +++++++++++++++++++++++++++++++++++++++++-----
 1 file changed, 45 insertions(+), 5 deletions(-)

diff --git a/tools/pkg/build.py b/tools/pkg/build.py
index 8c7e43ae209..a4a9d2f7d50 100644
--- a/tools/pkg/build.py
+++ b/tools/pkg/build.py
@@ -4,6 +4,7 @@ These commands are used to build the salt onedir and system packages.
 # pylint: disable=resource-leakage,broad-except
 from __future__ import annotations
 
+import json
 import logging
 import os
 import pathlib
@@ -525,10 +526,28 @@ def salt_onedir(
             "-CICD",
             env=env,
         )
+        python_executable = str(onedir_env / "Scripts" / "python.exe")
+        ret = ctx.run(
+            python_executable,
+            "-c",
+            "import json, sys, site, pathlib; sys.stdout.write(json.dumps([pathlib.Path(p).as_posix() for p in site.getsitepackages()]))",
+            capture=True,
+        )
+        if ret.returncode:
+            ctx.error(f"Failed to get the path to `site-packages`: {ret}")
+            ctx.exit(1)
+        site_packages_json = json.loads(ret.stdout.strip().decode())
+        ctx.info(f"Discovered 'site-packages' paths: {site_packages_json}")
     else:
         env["RELENV_PIP_DIR"] = "1"
         pip_bin = onedir_env / "bin" / "pip3"
-        ctx.run(str(pip_bin), "install", str(salt_archive), env=env)
+        ctx.run(
+            str(pip_bin),
+            "install",
+            "--no-warn-script-location",
+            str(salt_archive),
+            env=env,
+        )
         if platform == "darwin":
 
             def errfn(fn, path, err):
                 ctx.info(f"Removing {path} failed: {err}")
@@ -539,10 +558,31 @@ def salt_onedir(
                 if path.exists():
                     shutil.rmtree(path, onerror=errfn)
 
-        # TODO: Fix hardcoded 3.10
-        dest_path = onedir_env / "lib" / "python3.10" / "site-packages" / "extras.pth"
-        ctx.info(f"Writing '{dest_path}' ...")
-        dest_path.write_text(
+        python_executable = str(onedir_env / "bin" / "python3")
+        ret = ctx.run(
+            python_executable,
+            "-c",
+            "import json, sys, site, pathlib; sys.stdout.write(json.dumps(site.getsitepackages()))",
+            capture=True,
+        )
+        if ret.returncode:
+            ctx.error(f"Failed to get the path to `site-packages`: {ret}")
+            ctx.exit(1)
+        site_packages_json = json.loads(ret.stdout.strip().decode())
+        ctx.info(f"Discovered 'site-packages' paths: {site_packages_json}")
+
+        site_packages: str
+        for site_packages_path in site_packages_json:
+            if "site-packages" in site_packages_path:
+                site_packages = site_packages_path
+                break
+        else:
+            ctx.error("Could not find a site-packages path with 'site-packages' in it?!")
+            ctx.exit(1)
+
+        pth_path = pathlib.Path(site_packages) / "salt-extras.pth"
+        ctx.info(f"Writing '{pth_path}' ...")
+        pth_path.write_text(
             'import sys, pathlib; extras = str(pathlib.Path(__file__).parent.parent.parent / "extras-{}.{}".format(*sys.version_info)); '
             "extras not in sys.path and sys.path.insert(0, extras)\n"
         )

From 310991a6d672f2ab906811f28064b780556a9370 Mon Sep 17 00:00:00 2001
From: Pedro Algarvio
Date: Wed, 3 May 2023 07:27:26 +0100
Subject: [PATCH 108/121] The `extras-<major>.<minor>` is a Salt thing. Create
 it at the right stage.
Signed-off-by: Pedro Algarvio --- tools/pkg/build.py | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/tools/pkg/build.py b/tools/pkg/build.py index a4a9d2f7d50..11c54230dea 100644 --- a/tools/pkg/build.py +++ b/tools/pkg/build.py @@ -467,8 +467,6 @@ def onedir_dependencies( str(requirements_file), env=env, ) - extras_dir = dest / f"extras-{requirements_version}" - extras_dir.mkdir() @build.command( @@ -580,6 +578,17 @@ def salt_onedir( ctx.error("Cloud not find a site-packages path with 'site-packages' in it?!") ctx.exit(1) + ret = ctx.run( + str(python_executable), + "-c", + "import sys; print('{}.{}'.format(*sys.version_info))", + capture=True, + ) + python_version_info = ret.stdout.strip().decode() + extras_dir = onedir_env / f"extras-{python_version_info}" + ctx.info(f"Creating Salt's extras path: {extras_dir}") + extras_dir.mkdir(exist_ok=True) + pth_path = pathlib.Path(site_packages) / "salt-extras.pth" ctx.info(f"Writing '{pth_path}' ...") pth_path.write_text( From ac906c49f8eab9efc781e4edd0dfe7d6d9159fe8 Mon Sep 17 00:00:00 2001 From: "Daniel A. Wozniak" Date: Wed, 3 May 2023 02:38:44 -0700 Subject: [PATCH 109/121] Update relenv version to 0.12.0 --- .github/actions/setup-relenv/action.yml | 2 +- .github/workflows/build-deps-onedir.yml | 2 +- .github/workflows/build-salt-onedir.yml | 2 +- changelog/64192.fixed.md | 2 +- cicd/shared-gh-workflows-context.yml | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/.github/actions/setup-relenv/action.yml b/.github/actions/setup-relenv/action.yml index 745eb293932..08371f996ef 100644 --- a/.github/actions/setup-relenv/action.yml +++ b/.github/actions/setup-relenv/action.yml @@ -22,7 +22,7 @@ inputs: required: false type: string description: The version of relenv to use - default: 0.10.2 + default: 0.12.0 outputs: version: diff --git a/.github/workflows/build-deps-onedir.yml b/.github/workflows/build-deps-onedir.yml index 8a703373f8f..315f247572d 100644 --- a/.github/workflows/build-deps-onedir.yml +++ b/.github/workflows/build-deps-onedir.yml @@ -21,7 +21,7 @@ on: relenv-version: required: false type: string - default: 0.10.2 + default: 0.12.0 description: The version of relenv to use python-version-linux: required: false diff --git a/.github/workflows/build-salt-onedir.yml b/.github/workflows/build-salt-onedir.yml index 837c6cf30b8..64d80462492 100644 --- a/.github/workflows/build-salt-onedir.yml +++ b/.github/workflows/build-salt-onedir.yml @@ -21,7 +21,7 @@ on: relenv-version: required: false type: string - default: 0.10.2 + default: 0.12.0 description: The version of relenv to use python-version-linux: required: false diff --git a/changelog/64192.fixed.md b/changelog/64192.fixed.md index 00f9209e1c3..505a945aa7c 100644 --- a/changelog/64192.fixed.md +++ b/changelog/64192.fixed.md @@ -1 +1 @@ -Add a `.pth` to the onedir env to ensure packages in extras are importable. +Add a `.pth` to the Salt onedir env to ensure packages in extras are importable. Bump relenv to 0.12.0. 
diff --git a/cicd/shared-gh-workflows-context.yml b/cicd/shared-gh-workflows-context.yml index c6e88fc0c3d..c164a06e1cd 100644 --- a/cicd/shared-gh-workflows-context.yml +++ b/cicd/shared-gh-workflows-context.yml @@ -1,4 +1,4 @@ python_version_linux: "3.10.11" python_version_macos: "3.10.11" python_version_windows: "3.10.11" -relenv_version: "0.11.2" +relenv_version: "0.12.0" From 6b168950e5a6bf89d98f35e332ea487f98b95d65 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Wed, 3 May 2023 11:40:24 +0100 Subject: [PATCH 110/121] Generate workflows if `cicd/shared-gh-workflows-context.yml` is modified Signed-off-by: Pedro Algarvio --- .github/workflows/ci.yml | 4 ++-- .github/workflows/nightly.yml | 4 ++-- .github/workflows/scheduled.yml | 4 ++-- .github/workflows/staging.yml | 4 ++-- .pre-commit-config.yaml | 2 +- 5 files changed, 9 insertions(+), 9 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 19508cfcfe7..a8208c118c6 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -448,7 +448,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.11.2" + relenv-version: "0.12.0" python-version-linux: "3.10.11" python-version-macos: "3.10.11" python-version-windows: "3.10.11" @@ -466,7 +466,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.11.2" + relenv-version: "0.12.0" python-version-linux: "3.10.11" python-version-macos: "3.10.11" python-version-windows: "3.10.11" diff --git a/.github/workflows/nightly.yml b/.github/workflows/nightly.yml index 12a90122d2a..3e6d5306918 100644 --- a/.github/workflows/nightly.yml +++ b/.github/workflows/nightly.yml @@ -506,7 +506,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.11.2" + relenv-version: "0.12.0" python-version-linux: "3.10.11" python-version-macos: "3.10.11" python-version-windows: "3.10.11" @@ -524,7 +524,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.11.2" + relenv-version: "0.12.0" python-version-linux: "3.10.11" python-version-macos: "3.10.11" python-version-windows: "3.10.11" diff --git a/.github/workflows/scheduled.yml b/.github/workflows/scheduled.yml index d23d6b50a4b..d8717cfcbe8 100644 --- a/.github/workflows/scheduled.yml +++ b/.github/workflows/scheduled.yml @@ -491,7 +491,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.11.2" + relenv-version: "0.12.0" python-version-linux: "3.10.11" python-version-macos: "3.10.11" 
python-version-windows: "3.10.11" @@ -509,7 +509,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.11.2" + relenv-version: "0.12.0" python-version-linux: "3.10.11" python-version-macos: "3.10.11" python-version-windows: "3.10.11" diff --git a/.github/workflows/staging.yml b/.github/workflows/staging.yml index 9c6b7696f5b..88d03cd718d 100644 --- a/.github/workflows/staging.yml +++ b/.github/workflows/staging.yml @@ -502,7 +502,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.11.2" + relenv-version: "0.12.0" python-version-linux: "3.10.11" python-version-macos: "3.10.11" python-version-windows: "3.10.11" @@ -520,7 +520,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.11.2" + relenv-version: "0.12.0" python-version-linux: "3.10.11" python-version-macos: "3.10.11" python-version-windows: "3.10.11" diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 0a4a1cee40c..23240c88e00 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -63,7 +63,7 @@ repos: - id: tools alias: generate-workflows name: Generate GitHub Workflow Templates - files: ^(tools/pre_commit\.py|.github/workflows/templates/.*)$ + files: ^(cicd/shared-gh-workflows-context\.yml|tools/pre_commit\.py|.github/workflows/templates/.*)$ pass_filenames: false args: - pre-commit From 2aad5d20f141492ff508ce683835105206d1bc7a Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Wed, 3 May 2023 11:55:42 +0100 Subject: [PATCH 111/121] Add build time checks to ensure the right relenv version is being used Signed-off-by: Pedro Algarvio --- tools/pkg/build.py | 68 +++++++++++++++++++++++++++++++++++++++++----- 1 file changed, 61 insertions(+), 7 deletions(-) diff --git a/tools/pkg/build.py b/tools/pkg/build.py index 11c54230dea..4b349d64761 100644 --- a/tools/pkg/build.py +++ b/tools/pkg/build.py @@ -394,20 +394,47 @@ def onedir_dependencies( # We import relenv here because it is not a hard requirement for the rest of the tools commands try: - from relenv.create import create + import relenv.create except ImportError: ctx.exit(1, "Relenv not installed in the current environment.") dest = pathlib.Path(package_name).resolve() - create(dest, arch=arch, version=python_version) + relenv.create.create(dest, arch=arch, version=python_version) + + # Validate that we're using the relenv version we really want to + if platform == "windows": + env_scripts_dir = dest / "Scripts" + else: + env_scripts_dir = dest / "bin" + + ret = ctx.run( + str(env_scripts_dir / "relenv"), "--version", capture=True, check=False + ) + if ret.returncode: + ctx.error(f"Failed to get the relenv version: {ret}") + ctx.exit(1) + + target_relenv_version = _get_shared_constants()["relenv_version"] + env_relenv_version = ret.stdout.strip().decode() + if env_relenv_version != target_relenv_version: + ctx.error( + f"The onedir installed relenv 
version({env_relenv_version}) is not " + f"the relenv version which should be used({target_relenv_version})." + ) + ctx.exit(1) + + ctx.info( + f"The relenv version installed in the onedir env({env_relenv_version}) " + f"matches the version which must be used." + ) env = os.environ.copy() install_args = ["-v"] if platform == "windows": - python_bin = dest / "Scripts" / "python" + python_bin = env_scripts_dir / "python" else: env["RELENV_BUILDENV"] = "1" - python_bin = dest / "bin" / "python3" + python_bin = env_scripts_dir / "python3" install_args.extend( [ "--use-pep517", @@ -502,6 +529,33 @@ def salt_onedir( onedir_env = pathlib.Path(package_name).resolve() _check_pkg_build_files_exist(ctx, onedir_env=onedir_env, salt_archive=salt_archive) + # Validate that we're using the relenv version we really want to + if platform == "windows": + env_scripts_dir = onedir_env / "Scripts" + else: + env_scripts_dir = onedir_env / "bin" + + ret = ctx.run( + str(env_scripts_dir / "relenv"), "--version", capture=True, check=False + ) + if ret.returncode: + ctx.error(f"Failed to get the relenv version: {ret}") + ctx.exit(1) + + target_relenv_version = _get_shared_constants()["relenv_version"] + env_relenv_version = ret.stdout.strip().decode() + if env_relenv_version != target_relenv_version: + ctx.error( + f"The onedir installed relenv version({env_relenv_version}) is not " + f"the relenv version which should be used({target_relenv_version})." + ) + ctx.exit(1) + + ctx.info( + f"The relenv version installed in the onedir env({env_relenv_version}) " + f"matches the version which must be used." + ) + env = os.environ.copy() env["USE_STATIC_REQUIREMENTS"] = "1" env["RELENV_BUILDENV"] = "1" @@ -524,7 +578,7 @@ def salt_onedir( "-CICD", env=env, ) - python_executable = str(onedir_env / "Scripts" / "python.exe") + python_executable = str(env_scripts_dir / "python.exe") ret = ctx.run( python_executable, "-c", @@ -538,7 +592,7 @@ def salt_onedir( ctx.info(f"Discovered 'site-packages' paths: {site_packages_json}") else: env["RELENV_PIP_DIR"] = "1" - pip_bin = onedir_env / "bin" / "pip3" + pip_bin = env_scripts_dir / "pip3" ctx.run( str(pip_bin), "install", @@ -556,7 +610,7 @@ def salt_onedir( if path.exists(): shutil.rmtree(path, onerror=errfn) - python_executable = str(onedir_env / "bin" / "python3") + python_executable = str(env_scripts_dir / "python3") ret = ctx.run( python_executable, "-c", From ae68a09465f7b2834e259dbba239272a41f427f0 Mon Sep 17 00:00:00 2001 From: "Daniel A. Wozniak" Date: Wed, 3 May 2023 09:27:27 -0700 Subject: [PATCH 112/121] Remove un-needed code block --- salt/__init__.py | 6 ------ 1 file changed, 6 deletions(-) diff --git a/salt/__init__.py b/salt/__init__.py index e06b8ad7127..6649fdf5683 100644 --- a/salt/__init__.py +++ b/salt/__init__.py @@ -140,9 +140,3 @@ del __define_global_system_encoding_variable__ import salt._logging # isort:skip # pylint: enable=unused-import - - -# When we are running in a 'onedir' environment, setup the path for user -# installed packages. -if hasattr(sys, "RELENV"): - sys.path.insert(0, str(sys.RELENV / "extras-{}.{}".format(*sys.version_info))) From b660d03ef085a84c733cce5ffdc0ed336f9e4afd Mon Sep 17 00:00:00 2001 From: "Daniel A. 
Wozniak" Date: Wed, 3 May 2023 09:28:30 -0700 Subject: [PATCH 113/121] Bump relenv to 0.12.1 --- .github/actions/setup-relenv/action.yml | 2 +- .github/workflows/build-deps-onedir.yml | 2 +- .github/workflows/build-salt-onedir.yml | 2 +- .github/workflows/ci.yml | 4 ++-- .github/workflows/nightly.yml | 4 ++-- .github/workflows/scheduled.yml | 4 ++-- .github/workflows/staging.yml | 4 ++-- cicd/shared-gh-workflows-context.yml | 2 +- 8 files changed, 12 insertions(+), 12 deletions(-) diff --git a/.github/actions/setup-relenv/action.yml b/.github/actions/setup-relenv/action.yml index 08371f996ef..caa8b79dc54 100644 --- a/.github/actions/setup-relenv/action.yml +++ b/.github/actions/setup-relenv/action.yml @@ -22,7 +22,7 @@ inputs: required: false type: string description: The version of relenv to use - default: 0.12.0 + default: 0.12.1 outputs: version: diff --git a/.github/workflows/build-deps-onedir.yml b/.github/workflows/build-deps-onedir.yml index 315f247572d..ab405a156a1 100644 --- a/.github/workflows/build-deps-onedir.yml +++ b/.github/workflows/build-deps-onedir.yml @@ -21,7 +21,7 @@ on: relenv-version: required: false type: string - default: 0.12.0 + default: 0.12.1 description: The version of relenv to use python-version-linux: required: false diff --git a/.github/workflows/build-salt-onedir.yml b/.github/workflows/build-salt-onedir.yml index 64d80462492..9e21e903c55 100644 --- a/.github/workflows/build-salt-onedir.yml +++ b/.github/workflows/build-salt-onedir.yml @@ -21,7 +21,7 @@ on: relenv-version: required: false type: string - default: 0.12.0 + default: 0.12.1 description: The version of relenv to use python-version-linux: required: false diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index a8208c118c6..bdf6dbdfbd5 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -448,7 +448,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.12.0" + relenv-version: "0.12.1" python-version-linux: "3.10.11" python-version-macos: "3.10.11" python-version-windows: "3.10.11" @@ -466,7 +466,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.12.0" + relenv-version: "0.12.1" python-version-linux: "3.10.11" python-version-macos: "3.10.11" python-version-windows: "3.10.11" diff --git a/.github/workflows/nightly.yml b/.github/workflows/nightly.yml index 3e6d5306918..9ca0ed5dd76 100644 --- a/.github/workflows/nightly.yml +++ b/.github/workflows/nightly.yml @@ -506,7 +506,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.12.0" + relenv-version: "0.12.1" python-version-linux: "3.10.11" python-version-macos: "3.10.11" python-version-windows: "3.10.11" @@ -524,7 +524,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ 
fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.12.0" + relenv-version: "0.12.1" python-version-linux: "3.10.11" python-version-macos: "3.10.11" python-version-windows: "3.10.11" diff --git a/.github/workflows/scheduled.yml b/.github/workflows/scheduled.yml index d8717cfcbe8..c1c515f890d 100644 --- a/.github/workflows/scheduled.yml +++ b/.github/workflows/scheduled.yml @@ -491,7 +491,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.12.0" + relenv-version: "0.12.1" python-version-linux: "3.10.11" python-version-macos: "3.10.11" python-version-windows: "3.10.11" @@ -509,7 +509,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.12.0" + relenv-version: "0.12.1" python-version-linux: "3.10.11" python-version-macos: "3.10.11" python-version-windows: "3.10.11" diff --git a/.github/workflows/staging.yml b/.github/workflows/staging.yml index 88d03cd718d..ddc90088460 100644 --- a/.github/workflows/staging.yml +++ b/.github/workflows/staging.yml @@ -502,7 +502,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.12.0" + relenv-version: "0.12.1" python-version-linux: "3.10.11" python-version-macos: "3.10.11" python-version-windows: "3.10.11" @@ -520,7 +520,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.12.0" + relenv-version: "0.12.1" python-version-linux: "3.10.11" python-version-macos: "3.10.11" python-version-windows: "3.10.11" diff --git a/cicd/shared-gh-workflows-context.yml b/cicd/shared-gh-workflows-context.yml index c164a06e1cd..5acd767923d 100644 --- a/cicd/shared-gh-workflows-context.yml +++ b/cicd/shared-gh-workflows-context.yml @@ -1,4 +1,4 @@ python_version_linux: "3.10.11" python_version_macos: "3.10.11" python_version_windows: "3.10.11" -relenv_version: "0.12.0" +relenv_version: "0.12.1" From 2920f01161793654b483217c5e73fa962c61ddf8 Mon Sep 17 00:00:00 2001 From: "Daniel A. 
Wozniak" Date: Wed, 3 May 2023 17:30:00 -0700 Subject: [PATCH 114/121] Account for different extras location on win32 --- tools/pkg/build.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/tools/pkg/build.py b/tools/pkg/build.py index 4b349d64761..609ddfa40fa 100644 --- a/tools/pkg/build.py +++ b/tools/pkg/build.py @@ -646,7 +646,8 @@ def salt_onedir( pth_path = pathlib.Path(site_packages) / "salt-extras.pth" ctx.info(f"Writing '{pth_path}' ...") pth_path.write_text( - 'import sys, pathlib; extras = str(pathlib.Path(__file__).parent.parent.parent / "extras-{}.{}".format(*sys.version_info)); ' + 'import sys, pathlib; extras = str(pathlib.Path(__file__).parent.parent.parent / "extras-{}.{}".format(*sys.version_info)) ' + 'if sys.platform != "win32" else str(pathlib.Path(__file__).parent.parent / "extras-{}.{}".format(*sys.version_info))} ; ' "extras not in sys.path and sys.path.insert(0, extras)\n" ) From f871c09e7bbc2bea23c758247e0f39466e764801 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Thu, 4 May 2023 07:14:28 +0100 Subject: [PATCH 115/121] Fix typo Signed-off-by: Pedro Algarvio --- tools/pkg/build.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tools/pkg/build.py b/tools/pkg/build.py index 609ddfa40fa..43d46655ee3 100644 --- a/tools/pkg/build.py +++ b/tools/pkg/build.py @@ -647,7 +647,7 @@ def salt_onedir( ctx.info(f"Writing '{pth_path}' ...") pth_path.write_text( 'import sys, pathlib; extras = str(pathlib.Path(__file__).parent.parent.parent / "extras-{}.{}".format(*sys.version_info)) ' - 'if sys.platform != "win32" else str(pathlib.Path(__file__).parent.parent / "extras-{}.{}".format(*sys.version_info))} ; ' + 'if sys.platform != "win32" else str(pathlib.Path(__file__).parent.parent / "extras-{}.{}".format(*sys.version_info)); ' "extras not in sys.path and sys.path.insert(0, extras)\n" ) From f6188fddfc578b96b9e5a9445a1e4b0427fb8228 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Thu, 4 May 2023 07:22:30 +0100 Subject: [PATCH 116/121] Handle stale cache Signed-off-by: Pedro Algarvio --- tools/vm.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/tools/vm.py b/tools/vm.py index 9500317f520..944f2fe6cc2 100644 --- a/tools/vm.py +++ b/tools/vm.py @@ -630,6 +630,11 @@ class VM: self.ctx.error(str(exc)) self.ctx.exit(1) instance_id_path.unlink() + except AttributeError: + # This machine no longer exists?! + instance_id_path.unlink() + self.ctx.info("It appears the cached image no longer exists...") + self.ctx.exit(1) if not instance_id_path.exists(): filters = [ {"Name": "tag:vm-name", "Values": [self.name]}, From 50531c5adfbaa75b2e9e8c55583885b4cd6aa724 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Thu, 4 May 2023 19:35:11 +0100 Subject: [PATCH 117/121] Add 3006.1 release notes template. 
Signed-off-by: Pedro Algarvio --- doc/topics/releases/templates/3006.1.md.template | 15 +++++++++++++++ 1 file changed, 15 insertions(+) create mode 100644 doc/topics/releases/templates/3006.1.md.template diff --git a/doc/topics/releases/templates/3006.1.md.template b/doc/topics/releases/templates/3006.1.md.template new file mode 100644 index 00000000000..f5302a4eab2 --- /dev/null +++ b/doc/topics/releases/templates/3006.1.md.template @@ -0,0 +1,15 @@ +(release-3006.1)= +# Salt 3006.1 release notes{{ unreleased }} +{{ warning }} + + + + + +## Changelog +{{ changelog }} From ad84d7cae9b1d3da008c10f8a2a1bc1947211823 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Thu, 4 May 2023 22:12:09 +0100 Subject: [PATCH 118/121] Let's not concentrate the logic into a "one liner" Signed-off-by: Pedro Algarvio --- pkg/common/onedir/_salt_onedir_extras.pth | 1 + pkg/common/onedir/_salt_onedir_extras.py | 18 ++++++++++++++++++ tools/pkg/build.py | 12 +++++------- 3 files changed, 24 insertions(+), 7 deletions(-) create mode 100644 pkg/common/onedir/_salt_onedir_extras.pth create mode 100644 pkg/common/onedir/_salt_onedir_extras.py diff --git a/pkg/common/onedir/_salt_onedir_extras.pth b/pkg/common/onedir/_salt_onedir_extras.pth new file mode 100644 index 00000000000..1e7742532df --- /dev/null +++ b/pkg/common/onedir/_salt_onedir_extras.pth @@ -0,0 +1 @@ +import _salt_onedir_extras; _salt_onedir_extras.setup(__file__) diff --git a/pkg/common/onedir/_salt_onedir_extras.py b/pkg/common/onedir/_salt_onedir_extras.py new file mode 100644 index 00000000000..366136ba2a9 --- /dev/null +++ b/pkg/common/onedir/_salt_onedir_extras.py @@ -0,0 +1,18 @@ +import pathlib +import sys + + +def setup(pth_file_path): + # Discover the extras-<major>.<minor> directory + extras_parent_path = pathlib.Path(pth_file_path).resolve().parent.parent + if not sys.platform.startswith("win"): + extras_parent_path = extras_parent_path.parent + + extras_path = str(extras_parent_path / "extras-{}.{}".format(*sys.version_info)) + + if extras_path in sys.path and sys.path[0] != extras_path: + # The extras directory must come first + sys.path.remove(extras_path) + + if extras_path not in sys.path: + sys.path.insert(0, extras_path) diff --git a/tools/pkg/build.py b/tools/pkg/build.py index 43d46655ee3..b373338a99e 100644 --- a/tools/pkg/build.py +++ b/tools/pkg/build.py @@ -643,13 +643,11 @@ def salt_onedir( ctx.info(f"Creating Salt's extras path: {extras_dir}") extras_dir.mkdir(exist_ok=True) - pth_path = pathlib.Path(site_packages) / "salt-extras.pth" - ctx.info(f"Writing '{pth_path}' ...") - pth_path.write_text( - 'import sys, pathlib; extras = str(pathlib.Path(__file__).parent.parent.parent / "extras-{}.{}".format(*sys.version_info)) ' - 'if sys.platform != "win32" else str(pathlib.Path(__file__).parent.parent / "extras-{}.{}".format(*sys.version_info)); ' - "extras not in sys.path and sys.path.insert(0, extras)\n" - ) + for fname in ("_salt_onedir_extras.py", "_salt_onedir_extras.pth"): + src = tools.utils.REPO_ROOT / "pkg" / "common" / "onedir" / fname + dst = pathlib.Path(site_packages) / fname + ctx.info(f"Copying '{src.relative_to(tools.utils.REPO_ROOT)}' to '{dst}' ...") + shutil.copyfile(src, dst) def _check_pkg_build_files_exist(ctx: Context, **kwargs): From 6531a388ab3df1b1737164b1df08c4a0cdb33c10 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Thu, 4 May 2023 22:21:10 +0100 Subject: [PATCH 119/121] Bump to relenv 0.12.3 Signed-off-by: Pedro Algarvio --- .github/actions/setup-relenv/action.yml | 2 +-
.github/workflows/build-deps-onedir.yml | 2 +- .github/workflows/build-salt-onedir.yml | 2 +- .github/workflows/ci.yml | 4 ++-- .github/workflows/nightly.yml | 4 ++-- .github/workflows/scheduled.yml | 4 ++-- .github/workflows/staging.yml | 4 ++-- changelog/64192.fixed.md | 2 +- cicd/shared-gh-workflows-context.yml | 2 +- 9 files changed, 13 insertions(+), 13 deletions(-) diff --git a/.github/actions/setup-relenv/action.yml b/.github/actions/setup-relenv/action.yml index caa8b79dc54..1f228fd1822 100644 --- a/.github/actions/setup-relenv/action.yml +++ b/.github/actions/setup-relenv/action.yml @@ -22,7 +22,7 @@ inputs: required: false type: string description: The version of relenv to use - default: 0.12.1 + default: 0.12.3 outputs: version: diff --git a/.github/workflows/build-deps-onedir.yml b/.github/workflows/build-deps-onedir.yml index ab405a156a1..ad788929cf9 100644 --- a/.github/workflows/build-deps-onedir.yml +++ b/.github/workflows/build-deps-onedir.yml @@ -21,7 +21,7 @@ on: relenv-version: required: false type: string - default: 0.12.1 + default: 0.12.3 description: The version of relenv to use python-version-linux: required: false diff --git a/.github/workflows/build-salt-onedir.yml b/.github/workflows/build-salt-onedir.yml index 9e21e903c55..2b1b758b42f 100644 --- a/.github/workflows/build-salt-onedir.yml +++ b/.github/workflows/build-salt-onedir.yml @@ -21,7 +21,7 @@ on: relenv-version: required: false type: string - default: 0.12.1 + default: 0.12.3 description: The version of relenv to use python-version-linux: required: false diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index bdf6dbdfbd5..a18e21fcc5d 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -448,7 +448,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.12.1" + relenv-version: "0.12.3" python-version-linux: "3.10.11" python-version-macos: "3.10.11" python-version-windows: "3.10.11" @@ -466,7 +466,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.12.1" + relenv-version: "0.12.3" python-version-linux: "3.10.11" python-version-macos: "3.10.11" python-version-windows: "3.10.11" diff --git a/.github/workflows/nightly.yml b/.github/workflows/nightly.yml index 9ca0ed5dd76..8291efe30fa 100644 --- a/.github/workflows/nightly.yml +++ b/.github/workflows/nightly.yml @@ -506,7 +506,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.12.1" + relenv-version: "0.12.3" python-version-linux: "3.10.11" python-version-macos: "3.10.11" python-version-windows: "3.10.11" @@ -524,7 +524,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.12.1" + relenv-version: "0.12.3" 
python-version-linux: "3.10.11" python-version-macos: "3.10.11" python-version-windows: "3.10.11" diff --git a/.github/workflows/scheduled.yml b/.github/workflows/scheduled.yml index c1c515f890d..fda566fbb3e 100644 --- a/.github/workflows/scheduled.yml +++ b/.github/workflows/scheduled.yml @@ -491,7 +491,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.12.1" + relenv-version: "0.12.3" python-version-linux: "3.10.11" python-version-macos: "3.10.11" python-version-windows: "3.10.11" @@ -509,7 +509,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.12.1" + relenv-version: "0.12.3" python-version-linux: "3.10.11" python-version-macos: "3.10.11" python-version-windows: "3.10.11" diff --git a/.github/workflows/staging.yml b/.github/workflows/staging.yml index ddc90088460..53f5fd62454 100644 --- a/.github/workflows/staging.yml +++ b/.github/workflows/staging.yml @@ -502,7 +502,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.12.1" + relenv-version: "0.12.3" python-version-linux: "3.10.11" python-version-macos: "3.10.11" python-version-windows: "3.10.11" @@ -520,7 +520,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.12.1" + relenv-version: "0.12.3" python-version-linux: "3.10.11" python-version-macos: "3.10.11" python-version-windows: "3.10.11" diff --git a/changelog/64192.fixed.md b/changelog/64192.fixed.md index 505a945aa7c..c0433045506 100644 --- a/changelog/64192.fixed.md +++ b/changelog/64192.fixed.md @@ -1 +1 @@ -Add a `.pth` to the Salt onedir env to ensure packages in extras are importable. Bump relenv to 0.12.0. +Add a `.pth` to the Salt onedir env to ensure packages in extras are importable. Bump relenv to 0.12.3. 
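For readers skimming the series: the changelog entry above (and patches 112, 114, 115 and 118) all revolve around CPython's `.pth` startup hook. At interpreter start, the `site` module scans site-packages for `*.pth` files and executes any line in them that begins with `import`. Below is a minimal, self-contained sketch of that mechanism, using hypothetical names (`demo-site-packages`, `_demo_extras`) rather than the actual Salt files; it is illustrative only and is not part of the patch series:

    import pathlib
    import site
    import sys

    demo = pathlib.Path("demo-site-packages").resolve()
    extras = demo / "extras"
    extras.mkdir(parents=True, exist_ok=True)

    # A miniature stand-in for pkg/common/onedir/_salt_onedir_extras.py.
    (demo / "_demo_extras.py").write_text(
        "import pathlib, sys\n"
        "\n"
        "def setup():\n"
        "    extras = str(pathlib.Path(__file__).resolve().parent / 'extras')\n"
        "    if extras not in sys.path:\n"
        "        sys.path.insert(0, extras)\n"
    )
    # site.py executes .pth lines that start with "import", so this single
    # line is enough to run the hook on every interpreter startup.
    (demo / "_demo_extras.pth").write_text("import _demo_extras; _demo_extras.setup()\n")

    # addsitedir() applies the same .pth processing that the real
    # site-packages directory receives automatically during startup.
    site.addsitedir(str(demo))
    assert str(extras) in sys.path

The real `_salt_onedir_extras.setup()` from patch 118 is slightly more involved: it receives a path from the `.pth` execution context and climbs a platform-dependent number of parent directories, because in the onedir layout the `extras-<major>.<minor>` directory lives at the environment root (compare the removed `sys.RELENV / "extras-{}.{}"` block in patch 112) rather than next to the hook module.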
diff --git a/cicd/shared-gh-workflows-context.yml b/cicd/shared-gh-workflows-context.yml index 5acd767923d..ec3d939fe03 100644 --- a/cicd/shared-gh-workflows-context.yml +++ b/cicd/shared-gh-workflows-context.yml @@ -1,4 +1,4 @@ python_version_linux: "3.10.11" python_version_macos: "3.10.11" python_version_windows: "3.10.11" -relenv_version: "0.12.1" +relenv_version: "0.12.3" From 4e8b77df671fd756970fe4fb08122fba9b47c50b Mon Sep 17 00:00:00 2001 From: Salt Project Packaging Date: Fri, 5 May 2023 17:53:34 +0000 Subject: [PATCH 120/121] Release v3006.1 --- CHANGELOG.md | 36 +++++++ changelog/61236.fixed.md | 1 - changelog/62477.fixed.md | 1 - changelog/63589.fixed.md | 1 - changelog/63785.fixed.md | 1 - changelog/63810.fixed.md | 1 - changelog/64081.fixed.md | 1 - changelog/64082.fixed.md | 1 - changelog/64102.fixed.md | 3 - changelog/64103.fixed.md | 3 - changelog/64109.fixed.md | 1 - changelog/64111.fixed.md | 1 - changelog/64113.fixed.md | 2 - changelog/64114.fixed.md | 1 - changelog/64117.fixed.md | 1 - changelog/64118.fixed.md | 1 - changelog/64126.fixed.md | 1 - changelog/64141.fixed.md | 1 - changelog/64150.fixed.md | 1 - changelog/64158.fixed.md | 1 - changelog/64170.fixed.md | 2 - changelog/64184.fixed.md | 1 - changelog/64186.fixed.md | 1 - changelog/64192.fixed.md | 1 - changelog/64200.fixed.md | 1 - changelog/64204.fixed.md | 1 - changelog/64222.fixed.md | 1 - doc/man/salt-api.1 | 2 +- doc/man/salt-call.1 | 2 +- doc/man/salt-cloud.1 | 2 +- doc/man/salt-cp.1 | 2 +- doc/man/salt-key.1 | 2 +- doc/man/salt-master.1 | 2 +- doc/man/salt-minion.1 | 2 +- doc/man/salt-proxy.1 | 2 +- doc/man/salt-run.1 | 2 +- doc/man/salt-ssh.1 | 2 +- doc/man/salt-syndic.1 | 2 +- doc/man/salt.1 | 2 +- doc/man/salt.7 | 181 +++++++++++++++++++++++++++------- doc/man/spm.1 | 2 +- doc/topics/releases/3006.1.md | 52 ++++++++++ pkg/debian/changelog | 38 +++++++ pkg/rpm/salt.spec | 37 ++++++- 44 files changed, 321 insertions(+), 81 deletions(-) delete mode 100644 changelog/61236.fixed.md delete mode 100644 changelog/62477.fixed.md delete mode 100644 changelog/63589.fixed.md delete mode 100644 changelog/63785.fixed.md delete mode 100644 changelog/63810.fixed.md delete mode 100644 changelog/64081.fixed.md delete mode 100644 changelog/64082.fixed.md delete mode 100644 changelog/64102.fixed.md delete mode 100644 changelog/64103.fixed.md delete mode 100644 changelog/64109.fixed.md delete mode 100644 changelog/64111.fixed.md delete mode 100644 changelog/64113.fixed.md delete mode 100644 changelog/64114.fixed.md delete mode 100644 changelog/64117.fixed.md delete mode 100644 changelog/64118.fixed.md delete mode 100644 changelog/64126.fixed.md delete mode 100644 changelog/64141.fixed.md delete mode 100644 changelog/64150.fixed.md delete mode 100644 changelog/64158.fixed.md delete mode 100644 changelog/64170.fixed.md delete mode 100644 changelog/64184.fixed.md delete mode 100644 changelog/64186.fixed.md delete mode 100644 changelog/64192.fixed.md delete mode 100644 changelog/64200.fixed.md delete mode 100644 changelog/64204.fixed.md delete mode 100644 changelog/64222.fixed.md create mode 100644 doc/topics/releases/3006.1.md diff --git a/CHANGELOG.md b/CHANGELOG.md index 834323eec49..1132b94882b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,6 +7,42 @@ Versions are `MAJOR.PATCH`. # Changelog +## 3006.1 (2023-05-05) + + +### Fixed + +- Check that the return data from the cloud create function is a dictionary before attempting to pull values out. 
[#61236](https://github.com/saltstack/salt/issues/61236) +- Ensure NamedLoaderContext's have their value() used if passing to other modules [#62477](https://github.com/saltstack/salt/issues/62477) +- add documentation note about reactor state ids. [#63589](https://github.com/saltstack/salt/issues/63589) +- Added support for ``test=True`` to the ``file.cached`` state module [#63785](https://github.com/saltstack/salt/issues/63785) +- Updated `source_hash` documentation and added a log warning when `source_hash` is used with a source other than `http`, `https` and `ftp`. [#63810](https://github.com/saltstack/salt/issues/63810) +- Fixed clear pillar cache on every highstate and added clean_pillar_cache=False to saltutil functions. [#64081](https://github.com/saltstack/salt/issues/64081) +- Fix dmsetup device names with hyphen being picked up. [#64082](https://github.com/saltstack/salt/issues/64082) +- Update all the scheduler functions to include a fire_event argument which will determine whether to fire the completion event onto the event bus. + This event is only used when these functions are called via the schedule execution modules. + Update all the calls to the schedule related functions in the deltaproxy proxy minion to include fire_event=False, as the event bus is not available when these functions are called. [#64102](https://github.com/saltstack/salt/issues/64102), [#64103](https://github.com/saltstack/salt/issues/64103) +- Default to a 0 timeout if none is given for the terraform roster to avoid `-o ConnectTimeout=None` when using `salt-ssh` [#64109](https://github.com/saltstack/salt/issues/64109) +- Disable class level caching of the file client on `SaltCacheLoader` and properly use context managers to take care of initialization and termination of the file client. [#64111](https://github.com/saltstack/salt/issues/64111) +- Fixed several file client uses which were not properly terminating it by switching to using it as a context manager + whenever possible or making sure `.destroy()` was called when using a context manager was not possible. [#64113](https://github.com/saltstack/salt/issues/64113) +- Fix running setup.py when passing in --salt-config-dir and --salt-cache-dir arguments. [#64114](https://github.com/saltstack/salt/issues/64114) +- Moved /etc/salt/proxy and /lib/systemd/system/salt-proxy@.service to the salt-minion DEB package [#64117](https://github.com/saltstack/salt/issues/64117) +- Stop passing `**kwargs` and be explicit about the keyword arguments to pass, namely, to `cp.cache_file` call in `salt.states.pkg` [#64118](https://github.com/saltstack/salt/issues/64118) +- lgpo_reg.set_value now returns ``True`` on success instead of ``None`` [#64126](https://github.com/saltstack/salt/issues/64126) +- Make salt user's home /opt/saltstack/salt [#64141](https://github.com/saltstack/salt/issues/64141) +- Fix cmd.run doesn't output changes in test mode [#64150](https://github.com/saltstack/salt/issues/64150) +- Move salt user and group creation to common package [#64158](https://github.com/saltstack/salt/issues/64158) +- Fixed issue in salt-cloud so that multiple masters specified in the cloud + are written to the minion config properly [#64170](https://github.com/saltstack/salt/issues/64170) +- Make sure the `salt-ssh` CLI calls its `fsclient.destroy()` method when done. [#64184](https://github.com/saltstack/salt/issues/64184) +- Stop using the deprecated `salt.transport.client` imports.
[#64186](https://github.com/saltstack/salt/issues/64186) +- Add a `.pth` to the Salt onedir env to ensure packages in extras are importable. Bump relenv to 0.12.3. [#64192](https://github.com/saltstack/salt/issues/64192) +- Fix ``lgpo_reg`` state to work with User policy [#64200](https://github.com/saltstack/salt/issues/64200) +- Cloud deployment directories are owned by salt user and group [#64204](https://github.com/saltstack/salt/issues/64204) +- ``lgpo_reg`` state now enforces and reports changes to the registry [#64222](https://github.com/saltstack/salt/issues/64222) + + ## 3006.0 (2023-04-18) diff --git a/changelog/61236.fixed.md b/changelog/61236.fixed.md deleted file mode 100644 index 4c50beedcba..00000000000 --- a/changelog/61236.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Check that the return data from the cloud create function is a dictionary before attempting to pull values out. diff --git a/changelog/62477.fixed.md b/changelog/62477.fixed.md deleted file mode 100644 index 88f47bdb4bd..00000000000 --- a/changelog/62477.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Ensure NamedLoaderContext's have their value() used if passing to other modules diff --git a/changelog/63589.fixed.md b/changelog/63589.fixed.md deleted file mode 100644 index 1f63f9ee993..00000000000 --- a/changelog/63589.fixed.md +++ /dev/null @@ -1 +0,0 @@ -add documentation note about reactor state ids. diff --git a/changelog/63785.fixed.md b/changelog/63785.fixed.md deleted file mode 100644 index 4a8406126ea..00000000000 --- a/changelog/63785.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Added support for ``test=True`` to the ``file.cached`` state module diff --git a/changelog/63810.fixed.md b/changelog/63810.fixed.md deleted file mode 100644 index 241907dbc00..00000000000 --- a/changelog/63810.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Updated `source_hash` documentation and added a log warning when `source_hash` is used with a source other than `http`, `https` and `ftp`. diff --git a/changelog/64081.fixed.md b/changelog/64081.fixed.md deleted file mode 100644 index ed0720ac9a3..00000000000 --- a/changelog/64081.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Fixed clear pillar cache on every highstate and added clean_pillar_cache=False to saltutil functions. diff --git a/changelog/64082.fixed.md b/changelog/64082.fixed.md deleted file mode 100644 index c5bbc5a0ccb..00000000000 --- a/changelog/64082.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Fix dmsetup device names with hyphen being picked up. diff --git a/changelog/64102.fixed.md b/changelog/64102.fixed.md deleted file mode 100644 index 09d14ab16cb..00000000000 --- a/changelog/64102.fixed.md +++ /dev/null @@ -1,3 +0,0 @@ -Update all the scheduler functions to include a fire_event argument which will determine whether to fire the completion event onto the event bus. -This event is only used when these functions are called via the schedule execution modules. -Update all the calls to the schedule related functions in the deltaproxy proxy minion to include fire_event=False, as the event bus is not available when these functions are called. diff --git a/changelog/64103.fixed.md b/changelog/64103.fixed.md deleted file mode 100644 index 09d14ab16cb..00000000000 --- a/changelog/64103.fixed.md +++ /dev/null @@ -1,3 +0,0 @@ -Update all the scheduler functions to include a fire_event argument which will determine whether to fire the completion event onto the event bus. -This event is only used when these functions are called via the schedule execution modules. 
-Update all the calls to the schedule related functions in the deltaproxy proxy minion to include fire_event=False, as the event bus is not available when these functions are called. diff --git a/changelog/64109.fixed.md b/changelog/64109.fixed.md deleted file mode 100644 index 59c884cb869..00000000000 --- a/changelog/64109.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Default to a 0 timeout if none is given for the terraform roster to avoid `-o ConnectTimeout=None` when using `salt-ssh` diff --git a/changelog/64111.fixed.md b/changelog/64111.fixed.md deleted file mode 100644 index a6c00a1b999..00000000000 --- a/changelog/64111.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Disable class level caching of the file client on `SaltCacheLoader` and properly use context managers to take care of initialization and termination of the file client. diff --git a/changelog/64113.fixed.md b/changelog/64113.fixed.md deleted file mode 100644 index b2a530eeb3d..00000000000 --- a/changelog/64113.fixed.md +++ /dev/null @@ -1,2 +0,0 @@ -Fixed several file client uses which were not properly terminating it by switching to using it as a context manager -whenever possible or making sure `.destroy()` was called when using a context manager was not possible. diff --git a/changelog/64114.fixed.md b/changelog/64114.fixed.md deleted file mode 100644 index f01c5ea9127..00000000000 --- a/changelog/64114.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Fix running setup.py when passing in --salt-config-dir and --salt-cache-dir arguments. diff --git a/changelog/64117.fixed.md b/changelog/64117.fixed.md deleted file mode 100644 index 0bca97e167d..00000000000 --- a/changelog/64117.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Moved /etc/salt/proxy and /lib/systemd/system/salt-proxy@.service to the salt-minion DEB package diff --git a/changelog/64118.fixed.md b/changelog/64118.fixed.md deleted file mode 100644 index e7251827e97..00000000000 --- a/changelog/64118.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Stop passing `**kwargs` and be explicit about the keyword arguments to pass, namely, to `cp.cache_file` call in `salt.states.pkg` diff --git a/changelog/64126.fixed.md b/changelog/64126.fixed.md deleted file mode 100644 index fb6cf7c46b4..00000000000 --- a/changelog/64126.fixed.md +++ /dev/null @@ -1 +0,0 @@ -lgpo_reg.set_value now returns ``True`` on success instead of ``None`` diff --git a/changelog/64141.fixed.md b/changelog/64141.fixed.md deleted file mode 100644 index 62c3e8f90c1..00000000000 --- a/changelog/64141.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Make salt user's home /opt/saltstack/salt diff --git a/changelog/64150.fixed.md b/changelog/64150.fixed.md deleted file mode 100644 index a767e10bf8d..00000000000 --- a/changelog/64150.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Fix cmd.run doesn't output changes in test mode diff --git a/changelog/64158.fixed.md b/changelog/64158.fixed.md deleted file mode 100644 index e34fc72b4d8..00000000000 --- a/changelog/64158.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Move salt user and group creation to common package diff --git a/changelog/64170.fixed.md b/changelog/64170.fixed.md deleted file mode 100644 index 1d20355bf1e..00000000000 --- a/changelog/64170.fixed.md +++ /dev/null @@ -1,2 +0,0 @@ -Fixed issue in salt-cloud so that multiple masters specified in the cloud -are written to the minion config properly diff --git a/changelog/64184.fixed.md b/changelog/64184.fixed.md deleted file mode 100644 index c63583324e3..00000000000 --- a/changelog/64184.fixed.md +++ /dev/null @@ -1 +0,0 @@ - Make sure the `salt-ssh` CLI calls it's 
`fsclient.destroy()` method when done. diff --git a/changelog/64186.fixed.md b/changelog/64186.fixed.md deleted file mode 100644 index 64c2c27f8a7..00000000000 --- a/changelog/64186.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Stop using the deprecated `salt.transport.client` imports. diff --git a/changelog/64192.fixed.md b/changelog/64192.fixed.md deleted file mode 100644 index c0433045506..00000000000 --- a/changelog/64192.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Add a `.pth` to the Salt onedir env to ensure packages in extras are importable. Bump relenv to 0.12.3. diff --git a/changelog/64200.fixed.md b/changelog/64200.fixed.md deleted file mode 100644 index 9c977309cb9..00000000000 --- a/changelog/64200.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Fix ``lgpo_reg`` state to work with User policy diff --git a/changelog/64204.fixed.md b/changelog/64204.fixed.md deleted file mode 100644 index bc979379c9d..00000000000 --- a/changelog/64204.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Cloud deployment directories are owned by salt user and group diff --git a/changelog/64222.fixed.md b/changelog/64222.fixed.md deleted file mode 100644 index ce7b5a54d8c..00000000000 --- a/changelog/64222.fixed.md +++ /dev/null @@ -1 +0,0 @@ -``lgpo_reg`` state now enforces and reports changes to the registry diff --git a/doc/man/salt-api.1 b/doc/man/salt-api.1 index 0935a743675..379d345536a 100644 --- a/doc/man/salt-api.1 +++ b/doc/man/salt-api.1 @@ -27,7 +27,7 @@ level margin: \\n[rst2man-indent\\n[rst2man-indent-level]] .\" new: \\n[rst2man-indent\\n[rst2man-indent-level]] .in \\n[rst2man-indent\\n[rst2man-indent-level]]u .. -.TH "SALT-API" "1" "Generated on April 18, 2023 at 08:56:37 PM UTC." "3006.0" "Salt" +.TH "SALT-API" "1" "Generated on May 05, 2023 at 05:45:04 PM UTC." "3006.1" "Salt" .SH NAME salt-api \- salt-api Command .sp diff --git a/doc/man/salt-call.1 b/doc/man/salt-call.1 index f81101ae003..460cf91dddb 100644 --- a/doc/man/salt-call.1 +++ b/doc/man/salt-call.1 @@ -27,7 +27,7 @@ level margin: \\n[rst2man-indent\\n[rst2man-indent-level]] .\" new: \\n[rst2man-indent\\n[rst2man-indent-level]] .in \\n[rst2man-indent\\n[rst2man-indent-level]]u .. -.TH "SALT-CALL" "1" "Generated on April 18, 2023 at 08:56:37 PM UTC." "3006.0" "Salt" +.TH "SALT-CALL" "1" "Generated on May 05, 2023 at 05:45:04 PM UTC." "3006.1" "Salt" .SH NAME salt-call \- salt-call Documentation .SH SYNOPSIS diff --git a/doc/man/salt-cloud.1 b/doc/man/salt-cloud.1 index c0bc776c4f1..2a75e218e04 100644 --- a/doc/man/salt-cloud.1 +++ b/doc/man/salt-cloud.1 @@ -27,7 +27,7 @@ level margin: \\n[rst2man-indent\\n[rst2man-indent-level]] .\" new: \\n[rst2man-indent\\n[rst2man-indent-level]] .in \\n[rst2man-indent\\n[rst2man-indent-level]]u .. -.TH "SALT-CLOUD" "1" "Generated on April 18, 2023 at 08:56:37 PM UTC." "3006.0" "Salt" +.TH "SALT-CLOUD" "1" "Generated on May 05, 2023 at 05:45:04 PM UTC." "3006.1" "Salt" .SH NAME salt-cloud \- Salt Cloud Command .sp diff --git a/doc/man/salt-cp.1 b/doc/man/salt-cp.1 index c9cfd69ba8b..74ab95a2bcc 100644 --- a/doc/man/salt-cp.1 +++ b/doc/man/salt-cp.1 @@ -27,7 +27,7 @@ level margin: \\n[rst2man-indent\\n[rst2man-indent-level]] .\" new: \\n[rst2man-indent\\n[rst2man-indent-level]] .in \\n[rst2man-indent\\n[rst2man-indent-level]]u .. -.TH "SALT-CP" "1" "Generated on April 18, 2023 at 08:56:37 PM UTC." "3006.0" "Salt" +.TH "SALT-CP" "1" "Generated on May 05, 2023 at 05:45:04 PM UTC." 
"3006.1" "Salt" .SH NAME salt-cp \- salt-cp Documentation .sp diff --git a/doc/man/salt-key.1 b/doc/man/salt-key.1 index 0ff7822c6f9..c4723cae0e0 100644 --- a/doc/man/salt-key.1 +++ b/doc/man/salt-key.1 @@ -27,7 +27,7 @@ level margin: \\n[rst2man-indent\\n[rst2man-indent-level]] .\" new: \\n[rst2man-indent\\n[rst2man-indent-level]] .in \\n[rst2man-indent\\n[rst2man-indent-level]]u .. -.TH "SALT-KEY" "1" "Generated on April 18, 2023 at 08:56:37 PM UTC." "3006.0" "Salt" +.TH "SALT-KEY" "1" "Generated on May 05, 2023 at 05:45:04 PM UTC." "3006.1" "Salt" .SH NAME salt-key \- salt-key Documentation .SH SYNOPSIS diff --git a/doc/man/salt-master.1 b/doc/man/salt-master.1 index 8eb527cadf3..72fa39ba91d 100644 --- a/doc/man/salt-master.1 +++ b/doc/man/salt-master.1 @@ -27,7 +27,7 @@ level margin: \\n[rst2man-indent\\n[rst2man-indent-level]] .\" new: \\n[rst2man-indent\\n[rst2man-indent-level]] .in \\n[rst2man-indent\\n[rst2man-indent-level]]u .. -.TH "SALT-MASTER" "1" "Generated on April 18, 2023 at 08:56:37 PM UTC." "3006.0" "Salt" +.TH "SALT-MASTER" "1" "Generated on May 05, 2023 at 05:45:04 PM UTC." "3006.1" "Salt" .SH NAME salt-master \- salt-master Documentation .sp diff --git a/doc/man/salt-minion.1 b/doc/man/salt-minion.1 index 32421a3f24f..fc550d0085f 100644 --- a/doc/man/salt-minion.1 +++ b/doc/man/salt-minion.1 @@ -27,7 +27,7 @@ level margin: \\n[rst2man-indent\\n[rst2man-indent-level]] .\" new: \\n[rst2man-indent\\n[rst2man-indent-level]] .in \\n[rst2man-indent\\n[rst2man-indent-level]]u .. -.TH "SALT-MINION" "1" "Generated on April 18, 2023 at 08:56:37 PM UTC." "3006.0" "Salt" +.TH "SALT-MINION" "1" "Generated on May 05, 2023 at 05:45:04 PM UTC." "3006.1" "Salt" .SH NAME salt-minion \- salt-minion Documentation .sp diff --git a/doc/man/salt-proxy.1 b/doc/man/salt-proxy.1 index 57084cc6101..9a78879db3a 100644 --- a/doc/man/salt-proxy.1 +++ b/doc/man/salt-proxy.1 @@ -27,7 +27,7 @@ level margin: \\n[rst2man-indent\\n[rst2man-indent-level]] .\" new: \\n[rst2man-indent\\n[rst2man-indent-level]] .in \\n[rst2man-indent\\n[rst2man-indent-level]]u .. -.TH "SALT-PROXY" "1" "Generated on April 18, 2023 at 08:56:37 PM UTC." "3006.0" "Salt" +.TH "SALT-PROXY" "1" "Generated on May 05, 2023 at 05:45:04 PM UTC." "3006.1" "Salt" .SH NAME salt-proxy \- salt-proxy Documentation .sp diff --git a/doc/man/salt-run.1 b/doc/man/salt-run.1 index 69b75f76854..d4fbc53dc98 100644 --- a/doc/man/salt-run.1 +++ b/doc/man/salt-run.1 @@ -27,7 +27,7 @@ level margin: \\n[rst2man-indent\\n[rst2man-indent-level]] .\" new: \\n[rst2man-indent\\n[rst2man-indent-level]] .in \\n[rst2man-indent\\n[rst2man-indent-level]]u .. -.TH "SALT-RUN" "1" "Generated on April 18, 2023 at 08:56:37 PM UTC." "3006.0" "Salt" +.TH "SALT-RUN" "1" "Generated on May 05, 2023 at 05:45:04 PM UTC." "3006.1" "Salt" .SH NAME salt-run \- salt-run Documentation .sp diff --git a/doc/man/salt-ssh.1 b/doc/man/salt-ssh.1 index d3d9749f531..3519bb75e1f 100644 --- a/doc/man/salt-ssh.1 +++ b/doc/man/salt-ssh.1 @@ -27,7 +27,7 @@ level margin: \\n[rst2man-indent\\n[rst2man-indent-level]] .\" new: \\n[rst2man-indent\\n[rst2man-indent-level]] .in \\n[rst2man-indent\\n[rst2man-indent-level]]u .. -.TH "SALT-SSH" "1" "Generated on April 18, 2023 at 08:56:37 PM UTC." "3006.0" "Salt" +.TH "SALT-SSH" "1" "Generated on May 05, 2023 at 05:45:04 PM UTC." 
"3006.1" "Salt" .SH NAME salt-ssh \- salt-ssh Documentation .SH SYNOPSIS diff --git a/doc/man/salt-syndic.1 b/doc/man/salt-syndic.1 index 9480755a145..3b50a769071 100644 --- a/doc/man/salt-syndic.1 +++ b/doc/man/salt-syndic.1 @@ -27,7 +27,7 @@ level margin: \\n[rst2man-indent\\n[rst2man-indent-level]] .\" new: \\n[rst2man-indent\\n[rst2man-indent-level]] .in \\n[rst2man-indent\\n[rst2man-indent-level]]u .. -.TH "SALT-SYNDIC" "1" "Generated on April 18, 2023 at 08:56:37 PM UTC." "3006.0" "Salt" +.TH "SALT-SYNDIC" "1" "Generated on May 05, 2023 at 05:45:04 PM UTC." "3006.1" "Salt" .SH NAME salt-syndic \- salt-syndic Documentation .sp diff --git a/doc/man/salt.1 b/doc/man/salt.1 index 0d522736c30..1c6873a02e1 100644 --- a/doc/man/salt.1 +++ b/doc/man/salt.1 @@ -27,7 +27,7 @@ level margin: \\n[rst2man-indent\\n[rst2man-indent-level]] .\" new: \\n[rst2man-indent\\n[rst2man-indent-level]] .in \\n[rst2man-indent\\n[rst2man-indent-level]]u .. -.TH "SALT" "1" "Generated on April 18, 2023 at 08:56:37 PM UTC." "3006.0" "Salt" +.TH "SALT" "1" "Generated on May 05, 2023 at 05:45:04 PM UTC." "3006.1" "Salt" .SH NAME salt \- salt .SH SYNOPSIS diff --git a/doc/man/salt.7 b/doc/man/salt.7 index 7a08e3aac1b..d50a2d55401 100644 --- a/doc/man/salt.7 +++ b/doc/man/salt.7 @@ -27,7 +27,7 @@ level margin: \\n[rst2man-indent\\n[rst2man-indent-level]] .\" new: \\n[rst2man-indent\\n[rst2man-indent-level]] .in \\n[rst2man-indent\\n[rst2man-indent-level]]u .. -.TH "SALT" "7" "Generated on April 18, 2023 at 08:56:37 PM UTC." "3006.0" "Salt" +.TH "SALT" "7" "Generated on May 05, 2023 at 05:45:04 PM UTC." "3006.1" "Salt" .SH NAME salt \- Salt Documentation .SH SALT PROJECT @@ -1346,7 +1346,7 @@ You can enable or disable test groups locally by passing their respected flag: \-\-flaky\-jail \- Test that need to be temporarily skipped. .UNINDENT .sp -In Your PR, you can enable or disable test groups by setting a label. +In your PR, you can enable or disable test groups by setting a label. All fast, slow, and core tests specified in the change file will always run. .INDENT 0.0 .IP \(bu 2 @@ -1380,7 +1380,7 @@ But that advice is backwards for the changelog. We follow the our changelog, and use towncrier to generate it for each release. As a contributor, all that means is that you need to add a file to the \fBsalt/changelog\fP directory, using the \fB.\fP format. For -instanch, if you fixed issue 123, you would do: +instance, if you fixed issue 123, you would do: .INDENT 0.0 .INDENT 3.5 .sp @@ -7377,6 +7377,7 @@ Default: \fB3600\fP If and only if a master has set \fBpillar_cache: True\fP, the cache TTL controls the amount of time, in seconds, before the cache is considered invalid by a master and a fresh pillar is recompiled and stored. +The cache TTL does not prevent pillar cache from being refreshed before its TTL expires. .SS \fBpillar_cache_backend\fP .sp New in version 2015.8.8. @@ -11635,10 +11636,7 @@ Changed in version 2018.3.0: Renamed from \fBenvironment\fP to \fBsaltenv\fP\&. ignored and \fBsaltenv\fP will be used. .sp -Normally the minion is not isolated to any single environment on the master -when running states, but the environment can be isolated on the minion side -by statically setting it. Remember that the recommended way to manage -environments is to isolate via the top file. +The default fileserver environment to use when copying files and applying states. 
.INDENT 0.0 .INDENT 3.5 .sp @@ -14934,6 +14932,7 @@ For reference, see: # If and only if a master has set \(ga\(gapillar_cache: True\(ga\(ga, the cache TTL controls the amount # of time, in seconds, before the cache is considered invalid by a master and a fresh # pillar is recompiled and stored. +# The cache TTL does not prevent pillar cache from being refreshed before its TTL expires. #pillar_cache_ttl: 3600 # If and only if a master has set \(gapillar_cache: True\(ga, one of several storage providers @@ -50676,7 +50675,7 @@ You can enable or disable test groups locally by passing there respected flag: \-\-flaky\-jail .UNINDENT .sp -In Your PR you can enable or disable test groups by setting a label. +In your PR you can enable or disable test groups by setting a label. All thought the fast, slow and core tests specified in the change file will always run. .INDENT 0.0 .IP \(bu 2 @@ -61394,7 +61393,7 @@ Add the following to \fB/srv/reactor/revert.sls\fP: .ft C revert\-file: local.state.apply: - \- tgt: {{ data[\(aqdata\(aq][\(aqid\(aq] }} + \- tgt: {{ data[\(aqid\(aq] }} \- arg: \- maintain_important_file .ft P @@ -61411,13 +61410,6 @@ to modify the watched file, it is important to ensure the state applied is also \fI\%idempotent\fP\&. .UNINDENT .UNINDENT -.sp -\fBNOTE:\fP -.INDENT 0.0 -.INDENT 3.5 -The expression \fB{{ data[\(aqdata\(aq][\(aqid\(aq] }}\fP \fI\%is correct\fP as it matches the event structure \fI\%shown above\fP\&. -.UNINDENT -.UNINDENT .SS State SLS .sp Create the state sls file referenced by the reactor sls file. This state file @@ -61838,6 +61830,14 @@ in \fI\%local reactions\fP, but as noted above this is not very user\-friendly. Therefore, the new config schema is recommended if the master is running a supported release. .sp +\fBNOTE:\fP +.INDENT 0.0 +.INDENT 3.5 +State ids of reactors for runners and wheels should all be unique. They can +overwrite each other when added to the async queue causing lost reactions. +.UNINDENT +.UNINDENT +.sp The below two examples are equivalent: .TS center; @@ -61909,6 +61909,14 @@ Like \fI\%runner reactions\fP, the old config schema called for wheel reactions to have arguments passed directly under the name of the \fI\%wheel function\fP (or in \fBarg\fP or \fBkwarg\fP parameters). .sp +\fBNOTE:\fP +.INDENT 0.0 +.INDENT 3.5 +State ids of reactors for runners and wheels should all be unique. They can +overwrite each other when added to the async queue causing lost reactions. 
+.UNINDENT +.UNINDENT +.sp The below two examples are equivalent: .TS center; @@ -193929,7 +193937,7 @@ Passes through all the parameters described in the \fI\%utils.http.query function\fP: .INDENT 7.0 .TP -.B salt.utils.http.query(url, method=\(aqGET\(aq, params=None, data=None, data_file=None, header_dict=None, header_list=None, header_file=None, username=None, password=None, auth=None, decode=False, decode_type=\(aqauto\(aq, status=False, headers=False, text=False, cookies=None, cookie_jar=None, cookie_format=\(aqlwp\(aq, persist_session=False, session_cookie_jar=None, data_render=False, data_renderer=None, header_render=False, header_renderer=None, template_dict=None, test=False, test_url=None, node=\(aqminion\(aq, port=80, opts=None, backend=None, ca_bundle=None, verify_ssl=None, cert=None, text_out=None, headers_out=None, decode_out=None, stream=False, streaming_callback=None, header_callback=None, handle=False, agent=\(aqSalt/3006.0\(aq, hide_fields=None, raise_error=True, formdata=False, formdata_fieldname=None, formdata_filename=None, decode_body=True, **kwargs) +.B salt.utils.http.query(url, method=\(aqGET\(aq, params=None, data=None, data_file=None, header_dict=None, header_list=None, header_file=None, username=None, password=None, auth=None, decode=False, decode_type=\(aqauto\(aq, status=False, headers=False, text=False, cookies=None, cookie_jar=None, cookie_format=\(aqlwp\(aq, persist_session=False, session_cookie_jar=None, data_render=False, data_renderer=None, header_render=False, header_renderer=None, template_dict=None, test=False, test_url=None, node=\(aqminion\(aq, port=80, opts=None, backend=None, ca_bundle=None, verify_ssl=None, cert=None, text_out=None, headers_out=None, decode_out=None, stream=False, streaming_callback=None, header_callback=None, handle=False, agent=\(aqSalt/3006.1\(aq, hide_fields=None, raise_error=True, formdata=False, formdata_fieldname=None, formdata_filename=None, decode_body=True, **kwargs) Query a resource, and decode the return data .UNINDENT .INDENT 7.0 @@ -280078,6 +280086,10 @@ proceess, as grains can affect which modules are available. .B refresh_pillar True Set to \fBFalse\fP to keep pillar data from being refreshed. +.TP +.B clean_pillar_cache +False +Set to \fBTrue\fP to refresh pillar cache. .UNINDENT .sp CLI Examples: @@ -280273,7 +280285,7 @@ salt \(aq*\(aq saltutil.signal_job 15 .UNINDENT .INDENT 0.0 .TP -.B salt.modules.saltutil.sync_all(saltenv=None, refresh=True, extmod_whitelist=None, extmod_blacklist=None) +.B salt.modules.saltutil.sync_all(saltenv=None, refresh=True, extmod_whitelist=None, extmod_blacklist=None, clean_pillar_cache=False) Changed in version 2015.8.11,2016.3.2: On masterless minions, pillar modules are now synced, and refreshed when \fBrefresh\fP is set to \fBTrue\fP\&. @@ -280323,6 +280335,10 @@ dictionary of modules to sync based on type .B extmod_blacklist None dictionary of modules to blacklist based on type +.TP +.B clean_pillar_cache +False +Set to \fBTrue\fP to refresh pillar cache. .UNINDENT .sp CLI Examples: @@ -280519,7 +280535,7 @@ salt \(aq*\(aq saltutil.sync_executors saltenv=base,dev .UNINDENT .INDENT 0.0 .TP -.B salt.modules.saltutil.sync_grains(saltenv=None, refresh=True, extmod_whitelist=None, extmod_blacklist=None) +.B salt.modules.saltutil.sync_grains(saltenv=None, refresh=True, extmod_whitelist=None, extmod_blacklist=None, clean_pillar_cache=False) New in version 0.10.0. 
.sp @@ -280547,6 +280563,10 @@ comma\-separated list of modules to sync .B extmod_blacklist None comma\-separated list of modules to blacklist based on type +.TP +.B clean_pillar_cache +False +Set to \fBTrue\fP to refresh pillar cache. .UNINDENT .sp CLI Examples: @@ -280815,7 +280835,7 @@ salt \(aq*\(aq saltutil.sync_output saltenv=base,dev .UNINDENT .INDENT 0.0 .TP -.B salt.modules.saltutil.sync_pillar(saltenv=None, refresh=True, extmod_whitelist=None, extmod_blacklist=None) +.B salt.modules.saltutil.sync_pillar(saltenv=None, refresh=True, extmod_whitelist=None, extmod_blacklist=None, clean_pillar_cache=False) New in version 2015.8.11,2016.3.2. .sp @@ -280837,6 +280857,10 @@ comma\-separated list of modules to sync .B extmod_blacklist None comma\-separated list of modules to blacklist based on type +.TP +.B clean_pillar_cache +False +Set to \fBTrue\fP to refresh pillar cache. .UNINDENT .sp \fBNOTE:\fP @@ -325208,6 +325232,14 @@ User .sp Default is \fBMachine\fP +.UNINDENT +.TP +.B Raises +.INDENT 7.0 +.IP \(bu 2 +\fI\%SaltInvocationError\fP \-\- Invalid policy_class +.IP \(bu 2 +\fI\%CommandExecutionError\fP \-\- On failure .UNINDENT .TP .B Returns @@ -325216,9 +325248,6 @@ None: Key/value not present .TP .B Return type \fI\%bool\fP -.TP -.B Raises -\fI\%SaltInvocationError\fP \-\- Invalid policy_class .UNINDENT .sp CLI Example: @@ -325264,6 +325293,14 @@ User .sp Default is \fBMachine\fP +.UNINDENT +.TP +.B Raises +.INDENT 7.0 +.IP \(bu 2 +\fI\%SaltInvocationError\fP \-\- Invalid policy_class +.IP \(bu 2 +\fI\%CommandExecutionError\fP \-\- On failure .UNINDENT .TP .B Returns @@ -325272,9 +325309,6 @@ None: If already disabled .TP .B Return type \fI\%bool\fP -.TP -.B Raises -\fI\%SaltInvocationError\fP \-\- Invalid policy_class .UNINDENT .sp CLI Example: @@ -325470,12 +325504,6 @@ Default is \fBMachine\fP .UNINDENT .TP -.B Returns -\fBTrue\fP if successful, otherwise \fBFalse\fP -.TP -.B Return type -\fI\%bool\fP -.TP .B Raises .INDENT 7.0 .IP \(bu 2 @@ -325485,6 +325513,12 @@ Default is \fBMachine\fP .IP \(bu 2 \fI\%SaltInvocationError\fP \-\- v_data doesn\(aqt match v_type .UNINDENT +.TP +.B Returns +\fBTrue\fP if successful, otherwise \fBFalse\fP +.TP +.B Return type +\fI\%bool\fP .UNINDENT .sp CLI Example: @@ -325533,10 +325567,18 @@ Default is \fBMachine\fP .UNINDENT .TP .B Raises +.INDENT 7.0 +.IP \(bu 2 \fI\%SaltInvocationError\fP \-\- Invalid policy class +.IP \(bu 2 +\fI\%CommandExecutionError\fP \-\- On failure +.UNINDENT .TP .B Returns -None +True if successful +.TP +.B Return type +\fI\%bool\fP .UNINDENT .sp CLI Example: @@ -412564,6 +412606,8 @@ tomdroid\-src\-0.7.3.tar.gz: .fi .UNINDENT .UNINDENT +.sp +source_hash is ignored if the file hosted is not on a HTTP, HTTPS or FTP server. .UNINDENT .UNINDENT .INDENT 7.0 @@ -444129,6 +444173,13 @@ If your service states are running into trouble with init system detection, please see the \fI\%Overriding Virtual Module Providers\fP section of Salt\(aqs module documentation to work around possible errors. .sp +For services managed by systemd, the systemd_service module includes a built\-in +feature to reload the daemon when unit files are changed or extended. This +feature is used automatically by the service state and the systemd_service +module when running on a systemd minion, so there is no need to set up your own +methods of reloading the daemon. If you need to manually reload the daemon for +some reason, you can use the \fI\%systemd_service.systemctl_reload\fP function provided by Salt. 
+.sp \fBNOTE:\fP .INDENT 0.0 .INDENT 3.5 @@ -466637,7 +466688,7 @@ You can enable or disable test groups locally by passing their respected flag: \-\-flaky\-jail \- Test that need to be temporarily skipped. .UNINDENT .sp -In Your PR, you can enable or disable test groups by setting a label. +In your PR, you can enable or disable test groups by setting a label. All fast, slow, and core tests specified in the change file will always run. .INDENT 0.0 .IP \(bu 2 @@ -466671,7 +466722,7 @@ But that advice is backwards for the changelog. We follow the our changelog, and use towncrier to generate it for each release. As a contributor, all that means is that you need to add a file to the \fBsalt/changelog\fP directory, using the \fB.\fP format. For -instanch, if you fixed issue 123, you would do: +instance, if you fixed issue 123, you would do: .INDENT 0.0 .INDENT 3.5 .sp @@ -476582,6 +476633,66 @@ Update to \fBmarkdown\-it\-py==2.2.0\fP due to: .UNINDENT .UNINDENT .UNINDENT +(release\-3006.1)= +.SS Salt 3006.1 release notes +.SS Changelog +.SS Fixed +.INDENT 0.0 +.IP \(bu 2 +Check that the return data from the cloud create function is a dictionary before attempting to pull values out. \fI\%#61236\fP +.IP \(bu 2 +Ensure NamedLoaderContext\(aqs have their value() used if passing to other modules \fI\%#62477\fP +.IP \(bu 2 +add documentation note about reactor state ids. \fI\%#63589\fP +.IP \(bu 2 +Added support for \fBtest=True\fP to the \fBfile.cached\fP state module \fI\%#63785\fP +.IP \(bu 2 +Updated \fBsource_hash\fP documentation and added a log warning when \fBsource_hash\fP is used with a source other than \fBhttp\fP, \fBhttps\fP and \fBftp\fP\&. \fI\%#63810\fP +.IP \(bu 2 +Fixed clear pillar cache on every highstate and added clean_pillar_cache=False to saltutil functions. \fI\%#64081\fP +.IP \(bu 2 +Fix dmsetup device names with hyphen being picked up. \fI\%#64082\fP +.IP \(bu 2 +Update all the scheduler functions to include a fire_event argument which will determine whether to fire the completion event onto the event bus. +This event is only used when these functions are called via the schedule execution modules. +Update all the calls to the schedule related functions in the deltaproxy proxy minion to include fire_event=False, as the event bus is not available when these functions are called. \fI\%#64102\fP, \fI\%#64103\fP +.IP \(bu 2 +Default to a 0 timeout if none is given for the terraform roster to avoid \fB\-o ConnectTimeout=None\fP when using \fBsalt\-ssh\fP \fI\%#64109\fP +.IP \(bu 2 +Disable class level caching of the file client on \fBSaltCacheLoader\fP and properly use context managers to take care of initialization and termination of the file client. \fI\%#64111\fP +.IP \(bu 2 +Fixed several file client uses which were not properly terminating it by switching to using it as a context manager +whenever possible or making sure \fB\&.destroy()\fP was called when using a context manager was not possible. \fI\%#64113\fP +.IP \(bu 2 +Fix running \fI\%setup.py\fP when passing in \-\-salt\-config\-dir and \-\-salt\-cache\-dir arguments. 
\fI\%#64114\fP
+.IP \(bu 2
+Moved /etc/salt/proxy and /lib/systemd/system/salt\-proxy@.service to the salt\-minion DEB package \fI\%#64117\fP
+.IP \(bu 2
+Stop passing \fB**kwargs\fP and be explicit about the keyword arguments to pass, namely, to \fBcp.cache_file\fP call in \fBsalt.states.pkg\fP \fI\%#64118\fP
+.IP \(bu 2
+lgpo_reg.set_value now returns \fBTrue\fP on success instead of \fBNone\fP \fI\%#64126\fP
+.IP \(bu 2
+Make salt user\(aqs home /opt/saltstack/salt \fI\%#64141\fP
+.IP \(bu 2
+Fix cmd.run doesn\(aqt output changes in test mode \fI\%#64150\fP
+.IP \(bu 2
+Move salt user and group creation to common package \fI\%#64158\fP
+.IP \(bu 2
+Fixed issue in salt\-cloud so that multiple masters specified in the cloud
+are written to the minion config properly \fI\%#64170\fP
+.IP \(bu 2
+Make sure the \fBsalt\-ssh\fP CLI calls its \fBfsclient.destroy()\fP method when done. \fI\%#64184\fP
+.IP \(bu 2
+Stop using the deprecated \fBsalt.transport.client\fP imports. \fI\%#64186\fP
+.IP \(bu 2
+Add a \fB\&.pth\fP to the Salt onedir env to ensure packages in extras are importable. Bump relenv to 0.12.3. \fI\%#64192\fP
+.IP \(bu 2
+Fix \fBlgpo_reg\fP state to work with User policy \fI\%#64200\fP
+.IP \(bu 2
+Cloud deployment directories are owned by salt user and group \fI\%#64204\fP
+.IP \(bu 2
+\fBlgpo_reg\fP state now enforces and reports changes to the registry \fI\%#64222\fP
+.UNINDENT
.sp
See \fI\%Install a release candidate\fP
for more information about installing an RC when one is available.
diff --git a/doc/man/spm.1 b/doc/man/spm.1
index b680a20ddab..90cc6e3d2d7 100644
--- a/doc/man/spm.1
+++ b/doc/man/spm.1
@@ -27,7 +27,7 @@ level margin: \\n[rst2man-indent\\n[rst2man-indent-level]]
.\" new: \\n[rst2man-indent\\n[rst2man-indent-level]]
.in \\n[rst2man-indent\\n[rst2man-indent-level]]u
..
-.TH "SPM" "1" "Generated on April 18, 2023 at 08:56:37 PM UTC." "3006.0" "Salt"
+.TH "SPM" "1" "Generated on May 05, 2023 at 05:45:04 PM UTC." "3006.1" "Salt"
.SH NAME
spm \- Salt Package Manager Command
.sp
diff --git a/doc/topics/releases/3006.1.md b/doc/topics/releases/3006.1.md
new file mode 100644
index 00000000000..2bf2dea1d31
--- /dev/null
+++ b/doc/topics/releases/3006.1.md
@@ -0,0 +1,52 @@
+(release-3006.1)=
+# Salt 3006.1 release notes
+
+
+
+
+
+
+
+
+## Changelog
+
+### Fixed
+
+- Check that the return data from the cloud create function is a dictionary before attempting to pull values out. [#61236](https://github.com/saltstack/salt/issues/61236)
+- Ensure NamedLoaderContext's have their value() used if passing to other modules [#62477](https://github.com/saltstack/salt/issues/62477)
+- add documentation note about reactor state ids. [#63589](https://github.com/saltstack/salt/issues/63589)
+- Added support for ``test=True`` to the ``file.cached`` state module [#63785](https://github.com/saltstack/salt/issues/63785)
+- Updated `source_hash` documentation and added a log warning when `source_hash` is used with a source other than `http`, `https` and `ftp`. [#63810](https://github.com/saltstack/salt/issues/63810)
+- Fixed clear pillar cache on every highstate and added clean_pillar_cache=False to saltutil functions. [#64081](https://github.com/saltstack/salt/issues/64081)
+- Fix dmsetup device names with hyphen being picked up. [#64082](https://github.com/saltstack/salt/issues/64082)
+- Update all the scheduler functions to include a fire_event argument which will determine whether to fire the completion event onto the event bus.
+ This event is only used when these functions are called via the schedule execution modules.
+ Update all the calls to the schedule related functions in the deltaproxy proxy minion to include fire_event=False, as the event bus is not available when these functions are called. [#64102](https://github.com/saltstack/salt/issues/64102), [#64103](https://github.com/saltstack/salt/issues/64103)
+- Default to a 0 timeout if none is given for the terraform roster to avoid `-o ConnectTimeout=None` when using `salt-ssh` [#64109](https://github.com/saltstack/salt/issues/64109)
+- Disable class level caching of the file client on `SaltCacheLoader` and properly use context managers to take care of initialization and termination of the file client. [#64111](https://github.com/saltstack/salt/issues/64111)
+- Fixed several file client uses which were not properly terminating it by switching to using it as a context manager
+ whenever possible or making sure `.destroy()` was called when using a context manager was not possible. [#64113](https://github.com/saltstack/salt/issues/64113)
+- Fix running setup.py when passing in --salt-config-dir and --salt-cache-dir arguments. [#64114](https://github.com/saltstack/salt/issues/64114)
+- Moved /etc/salt/proxy and /lib/systemd/system/salt-proxy@.service to the salt-minion DEB package [#64117](https://github.com/saltstack/salt/issues/64117)
+- Stop passing `**kwargs` and be explicit about the keyword arguments to pass, namely, to `cp.cache_file` call in `salt.states.pkg` [#64118](https://github.com/saltstack/salt/issues/64118)
+- lgpo_reg.set_value now returns ``True`` on success instead of ``None`` [#64126](https://github.com/saltstack/salt/issues/64126)
+- Make salt user's home /opt/saltstack/salt [#64141](https://github.com/saltstack/salt/issues/64141)
+- Fix cmd.run doesn't output changes in test mode [#64150](https://github.com/saltstack/salt/issues/64150)
+- Move salt user and group creation to common package [#64158](https://github.com/saltstack/salt/issues/64158)
+- Fixed issue in salt-cloud so that multiple masters specified in the cloud
+ are written to the minion config properly [#64170](https://github.com/saltstack/salt/issues/64170)
+- Make sure the `salt-ssh` CLI calls its `fsclient.destroy()` method when done. [#64184](https://github.com/saltstack/salt/issues/64184)
+- Stop using the deprecated `salt.transport.client` imports. [#64186](https://github.com/saltstack/salt/issues/64186)
+- Add a `.pth` to the Salt onedir env to ensure packages in extras are importable. Bump relenv to 0.12.3. [#64192](https://github.com/saltstack/salt/issues/64192)
+- Fix ``lgpo_reg`` state to work with User policy [#64200](https://github.com/saltstack/salt/issues/64200)
+- Cloud deployment directories are owned by salt user and group [#64204](https://github.com/saltstack/salt/issues/64204)
+- ``lgpo_reg`` state now enforces and reports changes to the registry [#64222](https://github.com/saltstack/salt/issues/64222)
diff --git a/pkg/debian/changelog b/pkg/debian/changelog
index b014ff40a01..035085104b3 100644
--- a/pkg/debian/changelog
+++ b/pkg/debian/changelog
@@ -1,3 +1,41 @@
+salt (3006.1) stable; urgency=medium
+
+
+ # Fixed
+
+ * Check that the return data from the cloud create function is a dictionary before attempting to pull values out. 
[#61236](https://github.com/saltstack/salt/issues/61236)
+ * Ensure NamedLoaderContext's have their value() used if passing to other modules [#62477](https://github.com/saltstack/salt/issues/62477)
+ * add documentation note about reactor state ids. [#63589](https://github.com/saltstack/salt/issues/63589)
+ * Added support for ``test=True`` to the ``file.cached`` state module [#63785](https://github.com/saltstack/salt/issues/63785)
+ * Updated `source_hash` documentation and added a log warning when `source_hash` is used with a source other than `http`, `https` and `ftp`. [#63810](https://github.com/saltstack/salt/issues/63810)
+ * Fixed clear pillar cache on every highstate and added clean_pillar_cache=False to saltutil functions. [#64081](https://github.com/saltstack/salt/issues/64081)
+ * Fix dmsetup device names with hyphen being picked up. [#64082](https://github.com/saltstack/salt/issues/64082)
+ * Update all the scheduler functions to include a fire_event argument which will determine whether to fire the completion event onto the event bus.
+ This event is only used when these functions are called via the schedule execution modules.
+ Update all the calls to the schedule related functions in the deltaproxy proxy minion to include fire_event=False, as the event bus is not available when these functions are called. [#64102](https://github.com/saltstack/salt/issues/64102), [#64103](https://github.com/saltstack/salt/issues/64103)
+ * Default to a 0 timeout if none is given for the terraform roster to avoid `-o ConnectTimeout=None` when using `salt-ssh` [#64109](https://github.com/saltstack/salt/issues/64109)
+ * Disable class level caching of the file client on `SaltCacheLoader` and properly use context managers to take care of initialization and termination of the file client. [#64111](https://github.com/saltstack/salt/issues/64111)
+ * Fixed several file client uses which were not properly terminating it by switching to using it as a context manager
+ whenever possible or making sure `.destroy()` was called when using a context manager was not possible. [#64113](https://github.com/saltstack/salt/issues/64113)
+ * Fix running setup.py when passing in --salt-config-dir and --salt-cache-dir arguments. [#64114](https://github.com/saltstack/salt/issues/64114)
+ * Moved /etc/salt/proxy and /lib/systemd/system/salt-proxy@.service to the salt-minion DEB package [#64117](https://github.com/saltstack/salt/issues/64117)
+ * Stop passing `**kwargs` and be explicit about the keyword arguments to pass, namely, to `cp.cache_file` call in `salt.states.pkg` [#64118](https://github.com/saltstack/salt/issues/64118)
+ * lgpo_reg.set_value now returns ``True`` on success instead of ``None`` [#64126](https://github.com/saltstack/salt/issues/64126)
+ * Make salt user's home /opt/saltstack/salt [#64141](https://github.com/saltstack/salt/issues/64141)
+ * Fix cmd.run doesn't output changes in test mode [#64150](https://github.com/saltstack/salt/issues/64150)
+ * Move salt user and group creation to common package [#64158](https://github.com/saltstack/salt/issues/64158)
+ * Fixed issue in salt-cloud so that multiple masters specified in the cloud
+ are written to the minion config properly [#64170](https://github.com/saltstack/salt/issues/64170)
+ * Make sure the `salt-ssh` CLI calls its `fsclient.destroy()` method when done. [#64184](https://github.com/saltstack/salt/issues/64184)
+ * Stop using the deprecated `salt.transport.client` imports. 
[#64186](https://github.com/saltstack/salt/issues/64186) + * Add a `.pth` to the Salt onedir env to ensure packages in extras are importable. Bump relenv to 0.12.3. [#64192](https://github.com/saltstack/salt/issues/64192) + * Fix ``lgpo_reg`` state to work with User policy [#64200](https://github.com/saltstack/salt/issues/64200) + * Cloud deployment directories are owned by salt user and group [#64204](https://github.com/saltstack/salt/issues/64204) + * ``lgpo_reg`` state now enforces and reports changes to the registry [#64222](https://github.com/saltstack/salt/issues/64222) + + + -- Salt Project Packaging Fri, 05 May 2023 17:44:35 +0000 + salt (3006.0) stable; urgency=medium diff --git a/pkg/rpm/salt.spec b/pkg/rpm/salt.spec index 3cf777f20bc..75b186f1cc1 100644 --- a/pkg/rpm/salt.spec +++ b/pkg/rpm/salt.spec @@ -25,7 +25,7 @@ %define fish_dir %{_datadir}/fish/vendor_functions.d Name: salt -Version: 3006.0 +Version: 3006.1 Release: 0 Summary: A parallel remote execution system Group: System Environment/Daemons @@ -496,6 +496,41 @@ fi %changelog +* Fri May 05 2023 Salt Project Packaging - 3006.1 + +# Fixed + +- Check that the return data from the cloud create function is a dictionary before attempting to pull values out. [#61236](https://github.com/saltstack/salt/issues/61236) +- Ensure NamedLoaderContext's have their value() used if passing to other modules [#62477](https://github.com/saltstack/salt/issues/62477) +- add documentation note about reactor state ids. [#63589](https://github.com/saltstack/salt/issues/63589) +- Added support for ``test=True`` to the ``file.cached`` state module [#63785](https://github.com/saltstack/salt/issues/63785) +- Updated `source_hash` documentation and added a log warning when `source_hash` is used with a source other than `http`, `https` and `ftp`. [#63810](https://github.com/saltstack/salt/issues/63810) +- Fixed clear pillar cache on every highstate and added clean_pillar_cache=False to saltutil functions. [#64081](https://github.com/saltstack/salt/issues/64081) +- Fix dmsetup device names with hyphen being picked up. [#64082](https://github.com/saltstack/salt/issues/64082) +- Update all the scheduler functions to include a fire_event argument which will determine whether to fire the completion event onto the event bus. + This event is only used when these functions are called via the schedule execution modules. + Update all the calls to the schedule related functions in the deltaproxy proxy minion to include fire_event=False, as the event bus is not available when these functions are called. [#64102](https://github.com/saltstack/salt/issues/64102), [#64103](https://github.com/saltstack/salt/issues/64103) +- Default to a 0 timeout if none is given for the terraform roster to avoid `-o ConnectTimeout=None` when using `salt-ssh` [#64109](https://github.com/saltstack/salt/issues/64109) +- Disable class level caching of the file client on `SaltCacheLoader` and properly use context managers to take care of initialization and termination of the file client. [#64111](https://github.com/saltstack/salt/issues/64111) +- Fixed several file client uses which were not properly terminating it by switching to using it as a context manager + whenever possible or making sure `.destroy()` was called when using a context manager was not possible. [#64113](https://github.com/saltstack/salt/issues/64113) +- Fix running setup.py when passing in --salt-config-dir and --salt-cache-dir arguments. 
[#64114](https://github.com/saltstack/salt/issues/64114)
+- Moved /etc/salt/proxy and /lib/systemd/system/salt-proxy@.service to the salt-minion DEB package [#64117](https://github.com/saltstack/salt/issues/64117)
+- Stop passing `**kwargs` and be explicit about the keyword arguments to pass, namely, to `cp.cache_file` call in `salt.states.pkg` [#64118](https://github.com/saltstack/salt/issues/64118)
+- lgpo_reg.set_value now returns ``True`` on success instead of ``None`` [#64126](https://github.com/saltstack/salt/issues/64126)
+- Make salt user's home /opt/saltstack/salt [#64141](https://github.com/saltstack/salt/issues/64141)
+- Fix cmd.run doesn't output changes in test mode [#64150](https://github.com/saltstack/salt/issues/64150)
+- Move salt user and group creation to common package [#64158](https://github.com/saltstack/salt/issues/64158)
+- Fixed issue in salt-cloud so that multiple masters specified in the cloud
+ are written to the minion config properly [#64170](https://github.com/saltstack/salt/issues/64170)
+- Make sure the `salt-ssh` CLI calls its `fsclient.destroy()` method when done. [#64184](https://github.com/saltstack/salt/issues/64184)
+- Stop using the deprecated `salt.transport.client` imports. [#64186](https://github.com/saltstack/salt/issues/64186)
+- Add a `.pth` to the Salt onedir env to ensure packages in extras are importable. Bump relenv to 0.12.3. [#64192](https://github.com/saltstack/salt/issues/64192)
+- Fix ``lgpo_reg`` state to work with User policy [#64200](https://github.com/saltstack/salt/issues/64200)
+- Cloud deployment directories are owned by salt user and group [#64204](https://github.com/saltstack/salt/issues/64204)
+- ``lgpo_reg`` state now enforces and reports changes to the registry [#64222](https://github.com/saltstack/salt/issues/64222)
+
+
 * Tue Apr 18 2023 Salt Project Packaging - 3006.0

 # Removed

From 44b2c0185537e6004770b21f19a5f0346f7e71c9 Mon Sep 17 00:00:00 2001
From: MKLeb
Date: Wed, 3 May 2023 13:41:41 -0400
Subject: [PATCH 121/121] Remove the `release_branches` check; the other rules
 we have set up lock it down well enough

---
 .github/workflows/release.yml                 | 11 -----------
 .github/workflows/staging.yml                 | 11 -----------
 .github/workflows/templates/layout.yml.jinja  |  1 -
 .github/workflows/templates/release.yml.jinja | 11 -----------
 .github/workflows/templates/staging.yml.jinja | 11 -----------
 5 files changed, 45 deletions(-)

diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
index 0dd338bdd8a..61fc1f5783e 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -44,17 +44,6 @@ jobs:
         require: admin
         username: ${{ github.triggering_actor }}

-      - name: Check Branch
-        run: |
-          echo "Trying to run the staging workflow from branch ${{ github.ref_name }}"
-          if [ "${{ contains(fromJSON('["master", "3006.x"]'), github.ref_name) }}" != "true" ]; then
-            echo "Running the staging workflow from the ${{ github.ref_name }} branch is not allowed"
-            echo "Allowed branches: master, 3006.x"
-            exit 1
-          else
-            echo "Allowed to release from branch ${{ github.ref_name }}"
-          fi
-
   prepare-workflow:
     name: Prepare Workflow Run
     runs-on:
diff --git a/.github/workflows/staging.yml b/.github/workflows/staging.yml
index 53f5fd62454..2088976ec31 100644
--- a/.github/workflows/staging.yml
+++ b/.github/workflows/staging.yml
@@ -57,17 +57,6 @@ jobs:
         require: admin
         username: ${{ github.triggering_actor }}

-      - name: Check Branch
-        run: |
-          echo "Trying to run the staging workflow from branch ${{ github.ref_name }}"
-          if [ "${{ 
contains(fromJSON('["master", "3006.x"]'), github.ref_name) }}" != "true" ]; then - echo "Running the staging workflow from the ${{ github.ref_name }} branch is not allowed" - echo "Allowed branches: master, 3006.x" - exit 1 - else - echo "Allowed to release from branch ${{ github.ref_name }}" - fi - prepare-workflow: name: Prepare Workflow Run runs-on: ubuntu-latest diff --git a/.github/workflows/templates/layout.yml.jinja b/.github/workflows/templates/layout.yml.jinja index 4e0fa686e3e..59c2493b485 100644 --- a/.github/workflows/templates/layout.yml.jinja +++ b/.github/workflows/templates/layout.yml.jinja @@ -9,7 +9,6 @@ <%- set skip_junit_reports_check = skip_junit_reports_check|default("${{ github.event_name == 'pull_request' }}") %> <%- set gpg_key_id = "64CBBC8173D76B3F" %> <%- set prepare_actual_release = prepare_actual_release | default(False) %> -<%- set release_branches = ["master", "3006.x"] %> --- <%- block name %> name: <{ workflow_name }> diff --git a/.github/workflows/templates/release.yml.jinja b/.github/workflows/templates/release.yml.jinja index ad651fcfaae..f5b3a456963 100644 --- a/.github/workflows/templates/release.yml.jinja +++ b/.github/workflows/templates/release.yml.jinja @@ -61,17 +61,6 @@ permissions: require: admin username: ${{ github.triggering_actor }} - - name: Check Branch - run: | - echo "Trying to run the staging workflow from branch ${{ github.ref_name }}" - if [ "${{ contains(fromJSON('<{ release_branches|tojson }>'), github.ref_name) }}" != "true" ]; then - echo "Running the staging workflow from the ${{ github.ref_name }} branch is not allowed" - echo "Allowed branches: <{ release_branches|join(', ') }>" - exit 1 - else - echo "Allowed to release from branch ${{ github.ref_name }}" - fi - <%- endblock pre_jobs %> diff --git a/.github/workflows/templates/staging.yml.jinja b/.github/workflows/templates/staging.yml.jinja index 548faa7a5e2..07c212a9d6c 100644 --- a/.github/workflows/templates/staging.yml.jinja +++ b/.github/workflows/templates/staging.yml.jinja @@ -71,17 +71,6 @@ concurrency: require: admin username: ${{ github.triggering_actor }} - - name: Check Branch - run: | - echo "Trying to run the staging workflow from branch ${{ github.ref_name }}" - if [ "${{ contains(fromJSON('<{ release_branches|tojson }>'), github.ref_name) }}" != "true" ]; then - echo "Running the staging workflow from the ${{ github.ref_name }} branch is not allowed" - echo "Allowed branches: <{ release_branches|join(', ') }>" - exit 1 - else - echo "Allowed to release from branch ${{ github.ref_name }}" - fi - <%- endblock pre_jobs %>
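
Note on the removal above: branch gating can also be expressed declaratively
rather than as a shell step. The sketch below is illustrative only and is not
part of this patch; the job name and branch list are assumptions. A job-level
`if:` condition is evaluated before the job starts, and when it is false the
job, and any job that `needs` it, is skipped:

    jobs:
      check-branch:
        name: Check Branch
        runs-on: ubuntu-latest
        # Guard: only run (and allow dependent jobs to run) from allowed branches.
        if: contains(fromJSON('["master", "3006.x"]'), github.ref_name)
        steps:
          - run: echo "Allowed to release from branch ${{ github.ref_name }}"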