Merge 3006.x into master

Conflicts:
* doc/ref/configuration/minion.rst
Pedro Algarvio 2023-04-28 10:20:04 +01:00
commit c33b56281b
No known key found for this signature in database
GPG key ID: BB36BF6584A298FF
70 changed files with 674 additions and 420 deletions


@ -395,7 +395,7 @@ jobs:
- name: Run Flaky Tests
id: run-flaky-tests
if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && steps.get-test-flags.outputs.flaky_jail_tests == 'false' }}
if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && steps.get-test-flags.outputs.flaky_jail_tests == 'true' }}
env:
SKIP_REQUIREMENTS_INSTALL: "1"
PRINT_TEST_SELECTION: "0"


@ -381,7 +381,7 @@ jobs:
- name: Run Flaky Tests
id: run-flaky-tests
if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && steps.get-test-flags.outputs.flaky_jail_tests == 'false' }}
if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && steps.get-test-flags.outputs.flaky_jail_tests == 'true' }}
run: |
tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \
--nox-session=${{ env.NOX_SESSION }} --rerun-failures ${{ inputs.distro-slug }} \

1
changelog/62477.fixed.md Normal file

@ -0,0 +1 @@
Ensure NamedLoaderContext's have their value() used if passing to other modules
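A minimal sketch of the pattern this fix enforces, mirroring the _gather_pillar hunks later in this diff; it assumes the code runs inside a Salt execution module where __opts__ and __grains__ are injected, and the pillarenv keyword plus the compile_pillar() call are filled in as assumptions:

import salt.pillar

def _gather_pillar(pillarenv, pillar_override):
    # __grains__ is a NamedLoaderContext here; calling .value() hands the
    # real grains dict to salt.pillar.get_pillar instead of the loader
    # wrapper object, which other modules do not know how to handle.
    pillar = salt.pillar.get_pillar(
        __opts__,
        __grains__.value(),
        __opts__["id"],
        __opts__["saltenv"],
        pillar_override=pillar_override,
        pillarenv=pillarenv,
    )
    return pillar.compile_pillar()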

1
changelog/63589.fixed.md Normal file

@ -0,0 +1 @@
Add documentation note about reactor state ids.

1
changelog/64082.fixed.md Normal file

@ -0,0 +1 @@
Fix dmsetup device names with hyphen being picked up.
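The corresponding cryptdev change later in this diff widens the devname group from \w+ to \S+ so hyphenated device-mapper names are captured whole. A quick standalone check of the new pattern:

import re

# New pattern from this diff: \S+ allows hyphens in the device name.
out_regex = re.compile(r"(?P<devname>\S+)\s+\((?P<major>\d+), (?P<minor>\d+)\)")

match = out_regex.search("my-device (253, 1)")
assert match.group("devname") == "my-device"  # the old \w+ group could not span the hyphen
print(match.groupdict())  # {'devname': 'my-device', 'major': '253', 'minor': '1'}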

1
changelog/64117.fixed.md Normal file

@ -0,0 +1 @@
Moved /etc/salt/proxy and /lib/systemd/system/salt-proxy@.service to the salt-minion DEB package

1
changelog/64118.fixed.md Normal file

@ -0,0 +1 @@
Stop passing `**kwargs` and be explicit about the keyword arguments to pass, namely, to `cp.cache_file` call in `salt.states.pkg`
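The resulting call shape, used in the pkg state and win_pkg hunks below, forwards only the keyword arguments cp.cache_file actually understands. A minimal sketch, assuming __salt__ is injected by the loader; the helper name is illustrative, not from the diff:

def _cache_source(source, **kwargs):
    # saltenv is named exactly once, and nothing else from **kwargs leaks
    # through to cp.cache_file.
    return __salt__["cp.cache_file"](
        source,
        saltenv=kwargs.get("saltenv", "base"),
        verify_ssl=kwargs.get("verify_ssl", True),
    )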

1
changelog/64126.fixed.md Normal file

@ -0,0 +1 @@
lgpo_reg.set_value now returns ``True`` on success instead of ``None``
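With the module change shown further down, callers can now branch on the result. A short usage sketch, assuming a Windows minion where the module loads as lgpo_reg and __salt__ is injected by the loader; key and value names are illustrative:

ok = __salt__["lgpo_reg.set_value"](key="SOFTWARE\\MyKey", v_name="MyValue", v_data="1")
assert ok is True  # previously the call returned None even on success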

2
changelog/64170.fixed.md Normal file

@ -0,0 +1,2 @@
Fixed issue in salt-cloud so that multiple masters specified in the cloud
are written to the minion config properly
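The fix adds a small helper, shown in the deploy_windows hunks below, that collapses a list of masters into the comma-delimited form the Windows installer arguments expect:

def _format_master_param(master):
    """
    If the master is a list, convert it to a comma-delimited string;
    otherwise return it unchanged.
    """
    if isinstance(master, list):
        return ",".join(master)
    return master

assert _format_master_param(["master1", "master2"]) == "master1,master2"
assert _format_master_param("single_master") == "single_master"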


@ -2438,7 +2438,7 @@ enabled and can be disabled by changing this value to ``False``.
``saltenv`` will take its value. If both are used, ``environment`` will be
ignored and ``saltenv`` will be used.
The default fileserver environment to use when copy files and applying states.
The default fileserver environment to use when copying files and applying states.
.. code-block:: yaml


@ -212,6 +212,10 @@ in :ref:`local reactions <reactor-local>`, but as noted above this is not very
user-friendly. Therefore, the new config schema is recommended if the master
is running a supported release.
.. note::
State ids of reactors for runners and wheels should all be unique. They can
overwrite each other when added to the async queue, causing lost reactions.
The below two examples are equivalent:
+-------------------------------------------------+-------------------------------------------------+
@ -248,6 +252,10 @@ Like :ref:`runner reactions <reactor-runner>`, the old config schema called for
wheel reactions to have arguments passed directly under the name of the
:ref:`wheel function <all-salt.wheel>` (or in ``arg`` or ``kwarg`` parameters).
.. note::
State ids of reactors for runners and wheels should all be unique. They can
overwrite each other when added to the async queue, causing lost reactions.
The below two examples are equivalent:
+-----------------------------------+---------------------------------+


@ -1,7 +1,5 @@
pkg/common/salt-proxy@.service /lib/systemd/system
conf/roster /etc/salt
conf/cloud /etc/salt
conf/proxy /etc/salt
pkg/common/fish-completions/salt-cp.fish /usr/share/fish/vendor_completions.d
pkg/common/fish-completions/salt-call.fish /usr/share/fish/vendor_completions.d
pkg/common/fish-completions/salt-syndic.fish /usr/share/fish/vendor_completions.d


@ -1,2 +1,4 @@
conf/minion /etc/salt
conf/proxy /etc/salt
pkg/common/salt-minion.service /lib/systemd/system
pkg/common/salt-proxy@.service /lib/systemd/system


@ -36,7 +36,7 @@ python-etcd>0.4.2
pyvmomi
requests
rfc3987
sqlparse>=0.4.2
sqlparse>=0.4.4
strict_rfc3339>=0.7
toml
vcert~=0.7.0; sys_platform != 'win32'


@ -838,7 +838,7 @@ smbprotocol==1.10.1
# pypsexec
smmap==4.0.0
# via gitdb
sqlparse==0.4.2
sqlparse==0.4.4
# via -r requirements/static/ci/common.in
strict-rfc3339==0.7
# via -r requirements/static/ci/common.in


@ -820,7 +820,7 @@ six==1.16.0
# websocket-client
smmap==3.0.2
# via gitdb
sqlparse==0.4.2
sqlparse==0.4.4
# via -r requirements/static/ci/common.in
strict-rfc3339==0.7
# via -r requirements/static/ci/common.in


@ -820,7 +820,7 @@ six==1.16.0
# websocket-client
smmap==3.0.4
# via gitdb
sqlparse==0.4.2
sqlparse==0.4.4
# via -r requirements/static/ci/common.in
strict-rfc3339==0.7
# via -r requirements/static/ci/common.in


@ -808,7 +808,7 @@ slack-sdk==3.19.5
# via slack-bolt
smmap==4.0.0
# via gitdb
sqlparse==0.4.2
sqlparse==0.4.4
# via -r requirements/static/ci/common.in
strict-rfc3339==0.7
# via -r requirements/static/ci/common.in


@ -862,7 +862,7 @@ slack-sdk==3.19.5
# via slack-bolt
smmap==3.0.4
# via gitdb
sqlparse==0.4.2
sqlparse==0.4.4
# via -r requirements/static/ci/common.in
strict-rfc3339==0.7
# via -r requirements/static/ci/common.in


@ -369,7 +369,7 @@ six==1.15.0
# websocket-client
smmap==4.0.0
# via gitdb
sqlparse==0.4.2
sqlparse==0.4.4
# via -r requirements/static/ci/common.in
strict-rfc3339==0.7
# via -r requirements/static/ci/common.in


@ -784,7 +784,7 @@ six==1.16.0
# websocket-client
smmap==3.0.2
# via gitdb
sqlparse==0.4.2
sqlparse==0.4.4
# via -r requirements/static/ci/common.in
strict-rfc3339==0.7
# via -r requirements/static/ci/common.in


@ -792,7 +792,7 @@ six==1.16.0
# websocket-client
smmap==3.0.4
# via gitdb
sqlparse==0.4.2
sqlparse==0.4.4
# via -r requirements/static/ci/common.in
strict-rfc3339==0.7
# via -r requirements/static/ci/common.in


@ -834,7 +834,7 @@ slack-sdk==3.19.5
# via slack-bolt
smmap==3.0.4
# via gitdb
sqlparse==0.4.2
sqlparse==0.4.4
# via -r requirements/static/ci/common.in
strict-rfc3339==0.7
# via -r requirements/static/ci/common.in


@ -367,7 +367,7 @@ six==1.15.0
# websocket-client
smmap==4.0.0
# via gitdb
sqlparse==0.4.2
sqlparse==0.4.4
# via -r requirements/static/ci/common.in
strict-rfc3339==0.7
# via -r requirements/static/ci/common.in


@ -888,7 +888,7 @@ smbprotocol==1.10.1
# pypsexec
smmap==4.0.0
# via gitdb
sqlparse==0.4.2
sqlparse==0.4.4
# via -r requirements/static/ci/common.in
strict-rfc3339==0.7
# via -r requirements/static/ci/common.in


@ -864,7 +864,7 @@ six==1.16.0
# websocket-client
smmap==3.0.4
# via gitdb
sqlparse==0.4.2
sqlparse==0.4.4
# via -r requirements/static/ci/common.in
strict-rfc3339==0.7
# via -r requirements/static/ci/common.in


@ -859,7 +859,7 @@ slack-sdk==3.19.5
# via slack-bolt
smmap==4.0.0
# via gitdb
sqlparse==0.4.2
sqlparse==0.4.4
# via -r requirements/static/ci/common.in
strict-rfc3339==0.7
# via -r requirements/static/ci/common.in


@ -908,7 +908,7 @@ slack-sdk==3.19.5
# via slack-bolt
smmap==3.0.4
# via gitdb
sqlparse==0.4.2
sqlparse==0.4.4
# via -r requirements/static/ci/common.in
strict-rfc3339==0.7
# via -r requirements/static/ci/common.in


@ -384,7 +384,7 @@ six==1.15.0
# websocket-client
smmap==4.0.0
# via gitdb
sqlparse==0.4.2
sqlparse==0.4.4
# via -r requirements/static/ci/common.in
strict-rfc3339==0.7
# via -r requirements/static/ci/common.in


@ -877,7 +877,7 @@ smbprotocol==1.10.1
# pypsexec
smmap==4.0.0
# via gitdb
sqlparse==0.4.2
sqlparse==0.4.4
# via -r requirements/static/ci/common.in
strict-rfc3339==0.7
# via -r requirements/static/ci/common.in


@ -854,7 +854,7 @@ six==1.16.0
# websocket-client
smmap==3.0.4
# via gitdb
sqlparse==0.4.2
sqlparse==0.4.4
# via -r requirements/static/ci/common.in
strict-rfc3339==0.7
# via -r requirements/static/ci/common.in


@ -850,7 +850,7 @@ slack-sdk==3.19.5
# via slack-bolt
smmap==4.0.0
# via gitdb
sqlparse==0.4.2
sqlparse==0.4.4
# via -r requirements/static/ci/common.in
strict-rfc3339==0.7
# via -r requirements/static/ci/common.in


@ -896,7 +896,7 @@ slack-sdk==3.19.5
# via slack-bolt
smmap==3.0.4
# via gitdb
sqlparse==0.4.2
sqlparse==0.4.4
# via -r requirements/static/ci/common.in
strict-rfc3339==0.7
# via -r requirements/static/ci/common.in


@ -372,7 +372,7 @@ six==1.15.0
# websocket-client
smmap==4.0.0
# via gitdb
sqlparse==0.4.2
sqlparse==0.4.4
# via -r requirements/static/ci/common.in
strict-rfc3339==0.7
# via -r requirements/static/ci/common.in


@ -880,7 +880,7 @@ smbprotocol==1.10.1
# pypsexec
smmap==4.0.0
# via gitdb
sqlparse==0.4.2
sqlparse==0.4.4
# via -r requirements/static/ci/common.in
strict-rfc3339==0.7
# via -r requirements/static/ci/common.in


@ -857,7 +857,7 @@ six==1.16.0
# websocket-client
smmap==3.0.2
# via gitdb
sqlparse==0.4.2
sqlparse==0.4.4
# via -r requirements/static/ci/common.in
strict-rfc3339==0.7
# via -r requirements/static/ci/common.in


@ -857,7 +857,7 @@ six==1.16.0
# websocket-client
smmap==3.0.4
# via gitdb
sqlparse==0.4.2
sqlparse==0.4.4
# via -r requirements/static/ci/common.in
strict-rfc3339==0.7
# via -r requirements/static/ci/common.in


@ -851,7 +851,7 @@ slack-sdk==3.19.5
# via slack-bolt
smmap==4.0.0
# via gitdb
sqlparse==0.4.2
sqlparse==0.4.4
# via -r requirements/static/ci/common.in
strict-rfc3339==0.7
# via -r requirements/static/ci/common.in


@ -901,7 +901,7 @@ slack-sdk==3.19.5
# via slack-bolt
smmap==3.0.4
# via gitdb
sqlparse==0.4.2
sqlparse==0.4.4
# via -r requirements/static/ci/common.in
strict-rfc3339==0.7
# via -r requirements/static/ci/common.in


@ -373,7 +373,7 @@ six==1.15.0
# websocket-client
smmap==4.0.0
# via gitdb
sqlparse==0.4.2
sqlparse==0.4.4
# via -r requirements/static/ci/common.in
strict-rfc3339==0.7
# via -r requirements/static/ci/common.in


@ -9,6 +9,7 @@ import tarfile
import tempfile
from contextlib import closing
import salt.fileclient
import salt.utils.files
import salt.utils.json
import salt.utils.url
@ -28,65 +29,62 @@ def update_master_cache(states, saltenv="base"):
# Setup for copying states to gendir
gendir = tempfile.mkdtemp()
trans_tar = salt.utils.files.mkstemp()
if "cp.fileclient_{}".format(id(__opts__)) not in __context__:
__context__[
"cp.fileclient_{}".format(id(__opts__))
] = salt.fileclient.get_file_client(__opts__)
with salt.fileclient.get_file_client(__opts__) as cp_fileclient:
# generate cp.list_states output and save to gendir
cp_output = salt.utils.json.dumps(__salt__["cp.list_states"]())
cp_output_file = os.path.join(gendir, "cp_output.txt")
with salt.utils.files.fopen(cp_output_file, "w") as fp:
fp.write(cp_output)
# generate cp.list_states output and save to gendir
cp_output = salt.utils.json.dumps(__salt__["cp.list_states"]())
cp_output_file = os.path.join(gendir, "cp_output.txt")
with salt.utils.files.fopen(cp_output_file, "w") as fp:
fp.write(cp_output)
# cp state directories to gendir
already_processed = []
sls_list = salt.utils.args.split_input(states)
for state_name in sls_list:
# generate low data for each state and save to gendir
state_low_file = os.path.join(gendir, state_name + ".low")
state_low_output = salt.utils.json.dumps(
__salt__["state.show_low_sls"](state_name)
)
with salt.utils.files.fopen(state_low_file, "w") as fp:
fp.write(state_low_output)
state_name = state_name.replace(".", os.sep)
if state_name in already_processed:
log.debug("Already cached state for %s", state_name)
else:
file_copy_file = os.path.join(gendir, state_name + ".copy")
log.debug("copying %s to %s", state_name, gendir)
qualified_name = salt.utils.url.create(state_name, saltenv)
# Duplicate cp.get_dir to gendir
copy_result = __context__["cp.fileclient_{}".format(id(__opts__))].get_dir(
qualified_name, gendir, saltenv
# cp state directories to gendir
already_processed = []
sls_list = salt.utils.args.split_input(states)
for state_name in sls_list:
# generate low data for each state and save to gendir
state_low_file = os.path.join(gendir, state_name + ".low")
state_low_output = salt.utils.json.dumps(
__salt__["state.show_low_sls"](state_name)
)
if copy_result:
copy_result = [dir.replace(gendir, state_cache) for dir in copy_result]
copy_result_output = salt.utils.json.dumps(copy_result)
with salt.utils.files.fopen(file_copy_file, "w") as fp:
fp.write(copy_result_output)
already_processed.append(state_name)
with salt.utils.files.fopen(state_low_file, "w") as fp:
fp.write(state_low_output)
state_name = state_name.replace(".", os.sep)
if state_name in already_processed:
log.debug("Already cached state for %s", state_name)
else:
# If files were not copied, assume state.file.sls was given and just copy state
state_name = os.path.dirname(state_name)
file_copy_file = os.path.join(gendir, state_name + ".copy")
if state_name in already_processed:
log.debug("Already cached state for %s", state_name)
log.debug("copying %s to %s", state_name, gendir)
qualified_name = salt.utils.url.create(state_name, saltenv)
# Duplicate cp.get_dir to gendir
copy_result = cp_fileclient.get_dir(qualified_name, gendir, saltenv)
if copy_result:
copy_result = [
dir.replace(gendir, state_cache) for dir in copy_result
]
copy_result_output = salt.utils.json.dumps(copy_result)
with salt.utils.files.fopen(file_copy_file, "w") as fp:
fp.write(copy_result_output)
already_processed.append(state_name)
else:
qualified_name = salt.utils.url.create(state_name, saltenv)
copy_result = __context__[
"cp.fileclient_{}".format(id(__opts__))
].get_dir(qualified_name, gendir, saltenv)
if copy_result:
copy_result = [
dir.replace(gendir, state_cache) for dir in copy_result
]
copy_result_output = salt.utils.json.dumps(copy_result)
with salt.utils.files.fopen(file_copy_file, "w") as fp:
fp.write(copy_result_output)
already_processed.append(state_name)
# If files were not copied, assume state.file.sls was given and just copy state
state_name = os.path.dirname(state_name)
file_copy_file = os.path.join(gendir, state_name + ".copy")
if state_name in already_processed:
log.debug("Already cached state for %s", state_name)
else:
qualified_name = salt.utils.url.create(state_name, saltenv)
copy_result = cp_fileclient.get_dir(
qualified_name, gendir, saltenv
)
if copy_result:
copy_result = [
dir.replace(gendir, state_cache) for dir in copy_result
]
copy_result_output = salt.utils.json.dumps(copy_result)
with salt.utils.files.fopen(file_copy_file, "w") as fp:
fp.write(copy_result_output)
already_processed.append(state_name)
# turn gendir into tarball and remove gendir
try:
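Several modules in this commit (the block above, plus the docker and ansible hunks below) drop the fileclient cached in __context__ in favour of a scoped context manager. A minimal sketch of the pattern, assuming __opts__ is available from the loader and using an illustrative helper name:

import salt.fileclient

def _cache_from_fileserver(path, dest_dir, saltenv="base"):
    # The with-block guarantees the client is destroyed even on error,
    # instead of leaving a long-lived client parked in __context__.
    with salt.fileclient.get_file_client(__opts__) as fileclient:
        return fileclient.get_dir(path, dest_dir, saltenv)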


@ -834,7 +834,7 @@ def show_highstate(**kwargs):
opts = salt.utils.state.get_sls_opts(__opts__, **kwargs)
with salt.client.ssh.state.SSHHighState(
opts,
__pillar__,
__pillar__.value(),
__salt__,
__context__["fileclient"],
context=__context__.value(),


@ -849,7 +849,6 @@ class Client:
kwargs.pop("env")
kwargs["saltenv"] = saltenv
url_data = urllib.parse.urlparse(url)
sfn = self.cache_file(url, saltenv, cachedir=cachedir)
if not sfn or not os.path.exists(sfn):
return ""
@ -1165,13 +1164,8 @@ class RemoteClient(Client):
if not salt.utils.platform.is_windows():
hash_server, stat_server = self.hash_and_stat_file(path, saltenv)
try:
mode_server = stat_server[0]
except (IndexError, TypeError):
mode_server = None
else:
hash_server = self.hash_file(path, saltenv)
mode_server = None
# Check if file exists on server, before creating files and
# directories
@ -1214,13 +1208,8 @@ class RemoteClient(Client):
if dest2check and os.path.isfile(dest2check):
if not salt.utils.platform.is_windows():
hash_local, stat_local = self.hash_and_stat_file(dest2check, saltenv)
try:
mode_local = stat_local[0]
except (IndexError, TypeError):
mode_local = None
else:
hash_local = self.hash_file(dest2check, saltenv)
mode_local = None
if hash_local == hash_server:
return dest2check


@ -32,7 +32,7 @@ def loader_context(loader):
class NamedLoaderContext(collections.abc.MutableMapping):
"""
A NamedLoaderContext object is injected by the loader providing access to
Salt's 'magic dunders' (__salt__, __utils__, ect).
Salt's 'magic dunders' (__salt__, __utils__, etc).
"""
def __init__(self, name, loader_context, default=None):


@ -217,7 +217,7 @@ def _gather_pillar(pillarenv, pillar_override):
"""
pillar = salt.pillar.get_pillar(
__opts__,
__grains__,
__grains__.value(),
__opts__["id"],
__opts__["saltenv"],
pillar_override=pillar_override,


@ -41,7 +41,7 @@ def _gather_pillar(pillarenv, pillar_override):
"""
pillar = salt.pillar.get_pillar(
__opts__,
__grains__,
__grains__.value(),
__opts__["id"],
__opts__["saltenv"],
pillar_override=pillar_override,


@ -113,7 +113,7 @@ def active():
ret = {}
# TODO: This command should be extended to collect more information, such as UUID.
devices = __salt__["cmd.run_stdout"]("dmsetup ls --target crypt")
out_regex = re.compile(r"(?P<devname>\w+)\W+\((?P<major>\d+), (?P<minor>\d+)\)")
out_regex = re.compile(r"(?P<devname>\S+)\s+\((?P<major>\d+), (?P<minor>\d+)\)")
log.debug(devices)
for line in devices.split("\n"):


@ -6644,14 +6644,6 @@ def script_retcode(
)["retcode"]
def _mk_fileclient():
"""
Create a file client and add it to the context.
"""
if "cp.fileclient" not in __context__:
__context__["cp.fileclient"] = salt.fileclient.get_file_client(__opts__)
def _generate_tmp_path():
return os.path.join("/tmp", "salt.docker.{}".format(uuid.uuid4().hex[:6]))
@ -6665,11 +6657,10 @@ def _prepare_trans_tar(name, sls_opts, mods=None, pillar=None, extra_filerefs=""
# reuse it from salt.ssh, however this function should
# be somewhere else
refs = salt.client.ssh.state.lowstate_file_refs(chunks, extra_filerefs)
_mk_fileclient()
trans_tar = salt.client.ssh.state.prep_trans_tar(
__context__["cp.fileclient"], chunks, refs, pillar, name
)
return trans_tar
with salt.fileclient.get_file_client(__opts__) as fileclient:
return salt.client.ssh.state.prep_trans_tar(
fileclient, chunks, refs, pillar, name
)
def _compile_state(sls_opts, mods=None):


@ -137,9 +137,10 @@ def write_reg_pol(data, policy_class="Machine"):
Raises:
SaltInvocationError: Invalid policy class
CommandExecutionError: On failure
Returns:
None
bool: True if successful
CLI Example:
@ -175,7 +176,6 @@ def get_value(key, v_name, policy_class="Machine"):
file.
Args:
key (str): The registry key where the value name resides
v_name (str): The value name to retrieve
@ -228,7 +228,6 @@ def get_key(key, policy_class="Machine"):
Get all the values set in a key in the ``Registry.pol`` file.
Args:
key (str): The registry key where the values reside
policy_class (str): The registry class to read from. Can be one of the
@ -278,7 +277,6 @@ def set_value(
style policies. This is the equivalent of setting a policy to ``Enabled``
Args:
key (str): The registry key path
v_name (str): The registry value name within the key
@ -305,14 +303,14 @@ def set_value(
Default is ``Machine``
Returns:
bool: ``True`` if successful, otherwise ``False``
Raises:
SaltInvocationError: Invalid policy_class
SaltInvocationError: Invalid v_type
SaltInvocationError: v_data doesn't match v_type
Returns:
bool: ``True`` if successful, otherwise ``False``
CLI Example:
.. code-block:: bash
@ -385,7 +383,7 @@ def set_value(
write_reg_pol(pol_data)
salt.utils.win_reg.set_value(
return salt.utils.win_reg.set_value(
hive=hive,
key=key,
vname=v_name,
@ -401,7 +399,6 @@ def disable_value(key, v_name, policy_class="machine"):
to ``Disabled`` in the Group Policy editor (``gpedit.msc``)
Args:
key (str): The registry key path
v_name (str): The registry value name within the key
@ -415,13 +412,14 @@ def disable_value(key, v_name, policy_class="machine"):
Default is ``Machine``
Raises:
SaltInvocationError: Invalid policy_class
CommandExecutionError: On failure
Returns:
bool: ``True`` if successful, otherwise ``False``
None: If already disabled
Raises:
SaltInvocationError: Invalid policy_class
CLI Example:
.. code-block:: bash
@ -468,7 +466,7 @@ def disable_value(key, v_name, policy_class="machine"):
write_reg_pol(pol_data)
salt.utils.win_reg.delete_value(hive=hive, key=key, vname=v_name)
return salt.utils.win_reg.delete_value(hive=hive, key=key, vname=v_name)
def delete_value(key, v_name, policy_class="Machine"):
@ -478,7 +476,6 @@ def delete_value(key, v_name, policy_class="Machine"):
``Not Configured``.
Args:
key (str): The registry key path
v_name (str): The registry value name within the key
@ -492,13 +489,14 @@ def delete_value(key, v_name, policy_class="Machine"):
Default is ``Machine``
Raises:
SaltInvocationError: Invalid policy_class
CommandExecutionError: On failure
Returns:
bool: ``True`` if successful, otherwise ``False``
None: Key/value not present
Raises:
SaltInvocationError: Invalid policy_class
CLI Example:
.. code-block:: bash
@ -538,7 +536,7 @@ def delete_value(key, v_name, policy_class="Machine"):
write_reg_pol(pol_data)
salt.utils.win_reg.delete_value(hive=hive, key=key, vname=v_name)
return salt.utils.win_reg.delete_value(hive=hive, key=key, vname=v_name)
# This is for testing different settings and verifying that we are writing the


@ -1298,7 +1298,7 @@ def _repo_process_pkg_sls(filename, short_path_name, ret, successful_verbose):
successful_verbose[short_path_name] = []
def _get_source_sum(source_hash, file_path, saltenv, **kwargs):
def _get_source_sum(source_hash, file_path, saltenv, verify_ssl=True):
"""
Extract the hash sum, whether it is in a remote hash file, or just a string.
"""
@ -1315,7 +1315,7 @@ def _get_source_sum(source_hash, file_path, saltenv, **kwargs):
# The source_hash is a file on a server
try:
cached_hash_file = __salt__["cp.cache_file"](
source_hash, saltenv, verify_ssl=kwargs.get("verify_ssl", True)
source_hash, saltenv=saltenv, verify_ssl=verify_ssl
)
except MinionError as exc:
log.exception("Failed to cache %s", source_hash, exc_info=exc)
@ -1671,7 +1671,7 @@ def install(name=None, refresh=False, pkgs=None, **kwargs):
try:
cached_file = __salt__["cp.cache_file"](
cache_file,
saltenv,
saltenv=saltenv,
verify_ssl=kwargs.get("verify_ssl", True),
)
except MinionError as exc:
@ -1686,7 +1686,7 @@ def install(name=None, refresh=False, pkgs=None, **kwargs):
try:
cached_file = __salt__["cp.cache_file"](
cache_file,
saltenv,
saltenv=saltenv,
verify_ssl=kwargs.get("verify_ssl", True),
)
except MinionError as exc:
@ -1706,7 +1706,9 @@ def install(name=None, refresh=False, pkgs=None, **kwargs):
# It's not cached. Cache it, mate.
try:
cached_pkg = __salt__["cp.cache_file"](
installer, saltenv, verify_ssl=kwargs.get("verify_ssl", True)
installer,
saltenv=saltenv,
verify_ssl=kwargs.get("verify_ssl", True),
)
except MinionError as exc:
msg = "Failed to cache {}".format(installer)
@ -1730,7 +1732,7 @@ def install(name=None, refresh=False, pkgs=None, **kwargs):
try:
cached_pkg = __salt__["cp.cache_file"](
installer,
saltenv,
saltenv=saltenv,
verify_ssl=kwargs.get("verify_ssl", True),
)
except MinionError as exc:
@ -1754,7 +1756,12 @@ def install(name=None, refresh=False, pkgs=None, **kwargs):
# Compare the hash sums
source_hash = pkginfo[version_num].get("source_hash", False)
if source_hash:
source_sum = _get_source_sum(source_hash, cached_pkg, saltenv, **kwargs)
source_sum = _get_source_sum(
source_hash,
cached_pkg,
saltenv=saltenv,
verify_ssl=kwargs.get("verify_ssl", True),
)
log.debug(
"pkg.install: Source %s hash: %s",
source_sum["hash_type"],
@ -2126,7 +2133,7 @@ def remove(name=None, pkgs=None, **kwargs):
try:
cached_pkg = __salt__["cp.cache_file"](
uninstaller,
saltenv,
saltenv=saltenv,
verify_ssl=kwargs.get("verify_ssl", True),
)
except MinionError as exc:
@ -2150,7 +2157,7 @@ def remove(name=None, pkgs=None, **kwargs):
try:
cached_pkg = __salt__["cp.cache_file"](
uninstaller,
saltenv,
saltenv=saltenv,
verify_ssl=kwargs.get("verify_ssl", True),
)
except MinionError as exc:


@ -9,7 +9,6 @@ import logging
import os
import sys
import traceback
import uuid
import salt.channel.client
import salt.ext.tornado.gen
@ -1341,6 +1340,11 @@ class Pillar:
if self._closing:
return
self._closing = True
if self.client:
try:
self.client.destroy()
except AttributeError:
pass
# pylint: disable=W1701
def __del__(self):


@ -32,12 +32,10 @@ state:
- state: installed
"""
import logging
import os
import sys
# Import salt modules
import salt.fileclient
import salt.utils.decorators.path
from salt.utils.decorators import depends
@ -108,13 +106,6 @@ def __virtual__():
return __virtualname__
def _client():
"""
Get a fileclient
"""
return salt.fileclient.get_file_client(__opts__)
def _changes(plays):
"""
Find changes in ansible return data
@ -171,7 +162,7 @@ def playbooks(name, rundir=None, git_repo=None, git_kwargs=None, ansible_kwargs=
}
if git_repo:
if not isinstance(rundir, str) or not os.path.isdir(rundir):
with _client() as client:
with salt.fileclient.get_file_client(__opts__) as client:
rundir = client._extrn_path(git_repo, "base")
log.trace("rundir set to %s", rundir)
if not isinstance(git_kwargs, dict):


@ -760,7 +760,9 @@ def _find_install_targets(
err = "Unable to cache {0}: {1}"
try:
cached_path = __salt__["cp.cache_file"](
version_string, saltenv=kwargs["saltenv"], **kwargs
version_string,
saltenv=kwargs["saltenv"],
verify_ssl=kwargs.get("verify_ssl", True),
)
except CommandExecutionError as exc:
problems.append(err.format(version_string, exc))


@ -19,6 +19,13 @@ If your service states are running into trouble with init system detection,
please see the :ref:`Overriding Virtual Module Providers <module-provider-override>`
section of Salt's module documentation to work around possible errors.
For services managed by systemd, the systemd_service module includes a built-in
feature to reload the daemon when unit files are changed or extended. This
feature is used automatically by the service state and the systemd_service
module when running on a systemd minion, so there is no need to set up your own
methods of reloading the daemon. If you need to manually reload the daemon for
some reason, you can use the :func:`systemd_service.systemctl_reload <salt.modules.systemd_service.systemctl_reload>` function provided by Salt.
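For completeness, a hedged sketch of triggering that reload from custom minion-side code; it assumes a systemd minion, where the systemd_service module is loaded under the service virtual name and __salt__ is injected by the loader:

# Manual daemon reload; normally unnecessary, as described above.
reloaded = __salt__["service.systemctl_reload"]()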
.. note::
The current status of a service is determined by the return code of the init/rc
script status command. A status return code of 0 is considered running. Any


@ -72,23 +72,6 @@ def __virtual__():
return __virtualname__
def _format_changes(changes, key, v_name):
"""
Reformat the changes dictionary to group new and old together.
"""
new_changes = {"new": {}, "old": {}}
for item in changes:
if changes[item]["new"]:
new_changes["new"][item] = changes[item]["new"]
new_changes["new"]["key"] = key
new_changes["new"]["name"] = v_name
if changes[item]["old"]:
new_changes["old"][item] = changes[item]["old"]
new_changes["old"]["key"] = key
new_changes["old"]["name"] = v_name
return new_changes
def value_present(name, key, v_data, v_type="REG_DWORD", policy_class="Machine"):
r"""
Ensure a registry setting is present in the Registry.pol file.
@ -170,12 +153,16 @@ def value_present(name, key, v_data, v_type="REG_DWORD", policy_class="Machine")
key=key, v_name=name, policy_class=policy_class
)
changes = salt.utils.data.compare_dicts(old, new)
if str(new["data"]) == v_data and new["type"] == v_type:
ret["comment"] = "Registry.pol value has been set"
ret["result"] = True
else:
ret["comment"] = "Failed to set Registry.pol value"
changes = salt.utils.data.recursive_diff(old, new)
if changes:
ret["comment"] = "Registry.pol value has been set"
ret["changes"] = _format_changes(changes, key, name)
ret["result"] = True
ret["changes"] = changes
return ret
@ -238,12 +225,16 @@ def value_disabled(name, key, policy_class="Machine"):
key=key, v_name=name, policy_class=policy_class
)
changes = salt.utils.data.compare_dicts(old, new)
if "**del." in str(new["data"]) and new["type"] == "REG_SZ":
ret["comment"] = "Registry.pol value disabled"
ret["result"] = True
else:
ret["comment"] = "Failed to disable Registry.pol value"
changes = salt.utils.data.recursive_diff(old, new)
if changes:
ret["comment"] = "Registry.pol value enabled"
ret["changes"] = _format_changes(changes, key, name)
ret["result"] = True
ret["changes"] = changes
return ret
@ -306,14 +297,17 @@ def value_absent(name, key, policy_class="Machine"):
key=key, v_name=name, policy_class=policy_class
)
if new is None:
if not new:
ret["comment"] = "Registry.pol value deleted"
ret["result"] = True
# We're setting this here in case new is None
new = {}
else:
ret["comment"] = "Failed to delete Registry.pol value"
changes = salt.utils.data.compare_dicts(old, new)
changes = salt.utils.data.recursive_diff(old, new)
if changes:
ret["comment"] = "Registry.pol value deleted"
ret["changes"] = _format_changes(changes, key, name)
ret["result"] = True
ret["changes"] = changes
return ret


@ -131,7 +131,7 @@ class SyncWrapper:
result = io_loop.run_sync(lambda: getattr(self.obj, key)(*args, **kwargs))
results.append(True)
results.append(result)
except Exception as exc: # pylint: disable=broad-except
except Exception: # pylint: disable=broad-except
results.append(False)
results.append(sys.exc_info())


@ -1202,6 +1202,16 @@ def wait_for_passwd(
time.sleep(trysleep)
def _format_master_param(master):
"""
If the master is a list, we need to convert it to a comma delimited string
Otherwise, we just return master
"""
if isinstance(master, list):
return ",".join(master)
return master
def deploy_windows(
host,
port=445,
@ -1337,17 +1347,18 @@ def deploy_windows(
conn=smb_conn,
)
cmd = "c:\\salttemp\\{}".format(installer)
args = [
"/S",
"/master={}".format(_format_master_param(master)),
"/minion-name={}".format(name),
]
if use_winrm:
winrm_cmd(
winrm_session,
"c:\\salttemp\\{}".format(installer),
["/S", "/master={}".format(master), "/minion-name={}".format(name)],
)
winrm_cmd(winrm_session, cmd, args)
else:
cmd = "c:\\salttemp\\{}".format(installer)
args = "/S /master={} /minion-name={}".format(master, name)
stdout, stderr, ret_code = run_psexec_command(
cmd, args, host, username, password
cmd, " ".join(args), host, username, password
)
if ret_code != 0:


@ -58,19 +58,6 @@ class SaltCacheLoader(BaseLoader):
and only loaded once per loader instance.
"""
_cached_pillar_client = None
_cached_client = None
@classmethod
def shutdown(cls):
for attr in ("_cached_client", "_cached_pillar_client"):
client = getattr(cls, attr, None)
if client is not None:
# PillarClient and LocalClient objects do not have a destroy method
if hasattr(client, "destroy"):
client.destroy()
setattr(cls, attr, None)
def __init__(
self,
opts,
@ -93,8 +80,7 @@ class SaltCacheLoader(BaseLoader):
log.debug("Jinja search path: %s", self.searchpath)
self.cached = []
self._file_client = _file_client
# Instantiate the fileclient
self.file_client()
self._close_file_client = _file_client is None
def file_client(self):
"""
@ -108,18 +94,10 @@ class SaltCacheLoader(BaseLoader):
or not hasattr(self._file_client, "opts")
or self._file_client.opts["file_roots"] != self.opts["file_roots"]
):
attr = "_cached_pillar_client" if self.pillar_rend else "_cached_client"
cached_client = getattr(self, attr, None)
if (
cached_client is None
or not hasattr(cached_client, "opts")
or cached_client.opts["file_roots"] != self.opts["file_roots"]
):
cached_client = salt.fileclient.get_file_client(
self.opts, self.pillar_rend
)
setattr(SaltCacheLoader, attr, cached_client)
self._file_client = cached_client
self._file_client = salt.fileclient.get_file_client(
self.opts, self.pillar_rend
)
self._close_file_client = True
return self._file_client
def cache_file(self, template):
@ -221,6 +199,27 @@ class SaltCacheLoader(BaseLoader):
# there is no template file within searchpaths
raise TemplateNotFound(template)
def destroy(self):
if self._close_file_client is False:
return
if self._file_client is None:
return
file_client = self._file_client
self._file_client = None
try:
file_client.destroy()
except AttributeError:
# PillarClient and LocalClient objects do not have a destroy method
pass
def __enter__(self):
self.file_client()
return self
def __exit__(self, *args):
self.destroy()
class PrintableDict(OrderedDict):
"""


@ -97,3 +97,10 @@ if HAS_MAKO:
self.cache[fpath] = self.file_client().get_file(
fpath, "", True, self.saltenv
)
def destroy(self):
if self.client:
try:
self.client.destroy()
except AttributeError:
pass


@ -362,163 +362,169 @@ def render_jinja_tmpl(tmplstr, context, tmplpath=None):
elif tmplstr.endswith("\n"):
newline = "\n"
if not saltenv:
if tmplpath:
loader = jinja2.FileSystemLoader(os.path.dirname(tmplpath))
else:
loader = salt.utils.jinja.SaltCacheLoader(
opts,
saltenv,
pillar_rend=context.get("_pillar_rend", False),
_file_client=file_client,
)
env_args = {"extensions": [], "loader": loader}
if hasattr(jinja2.ext, "with_"):
env_args["extensions"].append("jinja2.ext.with_")
if hasattr(jinja2.ext, "do"):
env_args["extensions"].append("jinja2.ext.do")
if hasattr(jinja2.ext, "loopcontrols"):
env_args["extensions"].append("jinja2.ext.loopcontrols")
env_args["extensions"].append(salt.utils.jinja.SerializerExtension)
opt_jinja_env = opts.get("jinja_env", {})
opt_jinja_sls_env = opts.get("jinja_sls_env", {})
opt_jinja_env = opt_jinja_env if isinstance(opt_jinja_env, dict) else {}
opt_jinja_sls_env = opt_jinja_sls_env if isinstance(opt_jinja_sls_env, dict) else {}
# Pass through trim_blocks and lstrip_blocks Jinja parameters
# trim_blocks removes newlines around Jinja blocks
# lstrip_blocks strips tabs and spaces from the beginning of
# line to the start of a block.
if opts.get("jinja_trim_blocks", False):
log.debug("Jinja2 trim_blocks is enabled")
log.warning(
"jinja_trim_blocks is deprecated and will be removed in a future release,"
" please use jinja_env and/or jinja_sls_env instead"
)
opt_jinja_env["trim_blocks"] = True
opt_jinja_sls_env["trim_blocks"] = True
if opts.get("jinja_lstrip_blocks", False):
log.debug("Jinja2 lstrip_blocks is enabled")
log.warning(
"jinja_lstrip_blocks is deprecated and will be removed in a future release,"
" please use jinja_env and/or jinja_sls_env instead"
)
opt_jinja_env["lstrip_blocks"] = True
opt_jinja_sls_env["lstrip_blocks"] = True
def opt_jinja_env_helper(opts, optname):
for k, v in opts.items():
k = k.lower()
if hasattr(jinja2.defaults, k.upper()):
log.debug("Jinja2 environment %s was set to %s by %s", k, v, optname)
env_args[k] = v
else:
log.warning("Jinja2 environment %s is not recognized", k)
if "sls" in context and context["sls"] != "":
opt_jinja_env_helper(opt_jinja_sls_env, "jinja_sls_env")
else:
opt_jinja_env_helper(opt_jinja_env, "jinja_env")
if opts.get("allow_undefined", False):
jinja_env = jinja2.sandbox.SandboxedEnvironment(**env_args)
else:
jinja_env = jinja2.sandbox.SandboxedEnvironment(
undefined=jinja2.StrictUndefined, **env_args
)
indent_filter = jinja_env.filters.get("indent")
jinja_env.tests.update(JinjaTest.salt_jinja_tests)
jinja_env.filters.update(JinjaFilter.salt_jinja_filters)
if salt.utils.jinja.JINJA_VERSION >= Version("2.11"):
# Use the existing indent filter on Jinja versions where it's not broken
jinja_env.filters["indent"] = indent_filter
jinja_env.globals.update(JinjaGlobal.salt_jinja_globals)
# globals
jinja_env.globals["odict"] = OrderedDict
jinja_env.globals["show_full_context"] = salt.utils.jinja.show_full_context
jinja_env.tests["list"] = salt.utils.data.is_list
decoded_context = {}
for key, value in context.items():
if not isinstance(value, str):
if isinstance(value, NamedLoaderContext):
decoded_context[key] = value.value()
else:
decoded_context[key] = value
continue
try:
decoded_context[key] = salt.utils.stringutils.to_unicode(
value, encoding=SLS_ENCODING
)
except UnicodeDecodeError as ex:
log.debug(
"Failed to decode using default encoding (%s), trying system encoding",
SLS_ENCODING,
)
decoded_context[key] = salt.utils.data.decode(value)
jinja_env.globals.update(decoded_context)
try:
template = jinja_env.from_string(tmplstr)
output = template.render(**decoded_context)
except jinja2.exceptions.UndefinedError as exc:
trace = traceback.extract_tb(sys.exc_info()[2])
line, out = _get_jinja_error(trace, context=decoded_context)
if not line:
tmplstr = ""
raise SaltRenderError("Jinja variable {}{}".format(exc, out), line, tmplstr)
except (
jinja2.exceptions.TemplateRuntimeError,
jinja2.exceptions.TemplateSyntaxError,
jinja2.exceptions.SecurityError,
) as exc:
trace = traceback.extract_tb(sys.exc_info()[2])
line, out = _get_jinja_error(trace, context=decoded_context)
if not line:
tmplstr = ""
raise SaltRenderError(
"Jinja syntax error: {}{}".format(exc, out), line, tmplstr
)
except (SaltInvocationError, CommandExecutionError) as exc:
trace = traceback.extract_tb(sys.exc_info()[2])
line, out = _get_jinja_error(trace, context=decoded_context)
if not line:
tmplstr = ""
raise SaltRenderError(
"Problem running salt function in Jinja template: {}{}".format(exc, out),
line,
tmplstr,
)
except Exception as exc: # pylint: disable=broad-except
tracestr = traceback.format_exc()
trace = traceback.extract_tb(sys.exc_info()[2])
line, out = _get_jinja_error(trace, context=decoded_context)
if not line:
tmplstr = ""
if not saltenv:
if tmplpath:
loader = jinja2.FileSystemLoader(os.path.dirname(tmplpath))
else:
tmplstr += "\n{}".format(tracestr)
log.debug("Jinja Error")
log.debug("Exception:", exc_info=True)
log.debug("Out: %s", out)
log.debug("Line: %s", line)
log.debug("TmplStr: %s", tmplstr)
log.debug("TraceStr: %s", tracestr)
loader = salt.utils.jinja.SaltCacheLoader(
opts,
saltenv,
pillar_rend=context.get("_pillar_rend", False),
_file_client=file_client,
)
raise SaltRenderError(
"Jinja error: {}{}".format(exc, out), line, tmplstr, trace=tracestr
env_args = {"extensions": [], "loader": loader}
if hasattr(jinja2.ext, "with_"):
env_args["extensions"].append("jinja2.ext.with_")
if hasattr(jinja2.ext, "do"):
env_args["extensions"].append("jinja2.ext.do")
if hasattr(jinja2.ext, "loopcontrols"):
env_args["extensions"].append("jinja2.ext.loopcontrols")
env_args["extensions"].append(salt.utils.jinja.SerializerExtension)
opt_jinja_env = opts.get("jinja_env", {})
opt_jinja_sls_env = opts.get("jinja_sls_env", {})
opt_jinja_env = opt_jinja_env if isinstance(opt_jinja_env, dict) else {}
opt_jinja_sls_env = (
opt_jinja_sls_env if isinstance(opt_jinja_sls_env, dict) else {}
)
# Pass through trim_blocks and lstrip_blocks Jinja parameters
# trim_blocks removes newlines around Jinja blocks
# lstrip_blocks strips tabs and spaces from the beginning of
# line to the start of a block.
if opts.get("jinja_trim_blocks", False):
log.debug("Jinja2 trim_blocks is enabled")
log.warning(
"jinja_trim_blocks is deprecated and will be removed in a future release,"
" please use jinja_env and/or jinja_sls_env instead"
)
opt_jinja_env["trim_blocks"] = True
opt_jinja_sls_env["trim_blocks"] = True
if opts.get("jinja_lstrip_blocks", False):
log.debug("Jinja2 lstrip_blocks is enabled")
log.warning(
"jinja_lstrip_blocks is deprecated and will be removed in a future release,"
" please use jinja_env and/or jinja_sls_env instead"
)
opt_jinja_env["lstrip_blocks"] = True
opt_jinja_sls_env["lstrip_blocks"] = True
def opt_jinja_env_helper(opts, optname):
for k, v in opts.items():
k = k.lower()
if hasattr(jinja2.defaults, k.upper()):
log.debug(
"Jinja2 environment %s was set to %s by %s", k, v, optname
)
env_args[k] = v
else:
log.warning("Jinja2 environment %s is not recognized", k)
if "sls" in context and context["sls"] != "":
opt_jinja_env_helper(opt_jinja_sls_env, "jinja_sls_env")
else:
opt_jinja_env_helper(opt_jinja_env, "jinja_env")
if opts.get("allow_undefined", False):
jinja_env = jinja2.sandbox.SandboxedEnvironment(**env_args)
else:
jinja_env = jinja2.sandbox.SandboxedEnvironment(
undefined=jinja2.StrictUndefined, **env_args
)
indent_filter = jinja_env.filters.get("indent")
jinja_env.tests.update(JinjaTest.salt_jinja_tests)
jinja_env.filters.update(JinjaFilter.salt_jinja_filters)
if salt.utils.jinja.JINJA_VERSION >= Version("2.11"):
# Use the existing indent filter on Jinja versions where it's not broken
jinja_env.filters["indent"] = indent_filter
jinja_env.globals.update(JinjaGlobal.salt_jinja_globals)
# globals
jinja_env.globals["odict"] = OrderedDict
jinja_env.globals["show_full_context"] = salt.utils.jinja.show_full_context
jinja_env.tests["list"] = salt.utils.data.is_list
decoded_context = {}
for key, value in context.items():
if not isinstance(value, str):
if isinstance(value, NamedLoaderContext):
decoded_context[key] = value.value()
else:
decoded_context[key] = value
continue
try:
decoded_context[key] = salt.utils.stringutils.to_unicode(
value, encoding=SLS_ENCODING
)
except UnicodeDecodeError:
log.debug(
"Failed to decode using default encoding (%s), trying system encoding",
SLS_ENCODING,
)
decoded_context[key] = salt.utils.data.decode(value)
jinja_env.globals.update(decoded_context)
try:
template = jinja_env.from_string(tmplstr)
output = template.render(**decoded_context)
except jinja2.exceptions.UndefinedError as exc:
trace = traceback.extract_tb(sys.exc_info()[2])
line, out = _get_jinja_error(trace, context=decoded_context)
if not line:
tmplstr = ""
raise SaltRenderError("Jinja variable {}{}".format(exc, out), line, tmplstr)
except (
jinja2.exceptions.TemplateRuntimeError,
jinja2.exceptions.TemplateSyntaxError,
jinja2.exceptions.SecurityError,
) as exc:
trace = traceback.extract_tb(sys.exc_info()[2])
line, out = _get_jinja_error(trace, context=decoded_context)
if not line:
tmplstr = ""
raise SaltRenderError(
"Jinja syntax error: {}{}".format(exc, out), line, tmplstr
)
except (SaltInvocationError, CommandExecutionError) as exc:
trace = traceback.extract_tb(sys.exc_info()[2])
line, out = _get_jinja_error(trace, context=decoded_context)
if not line:
tmplstr = ""
raise SaltRenderError(
"Problem running salt function in Jinja template: {}{}".format(
exc, out
),
line,
tmplstr,
)
except Exception as exc: # pylint: disable=broad-except
tracestr = traceback.format_exc()
trace = traceback.extract_tb(sys.exc_info()[2])
line, out = _get_jinja_error(trace, context=decoded_context)
if not line:
tmplstr = ""
else:
tmplstr += "\n{}".format(tracestr)
log.debug("Jinja Error")
log.debug("Exception:", exc_info=True)
log.debug("Out: %s", out)
log.debug("Line: %s", line)
log.debug("TmplStr: %s", tmplstr)
log.debug("TraceStr: %s", tracestr)
raise SaltRenderError(
"Jinja error: {}{}".format(exc, out), line, tmplstr, trace=tracestr
)
finally:
if loader and hasattr(loader, "_file_client"):
if hasattr(loader._file_client, "destroy"):
loader._file_client.destroy()
if loader and isinstance(loader, salt.utils.jinja.SaltCacheLoader):
loader.destroy()
# Workaround a bug in Jinja that removes the final newline
# (https://github.com/mitsuhiko/jinja2/issues/75)
@ -569,9 +575,8 @@ def render_mako_tmpl(tmplstr, context, tmplpath=None):
except Exception: # pylint: disable=broad-except
raise SaltRenderError(mako.exceptions.text_error_template().render())
finally:
if lookup and hasattr(lookup, "_file_client"):
if hasattr(lookup._file_client, "destroy"):
lookup._file_client.destroy()
if lookup and isinstance(lookup, SaltMakoTemplateLookup):
lookup.destroy()
def render_wempy_tmpl(tmplstr, context, tmplpath=None):


@ -67,13 +67,11 @@ def search_reg_pol(search_string, policy_data):
gpt.ini
Args:
search_string (str): The string to search for
policy_data (str): The data to be searched
Returns:
bool: ``True`` if the regex search_string is found, otherwise ``False``
"""
if policy_data:
@ -91,7 +89,6 @@ def read_reg_pol_file(reg_pol_path):
Helper function to read the content of the Registry.pol file
Args:
reg_pol_path (str): The path to the Registry.pol file
Returns:
@ -120,7 +117,6 @@ def write_reg_pol_data(
to be processed
Args:
data_to_write (bytes): Data to write into the user/machine registry.pol
file
@ -132,6 +128,12 @@ def write_reg_pol_data(
gpt_extension_guid (str): ADMX registry extension guid for the class
gpt_ini_path (str): The path to the gpt.ini file
Returns:
bool: True if successful
Raises:
CommandExecutionError: On failure
"""
# Write Registry.pol file
if not os.path.exists(policy_file_path):
@ -254,6 +256,7 @@ def write_reg_pol_data(
)
log.exception(msg)
raise CommandExecutionError(msg)
return True
def reg_pol_to_dict(policy_data):
@ -273,6 +276,12 @@ def reg_pol_to_dict(policy_data):
# https://learn.microsoft.com/en-us/openspecs/windows_protocols/ms-gpreg/5c092c22-bf6b-4e7f-b180-b20743d368f5
reg_pol_header = REG_POL_HEADER.encode("utf-16-le")
# If policy_data is None, that means the Registry.pol file is missing
# So, we'll create it
if policy_data is None:
policy_data = reg_pol_header
if not policy_data.startswith(reg_pol_header):
msg = "LGPO_REG Util: Invalid Header. Registry.pol may be corrupt"
raise CommandExecutionError(msg)


@ -0,0 +1,37 @@
"""
Tests for payload
"""
import pytest
@pytest.mark.slow_test
@pytest.mark.skip_if_not_root
@pytest.mark.skip_on_windows
@pytest.mark.skip_on_darwin
def test_payload_no_exception(salt_cli, salt_master, salt_minion):
"""
Test to confirm that no exception is thrown with the jinja file
when executed on the minion
"""
test_set_hostname = """
{%- set host = pillar.get("hostname", "UNKNOWN") %}
{%- if host == 'UNKNOWN' %}
{{ raise("Unsupported UNKNOWN hostname") }}
{%- else %}
hostnamectl set-hostname {{ host }}
{%- endif %}
"""
with salt_master.state_tree.base.temp_file("set_hostname.j2", test_set_hostname):
ret = salt_cli.run("test.ping", minion_tgt=salt_minion.id)
assert ret.returncode == 0
assert ret.data is True
ret = salt_cli.run(
"cmd.script",
"salt://set_hostname.j2",
"template=jinja",
pillar={"hostname": "test"},
minion_tgt=salt_minion.id,
)
assert "AttributeError:" not in ret.stdout


@ -0,0 +1,40 @@
"""
Integration tests for the jinja includes in states
"""
import logging
import pytest
log = logging.getLogger(__name__)
@pytest.mark.slow_test
def test_issue_64111(salt_master, salt_minion, salt_call_cli):
# This needs to be an integration test. A functional test does not trigger
# the issue fixed.
macros_jinja = """
{% macro a_jinja_macro(arg) -%}
{{ arg }}
{%- endmacro %}
"""
init_sls = """
include:
- common.file1
"""
file1_sls = """
{% from 'common/macros.jinja' import a_jinja_macro with context %}
a state id:
cmd.run:
- name: echo {{ a_jinja_macro("hello world") }}
"""
tf = salt_master.state_tree.base.temp_file
with tf("common/macros.jinja", macros_jinja):
with tf("common/init.sls", init_sls):
with tf("common/file1.sls", file1_sls):
ret = salt_call_cli.run("state.apply", "common")
assert ret.returncode == 0


@ -0,0 +1,38 @@
import pytest
import salt.modules.cryptdev as cryptdev
from tests.support.mock import MagicMock, patch
@pytest.fixture
def configure_loader_modules(minion_opts):
return {cryptdev: {"__opts__": minion_opts}}
def test_active(caplog):
with patch.dict(
cryptdev.__salt__,
{"cmd.run_stdout": MagicMock(return_value="my-device (253, 1)\n")},
):
assert cryptdev.active() == {
"my-device": {
"devname": "my-device",
"major": "253",
"minor": "1",
}
}
# Debian output when no devices are set up.
with patch.dict(cryptdev.__salt__, {"cmd.run_stdout": MagicMock(return_value="")}):
caplog.clear()
assert cryptdev.active() == {}
assert "dmsetup output does not match expected format" in caplog.text
# CentOS output of dmsetup when no devices are set up.
with patch.dict(
cryptdev.__salt__,
{"cmd.run_stdout": MagicMock(return_value="No devices found")},
):
caplog.clear()
assert cryptdev.active() == {}
assert "dmsetup output does not match expected format" in caplog.text


@ -152,12 +152,16 @@ def test_get_key_invalid_policy_class():
def test_set_value(empty_reg_pol):
expected = {"data": 1, "type": "REG_DWORD"}
key = "SOFTWARE\\MyKey"
v_name = "MyValue"
lgpo_reg.set_value(key=key, v_name=v_name, v_data="1")
# Test command return
result = lgpo_reg.set_value(key=key, v_name=v_name, v_data="1")
assert result is True
# Test value actually set in Registry.pol
expected = {"data": 1, "type": "REG_DWORD"}
result = lgpo_reg.get_value(key=key, v_name=v_name)
assert result == expected
# Test that the registry value has been set
expected = {
"hive": "HKLM",
"key": key,
@ -249,14 +253,18 @@ def test_set_value_invalid_reg_dword():
def test_disable_value(reg_pol):
key = "SOFTWARE\\MyKey1"
# Test that the command completed successfully
result = lgpo_reg.disable_value(key=key, v_name="MyValue1")
assert result is True
# Test that the value was actually set in Registry.pol
expected = {
"**del.MyValue1": {"data": " ", "type": "REG_SZ"},
"**del.MyValue2": {"data": " ", "type": "REG_SZ"},
}
key = "SOFTWARE\\MyKey1"
lgpo_reg.disable_value(key=key, v_name="MyValue1")
result = lgpo_reg.get_key(key=key)
assert result == expected
# Test that the registry value has been removed
result = salt.utils.win_reg.value_exists(hive="HKLM", key=key, vname="MyValue1")
assert result is False
@ -283,16 +291,20 @@ def test_disable_value_invalid_policy_class():
def test_delete_value_existing(reg_pol):
key = "SOFTWARE\\MyKey1"
# Test that the command completes successfully
result = lgpo_reg.delete_value(key=key, v_name="MyValue1")
assert result is True
# Test that the value is actually removed from Registry.pol
expected = {
"**del.MyValue2": {
"data": " ",
"type": "REG_SZ",
},
}
key = "SOFTWARE\\MyKey1"
lgpo_reg.delete_value(key=key, v_name="MyValue1")
result = lgpo_reg.get_key(key=key)
assert result == expected
# Test that the registry entry has been removed
result = salt.utils.win_reg.value_exists(hive="HKLM", key=key, vname="MyValue2")
assert result is False


@ -262,7 +262,7 @@ def test_pkg_install_verify_ssl_false():
result = win_pkg.install(name="nsis", version="3.02", verify_ssl=False)
mock_cp.assert_called_once_with(
"http://download.sourceforge.net/project/nsis/NSIS%203/3.02/nsis-3.02-setup.exe",
"base",
saltenv="base",
verify_ssl=False,
)
assert expected == result


@ -3,6 +3,7 @@ import logging
import pytest
import salt.modules.beacons as beaconmod
import salt.modules.cp as cp
import salt.modules.pkg_resource as pkg_resource
import salt.modules.yumpkg as yumpkg
import salt.states.beacon as beaconstate
@ -15,19 +16,28 @@ log = logging.getLogger(__name__)
@pytest.fixture
def configure_loader_modules():
def configure_loader_modules(minion_opts):
return {
cp: {
"__opts__": minion_opts,
},
pkg: {
"__env__": "base",
"__salt__": {},
"__grains__": {"os": "CentOS", "os_family": "RedHat"},
"__opts__": {"test": False, "cachedir": ""},
"__opts__": minion_opts,
"__instance_id__": "",
"__low__": {},
"__utils__": {"state.gen_tag": state_utils.gen_tag},
},
beaconstate: {"__salt__": {}, "__opts__": {}},
beaconmod: {"__salt__": {}, "__opts__": {}},
beaconstate: {
"__salt__": {},
"__opts__": minion_opts,
},
beaconmod: {
"__salt__": {},
"__opts__": minion_opts,
},
pkg_resource: {
"__salt__": {},
"__grains__": {"os": "CentOS", "os_family": "RedHat"},
@ -35,7 +45,7 @@ def configure_loader_modules():
yumpkg: {
"__salt__": {},
"__grains__": {"osarch": "x86_64", "osmajorrelease": 7},
"__opts__": {},
"__opts__": minion_opts,
},
}
@ -565,6 +575,32 @@ def test_installed_with_changes_test_true(list_pkgs):
assert ret["changes"] == expected
def test_installed_with_sources(list_pkgs, tmp_path):
"""
Test pkg.installed with passing `sources`
"""
list_pkgs = MagicMock(return_value=list_pkgs)
pkg_source = tmp_path / "pkga-package-0.3.0.deb"
with patch.dict(
pkg.__salt__,
{
"cp.cache_file": cp.cache_file,
"pkg.list_pkgs": list_pkgs,
"pkg_resource.pack_sources": pkg_resource.pack_sources,
"lowpkg.bin_pkg_info": MagicMock(),
},
), patch("salt.fileclient.get_file_client", return_value=MagicMock()):
try:
ret = pkg.installed("install-pkgd", sources=[{"pkga": str(pkg_source)}])
assert ret["result"] is False
except TypeError as exc:
if "got multiple values for keyword argument 'saltenv'" in str(exc):
pytest.fail(f"TypeError should have not been raised: {exc}")
raise exc from None
@pytest.mark.parametrize("action", ["removed", "purged"])
def test_removed_purged_with_changes_test_true(list_pkgs, action):
"""


@ -84,8 +84,6 @@ def test_value_present(empty_reg_pol):
expected = {
"changes": {
"new": {
"name": "MyValue",
"key": "SOFTWARE\\MyKey",
"data": 1,
"type": "REG_DWORD",
},
@ -111,14 +109,10 @@ def test_value_present_existing_change(reg_pol):
expected = {
"changes": {
"new": {
"name": "MyValue1",
"key": "SOFTWARE\\MyKey1",
"data": 2,
"type": "REG_DWORD",
},
"old": {
"name": "MyValue1",
"key": "SOFTWARE\\MyKey1",
"data": "squidward",
"type": "REG_SZ",
},
@ -183,14 +177,10 @@ def test_value_present_existing_disabled(reg_pol):
"changes": {
"new": {
"data": 2,
"key": "SOFTWARE\\MyKey1",
"name": "MyValue2",
"type": "REG_DWORD",
},
"old": {
"data": "**del.MyValue2",
"key": "SOFTWARE\\MyKey1",
"name": "MyValue2",
"type": "REG_SZ",
},
},
@ -213,13 +203,11 @@ def test_value_disabled(empty_reg_pol):
"changes": {
"new": {
"data": "**del.MyValue1",
"key": "SOFTWARE\\MyKey1",
"name": "MyValue1",
"type": "REG_SZ",
},
"old": {},
},
"comment": "Registry.pol value enabled",
"comment": "Registry.pol value disabled",
"name": "MyValue1",
"result": True,
}
@ -238,16 +226,12 @@ def test_value_disabled_existing_change(reg_pol):
"changes": {
"new": {
"data": "**del.MyValue1",
"key": "SOFTWARE\\MyKey1",
"name": "MyValue1",
},
"old": {
"data": "squidward",
"key": "SOFTWARE\\MyKey1",
"name": "MyValue1",
},
},
"comment": "Registry.pol value enabled",
"comment": "Registry.pol value disabled",
"name": "MyValue1",
"result": True,
}
@ -299,8 +283,6 @@ def test_value_absent(reg_pol):
"new": {},
"old": {
"data": "squidward",
"key": "SOFTWARE\\MyKey1",
"name": "MyValue1",
"type": "REG_SZ",
},
},
@ -335,8 +317,6 @@ def test_value_absent_disabled(reg_pol):
"new": {},
"old": {
"data": "**del.MyValue2",
"key": "SOFTWARE\\MyKey1",
"name": "MyValue2",
"type": "REG_SZ",
},
},


@ -15,7 +15,7 @@ import salt.utils.json # pylint: disable=unused-import
import salt.utils.stringutils # pylint: disable=unused-import
import salt.utils.yaml # pylint: disable=unused-import
from salt.utils.jinja import SaltCacheLoader
from tests.support.mock import Mock, patch
from tests.support.mock import Mock, call, patch
@pytest.fixture
@ -224,14 +224,45 @@ def test_file_client_kwarg(minion_opts, mock_file_client):
assert loader._file_client is mock_file_client
def test_cache_loader_shutdown(minion_opts, mock_file_client):
def test_cache_loader_passed_file_client(minion_opts, mock_file_client):
"""
The shudown method can be called without raising an exception when the
file_client does not have a destroy method
"""
assert not hasattr(mock_file_client, "destroy")
mock_file_client.opts = minion_opts
loader = SaltCacheLoader(minion_opts, _file_client=mock_file_client)
assert loader._file_client is mock_file_client
# Shutdown method should not raise any exceptions
loader.shutdown()
# Test SaltCacheLoader creating and destroying the file client created
file_client = Mock()
with patch("salt.fileclient.get_file_client", return_value=file_client):
loader = SaltCacheLoader(minion_opts)
assert loader._file_client is None
with loader:
assert loader._file_client is file_client
assert loader._file_client is None
assert file_client.mock_calls == [call.destroy()]
# Test SaltCacheLoader reusing the file client passed
file_client = Mock()
file_client.opts = {"file_roots": minion_opts["file_roots"]}
with patch("salt.fileclient.get_file_client", return_value=Mock()):
loader = SaltCacheLoader(minion_opts, _file_client=file_client)
assert loader._file_client is file_client
with loader:
assert loader._file_client is file_client
assert loader._file_client is file_client
assert file_client.mock_calls == []
# Test SaltCacheLoader creating a client even though a file client was
# passed because the "file_roots" option is different, and, as such,
# the destroy method on the new file client is called, but not on the
# file client passed in.
file_client = Mock()
file_client.opts = {"file_roots": ""}
new_file_client = Mock()
with patch("salt.fileclient.get_file_client", return_value=new_file_client):
loader = SaltCacheLoader(minion_opts, _file_client=file_client)
assert loader._file_client is file_client
with loader:
assert loader._file_client is not file_client
assert loader._file_client is new_file_client
assert loader._file_client is None
assert file_client.mock_calls == []
assert new_file_client.mock_calls == [call.destroy()]
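Taken together with the SaltCacheLoader hunk above, the ownership rule is: the loader only destroys a file client it created itself. A short usage sketch, where opts and existing_client are assumed to be a minion opts dict and an already-constructed file client whose file_roots match:

from salt.utils.jinja import SaltCacheLoader

# The loader creates its own client lazily and tears it down on exit.
with SaltCacheLoader(opts) as loader:
    client = loader.file_client()

# A caller-supplied client is reused and left untouched; the caller stays
# responsible for destroying it.
with SaltCacheLoader(opts, _file_client=existing_client) as loader:
    assert loader.file_client() is existing_client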


@ -605,3 +605,55 @@ def test_deploy_script_ssh_timeout():
ssh_kwargs = root_cmd.call_args.kwargs
assert "ssh_timeout" in ssh_kwargs
assert ssh_kwargs["ssh_timeout"] == 34
@pytest.mark.parametrize(
"master,expected",
[
(None, None),
("single_master", "single_master"),
(["master1", "master2", "master3"], "master1,master2,master3"),
],
)
def test__format_master_param(master, expected):
result = cloud._format_master_param(master)
assert result == expected
@pytest.mark.skip_unless_on_windows(reason="Only applicable for Windows.")
@pytest.mark.parametrize(
"master,expected",
[
(None, None),
("single_master", "single_master"),
(["master1", "master2", "master3"], "master1,master2,master3"),
],
)
def test_deploy_windows_master(master, expected):
"""
Test deploy_windows with master parameter
"""
mock_true = MagicMock(return_value=True)
mock_tuple = MagicMock(return_value=(0, 0, 0))
with patch("salt.utils.smb.get_conn", MagicMock()), patch(
"salt.utils.smb.mkdirs", MagicMock()
), patch("salt.utils.smb.put_file", MagicMock()), patch(
"salt.utils.smb.delete_file", MagicMock()
), patch(
"salt.utils.smb.delete_directory", MagicMock()
), patch(
"time.sleep", MagicMock()
), patch.object(
cloud, "wait_for_port", mock_true
), patch.object(
cloud, "fire_event", MagicMock()
), patch.object(
cloud, "wait_for_psexecsvc", mock_true
), patch.object(
cloud, "run_psexec_command", mock_tuple
) as mock:
cloud.deploy_windows(host="test", win_installer="install.exe", master=master)
expected_cmd = "c:\\salttemp\\install.exe"
expected_args = "/S /master={} /minion-name=None".format(expected)
assert mock.call_args_list[0].args[0] == expected_cmd
assert mock.call_args_list[0].args[1] == expected_args