Finish removal of Vault modules

This commit is contained in:
jeanluc 2024-04-15 02:13:47 +02:00 committed by Daniel Wozniak
parent 32ac3cf679
commit e6e339ebbf
26 changed files with 0 additions and 10153 deletions

View file

@ -1,486 +0,0 @@
"""
:maintainer: SaltStack
:maturity: new
:platform: all
Utilities supporting modules for Hashicorp Vault. Configuration instructions are
documented in the :ref:`execution module docs <vault-setup>`.
"""
import logging
import requests
import salt.cache
import salt.crypt
import salt.exceptions
import salt.utils.data
import salt.utils.dictupdate
import salt.utils.json
import salt.utils.vault.helpers as hlp
import salt.utils.versions
from salt.utils.vault.auth import (
InvalidVaultSecretId,
InvalidVaultToken,
LocalVaultSecretId,
VaultAppRole,
)
from salt.utils.vault.exceptions import (
VaultAuthExpired,
VaultConfigExpired,
VaultException,
VaultInvocationError,
VaultNotFoundError,
VaultPermissionDeniedError,
VaultPreconditionFailedError,
VaultServerError,
VaultUnavailableError,
VaultUnsupportedOperationError,
VaultUnwrapException,
)
from salt.utils.vault.factory import (
clear_cache,
get_authd_client,
get_kv,
get_lease_store,
parse_config,
)
from salt.utils.vault.leases import (
VaultLease,
VaultSecretId,
VaultToken,
VaultWrappedResponse,
)
log = logging.getLogger(__name__)
logging.getLogger("requests").setLevel(logging.WARNING)
def query(
    method,
    endpoint,
    opts,
    context,
    payload=None,
    wrap=False,
    raise_error=True,
    is_unauthd=False,
    **kwargs,
):
    """
    Issue a request against the Vault API and return the parsed result.
    Extra keyword arguments are forwarded to ``requests.request``.

    method
        HTTP verb to use.

    endpoint
        API path to call (without leading ``/v1/``).

    opts
        Pass ``__opts__`` from the module.

    context
        Pass ``__context__`` from the module.

    payload
        Dictionary of payload values to send, if any.

    wrap
        Whether to request response wrapping. Should be a time string
        like ``30s`` or False (default).

    raise_error
        Whether to inspect the response code and raise exceptions.
        Defaults to True.

    is_unauthd
        Whether the queried endpoint is an unauthenticated one and hence
        does not deduct a token use. Only relevant for endpoints not found
        in ``sys``. Defaults to False.
    """
    # Collect the per-request arguments once so the retry issues an
    # identical request.
    request_kwargs = {
        "payload": payload,
        "wrap": wrap,
        "raise_error": raise_error,
        "is_unauthd": is_unauthd,
        **kwargs,
    }
    client, config = get_authd_client(opts, context, get_config=True)
    try:
        return client.request(method, endpoint, **request_kwargs)
    except VaultPermissionDeniedError:
        if not _check_clear(config, client):
            raise
        # The cached token's policies might be outdated — flush all
        # cached authentication data and retry once with fresh auth.
        clear_cache(opts, context)
        client = get_authd_client(opts, context)
        return client.request(method, endpoint, **request_kwargs)
def query_raw(
    method,
    endpoint,
    opts,
    context,
    payload=None,
    wrap=False,
    retry=True,
    is_unauthd=False,
    **kwargs,
):
    """
    Issue a request against the Vault API and return the raw response
    object. Extra keyword arguments are forwarded to ``requests.request``.

    method
        HTTP verb to use.

    endpoint
        API path to call (without leading ``/v1/``).

    opts
        Pass ``__opts__`` from the module.

    context
        Pass ``__context__`` from the module.

    payload
        Dictionary of payload values to send, if any.

    retry
        Retry the query with cleared cache in case the permission
        was denied (to check for revoked cached credentials).
        Defaults to True.

    wrap
        Whether to request response wrapping. Should be a time string
        like ``30s`` or False (default).

    is_unauthd
        Whether the queried endpoint is an unauthenticated one and hence
        does not deduct a token use. Only relevant for endpoints not found
        in ``sys``. Defaults to False.
    """
    # Collect the per-request arguments once so the retry issues an
    # identical request.
    request_kwargs = {
        "payload": payload,
        "wrap": wrap,
        "is_unauthd": is_unauthd,
        **kwargs,
    }
    client, config = get_authd_client(opts, context, get_config=True)
    res = client.request_raw(method, endpoint, **request_kwargs)
    if not retry:
        return res
    if res.status_code == 403 and _check_clear(config, client):
        # The cached token's policies might be outdated — flush all
        # cached authentication data and retry once with fresh auth.
        clear_cache(opts, context)
        client = get_authd_client(opts, context)
        res = client.request_raw(method, endpoint, **request_kwargs)
    return res
def is_v2(path, opts=None, context=None):
    """
    Determine whether the secret at ``path`` resides on a KV v1 or v2 mount.

    Relying on the loader-injected ``__opts__``/``__context__`` dunders
    (by not passing ``opts``/``context``) is deprecated.
    """
    if opts is None or context is None:
        salt.utils.versions.warn_until(
            3008,
            "The __utils__ loader functionality will be removed. This will "
            "cause context/opts dunders to be unavailable in utility modules. "
            "Please pass opts and context from importing Salt modules explicitly.",
        )
        if opts is None:
            opts = globals().get("__opts__", {})
        if context is None:
            context = globals().get("__context__", {})
    return get_kv(opts, context).is_v2(path)
def read_kv(path, opts, context, include_metadata=False):
    """
    Read the secret at <path>.
    """
    kv, config = get_kv(opts, context, get_config=True)
    try:
        return kv.read(path, include_metadata=include_metadata)
    except VaultPermissionDeniedError:
        if not _check_clear(config, kv.client):
            raise
        # Cached auth might carry stale policies — flush and retry once.
        clear_cache(opts, context)
        return get_kv(opts, context).read(path, include_metadata=include_metadata)
def write_kv(path, data, opts, context):
    """
    Write the secret <data> to <path>.
    """
    kv, config = get_kv(opts, context, get_config=True)
    try:
        return kv.write(path, data)
    except VaultPermissionDeniedError:
        if not _check_clear(config, kv.client):
            raise
        # Cached auth might carry stale policies — flush and retry once.
        clear_cache(opts, context)
        return get_kv(opts, context).write(path, data)
def patch_kv(path, data, opts, context):
    """
    Patch the secret <data> at <path>.
    """
    kv, config = get_kv(opts, context, get_config=True)
    try:
        return kv.patch(path, data)
    except VaultAuthExpired:
        # Patching can consume several token uses when
        # 1) the `patch` capability is unavailable
        # 2) the target is KV v1
        # 3) the target is KV v2 on an old Vault version.
        # Re-fetch the client to continue with fresh uses.
        return get_kv(opts, context).patch(path, data)
    except VaultPermissionDeniedError:
        if not _check_clear(config, kv.client):
            raise
        # Cached auth might carry stale policies — flush and retry once.
        clear_cache(opts, context)
        return get_kv(opts, context).patch(path, data)
def delete_kv(path, opts, context, versions=None):
    """
    Delete the secret at <path>. For KV v2, specific versions can be
    passed, which will be soft-deleted.
    """
    kv, config = get_kv(opts, context, get_config=True)
    try:
        return kv.delete(path, versions=versions)
    except VaultPermissionDeniedError:
        if not _check_clear(config, kv.client):
            raise
        # Cached auth might carry stale policies — flush and retry once.
        clear_cache(opts, context)
        return get_kv(opts, context).delete(path, versions=versions)
def destroy_kv(path, versions, opts, context):
    """
    Permanently destroy the secret <versions> at <path>. Requires KV v2.
    """
    kv, config = get_kv(opts, context, get_config=True)
    try:
        return kv.destroy(path, versions)
    except VaultPermissionDeniedError:
        if not _check_clear(config, kv.client):
            raise
        # Cached auth might carry stale policies — flush and retry once.
        clear_cache(opts, context)
        return get_kv(opts, context).destroy(path, versions)
def list_kv(path, opts, context):
    """
    List the secret keys at <path>.
    """
    kv, config = get_kv(opts, context, get_config=True)
    try:
        return kv.list(path)
    except VaultPermissionDeniedError:
        if not _check_clear(config, kv.client):
            raise
        # Cached auth might carry stale policies — flush and retry once.
        clear_cache(opts, context)
        return get_kv(opts, context).list(path)
def _check_clear(config, client):
    """
    Called after a VaultPermissionDeniedError was raised.
    Decide whether the cache should be flushed so the request can be
    retried with possibly updated token policies.
    """
    if config["cache"]["clear_on_unauthorized"]:
        return True
    try:
        # If the current token still checks out remotely, the denial was
        # genuine and clearing the cache would not help.
        token_ok = client.token_valid(remote=True)
    except VaultAuthExpired:
        return True
    return not token_ok
####################################################################################
# The following functions were available in previous versions and are deprecated
# TODO: remove deprecated functions after v3008 (Argon)
####################################################################################
def get_vault_connection():
    """
    Get the connection details for calling Vault, from local configuration if
    it exists, or from the master otherwise.

    .. deprecated:: 3007
        Use ``salt.utils.vault.get_authd_client`` instead.

    :return: dict with ``url``, ``namespace``, ``token``, ``verify``,
        ``issued`` and, depending on the run type, either
        ``lease_duration``/``uses`` or ``ttl``.
    """
    salt.utils.versions.warn_until(
        3008,
        "salt.utils.vault.get_vault_connection is deprecated, "
        "please use salt.utils.vault.get_authd_client.",
    )
    # Deprecated shim: pulls opts/context from the loader-injected dunders,
    # which will no longer be available in utility modules after 3008.
    opts = globals().get("__opts__", {})
    context = globals().get("__context__", {})
    try:
        vault = get_authd_client(opts, context)
    except salt.exceptions.InvalidConfigError as err:
        # This exception class was raised previously
        raise salt.exceptions.CommandExecutionError(err) from err
    token = vault.auth.get_token()
    server_config = vault.get_config()
    ret = {
        "url": server_config["url"],
        "namespace": server_config["namespace"],
        "token": str(token),
        "verify": server_config["verify"],
        "issued": token.creation_time,
    }
    # Mirror the historical return shape, which differed between tokens
    # issued remotely (via the master) and locally configured ones.
    if hlp._get_salt_run_type(opts) in [
        hlp.SALT_RUNTYPE_MASTER_IMPERSONATING,
        hlp.SALT_RUNTYPE_MASTER_PEER_RUN,
        hlp.SALT_RUNTYPE_MINION_REMOTE,
    ]:
        ret["lease_duration"] = token.explicit_max_ttl
        ret["uses"] = token.num_uses
    else:
        ret["ttl"] = token.explicit_max_ttl
    return ret
def del_cache():
    """
    Delete the Vault connection cache.

    .. deprecated:: 3007
        Use ``salt.utils.vault.clear_cache`` instead.
    """
    salt.utils.versions.warn_until(
        3008,
        "salt.utils.vault.del_cache is deprecated, please use salt.utils.vault.clear_cache.",
    )
    opts = globals().get("__opts__", {})
    context = globals().get("__context__", {})
    # The legacy implementation only managed connection-level data.
    clear_cache(opts, context, connection=False)
def write_cache(connection):  # pylint: disable=unused-argument
    """
    Write the vault token to cache.

    .. deprecated:: 3007
        The cache is managed internally now, so this is a no-op that
        always reports failure.
    """
    salt.utils.versions.warn_until(
        3008,
        "salt.utils.vault.write_cache is deprecated without replacement.",
    )
    # Always report failure since the cache is managed internally.
    return False
def get_cache():
    """
    Return connection information from the vault cache.

    .. deprecated:: 3007
        Use ``salt.utils.vault.get_authd_client`` instead.
    """
    salt.utils.versions.warn_until(
        3008,
        "salt.utils.vault.get_cache is deprecated, please use salt.utils.vault.get_authd_client.",
    )
    # Delegates to the other deprecated shim, which yields the same shape.
    return get_vault_connection()
def make_request(
    method,
    resource,
    token=None,
    vault_url=None,
    namespace=None,
    get_token_url=False,
    retry=False,
    **args,
):
    """
    Make a request to Vault.

    .. deprecated:: 3007
        Use ``salt.utils.vault.query``/``query_raw`` instead.

    method
        HTTP verb to use.

    resource
        API path to call. A leading ``/v1/`` prefix is stripped.

    token
        Override the token used for this request.

    vault_url
        Override the Vault server URL used for this request.

    namespace
        Override the Vault namespace used for this request.

    get_token_url
        Return a tuple of ``(response, token, url)`` instead of the
        response only. Defaults to False.

    retry
        Set internally on the retry after a 403 response to ensure
        only a single retry happens. Should not be set by callers.
    """
    salt.utils.versions.warn_until(
        3008,
        "salt.utils.vault.make_request is deprecated, please use "
        "salt.utils.vault.query or salt.utils.vault.query_raw."
        "To override token/url/namespace, please make use of the "
        "provided classes directly.",
    )

    def _get_client(token, vault_url, namespace, args):
        # Build an authenticated client, then apply per-call overrides.
        vault = get_authd_client(opts, context)
        if token is not None:
            # Use a fresh session so a pooled connection authenticated
            # with the previous token is not reused.
            vault.session = requests.Session()
            vault.auth.cache = None
            vault.auth.token = VaultToken(
                client_token=token, renewable=False, lease_duration=60, num_uses=1
            )
        if vault_url is not None:
            vault.session = requests.Session()
            vault.url = vault_url
        if namespace is not None:
            vault.namespace = namespace
        if "verify" in args:
            vault.verify = args.pop("verify")
        return vault

    opts = globals().get("__opts__", {})
    context = globals().get("__context__", {})
    # Strip leading slashes and an optional ``v1/`` prefix.
    # BUGFIX: the previous ``.lstrip("v1/")`` removed any run of the
    # characters ``v``, ``1`` and ``/``, mangling endpoints that merely
    # start with one of them (e.g. ``/v1/vault-thing`` -> ``ault-thing``).
    endpoint = resource.lstrip("/")
    if endpoint.startswith("v1/"):
        endpoint = endpoint[3:]
    payload = args.pop("json", None)
    if "data" in args:
        payload = salt.utils.json.loads(args.pop("data"))
    vault = _get_client(token, vault_url, namespace, args)
    res = vault.request_raw(method, endpoint, payload=payload, wrap=False, **args)
    if res.status_code == 403 and not retry:
        # retry was used to indicate to only try once more
        clear_cache(opts, context)
        vault = _get_client(token, vault_url, namespace, args)
        res = vault.request_raw(method, endpoint, payload=payload, wrap=False, **args)
    if get_token_url:
        return res, str(vault.auth.token), vault.get_config()["url"]
    return res

View file

@ -1,467 +0,0 @@
import salt.utils.json
import salt.utils.vault.leases as vleases
from salt.utils.vault.exceptions import VaultInvocationError, VaultNotFoundError
class AppRoleApi:
    """
    Wraps the Vault AppRole API.

    .. note::
        All durations can be specified either as an integer time in seconds
        or a time string like ``1h``.

    https://developer.hashicorp.com/vault/api-docs/auth/approle
    """

    def __init__(self, client):
        # client: an authenticated Vault API client used for all requests
        self.client = client

    def list_approles(self, mount="approle"):
        """
        List all AppRoles present on the specified mount.

        mount
            Name of the AppRole auth backend mount.
            Defaults to ``approle``.
        """
        endpoint = f"auth/{mount}/role"
        return self.client.list(endpoint)["data"]["keys"]

    def read_approle(self, name, mount="approle"):
        """
        Read the properties of an existing AppRole.
        Raises VaultNotFound if the AppRole does not exist on the mount.

        name
            Name of the AppRole to read the properties of.

        mount
            Name of the AppRole auth backend mount.
            Defaults to ``approle``.
        """
        endpoint = f"auth/{mount}/role/{name}"
        return self.client.get(endpoint)["data"]

    def write_approle(
        self,
        name,
        bind_secret_id=None,
        secret_id_bound_cidrs=None,
        secret_id_num_uses=None,
        secret_id_ttl=None,
        local_secret_ids=None,
        token_ttl=None,
        token_max_ttl=None,
        token_policies=None,
        token_bound_cidrs=None,
        token_explicit_max_ttl=None,
        token_no_default_policy=None,
        token_num_uses=None,
        token_period=None,
        token_type=None,
        mount="approle",
    ):
        """
        Create or update an AppRole.

        name
            Name of the AppRole to create/update.

        bind_secret_id
            Require a SecretID when authenticating with this AppRole.
            Defaults to true.

        secret_id_bound_cidrs
            List of blocks of IP addresses in CIDR notation that
            can perform the login operation.

        secret_id_num_uses
            Number of times a generated SecretID can be used to authenticate
            with this AppRole by default. ``0`` means unlimited.

        secret_id_ttl
            Duration after which a generated SecretID for this AppRole expires by default.

        local_secret_ids
            If set, the secret IDs generated using this role will be cluster-local.
            This can only be set during role creation and once set, it can't be reset later.
            Defaults to false.

        token_ttl
            The incremental lifetime for tokens generated by authenticating with this AppRole.
            This value will be referenced at renewal time.

        token_max_ttl
            The maximum lifetime for tokens generated by authenticating with this AppRole.
            This value will be referenced at renewal time.

        token_policies
            List of token policies to encode onto generated tokens.
            This list may be supplemented by user/group/other values.

        token_bound_cidrs
            List of blocks of IP addresses in CIDR notation that
            can perform the login operation. The resulting token will be tied
            to these blocks as well.

        token_explicit_max_ttl
            Place a hard cap on the maximum lifetime of tokens issued by authenticating
            with this AppRole.

        token_no_default_policy
            Do not add the ``default`` policy to tokens generated by authenticating
            with this AppRole. Defaults to false.

        token_num_uses
            Number of times a token generated by authenticating with this AppRole
            may be used to issue requests. ``0`` means unlimited.

        token_period
            The maximum allowed period value when a periodic token is requested from this role.

        token_type
            The type of token that should be generated (``service``, ``batch`` or ``default``).

        mount
            Name of the AppRole auth backend mount.
            Defaults to ``approle``.
        """
        endpoint = f"auth/{mount}/role/{name}"
        # Unset (None) parameters are dropped so the server-side defaults
        # or existing values remain untouched.
        payload = _filter_none(
            {
                "bind_secret_id": bind_secret_id,
                "secret_id_bound_cidrs": secret_id_bound_cidrs,
                "secret_id_num_uses": secret_id_num_uses,
                "secret_id_ttl": secret_id_ttl,
                "local_secret_ids": local_secret_ids,
                "token_ttl": token_ttl,
                "token_max_ttl": token_max_ttl,
                "token_policies": token_policies,
                "token_bound_cidrs": token_bound_cidrs,
                "token_explicit_max_ttl": token_explicit_max_ttl,
                "token_no_default_policy": token_no_default_policy,
                "token_num_uses": token_num_uses,
                "token_period": token_period,
                "token_type": token_type,
            }
        )
        return self.client.post(endpoint, payload=payload)

    def delete_approle(self, name, mount="approle"):
        """
        Delete an existing AppRole.
        Raises VaultNotFound if the AppRole does not exist on the mount.

        name
            Name of the AppRole to delete.

        mount
            Name of the AppRole auth backend mount.
            Defaults to ``approle``.
        """
        endpoint = f"auth/{mount}/role/{name}"
        return self.client.delete(endpoint)

    def read_role_id(self, name, wrap=False, mount="approle"):
        """
        Read the associated RoleID of an existing AppRole.
        Raises VaultNotFound if the AppRole does not exist on the mount.

        name
            Name of the AppRole.

        wrap
            If set, specifies the duration the resulting wrapping token should
            be valid for. This token can be used once to access the
            query result. Defaults to false (=> returns the RoleID as a string).

        mount
            Name of the AppRole auth backend mount.
            Defaults to ``approle``.
        """
        endpoint = f"auth/{mount}/role/{name}/role-id"
        role_id = self.client.get(endpoint, wrap=wrap)
        if wrap:
            # Return the whole wrapped response, not just the RoleID.
            return role_id
        return role_id["data"]["role_id"]

    def generate_secret_id(
        self,
        name,
        metadata=None,
        cidr_list=None,
        token_bound_cidrs=None,
        num_uses=None,
        ttl=None,
        wrap=False,
        mount="approle",
    ):
        """
        Generate a SecretID for an existing AppRole.
        Raises VaultNotFound if the AppRole does not exist on the mount.

        name
            Name of the AppRole.

        metadata
            Mapping of string keys to string values that specifies metadata
            to be set on the token generated by authenticating with this
            specific SecretID. It will be logged to audit logs in plaintext.

        cidr_list
            List of blocks of IP addresses in CIDR notation that
            can perform the login operation with this specific SecretID.
            If ``secret_id_bound_cidrs`` is set on the AppRole, this list
            must be a subset of the ones specified there.

        token_bound_cidrs
            List of blocks of IP addresses in CIDR notation that
            can perform the login operation. The resulting token will be tied
            to these blocks as well.
            If ``token_bound_cidrs`` is set on the AppRole, this list
            must be a subset of the ones specified there.

        num_uses
            Number of times this specific SecretID can be used to authenticate
            by default. ``0`` means unlimited.
            Must be equal to or lower than ``secret_id_num_uses`` set on the AppRole.

        ttl
            Duration after which this SecretID should expire.
            Must be equal to or lower than ``secret_id_ttl`` set on the AppRole.

        wrap
            If set, specifies the duration the resulting wrapping token should
            be valid for. This token can be used once to access the
            query result. Defaults to false (=> returns the SecretID as a string).

        mount
            Name of the AppRole auth backend mount.
            Defaults to ``approle``.
        """
        endpoint = f"auth/{mount}/role/{name}/secret-id"
        if metadata is not None:
            # The API expects the metadata mapping as a JSON-encoded string.
            metadata = salt.utils.json.dumps(metadata)
        payload = _filter_none(
            {
                "metadata": metadata,
                "cidr_list": cidr_list,
                "token_bound_cidrs": token_bound_cidrs,
                "num_uses": num_uses,
                "ttl": ttl,
            }
        )
        response = self.client.post(endpoint, payload=payload, wrap=wrap)
        if wrap:
            return response
        # Sadly, secret_id_num_uses is not part of the information returned, but
        # it can be read with `read_secret_id` using the accessor.
        return vleases.VaultSecretId(**response["data"])

    def read_secret_id(self, name, secret_id=None, accessor=None, mount="approle"):
        """
        Read properties of an existing SecretID.
        Raises VaultNotFound if the AppRole and/or SecretID does not exist on the mount.

        name
            Name of the AppRole the SecretID belongs to.

        secret_id
            The SecretID to look up. Specify either this or ``accessor``.

        accessor
            The accessor of the SecretID to look up. Specify either this
            or ``secret_id``.

        mount
            Name of the AppRole auth backend mount.
            Defaults to ``approle``.
        """
        if not secret_id and not accessor:
            raise VaultInvocationError(
                "Need either secret_id or accessor to read secret ID."
            )
        if secret_id:
            endpoint = f"auth/{mount}/role/{name}/secret-id/lookup"
            payload = {"secret_id": str(secret_id)}
        else:
            endpoint = f"auth/{mount}/role/{name}/secret-id-accessor/lookup"
            payload = {"secret_id_accessor": accessor}
        try:
            return self.client.post(endpoint, payload=payload)["data"]
        except TypeError:
            # lookup does not raise exceptions, only returns True
            raise VaultNotFoundError()

    def destroy_secret_id(self, name, secret_id=None, accessor=None, mount="approle"):
        """
        Destroy an existing SecretID.
        Raises VaultNotFound if the AppRole and/or SecretID does not exist on the mount.

        name
            Name of the AppRole the SecretID belongs to.

        secret_id
            The SecretID to destroy. Specify either this or ``accessor``.

        accessor
            The accessor of the SecretID to destroy. Specify either this
            or ``secret_id``.

        mount
            Name of the AppRole auth backend mount.
            Defaults to ``approle``.
        """
        if not secret_id and not accessor:
            raise VaultInvocationError(
                "Need either secret_id or accessor to destroy secret ID."
            )
        if secret_id:
            endpoint = f"auth/{mount}/role/{name}/secret-id/destroy"
            payload = {"secret_id": str(secret_id)}
        else:
            endpoint = f"auth/{mount}/role/{name}/secret-id-accessor/destroy"
            payload = {"secret_id_accessor": accessor}
        return self.client.post(endpoint, payload=payload)
class IdentityApi:
    """
    Wraps the Vault ``Identity`` secret engine API.

    https://developer.hashicorp.com/vault/api-docs/secret/identity
    """

    def __init__(self, client):
        # client: an authenticated Vault API client used for all requests
        self.client = client

    def list_entities(self):
        """
        Return a list of the names of all entities known by Vault.
        """
        endpoint = "identity/entity/name"
        return self.client.list(endpoint)["data"]["keys"]

    def read_entity(self, name):
        """
        Read the properties of an entity by its name.
        Raises VaultNotFound if the entity does not exist.

        name
            Name of the entity to read the properties of.
        """
        endpoint = f"identity/entity/name/{name}"
        return self.client.get(endpoint)["data"]

    def read_entity_by_alias(self, alias, mount):
        """
        Lookup the properties of an entity by its alias name and mount.
        Raises VaultNotFound if the entity does not exist.

        alias
            The name of the entity's alias on the specified
            ``mount``. For AppRole backends, this is the RoleID.

        mount
            The name of the mount the given alias is associated with.
            For example, if the backend is mounted at ``auth/approle``,
            this should be ``approle``.
        """
        endpoint = "identity/lookup/entity"
        payload = {
            "alias_name": alias,
            "alias_mount_accessor": self._lookup_mount_accessor(mount),
        }
        entity = self.client.post(endpoint, payload=payload)
        # A miss does not raise on this endpoint; the response is then
        # not a dict, so translate that into a VaultNotFoundError.
        if isinstance(entity, dict):
            return entity["data"]
        raise VaultNotFoundError()

    def write_entity(self, name, metadata=None, policies=None, disabled=None):
        """
        Create or update an entity by name.

        name
            The name of the entity.

        metadata
            Mapping of string keys to string values that specifies metadata
            to be set on the entity. This can be used to template policies.

        policies
            List of policies to be tied to the entity. These policies will
            be active in addition to auth method-specific policies.

        disabled
            Whether this entity should be disabled. Disabled entities' associated
            tokens cannot be used, but are not revoked. Defaults to false.
        """
        endpoint = f"identity/entity/name/{name}"
        # Unset (None) parameters are dropped so server-side defaults or
        # existing values remain untouched.
        payload = _filter_none(
            {
                "metadata": metadata,
                "policies": policies,
                "disabled": disabled,
            }
        )
        return self.client.post(endpoint, payload=payload)

    def delete_entity(self, name):
        """
        Delete an entity by name.
        Raises VaultNotFound if the entity does not exist.

        name
            The name of the entity.
        """
        endpoint = f"identity/entity/name/{name}"
        return self.client.delete(endpoint)

    def write_entity_alias(self, name, alias_name, mount, custom_metadata=None):
        """
        Create/update the association between an entity and a specific
        alias of an auth mount.

        name
            Name of the entity to associate with the alias.

        alias_name
            Name of the alias to associate with the entity.
            The specifics are dependent on the type of the auth backend.
            For AppRoles, this is the RoleID.

        mount
            The name of the mount the given alias is associated with.
            For example, if the backend is mounted at ``auth/approle``,
            this should be ``approle``.

        custom_metadata
            A map of arbitrary string to string valued user-provided
            metadata meant to describe the alias.
        """
        entity = self.read_entity(name)
        mount_accessor = self._lookup_mount_accessor(mount)
        payload = {
            "canonical_id": entity["id"],
            "mount_accessor": mount_accessor,
            "name": alias_name,
        }
        if custom_metadata is not None:
            payload["custom_metadata"] = custom_metadata
        for alias in entity["aliases"]:
            # Ensure an existing alias is updated
            if alias["mount_accessor"] == mount_accessor:
                payload["id"] = alias["id"]
                break
        return self.client.post("identity/entity-alias", payload=payload)

    def _lookup_mount_accessor(self, mount):
        # Resolve a mount name (e.g. ``approle``) to its accessor ID.
        endpoint = f"sys/auth/{mount}"
        return self.client.get(endpoint)["data"]["accessor"]
def _filter_none(data):
return {k: v for k, v in data.items() if v is not None}

View file

@ -1,241 +0,0 @@
import logging
import salt.utils.vault.leases as leases
from salt.utils.vault.exceptions import VaultAuthExpired
log = logging.getLogger(__name__)
class VaultTokenAuth:
    """
    Container for authentication tokens
    """

    def __init__(self, cache=None, token=None):
        # cache: optional token cache; consulted when no token is passed.
        # token: initial token, either a leases.VaultToken or its dict
        # representation. Falls back to an InvalidVaultToken placeholder
        # so callers can always query validity without None checks.
        self.cache = cache
        if token is None and cache is not None:
            token = cache.get()
        if token is None:
            token = InvalidVaultToken()
        if isinstance(token, dict):
            token = leases.VaultToken(**token)
        self.token = token

    def is_renewable(self):
        """
        Check whether the contained token is renewable, which requires it
        to be currently valid for at least two uses and renewable
        """
        return self.token.is_renewable()

    def is_valid(self, valid_for=0):
        """
        Check whether the contained token is valid
        """
        return self.token.is_valid(valid_for)

    def get_token(self):
        """
        Get the contained token if it is valid, otherwise
        raises VaultAuthExpired
        """
        if self.token.is_valid():
            return self.token
        raise VaultAuthExpired()

    def used(self):
        """
        Increment the use counter for the contained token
        """
        self.token.used()
        # num_uses == 0 means unlimited uses, so there is no counter
        # state worth persisting in that case.
        if self.token.num_uses != 0:
            self._write_cache()

    def update_token(self, auth):
        """
        Partially update the contained token (e.g. after renewal)
        """
        self.token = self.token.with_renewed(**auth)
        self._write_cache()

    def replace_token(self, token):
        """
        Completely replace the contained token with a new one
        """
        self.token = token
        self._write_cache()

    def _write_cache(self):
        if self.cache is not None:
            # Write the token indiscriminately since flushing
            # raises VaultAuthExpired.
            # This will be handled as part of the next request.
            self.cache.store(self.token)
class VaultAppRoleAuth:
    """
    Issues tokens from AppRole credentials.
    """

    def __init__(self, approle, client, mount="approle", cache=None, token_store=None):
        # approle: VaultAppRole credentials used to log in
        # client: unauthenticated Vault client for the login request
        # mount: name of the AppRole auth backend mount
        # cache: optional cache for the secret ID
        # token_store: VaultTokenAuth holding the issued token
        self.approle = approle
        self.client = client
        self.mount = mount
        self.cache = cache
        if token_store is None:
            token_store = VaultTokenAuth()
        self.token = token_store

    def is_renewable(self):
        """
        Check whether the currently used token is renewable.
        Secret IDs are not renewable anyways.
        """
        return self.token.is_renewable()

    def is_valid(self, valid_for=0):
        """
        Check whether the contained authentication data can be used
        to issue a valid token
        """
        return self.token.is_valid(valid_for) or self.approle.is_valid(valid_for)

    def get_token(self):
        """
        Return the token issued by the last login, if it is still valid, otherwise
        login with the contained AppRole, if it is valid. Otherwise,
        raises VaultAuthExpired
        """
        if self.token.is_valid():
            return self.token.get_token()
        if self.approle.is_valid():
            return self._login()
        raise VaultAuthExpired()

    def used(self):
        """
        Increment the use counter for the currently used token
        """
        self.token.used()

    def update_token(self, auth):
        """
        Partially update the contained token (e.g. after renewal)
        """
        self.token.update_token(auth)

    def _login(self):
        # Authenticate with the AppRole and replace the stored token with
        # the newly issued one.
        log.debug("Vault token expired. Recreating one by authenticating with AppRole.")
        endpoint = f"auth/{self.mount}/login"
        payload = self.approle.payload()
        res = self.client.post(endpoint, payload=payload)
        self.approle.used()
        self._replace_token(res["auth"])
        self._write_cache()
        return self.token.get_token()

    def _write_cache(self):
        if self.cache is not None and self.approle.secret_id is not None:
            if isinstance(self.approle.secret_id, LocalVaultSecretId):
                # Locally configured secret IDs must not be cached.
                pass
            elif self.approle.secret_id.num_uses == 0:
                # Unlimited uses — no counter state worth persisting.
                pass
            elif self.approle.secret_id.is_valid():
                self.cache.store(self.approle.secret_id)
            else:
                self.cache.flush()

    def _replace_token(self, auth):
        self.token.replace_token(leases.VaultToken(**auth))
class VaultAppRole:
    """
    Container that represents an AppRole (RoleID plus optional SecretID).
    """

    def __init__(self, role_id, secret_id=None):
        self.role_id = role_id
        self.secret_id = secret_id

    def replace_secret_id(self, secret_id):
        """
        Swap out the contained secret ID for a new one.
        """
        self.secret_id = secret_id

    def is_valid(self, valid_for=0, uses=1):
        """
        Check whether the contained data can be used to authenticate
        to Vault. Secret IDs might not be required by the server when
        bind_secret_id is set to false.

        valid_for
            Allows to check whether the AppRole will still be valid in the future.
            This can be an integer, which will be interpreted as seconds, or a
            time string using the same format as Vault does:
            Suffix ``s`` for seconds, ``m`` for minutes, ``h`` for hours, ``d`` for days.
            Defaults to 0.

        uses
            Check whether the AppRole has at least this number of uses left. Defaults to 1.
        """
        # Without a secret ID there is nothing that can expire on our side.
        return self.secret_id is None or self.secret_id.is_valid(
            valid_for=valid_for, uses=uses
        )

    def used(self):
        """
        Increment the secret ID use counter by one, if this AppRole uses one.
        """
        if self.secret_id is not None:
            self.secret_id.used()

    def payload(self):
        """
        Return the payload to use for POST requests using this AppRole.
        """
        data = {} if self.secret_id is None else self.secret_id.payload()
        data["role_id"] = self.role_id
        return data
class LocalVaultSecretId(leases.VaultSecretId):
    """
    A secret ID sourced from local configuration. It must not be cached
    and is assumed valid until the server proves otherwise.
    """

    def is_valid(self, valid_for=0, uses=1):
        """
        Always report validity for locally configured secret IDs.
        """
        return True
class InvalidVaultToken(leases.VaultToken):
    """
    Placeholder for an absent token, allowing validity checks
    without special-casing None.
    """

    def __init__(self, *args, **kwargs):  # pylint: disable=super-init-not-called
        # Deliberately skip the parent initializer — there is no token data.
        self.renewable = False
        self.use_count = 0
        self.num_uses = 0

    def is_valid(self, valid_for=0, uses=1):
        """
        A missing token is never valid.
        """
        return False
class InvalidVaultSecretId(leases.VaultSecretId):
    """
    Placeholder for an absent secret ID, allowing validity checks
    without special-casing None.
    """

    def __init__(self, *args, **kwargs):  # pylint: disable=super-init-not-called
        # Deliberately skip the parent initializer — there is no data to hold.
        pass

    def is_valid(self, valid_for=0, uses=1):
        """
        A missing secret ID is never valid.
        """
        return False

View file

@ -1,431 +0,0 @@
import copy
import logging
import time
import salt.cache
import salt.utils.vault.helpers as hlp
import salt.utils.vault.leases as leases
from salt.utils.vault.exceptions import VaultConfigExpired, VaultLeaseExpired
log = logging.getLogger(__name__)
def _get_config_cache(opts, context, cbank, ckey="config"):
    """
    Factory for VaultConfigCache to get around some
    chicken-and-egg problems

    :param opts: the minion/master opts dict
    :param context: the per-run ``__context__`` dict
    :param cbank: cache bank to store the configuration in
    :param ckey: cache key for the configuration, defaults to ``config``
    """
    config = None
    if cbank in context and ckey in context[cbank]:
        # Fast path: configuration was already loaded during this run.
        config = context[cbank][ckey]
    else:
        cache = salt.cache.factory(opts)
        if cache.contains(cbank, ckey):
            # expiration check is done inside the class
            config = cache.fetch(cbank, ckey)
        elif opts.get("cache", "localfs") != "localfs":
            # The configured cache driver comes from the (yet unparsed)
            # config itself, so also check the localfs fallback location.
            local_opts = copy.copy(opts)
            local_opts["cache"] = "localfs"
            cache = salt.cache.factory(local_opts)
            if cache.contains(cbank, ckey):
                # expiration check is done inside the class
                config = cache.fetch(cbank, ckey)
    return VaultConfigCache(
        context,
        cbank,
        ckey,
        opts,
        init_config=config,
        flush_exception=VaultConfigExpired,
    )
def _get_cache_backend(config, opts):
    """
    Return the persistent cache backend dictated by the Vault cache
    configuration, or None when only session (context) caching is wanted.
    """
    backend = config["cache"]["backend"]
    if backend == "session":
        return None
    if backend in ("localfs", "disk", "file"):
        # cache.Cache does not allow setting the type of cache by param,
        # so override the configured driver via a copy of the opts.
        localfs_opts = copy.copy(opts)
        localfs_opts["cache"] = "localfs"
        return salt.cache.factory(localfs_opts)
    # This should usually resolve to localfs as well on minions,
    # but can be overridden by setting cache in the minion config.
    return salt.cache.factory(opts)
def _get_cache_bank(opts, force_local=False, connection=True, session=False):
    """
    Compute the cache bank (namespace) for Vault cache data, scoped per
    minion when the master impersonates one.
    """
    minion_id = None
    # force_local is necessary because pillar compilation would otherwise
    # leak tokens between master and minions
    if not force_local:
        run_type = hlp._get_salt_run_type(opts)
        if run_type in (
            hlp.SALT_RUNTYPE_MASTER_IMPERSONATING,
            hlp.SALT_RUNTYPE_MASTER_PEER_RUN,
        ):
            minion_id = opts["grains"]["id"]
    prefix = "vault" if minion_id is None else f"minions/{minion_id}/vault"
    if session:
        return f"{prefix}/connection/session"
    if connection:
        return f"{prefix}/connection"
    return prefix
class CommonCache:
    """
    Base class that unifies context and other cache backends.
    """

    def __init__(
        self, context, cbank, cache_backend=None, ttl=None, flush_exception=None
    ):
        # context: the per-run __context__ dict, acting as first-level cache
        # cbank: cache bank (path-like namespace) for this cache's data
        # cache_backend: optional persistent salt.cache backend (second level)
        # ttl: maximum age in seconds for persisted entries, None = no expiry
        # flush_exception: exception type raised instead of performing a
        #     full flush, so callers can orchestrate cleanup
        self.context = context
        self.cbank = cbank
        self.cache = cache_backend
        self.ttl = ttl
        self.flush_exception = flush_exception

    def _ckey_exists(self, ckey, flush=True):
        # Check the context first (fast path), then the persistent backend,
        # honoring the TTL for persisted entries.
        if self.cbank in self.context and ckey in self.context[self.cbank]:
            return True
        if self.cache is not None:
            if not self.cache.contains(self.cbank, ckey):
                return False
            if self.ttl is not None:
                updated = self.cache.updated(self.cbank, ckey)
                if int(time.time()) - updated >= self.ttl:
                    if flush:
                        log.debug(
                            "Cached data in %s/%s outdated, flushing.", self.cbank, ckey
                        )
                        self.flush()
                    # Outdated entries are reported as missing.
                    return False
            return True
        return False

    def _get_ckey(self, ckey, flush=True):
        # Return the cached value or None, preferring the context copy.
        if not self._ckey_exists(ckey, flush=flush):
            return None
        if self.cbank in self.context and ckey in self.context[self.cbank]:
            return self.context[self.cbank][ckey]
        if self.cache is not None:
            return (
                self.cache.fetch(self.cbank, ckey) or None
            )  # account for race conditions
        raise RuntimeError("This code path should not have been hit.")

    def _store_ckey(self, ckey, value):
        # Write through to the persistent backend (if any) and always
        # mirror the value into the context.
        if self.cache is not None:
            self.cache.store(self.cbank, ckey, value)
        if self.cbank not in self.context:
            self.context[self.cbank] = {}
        self.context[self.cbank][ckey] = value

    def _flush(self, ckey=None):
        if not ckey and self.flush_exception is not None:
            # Flushing caches in Vault often requires an orchestrated effort
            # to ensure leases/sessions are terminated instead of left open.
            raise self.flush_exception()
        if self.cache is not None:
            self.cache.flush(self.cbank, ckey)
        if self.cbank in self.context:
            if ckey is None:
                self.context.pop(self.cbank)
            else:
                self.context[self.cbank].pop(ckey, None)
        # also remove sub-banks from context to mimic cache behavior
        if ckey is None:
            for bank in list(self.context):
                if bank.startswith(self.cbank):
                    self.context.pop(bank)

    def _list(self):
        # Union of keys present in the context and the persistent backend.
        ckeys = []
        if self.cbank in self.context:
            ckeys += list(self.context[self.cbank])
        if self.cache is not None:
            ckeys += self.cache.list(self.cbank)
        return set(ckeys)
class VaultCache(CommonCache):
    """
    Wraps the session context and other cache backends for a single
    domain (e.g. secret path metadata), using exactly one cache key.
    """

    def __init__(
        self, context, cbank, ckey, cache_backend=None, ttl=None, flush_exception=None
    ):
        super().__init__(
            context,
            cbank,
            cache_backend=cache_backend,
            ttl=ttl,
            flush_exception=flush_exception,
        )
        self.ckey = ckey

    def exists(self, flush=True):
        """
        Report whether data for this domain is present.
        """
        return self._ckey_exists(self.ckey, flush=flush)

    def get(self, flush=True):
        """
        Return the cached data for this domain, if any, else None.
        """
        return self._get_ckey(self.ckey, flush=flush)

    def store(self, value):
        """
        Save data for this domain.
        """
        return self._store_ckey(self.ckey, value)

    def flush(self, cbank=False):
        """
        Drop the cached data for this domain (the whole bank if ``cbank``).
        """
        return self._flush(None if cbank else self.ckey)
class VaultConfigCache(VaultCache):
    """
    Handles caching of received configuration

    The cache backend is itself defined by the (possibly cached)
    configuration, so it cannot be initialized up front - ``__init__``
    intentionally skips the parent constructor and the backend is
    (re)created in ``_load`` once a configuration is available.
    """
    def __init__(
        self,
        context,
        cbank,
        ckey,
        opts,
        cache_backend_factory=_get_cache_backend,
        init_config=None,
        flush_exception=None,
    ):  # pylint: disable=super-init-not-called
        self.context = context
        self.cbank = cbank
        self.ckey = ckey
        self.opts = opts
        # self.config doubles as the initialization marker:
        # while it is None, no configuration has been loaded yet
        self.config = None
        self.cache = None
        self.ttl = None
        self.cache_backend_factory = cache_backend_factory
        self.flush_exception = flush_exception
        if init_config is not None:
            self._load(init_config)
    def exists(self, flush=True):
        """
        Check if a configuration has been loaded and cached
        """
        if self.config is None:
            return False
        return super().exists(flush=flush)
    def get(self, flush=True):
        """
        Return the current cached configuration
        """
        if self.config is None:
            return None
        return super().get(flush=flush)
    def flush(self, cbank=True):
        """
        Flush all connection-scoped data

        Connection-scoped caches (tokens, leases) were created based on the
        cached configuration, hence the whole connection bank is flushed
        by default, not only the configuration itself.
        """
        if self.config is None:
            log.warning(
                "Tried to flush uninitialized configuration cache. Skipping flush."
            )
            return
        # flush the whole connection-scoped cache by default
        super().flush(cbank=cbank)
        # reset to the uninitialized state
        self.config = None
        self.cache = None
        self.ttl = None
    def _load(self, config):
        # Hydrate this instance from a configuration dict, recreating the
        # cache backend. If the configured backend changed (and the old one
        # was persistent), the previously cached data would become
        # inaccessible, so flush everything first.
        if self.config is not None:
            if (
                self.config["cache"]["backend"] != "session"
                and self.config["cache"]["backend"] != config["cache"]["backend"]
            ):
                self.flush()
        self.config = config
        self.cache = self.cache_backend_factory(self.config, self.opts)
        # the cache:config value acts as the TTL for the configuration itself
        self.ttl = self.config["cache"]["config"]
    def store(self, value):
        """
        Reload cache configuration, then store the new Vault configuration,
        overwriting the existing one.
        """
        self._load(value)
        super().store(value)
class LeaseCacheMixin:
    """
    Mixin for auth and lease caches that validates cached data
    before use and works with hydrated lease objects.
    """

    def __init__(self, *args, **kwargs):
        self.lease_cls = kwargs.pop("lease_cls", leases.VaultLease)
        self.expire_events = kwargs.pop("expire_events", None)
        super().__init__(*args, **kwargs)

    def _check_validity(self, lease_data, valid_for=0):
        lease = self.lease_cls(**lease_data)
        try:
            # is_valid on auth classes accounts for duration and uses
            valid = lease.is_valid(valid_for)
        except AttributeError:
            # plain leases only know about their duration
            valid = lease.is_valid_for(valid_for)
        if valid:
            log.debug("Using cached lease.")
            return lease
        if self.expire_events is not None:
            # signal callers that an expiry event should be sent
            raise VaultLeaseExpired()
        return None
class VaultLeaseCache(LeaseCacheMixin, CommonCache):
    """
    Handles caching of Vault leases. Supports multiple cache keys.
    Checks whether cached leases are still valid before returning.
    """
    def get(self, ckey, valid_for=0, flush=True):
        """
        Returns valid cached lease data or None.
        Flushes cache if invalid by default.

        ckey
            Cache key of the lease to look up.
        valid_for
            Minimum remaining validity the lease needs to be reported as valid.
        flush
            Remove the cached lease if it is not valid (anymore). Defaults to true.
        """
        data = self._get_ckey(ckey, flush=flush)
        if data is None:
            return data
        try:
            ret = self._check_validity(data, valid_for=valid_for)
        except VaultLeaseExpired:
            # only raised when expire_events was passed in - report the
            # expiry on the event bus, then treat the lease as absent
            if self.expire_events is not None:
                self.expire_events(
                    tag=f"vault/lease/{ckey}/expire",
                    data={
                        "valid_for_less": (
                            valid_for
                            if valid_for is not None
                            else data.get("min_ttl") or 0
                        ),
                    },
                )
            ret = None
        if ret is None and flush:
            log.debug("Cached lease not valid anymore. Flushing cache.")
            self._flush(ckey)
        return ret
    def store(self, ckey, value):
        """
        Store a lease in cache

        ckey
            Cache key to store the lease under.
        value
            A lease object (serialized via ``to_dict``) or a plain dict.
        """
        try:
            value = value.to_dict()
        except AttributeError:
            pass
        return self._store_ckey(ckey, value)
    def exists(self, ckey, flush=True):
        """
        Check whether a named lease exists in cache. Does not filter invalid ones,
        so fetching a reported one might still return None.
        """
        return self._ckey_exists(ckey, flush=flush)
    def flush(self, ckey=None):
        """
        Flush the lease cache or a single lease from the lease cache
        """
        return self._flush(ckey)
    def list(self):
        """
        List all cached leases. Does not filter invalid ones,
        so fetching a reported one might still return None.
        """
        return self._list()
class VaultAuthCache(LeaseCacheMixin, CommonCache):
    """
    Implements authentication secret-specific caches. Checks whether
    the cached secrets are still valid before returning.
    """

    def __init__(
        self,
        context,
        cbank,
        ckey,
        auth_cls,
        cache_backend=None,
        ttl=None,
        flush_exception=None,
    ):
        # auth_cls is the class cached data is hydrated with
        # (its validity check decides whether cached data is returned)
        super().__init__(
            context,
            cbank,
            lease_cls=auth_cls,
            cache_backend=cache_backend,
            ttl=ttl,
            flush_exception=flush_exception,
        )
        self.ckey = ckey
        # flush_exception is already set by the parent constructor,
        # the former duplicate assignment here was redundant

    def exists(self, flush=True):
        """
        Check whether data for this domain exists
        """
        return self._ckey_exists(self.ckey, flush=flush)

    def get(self, valid_for=0, flush=True):
        """
        Returns valid cached auth data or None.
        Flushes cache if invalid by default.
        """
        data = self._get_ckey(self.ckey, flush=flush)
        if data is None:
            return data
        ret = self._check_validity(data, valid_for=valid_for)
        if ret is None and flush:
            log.debug("Cached auth data not valid anymore. Flushing cache.")
            self.flush()
        return ret

    def store(self, value):
        """
        Store an auth credential in cache. Will overwrite possibly existing one.
        """
        try:
            # serialize auth objects for storage
            value = value.to_dict()
        except AttributeError:
            pass
        return self._store_ckey(self.ckey, value)

    def flush(self, cbank=None):
        """
        Flush the cached auth credentials. If this is a token cache,
        flushing it will delete the whole session-scoped cache bank.
        """
        if self.lease_cls is leases.VaultToken:
            # flush the whole cbank (session-scope) if this is a token cache
            ckey = None
        else:
            ckey = None if cbank else self.ckey
        return self._flush(ckey)

View file

@ -1,529 +0,0 @@
import logging
import re
import requests
from requests.packages.urllib3.util.ssl_ import create_urllib3_context
import salt.exceptions
import salt.utils.vault.leases as leases
from salt.utils.vault.exceptions import (
VaultAuthExpired,
VaultInvocationError,
VaultNotFoundError,
VaultPermissionDeniedError,
VaultPreconditionFailedError,
VaultServerError,
VaultUnavailableError,
VaultUnsupportedOperationError,
VaultUnwrapException,
)
log = logging.getLogger(__name__)
logging.getLogger("requests").setLevel(logging.WARNING)
# This list is not complete at all, but contains
# the most important paths.
# Requests to endpoints starting with one of these paths do not count
# as a token use (see AuthenticatedVaultClient.request_raw).
VAULT_UNAUTHD_PATHS = (
    "sys/wrapping/lookup",
    "sys/internal/ui/mounts",
    "sys/internal/ui/namespaces",
    "sys/seal-status",
    "sys/health",
)
def _get_expected_creation_path(secret_type, config=None):
if secret_type == "token":
return r"auth/token/create(/[^/]+)?"
if secret_type == "secret_id":
if config is not None:
return r"auth/{}/role/{}/secret\-id".format(
re.escape(config["auth"]["approle_mount"]),
re.escape(config["auth"]["approle_name"]),
)
return r"auth/[^/]+/role/[^/]+/secret\-id"
if secret_type == "role_id":
if config is not None:
return r"auth/{}/role/{}/role\-id".format(
re.escape(config["auth"]["approle_mount"]),
re.escape(config["auth"]["approle_name"]),
)
return r"auth/[^/]+/role/[^/]+/role\-id"
raise salt.exceptions.SaltInvocationError(
f"secret_type must be one of token, secret_id, role_id, got `{secret_type}`."
)
class VaultClient:
    """
    Unauthenticated client for the Vault API.
    Base class for authenticated client.
    """

    def __init__(self, url, namespace=None, verify=None, session=None):
        """
        url
            URL of the Vault server, including scheme and authority.
        namespace
            Optional Vault namespace, sent as ``X-Vault-Namespace``.
        verify
            Value for requests' ``verify`` parameter or a PEM-encoded
            CA certificate passed inline.
        session
            Optional preconfigured ``requests.Session``.
        """
        self.url = url
        self.namespace = namespace
        self.verify = verify
        ca_cert = None
        try:
            # An inline PEM-encoded CA certificate cannot be passed to
            # requests' verify directly, it needs a custom transport adapter.
            if verify.startswith("-----BEGIN CERTIFICATE"):
                ca_cert = verify
                verify = None
        except AttributeError:
            # verify is None or a boolean/path
            pass
        # Keep the actual requests parameter separate from the client config
        # to reduce complexity in config validation.
        self._requests_verify = verify
        if session is None:
            session = requests.Session()
            if ca_cert:
                adapter = CACertHTTPSAdapter(ca_cert)
                session.mount(url, adapter)
        self.session = session

    def delete(self, endpoint, wrap=False, raise_error=True, add_headers=None):
        """
        Wrapper for client.request("DELETE", ...)
        """
        return self.request(
            "DELETE",
            endpoint,
            wrap=wrap,
            raise_error=raise_error,
            add_headers=add_headers,
        )

    def get(self, endpoint, wrap=False, raise_error=True, add_headers=None):
        """
        Wrapper for client.request("GET", ...)
        """
        return self.request(
            "GET", endpoint, wrap=wrap, raise_error=raise_error, add_headers=add_headers
        )

    def list(self, endpoint, wrap=False, raise_error=True, add_headers=None):
        """
        Wrapper for client.request("LIST", ...)
        TODO: configuration to enable GET requests with query parameters for LIST?
        """
        return self.request(
            "LIST",
            endpoint,
            wrap=wrap,
            raise_error=raise_error,
            add_headers=add_headers,
        )

    def post(
        self, endpoint, payload=None, wrap=False, raise_error=True, add_headers=None
    ):
        """
        Wrapper for client.request("POST", ...)
        Vault considers POST and PUT to be synonymous.
        """
        return self.request(
            "POST",
            endpoint,
            payload=payload,
            wrap=wrap,
            raise_error=raise_error,
            add_headers=add_headers,
        )

    def patch(self, endpoint, payload, wrap=False, raise_error=True, add_headers=None):
        """
        Wrapper for client.request("PATCH", ...)
        """
        return self.request(
            "PATCH",
            endpoint,
            payload=payload,
            wrap=wrap,
            raise_error=raise_error,
            add_headers=add_headers,
        )

    def request(
        self,
        method,
        endpoint,
        payload=None,
        wrap=False,
        raise_error=True,
        add_headers=None,
        **kwargs,
    ):
        """
        Issue a request against the Vault API.
        Returns boolean when no data was returned, otherwise the decoded json data
        or a VaultWrappedResponse object if wrapping was requested.
        """
        res = self.request_raw(
            method,
            endpoint,
            payload=payload,
            wrap=wrap,
            add_headers=add_headers,
            **kwargs,
        )
        # HTTP 204: success without a response body
        if res.status_code == 204:
            return True
        data = res.json()
        if not res.ok:
            if raise_error:
                self._raise_status(res)
            return data
        if wrap:
            return leases.VaultWrappedResponse(**data["wrap_info"])
        return data

    def request_raw(
        self, method, endpoint, payload=None, wrap=False, add_headers=None, **kwargs
    ):
        """
        Issue a request against the Vault API. Returns the raw response object.
        """
        url = self._get_url(endpoint)
        headers = self._get_headers(wrap)
        try:
            headers.update(add_headers)
        except TypeError:
            # add_headers was None
            pass
        res = self.session.request(
            method,
            url,
            headers=headers,
            json=payload,
            verify=self._requests_verify,
            **kwargs,
        )
        return res

    def unwrap(self, wrapped, expected_creation_path=None):
        """
        Unwraps the data associated with a wrapping token.

        wrapped
            Wrapping token to unwrap

        expected_creation_path
            Regex expression or list of expressions that should fully match the
            wrapping token creation path. At least one match is required.
            Defaults to None, which skips the check.

        .. note::
            This check prevents tampering with wrapping tokens, which are
            valid for one request only. Usually, if an attacker sniffs a wrapping
            token, there will be two unwrapping requests, causing an audit warning.
            If the attacker can issue a new wrapping token and insert it into the
            response instead, this warning would be silenced. Assuming they do not
            possess the permissions to issue a wrapping token from the correct
            endpoint, checking the creation path makes this kind of attack obvious.
        """
        if expected_creation_path:
            wrap_info = self.wrap_info(wrapped)
            if not isinstance(expected_creation_path, list):
                expected_creation_path = [expected_creation_path]
            if not any(
                re.fullmatch(p, wrap_info["creation_path"])
                for p in expected_creation_path
            ):
                # creation path mismatch - possible tampering, refuse to unwrap
                raise VaultUnwrapException(
                    actual=wrap_info["creation_path"],
                    expected=expected_creation_path,
                    url=self.url,
                    namespace=self.namespace,
                    verify=self.verify,
                )
        url = self._get_url("sys/wrapping/unwrap")
        headers = self._get_headers()
        payload = {}
        # When this client is authenticated (headers already carry a token),
        # pass the wrapping token in the payload; otherwise use it to
        # authenticate the unwrap request itself.
        if "X-Vault-Token" not in headers:
            headers["X-Vault-Token"] = str(wrapped)
        else:
            payload["token"] = str(wrapped)
        res = self.session.request("POST", url, headers=headers, json=payload)
        if not res.ok:
            self._raise_status(res)
        return res.json()

    def wrap_info(self, wrapped):
        """
        Lookup wrapping token meta information.
        """
        endpoint = "sys/wrapping/lookup"
        add_headers = {"X-Vault-Token": str(wrapped)}
        return self.post(endpoint, wrap=False, add_headers=add_headers)["data"]

    def token_lookup(self, token=None, accessor=None, raw=False):
        """
        Lookup token meta information.

        token
            The token to look up or to use to look up the accessor.
            Required.

        accessor
            The accessor to use to query the token meta information.

        raw
            Return the raw response object instead of response data.
            Also disables status code checking.
        """
        endpoint = "auth/token/lookup-self"
        method = "GET"
        payload = {}
        if token is None:
            raise VaultInvocationError(
                "Unauthenticated VaultClient needs a token to lookup."
            )
        add_headers = {"X-Vault-Token": token}
        if accessor is not None:
            # The lookup-accessor endpoint only accepts POST requests
            # with the accessor in the payload.
            endpoint = "auth/token/lookup-accessor"
            method = "POST"
            payload["accessor"] = accessor
        res = self.request_raw(
            method, endpoint, payload=payload, wrap=False, add_headers=add_headers
        )
        if raw:
            return res
        self._raise_status(res)
        return res.json()["data"]

    def token_valid(self, valid_for=0, remote=True):  # pylint: disable=unused-argument
        """
        An unauthenticated client never has a valid token.
        """
        return False

    def get_config(self):
        """
        Returns Vault server configuration used by this client.
        """
        return {
            "url": self.url,
            "namespace": self.namespace,
            "verify": self.verify,
        }

    def _get_url(self, endpoint):
        # Build the full (v1) API URL for an endpoint.
        endpoint = endpoint.strip("/")
        return f"{self.url}/v1/{endpoint}"

    def _get_headers(self, wrap=False):
        # Default request headers. A truthy ``wrap`` value doubles as the
        # requested response wrapping TTL.
        headers = {"Content-Type": "application/json", "X-Vault-Request": "true"}
        if self.namespace is not None:
            headers["X-Vault-Namespace"] = self.namespace
        if wrap:
            headers["X-Vault-Wrap-TTL"] = str(wrap)
        return headers

    def _raise_status(self, res):
        # Map Vault HTTP error codes to module-specific exceptions, see
        # https://www.vaultproject.io/api-docs#http-status-codes
        errors = ", ".join(res.json().get("errors", []))
        if res.status_code == 400:
            raise VaultInvocationError(errors)
        if res.status_code == 403:
            raise VaultPermissionDeniedError(errors)
        if res.status_code == 404:
            raise VaultNotFoundError(errors)
        if res.status_code == 405:
            raise VaultUnsupportedOperationError(errors)
        if res.status_code == 412:
            raise VaultPreconditionFailedError(errors)
        if res.status_code in [500, 502]:
            raise VaultServerError(errors)
        if res.status_code == 503:
            raise VaultUnavailableError(errors)
        res.raise_for_status()
class AuthenticatedVaultClient(VaultClient):
    """
    Authenticated client for the Vault API.
    This should be used for most operations.
    """

    # authentication data manager in use; set in __init__
    auth = None

    def __init__(self, auth, url, **kwargs):
        """
        auth
            Object managing the authentication data for this client.
            The code below relies on it providing ``is_valid``,
            ``is_renewable``, ``get_token``, ``update_token`` and ``used``.
        url
            URL of the Vault API. Remaining kwargs are passed to VaultClient.
        """
        self.auth = auth
        super().__init__(url, **kwargs)

    def token_valid(self, valid_for=0, remote=True):
        """
        Check whether this client's authentication information is
        still valid.

        remote
            Check with the remote Vault server as well. This consumes
            a token use. Defaults to true.
        """
        if not self.auth.is_valid(valid_for):
            return False
        if not remote:
            return True
        try:
            res = self.token_lookup(raw=True)
            if res.status_code != 200:
                return False
            return True
        except Exception as err:  # pylint: disable=broad-except
            raise salt.exceptions.CommandExecutionError(
                "Error while looking up self token."
            ) from err

    def token_lookup(self, token=None, accessor=None, raw=False):
        """
        Lookup token meta information.

        token
            The token to look up. If neither token nor accessor
            are specified, looks up the current token in use by
            this client.

        accessor
            The accessor of the token to query the meta information for.

        raw
            Return the raw response object instead of response data.
            Also disables status code checking.
        """
        endpoint = "auth/token/lookup"
        method = "POST"
        payload = {}
        if token is None and accessor is None:
            # neither given: look up the client's own token
            endpoint += "-self"
            method = "GET"
        if token is not None:
            payload["token"] = token
        elif accessor is not None:
            endpoint += "-accessor"
            payload["accessor"] = accessor
        if raw:
            return self.request_raw(method, endpoint, payload=payload, wrap=False)
        return self.request(method, endpoint, payload=payload, wrap=False)["data"]

    def token_renew(self, increment=None, token=None, accessor=None):
        """
        Renew a token.

        increment
            Request the token to be valid for this amount of time from the current
            point of time onwards. Can also be used to reduce the validity period.
            The server might not honor this increment.
            Can be an integer (seconds) or a time string like ``1h``. Optional.

        token
            The token that should be renewed. Optional.
            If token and accessor are unset, renews the token currently in use
            by this client.

        accessor
            The accessor of the token that should be renewed. Optional.
        """
        endpoint = "auth/token/renew"
        payload = {}
        if token is None and accessor is None:
            # self-renewal: short-circuit if the local auth data already
            # reports the token as not renewable
            if not self.auth.is_renewable():
                return False
            endpoint += "-self"
        if increment is not None:
            payload["increment"] = increment
        if token is not None:
            payload["token"] = token
        elif accessor is not None:
            endpoint += "-accessor"
            payload["accessor"] = accessor
        res = self.post(endpoint, payload=payload)
        if token is None and accessor is None:
            # keep the local auth data in sync with the renewed validity
            self.auth.update_token(res["auth"])
        return res["auth"]

    def token_revoke(self, delta=1, token=None, accessor=None):
        """
        Revoke a token by setting its TTL to 1s.

        delta
            The time in seconds to request revocation after.
            Defaults to 1s.

        token
            The token that should be revoked. Optional.
            If token and accessor are unset, revokes the token currently in use
            by this client.

        accessor
            The accessor of the token that should be revoked. Optional.
        """
        try:
            self.token_renew(increment=delta, token=token, accessor=accessor)
        except (VaultPermissionDeniedError, VaultNotFoundError, VaultAuthExpired):
            # if we're trying to revoke ourselves and this happens,
            # the token was already invalid
            if token or accessor:
                raise
            return False
        return True

    def request_raw(
        self,
        method,
        endpoint,
        payload=None,
        wrap=False,
        add_headers=None,
        is_unauthd=False,
        **kwargs,
    ):  # pylint: disable=arguments-differ
        """
        Issue an authenticated request against the Vault API. Returns the raw response object.

        is_unauthd
            Do not count this request as a token use (in addition to the
            static VAULT_UNAUTHD_PATHS list). Defaults to false.
        """
        ret = super().request_raw(
            method,
            endpoint,
            payload=payload,
            wrap=wrap,
            add_headers=add_headers,
            **kwargs,
        )
        # tokens are used regardless of status code
        if not is_unauthd and not endpoint.startswith(VAULT_UNAUTHD_PATHS):
            self.auth.used()
        return ret

    def _get_headers(self, wrap=False):
        # authenticate every request with the current token
        headers = super()._get_headers(wrap)
        headers["X-Vault-Token"] = str(self.auth.get_token())
        return headers
class CACertHTTPSAdapter(requests.sessions.HTTPAdapter):
    """
    Restricts requests' CA chain validation to a single root
    certificate that is kept in memory, never written to disk.
    """

    def __init__(self, ca_cert_data, *args, **kwargs):
        # PEM-encoded root certificate to trust exclusively
        self.ca_cert_data = ca_cert_data
        super().__init__(*args, **kwargs)

    def init_poolmanager(
        self,
        connections,
        maxsize,
        block=requests.adapters.DEFAULT_POOLBLOCK,
        **pool_kwargs,
    ):
        # Build an SSL context that only trusts the in-memory certificate
        # and hand it to urllib3's pool manager.
        ctx = create_urllib3_context()
        ctx.load_verify_locations(cadata=self.ca_cert_data)
        pool_kwargs["ssl_context"] = ctx
        return super().init_poolmanager(connections, maxsize, block=block, **pool_kwargs)

View file

@ -1,101 +0,0 @@
import salt.exceptions
class VaultException(salt.exceptions.SaltException):
    """
    Root of the exception hierarchy raised by this module
    """
class VaultLeaseExpired(VaultException):
    """
    Raised when a lease from the cache turns out to be
    expired on local inspection.
    """
class VaultAuthExpired(VaultException):
    """
    Raised when locally cached authentication data is
    found to be outdated.
    """
class VaultConfigExpired(VaultException):
    """
    Raised when authentication data queried from the master reports a
    server configuration differing from the locally cached one, or when
    an explicit cache TTL configured for the configuration has passed.
    """
class VaultUnwrapException(VaultException):
    """
    Raised when the reported creation path of a wrapping token differs
    from the expected one.
    This has to be taken seriously as it indicates tampering.
    """

    def __init__(self, expected, actual, url, namespace, verify, *args, **kwargs):
        msg = (
            "Wrapped response was not created from expected Vault path: "
            f"`{actual}` is not matched by any of `{expected}`.\n"
            "This indicates tampering with the wrapping token by a third party "
            "and should be taken very seriously! If you changed some authentication-"
            "specific configuration on the master recently, especially minion "
            "approle mount, you should consider if this error was caused by outdated "
            "cached data on this minion instead."
        )
        super().__init__(msg, *args, **kwargs)
        # structured payload for reporting the incident on the event bus
        self.event_data = {
            "expected": expected,
            "actual": actual,
            "url": url,
            "namespace": namespace,
            "verify": verify,
        }
# https://www.vaultproject.io/api-docs#http-status-codes
class VaultInvocationError(VaultException):
    """
    Raised for malformed requests (HTTP 400); doubles as this
    module's invalid-argument exception
    """
class VaultPermissionDeniedError(VaultException):
    """
    Raised on HTTP 403 responses
    """
class VaultNotFoundError(VaultException):
    """
    Raised on HTTP 404 responses.
    In some cases, Vault also responds like this when the client lacks
    the permissions required for the requested endpoint.
    """
class VaultUnsupportedOperationError(VaultException):
    """
    Raised on HTTP 405 responses
    """
class VaultPreconditionFailedError(VaultException):
    """
    Raised on HTTP 412 responses
    """
class VaultServerError(VaultException):
    """
    Raised on server-side failures (HTTP 500/502)
    """
class VaultUnavailableError(VaultException):
    """
    Raised on HTTP 503 responses, which indicate
    maintenance or sealed status.
    """

File diff suppressed because it is too large Load diff

View file

@ -1,156 +0,0 @@
import datetime
import re
import string
from salt.exceptions import InvalidConfigError, SaltInvocationError
# Enumeration of the runtime contexts distinguished by _get_salt_run_type,
# which decide where Vault configuration/credentials are sourced from.
SALT_RUNTYPE_MASTER = 0  # direct run on the master
SALT_RUNTYPE_MASTER_IMPERSONATING = 1  # master running with a minion_id set
SALT_RUNTYPE_MASTER_PEER_RUN = 2  # master handling a run with minion grains
SALT_RUNTYPE_MINION_LOCAL = 3  # masterless/local minion
SALT_RUNTYPE_MINION_REMOTE = 4  # minion pulling configuration from the master
def _get_salt_run_type(opts):
    """
    Classify the current Salt runtime from ``opts`` into one of the
    SALT_RUNTYPE_* constants.
    """
    if "vault" in opts and opts.get("__role", "minion") == "master":
        if opts.get("minion_id"):
            return SALT_RUNTYPE_MASTER_IMPERSONATING
        if "grains" in opts and "id" in opts["grains"]:
            return SALT_RUNTYPE_MASTER_PEER_RUN
        return SALT_RUNTYPE_MASTER
    config_location = opts.get("vault", {}).get("config_location")
    if config_location and config_location not in ("local", "master"):
        raise InvalidConfigError(
            "Invalid vault configuration: config_location must be either local or master"
        )
    # config_location == "master" forces remote sourcing even for
    # otherwise masterless/local setups
    if config_location != "master" and any(
        (
            opts.get("local", None),
            opts.get("file_client", None) == "local",
            opts.get("master_type", None) == "disable",
            config_location == "local",
        )
    ):
        return SALT_RUNTYPE_MINION_LOCAL
    return SALT_RUNTYPE_MINION_REMOTE
def iso_to_timestamp(iso_time):
    """
    Most endpoints respond with RFC3339-formatted strings.
    Convert such a string into a UNIX timestamp using inbuilt tools only.
    """
    # drop subsecond precision to make it easier on us
    # (length would need to be 3, 6 or 9)
    iso_time = re.sub(r"\.[\d]+", "", iso_time)
    iso_time = re.sub(r"Z$", "+00:00", iso_time)
    try:
        # Python >=v3.7
        return int(datetime.datetime.fromisoformat(iso_time).timestamp())
    except AttributeError:
        # Python < v3.7 - parse the fields by hand
        date_part, time_part = iso_time.split("T")
        year = int(date_part[:4])
        month = int(date_part[5:7])
        day = int(date_part[8:10])
        hour = int(time_part[:2])
        minute = int(time_part[3:5])
        second = int(time_part[6:8])
        # locate the UTC offset marker (either + or -)
        offset_pos = (time_part.find("-") + 1 or time_part.find("+") + 1) - 1
        offset_hours = int(time_part[offset_pos + 1 : offset_pos + 3])
        offset_minutes = int(time_part[offset_pos + 4 : offset_pos + 6])
        if offset_hours or offset_minutes:
            offset_sign = -1 if time_part[offset_pos] == "-" else 1
            delta = datetime.timedelta(hours=offset_hours, minutes=offset_minutes)
            tzinfo = datetime.timezone(offset_sign * delta)
        else:
            tzinfo = datetime.timezone.utc
        return int(
            datetime.datetime(
                year, month, day, hour, minute, second, 0, tzinfo
            ).timestamp()
        )
def expand_pattern_lists(pattern, **mappings):
    """
    Expands the pattern for any list-valued mappings, such that for any list of
    length N in the mappings present in the pattern, N copies of the pattern are
    returned, each with an element of the list substituted.

    pattern:
        A pattern to expand, for example ``by-role/{grains[roles]}``

    mappings:
        A dictionary of variables that can be expanded into the pattern.

    Example: Given the pattern `` by-role/{grains[roles]}`` and the below grains

    .. code-block:: yaml

        grains:
            roles:
                - web
                - database

    This function will expand into two patterns,
    ``[by-role/web, by-role/database]``.

    Note that this method does not expand any non-list patterns.
    """
    formatter = string.Formatter()
    # Scan the pattern for replacement fields. The first field whose value
    # is a list fans out into one pattern per list element; recursion then
    # handles any remaining list-valued fields. Scalars are left in place
    # for a later str.format call. In practice this is expected to not be
    # very expensive, since patterns will typically involve a handful of
    # lists at most.
    for _, field_name, _, _ in formatter.parse(pattern):
        if field_name is None:
            continue
        value, _ = formatter.get_field(field_name, None, mappings)
        if not isinstance(value, list):
            continue
        token = f"{{{field_name}}}"
        expanded_patterns = []
        for element in value:
            expanded_patterns += expand_pattern_lists(
                pattern.replace(token, str(element)), **mappings
            )
        return expanded_patterns
    return [pattern]
def timestring_map(val):
    """
    Turn a time string (like ``60m``) into a float with seconds as a unit.
    Plain numbers and numeric strings are passed through as floats;
    None is returned unchanged.
    """
    if val is None:
        return val
    if isinstance(val, (int, float)):
        return float(val)
    try:
        # bare numeric strings carry seconds already
        return float(val)
    except ValueError:
        pass
    if not isinstance(val, str):
        raise SaltInvocationError("Expected integer or time string")
    if not re.match(r"^\d+(?:\.\d+)?[smhd]$", val):
        raise SaltInvocationError(f"Invalid time string format: {val}")
    raw, unit = float(val[:-1]), val[-1]
    # the regex above guarantees the unit is one of these keys
    return raw * {"s": 1, "m": 60, "h": 3600, "d": 86400}[unit]

View file

@ -1,259 +0,0 @@
import logging
from salt.utils.vault.exceptions import (
VaultException,
VaultInvocationError,
VaultPermissionDeniedError,
VaultUnsupportedOperationError,
)
log = logging.getLogger(__name__)
class VaultKV:
"""
Interface to Vault secret paths
"""
def __init__(self, client, metadata_cache):
self.client = client
self.metadata_cache = metadata_cache
def read(self, path, include_metadata=False):
"""
Read secret data at path.
include_metadata
For kv-v2, include metadata in the return value:
``{"data": {} ,"metadata": {}}``.
"""
v2_info = self.is_v2(path)
if v2_info["v2"]:
path = v2_info["data"]
res = self.client.get(path)
ret = res["data"]
if v2_info["v2"] and not include_metadata:
return ret["data"]
return ret
def write(self, path, data):
"""
Write secret data to path.
"""
v2_info = self.is_v2(path)
if v2_info["v2"]:
path = v2_info["data"]
data = {"data": data}
return self.client.post(path, payload=data)
def patch(self, path, data):
"""
Patch existing data.
Tries to use a PATCH request, otherwise falls back to updating in memory
and writing back the whole secret, thus might consume more than one token use.
Since this uses JSON Merge Patch format, values set to ``null`` (``None``)
will be dropped. For details, see
https://datatracker.ietf.org/doc/html/draft-ietf-appsawg-json-merge-patch-07
"""
def apply_json_merge_patch(data, patch):
if not patch:
return data
if not isinstance(data, dict) or not isinstance(patch, dict):
raise ValueError("Data and patch must be dictionaries.")
for key, value in patch.items():
if value is None:
data.pop(key, None)
elif isinstance(value, dict):
data[key] = apply_json_merge_patch(data.get(key, {}), value)
else:
data[key] = value
return data
def patch_in_memory(path, data):
current = self.read(path)
updated = apply_json_merge_patch(current, data)
return self.write(path, updated)
v2_info = self.is_v2(path)
if not v2_info["v2"]:
return patch_in_memory(path, data)
path = v2_info["data"]
payload = {"data": data}
add_headers = {"Content-Type": "application/merge-patch+json"}
try:
return self.client.patch(path, payload=payload, add_headers=add_headers)
except VaultPermissionDeniedError:
log.warning("Failed patching secret, is the `patch` capability set?")
except VaultUnsupportedOperationError:
pass
return patch_in_memory(path, data)
def delete(self, path, versions=None):
"""
Delete secret path data. For kv-v1, this is permanent.
For kv-v2, this only soft-deletes the data.
versions
For kv-v2, specifies versions to soft-delete. Needs to be castable
to a list of integers.
"""
method = "DELETE"
payload = None
versions = self._parse_versions(versions)
v2_info = self.is_v2(path)
if v2_info["v2"]:
if versions is not None:
method = "POST"
path = v2_info["delete_versions"]
payload = {"versions": versions}
else:
# data and delete operations only differ by HTTP verb
path = v2_info["data"]
elif versions is not None:
raise VaultInvocationError("Versioning support requires kv-v2.")
return self.client.request(method, path, payload=payload)
def destroy(self, path, versions):
"""
Permanently remove version data. Requires kv-v2.
versions
Specifies versions to destroy. Needs to be castable
to a list of integers.
"""
versions = self._parse_versions(versions)
v2_info = self.is_v2(path)
if not v2_info["v2"]:
raise VaultInvocationError("Destroy operation requires kv-v2.")
path = v2_info["destroy"]
payload = {"versions": versions}
return self.client.post(path, payload=payload)
def _parse_versions(self, versions):
if versions is None:
return versions
if not isinstance(versions, list):
versions = [versions]
try:
versions = [int(x) for x in versions]
except ValueError as err:
raise VaultInvocationError(
"Versions have to be specified as integers."
) from err
return versions
def nuke(self, path):
"""
Delete path metadata and version data, including all version history.
Requires kv-v2.
"""
v2_info = self.is_v2(path)
if not v2_info["v2"]:
raise VaultInvocationError("Nuke operation requires kv-v2.")
path = v2_info["metadata"]
return self.client.delete(path)
def list(self, path):
"""
List keys at path.
"""
v2_info = self.is_v2(path)
if v2_info["v2"]:
path = v2_info["metadata"]
return self.client.list(path)["data"]["keys"]
def is_v2(self, path):
"""
Determines if a given secret path is kv version 1 or 2.
"""
ret = {
"v2": False,
"data": path,
"metadata": path,
"delete": path,
"type": None,
}
path_metadata = self._get_secret_path_metadata(path)
if not path_metadata:
# metadata lookup failed. Simply return not v2
return ret
ret["type"] = path_metadata.get("type", "kv")
if (
ret["type"] == "kv"
and path_metadata["options"] is not None
and path_metadata.get("options", {}).get("version", "1") in ["2"]
):
ret["v2"] = True
ret["data"] = self._v2_the_path(path, path_metadata.get("path", path))
ret["metadata"] = self._v2_the_path(
path, path_metadata.get("path", path), "metadata"
)
ret["delete"] = ret["data"]
ret["delete_versions"] = self._v2_the_path(
path, path_metadata.get("path", path), "delete"
)
ret["destroy"] = self._v2_the_path(
path, path_metadata.get("path", path), "destroy"
)
return ret
def _v2_the_path(self, path, pfilter, ptype="data"):
"""
Given a path, a filter, and a path type, properly inject
'data' or 'metadata' into the path.
"""
possible_types = ["data", "metadata", "delete", "destroy"]
if ptype not in possible_types:
raise AssertionError()
msg = f"Path {path} already contains {ptype} in the right place - saltstack duct tape?"
path = path.rstrip("/").lstrip("/")
pfilter = pfilter.rstrip("/").lstrip("/")
together = pfilter + "/" + ptype
otype = possible_types[0] if possible_types[0] != ptype else possible_types[1]
other = pfilter + "/" + otype
if path.startswith(other):
path = path.replace(other, together, 1)
msg = f'Path is a "{otype}" type but "{ptype}" type requested - Flipping: {path}'
elif not path.startswith(together):
old_path = path
path = path.replace(pfilter, together, 1)
msg = f"Converting path to v2 {old_path} => {path}"
log.debug(msg)
return path
    def _get_secret_path_metadata(self, path):
        """
        Given a path, query vault to determine mount point, type, and version.

        Results are cached in ``self.metadata_cache``.
        Returns the metadata dict on success, None on failure.
        """
        cache_content = self.metadata_cache.get() or {}
        ret = None
        # Cache entries are keyed by path prefix, so any secret below an
        # already-resolved path reuses the cached mount metadata.
        if path.startswith(tuple(cache_content.keys())):
            log.debug("Found cached metadata for %s", path)
            ret = next(v for k, v in cache_content.items() if path.startswith(k))
        else:
            log.debug("Fetching metadata for %s", path)
            try:
                endpoint = f"sys/internal/ui/mounts/{path}"
                res = self.client.get(endpoint)
                if "data" in res:
                    log.debug("Got metadata for %s", path)
                    # Store in the shared cache for subsequent lookups.
                    cache_content[path] = ret = res["data"]
                    self.metadata_cache.store(cache_content)
                else:
                    raise VaultException("Unexpected response to metadata query.")
            except Exception as err:  # pylint: disable=broad-except
                # Best effort on purpose: any failure here makes the caller
                # fall back to treating the path as kv-v1 (returns None).
                log.error(
                    "Failed to get secret metadata %s: %s", type(err).__name__, err
                )
        return ret

View file

@ -1,603 +0,0 @@
import copy
import fnmatch
import logging
import time
from salt.utils.vault.exceptions import (
VaultException,
VaultInvocationError,
VaultNotFoundError,
VaultPermissionDeniedError,
)
from salt.utils.vault.helpers import iso_to_timestamp, timestring_map
log = logging.getLogger(__name__)
class DurationMixin:
    """
    Mixin that tracks a validity period based on wall-clock time.
    """

    def __init__(
        self,
        renewable=False,
        duration=0,
        creation_time=None,
        expire_time=None,
        **kwargs,
    ):
        # Vault API responses report the validity period as ``lease_duration``.
        if "lease_duration" in kwargs:
            duration = kwargs.pop("lease_duration")
        self.renewable = renewable
        self.duration = duration
        self.creation_time = self._coerce_timestamp(
            creation_time if creation_time is not None else round(time.time())
        )
        self.expire_time = self._coerce_timestamp(
            expire_time if expire_time is not None else round(time.time()) + duration
        )
        super().__init__(**kwargs)

    @staticmethod
    def _coerce_timestamp(value):
        # Accept both epoch values and ISO8601 strings as returned by Vault.
        try:
            return int(value)
        except ValueError:
            return iso_to_timestamp(value)

    def is_renewable(self):
        """
        Checks whether the lease is renewable
        """
        return self.renewable

    def is_valid_for(self, valid_for=0, blur=0):
        """
        Checks whether the entity is valid

        valid_for
            Check whether the entity will still be valid in the future.
            This can be an integer, which will be interpreted as seconds, or a
            time string using the same format as Vault does:
            Suffix ``s`` for seconds, ``m`` for minutes, ``h`` for hours, ``d`` for days.
            Defaults to 0.

        blur
            Allow undercutting ``valid_for`` for this amount of seconds.
            Defaults to 0.
        """
        # A duration of 0 means the entity never expires.
        if not self.duration:
            return True
        remaining = self.expire_time - time.time() - timestring_map(valid_for)
        return remaining >= 0 or abs(remaining) <= blur
class UseCountMixin:
    """
    Mixin that tracks a limited number of uses.
    """

    def __init__(self, num_uses=0, use_count=0, **kwargs):
        self.num_uses = num_uses
        self.use_count = use_count
        super().__init__(**kwargs)

    def used(self):
        """
        Increment the use counter by one.
        """
        self.use_count += 1

    def has_uses_left(self, uses=1):
        """
        Check whether this entity has uses left.
        """
        # num_uses == 0 means unlimited uses.
        if self.num_uses == 0:
            return True
        return self.use_count + uses <= self.num_uses
class DropInitKwargsMixin:
    """
    Mixin that breaks the chain of passing unhandled kwargs up the MRO.
    """

    def __init__(self, *args, **kwargs):  # pylint: disable=unused-argument
        # Swallow leftover keyword arguments so ``object.__init__`` never
        # receives any and raises a TypeError.
        super().__init__(*args)
class AccessorMixin:
    """
    Mixin that manages accessor information relevant for tokens/secret IDs
    """

    def __init__(self, accessor=None, wrapping_accessor=None, **kwargs):
        # When constructed from a wrapped response, ``accessor`` belongs to
        # the wrapping token; the actual entity's accessor arrives as
        # ``wrapped_accessor``. Swap so self.accessor always points to the
        # actual entity.
        if "wrapped_accessor" in kwargs:
            wrapping_accessor = accessor
            accessor = kwargs.pop("wrapped_accessor")
        self.accessor = accessor
        self.wrapping_accessor = wrapping_accessor
        super().__init__(**kwargs)

    def accessor_payload(self):
        if self.accessor is None:
            raise VaultInvocationError("No accessor information available")
        return {"accessor": self.accessor}
class BaseLease(DurationMixin, DropInitKwargsMixin):
    """
    Base class for leases that expire with time.
    """

    def __init__(self, lease_id, **kwargs):
        # ``id`` and ``lease_id`` are kept as aliases of each other.
        self.id = self.lease_id = lease_id
        super().__init__(**kwargs)

    def __str__(self):
        return self.id

    def __repr__(self):
        return repr(self.to_dict())

    def __eq__(self, other):
        # Allow comparison against both other leases and plain dicts.
        data = getattr(other, "__dict__", other)
        return data == self.__dict__

    def with_renewed(self, **kwargs):
        """
        Partially update the contained data after lease renewal
        """
        attrs = dict(self.__dict__)
        # Drop expire_time so it is recomputed from the updated duration.
        del attrs["expire_time"]
        attrs.update(kwargs)
        return type(self)(**attrs)

    def to_dict(self):
        """
        Return a dict of all contained attributes
        """
        return self.__dict__
class VaultLease(BaseLease):
    """
    Data object representing a Vault lease.

    lease_id
        The lease ID as reported by Vault.
    data
        The secret payload associated with this lease (e.g. dynamic
        credentials), kept alongside the lease metadata.
    """

    def __init__(self, lease_id, data, **kwargs):
        # save lease-associated data
        self.data = data
        super().__init__(lease_id, **kwargs)
class VaultToken(UseCountMixin, AccessorMixin, BaseLease):
    """
    Data object representing an authentication token
    """

    def __init__(self, **kwargs):
        # Accept raw Vault auth data, which names the token ``client_token``.
        if "client_token" in kwargs:
            kwargs["lease_id"] = kwargs.pop("client_token")
        super().__init__(**kwargs)

    def is_valid(self, valid_for=0, uses=1):
        """
        Checks whether the token is valid for an amount of time and number of uses

        valid_for
            Check whether the token will still be valid in the future.
            This can be an integer, which will be interpreted as seconds, or a
            time string using the same format as Vault does:
            Suffix ``s`` for seconds, ``m`` for minutes, ``h`` for hours, ``d`` for days.
            Defaults to 0.

        uses
            Check whether the token has at least this number of uses left. Defaults to 1.
        """
        return self.has_uses_left(uses) and self.is_valid_for(valid_for)

    def is_renewable(self):
        """
        Check whether the token is renewable, which requires it
        to be currently valid for at least two uses and renewable
        """
        # Renewing a token deducts a use, hence it does not make sense to
        # renew a token on the last use
        if not self.renewable:
            return False
        return self.is_valid(uses=2)

    def payload(self):
        """
        Return the payload to use for POST requests using this token
        """
        return {"token": str(self)}

    def serialize_for_minion(self):
        """
        Serialize all necessary data to recreate this object
        into a dict that can be sent to a minion.
        """
        return {
            "client_token": self.id,
            "renewable": self.renewable,
            "lease_duration": self.duration,
            "num_uses": self.num_uses,
            "creation_time": self.creation_time,
            "expire_time": self.expire_time,
        }
class VaultSecretId(UseCountMixin, AccessorMixin, BaseLease):
    """
    Data object representing an AppRole secret ID.
    """

    def __init__(self, **kwargs):
        if "secret_id" in kwargs:
            # Ensure response data from Vault is accepted as well.
            # Remap Vault's secret ID field names onto the lease attributes.
            # NOTE: when ``secret_id`` is passed, ``secret_id_ttl`` is
            # required as well (KeyError otherwise).
            kwargs["lease_id"] = kwargs.pop("secret_id")
            kwargs["lease_duration"] = kwargs.pop("secret_id_ttl")
            kwargs["num_uses"] = kwargs.pop("secret_id_num_uses", 0)
            kwargs["accessor"] = kwargs.pop("secret_id_accessor", None)
        if "expiration_time" in kwargs:
            kwargs["expire_time"] = kwargs.pop("expiration_time")
        super().__init__(**kwargs)

    def is_valid(self, valid_for=0, uses=1):
        """
        Checks whether the secret ID is valid for an amount of time and number of uses

        valid_for
            Check whether the secret ID will still be valid in the future.
            This can be an integer, which will be interpreted as seconds, or a
            time string using the same format as Vault does:
            Suffix ``s`` for seconds, ``m`` for minutes, ``h`` for hours, ``d`` for days.
            Defaults to 0.

        uses
            Check whether the secret ID has at least this number of uses left. Defaults to 1.
        """
        return self.is_valid_for(valid_for) and self.has_uses_left(uses)

    def payload(self):
        """
        Return the payload to use for POST requests using this secret ID
        """
        return {"secret_id": str(self)}

    def serialize_for_minion(self):
        """
        Serialize all necessary data to recreate this object
        into a dict that can be sent to a minion.
        """
        return {
            "secret_id": self.id,
            "secret_id_ttl": self.duration,
            "secret_id_num_uses": self.num_uses,
            "creation_time": self.creation_time,
            "expire_time": self.expire_time,
        }
class VaultWrappedResponse(AccessorMixin, BaseLease):
    """
    Data object representing a wrapped response

    creation_path
        The API path the wrapped response was created on. Stored so callers
        can verify it when unwrapping.
    """

    def __init__(
        self,
        creation_path,
        **kwargs,
    ):
        if "token" in kwargs:
            # Ensure response data from Vault is accepted as well.
            # Wrap info names the wrapping token ``token`` and its
            # validity period ``ttl`` (required alongside ``token``).
            kwargs["lease_id"] = kwargs.pop("token")
            kwargs["lease_duration"] = kwargs.pop("ttl")
        if "renewable" not in kwargs:
            # Not renewable might be incorrect, wrapped tokens are,
            # but we cannot know what was wrapped here.
            kwargs["renewable"] = False
        super().__init__(**kwargs)
        self.creation_path = creation_path

    def serialize_for_minion(self):
        """
        Serialize all necessary data to recreate this object
        into a dict that can be sent to a minion.
        """
        return {
            "wrap_info": {
                "token": self.id,
                "ttl": self.duration,
                "creation_time": self.creation_time,
                "creation_path": self.creation_path,
            },
        }
class LeaseStore:
    """
    Caches leases and handles lease operations
    """

    def __init__(self, client, cache, expire_events=None):
        # client: authenticated Vault API client used for renew/revoke calls
        self.client = client
        # cache: lease cache backend, stores VaultLease objects keyed by ckey
        self.cache = cache
        # expire_events: optional callable fired when a cached lease
        # could not be kept valid
        self.expire_events = expire_events
        # to update cached leases after renewal/revocation, we need a mapping id => ckey
        self.lease_id_ckey_cache = {}

    def get(
        self,
        ckey,
        valid_for=0,
        renew=True,
        renew_increment=None,
        renew_blur=2,
        revoke=60,
    ):
        """
        Return cached lease or None.

        ckey
            Cache key the lease has been saved in.

        valid_for
            Ensure the returned lease is valid for at least this amount of time.
            This can be an integer, which will be interpreted as seconds, or a
            time string using the same format as Vault does:
            Suffix ``s`` for seconds, ``m`` for minutes, ``h`` for hours, ``d`` for days.
            Defaults to 0.

            .. note::

                This does not take into account token validity, which active leases
                are bound to as well.

        renew
            If the lease is still valid, but not valid for ``valid_for``, attempt to
            renew it. Defaults to true.

        renew_increment
            When renewing, request the lease to be valid for this amount of time from
            the current point of time onwards.
            If unset, will renew the lease by its default validity period and, if
            the renewed lease does not pass ``valid_for``, will try to renew it
            by ``valid_for``.

        renew_blur
            When checking validity after renewal, allow this amount of seconds in leeway
            to account for latency. Especially important when renew_increment is unset
            and the default validity period is less than ``valid_for``.
            Defaults to 2.

        revoke
            If the lease is not valid for ``valid_for`` and renewals
            are disabled or impossible, attempt to have Vault revoke the lease
            after this amount of time and flush the cache. Defaults to 60s.
        """
        if renew_increment is not None and timestring_map(valid_for) > timestring_map(
            renew_increment
        ):
            raise VaultInvocationError(
                "When renew_increment is set, it must be at least valid_for to make sense"
            )

        def check_revoke(lease):
            # Fire an expiry event (if configured), schedule revocation and
            # signal to the caller that no valid lease is available.
            if self.expire_events is not None:
                self.expire_events(
                    tag=f"vault/lease/{ckey}/expire", data={"valid_for_less": valid_for}
                )
            if revoke:
                self.revoke(lease, delta=revoke)
            return None

        # Since we can renew leases, do not check for future validity in cache
        lease = self.cache.get(ckey, flush=bool(revoke))
        if lease is not None:
            # Remember the id => ckey mapping for later renew/revoke calls.
            self.lease_id_ckey_cache[str(lease)] = ckey
        if lease is None or lease.is_valid_for(valid_for):
            return lease
        if not renew:
            return check_revoke(lease)
        try:
            lease = self.renew(lease, increment=renew_increment, raise_all_errors=False)
        except VaultNotFoundError:
            # The cached lease was already revoked
            return check_revoke(lease)
        if not lease.is_valid_for(valid_for, blur=renew_blur):
            if renew_increment is not None:
                # valid_for cannot possibly be respected
                return check_revoke(lease)
            # Maybe valid_for is greater than the default validity period, so check if
            # the lease can be renewed by valid_for
            try:
                lease = self.renew(lease, increment=valid_for, raise_all_errors=False)
            except VaultNotFoundError:
                # The cached lease was already revoked
                return check_revoke(lease)
            if not lease.is_valid_for(valid_for, blur=renew_blur):
                return check_revoke(lease)
        return lease

    def list(self):
        """
        List all cached leases.
        """
        return self.cache.list()

    def lookup(self, lease):
        """
        Lookup lease meta information.

        lease
            A lease ID or VaultLease object to look up.
        """
        endpoint = "sys/leases/lookup"
        payload = {"lease_id": str(lease)}
        return self.client.post(endpoint, payload=payload)

    def renew(self, lease, increment=None, raise_all_errors=True, _store=True):
        """
        Renew a lease.

        lease
            A lease ID or VaultLease object to renew.

        increment
            Request the lease to be valid for this amount of time from the current
            point of time onwards. Can also be used to reduce the validity period.
            The server might not honor this increment.
            Can be an integer (seconds) or a time string like ``1h``. Optional.

        raise_all_errors
            When ``lease`` is a VaultLease and the renewal does not succeed,
            do not catch exceptions. If this is false, the lease will be returned
            unmodified if the exception does not indicate it is invalid (NotFound).
            Defaults to true.
        """
        endpoint = "sys/leases/renew"
        payload = {"lease_id": str(lease)}
        if increment is not None:
            payload["increment"] = int(timestring_map(increment))
        if not isinstance(lease, VaultLease) and lease in self.lease_id_ckey_cache:
            # Resolve a bare lease ID to the cached VaultLease object
            # so the cache can be updated after renewal.
            lease = self.cache.get(self.lease_id_ckey_cache[lease], flush=False)
            if lease is None:
                raise VaultNotFoundError("Lease is already expired")
        try:
            ret = self.client.post(endpoint, payload=payload)
        except VaultException as err:
            if raise_all_errors or not isinstance(lease, VaultLease):
                raise
            if isinstance(err, VaultNotFoundError):
                raise
            # Best effort: return the unmodified lease on non-fatal errors.
            return lease

        if _store and isinstance(lease, VaultLease):
            # Do not overwrite data of renewed leases!
            ret.pop("data", None)
            new_lease = lease.with_renewed(**ret)
            if str(new_lease) in self.lease_id_ckey_cache:
                self.store(self.lease_id_ckey_cache[str(new_lease)], new_lease)
            return new_lease
        return ret

    def renew_cached(self, match="*", increment=None):
        """
        Renew cached leases.

        match
            Only renew cached leases whose ckey matches this glob pattern.
            Defaults to ``*``.

        increment
            Request the leases to be valid for this amount of time from the current
            point of time onwards. Can also be used to reduce the validity period.
            The server might not honor this increment.
            Can be an integer (seconds) or a time string like ``1h``. Optional.
        """
        # ``match`` now defaults to ``*`` as documented, consistent with
        # ``revoke_cached``. Passing it explicitly keeps working as before.
        failed = []
        for ckey in self.list():
            if not fnmatch.fnmatch(ckey, match):
                continue
            lease = self.cache.get(ckey, flush=True)
            if lease is None:
                continue
            self.lease_id_ckey_cache[str(lease)] = ckey
            try:
                self.renew(lease, increment=increment)
            except (VaultPermissionDeniedError, VaultNotFoundError) as err:
                log.warning("Failed renewing cached lease: %s", type(err).__name__)
                log.debug("Lease ID was: %s", lease)
                failed.append(ckey)
        if failed:
            raise VaultException(f"Failed renewing some leases: {list(failed)}")
        return True

    def revoke(self, lease, delta=60):
        """
        Revoke a lease. Will also remove the cached lease,
        if it has been requested from this LeaseStore before.

        lease
            A lease ID or VaultLease object to revoke.

        delta
            Time after which the lease should be requested
            to be revoked by Vault.
            Defaults to 60s.
        """
        try:
            # Revocation is implemented as a renewal with a short increment:
            # 0 would attempt a complete renewal
            self.renew(lease, increment=delta or 1, _store=False)
        except VaultNotFoundError:
            # Already gone server-side; just drop it from the cache below.
            pass

        if str(lease) in self.lease_id_ckey_cache:
            self.cache.flush(self.lease_id_ckey_cache.pop(str(lease)))
        return True

    def revoke_cached(
        self,
        match="*",
        delta=60,
        flush_on_failure=True,
    ):
        """
        Revoke cached leases.

        match
            Only revoke cached leases whose ckey matches this glob pattern.
            Defaults to ``*``.

        delta
            Time after which the leases should be revoked by Vault.
            Defaults to 60s.

        flush_on_failure
            If a revocation fails, remove the lease from cache anyways.
            Defaults to true.
        """
        failed = []
        for ckey in self.list():
            if not fnmatch.fnmatch(ckey, match):
                continue
            lease = self.cache.get(ckey, flush=True)
            if lease is None:
                continue
            self.lease_id_ckey_cache[str(lease)] = ckey
            try:
                self.revoke(lease, delta=delta)
            except VaultPermissionDeniedError:
                failed.append(ckey)
                if flush_on_failure:
                    # Forget the lease and let Vault's automatic revocation handle it
                    self.cache.flush(self.lease_id_ckey_cache.pop(str(lease)))
        if failed:
            raise VaultException(f"Failed revoking some leases: {list(failed)}")
        return True

    def store(self, ckey, lease):
        """
        Cache a lease.

        ckey
            The cache key the lease should be saved in.

        lease
            A lease ID or VaultLease object to store.
        """
        self.cache.store(ckey, lease)
        self.lease_id_ckey_cache[str(lease)] = ckey
        return True

View file

@ -1,44 +0,0 @@
# Vault ACL policy for the Salt master during integration tests:
# token issuance plus AppRole/entity management on the salt-minions mount.

# Test minion token creation
path "auth/token/create" {
    capabilities = ["create", "read", "update"]
}

# Test minion token creation with token roles
path "auth/token/create/*" {
    capabilities = ["create", "read", "update"]
}

# AppRole/entity management testing
path "auth/salt-minions/role" {
    capabilities = ["list"]
}

path "auth/salt-minions/role/*" {
    capabilities = ["read", "create", "update", "delete"]
}

path "sys/auth/salt-minions" {
    capabilities = ["read", "sudo"]
}

# Entity lookup restricted to alias-based queries only
path "identity/lookup/entity" {
    capabilities = ["create", "update"]
    allowed_parameters = {
        "alias_name" = []
        "alias_mount_accessor" = []
    }
}

path "identity/entity/name/salt_minion_*" {
    capabilities = ["read", "create", "update", "delete"]
}

# Alias creation restricted to the listed parameters only
path "identity/entity-alias" {
    capabilities = ["create", "update"]
    allowed_parameters = {
        "id" = []
        "canonical_id" = []
        "mount_accessor" = []
        "name" = []
    }
}

View file

@ -1,29 +0,0 @@
# Broad KV test policy, including the ``patch`` capability on KV v2 paths.

# General KV v1 testing
path "secret/*" {
    capabilities = ["read", "list", "create", "update", "delete"]
}

# General KV v2 testing
path "kv-v2/*" {
    capabilities = ["read", "list", "create", "update", "delete", "patch"]
}

# ACL policy templating tests
path "salt/+/minions/{{identity.entity.metadata.minion-id}}" {
    capabilities = ["create", "read", "update", "delete", "list", "patch"]
}

# ACL policy templating tests with pillar values
path "salt/data/roles/{{identity.entity.metadata.role}}" {
    capabilities = ["read"]
}

# Test list policies
path "sys/policy" {
    capabilities = ["read"]
}

# Test managing policies
path "sys/policy/*" {
    capabilities = ["read", "create", "update", "delete"]
}

View file

@ -1,29 +0,0 @@
# Broad KV test policy without the ``patch`` capability.

# General KV v1 testing
path "secret/*" {
    capabilities = ["read", "list", "create", "update", "delete"]
}

# General KV v2 testing
path "kv-v2/*" {
    capabilities = ["read", "list", "create", "update", "delete"]
}

# ACL policy templating tests
path "salt/+/minions/{{identity.entity.metadata.minion-id}}" {
    capabilities = ["create", "read", "update", "delete", "list"]
}

# ACL policy templating tests with pillar values
path "salt/data/roles/{{identity.entity.metadata.role}}" {
    capabilities = ["read"]
}

# Test list policies
path "sys/policy" {
    capabilities = ["read"]
}

# Test managing policies
path "sys/policy/*" {
    capabilities = ["read", "create", "update", "delete"]
}

View file

@ -1,164 +0,0 @@
import logging
import pytest
import requests.exceptions
# pylint: disable=unused-import
from tests.support.pytest.vault import (
vault_container_version,
vault_delete_secret,
vault_environ,
vault_list_secrets,
vault_read_secret,
vault_write_secret,
)
pytestmark = [
pytest.mark.slow_test,
pytest.mark.skip_if_binaries_missing("dockerd", "vault", "getent"),
]
log = logging.getLogger(__name__)
@pytest.fixture(scope="module")
def minion_config_overrides(vault_port):
    """Minion opts pointing the vault utils at the test container."""
    return {
        "vault": {
            "auth": {
                "method": "token",
                "token": "testsecret",
            },
            "server": {
                "url": f"http://127.0.0.1:{vault_port}",
            },
        }
    }
@pytest.fixture
def vault(loaders):
    """Return the loaded ``salt.utils.vault`` utility module."""
    return loaders.utils.vault
@pytest.fixture(scope="module", autouse=True)
def vault_testing_data(vault_container_version):
    """Seed test secrets into the container, clean them up afterwards."""
    vault_write_secret("secret/utils/read", success="yup")
    vault_write_secret("secret/utils/deleteme", success="nope")
    try:
        yield
    finally:
        # Remove everything below secret/utils, including v2 metadata.
        secret_path = "secret/utils"
        for secret in vault_list_secrets(secret_path):
            vault_delete_secret(f"{secret_path}/{secret}", metadata=True)
def test_make_request_get_unauthd(vault):
    """
    Test that unauthenticated GET requests are possible
    """
    # sys/health does not require authentication
    res = vault.make_request("GET", "/v1/sys/health")
    assert res.status_code == 200
    assert res.json()
    assert "initialized" in res.json()
def test_make_request_get_authd(vault, vault_container_version):
    """
    Test that authenticated GET requests are possible
    """
    endpoint = "secret/utils/read"
    if vault_container_version in ["1.3.1", "latest"]:
        # these container versions serve ``secret/`` as KV v2,
        # where reads go through ``data/``
        endpoint = "secret/data/utils/read"
    res = vault.make_request("GET", f"/v1/{endpoint}")
    assert res.status_code == 200
    data = res.json()["data"]
    if vault_container_version in ["1.3.1", "latest"]:
        # KV v2 responses nest the secret under an extra ``data`` key
        data = data["data"]
    assert "success" in data
    assert data["success"] == "yup"
def test_make_request_post_json(vault, vault_container_version):
    """
    Test that POST requests are possible with json param
    """
    data = {"success": "yup"}
    endpoint = "secret/utils/write"
    if vault_container_version in ["1.3.1", "latest"]:
        # KV v2 writes nest the payload under ``data`` and go through data/
        data = {"data": data}
        endpoint = "secret/data/utils/write"
    res = vault.make_request("POST", f"/v1/{endpoint}", json=data)
    assert res.status_code in [200, 204]
    assert vault_read_secret("secret/utils/write") == {"success": "yup"}
def test_make_request_post_data(vault, vault_container_version):
    """
    Test that POST requests are possible with data param
    """
    # Payload passed as a pre-serialized JSON string, not a dict.
    data = '{"success": "yup_data"}'
    endpoint = "secret/utils/write"
    if vault_container_version in ["1.3.1", "latest"]:
        # KV v2 writes nest the payload under ``data`` and go through data/
        data = '{"data": {"success": "yup_data"}}'
        endpoint = "secret/data/utils/write"
    res = vault.make_request("POST", f"/v1/{endpoint}", data=data)
    assert res.status_code in [200, 204]
    assert vault_read_secret("secret/utils/write") == {"success": "yup_data"}
def test_make_request_delete(vault, vault_container_version):
    """
    Test that DELETE requests are possible
    """
    endpoint = "secret/utils/deleteme"
    if vault_container_version in ["1.3.1", "latest"]:
        # KV v2 soft deletes go through the data/ path
        endpoint = "secret/data/utils/deleteme"
    res = vault.make_request("DELETE", f"/v1/{endpoint}")
    assert res.status_code in [200, 204]
    assert vault_read_secret("secret/utils/deleteme") is None
def test_make_request_list(vault, vault_container_version):
    """
    Test that LIST requests are possible
    """
    endpoint = "secret/utils"
    if vault_container_version in ["1.3.1", "latest"]:
        # KV v2 listings go through the metadata/ path
        endpoint = "secret/metadata/utils"
    res = vault.make_request("LIST", f"/v1/{endpoint}")
    assert res.status_code == 200
    assert res.json()["data"]["keys"] == vault_list_secrets("secret/utils")
def test_make_request_token_override(vault, vault_container_version):
    """
    Test that overriding the token in use is possible
    """
    endpoint = "secret/utils/read"
    if vault_container_version in ["1.3.1", "latest"]:
        endpoint = "secret/data/utils/read"
    # An invalid token must yield a permission-denied response
    res = vault.make_request("GET", f"/v1/{endpoint}", token="invalid")
    assert res.status_code == 403
def test_make_request_url_override(vault, vault_container_version):
    """
    Test that overriding the server URL is possible
    """
    endpoint = "secret/utils/read"
    if vault_container_version in ["1.3.1", "latest"]:
        endpoint = "secret/data/utils/read"
    # Point at a closed port; the connection failure proves the override
    # was honored instead of the configured server URL.
    with pytest.raises(
        requests.exceptions.ConnectionError, match=".*Max retries exceeded with url:.*"
    ):
        vault.make_request(
            "GET", f"/v1/{endpoint}", vault_url="http://127.0.0.1:1", timeout=2
        )

View file

@ -1,587 +0,0 @@
import pytest
import requests
import salt.modules.event
import salt.utils.vault as vault
import salt.utils.vault.auth as vauth
import salt.utils.vault.client as vclient
import salt.utils.vault.helpers as hlp
from tests.support.mock import MagicMock, Mock, patch
def _mock_json_response(data, status_code=200, reason=""):
    """
    Mock helper for http response
    """
    response = Mock(spec=requests.models.Response)
    response.json.return_value = data
    response.status_code = status_code
    response.reason = reason
    # Mirror requests' semantics: ok is True below 400, error statuses
    # make raise_for_status() raise.
    response.ok = status_code < 400
    if not response.ok:
        response.raise_for_status.side_effect = requests.exceptions.HTTPError
    return response
@pytest.fixture(params=[{}])
def server_config(request):
    """Base Vault server connection config, overridable via parametrization."""
    conf = {
        "url": "http://127.0.0.1:8200",
        "namespace": None,
        "verify": None,
    }
    conf.update(request.param)
    return conf
@pytest.fixture(params=["token", "approle"])
def test_config(server_config, request):
    """Full local (master-side) Vault config for the parametrized auth method."""
    defaults = {
        "auth": {
            "approle_mount": "approle",
            "approle_name": "salt-master",
            "method": "token",
            "secret_id": None,
            "token_lifecycle": {
                "minimum_ttl": 10,
                "renew_increment": None,
            },
        },
        "cache": {
            "backend": "session",
            "clear_attempt_revocation": 60,
            "clear_on_unauthorized": True,
            "config": 3600,
            "expire_events": False,
            "secret": "ttl",
        },
        "issue": {
            "allow_minion_override_params": False,
            "type": "token",
            "approle": {
                "mount": "salt-minions",
                "params": {
                    "bind_secret_id": True,
                    "secret_id_num_uses": 1,
                    "secret_id_ttl": 60,
                    "token_explicit_max_ttl": 60,
                    "token_num_uses": 10,
                },
            },
            "token": {
                "role_name": None,
                "params": {
                    "explicit_max_ttl": None,
                    "num_uses": 1,
                },
            },
            "wrap": "30s",
        },
        "issue_params": {},
        "metadata": {
            "entity": {
                "minion-id": "{minion}",
            },
            "token": {
                "saltstack-jid": "{jid}",
                "saltstack-minion": "{minion}",
                "saltstack-user": "{user}",
            },
        },
        "policies": {
            "assign": [
                "saltstack/minions",
                "saltstack/{minion}",
            ],
            "cache_time": 60,
            "refresh_pillar": None,
        },
        "server": server_config,
    }
    # Adjust the auth section depending on the requested method variant.
    if request.param == "token":
        defaults["auth"]["token"] = "test-token"
        return defaults
    if request.param == "wrapped_token":
        defaults["auth"]["method"] = "wrapped_token"
        defaults["auth"]["token"] = "test-wrapped-token"
        return defaults
    if request.param == "approle":
        defaults["auth"]["method"] = "approle"
        defaults["auth"]["role_id"] = "test-role-id"
        defaults["auth"]["secret_id"] = "test-secret-id"
        return defaults
    if request.param == "approle_no_secretid":
        defaults["auth"]["method"] = "approle"
        defaults["auth"]["role_id"] = "test-role-id"
        return defaults
@pytest.fixture(params=["token", "approle"])
def test_remote_config(server_config, request):
    """Config as received from the master, for the parametrized auth method."""
    defaults = {
        "auth": {
            "approle_mount": "approle",
            "approle_name": "salt-master",
            "method": "token",
            "secret_id": None,
            "token_lifecycle": {
                "minimum_ttl": 10,
                "renew_increment": None,
            },
        },
        "cache": {
            "backend": "session",
            "clear_attempt_revocation": 60,
            "clear_on_unauthorized": True,
            "config": 3600,
            "expire_events": False,
            "kv_metadata": "connection",
            "secret": "ttl",
        },
        "server": server_config,
    }
    # Adjust the auth section depending on the requested method variant.
    if request.param == "token":
        defaults["auth"]["token"] = "test-token"
        return defaults
    if request.param == "wrapped_token":
        defaults["auth"]["method"] = "wrapped_token"
        defaults["auth"]["token"] = "test-wrapped-token"
        return defaults
    if request.param == "token_changed":
        defaults["auth"]["token"] = "test-token-changed"
        return defaults
    if request.param == "approle":
        defaults["auth"]["method"] = "approle"
        defaults["auth"]["role_id"] = "test-role-id"
        # actual remote config would not contain secret_id, but
        # this is used for testing both from local and from remote
        defaults["auth"]["secret_id"] = "test-secret-id"
        return defaults
    if request.param == "approle_no_secretid":
        defaults["auth"]["method"] = "approle"
        defaults["auth"]["role_id"] = "test-role-id"
        return defaults
    # this happens when wrapped role_ids are merged by _query_master
    if request.param == "approle_wrapped_roleid":
        defaults["auth"]["method"] = "approle"
        defaults["auth"]["role_id"] = {"role_id": "test-role-id"}
        # actual remote config does not contain secret_id
        defaults["auth"]["secret_id"] = True
        return defaults
@pytest.fixture
def role_id_response():
    """Sample response for reading an AppRole role-id."""
    return {
        "request_id": "c85838c5-ecfe-6d07-4b28-1935ac2e304a",
        "lease_id": "",
        "lease_duration": 0,
        "renewable": False,
        "data": {"role_id": "58b4c650-3d13-5932-a2fa-03865c8e85d7"},
        "warnings": None,
    }
@pytest.fixture
def secret_id_response():
    """Sample response for generating an AppRole secret-id."""
    return {
        "request_id": "c85838c5-ecfe-6d07-4b28-1935ac2e304a",
        "lease_id": "",
        "lease_duration": 0,
        "renewable": False,
        "data": {
            "secret_id_accessor": "84896a0c-1347-aa90-a4f6-aca8b7558780",
            "secret_id": "841771dc-11c9-bbc7-bcac-6a3945a69cd9",
            "secret_id_ttl": 1337,
        },
        "warnings": None,
    }
@pytest.fixture
def secret_id_meta_response():
    """Sample response for looking up secret-id metadata."""
    return {
        "request_id": "7c97d03d-2166-6217-8da1-19604febae5c",
        "lease_id": "",
        "lease_duration": 0,
        "renewable": False,
        "data": {
            "cidr_list": [],
            "creation_time": "2022-08-22T17:37:07.753989459+00:00",
            "expiration_time": "2339-07-13T13:23:46.753989459+00:00",
            "last_updated_time": "2022-08-22T17:37:07.753989459+00:00",
            "metadata": {},
            "secret_id_accessor": "b1c88755-f2f5-2fd2-4bcc-cade95f6ba96",
            "secret_id_num_uses": 0,
            "secret_id_ttl": 9999999999,
            "token_bound_cidrs": [],
        },
        "warnings": None,
    }
@pytest.fixture
def wrapped_role_id_response():
    """Sample response for a response-wrapped role-id read."""
    return {
        "request_id": "",
        "lease_id": "",
        "lease_duration": 0,
        "renewable": False,
        "data": None,
        "warnings": None,
        "wrap_info": {
            "token": "test-wrapping-token",
            "accessor": "test-wrapping-token-accessor",
            "ttl": 180,
            "creation_time": "2022-09-10T13:37:12.123456789+00:00",
            "creation_path": "auth/approle/role/test-minion/role-id",
            "wrapped_accessor": "",
        },
    }
@pytest.fixture
def wrapped_secret_id_response():
    """Sample response for a response-wrapped secret-id generation."""
    return {
        "request_id": "",
        "lease_id": "",
        "lease_duration": 0,
        "renewable": False,
        "data": None,
        "warnings": None,
        "wrap_info": {
            "token": "test-wrapping-token",
            "accessor": "test-wrapping-token-accessor",
            "ttl": 180,
            "creation_time": "2022-09-10T13:37:12.123456789+00:00",
            "creation_path": "auth/approle/role/test-minion/secret-id",
            "wrapped_accessor": "",
        },
    }
@pytest.fixture
def wrapped_role_id_lookup_response():
    """Sample response for looking up a wrapping token (sys/wrapping/lookup)."""
    return {
        "request_id": "31e7020e-3ce3-2c63-e453-d5da8a9890f1",
        "lease_id": "",
        "renewable": False,
        "lease_duration": 0,
        "data": {
            "creation_path": "auth/approle/role/test-minion/role-id",
            "creation_time": "2022-09-10T13:37:12.123456789+00:00",
            "creation_ttl": 180,
        },
        "wrap_info": None,
        "warnings": None,
        "auth": None,
    }
@pytest.fixture
def wrapped_token_auth_response():
    """Sample response for a response-wrapped token creation."""
    return {
        "request_id": "",
        "lease_id": "",
        "lease_duration": 0,
        "renewable": False,
        "data": None,
        "warnings": None,
        "wrap_info": {
            "token": "test-wrapping-token",
            "accessor": "test-wrapping-token-accessor",
            "ttl": 180,
            "creation_time": "2022-09-10T13:37:12.123456789+00:00",
            "creation_path": "auth/token/create/salt-minion",
            "wrapped_accessor": "",
        },
    }
@pytest.fixture
def token_lookup_self_response():
    """Sample response for auth/token/lookup-self."""
    return {
        "request_id": "0e8c388e-2cb6-bcb2-83b7-625127d568bb",
        "lease_id": "",
        "lease_duration": 0,
        "renewable": False,
        "data": {
            "accessor": "test-token-accessor",
            "creation_time": 1661188581,
            "creation_ttl": 9999999999,
            "display_name": "",
            "entity_id": "",
            "expire_time": "2339-07-13T11:03:00.473212541+00:00",
            "explicit_max_ttl": 0,
            "id": "test-token",
            "issue_time": "2022-08-22T17:16:21.473219641+00:00",
            "meta": {},
            "num_uses": 0,
            "orphan": True,
            "path": "",
            "policies": ["default"],
            "renewable": True,
            "ttl": 9999999999,
            "type": "service",
        },
        "warnings": None,
    }
@pytest.fixture
def token_renew_self_response():
    """Sample response for renewing the current token (renew-self)."""
    return {
        "auth": {
            "client_token": "test-token",
            "policies": ["default", "renewed"],
            "metadata": {},
        },
        "lease_duration": 3600,
        "renewable": True,
    }
@pytest.fixture
def token_renew_other_response():
    """Sample response for renewing another token by its ID."""
    return {
        "auth": {
            "client_token": "other-test-token",
            "policies": ["default", "renewed"],
            "metadata": {},
        },
        "lease_duration": 3600,
        "renewable": True,
    }
@pytest.fixture
def token_renew_accessor_response():
    """Sample response for renewing a token by accessor (no client_token)."""
    return {
        "auth": {
            "client_token": "",
            "policies": ["default", "renewed"],
            "metadata": {},
        },
        "lease_duration": 3600,
        "renewable": True,
    }
@pytest.fixture
def token_auth():
    """Sample auth data for a freshly issued token."""
    return {
        "request_id": "0e8c388e-2cb6-bcb2-83b7-625127d568bb",
        "lease_id": "",
        "lease_duration": 0,
        "renewable": False,
        "auth": {
            "client_token": "test-token",
            "renewable": True,
            "lease_duration": 9999999999,
            "num_uses": 0,
            "creation_time": 1661188581,
        },
    }
@pytest.fixture
def lease_response():
    """Sample Vault response for a leased (database) secret."""
    return {
        "request_id": "0e8c388e-2cb6-bcb2-83b7-625127d568bb",
        "lease_id": "database/creds/testrole/abcd",
        "lease_duration": 1337,
        "renewable": True,
        "data": {
            "username": "test",
            "password": "test",
        },
    }
@pytest.fixture
def lease():
    """Serialized lease data as cached by the lease store (id/duration/times added)."""
    return {
        "id": "database/creds/testrole/abcd",
        "lease_id": "database/creds/testrole/abcd",
        "renewable": True,
        "duration": 1337,
        "creation_time": 0,
        "expire_time": 1337,
        "data": {
            "username": "test",
            "password": "test",
        },
    }
@pytest.fixture
def session():
    """Mocked ``requests.Session`` for injecting into Vault clients."""
    return Mock(spec=requests.Session)
@pytest.fixture
def req(session):
    """Shortcut to the mocked session's ``request`` method."""
    yield session.request
@pytest.fixture
def req_failed(req, request):
    """Make the mocked request fail; status code via indirect param, default 502."""
    status_code = getattr(request, "param", 502)
    req.return_value = _mock_json_response({"errors": ["foo"]}, status_code=status_code)
    yield req
@pytest.fixture
def req_success(req):
    """Make the mocked request return an empty 204 success response."""
    req.return_value = _mock_json_response(None, status_code=204)
    yield req
@pytest.fixture(params=[200])
def req_any(req, request):
    """Mock a response whose body is derived from the requested status code."""
    status = request.param
    body = {}
    if status != 204:
        body["data"] = {"foo": "bar"}
    if status >= 400:
        body["errors"] = ["foo"]
    req.return_value = _mock_json_response(body, status_code=status)
    yield req
@pytest.fixture
def req_unwrapping(wrapped_role_id_lookup_response, role_id_response, req):
    """Serve the wrapped lookup response on the unwrap endpoint, role_id otherwise."""

    def _dispatch(method, url, **kwargs):
        if url.endswith("sys/wrapping/lookup"):
            return _mock_json_response(wrapped_role_id_lookup_response)
        return _mock_json_response(role_id_response)

    req.side_effect = _dispatch
    yield req
@pytest.fixture(params=["data"])
def unauthd_client_mock(server_config, request):
    """Mocked unauthenticated Vault client; the param selects the unwrap response key."""
    client = Mock(spec=vclient.VaultClient)
    client.get_config.return_value = server_config
    client.unwrap.return_value = {request.param: {"bar": "baz"}}
    yield client
@pytest.fixture(params=[None, "valid_token"])
def client(server_config, request, session):
    """
    Build a Vault client for tests.

    ``None`` yields an unauthenticated client, ``valid_token`` an
    authenticated one. The ``invalid_token`` branch is only reachable
    through indirect parametrization by individual tests.
    """
    if request.param is None:
        return vclient.VaultClient(**server_config, session=session)
    if request.param == "valid_token":
        token = request.getfixturevalue(request.param)
        auth = Mock(spec=vauth.VaultTokenAuth)
        auth.is_renewable.return_value = True
        auth.is_valid.return_value = True
        auth.get_token.return_value = token
        return vclient.AuthenticatedVaultClient(auth, **server_config, session=session)
    if request.param == "invalid_token":
        token = request.getfixturevalue(request.param)
        auth = Mock(spec=vauth.VaultTokenAuth)
        auth.is_renewable.return_value = True
        auth.is_valid.return_value = False
        # get_token raising signals the expired auth to the client under test
        auth.get_token.side_effect = vault.VaultAuthExpired
        return vclient.AuthenticatedVaultClient(auth, **server_config, session=session)
@pytest.fixture
def valid_token(token_auth):
    """Mocked VaultToken that reports itself as valid and renewable."""
    token = MagicMock(spec=vault.VaultToken, **token_auth["auth"])
    token.is_valid.return_value = True
    token.is_renewable.return_value = True
    token.payload.return_value = {"token": token_auth["auth"]["client_token"]}
    token.__str__.return_value = token_auth["auth"]["client_token"]
    token.to_dict.return_value = token_auth["auth"]
    return token
@pytest.fixture
def invalid_token(valid_token):
    """Same token mock as ``valid_token``, but reporting invalid/unrenewable."""
    valid_token.is_valid.return_value = False
    valid_token.is_renewable.return_value = False
    return valid_token
@pytest.fixture
def cache_factory():
    """Patch ``salt.cache.factory`` and yield the patched factory mock."""
    with patch("salt.cache.factory", autospec=True) as factory:
        yield factory
@pytest.fixture
def events():
    """Mocked event sender matching ``salt.modules.event.send``."""
    return Mock(spec=salt.modules.event.send)
@pytest.fixture(
    params=["MASTER", "MASTER_IMPERSONATING", "MINION_LOCAL", "MINION_REMOTE"]
)
def salt_runtype(request):
    """Patch the Salt run-type detection helper to the parametrized run type."""
    runtype = Mock(spec=hlp._get_salt_run_type)
    runtype.return_value = getattr(hlp, f"SALT_RUNTYPE_{request.param}")
    with patch("salt.utils.vault.helpers._get_salt_run_type", runtype):
        yield
@pytest.fixture(
    params=[
        "master",
        "master_impersonating",
        "minion_local_1",
        "minion_local_2",
        "minion_local_3",
        "minion_remote",
    ]
)
def opts_runtype(request):
    """Return a sample ``__opts__`` dict representing the parametrized run type."""
    # NOTE(review): "master_peer_run" is defined below but missing from the
    # fixture params above, so it is never selected — confirm this is intended.
    rtype = {
        "master": {
            "__role": "master",
            "vault": {},
        },
        "master_peer_run": {
            "__role": "master",
            "grains": {
                "id": "test-minion",
            },
            "vault": {},
        },
        "master_impersonating": {
            "__role": "master",
            "minion_id": "test-minion",
            "grains": {
                "id": "test-minion",
            },
            "vault": {},
        },
        "minion_local_1": {
            "grains": {"id": "test-minion"},
            "local": True,
        },
        "minion_local_2": {
            "file_client": "local",
            "grains": {"id": "test-minion"},
        },
        "minion_local_3": {
            "grains": {"id": "test-minion"},
            "master_type": "disable",
        },
        "minion_remote": {
            "grains": {"id": "test-minion"},
        },
    }
    return rtype[request.param]

View file

@ -1,401 +0,0 @@
import pytest
import salt.utils.vault as vaultutil
import salt.utils.vault.api as vapi
import salt.utils.vault.client as vclient
from tests.support.mock import Mock, patch
@pytest.fixture
def entity_lookup_response():
    """Sample Vault response for ``identity/lookup/entity``."""
    return {
        "data": {
            "aliases": [],
            "creation_time": "2017-11-13T21:01:33.543497Z",
            "direct_group_ids": [],
            "group_ids": [],
            "id": "043fedec-967d-b2c9-d3af-0c467b04e1fd",
            "inherited_group_ids": [],
            "last_update_time": "2017-11-13T21:01:33.543497Z",
            "merged_entity_ids": None,
            "metadata": None,
            "name": "test-minion",
            "policies": None,
        }
    }
@pytest.fixture
def entity_fetch_response():
    """Sample Vault response for reading an identity entity."""
    return {
        "data": {
            "aliases": [],
            "creation_time": "2018-09-19T17:20:27.705389973Z",
            "direct_group_ids": [],
            "disabled": False,
            "group_ids": [],
            "id": "test-entity-id",
            "inherited_group_ids": [],
            "last_update_time": "2018-09-19T17:20:27.705389973Z",
            "merged_entity_ids": None,
            "metadata": {
                "minion-id": "test-minion",
            },
            "name": "salt_minion_test-minion",
            "policies": [
                "default",
                "saltstack/minions",
                "saltstack/minion/test-minion",
            ],
        }
    }
@pytest.fixture
def secret_id_response():
    """Sample Vault response for generating an AppRole secret ID."""
    return {
        "request_id": "0e8c388e-2cb6-bcb2-83b7-625127d568bb",
        "lease_id": "",
        "lease_duration": 0,
        "renewable": False,
        "data": {
            "secret_id_accessor": "84896a0c-1347-aa90-a4f6-aca8b7558780",
            "secret_id": "841771dc-11c9-bbc7-bcac-6a3945a69cd9",
            "secret_id_ttl": 60,
        },
    }
@pytest.fixture
def secret_id_lookup_accessor_response():
    """Sample Vault response for looking up a secret ID by its accessor."""
    return {
        "request_id": "28f2f9fb-26c0-6022-4970-baeb6366b085",
        "lease_id": "",
        "lease_duration": 0,
        "renewable": False,
        "data": {
            "cidr_list": [],
            "creation_time": "2022-09-09T15:11:28.358490481+00:00",
            "expiration_time": "2022-10-11T15:11:28.358490481+00:00",
            "last_updated_time": "2022-09-09T15:11:28.358490481+00:00",
            "metadata": {},
            "secret_id_accessor": "0380eb9f-3041-1c1c-234c-fde31a1a1fc1",
            "secret_id_num_uses": 1,
            "secret_id_ttl": 9999999999,
            "token_bound_cidrs": [],
        },
        "warnings": None,
    }
@pytest.fixture
def wrapped_response():
    """Sample response-wrapped Vault reply (wrap_info only, no data)."""
    return {
        "request_id": "",
        "lease_id": "",
        "lease_duration": 0,
        "renewable": False,
        "data": None,
        "warnings": None,
        "wrap_info": {
            "token": "test-wrapping-token",
            "accessor": "test-wrapping-token-accessor",
            "ttl": 180,
            "creation_time": "2022-09-10T13:37:12.123456789+00:00",
            "creation_path": "whatever/not/checked/here",
            "wrapped_accessor": "84896a0c-1347-aa90-a4f6-aca8b7558780",
        },
    }
@pytest.fixture
def approle_meta(secret_id_serialized):
    """Sample AppRole configuration as returned by reading the role."""
    return {
        "bind_secret_id": True,
        "local_secret_ids": False,
        "secret_id_bound_cidrs": [],
        "secret_id_num_uses": secret_id_serialized["secret_id_num_uses"],
        "secret_id_ttl": secret_id_serialized["secret_id_ttl"],
        "token_bound_cidrs": [],
        "token_explicit_max_ttl": 9999999999,
        "token_max_ttl": 0,
        "token_no_default_policy": False,
        "token_num_uses": 1,
        "token_period": 0,
        "token_policies": ["default"],
        "token_ttl": 0,
        "token_type": "default",
    }
@pytest.fixture
def secret_id_serialized(secret_id_response):
    """Serialized form of the sample secret ID (as cached locally)."""
    return {
        "secret_id": secret_id_response["data"]["secret_id"],
        "secret_id_ttl": secret_id_response["data"]["secret_id_ttl"],
        "secret_id_num_uses": 1,
        # + creation_time
        # + expire_time
    }
@pytest.fixture
def lookup_mount_response():
    """Sample Vault response for ``sys/auth/<mount>`` (mount metadata)."""
    return {
        "request_id": "7a49be19-199b-ce19-c139-0c334bf07d72",
        "lease_id": "",
        "lease_duration": 0,
        "renewable": False,
        "data": {
            "accessor": "auth_approle_cafebabe",
            "config": {
                "allowed_response_headers": [""],
                "audit_non_hmac_request_keys": [""],
                "audit_non_hmac_response_keys": [""],
                "default_lease_ttl": 0,
                "force_no_cache": False,
                "max_lease_ttl": 0,
                "passthrough_request_headers": [""],
                "token_type": "default-service",
            },
            "deprecation_status": "supported",
            "description": "",
            "external_entropy_access": False,
            "local": False,
            "options": None,
            "plugin_version": "",
            "running_plugin_version": "v1.13.1+builtin.vault",
            "running_sha256": "",
            "seal_wrap": False,
            "type": "approle",
            "uuid": "testuuid",
        },
        "warnings": None,
    }
@pytest.fixture
def client():
    """Mocked authenticated Vault client for API wrapper tests."""
    yield Mock(spec=vclient.AuthenticatedVaultClient)
@pytest.fixture
def approle_api(client):
    """AppRoleApi instance wired to the mocked client."""
    yield vapi.AppRoleApi(client)
@pytest.fixture
def identity_api(client):
    """IdentityApi instance wired to the mocked client."""
    yield vapi.IdentityApi(client)
def test_list_approles(approle_api, client):
    """
    Ensure list_approles call the API as expected and returns only a list of names
    """
    client.list.return_value = {"data": {"keys": ["foo", "bar"]}}
    assert approle_api.list_approles(mount="salt-minions") == ["foo", "bar"]
    client.list.assert_called_once_with("auth/salt-minions/role")
def test_destroy_secret_id_by_secret_id(approle_api, client):
    """
    Ensure destroy_secret_id calls the API as expected.
    """
    approle_api.destroy_secret_id(
        "test-minion", secret_id="test-secret-id", mount="salt-minions"
    )
    expected_payload = {"secret_id": "test-secret-id"}
    client.post.assert_called_once_with(
        "auth/salt-minions/role/test-minion/secret-id/destroy",
        payload=expected_payload,
    )
def test_destroy_secret_id_by_accessor(approle_api, client):
    """
    Ensure destroy_secret_id calls the API as expected.
    """
    approle_api.destroy_secret_id(
        "test-minion", accessor="test-accessor", mount="salt-minions"
    )
    expected_payload = {"secret_id_accessor": "test-accessor"}
    client.post.assert_called_once_with(
        "auth/salt-minions/role/test-minion/secret-id-accessor/destroy",
        payload=expected_payload,
    )
@pytest.mark.parametrize(
    "aliases",
    [
        [],
        [
            {"mount_accessor": "test-accessor", "id": "test-entity-alias-id"},
            {"mount_accessor": "other-accessor", "id": "other-entity-alias-id"},
        ],
    ],
)
def test_write_entity_alias(client, aliases, entity_fetch_response, identity_api):
    """
    Ensure write_entity_alias calls the API as expected.

    When the entity already carries an alias for the same mount accessor,
    the existing alias ID is expected in the payload (update instead of create).
    """
    metadata = {"foo": "bar"}
    payload = {
        "canonical_id": "test-entity-id",
        "mount_accessor": "test-accessor",
        "name": "test-role-id",
        "custom_metadata": metadata,
    }
    if aliases:
        entity_fetch_response["data"]["aliases"] = aliases
        # only the alias matching the mount accessor contributes its ID
        if aliases[0]["mount_accessor"] == "test-accessor":
            payload["id"] = aliases[0]["id"]
    with patch(
        "salt.utils.vault.api.IdentityApi._lookup_mount_accessor",
        return_value="test-accessor",
    ), patch(
        "salt.utils.vault.api.IdentityApi.read_entity",
        return_value=entity_fetch_response["data"],
    ):
        identity_api.write_entity_alias(
            "salt_minion_test-minion",
            alias_name="test-role-id",
            mount="salt-minions",
            custom_metadata=metadata,
        )
    client.post.assert_called_with("identity/entity-alias", payload=payload)
def test_write_entity(client, identity_api):
    """
    Ensure write_entity calls the API as expected.
    """
    metadata = {"foo": "bar"}
    identity_api.write_entity("salt_minion_test-minion", metadata=metadata)
    client.post.assert_called_with(
        "identity/entity/name/salt_minion_test-minion",
        payload={"metadata": metadata},
    )
def test_read_entity_by_alias_failed(client, identity_api):
    """
    Ensure read_entity_by_alias raises VaultNotFoundError if the lookup fails.
    """
    with patch(
        "salt.utils.vault.api.IdentityApi._lookup_mount_accessor",
        return_value="test-accessor",
    ):
        # an empty response (no "data") signals a failed lookup
        client.post.return_value = []
        with pytest.raises(vapi.VaultNotFoundError):
            identity_api.read_entity_by_alias(
                alias="test-role-id", mount="salt-minions"
            )
def test_read_entity_by_alias(client, entity_lookup_response, identity_api):
    """
    Ensure read_entity_by_alias calls the API as expected.
    """
    with patch(
        "salt.utils.vault.api.IdentityApi._lookup_mount_accessor",
        return_value="test-accessor",
    ):
        client.post.return_value = entity_lookup_response
        res = identity_api.read_entity_by_alias(
            alias="test-role-id", mount="salt-minions"
        )
        # only the "data" part of the response should be returned
        assert res == entity_lookup_response["data"]
        payload = {
            "alias_name": "test-role-id",
            "alias_mount_accessor": "test-accessor",
        }
        client.post.assert_called_once_with("identity/lookup/entity", payload=payload)
def test_lookup_mount_accessor(client, identity_api, lookup_mount_response):
    """
    Ensure _lookup_mount_accessor calls the API as expected.
    """
    client.get.return_value = lookup_mount_response
    accessor = identity_api._lookup_mount_accessor("salt-minions")
    assert accessor == "auth_approle_cafebabe"
    client.get.assert_called_once_with("sys/auth/salt-minions")
@pytest.mark.parametrize("wrap", ["30s", False])
def test_generate_secret_id(
    client, wrapped_response, secret_id_response, wrap, approle_api
):
    """
    Ensure generate_secret_id calls the API as expected,
    returning a wrapped response when wrapping was requested.
    """

    def res_or_wrap(*args, **kwargs):
        # emulate the client returning either a wrapped or plain response
        if kwargs.get("wrap"):
            return vaultutil.VaultWrappedResponse(**wrapped_response["wrap_info"])
        return secret_id_response

    client.post.side_effect = res_or_wrap
    metadata = {"foo": "bar"}
    res = approle_api.generate_secret_id(
        "test-minion", mount="salt-minions", metadata=metadata, wrap=wrap
    )
    if wrap:
        assert res == vaultutil.VaultWrappedResponse(**wrapped_response["wrap_info"])
    else:
        assert res == vaultutil.VaultSecretId(**secret_id_response["data"])
    # metadata must be serialized to a JSON string for the API
    client.post.assert_called_once_with(
        "auth/salt-minions/role/test-minion/secret-id",
        payload={"metadata": '{"foo": "bar"}'},
        wrap=wrap,
    )
@pytest.mark.parametrize("wrap", ["30s", False])
def test_read_role_id(client, wrapped_response, wrap, approle_api):
    """
    Ensure read_role_id calls the API as expected,
    returning a wrapped response when wrapping was requested.
    """

    def res_or_wrap(*args, **kwargs):
        # emulate the client returning either a wrapped or plain response
        if kwargs.get("wrap"):
            return vaultutil.VaultWrappedResponse(**wrapped_response["wrap_info"])
        return {"data": {"role_id": "test-role-id"}}

    client.get.side_effect = res_or_wrap
    res = approle_api.read_role_id("test-minion", mount="salt-minions", wrap=wrap)
    if wrap:
        assert res == vaultutil.VaultWrappedResponse(**wrapped_response["wrap_info"])
    else:
        assert res == "test-role-id"
    client.get.assert_called_once_with(
        "auth/salt-minions/role/test-minion/role-id", wrap=wrap
    )
def test_read_approle(client, approle_api, approle_meta):
    """
    Ensure read_approle calls the API as expected.
    """
    client.get.return_value = {"data": approle_meta}
    assert approle_api.read_approle("test-minion", mount="salt-minions") == approle_meta
    client.get.assert_called_once_with("auth/salt-minions/role/test-minion")
def test_write_approle(approle_api, client):
    """
    Ensure write_approle calls the API as expected.
    """
    policies = {"foo": "bar"}
    payload = {
        "token_explicit_max_ttl": 9999999999,
        "token_num_uses": 1,
        "token_policies": policies,
    }
    approle_api.write_approle("test-minion", mount="salt-minions", **payload)
    client.post.assert_called_once_with(
        "auth/salt-minions/role/test-minion", payload=payload
    )

View file

@ -1,312 +0,0 @@
import pytest
import salt.utils.vault as vault
import salt.utils.vault.auth as vauth
import salt.utils.vault.cache as vcache
import salt.utils.vault.client as vclient
import salt.utils.vault.leases as vleases
from tests.support.mock import Mock, patch
@pytest.fixture
def token(token_auth):
    """VaultToken built from the sample auth payload."""
    return vleases.VaultToken(**token_auth["auth"])
@pytest.fixture
def token_invalid(token_auth):
    """VaultToken whose single allowed use is already consumed (invalid)."""
    token_auth["auth"]["num_uses"] = 1
    token_auth["auth"]["use_count"] = 1
    return vleases.VaultToken(**token_auth["auth"])
@pytest.fixture
def token_unrenewable(token_auth):
    """VaultToken that is valid but cannot be renewed."""
    token_auth["auth"]["renewable"] = False
    return vleases.VaultToken(**token_auth["auth"])
@pytest.fixture
def secret_id(secret_id_response):
    """VaultSecretId built from the sample secret ID response."""
    return vleases.VaultSecretId(**secret_id_response["data"])
@pytest.fixture
def secret_id_invalid(secret_id_response):
    """VaultSecretId whose single allowed use is already consumed (invalid)."""
    secret_id_response["data"]["secret_id_num_uses"] = 1
    secret_id_response["data"]["use_count"] = 1
    return vleases.VaultSecretId(**secret_id_response["data"])
@pytest.fixture(params=["secret_id"])
def approle(request):
    """AppRole with an optional secret ID; the param names the secret ID fixture or None."""
    secret_id_fixture = request.param
    secret_id = (
        request.getfixturevalue(secret_id_fixture)
        if secret_id_fixture is not None
        else None
    )
    return vauth.VaultAppRole("test-role-id", secret_id)
@pytest.fixture
def approle_invalid(secret_id_invalid):
    """AppRole whose secret ID is exhausted (invalid)."""
    return vauth.VaultAppRole("test-role-id", secret_id_invalid)
@pytest.fixture
def token_store(token):
    """Mocked token store that holds a valid token."""
    store = Mock(spec=vauth.VaultTokenAuth)
    store.is_valid.return_value = True
    store.get_token.return_value = token
    return store
@pytest.fixture
def token_store_empty(token_store):
    """Mocked token store without a usable token (get_token raises)."""
    token_store.is_valid.return_value = False
    token_store.get_token.side_effect = vault.VaultAuthExpired
    return token_store
@pytest.fixture
def token_store_empty_first(token_store, token):
    """Mocked token store that is empty on first access, then yields the token once."""
    token_store.is_valid.side_effect = (False, True)
    token_store.get_token.side_effect = (token, vault.VaultException)
    return token_store
@pytest.fixture
def uncached():
    """Mocked auth cache that reports no cached data."""
    cache = Mock(spec=vcache.VaultAuthCache)
    cache.exists.return_value = False
    cache.get.return_value = None
    return cache
@pytest.fixture
def cached_token(uncached, token):
    """Mocked auth cache that holds the sample token."""
    uncached.exists.return_value = True
    uncached.get.return_value = token
    return uncached
@pytest.fixture
def client(token_auth):
    """Mocked Vault client whose login (post) returns a fresh token payload."""
    token_auth["auth"]["client_token"] = "new-test-token"
    client = Mock(spec=vclient.VaultClient)
    client.post.return_value = token_auth
    return client
def test_token_auth_uninitialized(uncached):
    """
    Test that an exception is raised when a token is requested
    and the authentication container was not passed a valid token.
    """
    auth = vauth.VaultTokenAuth(cache=uncached)
    # construction should have tried to load a token from cache
    uncached.get.assert_called_once()
    assert auth.is_valid() is False
    assert auth.is_renewable() is False
    # used() on an uninitialized container must not raise
    auth.used()
    with pytest.raises(vault.VaultAuthExpired):
        auth.get_token()
def test_token_auth_cached(cached_token, token):
    """
    Test that tokens are read from cache.
    """
    auth = vauth.VaultTokenAuth(cache=cached_token)
    assert auth.is_valid()
    cached = auth.get_token()
    assert cached == token
def test_token_auth_invalid_token(invalid_token):
    """
    Test that an exception is raised when a token is requested
    and the container's token is invalid.
    """
    auth = vauth.VaultTokenAuth(token=invalid_token)
    for check in (auth.is_valid, auth.is_renewable):
        assert check() is False
    with pytest.raises(vault.VaultAuthExpired):
        auth.get_token()
def test_token_auth_unrenewable_token(token_unrenewable):
    """
    Test that it is reported correctly by the container
    when a token is not renewable.
    """
    auth = vauth.VaultTokenAuth(token=token_unrenewable)
    assert auth.get_token() == token_unrenewable
    assert auth.is_valid() is True
    assert auth.is_renewable() is False
@pytest.mark.parametrize("num_uses", [0, 1, 10])
def test_token_auth_used_num_uses(uncached, token, num_uses):
    """
    Ensure that cache writes for use count are only done when
    num_uses is not 0 (= unlimited).
    Single-use tokens still require cache writes for updating
    ``uses``. The cache cannot be flushed here since
    exceptions might be used to indicate the token expiry
    to factory methods.
    """
    # tokens are immutable; with_renewed returns a copy with the new num_uses
    token = token.with_renewed(num_uses=num_uses)
    auth = vauth.VaultTokenAuth(cache=uncached, token=token)
    auth.used()
    if num_uses > 0:
        uncached.store.assert_called_once_with(token)
    else:
        uncached.store.assert_not_called()
@pytest.mark.parametrize("num_uses", [0, 1, 10])
def test_token_auth_update_token(uncached, token, num_uses):
    """
    Ensure that partial updates to the token in use are possible
    and that the cache writes are independent from num_uses.
    Also ensure the token is treated as immutable
    """
    auth = vauth.VaultTokenAuth(cache=uncached, token=token)
    old_token = token
    old_token_ttl = old_token.duration
    auth.update_token({"num_uses": num_uses, "ttl": 8483})
    updated_token = token.with_renewed(num_uses=num_uses, ttl=8483)
    assert auth.token == updated_token
    # the original token object must not have been mutated in place
    assert old_token.duration == old_token_ttl
    uncached.store.assert_called_once_with(updated_token)
def test_token_auth_replace_token(uncached, token):
    """
    Ensure completely replacing the token is possible and
    results in a cache write. This is important when an
    InvalidVaultToken has to be replaced with a VaultToken,
    eg by a different authentication method.
    """
    auth = vauth.VaultTokenAuth(cache=uncached)
    # an empty container starts out with an InvalidVaultToken placeholder
    assert isinstance(auth.token, vauth.InvalidVaultToken)
    auth.replace_token(token)
    assert isinstance(auth.token, vleases.VaultToken)
    assert auth.token == token
    uncached.store.assert_called_once_with(token)
@pytest.mark.parametrize("token", [False, True])
@pytest.mark.parametrize("approle", [False, True])
def test_approle_auth_is_valid(token, approle):
    """
    Test that is_valid reports true when either the token
    or the secret ID is valid
    """
    # Keep the parametrized booleans in separate names: the original code
    # rebound ``token``/``approle`` to the mocks, so is_valid always returned
    # a truthy Mock and the False cases were never actually exercised.
    token_mock = Mock(spec=vleases.VaultToken)
    token_mock.is_valid.return_value = token
    approle_mock = Mock(spec=vleases.VaultSecretId)
    approle_mock.is_valid.return_value = approle
    auth = vauth.VaultAppRoleAuth(approle_mock, None, token_store=token_mock)
    assert auth.is_valid() is (token or approle)
def test_approle_auth_get_token_store_available(token_store, approle, token):
    """
    Ensure no login attempt is made when a cached token is available
    """
    auth = vauth.VaultAppRoleAuth(approle, None, token_store=token_store)
    with patch("salt.utils.vault.auth.VaultAppRoleAuth._login") as login:
        assert auth.get_token() == token
        login.assert_not_called()
def test_approle_auth_get_token_store_empty(token_store_empty, approle, token):
    """
    Ensure a token is returned if no cached token is available
    """
    auth = vauth.VaultAppRoleAuth(approle, None, token_store=token_store_empty)
    with patch(
        "salt.utils.vault.auth.VaultAppRoleAuth._login", return_value=token
    ) as login:
        assert auth.get_token() == token
        login.assert_called_once()
def test_approle_auth_get_token_invalid(token_store_empty, approle_invalid):
    """
    Ensure VaultAuthExpired is raised if a token request was made, but
    cannot be fulfilled
    """
    # neither the token store nor the (exhausted) secret ID can provide a token
    auth = vauth.VaultAppRoleAuth(approle_invalid, None, token_store=token_store_empty)
    with pytest.raises(vault.VaultAuthExpired):
        auth.get_token()
@pytest.mark.parametrize("mount", ["approle", "salt_minions"])
@pytest.mark.parametrize("approle", ["secret_id", None], indirect=True)
def test_approle_auth_get_token_login(
    approle, mount, client, token_store_empty_first, token
):
    """
    Ensure that login with secret-id returns a token that is passed to the
    token store/cache as well
    """
    auth = vauth.VaultAppRoleAuth(
        approle, client, mount=mount, token_store=token_store_empty_first
    )
    res = auth.get_token()
    assert res == token
    # inspect the login request the auth container issued via the client
    args, kwargs = client.post.call_args
    endpoint = args[0]
    payload = kwargs.get("payload", {})
    assert endpoint == f"auth/{mount}/login"
    assert "role_id" in payload
    # secret_id is only sent when the AppRole actually has one configured
    if approle.secret_id is not None:
        assert "secret_id" in payload
    token_store_empty_first.replace_token.assert_called_once_with(res)
@pytest.mark.parametrize("num_uses", [0, 1, 10])
def test_approle_auth_used_num_uses(
    token_store_empty_first, approle, client, uncached, num_uses, token
):
    """
    Ensure that cache writes for use count are only done when
    num_uses is not 0 (= unlimited)
    """
    approle.secret_id = approle.secret_id.with_renewed(num_uses=num_uses)
    auth = vauth.VaultAppRoleAuth(
        approle, client, cache=uncached, token_store=token_store_empty_first
    )
    res = auth.get_token()
    assert res == token
    if num_uses > 1:
        # multi-use secret IDs are written back with the updated use count
        uncached.store.assert_called_once_with(approle.secret_id)
    elif num_uses:
        # a single-use secret ID is spent by this login, so the cache
        # entry is flushed instead of updated
        uncached.store.assert_not_called()
        uncached.flush.assert_called_once()
    else:
        # unlimited uses: no cache bookkeeping necessary
        uncached.store.assert_not_called()
def test_approle_auth_used_locally_configured(
    token_store_empty_first, approle, client, uncached, token
):
    """
    Ensure that locally configured secret IDs are not cached.
    """
    # LocalVaultSecretId marks a secret ID sourced from local config
    approle.secret_id = vault.LocalVaultSecretId(**approle.secret_id.to_dict())
    auth = vauth.VaultAppRoleAuth(
        approle, client, cache=uncached, token_store=token_store_empty_first
    )
    res = auth.get_token()
    assert res == token
    uncached.store.assert_not_called()
def test_approle_allows_no_secret_id():
    """
    Ensure AppRole containers are still valid if no
    secret ID has been set (bind_secret_id can be set to False!)
    """
    assert vauth.VaultAppRole("test-role-id").is_valid()

View file

@ -1,588 +0,0 @@
import copy
import time
import pytest
import salt.cache
import salt.utils.vault as vault
import salt.utils.vault.cache as vcache
from tests.support.mock import ANY, Mock, patch
@pytest.fixture
def cbank():
    """Sample cache bank name (connection scope)."""
    return "vault/connection"
@pytest.fixture
def ckey():
    """Sample cache key."""
    return "test"
@pytest.fixture
def data():
    """Sample cached payload."""
    return {"foo": "bar"}
@pytest.fixture
def context(cbank, ckey, data):
    """Sample ``__context__`` pre-populated with the data under bank/key."""
    return {cbank: {ckey: data}}
@pytest.fixture
def cached(cache_factory, data):
    """Mocked cache backend containing fresh (just-updated) data."""
    cache = Mock(spec=salt.cache.Cache)
    cache.contains.return_value = True
    cache.fetch.return_value = data
    cache.updated.return_value = time.time()
    cache_factory.return_value = cache
    return cache
@pytest.fixture
def cached_outdated(cache_factory, data):
    """Mocked cache backend containing stale data (updated far in the past)."""
    cache = Mock(spec=salt.cache.Cache)
    cache.contains.return_value = True
    cache.fetch.return_value = data
    cache.updated.return_value = time.time() - 9999999
    cache_factory.return_value = cache
    return cache
@pytest.fixture
def uncached(cache_factory):
    """Mocked cache backend without any data."""
    cache = Mock(spec=salt.cache.Cache)
    cache.contains.return_value = False
    cache.fetch.return_value = None
    cache.updated.return_value = None
    cache_factory.return_value = cache
    return cache
@pytest.fixture(autouse=True, params=[0])
def time_stopped(request):
    """Freeze ``time.time`` inside the cache module for deterministic TTL checks."""
    with patch(
        "salt.utils.vault.cache.time.time", autospec=True, return_value=request.param
    ):
        yield
@pytest.mark.parametrize("connection", [True, False])
@pytest.mark.parametrize(
    "salt_runtype,force_local,expected",
    [
        ("MASTER", False, "vault"),
        ("MASTER_IMPERSONATING", False, "minions/test-minion/vault"),
        ("MASTER_IMPERSONATING", True, "vault"),
        ("MINION_LOCAL", False, "vault"),
        ("MINION_REMOTE", False, "vault"),
    ],
    indirect=["salt_runtype"],
)
def test_get_cache_bank(connection, salt_runtype, force_local, expected):
    """
    Ensure the cache banks are mapped as expected, depending on run type
    """
    opts = {"grains": {"id": "test-minion"}}
    cbank = vcache._get_cache_bank(opts, force_local=force_local, connection=connection)
    # connection-scoped banks get a "/connection" suffix
    if connection:
        expected += "/connection"
    assert cbank == expected
class TestVaultCache:
    """Tests for the generic VaultCache (session context + optional cache backend)."""

    @pytest.mark.parametrize("config", ["session", "other"])
    def test_get_uncached(self, config, uncached, cbank, ckey):
        """
        Ensure that unavailable cached data is reported as None.
        """
        cache = vcache.VaultCache(
            {}, cbank, ckey, cache_backend=uncached if config != "session" else None
        )
        res = cache.get()
        assert res is None
        if config != "session":
            uncached.contains.assert_called_once_with(cbank, ckey)
    def test_get_cached_from_context(self, context, cached, cbank, ckey, data):
        """
        Ensure that cached data in __context__ is respected, regardless
        of cache backend.
        """
        cache = vcache.VaultCache(context, cbank, ckey, cache_backend=cached)
        res = cache.get()
        assert res == data
        # the backend must not even be consulted when context has the data
        cached.updated.assert_not_called()
        cached.fetch.assert_not_called()
    def test_get_cached_not_outdated(self, cached, cbank, ckey, data):
        """
        Ensure that cached data that is still valid is returned.
        """
        cache = vcache.VaultCache({}, cbank, ckey, cache_backend=cached, ttl=3600)
        res = cache.get()
        assert res == data
        cached.updated.assert_called_once_with(cbank, ckey)
        cached.fetch.assert_called_once_with(cbank, ckey)
    def test_get_cached_outdated(self, cached_outdated, cbank, ckey):
        """
        Ensure that cached data that is not valid anymore is flushed
        and None is returned by default.
        """
        cache = vcache.VaultCache({}, cbank, ckey, cache_backend=cached_outdated, ttl=1)
        res = cache.get()
        assert res is None
        cached_outdated.updated.assert_called_once_with(cbank, ckey)
        cached_outdated.flush.assert_called_once_with(cbank, ckey)
        cached_outdated.fetch.assert_not_called()
    @pytest.mark.parametrize("config", ["session", "other"])
    def test_flush(self, config, context, cached, cbank, ckey):
        """
        Ensure that flushing clears the context key only and, if
        a cache backend is in use, it is also cleared.
        """
        cache = vcache.VaultCache(
            context, cbank, ckey, cache_backend=cached if config != "session" else None
        )
        cache.flush()
        assert context == {cbank: {}}
        if config != "session":
            cached.flush.assert_called_once_with(cbank, ckey)
    @pytest.mark.parametrize("config", ["session", "other"])
    def test_flush_cbank(self, config, context, cached, cbank, ckey):
        """
        Ensure that flushing with cbank=True clears the context bank and, if
        a cache backend is in use, it is also cleared.
        """
        cache = vcache.VaultCache(
            context, cbank, ckey, cache_backend=cached if config != "session" else None
        )
        cache.flush(cbank=True)
        assert context == {}
        if config != "session":
            cached.flush.assert_called_once_with(cbank, None)
    @pytest.mark.parametrize("context", [{}, {"vault/connection": {}}])
    @pytest.mark.parametrize("config", ["session", "other"])
    def test_store(self, config, context, uncached, cbank, ckey, data):
        """
        Ensure that storing data in cache always updates the context
        and, if a cache backend is in use, it is also stored there.
        """
        cache = vcache.VaultCache(
            context,
            cbank,
            ckey,
            cache_backend=uncached if config != "session" else None,
        )
        cache.store(data)
        assert context == {cbank: {ckey: data}}
        if config != "session":
            uncached.store.assert_called_once_with(cbank, ckey, data)
class TestVaultConfigCache:
@pytest.fixture(params=["session", "other", None])
def config(self, request):
if request.param is None:
return None
return {
"cache": {
"backend": request.param,
"config": 3600,
"secret": "ttl",
}
}
@pytest.fixture
def data(self, config):
return {
"cache": {
"backend": "new",
"config": 1337,
"secret": "ttl",
}
}
@pytest.mark.usefixtures("uncached")
def test_get_config_cache_uncached(self, cbank, ckey):
"""
Ensure an uninitialized instance is returned when there is no cache
"""
res = vault.cache._get_config_cache({}, {}, cbank, ckey)
assert res.config is None
def test_get_config_context_cached(self, uncached, cbank, ckey, context):
"""
Ensure cached data in context wins
"""
res = vault.cache._get_config_cache({}, context, cbank, ckey)
assert res.config == context[cbank][ckey]
uncached.contains.assert_not_called()
def test_get_config_other_cached(self, cached, cbank, ckey, data):
"""
Ensure cached data from other sources is respected
"""
res = vault.cache._get_config_cache({}, {}, cbank, ckey)
assert res.config == data
cached.contains.assert_called_once_with(cbank, ckey)
cached.fetch.assert_called_once_with(cbank, ckey)
def test_reload(self, config, data, cbank, ckey):
"""
Ensure that a changed configuration is reloaded correctly and
during instantiation. When the config backend changes and the
previous was not session only, it should be flushed.
"""
with patch("salt.utils.vault.cache.VaultConfigCache.flush") as flush:
cache = vcache.VaultConfigCache({}, cbank, ckey, {}, init_config=config)
assert cache.config == config
if config is not None:
assert cache.ttl == config["cache"]["config"]
if config["cache"]["backend"] != "session":
assert cache.cache is not None
else:
assert cache.ttl is None
assert cache.cache is None
cache._load(data)
assert cache.ttl == data["cache"]["config"]
assert cache.cache is not None
if config is not None and config["cache"]["backend"] != "session":
flush.assert_called_once()
@pytest.mark.usefixtures("cached")
def test_exists(self, config, context, cbank, ckey):
"""
Ensure exists always evaluates to false when uninitialized
"""
cache = vcache.VaultConfigCache(context, cbank, ckey, {}, init_config=config)
res = cache.exists()
assert res is bool(config)
def test_get(self, config, cached, context, cbank, ckey, data):
"""
Ensure cached data is returned and backend settings honored,
unless the instance has not been initialized yet
"""
if config is not None and config["cache"]["backend"] != "session":
context = {}
cache = vcache.VaultConfigCache(context, cbank, ckey, {}, init_config=config)
res = cache.get()
if config is not None:
assert res == data
if config["cache"]["backend"] != "session":
cached.fetch.assert_called_once_with(cbank, ckey)
else:
cached.contains.assert_not_called()
cached.fetch.assert_not_called()
else:
# uninitialized should always return None
# initialization when first stored or constructed with init_config
cached.contains.assert_not_called()
assert res is None
def test_flush(self, config, context, cached, cbank, ckey):
"""
Ensure flushing deletes the whole cache bank (=connection scope),
unless the configuration has not been initialized.
Also, it should uninitialize the instance.
"""
if config is None:
context_old = copy.deepcopy(context)
cache = vcache.VaultConfigCache(context, cbank, ckey, {}, init_config=config)
cache.flush()
if config is None:
assert context == context_old
cached.flush.assert_not_called()
else:
if config["cache"]["backend"] == "session":
assert context == {}
else:
cached.flush.assert_called_once_with(cbank, None)
assert cache.ttl is None
assert cache.cache is None
assert cache.config is None
    @pytest.mark.usefixtures("uncached")
    def test_store(self, data, cbank, ckey):
        """
        Ensure storing config in cache also reloads the instance
        """
        # NOTE(review): positional args here are ({}, {}, cbank, ckey), while
        # sibling tests pass (context, cbank, ckey, {}) — verify against the
        # VaultConfigCache signature that this is intentional.
        cache = vcache.VaultConfigCache({}, {}, cbank, ckey)
        assert cache.config is None
        with patch("salt.utils.vault.cache.VaultConfigCache._load") as rld:
            with patch("salt.utils.vault.cache.VaultCache.store") as store:
                cache.store(data)
                # storing must both persist (parent store) and reload (_load)
                rld.assert_called_once_with(data)
                store.assert_called_once()
    @pytest.mark.parametrize("config", ["other"], indirect=True)
    def test_flush_exceptions_with_flush(self, config, cached, cbank, ckey):
        """
        Ensure internal flushing is disabled when the object is initialized
        with a reference to an exception class.
        """
        cache = vcache.VaultConfigCache(
            {},
            cbank,
            ckey,
            {},
            cache_backend_factory=lambda *args: cached,
            # with flush_exception set, flush() must raise instead of flushing
            flush_exception=vault.VaultConfigExpired,
            init_config=config,
        )
        with pytest.raises(vault.VaultConfigExpired):
            cache.flush()
    @pytest.mark.parametrize("config", ["other"], indirect=True)
    def test_flush_exceptions_with_get(self, config, cached_outdated, cbank, ckey):
        """
        Ensure internal flushing is disabled when the object is initialized
        with a reference to an exception class.
        """
        cache = vcache.VaultConfigCache(
            {},
            cbank,
            ckey,
            {},
            # outdated backend data would normally trigger an internal flush;
            # with flush_exception set, get() must raise instead
            cache_backend_factory=lambda *args: cached_outdated,
            flush_exception=vault.VaultConfigExpired,
            init_config=config,
        )
        with pytest.raises(vault.VaultConfigExpired):
            cache.get()
class TestVaultAuthCache:
    """Tests for the authentication-data cache (tokens/secret IDs)."""

    @pytest.fixture
    def uncached(self):
        # simulate a cache miss: key does not exist, fetch returns None
        with patch(
            "salt.utils.vault.cache.CommonCache._ckey_exists",
            return_value=False,
            autospec=True,
        ):
            with patch(
                "salt.utils.vault.cache.CommonCache._get_ckey",
                return_value=None,
                autospec=True,
            ) as get:
                yield get

    @pytest.fixture
    def cached(self, token_auth):
        # simulate a cache hit with a valid token payload
        with patch(
            "salt.utils.vault.cache.CommonCache._ckey_exists",
            return_value=True,
            autospec=True,
        ):
            with patch(
                "salt.utils.vault.cache.CommonCache._get_ckey",
                return_value=token_auth["auth"],
                autospec=True,
            ) as get:
                yield get

    @pytest.fixture
    def cached_outdated(self, token_auth):
        # simulate a cache hit whose token has expired
        # (created at epoch 0 with a 1s lease)
        with patch(
            "salt.utils.vault.cache.CommonCache._ckey_exists",
            return_value=True,
            autospec=True,
        ):
            token_auth["auth"]["creation_time"] = 0
            token_auth["auth"]["lease_duration"] = 1
            with patch(
                "salt.utils.vault.cache.CommonCache._get_ckey",
                return_value=token_auth["auth"],
                autospec=True,
            ) as get:
                yield get

    @pytest.fixture
    def cached_invalid_flush(self, token_auth, cached):
        # simulate a cached token that has exhausted its uses
        # (num_uses == use_count) and expose the internal flush mock
        with patch("salt.utils.vault.cache.CommonCache._flush", autospec=True) as flush:
            token_auth["auth"]["num_uses"] = 1
            token_auth["auth"]["use_count"] = 1
            cached.return_value = token_auth["auth"]
            yield flush

    @pytest.mark.usefixtures("uncached")
    def test_get_uncached(self):
        """
        Ensure that unavailable cached data is reported as None.
        """
        cache = vcache.VaultAuthCache({}, "cbank", "ckey", vault.VaultToken)
        res = cache.get()
        assert res is None

    @pytest.mark.usefixtures("cached")
    def test_get_cached(self, token_auth):
        """
        Ensure that cached data that is still valid is returned.
        """
        cache = vcache.VaultAuthCache({}, "cbank", "ckey", vault.VaultToken)
        res = cache.get()
        assert res is not None
        # the raw dict must be hydrated into the auth class passed at init
        assert res == vault.VaultToken(**token_auth["auth"])

    def test_get_cached_invalid(self, cached_invalid_flush):
        """
        Ensure that cached data that is not valid anymore is flushed
        and None is returned.
        """
        cache = vcache.VaultAuthCache({}, "cbank", "ckey", vault.VaultToken)
        res = cache.get()
        assert res is None
        cached_invalid_flush.assert_called_once()

    def test_store(self, token_auth):
        """
        Ensure that storing authentication data sends a dictionary
        representation to the store implementation of the parent class.
        """
        token = vault.VaultToken(**token_auth["auth"])
        cache = vcache.VaultAuthCache({}, "cbank", "ckey", vault.VaultToken)
        with patch("salt.utils.vault.cache.CommonCache._store_ckey") as store:
            cache.store(token)
            store.assert_called_once_with("ckey", token.to_dict())

    def test_flush_exceptions_with_flush(self, cached, cbank, ckey):
        """
        Ensure internal flushing is disabled when the object is initialized
        with a reference to an exception class.
        """
        cache = vcache.VaultAuthCache(
            {},
            cbank,
            ckey,
            vault.VaultToken,
            cache_backend=cached,
            flush_exception=vault.VaultAuthExpired,
        )
        with pytest.raises(vault.VaultAuthExpired):
            cache.flush()

    def test_flush_exceptions_with_get(self, cached_outdated, cbank, ckey):
        """
        Ensure internal flushing is disabled when the object is initialized
        with a reference to an exception class.
        """
        cache = vcache.VaultAuthCache(
            {}, cbank, ckey, vault.VaultToken, flush_exception=vault.VaultAuthExpired
        )
        # requesting 10s of remaining validity from an expired token
        # would normally flush; here it must raise instead
        with pytest.raises(vault.VaultAuthExpired):
            cache.get(10)
class TestVaultLeaseCache:
    """Tests for the lease cache (dynamic secrets)."""

    @pytest.fixture
    def uncached(self):
        # simulate a cache miss
        with patch(
            "salt.utils.vault.cache.CommonCache._ckey_exists",
            return_value=False,
            autospec=True,
        ):
            with patch(
                "salt.utils.vault.cache.CommonCache._get_ckey",
                return_value=None,
                autospec=True,
            ) as get:
                yield get

    @pytest.fixture
    def cached(self, lease):
        # simulate a cache hit with a currently valid lease
        with patch(
            "salt.utils.vault.cache.CommonCache._ckey_exists",
            return_value=True,
            autospec=True,
        ):
            with patch(
                "salt.utils.vault.cache.CommonCache._get_ckey",
                return_value=lease,
                autospec=True,
            ) as get:
                yield get

    @pytest.fixture
    def cached_outdated(self, lease):
        # simulate a cache hit with an already expired lease
        with patch(
            "salt.utils.vault.cache.CommonCache._ckey_exists",
            return_value=True,
            autospec=True,
        ):
            lease["duration"] = 6
            lease["expire_time"] = 6
            with patch(
                "salt.utils.vault.cache.CommonCache._get_ckey",
                return_value=lease,
                autospec=True,
            ) as get:
                yield get

    @pytest.mark.usefixtures("uncached")
    def test_get_uncached(self):
        """
        Ensure that unavailable cached data is reported as None.
        """
        cache = vcache.VaultLeaseCache({}, "cbank")
        res = cache.get("testlease")
        assert res is None

    @pytest.mark.usefixtures("cached")
    def test_get_cached(self, lease):
        """
        Ensure that cached data that is still valid is returned.
        """
        cache = vcache.VaultLeaseCache({}, "cbank")
        res = cache.get("testlease")
        assert res is not None
        # the raw dict must be hydrated into a VaultLease instance
        assert res == vault.VaultLease(**lease)

    @pytest.mark.usefixtures("cached", "time_stopped")
    @pytest.mark.parametrize("valid_for,expected", ((1, True), (99999999, False)))
    def test_get_cached_valid_for(self, valid_for, expected, lease):
        """
        Ensure that requesting leases with a validity works as expected.
        The lease should be returned if it is valid, otherwise only
        the invalid ckey should be flushed and None returned.
        """
        cache = vcache.VaultLeaseCache({}, "cbank")
        with patch(
            "salt.utils.vault.cache.CommonCache._flush",
            autospec=True,
        ) as flush:
            res = cache.get("testlease", valid_for=valid_for, flush=True)
            if expected:
                flush.assert_not_called()
                assert res is not None
                assert res == vault.VaultLease(**lease)
            else:
                # only the single invalid key is flushed, not the whole bank
                flush.assert_called_once_with(ANY, "testlease")
                assert res is None

    def test_store(self, lease):
        """
        Ensure that storing lease data sends a dictionary
        representation to the store implementation of the parent class.
        """
        lease_ = vault.VaultLease(**lease)
        cache = vcache.VaultLeaseCache({}, "cbank")
        with patch("salt.utils.vault.cache.CommonCache._store_ckey") as store:
            cache.store("ckey", lease_)
            store.assert_called_once_with("ckey", lease_.to_dict())

    def test_expire_events_with_get(self, events, cached_outdated, cbank, ckey, lease):
        """
        Ensure an expire event is sent when a cached lease does not
        satisfy the requested validity.
        """
        cache = vcache.VaultLeaseCache({}, "cbank", expire_events=events)
        ret = cache.get("ckey", 10)
        assert ret is None
        events.assert_called_once_with(
            tag="vault/lease/ckey/expire", data={"valid_for_less": 10}
        )

View file

@ -1,650 +0,0 @@
import pytest
import requests
import salt.exceptions
import salt.utils.vault as vault
import salt.utils.vault.client as vclient
from tests.pytests.unit.utils.vault.conftest import _mock_json_response
from tests.support.mock import ANY, Mock, patch
@pytest.mark.parametrize(
    "endpoint",
    [
        "secret/some/path",
        "/secret/some/path",
        "secret/some/path/",
        "/secret/some/path/",
    ],
)
def test_vault_client_request_raw_url(endpoint, client, req):
    """
    Test that requests are sent to the correct endpoint, regardless of leading or trailing slashes
    """
    # all variants must normalize to <url>/v1/<path> without extra slashes
    expected_url = f"{client.url}/v1/secret/some/path"
    client.request_raw("GET", endpoint)
    req.assert_called_with(
        "GET",
        expected_url,
        headers=ANY,
        json=None,
        verify=client.get_config()["verify"],
    )
def test_vault_client_request_raw_kwargs_passthrough(client, req):
    """
    Test that kwargs for requests.request are passed through
    """
    client.request_raw(
        "GET", "secret/some/path", allow_redirects=False, cert="/etc/certs/client.pem"
    )
    # the supplemental kwargs must reach requests.request unmodified
    req.assert_called_with(
        "GET",
        ANY,
        headers=ANY,
        json=ANY,
        verify=ANY,
        allow_redirects=False,
        cert="/etc/certs/client.pem",
    )
@pytest.mark.parametrize("namespace", [None, "test-namespace"])
@pytest.mark.parametrize("client", [None], indirect=True)
def test_vault_client_request_raw_headers_namespace(namespace, client, req):
    """
    Test that namespace is present in the HTTP headers only if it was specified
    """
    if namespace is not None:
        client.namespace = namespace
    namespace_header = "X-Vault-Namespace"
    client.request_raw("GET", "secret/some/path")
    headers = req.call_args.kwargs.get("headers", {})
    # the header must be absent (not empty) when no namespace is configured
    if namespace is None:
        assert namespace_header not in headers
    else:
        assert headers.get(namespace_header) == namespace
@pytest.mark.parametrize("wrap", [False, 30, "1h"])
def test_vault_client_request_raw_headers_wrap(wrap, client, req):
    """
    Test that the wrap header is present only if it was specified and supports time strings
    """
    wrap_header = "X-Vault-Wrap-TTL"
    client.request_raw("GET", "secret/some/path", wrap=wrap)
    headers = req.call_args.kwargs.get("headers", {})
    if not wrap:
        assert wrap_header not in headers
    else:
        # both int seconds and time strings are serialized via str()
        assert headers.get(wrap_header) == str(wrap)
@pytest.mark.parametrize("header", ["X-Custom-Header", "X-Existing-Header"])
def test_vault_client_request_raw_headers_additional(header, client, req):
    """
    Test that additional headers are passed correctly and override default ones
    """
    # pin the default headers so the override case is deterministic
    with patch.object(
        client, "_get_headers", Mock(return_value={"X-Existing-Header": "unchanged"})
    ):
        client.request_raw("GET", "secret/some/path", add_headers={header: "changed"})
        actual_header = req.call_args.kwargs.get("headers", {}).get(header)
        assert actual_header == "changed"
@pytest.mark.usefixtures("req_failed")
@pytest.mark.parametrize(
    "req_failed",
    [400, 403, 404, 502, 401],
    indirect=True,
)
@pytest.mark.parametrize(
    "client",
    [None],
    indirect=True,
)
def test_vault_client_request_raw_does_not_raise_http_exception(client):
    """
    request_raw should return the raw response object regardless of HTTP status code
    """
    res = client.request_raw("GET", "secret/some/path")
    # the error must only surface when the caller opts in via raise_for_status
    with pytest.raises(requests.exceptions.HTTPError):
        res.raise_for_status()
@pytest.mark.parametrize(
    "req_failed,expected",
    [
        (400, vault.VaultInvocationError),
        (403, vault.VaultPermissionDeniedError),
        (404, vault.VaultNotFoundError),
        (405, vault.VaultUnsupportedOperationError),
        (412, vault.VaultPreconditionFailedError),
        (500, vault.VaultServerError),
        (502, vault.VaultServerError),
        (503, vault.VaultUnavailableError),
        # unmapped status codes fall back to requests' own exception
        (401, requests.exceptions.HTTPError),
    ],
    indirect=["req_failed"],
)
@pytest.mark.parametrize("raise_error", [True, False])
def test_vault_client_request_respects_raise_error(
    raise_error, req_failed, expected, client
):
    """
    request should inspect the response object and raise appropriate errors
    or fall back to raise_for_status if raise_error is true
    """
    if raise_error:
        with pytest.raises(expected):
            client.request("GET", "secret/some/path", raise_error=raise_error)
    else:
        # with raise_error=False, the error payload is returned as-is
        res = client.request("GET", "secret/some/path", raise_error=raise_error)
        assert "errors" in res
def test_vault_client_request_returns_whole_response_data(
    role_id_response, req, client
):
    """
    request should return the whole returned payload, not auth/data etc only
    """
    req.return_value = _mock_json_response(role_id_response)
    res = client.request("GET", "auth/approle/role/test-minion/role-id")
    assert res == role_id_response
def test_vault_client_request_hydrates_wrapped_response(
    wrapped_role_id_response, req, client
):
    """
    request should detect wrapped responses and return an instance of VaultWrappedResponse
    instead of raw data
    """
    req.return_value = _mock_json_response(wrapped_role_id_response)
    # wrap="180s" requests response wrapping from the server
    res = client.request("GET", "auth/approle/role/test-minion/role-id", wrap="180s")
    assert isinstance(res, vault.VaultWrappedResponse)
@pytest.mark.usefixtures("req_success")
def test_vault_client_request_returns_true_when_no_data_is_reported(client):
    """
    HTTP 204 indicates success with no data returned
    """
    res = client.request("GET", "secret/some/path")
    # the truthy sentinel distinguishes "success, no body" from an error
    assert res is True
def test_vault_client_get_config(server_config, client):
    """
    The client must report exactly the configuration it was created with.
    """
    reported = client.get_config()
    assert reported == server_config
@pytest.mark.parametrize("client", [None], indirect=["client"])
def test_vault_client_token_valid_false(client):
    """
    A client constructed without authentication must never claim
    to hold a valid token.
    """
    valid = client.token_valid()
    assert valid is False
@pytest.mark.parametrize("client", ["valid_token", "invalid_token"], indirect=True)
@pytest.mark.parametrize("req_any", [200, 403], indirect=True)
@pytest.mark.parametrize("remote", [False, True])
def test_vault_client_token_valid(client, remote, req_any):
    """
    Ensure token validity is reported correctly, with the remote check
    only performed when requested and the local token looks valid.
    """
    valid = client.token_valid(remote=remote)
    # a remote lookup is pointless if the token is already invalid locally
    if not remote or not client.auth.is_valid():
        req_any.assert_not_called()
    else:
        req_any.assert_called_once()
    should_be_valid = client.auth.is_valid() and (
        not remote or req_any("POST", "abc").status_code == 200
    )
    assert valid is should_be_valid
@pytest.mark.parametrize("func", ["get", "delete", "post", "list"])
def test_vault_client_wrapper_should_not_require_payload(func, client, req):
    """
    Check that wrappers for get/delete/post/list do not require a payload
    """
    req.return_value = _mock_json_response({}, status_code=200)
    tgt = getattr(client, func)
    # calling with the endpoint only must not raise TypeError
    res = tgt("auth/approle/role/test-role/secret-id")
    assert res == {}
@pytest.mark.parametrize("func", ["patch"])
def test_vault_client_wrapper_should_require_payload(func, client, req):
    """
    Check that patch wrapper does require a payload
    """
    req.return_value = _mock_json_response({}, status_code=200)
    tgt = getattr(client, func)
    # omitting the payload must fail at call time, not at request time
    with pytest.raises(TypeError):
        tgt("auth/approle/role/test-role/secret-id")
def test_vault_client_wrap_info_only_data(wrapped_role_id_lookup_response, client, req):
    """
    wrap_info must strip the response envelope and hand back only the
    ``data`` portion of the wrapping information.
    """
    req.return_value = _mock_json_response(wrapped_role_id_lookup_response)
    info = client.wrap_info("test-wrapping-token")
    assert info == wrapped_role_id_lookup_response["data"]
@pytest.mark.parametrize(
    "req_failed,expected", [(502, vault.VaultServerError)], indirect=["req_failed"]
)
def test_vault_client_wrap_info_should_fail_with_sensible_response(
    req_failed, expected, client
):
    """
    wrap_info should return sensible Exceptions, not KeyError etc
    """
    # a failed backend call must surface as a Vault error, not as a
    # KeyError from missing response keys
    with pytest.raises(expected):
        client.wrap_info("test-wrapping-token")
def test_vault_client_unwrap_returns_whole_response(role_id_response, client, req):
    """
    Unwrapping must yield the complete response payload, not just
    the auth/data sub-dictionaries.
    """
    req.return_value = _mock_json_response(role_id_response)
    unwrapped = client.unwrap("test-wrapping-token")
    assert unwrapped == role_id_response
def test_vault_client_unwrap_should_default_to_token_header_before_payload(
    role_id_response, client, req
):
    """
    When unwrapping a wrapping token, it can be used as the authentication token header.
    If the client has a valid token, it should be used in the header instead and the
    unwrapping token should be passed in the payload
    """
    token = "test-wrapping-token"
    req.return_value = _mock_json_response(role_id_response)
    client.unwrap(token)
    if client.token_valid(remote=False):
        # authenticated client: wrapping token goes into the JSON payload
        payload = req.call_args.kwargs.get("json", {})
        assert payload.get("token") == token
    else:
        # unauthenticated client: wrapping token doubles as the auth header
        headers = req.call_args.kwargs.get("headers", {})
        assert headers.get("X-Vault-Token") == token
@pytest.mark.parametrize("func", ["unwrap", "token_lookup"])
@pytest.mark.parametrize(
    "req_failed,expected",
    [
        (400, vault.VaultInvocationError),
        (403, vault.VaultPermissionDeniedError),
        (404, vault.VaultNotFoundError),
        (502, vault.VaultServerError),
        # unmapped status codes fall back to requests' own exception
        (401, requests.exceptions.HTTPError),
    ],
    indirect=["req_failed"],
)
def test_vault_client_unwrap_should_raise_appropriate_errors(
    func, req_failed, expected, client
):
    """
    unwrap/token_lookup should raise exceptions the same way request does
    """
    with pytest.raises(expected):
        tgt = getattr(client, func)
        tgt("test-wrapping-token")
@pytest.mark.usefixtures("req_unwrapping")
@pytest.mark.parametrize(
    "path",
    [
        # exact match, single regex, and a list where only the last entry matches
        "auth/approle/role/test-minion/role-id",
        "auth/approle/role/[^/]+/role-id",
        ["incorrect/path", "[^a]+", "auth/approle/role/[^/]+/role-id"],
    ],
)
def test_vault_client_unwrap_should_match_check_expected_creation_path(
    path, role_id_response, client
):
    """
    Expected creation paths should be accepted as strings and list of strings,
    where the strings can be regex patterns
    """
    res = client.unwrap("test-wrapping-token", expected_creation_path=path)
    assert res == role_id_response
@pytest.mark.usefixtures("req_unwrapping")
@pytest.mark.parametrize(
    "path",
    [
        "auth/other_mount/role/test-minion/role-id",
        "auth/approle/role/[^tes/]+/role-id",
        ["incorrect/path", "[^a]+", "auth/approle/role/[^/]/role-id"],
    ],
)
def test_vault_client_unwrap_should_fail_on_unexpected_creation_path(path, client):
    """
    When none of the patterns match, a (serious) exception should be raised
    """
    # a mismatched creation path can indicate a man-in-the-middle attempt,
    # hence the dedicated exception type
    with pytest.raises(vault.VaultUnwrapException):
        client.unwrap("test-wrapping-token", expected_creation_path=path)
def test_vault_client_token_lookup_returns_data_only(
    token_lookup_self_response, req, client
):
    """
    token_lookup should return "data" only, not the whole response payload
    """
    req.return_value = _mock_json_response(token_lookup_self_response)
    res = client.token_lookup("test-token")
    assert res == token_lookup_self_response["data"]
@pytest.mark.parametrize("raw", [False, True])
def test_vault_client_token_lookup_respects_raw(raw, req, client):
    """
    when raw is True, token_lookup should return the raw response
    """
    response_data = {"foo": "bar"}
    req.return_value = _mock_json_response({"data": response_data})
    res = client.token_lookup("test-token", raw=raw)
    if raw:
        # raw mode hands back the response object, still JSON-decodable
        assert res.json() == {"data": response_data}
    else:
        assert res == response_data
def test_vault_client_token_lookup_uses_accessor(client, req_any):
    """
    Ensure a client can lookup tokens with provided accessor
    """
    token = "test-token"
    if client.token_valid():
        token = None
    client.token_lookup(token=token, accessor="test-token-accessor")
    payload = req_any.call_args.kwargs.get("json", {})
    _, url = req_any.call_args[0]
    # the accessor goes into the payload and selects the accessor endpoint
    assert payload.get("accessor") == "test-token-accessor"
    assert url.endswith("lookup-accessor")
# VaultClient only
@pytest.mark.usefixtures("req")
@pytest.mark.parametrize("client", [None], indirect=["client"])
def test_vault_client_token_lookup_requires_token_for_unauthenticated_client(client):
    """Without a token of its own, the client needs one passed explicitly."""
    with pytest.raises(vault.VaultInvocationError):
        client.token_lookup()
# AuthenticatedVaultClient only
@pytest.mark.usefixtures("req_any")
@pytest.mark.parametrize("client", ["valid_token"], indirect=True)
@pytest.mark.parametrize(
    "endpoint,use",
    [
        ("secret/data/some/path", True),
        ("auth/approle/role/test-minion", True),
        # sys endpoints below are unauthenticated and must not consume a use
        ("sys/internal/ui/mounts", False),
        ("sys/internal/ui/mounts/secret", False),
        ("sys/wrapping/lookup", False),
        ("sys/internal/ui/namespaces", False),
        ("sys/health", False),
        ("sys/seal-status", False),
    ],
)
def test_vault_client_request_raw_increases_use_count_when_necessary_depending_on_path(
    endpoint, use, client
):
    """
    When a request is issued to an endpoint that consumes a use, ensure it is passed
    along to the token.
    https://github.com/hashicorp/vault/blob/d467681e15898041b6dd5f2bf7789bd7c236fb16/vault/logical_system.go#L119-L155
    """
    client.request_raw("GET", endpoint)
    assert client.auth.used.called is use
@pytest.mark.parametrize("client", ["valid_token"], indirect=True)
@pytest.mark.parametrize(
    "req_failed",
    [400, 403, 404, 405, 412, 500, 502, 503, 401],
    indirect=True,
)
def test_vault_client_request_raw_increases_use_count_when_necessary_depending_on_response(
    req_failed, client
):
    """
    When a request is issued to an endpoint that consumes a use, make sure that
    this is registered regardless of status code:
    https://github.com/hashicorp/vault/blob/c1cf97adac5c53301727623a74b828a5f12592cf/vault/request_handling.go#L864-L866
    ref: PR #62552
    """
    client.request_raw("GET", "secret/data/some/path")
    assert client.auth.used.called is True
@pytest.mark.usefixtures("req_any")
@pytest.mark.parametrize("client", ["valid_token"], indirect=True)
def test_vault_client_request_raw_does_not_increase_use_count_with_unauthd_endpoint(
    client,
):
    """
    Unauthenticated endpoints do not consume a token use. Since some cannot be detected
    easily because of customizable mount points for secret engines and auth methods,
    this can be specified in the request. Make sure it is honored.
    """
    client.request("GET", "pki/cert/ca", is_unauthd=True)
    client.auth.used.assert_not_called()
@pytest.mark.parametrize("client", ["valid_token"], indirect=True)
def test_vault_client_token_lookup_self_possible(client, req_any):
    """
    Ensure an authenticated client can lookup its own token
    """
    client.token_lookup()
    headers = req_any.call_args.kwargs.get("headers", {})
    _, url = req_any.call_args[0]
    # the client's own token authenticates the self-lookup
    assert headers.get("X-Vault-Token") == str(client.auth.get_token())
    assert url.endswith("lookup-self")
@pytest.mark.parametrize("client", ["valid_token"], indirect=True)
def test_vault_client_token_lookup_supports_token_arg(client, req_any):
    """
    Ensure an authenticated client can lookup other tokens
    """
    token = "other-test-token"
    client.token_lookup(token=token)
    headers = req_any.call_args.kwargs.get("headers", {})
    payload = req_any.call_args.kwargs.get("json", {})
    _, url = req_any.call_args[0]
    # looked-up token goes into the payload, own token into the auth header
    assert payload.get("token") == token
    assert headers.get("X-Vault-Token") == str(client.auth.get_token())
    assert url.endswith("lookup")
@pytest.mark.parametrize("client", ["valid_token"], indirect=True)
@pytest.mark.parametrize("renewable", [True, False])
def test_vault_client_token_renew_self_possible(
    token_renew_self_response, client, req, renewable
):
    """
    Ensure an authenticated client can renew its own token only when
    it is renewable and that the renewed data is passed along to the
    token store
    """
    req.return_value = _mock_json_response(token_renew_self_response)
    client.auth.is_renewable.return_value = renewable
    res = client.token_renew()
    if renewable:
        headers = req.call_args.kwargs.get("headers", {})
        _, url = req.call_args[0]
        assert headers.get("X-Vault-Token") == str(client.auth.get_token())
        assert url.endswith("renew-self")
        req.assert_called_once()
        # the refreshed auth data must be propagated to the token store
        client.auth.update_token.assert_called_once_with(
            token_renew_self_response["auth"]
        )
        assert res == token_renew_self_response["auth"]
    else:
        # non-renewable tokens must short-circuit without a request
        assert res is False
@pytest.mark.parametrize("client", ["valid_token"], indirect=True)
def test_vault_client_token_renew_supports_token_arg(
    token_renew_other_response, client, req
):
    """
    Ensure an authenticated client can renew other tokens
    """
    req.return_value = _mock_json_response(token_renew_other_response)
    token = "other-test-token"
    client.token_renew(token=token)
    headers = req.call_args.kwargs.get("headers", {})
    payload = req.call_args.kwargs.get("json", {})
    _, url = req.call_args[0]
    # renewed token goes into the payload, own token into the auth header
    assert payload.get("token") == token
    assert headers.get("X-Vault-Token") == str(client.auth.get_token())
    assert url.endswith("renew")
@pytest.mark.parametrize("client", ["valid_token"], indirect=True)
def test_vault_client_token_renew_uses_accessor(
    token_renew_accessor_response, client, req
):
    """
    Ensure a client can renew tokens with provided accessor
    """
    req.return_value = _mock_json_response(token_renew_accessor_response)
    client.token_renew(accessor="test-token-accessor")
    payload = req.call_args.kwargs.get("json", {})
    _, url = req.call_args[0]
    # the accessor selects the dedicated renew-accessor endpoint
    assert payload.get("accessor") == "test-token-accessor"
    assert url.endswith("renew-accessor")
@pytest.mark.parametrize("client", ["valid_token"], indirect=True)
@pytest.mark.parametrize("token", [None, "other-test-token"])
def test_vault_client_token_renew_self_updates_token(
    token_renew_self_response, client, token, req
):
    """
    Ensure the current client token is updated when it is renewed, but not
    when another token is renewed
    """
    req.return_value = _mock_json_response(token_renew_self_response)
    client.token_renew(token=token)
    if token is None:
        # self-renewal must refresh the locally stored token
        assert client.auth.update_token.called
    else:
        assert not client.auth.update_token.called
@pytest.mark.parametrize("client", ["valid_token"], indirect=True)
@pytest.mark.parametrize(
    "token,accessor",
    [(None, None), ("other-test-token", None), (None, "test-accessor")],
)
def test_vault_client_token_renew_increment_is_honored(
    token, accessor, client, token_renew_self_response, req
):
    """
    Ensure the renew increment is passed to vault if provided
    """
    req.return_value = _mock_json_response(token_renew_self_response)
    client.token_renew(token=token, accessor=accessor, increment=3600)
    payload = req.call_args.kwargs.get("json", {})
    # regardless of renewal variant, the increment must be in the payload
    assert payload.get("increment") == 3600
@pytest.mark.parametrize(
    "secret,config,expected",
    [
        # defaults without config
        ("token", None, r"auth/token/create(/[^/]+)?"),
        ("secret_id", None, r"auth/[^/]+/role/[^/]+/secret\-id"),
        ("role_id", None, r"auth/[^/]+/role/[^/]+/role\-id"),
        # mount/name taken from the passed config
        (
            "secret_id",
            {"auth": {"approle_mount": "test_mount", "approle_name": "test_minion"}},
            r"auth/test_mount/role/test_minion/secret\-id",
        ),
        (
            "role_id",
            {"auth": {"approle_mount": "test_mount", "approle_name": "test_minion"}},
            r"auth/test_mount/role/test_minion/role\-id",
        ),
        # regex special characters in mount/name must be escaped
        (
            "secret_id",
            {"auth": {"approle_mount": "te$t-mount", "approle_name": "te$t-minion"}},
            r"auth/te\$t\-mount/role/te\$t\-minion/secret\-id",
        ),
        (
            "role_id",
            {"auth": {"approle_mount": "te$t-mount", "approle_name": "te$t-minion"}},
            r"auth/te\$t\-mount/role/te\$t\-minion/role\-id",
        ),
    ],
)
def test_get_expected_creation_path(secret, config, expected):
    """
    Ensure expected creation paths are resolved as expected
    """
    assert vclient._get_expected_creation_path(secret, config) == expected
def test_get_expected_creation_path_fails_for_unknown_type():
    """
    Requesting a creation path for a secret type that is not known
    must raise a SaltInvocationError.
    """
    with pytest.raises(salt.exceptions.SaltInvocationError):
        vclient._get_expected_creation_path("nonexistent")
@pytest.mark.parametrize(
    "server_config",
    [
        {
            "url": "https://127.0.0.1:8200",
            "verify": "-----BEGIN CERTIFICATE-----testcert",
        }
    ],
    indirect=True,
)
def test_vault_client_verify_pem(server_config):
    """
    Test that the ``verify`` parameter to the client can contain a PEM-encoded certificate
    which will be used as the sole trust anchor for the Vault URL.
    The ``verify`` parameter to ``Session.request`` should be None in that case since
    it requires a local file path.
    """
    with patch("salt.utils.vault.client.CACertHTTPSAdapter", autospec=True) as adapter:
        with patch("salt.utils.vault.requests.Session", autospec=True) as session:
            client = vclient.VaultClient(**server_config)
            # the PEM is handed to a custom adapter mounted for the Vault URL
            adapter.assert_called_once_with(server_config["verify"])
            session.return_value.mount.assert_called_once_with(
                server_config["url"], adapter.return_value
            )
            client.request_raw("GET", "test")
            # verify must be None here: requests expects a file path there
            session.return_value.request.assert_called_once_with(
                "GET",
                f"{server_config['url']}/v1/test",
                headers=ANY,
                json=ANY,
                verify=None,
            )

File diff suppressed because it is too large Load diff

View file

@ -1,119 +0,0 @@
# this needs to be from! see test_iso_to_timestamp_polyfill
from datetime import datetime
import pytest
import salt.utils.vault.helpers as hlp
from tests.support.mock import patch
@pytest.mark.parametrize(
    "opts_runtype,expected",
    [
        ("master", hlp.SALT_RUNTYPE_MASTER),
        ("master_peer_run", hlp.SALT_RUNTYPE_MASTER_PEER_RUN),
        ("master_impersonating", hlp.SALT_RUNTYPE_MASTER_IMPERSONATING),
        # several distinct opts dicts must all map to the local-minion type
        ("minion_local_1", hlp.SALT_RUNTYPE_MINION_LOCAL),
        ("minion_local_2", hlp.SALT_RUNTYPE_MINION_LOCAL),
        ("minion_local_3", hlp.SALT_RUNTYPE_MINION_LOCAL),
        ("minion_remote", hlp.SALT_RUNTYPE_MINION_REMOTE),
    ],
    indirect=["opts_runtype"],
)
def test_get_salt_run_type(opts_runtype, expected):
    """
    Ensure run types are detected as expected
    """
    assert hlp._get_salt_run_type(opts_runtype) == expected
@pytest.mark.parametrize(
    "pattern,expected",
    [
        ("no-tokens-to-replace", ["no-tokens-to-replace"]),
        # scalar-valued tokens are kept verbatim (substituted later)
        ("single-dict:{minion}", ["single-dict:{minion}"]),
        ("single-list:{grains[roles]}", ["single-list:web", "single-list:database"]),
        # multiple list tokens expand to the cartesian product
        (
            "multiple-lists:{grains[roles]}+{grains[aux]}",
            [
                "multiple-lists:web+foo",
                "multiple-lists:web+bar",
                "multiple-lists:database+foo",
                "multiple-lists:database+bar",
            ],
        ),
        (
            "single-list-with-dicts:{grains[id]}+{grains[roles]}+{grains[id]}",
            [
                "single-list-with-dicts:{grains[id]}+web+{grains[id]}",
                "single-list-with-dicts:{grains[id]}+database+{grains[id]}",
            ],
        ),
        (
            "deeply-nested-list:{grains[deep][foo][bar][baz]}",
            [
                "deeply-nested-list:hello",
                "deeply-nested-list:world",
            ],
        ),
    ],
)
def test_expand_pattern_lists(pattern, expected):
    """
    Ensure expand_pattern_lists works as intended:
    - Expand list-valued patterns
    - Do not change non-list-valued tokens
    """
    pattern_vars = {
        "id": "test-minion",
        "roles": ["web", "database"],
        "aux": ["foo", "bar"],
        "deep": {"foo": {"bar": {"baz": ["hello", "world"]}}},
    }
    mappings = {"minion": "test-minion", "grains": pattern_vars}
    output = hlp.expand_pattern_lists(pattern, **mappings)
    assert output == expected
@pytest.mark.parametrize(
    "inpt,expected",
    [
        (60.0, 60.0),
        (60, 60.0),
        ("60", 60.0),
        ("60s", 60.0),
        ("2m", 120.0),
        ("1h", 3600.0),
        ("1d", 86400.0),
        ("1.5s", 1.5),
        ("1.5m", 90.0),
        ("1.5h", 5400.0),
        ("7.5d", 648000.0),
    ],
)
def test_timestring_map(inpt, expected):
    """
    Time strings with s/m/h/d suffixes, bare numbers and numeric types
    must all be normalized to float seconds.
    """
    result = hlp.timestring_map(inpt)
    assert result == expected
@pytest.mark.parametrize(
    "creation_time,expected",
    [
        # the same wall-clock instant in various UTC offsets
        ("2022-08-22T17:16:21-09:30", 1661222781),
        ("2022-08-22T17:16:21-01:00", 1661192181),
        ("2022-08-22T17:16:21+00:00", 1661188581),
        ("2022-08-22T17:16:21Z", 1661188581),
        ("2022-08-22T17:16:21+02:00", 1661181381),
        ("2022-08-22T17:16:21+12:30", 1661143581),
    ],
)
def test_iso_to_timestamp_polyfill(creation_time, expected):
    """
    Ensure the fallback parser yields correct UTC timestamps when
    datetime.fromisoformat is unavailable (simulated via AttributeError).
    """
    with patch("salt.utils.vault.helpers.datetime.datetime") as d:
        d.fromisoformat.side_effect = AttributeError
        # needs from datetime import datetime, otherwise results
        # in infinite recursion
        # pylint: disable=unnecessary-lambda
        d.side_effect = lambda *args: datetime(*args)
        res = hlp.iso_to_timestamp(creation_time)
        assert res == expected

View file

@ -1,592 +0,0 @@
import pytest
import requests.models
import salt.utils.vault as vault
import salt.utils.vault.cache as vcache
import salt.utils.vault.client as vclient
import salt.utils.vault.kv as vkv
from tests.support.mock import MagicMock, Mock, patch
@pytest.fixture
def path():
    """The KV path used throughout these tests."""
    kv_path = "secret/some/path"
    return kv_path
@pytest.fixture
def paths():
    """Expected KV v2 API paths for each operation on ``secret/some/path``."""
    # operation -> path segment inserted after the mount
    segments = {
        "data": "data",
        "metadata": "metadata",
        "delete": "data",
        "delete_versions": "delete",
        "destroy": "destroy",
    }
    return {op: f"secret/{seg}/some/path" for op, seg in segments.items()}
@pytest.fixture
def kvv1_meta_response():
    # sys/internal/ui/mounts response for a KV v1 mount
    # (note: "options" is None, no "version" entry)
    return {
        "request_id": "b82f2df7-a9b6-920c-0ed2-a3463b996f9e",
        "lease_id": "",
        "renewable": False,
        "lease_duration": 0,
        "data": {
            "accessor": "kv_f8731f1b",
            "config": {
                "default_lease_ttl": 0,
                "force_no_cache": False,
                "max_lease_ttl": 0,
            },
            "description": "key/value secret storage",
            "external_entropy_access": False,
            "local": False,
            "options": None,
            "path": "secret/",
            "seal_wrap": False,
            "type": "kv",
            "uuid": "1d9431ac-060a-9b63-4572-3ca7ffd78347",
        },
        "wrap_info": None,
        "warnings": None,
        "auth": None,
    }
@pytest.fixture
def kvv2_meta_response():
    # sys/internal/ui/mounts response for a KV v2 mount
    # (identical to v1 except options reports version "2")
    return {
        "request_id": "b82f2df7-a9b6-920c-0ed2-a3463b996f9e",
        "lease_id": "",
        "renewable": False,
        "lease_duration": 0,
        "data": {
            "accessor": "kv_f8731f1b",
            "config": {
                "default_lease_ttl": 0,
                "force_no_cache": False,
                "max_lease_ttl": 0,
            },
            "description": "key/value secret storage",
            "external_entropy_access": False,
            "local": False,
            "options": {
                "version": "2",
            },
            "path": "secret/",
            "seal_wrap": False,
            "type": "kv",
            "uuid": "1d9431ac-060a-9b63-4572-3ca7ffd78347",
        },
        "wrap_info": None,
        "warnings": None,
        "auth": None,
    }
@pytest.fixture
def kvv1_info():
return {
"v2": False,
"data": "secret/some/path",
"metadata": "secret/some/path",
"delete": "secret/some/path",
"type": "kv",
}
@pytest.fixture
def kvv2_info():
return {
"v2": True,
"data": "secret/data/some/path",
"metadata": "secret/metadata/some/path",
"delete": "secret/data/some/path",
"delete_versions": "secret/delete/some/path",
"destroy": "secret/destroy/some/path",
"type": "kv",
}
@pytest.fixture
def no_kv_info():
return {
"v2": False,
"data": "secret/some/path",
"metadata": "secret/some/path",
"delete": "secret/some/path",
"type": None,
}
@pytest.fixture
def kvv1_response():
    """Sample KV v1 read response: payload directly under ``data``."""
    return {
        "request_id": "35df4df1-c3d8-b270-0682-ddb0160c7450",
        "lease_id": "",
        "renewable": False,
        "lease_duration": 0,
        "data": {
            "foo": "bar",
        },
        "wrap_info": None,
        "warnings": None,
        "auth": None,
    }
@pytest.fixture
def kvv2_response():
    """Sample KV v2 read response: payload nested under ``data.data`` with metadata."""
    return {
        "request_id": "35df4df1-c3d8-b270-0682-ddb0160c7450",
        "lease_id": "",
        "renewable": False,
        "lease_duration": 0,
        "data": {
            "data": {"foo": "bar"},
            "metadata": {
                "created_time": "2020-05-02T07:26:12.180848003Z",
                "deletion_time": "",
                "destroyed": False,
                "version": 1,
            },
        },
        "wrap_info": None,
        "warnings": None,
        "auth": None,
    }
@pytest.fixture
def kv_list_response():
    """Sample KV list response containing a single key."""
    return {
        "request_id": "35df4df1-c3d8-b270-0682-ddb0160c7450",
        "lease_id": "",
        "renewable": False,
        "lease_duration": 0,
        "data": {
            "keys": ["foo"],
        },
        "wrap_info": None,
        "warnings": None,
        "auth": None,
    }
@pytest.fixture
def metadata_nocache():
    """Metadata cache mock that always reports a miss, forcing a metadata lookup."""
    cache = Mock(spec=vcache.VaultCache)
    cache.get.return_value = None
    return cache
@pytest.fixture(params=["v1", "v2"])
def kv_meta(request, metadata_nocache):
    """VaultKV with an uncached metadata lookup.

    The param selects the mount version; the extra "invalid" param (used via
    indirect parametrization) yields an unexpected, non-mount response body.
    """
    client = Mock(spec=vclient.AuthenticatedVaultClient)
    if request.param == "invalid":
        res = {"wrap_info": {}}
    else:
        res = request.getfixturevalue(f"kv{request.param}_meta_response")
    client.get.return_value = res
    return vkv.VaultKV(client, metadata_nocache)
@pytest.fixture(params=["v1", "v2"])
def kv_meta_cached(request):
    """VaultKV whose metadata cache already holds the mount info for the test path."""
    cache = Mock(spec=vcache.VaultCache)
    client = Mock(spec=vclient.AuthenticatedVaultClient)
    kv_meta_response = request.getfixturevalue(f"kv{request.param}_meta_response")
    client.get.return_value = kv_meta_response
    cache.get.return_value = {"secret/some/path": kv_meta_response["data"]}
    return vkv.VaultKV(client, cache)
@pytest.fixture
def kvv1(kvv1_info, kvv1_response, metadata_nocache, kv_list_response):
    """VaultKV wired as a KV v1 mount; PATCH raises as v1 has no native support."""
    client = Mock(spec=vclient.AuthenticatedVaultClient)
    client.get.return_value = kvv1_response
    client.post.return_value = True
    # KV v1 has no patch endpoint; the client raises like Vault would
    client.patch.side_effect = vclient.VaultPermissionDeniedError
    client.list.return_value = kv_list_response
    client.delete.return_value = True
    with patch("salt.utils.vault.kv.VaultKV.is_v2", Mock(return_value=kvv1_info)):
        yield vkv.VaultKV(client, metadata_nocache)
@pytest.fixture
def kvv2(kvv2_info, kvv2_response, metadata_nocache, kv_list_response):
    """VaultKV wired as a KV v2 mount with all verbs succeeding."""
    client = Mock(spec=vclient.AuthenticatedVaultClient)
    client.get.return_value = kvv2_response
    client.post.return_value = True
    client.patch.return_value = True
    client.list.return_value = kv_list_response
    client.delete.return_value = True
    with patch("salt.utils.vault.kv.VaultKV.is_v2", Mock(return_value=kvv2_info)):
        yield vkv.VaultKV(client, metadata_nocache)
@pytest.mark.parametrize(
    "wrapper,param,result",
    [
        ("read_kv", None, {"foo": "bar"}),
        ("write_kv", {"foo": "bar"}, True),
        ("patch_kv", {"foo": "bar"}, True),
        ("delete_kv", None, True),
        ("destroy_kv", [0], True),
        ("list_kv", None, ["foo"]),
    ],
)
@pytest.mark.parametrize("test_remote_config", ["token"], indirect=True)
@pytest.mark.parametrize(
    "clear_unauthd,token_valid", [(False, False), (True, False), (True, True)]
)
def test_kv_wrapper_handles_perm_exceptions(
    wrapper, param, result, test_remote_config, clear_unauthd, token_valid
):
    """
    Test that *_kv wrappers retry with a new client if
    a) the current configuration might be invalid
    b) the current token might not have all policies and
    `cache:clear_on_unauthorized` is True
    """
    func = getattr(vault, wrapper)
    exc = vault.VaultPermissionDeniedError
    args = ["secret/some/path"]
    if param:
        args.append(param)
    args += [{}, {}]
    test_remote_config["cache"]["clear_on_unauthorized"] = clear_unauthd
    with patch("salt.utils.vault.get_kv", autospec=True) as getkv:
        with patch("salt.utils.vault.clear_cache", autospec=True) as cache:
            kv = Mock(spec=vkv.VaultKV)
            kv.client = Mock(spec=vclient.AuthenticatedVaultClient)
            kv.client.token_valid.return_value = token_valid
            # str.rstrip("_kv") strips a *character set*, not the suffix; it
            # happens to work for these names but silently corrupts others
            # (e.g. "revoke_kv" -> "revo"). Remove the suffix explicitly.
            method = wrapper[: -len("_kv")]
            # first call raises, the retry with a fresh client succeeds
            getattr(kv, method).side_effect = (exc, result)
            getkv.side_effect = ((kv, test_remote_config), kv)
            res = func(*args)
            assert res == result
            cache.assert_called_once()
@pytest.mark.parametrize(
    "wrapper,param",
    [
        ("read_kv", None),
        ("write_kv", {"foo": "bar"}),
        ("patch_kv", {"foo": "bar"}),
        ("delete_kv", None),
        ("destroy_kv", [0]),
        ("list_kv", None),
    ],
)
@pytest.mark.parametrize("test_remote_config", ["token"], indirect=True)
def test_kv_wrapper_raises_perm_exceptions_when_configured(
    wrapper, param, test_remote_config
):
    """
    Test that *_kv wrappers do not retry with a new client when `cache:clear_on_unauthorized` is False.
    """
    func = getattr(vault, wrapper)
    exc = vault.VaultPermissionDeniedError
    args = ["secret/some/path"]
    if param:
        args.append(param)
    args += [{}, {}]
    test_remote_config["cache"]["clear_on_unauthorized"] = False
    with patch("salt.utils.vault.get_kv", autospec=True) as getkv:
        with patch("salt.utils.vault.clear_cache", autospec=True):
            kv = Mock(spec=vkv.VaultKV)
            kv.client = Mock(spec=vclient.AuthenticatedVaultClient)
            kv.client.token_valid.return_value = True
            # str.rstrip("_kv") strips characters, not the suffix — remove
            # the "_kv" suffix explicitly to avoid silent name corruption.
            getattr(kv, wrapper[: -len("_kv")]).side_effect = exc
            getkv.return_value = (kv, test_remote_config)
            with pytest.raises(exc):
                func(*args)
@pytest.mark.parametrize(
    "kv_meta,expected",
    [
        (
            "v1",
            "kvv1_info",
        ),
        (
            "v2",
            "kvv2_info",
        ),
        (
            "invalid",
            "no_kv_info",
        ),
    ],
    indirect=["kv_meta"],
)
def test_vault_kv_is_v2_no_cache(kv_meta, expected, request):
    """
    Ensure path metadata is requested as expected and cached
    if the lookup succeeds
    """
    expected_val = request.getfixturevalue(expected)
    res = kv_meta.is_v2("secret/some/path")
    kv_meta.metadata_cache.get.assert_called_once()
    kv_meta.client.get.assert_called_once_with(
        "sys/internal/ui/mounts/secret/some/path"
    )
    # only a successful lookup should be written back to the metadata cache
    if expected != "no_kv_info":
        kv_meta.metadata_cache.store.assert_called_once()
    assert res == expected_val
@pytest.mark.parametrize(
    "kv_meta_cached,expected",
    [
        (
            "v1",
            "kvv1_info",
        ),
        (
            "v2",
            "kvv2_info",
        ),
    ],
    indirect=["kv_meta_cached"],
)
def test_vault_kv_is_v2_cached(kv_meta_cached, expected, request):
    """
    Ensure cache is respected for path metadata
    """
    expected = request.getfixturevalue(expected)
    res = kv_meta_cached.is_v2("secret/some/path")
    kv_meta_cached.metadata_cache.get.assert_called_once()
    kv_meta_cached.metadata_cache.store.assert_not_called()
    # Previously this asserted on the client Mock itself, which is never
    # invoked as a callable, so the check was vacuous. Assert that no
    # metadata request was issued instead (is_v2 uses client.get, see the
    # no-cache test above).
    kv_meta_cached.client.get.assert_not_called()
    assert res == expected
class TestKVV1:
    """Version-specific behavior of VaultKV against a KV v1 mount."""

    # NOTE(review): methods receive the module-level `path` fixture as a
    # parameter; this class attribute of the same name appears unused — confirm.
    path = "secret/some/path"
    @pytest.mark.parametrize("include_metadata", [False, True])
    def test_vault_kv_read(self, kvv1, include_metadata, path):
        """
        Ensure that VaultKV.read works for KV v1 and does not fail if
        metadata is requested, which is invalid for KV v1.
        """
        res = kvv1.read(path, include_metadata=include_metadata)
        kvv1.client.get.assert_called_once_with(path)
        assert res == {"foo": "bar"}
    def test_vault_kv_write(self, kvv1, path):
        """
        Ensure that VaultKV.write works for KV v1.
        """
        data = {"bar": "baz"}
        kvv1.write(path, data)
        kvv1.client.post.assert_called_once_with(path, payload=data)
    @pytest.mark.parametrize(
        "existing,data,expected",
        [
            ({"foo": "bar"}, {"bar": "baz"}, {"foo": "bar", "bar": "baz"}),
            ({"foo": "bar"}, {"foo": None}, {}),
            (
                {"foo": "bar"},
                {"foo2": {"bar": {"baz": True}}},
                {"foo": "bar", "foo2": {"bar": {"baz": True}}},
            ),
            (
                {"foo": {"bar": {"baz": True}}},
                {"foo": {"bar": {"baz": None}}},
                {"foo": {"bar": {}}},
            ),
        ],
    )
    def test_vault_kv_patch(self, kvv1, path, existing, data, expected):
        """
        Ensure that VaultKV.patch works for KV v1.
        This also tests the internal JSON merge patch implementation.
        """
        # patch reads the current secret, merges the update, writes the result
        kvv1.client.get.return_value = {"data": existing}
        kvv1.patch(path, data)
        kvv1.client.post.assert_called_once_with(
            path,
            payload=expected,
        )
    def test_vault_kv_delete(self, kvv1, path):
        """
        Ensure that VaultKV.delete works for KV v1.
        """
        kvv1.delete(path)
        kvv1.client.request.assert_called_once_with("DELETE", path, payload=None)
    def test_vault_kv_delete_versions(self, kvv1, path):
        """
        Ensure that VaultKV.delete with versions raises an exception for KV v1.
        """
        with pytest.raises(
            vault.VaultInvocationError, match="Versioning support requires kv-v2.*"
        ):
            kvv1.delete(path, versions=[1, 2, 3, 4])
    def test_vault_kv_destroy(self, kvv1, path):
        """
        Ensure that VaultKV.destroy raises an exception for KV v1.
        """
        with pytest.raises(vault.VaultInvocationError):
            kvv1.destroy(path, [1, 2, 3, 4])
    def test_vault_kv_nuke(self, kvv1, path):
        """
        Ensure that VaultKV.nuke raises an exception for KV v1.
        """
        with pytest.raises(vault.VaultInvocationError):
            kvv1.nuke(path)
    def test_vault_kv_list(self, kvv1, path):
        """
        Ensure that VaultKV.list works for KV v1 and only returns keys.
        """
        res = kvv1.list(path)
        kvv1.client.list.assert_called_once_with(path)
        assert res == ["foo"]
class TestKVV2:
    """Version-specific behavior of VaultKV against a KV v2 mount."""

    @pytest.mark.parametrize(
        "versions,expected",
        [
            (0, [0]),
            ("1", [1]),
            ([2], [2]),
            (["3"], [3]),
        ],
    )
    def test_parse_versions(self, kvv2, versions, expected):
        """
        Ensure parsing versions works as expected:
        single integer/number string or list of those are allowed
        """
        assert kvv2._parse_versions(versions) == expected
    def test_parse_versions_raises_exception_when_unparsable(self, kvv2):
        """
        Ensure unparsable versions raise an exception
        """
        with pytest.raises(vault.VaultInvocationError):
            kvv2._parse_versions("four")
    def test_get_secret_path_metadata_lookup_unexpected_response(
        self, kvv2, caplog, path
    ):
        """
        Ensure unexpected responses are treated as not KV
        """
        # _mock_json_response() returns a Mock, but we need MagicMock here
        resp_mm = MagicMock(spec=requests.models.Response)
        resp_mm.json.return_value = {"wrap_info": {}}
        resp_mm.status_code = 200
        resp_mm.reason = ""
        kvv2.client.get.return_value = resp_mm
        res = kvv2._get_secret_path_metadata(path)
        assert res is None
        assert "Unexpected response to metadata query" in caplog.text
    def test_get_secret_path_metadata_lookup_request_error(self, kvv2, caplog, path):
        """
        Ensure HTTP error status codes are treated as not KV
        """
        kvv2.client.get.side_effect = vault.VaultPermissionDeniedError
        res = kvv2._get_secret_path_metadata(path)
        assert res is None
        assert "VaultPermissionDeniedError:" in caplog.text
    @pytest.mark.parametrize("include_metadata", [False, True])
    def test_vault_kv_read(self, kvv2, include_metadata, kvv2_response, paths, path):
        """
        Ensure that VaultKV.read works for KV v2 and returns metadata
        if requested.
        """
        # ``path`` is now requested as a fixture: previously the bare name
        # resolved to the module-level fixture *function*, not the path string.
        res = kvv2.read(path, include_metadata=include_metadata)
        kvv2.client.get.assert_called_once_with(paths["data"])
        if include_metadata:
            assert res == kvv2_response["data"]
        else:
            assert res == kvv2_response["data"]["data"]
    def test_vault_kv_write(self, kvv2, path, paths):
        """
        Ensure that VaultKV.write works for KV v2.
        """
        data = {"bar": "baz"}
        kvv2.write(path, data)
        kvv2.client.post.assert_called_once_with(paths["data"], payload={"data": data})
    def test_vault_kv_patch(self, kvv2, path, paths):
        """
        Ensure that VaultKV.patch works for KV v2.
        """
        data = {"bar": "baz"}
        kvv2.patch(path, data)
        kvv2.client.patch.assert_called_once_with(
            paths["data"],
            payload={"data": data},
            add_headers={"Content-Type": "application/merge-patch+json"},
        )
    def test_vault_kv_delete(self, kvv2, path, paths):
        """
        Ensure that VaultKV.delete works for KV v2.
        """
        kvv2.delete(path)
        kvv2.client.request.assert_called_once_with(
            "DELETE", paths["data"], payload=None
        )
    @pytest.mark.parametrize(
        "versions", [[1, 2], [2], 2, ["1", "2"], ["2"], "2", [1, "2"]]
    )
    def test_vault_kv_delete_versions(self, kvv2, versions, path, paths):
        """
        Ensure that VaultKV.delete with versions works for KV v2.
        """
        if isinstance(versions, list):
            expected = [int(x) for x in versions]
        else:
            expected = [int(versions)]
        kvv2.delete(path, versions=versions)
        kvv2.client.request.assert_called_once_with(
            "POST", paths["delete_versions"], payload={"versions": expected}
        )
    @pytest.mark.parametrize(
        "versions", [[1, 2], [2], 2, ["1", "2"], ["2"], "2", [1, "2"]]
    )
    def test_vault_kv_destroy(self, kvv2, versions, path, paths):
        """
        Ensure that VaultKV.destroy works for KV v2.
        """
        if isinstance(versions, list):
            expected = [int(x) for x in versions]
        else:
            expected = [int(versions)]
        kvv2.destroy(path, versions)
        kvv2.client.post.assert_called_once_with(
            paths["destroy"], payload={"versions": expected}
        )
    def test_vault_kv_nuke(self, kvv2, path, paths):
        """
        Ensure that VaultKV.nuke works for KV v2.
        """
        kvv2.nuke(path)
        kvv2.client.delete.assert_called_once_with(paths["metadata"])
    def test_vault_kv_list(self, kvv2, path, paths):
        """
        Ensure that VaultKV.list works for KV v2 and only returns keys.
        """
        res = kvv2.list(path)
        kvv2.client.list.assert_called_once_with(paths["metadata"])
        assert res == ["foo"]

View file

@ -1,363 +0,0 @@
import pytest
import salt.utils.vault as vault
import salt.utils.vault.cache as vcache
import salt.utils.vault.client as vclient
import salt.utils.vault.leases as leases
from tests.support.mock import Mock, call, patch
@pytest.fixture(autouse=True, params=[0])
def time_stopped(request):
    """Freeze the leases module's time.time() at the param value (default 0)."""
    with patch(
        "salt.utils.vault.leases.time.time", autospec=True, return_value=request.param
    ):
        yield
@pytest.fixture
def lease_renewed_response():
    """Renewal response granting the default lease duration (2000s)."""
    return {
        "lease_id": "database/creds/testrole/abcd",
        "renewable": True,
        "lease_duration": 2000,
    }
@pytest.fixture
def lease_renewed_extended_response():
    """Renewal response granting an extended lease duration (3000s)."""
    return {
        "lease_id": "database/creds/testrole/abcd",
        "renewable": True,
        "lease_duration": 3000,
    }
@pytest.fixture
def store(events):
    """LeaseStore backed by mocked client/cache; the cache starts empty."""
    client = Mock(spec=vclient.AuthenticatedVaultClient)
    cache = Mock(spec=vcache.VaultLeaseCache)
    cache.exists.return_value = False
    cache.get.return_value = None
    return leases.LeaseStore(client, cache, expire_events=events)
@pytest.fixture
def store_valid(store, lease, lease_renewed_response):
    """LeaseStore whose cache holds a valid lease and whose renewals succeed."""
    store.cache.exists.return_value = True
    store.cache.get.return_value = leases.VaultLease(**lease)
    store.client.post.return_value = lease_renewed_response
    return store
@pytest.mark.parametrize(
    "creation_time",
    [
        1661188581,
        "1661188581",
        "2022-08-22T17:16:21.473219641+00:00",
        "2022-08-22T17:16:21.47321964+00:00",
        "2022-08-22T17:16:21.4732196+00:00",
        "2022-08-22T17:16:21.473219+00:00",
        "2022-08-22T17:16:21.47321+00:00",
        "2022-08-22T17:16:21.4732+00:00",
        "2022-08-22T17:16:21.473+00:00",
        "2022-08-22T17:16:21.47+00:00",
        "2022-08-22T17:16:21.4+00:00",
    ],
)
def test_vault_lease_creation_time_normalization(creation_time):
    """
    Ensure the normalization of different creation_time formats works as expected -
    many token endpoints report a timestamp, while other endpoints report RFC3339-formatted
    strings that may have a variable number of digits for sub-second precision (0 omitted)
    while datetime.fromisoformat expects exactly 6 digits
    """
    data = {
        "lease_id": "id",
        "renewable": False,
        "lease_duration": 1337,
        "creation_time": creation_time,
        "data": None,
    }
    res = leases.VaultLease(**data)
    # every parametrized representation denotes this same instant
    assert res.creation_time == 1661188581
@pytest.mark.parametrize(
    "time_stopped,duration,offset,expected",
    [
        (0, 50, 0, True),
        (50, 10, 0, False),
        (0, 60, 10, True),
        (0, 60, 600, False),
    ],
    indirect=["time_stopped"],
)
def test_vault_lease_is_valid_accounts_for_time(duration, offset, expected):
    """
    Ensure lease validity is checked correctly and can look into the future
    """
    data = {
        "lease_id": "id",
        "renewable": False,
        "lease_duration": duration,
        "creation_time": 0,
        "expire_time": duration,
        "data": None,
    }
    res = leases.VaultLease(**data)
    assert res.is_valid_for(offset) is expected
@pytest.mark.parametrize(
    "time_stopped,duration,offset,expected",
    [
        (0, 50, 0, True),
        (50, 10, 0, False),
        (0, 60, 10, True),
        (0, 60, 600, False),
    ],
    indirect=["time_stopped"],
)
def test_vault_token_is_valid_accounts_for_time(duration, offset, expected):
    """
    Ensure token time validity is checked correctly and can look into the future
    """
    data = {
        "client_token": "id",
        "renewable": False,
        "lease_duration": duration,
        "num_uses": 0,
        "creation_time": 0,
        "expire_time": duration,
    }
    res = vault.VaultToken(**data)
    assert res.is_valid_for(offset) is expected
@pytest.mark.parametrize(
    "num_uses,uses,expected",
    [(0, 999999, True), (1, 0, True), (1, 1, False), (1, 2, False)],
)
def test_vault_token_is_valid_accounts_for_num_uses(num_uses, uses, expected):
    """
    Ensure token uses validity is checked correctly
    """
    data = {
        "client_token": "id",
        "renewable": False,
        "lease_duration": 0,
        "num_uses": num_uses,
        "creation_time": 0,
        "use_count": uses,
    }
    # time-based validity is forced True to isolate the use-count check
    with patch(
        "salt.utils.vault.leases.BaseLease.is_valid_for",
        autospec=True,
        return_value=True,
    ):
        res = vault.VaultToken(**data)
        assert res.is_valid() is expected
@pytest.mark.parametrize(
    "time_stopped,duration,offset,expected",
    [
        (0, 50, 0, True),
        (50, 10, 0, False),
        (0, 60, 10, True),
        (0, 60, 600, False),
    ],
    indirect=["time_stopped"],
)
def test_vault_approle_secret_id_is_valid_accounts_for_time(duration, offset, expected):
    """
    Ensure secret ID time validity is checked correctly and can look into the future
    """
    data = {
        "secret_id": "test-secret-id",
        "renewable": False,
        "creation_time": 0,
        "expire_time": duration,
        "secret_id_num_uses": 0,
        "secret_id_ttl": duration,
    }
    res = vault.VaultSecretId(**data)
    # NOTE(review): this calls is_valid(offset) while the lease/token tests
    # above call is_valid_for(offset) — confirm both accept an offset.
    assert res.is_valid(offset) is expected
@pytest.mark.parametrize(
    "num_uses,uses,expected",
    [(0, 999999, True), (1, 0, True), (1, 1, False), (1, 2, False)],
)
def test_vault_approle_secret_id_is_valid_accounts_for_num_uses(
    num_uses, uses, expected
):
    """
    Ensure secret ID uses validity is checked correctly
    """
    data = {
        "secret_id": "test-secret-id",
        "renewable": False,
        "creation_time": 0,
        "secret_id_ttl": 0,
        "secret_id_num_uses": num_uses,
        "use_count": uses,
    }
    # time-based validity is forced True to isolate the use-count check
    with patch(
        "salt.utils.vault.leases.BaseLease.is_valid_for",
        autospec=True,
        return_value=True,
    ):
        res = vault.VaultSecretId(**data)
        assert res.is_valid() is expected
class TestLeaseStore:
    """Validity, renewal, revocation and expiry-event behavior of LeaseStore.get."""
    def test_get_uncached_or_invalid(self, store):
        """
        Ensure uncached or invalid leases are reported as None.
        """
        ret = store.get("test")
        assert ret is None
        store.client.post.assert_not_called()
        store.cache.flush.assert_not_called()
        store.cache.store.assert_not_called()
    def test_get_cached_valid(self, store_valid, lease):
        """
        Ensure valid leases are returned without extra behavior.
        """
        ret = store_valid.get("test")
        assert ret == lease
        store_valid.client.post.assert_not_called()
        store_valid.cache.flush.assert_not_called()
        store_valid.cache.store.assert_not_called()
    @pytest.mark.parametrize(
        "valid_for", [2000, pytest.param(2002, id="2002_renewal_leeway")]
    )
    def test_get_valid_renew_default_period(self, store_valid, lease, valid_for):
        """
        Ensure renewals are attempted by default, cache is updated accordingly
        and validity checks after renewal allow for a little leeway to account
        for latency.
        """
        ret = store_valid.get("test", valid_for=valid_for)
        # the mocked renewal response grants 2000 seconds
        lease["duration"] = lease["expire_time"] = 2000
        assert ret == lease
        store_valid.client.post.assert_called_once_with(
            "sys/leases/renew", payload={"lease_id": lease["id"]}
        )
        store_valid.cache.flush.assert_not_called()
        store_valid.cache.store.assert_called_once_with("test", ret)
        store_valid.expire_events.assert_not_called()
    def test_get_valid_renew_increment(self, store_valid, lease):
        """
        Ensure renew_increment is honored when renewing.
        """
        ret = store_valid.get("test", valid_for=1400, renew_increment=2000)
        lease["duration"] = lease["expire_time"] = 2000
        assert ret == lease
        store_valid.client.post.assert_called_once_with(
            "sys/leases/renew", payload={"lease_id": lease["id"], "increment": 2000}
        )
        store_valid.cache.flush.assert_not_called()
        store_valid.cache.store.assert_called_once_with("test", ret)
        store_valid.expire_events.assert_not_called()
    def test_get_valid_renew_increment_insufficient(self, store_valid, lease):
        """
        Ensure that when renewal_increment is set, valid_for is respected and that
        a second renewal using valid_for as increment is not attempted when the
        Vault server does not allow renewals for at least valid_for.
        If an event factory was passed, an event should be sent.
        """
        ret = store_valid.get("test", valid_for=2100, renew_increment=3000)
        assert ret is None
        store_valid.client.post.assert_has_calls(
            (
                call(
                    "sys/leases/renew",
                    payload={"lease_id": lease["id"], "increment": 3000},
                ),
                # second call: the insufficient lease is shortened (increment=60)
                call(
                    "sys/leases/renew",
                    payload={"lease_id": lease["id"], "increment": 60},
                ),
            )
        )
        store_valid.cache.flush.assert_called_once_with("test")
        store_valid.expire_events.assert_called_once_with(
            tag="vault/lease/test/expire", data={"valid_for_less": 2100}
        )
    @pytest.mark.parametrize(
        "valid_for", [3000, pytest.param(3002, id="3002_renewal_leeway")]
    )
    def test_get_valid_renew_valid_for(
        self,
        store_valid,
        lease,
        valid_for,
        lease_renewed_response,
        lease_renewed_extended_response,
    ):
        """
        Ensure that, if renew_increment was not set and the default period
        does not yield valid_for, a second renewal is attempted by valid_for.
        There should be some leeway by default to account for latency.
        """
        # first renewal yields 2000s (insufficient), the retry yields 3000s
        store_valid.client.post.side_effect = (
            lease_renewed_response,
            lease_renewed_extended_response,
        )
        ret = store_valid.get("test", valid_for=valid_for)
        lease["duration"] = lease["expire_time"] = 3000
        assert ret == lease
        store_valid.client.post.assert_has_calls(
            (
                call("sys/leases/renew", payload={"lease_id": lease["id"]}),
                call(
                    "sys/leases/renew",
                    payload={"lease_id": lease["id"], "increment": valid_for},
                ),
            )
        )
        store_valid.cache.flush.assert_not_called()
        store_valid.cache.store.assert_called_with("test", ret)
        store_valid.expire_events.assert_not_called()
    def test_get_valid_not_renew(self, store_valid, lease):
        """
        Currently valid leases should not be returned if they undercut
        valid_for. By default, revocation should be attempted and cache
        should be flushed. If an event factory was passed, an event should be sent.
        """
        ret = store_valid.get("test", valid_for=2000, renew=False)
        assert ret is None
        store_valid.cache.store.assert_not_called()
        # NOTE(review): revocation appears to be implemented as a short
        # renewal (increment=60) rather than sys/leases/revoke — confirm.
        store_valid.client.post.assert_called_once_with(
            "sys/leases/renew", payload={"lease_id": lease["id"], "increment": 60}
        )
        store_valid.cache.flush.assert_called_once_with("test")
        store_valid.expire_events.assert_called_once_with(
            tag="vault/lease/test/expire", data={"valid_for_less": 2000}
        )
    def test_get_valid_not_flush(self, store_valid):
        """
        Currently valid leases should not be returned if they undercut
        valid_for and should not be revoked if requested so.
        If an event factory was passed, an event should be sent.
        """
        ret = store_valid.get("test", valid_for=2000, revoke=False, renew=False)
        assert ret is None
        store_valid.cache.flush.assert_not_called()
        store_valid.client.post.assert_not_called()
        store_valid.cache.store.assert_not_called()
        store_valid.expire_events.assert_called_once_with(
            tag="vault/lease/test/expire", data={"valid_for_less": 2000}
        )

View file

@ -1,311 +0,0 @@
import json
import logging
import subprocess
import time
import pytest
from pytestshellutils.utils.processes import ProcessResult
import salt.utils.files
import salt.utils.path
from tests.support.helpers import PatchedEnviron
from tests.support.runtests import RUNTIME_VARS
log = logging.getLogger(__name__)
def _vault_cmd(cmd, textinput=None, raw=False):
    """Run the vault CLI with the given argument list.

    Returns a ProcessResult. With ``raw=True`` the result is returned
    regardless of exit status; otherwise a nonzero exit is logged at debug
    level and raised as RuntimeError.
    """
    binary = salt.utils.path.which("vault")
    completed = subprocess.run(
        [binary, *cmd],
        check=False,
        input=textinput,
        capture_output=True,
        text=True,
    )
    result = ProcessResult(
        returncode=completed.returncode,
        stdout=completed.stdout,
        stderr=completed.stderr,
        cmdline=completed.args,
    )
    if raw:
        return result
    if result.returncode != 0:
        log.debug("Failed to run vault %s:\n%s", " ".join(cmd), result)
        raise RuntimeError()
    return result
def vault_write_policy(name, rules):
    """Create or update the Vault policy ``name`` from the ``rules`` string."""
    cmd = ["policy", "write", name, "-"]
    try:
        # the rules are fed via stdin ("-")
        _vault_cmd(cmd, textinput=rules)
    except RuntimeError:
        pytest.fail(f"Unable to write policy `{name}`")
def vault_write_policy_file(policy, filename=None):
    """Write the Vault policy ``policy`` from a bundled .hcl policy file.

    filename
        Basename (without extension) of the file under
        ``{RUNTIME_VARS.FILES}/vault/policies``. Defaults to ``policy``.
    """
    if filename is None:
        filename = policy
    try:
        _vault_cmd(
            [
                "policy",
                "write",
                policy,
                # previously a hardcoded literal was passed here, so the
                # computed ``filename`` was silently ignored
                f"{RUNTIME_VARS.FILES}/vault/policies/{filename}.hcl",
            ]
        )
    except RuntimeError:
        pytest.fail(f"Unable to write policy `{policy}`")
def vault_read_policy(policy):
    """Return the rules of ``policy`` as a string, or None if it does not exist."""
    ret = _vault_cmd(["policy", "read", "-format=json", policy], raw=True)
    if ret.returncode != 0:
        # a missing policy is an expected outcome, any other failure is not
        if "No policy named" in ret.stderr:
            return None
        log.debug("Failed to read policy `%s`:\n%s", policy, ret)
        pytest.fail(f"Unable to read policy `{policy}`")
    res = json.loads(ret.stdout)
    return res["policy"]
def vault_list_policies():
    """Return the list of policy names known to the Vault server."""
    try:
        ret = _vault_cmd(["policy", "list", "-format=json"])
    except RuntimeError:
        pytest.fail("Unable to list policies")
    return json.loads(ret.stdout)
def vault_delete_policy(policy):
    """Delete ``policy``, failing the current test on error."""
    try:
        _vault_cmd(["policy", "delete", policy])
    except RuntimeError:
        pytest.fail(f"Unable to delete policy `{policy}`")
def vault_enable_secret_engine(name, options=None, **kwargs):
    """Enable the secret engine ``name``.

    Returns True on success, False if the path is already in use.
    ``options`` are extra CLI flags; ``kwargs`` become ``key=value`` engine
    configuration arguments (previously accepted but silently dropped —
    now forwarded like in vault_enable_auth_method).
    """
    if options is None:
        options = []
    cmd = (
        ["secrets", "enable"] + options + [name] + [f"{k}={v}" for k, v in kwargs.items()]
    )
    try:
        ret = _vault_cmd(cmd)
    except RuntimeError:
        pytest.fail(f"Could not enable secret engine `{name}`")
    if "path is already in use at" in ret.stdout:
        return False
    if "Success" in ret.stdout:
        return True
    log.debug("Failed to enable secret engine `%s`:\n%s", name, ret)
    pytest.fail(f"Could not enable secret engine `{name}`: {ret.stdout}")
def vault_disable_secret_engine(name):
    """Disable the secret engine mounted at ``name``; True on success."""
    try:
        res = _vault_cmd(["secrets", "disable", name])
    except RuntimeError:
        pytest.fail(f"Could not disable secret engine `{name}`")
    if "Success" not in res.stdout:
        log.debug("Failed to disable secret engine `%s`:\n%s", name, res)
        pytest.fail(f"Could not disable secret engine `{name}`: {res.stdout}")
    return True
def vault_enable_auth_method(name, options=None, **kwargs):
    """Enable the auth method ``name``.

    Returns True on success, False if the path is already in use.
    ``options`` are extra CLI flags; ``kwargs`` become ``key=value``
    configuration arguments.
    """
    if options is None:
        options = []
    cmd = (
        ["auth", "enable"] + options + [name] + [f"{k}={v}" for k, v in kwargs.items()]
    )
    try:
        ret = _vault_cmd(cmd)
    except RuntimeError:
        pytest.fail(f"Could not enable auth method `{name}`")
    if "path is already in use at" in ret.stdout:
        return False
    if "Success" in ret.stdout:
        return True
    log.debug("Failed to enable auth method `%s`:\n%s", name, ret)
    pytest.fail(f"Could not enable auth method `{name}`: {ret.stdout}")
def vault_disable_auth_method(name):
    """Disable the auth method ``name``; True on success, otherwise fail the test."""
    try:
        ret = _vault_cmd(["auth", "disable", name])
    except RuntimeError:
        pytest.fail(f"Could not disable auth method `{name}`")
    if "Success" in ret.stdout:
        return True
    log.debug("Failed to disable auth method `%s`:\n%s", name, ret)
    pytest.fail(f"Could not disable auth method `{name}`: {ret.stdout}")
def vault_write_secret(path, **kwargs):
    """Write ``kwargs`` as the secret at ``path`` and verify it reads back."""
    args = [f"{k}={v}" for k, v in kwargs.items()]
    try:
        ret = _vault_cmd(["kv", "put", path] + args)
    except RuntimeError:
        pytest.fail(f"Failed to write secret at `{path}`")
    # round-trip check: the written secret must read back identically
    if vault_read_secret(path) != kwargs:
        log.debug("Failed to write secret at `%s`:\n%s", path, ret)
        pytest.fail(f"Failed to write secret at `{path}`")
    return True
def vault_write_secret_file(path, data_name):
    """Write the secret stored in the bundled ``<data_name>.json`` file at ``path``.

    Fails the test unless the written secret reads back identical to the
    file contents.
    """
    data_path = f"{RUNTIME_VARS.FILES}/vault/data/{data_name}.json"
    with salt.utils.files.fopen(data_path) as f:
        data = json.load(f)
    cmd = ["kv", "put", path, f"@{data_path}"]
    try:
        # ``cmd`` is already a list; it was previously wrapped in another
        # list (``_vault_cmd([cmd])``), which handed a nested list to
        # subprocess and could never succeed.
        ret = _vault_cmd(cmd)
    except RuntimeError:
        pytest.fail(f"Failed to write secret at `{path}`")
    if vault_read_secret(path) != data:
        log.debug("Failed to write secret at `%s`:\n%s", path, ret)
        pytest.fail(f"Failed to write secret at `{path}`")
    return True
def vault_read_secret(path):
    """Return the secret data at ``path`` (unwrapping KV v2), or None if absent."""
    ret = _vault_cmd(["kv", "get", "-format=json", path], raw=True)
    if ret.returncode != 0:
        if "No value found at" in ret.stderr:
            return None
        log.debug("Failed to read secret at `%s`:\n%s", path, ret)
        pytest.fail(f"Failed to read secret at `{path}`")
    res = json.loads(ret.stdout)
    # KV v2 nests the payload under data.data, KV v1 stores it under data
    if "data" in res["data"]:
        return res["data"]["data"]
    return res["data"]
def vault_list_secrets(path):
    """Return the list of secret keys under ``path`` (empty list if none)."""
    ret = _vault_cmd(["kv", "list", "-format=json", path], raw=True)
    if ret.returncode != 0:
        # exit code 2 is treated as an empty listing here
        if ret.returncode == 2:
            return []
        log.debug("Failed to list secrets at `%s`:\n%s", path, ret)
        pytest.fail(f"Failed to list secrets at `{path}`")
    return json.loads(ret.stdout)
def vault_delete_secret(path, metadata=False):
    """Delete the secret at ``path``; with ``metadata=True`` also drop KV v2 metadata."""
    try:
        ret = _vault_cmd(["kv", "delete", path])
    except RuntimeError:
        pytest.fail(f"Failed to delete secret at `{path}`")
    # verify the secret is actually gone
    if vault_read_secret(path) is not None:
        log.debug("Failed to delete secret at `%s`:\n%s", path, ret)
        pytest.fail(f"Failed to delete secret at `{path}`")
    if not metadata:
        return True
    # metadata deletion only exists on KV v2; the v1 error is tolerated
    ret = _vault_cmd(["kv", "metadata", "delete", path], raw=True)
    if (
        ret.returncode != 0
        and "Metadata not supported on KV Version 1" not in ret.stderr
    ):
        log.debug("Failed to delete secret metadata at `%s`:\n%s", path, ret)
        pytest.fail(f"Failed to delete secret metadata at `{path}`")
    return True
def vault_list(path):
    """Run a generic ``vault list`` at ``path`` and return the parsed JSON."""
    try:
        res = _vault_cmd(["list", "-format=json", path])
    except RuntimeError:
        pytest.fail(f"Failed to list path at `{path}`")
    return json.loads(res.stdout)
@pytest.fixture(scope="module")
def vault_environ(vault_port):
    """Point the vault CLI at the test container by exporting VAULT_ADDR."""
    with PatchedEnviron(VAULT_ADDR=f"http://127.0.0.1:{vault_port}"):
        yield
def vault_container_version_id(value):
    """Build a readable test ID for the vault_container_version fixture params."""
    return "vault=={}".format(value)
@pytest.fixture(
scope="module",
params=["0.9.6", "1.3.1", "latest"],
ids=vault_container_version_id,
)
def vault_container_version(request, salt_factories, vault_port, vault_environ):
vault_version = request.param
vault_binary = salt.utils.path.which("vault")
config = {
"backend": {"file": {"path": "/vault/file"}},
"default_lease_ttl": "168h",
"max_lease_ttl": "720h",
}
factory = salt_factories.get_container(
"vault",
f"ghcr.io/saltstack/salt-ci-containers/vault:{vault_version}",
check_ports=[vault_port],
container_run_kwargs={
"ports": {"8200/tcp": vault_port},
"environment": {
"VAULT_DEV_ROOT_TOKEN_ID": "testsecret",
"VAULT_LOCAL_CONFIG": json.dumps(config),
},
"cap_add": "IPC_LOCK",
},
pull_before_start=True,
skip_on_pull_failure=True,
skip_if_docker_client_not_connectable=True,
)
with factory.started() as factory:
attempts = 0
while attempts < 3:
attempts += 1
time.sleep(1)
proc = subprocess.run(
[vault_binary, "login", "token=testsecret"],
check=False,
capture_output=True,
text=True,
)
if proc.returncode == 0:
break
ret = ProcessResult(
returncode=proc.returncode,
stdout=proc.stdout,
stderr=proc.stderr,
cmdline=proc.args,
)
log.debug("Failed to authenticate against vault:\n%s", ret)
time.sleep(4)
else:
pytest.fail("Failed to login to vault")
vault_write_policy_file("salt_master")
if "latest" == vault_version:
vault_write_policy_file("salt_minion")
else:
vault_write_policy_file("salt_minion", "salt_minion_old")
if vault_version in ("1.3.1", "latest"):
vault_enable_secret_engine("kv-v2")
if vault_version == "latest":
vault_enable_auth_method("approle", ["-path=salt-minions"])
vault_enable_secret_engine("kv", ["-version=2", "-path=salt"])
yield vault_version