Clean up more warts in man page generation
parent 179e95fa89
commit 86dea3a5dd
69 changed files with 2228 additions and 16220 deletions
doc/foo.py (new file, 258 lines)

@@ -0,0 +1,258 @@
aix_group
aix_shadow
aixpkg
aliases
alternatives
ansiblegate
apache
aptpkg
apf
archive
arista_pyeapi
artifactory
at
at_solaris
baredoc
bcache
beacons
bridge
cassandra_cql
cassandra_mod
celery
chef
chocolatey
chronos
chroot
cloud
cmdmod
config
cp
cron
cryptdev
data
debconfmod
debian_ip
debian_service
debuild_pkgbuild
defaults
devinfo
devmap
dig
disk
dnsutil
dpkg_lowpkg
environ
etcd_mod
ethtool
event
extfs
file
firewalld
git
gpg
grains
groupadd
hashutil
highstate_doc
hosts
http
idem
incron
ini_manage
iosconfig
ipset
iptables
iwtools
jinja
junos
kernelpkg_linux_apt
kernelpkg_linux_yum
key
keyboard
kmod
linux_acl
linux_ip
linux_lvm
linux_service
linux_shadow
linux_sysctl
localemod
locate
logmod
logrotate
mac_assistive
mac_brew_pkg
mac_desktop
mac_group
mac_keychain
mac_pkgutil
mac_portspkg
mac_power
mac_service
mac_shadow
mac_softwareupdate
mac_sysctl
mac_system
mac_timezone
mac_user
mac_xattr
macdefaults
macpackage
match
mdadm_raid
mine
minion
mod_random
mount
mysql
nacl
napalm_bgp
napalm_formula
napalm_mod
napalm_netacl
napalm_network
napalm_ntp
napalm_probes
napalm_route
napalm_snmp
napalm_users
napalm_yang_mod
netaddress
network
nfs3
nftables
npm
nxos
nxos_api
nxos_upgrade
oracle
osquery
out
pacmanpkg
pam
parted_partition
pillar
pip
pkg_resource
pkgin
pkgng
pkgutil
postgres
proxy
ps
publish
puppet
pw_group
pw_user
pyenv
quota
rabbitmq
rbac_solaris
rdp
reg
rest_pkg
rest_sample_utils
rest_service
restartcheck
ret
rh_ip
rh_service
rpm_lowpkg
rpmbuild_pkgbuild
rsync
salt_proxy
salt_version
saltcheck
saltcloudmod
saltutil
schedule
scp_mod
scsi
sdb
seed
selinux
slack_notify
slsutil
smbios
smf_service
snapper
solaris_fmadm
solaris_group
solaris_shadow
solaris_system
solaris_user
solarisipspkg
solarispkg
sqlite3
ssh
ssh_pkg
ssh_service
state
status
supervisord
sysfs
syslog_ng
sysmod
system
systemd_service
temp
test
test_virtual
textfsm_mod
timezone
tls
udev
upstart_service
useradd
vagrant
virtualenv_mod
vsphere
webutil
win_appx
win_auditpol
win_autoruns
win_certutil
win_dacl
win_disk
win_dism
win_dns_client
win_dsc
win_event
win_file
win_firewall
win_groupadd
win_iis
win_ip
win_lgpo
win_lgpo_reg
win_license
win_network
win_ntp
win_path
win_pkg
win_pki
win_powercfg
win_psget
win_servermanager
win_service
win_shadow
win_shortcut
win_smtp_server
win_snmp
win_status
win_system
win_task
win_timezone
win_useradd
win_wua
win_wusa
winrepo
x509
x509_v2
xfs
xml
yaml
yumpkg
zk_concurrency
zoneadm
zonecfg
doc/meh.txt (new file, 674 lines)

@@ -0,0 +1,674 @@
/home/dan/src/salt/doc/ref/auth/all/salt.auth.django.rst
|
||||
/home/dan/src/salt/doc/ref/auth/all/salt.auth.keystone.rst
|
||||
/home/dan/src/salt/doc/ref/auth/all/salt.auth.mysql.rst
|
||||
/home/dan/src/salt/doc/ref/auth/all/salt.auth.yubico.rst
|
||||
/home/dan/src/salt/doc/ref/beacons/all/salt.beacons.adb.rst
|
||||
/home/dan/src/salt/doc/ref/beacons/all/salt.beacons.aix_account.rst
|
||||
/home/dan/src/salt/doc/ref/beacons/all/salt.beacons.avahi_announce.rst
|
||||
/home/dan/src/salt/doc/ref/beacons/all/salt.beacons.bonjour_announce.rst
|
||||
/home/dan/src/salt/doc/ref/beacons/all/salt.beacons.btmp.rst
|
||||
/home/dan/src/salt/doc/ref/beacons/all/salt.beacons.glxinfo.rst
|
||||
/home/dan/src/salt/doc/ref/beacons/all/salt.beacons.haproxy.rst
|
||||
/home/dan/src/salt/doc/ref/beacons/all/salt.beacons.junos_rre_keys.rst
|
||||
/home/dan/src/salt/doc/ref/beacons/all/salt.beacons.napalm_beacon.rst
|
||||
/home/dan/src/salt/doc/ref/beacons/all/salt.beacons.sensehat.rst
|
||||
/home/dan/src/salt/doc/ref/beacons/all/salt.beacons.smartos_imgadm.rst
|
||||
/home/dan/src/salt/doc/ref/beacons/all/salt.beacons.smartos_vmadm.rst
|
||||
/home/dan/src/salt/doc/ref/beacons/all/salt.beacons.telegram_bot_msg.rst
|
||||
/home/dan/src/salt/doc/ref/beacons/all/salt.beacons.twilio_txt_msg.rst
|
||||
/home/dan/src/salt/doc/ref/beacons/all/salt.beacons.wtmp.rst
|
||||
/home/dan/src/salt/doc/ref/clouds/all/salt.cloud.clouds.aliyun.rst
|
||||
/home/dan/src/salt/doc/ref/clouds/all/salt.cloud.clouds.clc.rst
|
||||
/home/dan/src/salt/doc/ref/clouds/all/salt.cloud.clouds.cloudstack.rst
|
||||
/home/dan/src/salt/doc/ref/clouds/all/salt.cloud.clouds.digitalocean.rst
|
||||
/home/dan/src/salt/doc/ref/clouds/all/salt.cloud.clouds.dimensiondata.rst
|
||||
/home/dan/src/salt/doc/ref/clouds/all/salt.cloud.clouds.ec2.rst
|
||||
/home/dan/src/salt/doc/ref/clouds/all/salt.cloud.clouds.gce.rst
|
||||
/home/dan/src/salt/doc/ref/clouds/all/salt.cloud.clouds.gogrid.rst
|
||||
/home/dan/src/salt/doc/ref/clouds/all/salt.cloud.clouds.hetzner.rst
|
||||
/home/dan/src/salt/doc/ref/clouds/all/salt.cloud.clouds.joyent.rst
|
||||
/home/dan/src/salt/doc/ref/clouds/all/salt.cloud.clouds.libvirt.rst
|
||||
/home/dan/src/salt/doc/ref/clouds/all/salt.cloud.clouds.linode.rst
|
||||
/home/dan/src/salt/doc/ref/clouds/all/salt.cloud.clouds.lxc.rst
|
||||
/home/dan/src/salt/doc/ref/clouds/all/salt.cloud.clouds.oneandone.rst
|
||||
/home/dan/src/salt/doc/ref/clouds/all/salt.cloud.clouds.opennebula.rst
|
||||
/home/dan/src/salt/doc/ref/clouds/all/salt.cloud.clouds.openstack.rst
|
||||
/home/dan/src/salt/doc/ref/clouds/all/salt.cloud.clouds.packet.rst
|
||||
/home/dan/src/salt/doc/ref/clouds/all/salt.cloud.clouds.parallels.rst
|
||||
/home/dan/src/salt/doc/ref/clouds/all/salt.cloud.clouds.profitbricks.rst
|
||||
/home/dan/src/salt/doc/ref/clouds/all/salt.cloud.clouds.proxmox.rst
|
||||
/home/dan/src/salt/doc/ref/clouds/all/salt.cloud.clouds.pyrax.rst
|
||||
/home/dan/src/salt/doc/ref/clouds/all/salt.cloud.clouds.qingcloud.rst
|
||||
/home/dan/src/salt/doc/ref/clouds/all/salt.cloud.clouds.scaleway.rst
|
||||
/home/dan/src/salt/doc/ref/clouds/all/salt.cloud.clouds.softlayer.rst
|
||||
/home/dan/src/salt/doc/ref/clouds/all/salt.cloud.clouds.softlayer_hw.rst
|
||||
/home/dan/src/salt/doc/ref/clouds/all/salt.cloud.clouds.tencentcloud.rst
|
||||
/home/dan/src/salt/doc/ref/clouds/all/salt.cloud.clouds.vagrant.rst
|
||||
/home/dan/src/salt/doc/ref/clouds/all/salt.cloud.clouds.virtualbox.rst
|
||||
/home/dan/src/salt/doc/ref/clouds/all/salt.cloud.clouds.vmware.rst
|
||||
/home/dan/src/salt/doc/ref/clouds/all/salt.cloud.clouds.vultrpy.rst
|
||||
/home/dan/src/salt/doc/ref/clouds/all/salt.cloud.clouds.xen.rst
|
||||
/home/dan/src/salt/doc/ref/engines/all/salt.engines.docker_events.rst
|
||||
/home/dan/src/salt/doc/ref/engines/all/salt.engines.fluent.rst
|
||||
/home/dan/src/salt/doc/ref/engines/all/salt.engines.http_logstash.rst
|
||||
/home/dan/src/salt/doc/ref/engines/all/salt.engines.ircbot.rst
|
||||
/home/dan/src/salt/doc/ref/engines/all/salt.engines.junos_syslog.rst
|
||||
/home/dan/src/salt/doc/ref/engines/all/salt.engines.libvirt_events.rst
|
||||
/home/dan/src/salt/doc/ref/engines/all/salt.engines.logentries.rst
|
||||
/home/dan/src/salt/doc/ref/engines/all/salt.engines.logstash_engine.rst
|
||||
/home/dan/src/salt/doc/ref/engines/all/salt.engines.napalm_syslog.rst
|
||||
/home/dan/src/salt/doc/ref/engines/all/salt.engines.redis_sentinel.rst
|
||||
/home/dan/src/salt/doc/ref/engines/all/salt.engines.slack.rst
|
||||
/home/dan/src/salt/doc/ref/engines/all/salt.engines.slack_bolt_engine.rst
|
||||
/home/dan/src/salt/doc/ref/engines/all/salt.engines.sqs_events.rst
|
||||
/home/dan/src/salt/doc/ref/engines/all/salt.engines.stalekey.rst
|
||||
/home/dan/src/salt/doc/ref/executors/all/salt.executors.docker.rst
|
||||
/home/dan/src/salt/doc/ref/executors/all/salt.executors.transactional_update.rst
|
||||
/home/dan/src/salt/doc/ref/file_server/all/salt.fileserver.hgfs.rst
|
||||
/home/dan/src/salt/doc/ref/file_server/all/salt.fileserver.s3fs.rst
|
||||
/home/dan/src/salt/doc/ref/file_server/all/salt.fileserver.svnfs.rst
|
||||
/home/dan/src/salt/doc/ref/grains/all/salt.grains.chronos.rst
|
||||
/home/dan/src/salt/doc/ref/grains/all/salt.grains.cimc.rst
|
||||
/home/dan/src/salt/doc/ref/grains/all/salt.grains.esxi.rst
|
||||
/home/dan/src/salt/doc/ref/grains/all/salt.grains.fibre_channel.rst
|
||||
/home/dan/src/salt/doc/ref/grains/all/salt.grains.fx2.rst
|
||||
/home/dan/src/salt/doc/ref/grains/all/salt.grains.iscsi.rst
|
||||
/home/dan/src/salt/doc/ref/grains/all/salt.grains.junos.rst
|
||||
/home/dan/src/salt/doc/ref/grains/all/salt.grains.lvm.rst
|
||||
/home/dan/src/salt/doc/ref/grains/all/salt.grains.marathon.rst
|
||||
/home/dan/src/salt/doc/ref/grains/all/salt.grains.mdadm.rst
|
||||
/home/dan/src/salt/doc/ref/grains/all/salt.grains.mdata.rst
|
||||
/home/dan/src/salt/doc/ref/grains/all/salt.grains.metadata.rst
|
||||
/home/dan/src/salt/doc/ref/grains/all/salt.grains.metadata_gce.rst
|
||||
/home/dan/src/salt/doc/ref/grains/all/salt.grains.napalm.rst
|
||||
/home/dan/src/salt/doc/ref/grains/all/salt.grains.nvme.rst
|
||||
/home/dan/src/salt/doc/ref/grains/all/salt.grains.nxos.rst
|
||||
/home/dan/src/salt/doc/ref/grains/all/salt.grains.panos.rst
|
||||
/home/dan/src/salt/doc/ref/grains/all/salt.grains.philips_hue.rst
|
||||
/home/dan/src/salt/doc/ref/grains/all/salt.grains.smartos.rst
|
||||
/home/dan/src/salt/doc/ref/grains/all/salt.grains.ssh_sample.rst
|
||||
/home/dan/src/salt/doc/ref/grains/all/salt.grains.zfs.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.acme.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.apcups.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.apkpkg.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.aptly.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.augeas_cfg.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.aws_sqs.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.bamboohr.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.bigip.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.bluez_bluetooth.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.bower.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.bsd_shadow.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.btrfs.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.cabal.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.capirca_acl.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.ceph.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.chassis.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.cimc.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.ciscoconfparse_mod.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.cisconso.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.composer.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.consul.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.container_resource.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.csf.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.cyg.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.daemontools.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.datadog_api.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.ddns.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.deb_apache.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.deb_postgres.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.djangomod.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.dnsmasq.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.drac.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.dracr.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.drbd.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.dummyproxy_pkg.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.dummyproxy_service.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.esxcluster.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.esxi.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.freebsd_sysctl.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.freebsd_update.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.freebsdjail.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.freebsdkmod.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.freebsdpkg.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.freebsdports.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.freebsdservice.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.freezer.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.gcp_addon.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.gem.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.genesis.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.gentoo_service.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.gentoolkitmod.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.github.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.glanceng.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.glassfish.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.glusterfs.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.google_chat.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.grafana4.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.grub_legacy.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.guestfs.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.hadoop.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.haproxyconn.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.heat.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.helm.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.hg.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.icinga2.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.ifttt.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.ilo.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.influxdb08mod.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.influxdbmod.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.infoblox.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.inspectlib.entities.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.inspectlib.fsdb.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.inspectlib.kiwiproc.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.inspector.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.introspect.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.ipmi.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.jboss7.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.jboss7_cli.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.jira_mod.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.k8s.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.kapacitor.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.kerberos.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.keystone.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.keystore.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.kubeadm.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.logadm.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.lvs.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.lxc.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.lxd.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.makeconf.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.mandrill.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.marathon.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.mattermost.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.mdata.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.memcached.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.modjk.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.mongodb.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.monit.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.moosefs.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.mssql.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.msteams.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.munin.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.nagios.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.nagios_rpc.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.namecheap_domains.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.namecheap_domains_dns.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.namecheap_domains_ns.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.namecheap_ssl.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.namecheap_users.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.netbox.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.netbsd_sysctl.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.netbsdservice.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.netmiko_mod.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.netscaler.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.neutron.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.neutronng.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.nexus.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.nginx.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.nilrt_ip.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.nix.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.nova.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.nspawn.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.omapi.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.openbsd_sysctl.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.openbsdrcctl_service.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.openbsdservice.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.openscap.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.openstack_config.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.openvswitch.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.opkg.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.opsgenie.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.pagerduty_util.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.panos.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.parallels.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.pcs.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.pdbedit.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.pecl.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.pf.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.philips_hue.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.portage_config.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.postfix.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.poudriere.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.powerpath.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.purefa.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.purefb.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.pushbullet.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.pushover_notify.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.qemu_img.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.qemu_nbd.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.rallydev.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.random_org.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.rbenv.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.rebootmgr.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.redismod.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.restconf.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.riak.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.runit.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.rvm.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.s3.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.s6.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.sensehat.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.sensors.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.serverdensity_device.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.servicenow.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.slackware_service.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.smartos_imgadm.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.smartos_nictagadm.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.smartos_virt.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.smartos_vmadm.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.smtp.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.solr.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.solrcloud.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.splunk.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.splunk_search.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.statuspage.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.suse_apache.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.suse_ip.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.svn.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.swarm.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.swift.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.sysbench.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.sysrc.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.system_profiler.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.telegram.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.telemetry.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.testinframod.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.tomcat.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.trafficserver.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.transactional_update.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.travisci.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.twilio_notify.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.uptime.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.uwsgi.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.varnish.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.vault.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.vbox_guest.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.vboxmanage.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.win_lgpo.rst
|
||||
/home/dan/src/salt/doc/ref/modules/all/salt.modules.zcbuildout.rst
|
||||
/home/dan/src/salt/doc/ref/output/all/salt.output.dson.rst
|
||||
/home/dan/src/salt/doc/ref/output/all/salt.output.newline_values_only.rst
|
||||
/home/dan/src/salt/doc/ref/output/all/salt.output.no_out_quiet.rst
|
||||
/home/dan/src/salt/doc/ref/output/all/salt.output.overstatestage.rst
|
||||
/home/dan/src/salt/doc/ref/output/all/salt.output.pony.rst
|
||||
/home/dan/src/salt/doc/ref/output/all/salt.output.profile.rst
|
||||
/home/dan/src/salt/doc/ref/output/all/salt.output.virt_query.rst
|
||||
/home/dan/src/salt/doc/ref/pillar/all/salt.pillar.cmd_json.rst
|
||||
/home/dan/src/salt/doc/ref/pillar/all/salt.pillar.cmd_yaml.rst
|
||||
/home/dan/src/salt/doc/ref/pillar/all/salt.pillar.cmd_yamlex.rst
|
||||
/home/dan/src/salt/doc/ref/pillar/all/salt.pillar.cobbler.rst
|
||||
/home/dan/src/salt/doc/ref/pillar/all/salt.pillar.confidant.rst
|
||||
/home/dan/src/salt/doc/ref/pillar/all/salt.pillar.consul_pillar.rst
|
||||
/home/dan/src/salt/doc/ref/pillar/all/salt.pillar.csvpillar.rst
|
||||
/home/dan/src/salt/doc/ref/pillar/all/salt.pillar.digicert.rst
|
||||
/home/dan/src/salt/doc/ref/pillar/all/salt.pillar.django_orm.rst
|
||||
/home/dan/src/salt/doc/ref/pillar/all/salt.pillar.ec2_pillar.rst
|
||||
/home/dan/src/salt/doc/ref/pillar/all/salt.pillar.etcd_pillar.rst
|
||||
/home/dan/src/salt/doc/ref/pillar/all/salt.pillar.extra_minion_data_in_pillar.rst
|
||||
/home/dan/src/salt/doc/ref/pillar/all/salt.pillar.foreman.rst
|
||||
/home/dan/src/salt/doc/ref/pillar/all/salt.pillar.hg_pillar.rst
|
||||
/home/dan/src/salt/doc/ref/pillar/all/salt.pillar.hiera.rst
|
||||
/home/dan/src/salt/doc/ref/pillar/all/salt.pillar.http_json.rst
|
||||
/home/dan/src/salt/doc/ref/pillar/all/salt.pillar.http_yaml.rst
|
||||
/home/dan/src/salt/doc/ref/pillar/all/salt.pillar.libvirt.rst
|
||||
/home/dan/src/salt/doc/ref/pillar/all/salt.pillar.makostack.rst
|
||||
/home/dan/src/salt/doc/ref/pillar/all/salt.pillar.mongo.rst
|
||||
/home/dan/src/salt/doc/ref/pillar/all/salt.pillar.mysql.rst
|
||||
/home/dan/src/salt/doc/ref/pillar/all/salt.pillar.nacl.rst
|
||||
/home/dan/src/salt/doc/ref/pillar/all/salt.pillar.netbox.rst
|
||||
/home/dan/src/salt/doc/ref/pillar/all/salt.pillar.neutron.rst
|
||||
/home/dan/src/salt/doc/ref/pillar/all/salt.pillar.pepa.rst
|
||||
/home/dan/src/salt/doc/ref/pillar/all/salt.pillar.pillar_ldap.rst
|
||||
/home/dan/src/salt/doc/ref/pillar/all/salt.pillar.postgres.rst
|
||||
/home/dan/src/salt/doc/ref/pillar/all/salt.pillar.puppet.rst
|
||||
/home/dan/src/salt/doc/ref/pillar/all/salt.pillar.reclass_adapter.rst
|
||||
/home/dan/src/salt/doc/ref/pillar/all/salt.pillar.redismod.rst
|
||||
/home/dan/src/salt/doc/ref/pillar/all/salt.pillar.rethinkdb_pillar.rst
|
||||
/home/dan/src/salt/doc/ref/pillar/all/salt.pillar.s3.rst
|
||||
/home/dan/src/salt/doc/ref/pillar/all/salt.pillar.saltclass.rst
|
||||
/home/dan/src/salt/doc/ref/pillar/all/salt.pillar.sql_base.rst
|
||||
/home/dan/src/salt/doc/ref/pillar/all/salt.pillar.sqlcipher.rst
|
||||
/home/dan/src/salt/doc/ref/pillar/all/salt.pillar.sqlite3.rst
|
||||
/home/dan/src/salt/doc/ref/pillar/all/salt.pillar.stack.rst
|
||||
/home/dan/src/salt/doc/ref/pillar/all/salt.pillar.svn_pillar.rst
|
||||
/home/dan/src/salt/doc/ref/pillar/all/salt.pillar.varstack_pillar.rst
|
||||
/home/dan/src/salt/doc/ref/pillar/all/salt.pillar.vault.rst
|
||||
/home/dan/src/salt/doc/ref/pillar/all/salt.pillar.venafi.rst
|
||||
/home/dan/src/salt/doc/ref/pillar/all/salt.pillar.virtkey.rst
|
||||
/home/dan/src/salt/doc/ref/pillar/all/salt.pillar.vmware_pillar.rst
|
||||
/home/dan/src/salt/doc/ref/proxy/all/salt.proxy.arista_pyeapi.rst
|
||||
/home/dan/src/salt/doc/ref/proxy/all/salt.proxy.chronos.rst
|
||||
/home/dan/src/salt/doc/ref/proxy/all/salt.proxy.cimc.rst
|
||||
/home/dan/src/salt/doc/ref/proxy/all/salt.proxy.cisconso.rst
|
||||
/home/dan/src/salt/doc/ref/proxy/all/salt.proxy.docker.rst
|
||||
/home/dan/src/salt/doc/ref/proxy/all/salt.proxy.dummy.rst
|
||||
/home/dan/src/salt/doc/ref/proxy/all/salt.proxy.esxcluster.rst
|
||||
/home/dan/src/salt/doc/ref/proxy/all/salt.proxy.esxdatacenter.rst
|
||||
/home/dan/src/salt/doc/ref/proxy/all/salt.proxy.esxi.rst
|
||||
/home/dan/src/salt/doc/ref/proxy/all/salt.proxy.esxvm.rst
|
||||
/home/dan/src/salt/doc/ref/proxy/all/salt.proxy.fx2.rst
|
||||
/home/dan/src/salt/doc/ref/proxy/all/salt.proxy.junos.rst
|
||||
/home/dan/src/salt/doc/ref/proxy/all/salt.proxy.marathon.rst
|
||||
/home/dan/src/salt/doc/ref/proxy/all/salt.proxy.napalm.rst
|
||||
/home/dan/src/salt/doc/ref/proxy/all/salt.proxy.netmiko_px.rst
|
||||
/home/dan/src/salt/doc/ref/proxy/all/salt.proxy.nxos.rst
|
||||
/home/dan/src/salt/doc/ref/proxy/all/salt.proxy.nxos_api.rst
|
||||
/home/dan/src/salt/doc/ref/proxy/all/salt.proxy.panos.rst
|
||||
/home/dan/src/salt/doc/ref/proxy/all/salt.proxy.philips_hue.rst
|
||||
/home/dan/src/salt/doc/ref/proxy/all/salt.proxy.rest_sample.rst
|
||||
/home/dan/src/salt/doc/ref/proxy/all/salt.proxy.restconf.rst
|
||||
/home/dan/src/salt/doc/ref/proxy/all/salt.proxy.ssh_sample.rst
|
||||
/home/dan/src/salt/doc/ref/proxy/all/salt.proxy.vcenter.rst
|
||||
/home/dan/src/salt/doc/ref/queues/all/salt.queues.pgjsonb_queue.rst
|
||||
/home/dan/src/salt/doc/ref/queues/all/salt.queues.sqlite_queue.rst
|
||||
/home/dan/src/salt/doc/ref/renderers/all/salt.renderers.aws_kms.rst
|
||||
/home/dan/src/salt/doc/ref/renderers/all/salt.renderers.cheetah.rst
|
||||
/home/dan/src/salt/doc/ref/renderers/all/salt.renderers.dson.rst
|
||||
/home/dan/src/salt/doc/ref/renderers/all/salt.renderers.genshi.rst
|
||||
/home/dan/src/salt/doc/ref/renderers/all/salt.renderers.hjson.rst
|
||||
/home/dan/src/salt/doc/ref/renderers/all/salt.renderers.json5.rst
|
||||
/home/dan/src/salt/doc/ref/renderers/all/salt.renderers.pass.rst
|
||||
/home/dan/src/salt/doc/ref/renderers/all/salt.renderers.pydsl.rst
|
||||
/home/dan/src/salt/doc/ref/renderers/all/salt.renderers.wempy.rst
|
||||
/home/dan/src/salt/doc/ref/returners/all/salt.returners.appoptics_return.rst
|
||||
/home/dan/src/salt/doc/ref/returners/all/salt.returners.carbon_return.rst
|
||||
/home/dan/src/salt/doc/ref/returners/all/salt.returners.cassandra_cql_return.rst
|
||||
/home/dan/src/salt/doc/ref/returners/all/salt.returners.cassandra_return.rst
|
||||
/home/dan/src/salt/doc/ref/returners/all/salt.returners.couchbase_return.rst
|
||||
/home/dan/src/salt/doc/ref/returners/all/salt.returners.couchdb_return.rst
|
||||
/home/dan/src/salt/doc/ref/returners/all/salt.returners.django_return.rst
|
||||
/home/dan/src/salt/doc/ref/returners/all/salt.returners.elasticsearch_return.rst
|
||||
/home/dan/src/salt/doc/ref/returners/all/salt.returners.etcd_return.rst
|
||||
/home/dan/src/salt/doc/ref/returners/all/salt.returners.influxdb_return.rst
|
||||
/home/dan/src/salt/doc/ref/returners/all/salt.returners.kafka_return.rst
|
||||
/home/dan/src/salt/doc/ref/returners/all/salt.returners.librato_return.rst
|
||||
/home/dan/src/salt/doc/ref/returners/all/salt.returners.mattermost_returner.rst
|
||||
/home/dan/src/salt/doc/ref/returners/all/salt.returners.memcache_return.rst
|
||||
/home/dan/src/salt/doc/ref/returners/all/salt.returners.mongo_future_return.rst
|
||||
/home/dan/src/salt/doc/ref/returners/all/salt.returners.mongo_return.rst
|
||||
/home/dan/src/salt/doc/ref/returners/all/salt.returners.mysql.rst
|
||||
/home/dan/src/salt/doc/ref/returners/all/salt.returners.nagios_nrdp_return.rst
|
||||
/home/dan/src/salt/doc/ref/returners/all/salt.returners.odbc.rst
|
||||
/home/dan/src/salt/doc/ref/returners/all/salt.returners.pushover_returner.rst
|
||||
/home/dan/src/salt/doc/ref/returners/all/salt.returners.redis_return.rst
|
||||
/home/dan/src/salt/doc/ref/returners/all/salt.returners.sentry_return.rst
|
||||
/home/dan/src/salt/doc/ref/returners/all/salt.returners.slack_returner.rst
|
||||
/home/dan/src/salt/doc/ref/returners/all/salt.returners.slack_webhook_return.rst
|
||||
/home/dan/src/salt/doc/ref/returners/all/salt.returners.sms_return.rst
|
||||
/home/dan/src/salt/doc/ref/returners/all/salt.returners.smtp_return.rst
|
||||
/home/dan/src/salt/doc/ref/returners/all/salt.returners.splunk.rst
|
||||
/home/dan/src/salt/doc/ref/returners/all/salt.returners.sqlite3_return.rst
|
||||
/home/dan/src/salt/doc/ref/returners/all/salt.returners.telegram_return.rst
|
||||
/home/dan/src/salt/doc/ref/returners/all/salt.returners.xmpp_return.rst
|
||||
/home/dan/src/salt/doc/ref/returners/all/salt.returners.zabbix_return.rst
|
||||
/home/dan/src/salt/doc/ref/roster/all/salt.roster.cloud.rst
|
||||
/home/dan/src/salt/doc/ref/roster/all/salt.roster.clustershell.rst
|
||||
/home/dan/src/salt/doc/ref/roster/all/salt.roster.terraform.rst
|
||||
/home/dan/src/salt/doc/ref/runners/all/salt.runners.asam.rst
|
||||
/home/dan/src/salt/doc/ref/runners/all/salt.runners.bgp.rst
|
||||
/home/dan/src/salt/doc/ref/runners/all/salt.runners.cloud.rst
|
||||
/home/dan/src/salt/doc/ref/runners/all/salt.runners.ddns.rst
|
||||
/home/dan/src/salt/doc/ref/runners/all/salt.runners.digicertapi.rst
|
||||
/home/dan/src/salt/doc/ref/runners/all/salt.runners.drac.rst
|
||||
/home/dan/src/salt/doc/ref/runners/all/salt.runners.f5.rst
|
||||
/home/dan/src/salt/doc/ref/runners/all/salt.runners.launchd.rst
|
||||
/home/dan/src/salt/doc/ref/runners/all/salt.runners.lxc.rst
|
||||
/home/dan/src/salt/doc/ref/runners/all/salt.runners.mattermost.rst
|
||||
/home/dan/src/salt/doc/ref/runners/all/salt.runners.nacl.rst
|
||||
/home/dan/src/salt/doc/ref/runners/all/salt.runners.pagerduty.rst
|
||||
/home/dan/src/salt/doc/ref/runners/all/salt.runners.pkg.rst
|
||||
/home/dan/src/salt/doc/ref/runners/all/salt.runners.smartos_vmadm.rst
|
||||
/home/dan/src/salt/doc/ref/runners/all/salt.runners.spacewalk.rst
|
||||
/home/dan/src/salt/doc/ref/runners/all/salt.runners.thin.rst
|
||||
/home/dan/src/salt/doc/ref/runners/all/salt.runners.vault.rst
|
||||
/home/dan/src/salt/doc/ref/runners/all/salt.runners.venafiapi.rst
|
||||
/home/dan/src/salt/doc/ref/runners/all/salt.runners.virt.rst
|
||||
/home/dan/src/salt/doc/ref/runners/all/salt.runners.vistara.rst
|
||||
/home/dan/src/salt/doc/ref/sdb/all/salt.sdb.cache.rst
|
||||
/home/dan/src/salt/doc/ref/sdb/all/salt.sdb.confidant.rst
|
||||
/home/dan/src/salt/doc/ref/sdb/all/salt.sdb.consul.rst
|
||||
/home/dan/src/salt/doc/ref/sdb/all/salt.sdb.couchdb.rst
|
||||
/home/dan/src/salt/doc/ref/sdb/all/salt.sdb.etcd_db.rst
|
||||
/home/dan/src/salt/doc/ref/sdb/all/salt.sdb.keyring_db.rst
|
||||
/home/dan/src/salt/doc/ref/sdb/all/salt.sdb.memcached.rst
|
||||
/home/dan/src/salt/doc/ref/sdb/all/salt.sdb.redis_sdb.rst
|
||||
/home/dan/src/salt/doc/ref/sdb/all/salt.sdb.rest.rst
|
||||
/home/dan/src/salt/doc/ref/sdb/all/salt.sdb.sqlite3.rst
|
||||
/home/dan/src/salt/doc/ref/sdb/all/salt.sdb.tism.rst
|
||||
/home/dan/src/salt/doc/ref/sdb/all/salt.sdb.vault.rst
|
||||
/home/dan/src/salt/doc/ref/serializers/all/salt.serializers.configparser.rst
|
||||
/home/dan/src/salt/doc/ref/serializers/all/salt.serializers.keyvalue.rst
|
||||
/home/dan/src/salt/doc/ref/serializers/all/salt.serializers.plist.rst
|
||||
/home/dan/src/salt/doc/ref/serializers/all/salt.serializers.python.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.acme.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.alias.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.alternatives.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.aptpkg.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.artifactory.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.augeas.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.aws_sqs.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.bigip.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.blockdev.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.boto3_elasticache.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.boto3_elasticsearch.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.boto3_route53.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.boto3_sns.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.boto_apigateway.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.boto_asg.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.boto_cfn.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.boto_cloudfront.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.boto_cloudtrail.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.boto_cloudwatch_alarm.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.boto_cloudwatch_event.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.boto_cognitoidentity.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.boto_datapipeline.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.boto_dynamodb.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.boto_ec2.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.boto_elasticache.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.boto_elasticsearch_domain.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.boto_elb.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.boto_elbv2.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.boto_iam.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.boto_iam_role.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.boto_iot.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.boto_kinesis.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.boto_kms.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.boto_lambda.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.boto_lc.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.boto_rds.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.boto_route53.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.boto_s3.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.boto_s3_bucket.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.boto_secgroup.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.boto_sns.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.boto_sqs.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.boto_vpc.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.bower.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.btrfs.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.cabal.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.ceph.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.chef.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.chronos_job.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.cimc.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.cisconso.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.composer.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.consul.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.cryptdev.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.csf.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.cyg.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.ddns.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.debconfmod.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.dellchassis.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.docker_container.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.docker_image.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.docker_network.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.docker_volume.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.drac.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.dvs.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.elasticsearch.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.elasticsearch_index.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.elasticsearch_index_template.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.eselect.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.esxcluster.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.esxdatacenter.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.esxi.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.esxvm.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.ethtool.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.gem.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.github.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.glance_image.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.glassfish.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.glusterfs.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.gnomedesktop.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.grafana.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.grafana4_dashboard.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.grafana4_datasource.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.grafana4_org.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.grafana4_user.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.grafana_dashboard.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.grafana_datasource.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.heat.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.helm.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.hg.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.icinga2.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.ifttt.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.incron.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.influxdb08_database.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.influxdb08_user.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.influxdb_continuous_query.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.influxdb_database.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.influxdb_retention_policy.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.influxdb_user.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.infoblox_a.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.infoblox_cname.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.infoblox_host_record.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.infoblox_range.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.ipmi.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.jboss7.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.jenkins.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.junos.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.kapacitor.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.kernelpkg.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.keystone.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.keystone_domain.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.keystone_endpoint.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.keystone_group.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.keystone_project.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.keystone_role.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.keystone_role_grant.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.keystone_service.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.keystone_user.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.keystore.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.kubernetes.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.layman.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.ldap.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.libcloud_dns.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.libcloud_loadbalancer.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.libcloud_storage.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.logadm.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.lvs_server.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.lvs_service.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.lxc.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.lxd.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.lxd_container.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.lxd_image.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.lxd_profile.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.marathon_app.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.memcached.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.modjk.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.modjk_worker.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.mongodb_database.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.mongodb_user.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.monit.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.mssql_database.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.mssql_login.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.mssql_role.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.mssql_user.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.msteams.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.mysql_database.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.mysql_grants.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.mysql_query.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.mysql_user.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.net_napalm_yang.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.neutron_network.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.neutron_secgroup.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.neutron_secgroup_rule.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.neutron_subnet.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.nexus.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.nfs_export.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.npm.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.nxos.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.nxos_upgrade.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.openstack_config.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.openvswitch_bridge.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.openvswitch_db.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.openvswitch_port.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.opsgenie.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.pagerduty.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.pagerduty_escalation_policy.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.pagerduty_schedule.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.pagerduty_service.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.pagerduty_user.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.panos.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.pbm.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.pcs.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.pdbedit.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.pecl.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.portage_config.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.ports.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.powerpath.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.probes.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.pushover.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.pyrax_queues.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.rbac_solaris.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.rbenv.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.rdp.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.redismod.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.restconf.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.rsync.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.rvm.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.serverdensity_device.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.slack.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.smartos.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.smtp.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.snapper.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.solrcloud.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.splunk.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.splunk_search.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.sqlite3.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.statuspage.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.supervisord.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.svn.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.sysrc.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.telemetry_alert.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.testinframod.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.tomcat.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.trafficserver.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.tuned.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.vagrant.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.vault.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.vbox_guest.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.victorops.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.virt.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.virtualenv_mod.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.webutil.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.wordpress.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.xml.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.xmpp.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.zabbix_action.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.zabbix_host.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.zabbix_hostgroup.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.zabbix_mediatype.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.zabbix_template.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.zabbix_user.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.zabbix_usergroup.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.zabbix_usermacro.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.zabbix_valuemap.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.zcbuildout.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.zenoss.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.zfs.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.zk_concurrency.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.zone.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.zookeeper.rst
|
||||
/home/dan/src/salt/doc/ref/states/all/salt.states.zpool.rst
@@ -1,5 +0,0 @@
============================
salt.log_handlers.fluent_mod
============================

.. automodule:: salt.log_handlers.fluent_mod
@@ -1,5 +0,0 @@
===============================
salt.log_handlers.log4mongo_mod
===============================

.. automodule:: salt.log_handlers.log4mongo_mod
@@ -1,5 +0,0 @@
==============================
salt.log_handlers.logstash_mod
==============================

.. automodule:: salt.log_handlers.logstash_mod
@@ -1,5 +0,0 @@
============================
salt.log_handlers.sentry_mod
============================

.. automodule:: salt.log_handlers.sentry_mod
@@ -30,6 +30,7 @@ execution modules
ansiblegate
apache
apf
aptpkg
archive
arista_pyeapi
artifactory
@@ -41,6 +42,7 @@ execution modules
bridge
cassandra_cql
celery
chef
chocolatey
chronos
chroot
@@ -61,8 +63,6 @@ execution modules
dig
disk
dnsutil
dockercompose
dockermod
dpkg_lowpkg
environ
etcd_mod
@@ -160,6 +160,7 @@ execution modules
pkgin
pkgng
pkgutil
postgres
proxy
ps
publish
@@ -1,5 +0,0 @@
salt.modules.dockercompose
==========================

.. automodule:: salt.modules.dockercompose
:members:
@@ -1,6 +0,0 @@
salt.modules.dockermod
======================

.. automodule:: salt.modules.dockermod
:members:
:exclude-members: cp, freeze, unfreeze
@@ -1,5 +0,0 @@
salt.modules.inspectlib.collector
=================================

.. automodule:: salt.modules.inspectlib.collector
:members:
@@ -1,5 +0,0 @@
salt.modules.inspectlib.dbhandle
================================

.. automodule:: salt.modules.inspectlib.dbhandle
:members:
@@ -1,5 +0,0 @@
salt.modules.inspectlib.exceptions
==================================

.. automodule:: salt.modules.inspectlib.exceptions
:members:
@@ -1,5 +0,0 @@
salt.modules.inspectlib.query
=============================

.. automodule:: salt.modules.inspectlib.query
:members:
@@ -1,18 +0,0 @@
salt.modules.inspectlib package
===============================

Submodules
----------

.. toctree::

salt.modules.inspectlib.collector
salt.modules.inspectlib.dbhandle
salt.modules.inspectlib.exceptions
salt.modules.inspectlib.query

Module contents
---------------

.. automodule:: salt.modules.inspectlib
:members:
@@ -1,6 +1,6 @@
=====================
=====================
salt.modules.win_lgpo
=====================

.. automodule:: salt.modules.win_lgpo
:members:
:members:
@@ -14,3 +14,5 @@ pillar modules
git_pillar
gpg
nodegroups
postgres
sql_base
@@ -9,4 +9,3 @@ queue modules
.. autosummary::
:toctree:
:template: autosummary.rst.tmpl

@@ -13,6 +13,7 @@ serializer modules
:toctree:
:template: autosummary.rst.tmpl

configparser
json
msgpack
tomlmod
@@ -0,0 +1,6 @@
=============================
salt.serializers.configparser
=============================

.. automodule:: salt.serializers.configparser
:members:
@@ -80,6 +80,7 @@ state modules
postgres_tablespace
postgres_user
process
proxy
pyenv
quota
rabbitmq_cluster
@@ -581,14 +581,13 @@ if so, the old container is stopped and destroyed, and the temporary container
is renamed and started.

Salt still needs to translate arguments into the format which docker-py
expects, but if it does not properly do so, the :ref:`skip_translate
<docker-container-running-skip-translate>` argument can be used to skip input
translation on an argument-by-argument basis, and you can then format your SLS
file to pass the data in the format that the docker-py expects. This allows you
to work around any changes in Docker's API or issues with the input
translation, and continue to manage your Docker containers using Salt. Read the
documentation for :ref:`skip_translate
<docker-container-running-skip-translate>` for more information.
expects, but if it does not properly do so, the REMOVED DURING MODULE
MIGRATION argument can be used to skip input translation on an
argument-by-argument basis, and you can then format your SLS file to pass the
data in the format that the docker-py expects. This allows you to work around
any changes in Docker's API or issues with the input translation, and continue
to manage your Docker containers using Salt. Read the documentation for REMOVED
DURING MODULE MIGRATION for more information.

.. note::
When running the :py:func:`docker_container.running
@@ -54,7 +54,7 @@ Lots of Docker Improvements
Much Improved Support for Docker Networking
*******************************************

The :py:func:`docker_network.present <salt.states.docker_network.present>`
The REMOVED DURING MODULE MIGRATION
state has undergone a full rewrite, which includes the following improvements:

Full API Support for Network Management
@@ -70,7 +70,7 @@ Custom Subnets
**************

Custom subnets can now be configured. Both IPv4 and mixed IPv4/IPv6 networks
are supported. See :ref:`here <salt-states-docker-network-present-ipam>` for
are supported. See REMOVED DURING MODULE MIGRATION for
more information.

Network Configuration in :py:func:`docker_container.running <salt.states.docker_container.running>` States
@@ -78,7 +78,7 @@ Network Configuration in :py:func:`docker_container.running <salt.states.docker_

A long-requested feature has finally been added! It is now possible to
configure static IPv4/IPv6 addresses, as well as links and labels. See
:ref:`here <salt-states-docker-container-network-management>` for more
REMOVED DURING MODULE MIGRATION for more
information.

.. note::
File diff suppressed because it is too large
File diff suppressed because it is too large
102
salt/modules/dummyproxy_pkg.py
Normal file
|
@ -0,0 +1,102 @@
|
|||
"""
|
||||
Package support for the dummy proxy used by the test suite
|
||||
"""
|
||||
|
||||
import logging
|
||||
|
||||
import salt.utils.data
|
||||
import salt.utils.platform
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
# Define the module's virtual name
|
||||
__virtualname__ = "pkg"
|
||||
|
||||
|
||||
def __virtual__():
|
||||
"""
|
||||
Only work on systems that are a proxy minion
|
||||
"""
|
||||
try:
|
||||
if salt.utils.platform.is_proxy() and __opts__["proxy"]["proxytype"] == "dummy":
|
||||
return __virtualname__
|
||||
except KeyError:
|
||||
return (
|
||||
False,
|
||||
"The dummyproxy_package execution module failed to load. Check "
|
||||
"the proxy key in pillar or /etc/salt/proxy.",
|
||||
)
|
||||
|
||||
return (
|
||||
False,
|
||||
"The dummyproxy_package execution module failed to load: only works "
|
||||
"on a dummy proxy minion.",
|
||||
)
|
||||
|
||||
|
||||
def list_pkgs(versions_as_list=False, **kwargs):
|
||||
return __proxy__["dummy.package_list"]()
|
||||
|
||||
|
||||
def install(name=None, refresh=False, fromrepo=None, pkgs=None, sources=None, **kwargs):
|
||||
return __proxy__["dummy.package_install"](name, **kwargs)
|
||||
|
||||
|
||||
def remove(name=None, pkgs=None, **kwargs):
|
||||
return __proxy__["dummy.package_remove"](name)
|
||||
|
||||
|
||||
def version(*names, **kwargs):
|
||||
"""
|
||||
Returns a string representing the package version or an empty string if not
|
||||
installed. If more than one package name is specified, a dict of
|
||||
name/version pairs is returned.
|
||||
|
||||
CLI Example:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
salt '*' pkg.version <package name>
|
||||
salt '*' pkg.version <package1> <package2> <package3> ...
|
||||
"""
|
||||
if len(names) == 1:
|
||||
vers = __proxy__["dummy.package_status"](names[0])
|
||||
return vers[names[0]]
|
||||
else:
|
||||
results = {}
|
||||
for n in names:
|
||||
vers = __proxy__["dummy.package_status"](n)
|
||||
results.update(vers)
|
||||
return results
|
||||
|
||||
|
||||
def upgrade(
|
||||
name=None, pkgs=None, refresh=True, skip_verify=True, normalize=True, **kwargs
|
||||
):
|
||||
old = __proxy__["dummy.package_list"]()
|
||||
new = __proxy__["dummy.uptodate"]()
|
||||
pkg_installed = __proxy__["dummy.upgrade"]()
|
||||
ret = salt.utils.data.compare_dicts(old, pkg_installed)
|
||||
return ret
|
||||
|
||||
|
||||
def installed(
|
||||
name,
|
||||
version=None,
|
||||
refresh=False,
|
||||
fromrepo=None,
|
||||
skip_verify=False,
|
||||
pkgs=None,
|
||||
sources=None,
|
||||
**kwargs
|
||||
):
|
||||
|
||||
p = __proxy__["dummy.package_status"](name)
|
||||
if version is None:
|
||||
if "ret" in p:
|
||||
return str(p["ret"])
|
||||
else:
|
||||
return True
|
||||
else:
|
||||
if p is not None:
|
||||
return version == str(p)
|
159
salt/modules/dummyproxy_service.py
Normal file
|
@ -0,0 +1,159 @@
|
|||
"""
|
||||
Provide the service module for the dummy proxy used in integration tests
|
||||
"""
|
||||
|
||||
import logging
|
||||
|
||||
import salt.utils.platform
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
__func_alias__ = {"list_": "list"}
|
||||
|
||||
|
||||
# Define the module's virtual name
|
||||
__virtualname__ = "service"
|
||||
|
||||
|
||||
def __virtual__():
|
||||
"""
|
||||
Only work on systems that are a proxy minion
|
||||
"""
|
||||
try:
|
||||
if salt.utils.platform.is_proxy() and __opts__["proxy"]["proxytype"] == "dummy":
|
||||
return __virtualname__
|
||||
except KeyError:
|
||||
return (
|
||||
False,
|
||||
"The dummyproxy_service execution module failed to load. Check "
|
||||
"the proxy key in pillar or /etc/salt/proxy.",
|
||||
)
|
||||
|
||||
return (
|
||||
False,
|
||||
"The dummyproxy_service execution module failed to load: only works "
|
||||
"on the integration testsuite dummy proxy minion.",
|
||||
)
|
||||
|
||||
|
||||
def get_all():
|
||||
"""
|
||||
Return a list of all available services
|
||||
|
||||
.. versionadded:: 2016.11.3
|
||||
|
||||
CLI Example:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
salt '*' service.get_all
|
||||
"""
|
||||
proxy_fn = "dummy.service_list"
|
||||
return __proxy__[proxy_fn]()
|
||||
|
||||
|
||||
def list_():
|
||||
"""
|
||||
Return a list of all available services.
|
||||
|
||||
.. versionadded:: 2016.11.3
|
||||
|
||||
CLI Example:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
salt '*' service.list
|
||||
"""
|
||||
return get_all()
|
||||
|
||||
|
||||
def start(name, sig=None):
|
||||
"""
|
||||
Start the specified service on the dummy
|
||||
|
||||
.. versionadded:: 2016.11.3
|
||||
|
||||
CLI Example:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
salt '*' service.start <service name>
|
||||
"""
|
||||
|
||||
proxy_fn = "dummy.service_start"
|
||||
return __proxy__[proxy_fn](name)
|
||||
|
||||
|
||||
def stop(name, sig=None):
|
||||
"""
|
||||
Stop the specified service on the dummy
|
||||
|
||||
.. versionadded:: 2016.11.3
|
||||
|
||||
CLI Example:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
salt '*' service.stop <service name>
|
||||
"""
|
||||
proxy_fn = "dummy.service_stop"
|
||||
return __proxy__[proxy_fn](name)
|
||||
|
||||
|
||||
def restart(name, sig=None):
|
||||
"""
|
||||
Restart the specified service with dummy.
|
||||
|
||||
.. versionadded:: 2016.11.3
|
||||
|
||||
CLI Example:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
salt '*' service.restart <service name>
|
||||
"""
|
||||
|
||||
proxy_fn = "dummy.service_restart"
|
||||
return __proxy__[proxy_fn](name)
|
||||
|
||||
|
||||
def status(name, sig=None):
|
||||
"""
|
||||
Return the status for a service via dummy, returns a bool
|
||||
whether the service is running.
|
||||
|
||||
.. versionadded:: 2016.11.3
|
||||
|
||||
CLI Example:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
salt '*' service.status <service name>
|
||||
"""
|
||||
|
||||
proxy_fn = "dummy.service_status"
|
||||
resp = __proxy__[proxy_fn](name)
|
||||
if resp["comment"] == "stopped":
|
||||
return False
|
||||
if resp["comment"] == "running":
|
||||
return True
|
||||
|
||||
|
||||
def running(name, sig=None):
|
||||
"""
|
||||
Return whether this service is running.
|
||||
|
||||
.. versionadded:: 2016.11.3
|
||||
|
||||
"""
|
||||
return status(name)
|
||||
|
||||
|
||||
def enabled(name, sig=None):
|
||||
"""
|
||||
Only the 'redbull' service is 'enabled' in the test
|
||||
|
||||
.. versionadded:: 2016.11.3
|
||||
|
||||
"""
|
||||
return name == "redbull"
|
|
@ -1,7 +1,7 @@
|
|||
"""
|
||||
Utility functions for use with or in SLS files
|
||||
"""
|
||||
|
||||
import logging
|
||||
import os
|
||||
import posixpath
|
||||
import textwrap
|
||||
|
@ -16,6 +16,9 @@ import salt.utils.path
|
|||
CONTEXT_BASE = "slsutil"
|
||||
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def update(dest, upd, recursive_update=True, merge_lists=False):
|
||||
"""
|
||||
Merge ``upd`` recursively into ``dest``
|
||||
|
@ -159,31 +162,36 @@ def renderer(path=None, string=None, default_renderer="jinja|yaml", **kwargs):
|
|||
salt '*' slsutil.renderer string='Inline template! {{ saltenv }}'
|
||||
salt '*' slsutil.renderer string='Hello, {{ name }}.' name='world'
|
||||
"""
|
||||
if not path and not string:
|
||||
raise salt.exceptions.SaltInvocationError("Must pass either path or string")
|
||||
try:
|
||||
if not path and not string:
|
||||
raise salt.exceptions.SaltInvocationError("Must pass either path or string")
|
||||
|
||||
if path and string:
|
||||
raise salt.exceptions.SaltInvocationError("Must not pass both path and string")
|
||||
if path and string:
|
||||
raise salt.exceptions.SaltInvocationError(
|
||||
"Must not pass both path and string"
|
||||
)
|
||||
|
||||
renderers = salt.loader.render(__opts__, __salt__)
|
||||
renderers = salt.loader.render(__opts__, __salt__)
|
||||
|
||||
if path:
|
||||
path_or_string = __salt__["cp.get_url"](
|
||||
path, saltenv=kwargs.get("saltenv", "base")
|
||||
if path:
|
||||
path_or_string = __salt__["cp.get_url"](
|
||||
path, saltenv=kwargs.get("saltenv", "base")
|
||||
)
|
||||
if string:
|
||||
path_or_string = ":string:"
|
||||
kwargs["input_data"] = string
|
||||
|
||||
ret = salt.template.compile_template(
|
||||
path_or_string,
|
||||
renderers,
|
||||
default_renderer,
|
||||
__opts__["renderer_blacklist"],
|
||||
__opts__["renderer_whitelist"],
|
||||
**kwargs,
|
||||
)
|
||||
if string:
|
||||
path_or_string = ":string:"
|
||||
kwargs["input_data"] = string
|
||||
|
||||
ret = salt.template.compile_template(
|
||||
path_or_string,
|
||||
renderers,
|
||||
default_renderer,
|
||||
__opts__["renderer_blacklist"],
|
||||
__opts__["renderer_whitelist"],
|
||||
**kwargs,
|
||||
)
|
||||
return ret.read() if __utils__["stringio.is_readable"](ret) else ret
|
||||
return ret.read() if __utils__["stringio.is_readable"](ret) else ret
|
||||
except Exception as exc:  # pylint: disable=broad-except
log.error("Failed to render %s: %s", path or "<string>", exc, exc_info=True)
raise
|
||||
|
||||
|
||||
def _get_serialize_fn(serializer, fn_name):
|
||||
|
|
84
salt/pillar/extra_minion_data_in_pillar.py
Normal file
|
@ -0,0 +1,84 @@
|
|||
"""
|
||||
Add all extra minion data to the pillar.
|
||||
|
||||
:codeauthor: Alexandru.Bleotu@morganstanley.ms.com
|
||||
|
||||
One can filter on the keys to include in the pillar by using the ``include``
|
||||
parameter. For subkeys the ':' notation is supported (i.e. 'key:subkey')
|
||||
The keyword ``<all>`` includes all keys.
|
||||
|
||||
Complete example in etc/salt/master
|
||||
=====================================
|
||||
|
||||
.. code-block:: none
|
||||
|
||||
ext_pillar:
|
||||
- extra_minion_data_in_pillar:
|
||||
include: *
|
||||
|
||||
ext_pillar:
|
||||
- extra_minion_data_in_pillar:
|
||||
include:
|
||||
- key1
|
||||
- key2:subkey2
|
||||
|
||||
ext_pillar:
|
||||
- extra_minion_data_in_pillar:
|
||||
include: <all>
|
||||
|
||||
"""
|
||||
import logging
|
||||
|
||||
# Set up logging
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
__virtualname__ = "extra_minion_data_in_pillar"
|
||||
|
||||
|
||||
def __virtual__():
|
||||
return __virtualname__
|
||||
|
||||
|
||||
def ext_pillar(minion_id, pillar, include, extra_minion_data=None):
|
||||
def get_subtree(key, source_dict):
|
||||
"""
|
||||
Returns a subtree corresponding to the specified key.
|
||||
|
||||
key
|
||||
Key. Supports the ':' notation (e.g. 'key:subkey')
|
||||
|
||||
source_dict
|
||||
Source dictionary
|
||||
"""
|
||||
ret_dict = aux_dict = {}
|
||||
subtree = source_dict
|
||||
subkeys = key.split(":")
|
||||
# Build an empty intermediate subtree following the subkeys
|
||||
for subkey in subkeys[:-1]:
|
||||
# The result will be built in aux_dict
|
||||
aux_dict[subkey] = {}
|
||||
aux_dict = aux_dict[subkey]
|
||||
if subkey not in subtree:
|
||||
# The subkey is not in the source dictionary
|
||||
return {}
|
||||
subtree = subtree[subkey]
|
||||
if subkeys[-1] not in subtree:
|
||||
# Final subkey is not in subtree
|
||||
return {}
|
||||
# Assign the subtree value to the result
|
||||
aux_dict[subkeys[-1]] = subtree[subkeys[-1]]
|
||||
return ret_dict
|
||||
|
||||
log.trace("minion_id = %s", minion_id)
|
||||
log.trace("include = %s", include)
|
||||
log.trace("extra_minion_data = %s", extra_minion_data)
|
||||
data = {}
|
||||
|
||||
if not extra_minion_data:
|
||||
return {}
|
||||
if include in ["*", "<all>"]:
|
||||
return extra_minion_data
|
||||
data = {}
|
||||
for key in include:
|
||||
data.update(get_subtree(key, extra_minion_data))
|
||||
return data
|
487
salt/pillar/sql_base.py
Normal file
|
@ -0,0 +1,487 @@
|
|||
"""
|
||||
Retrieve Pillar data by doing a SQL query
|
||||
|
||||
This module is not meant to be used directly as an ext_pillar.
|
||||
It is a place to put code common to PEP 249 compliant SQL database adapters.
|
||||
It exposes a python ABC that can be subclassed for new database providers.
|
||||
|
||||
:maturity: new
|
||||
:platform: all
|
||||
|
||||
Theory of sql_base ext_pillar
|
||||
=============================
|
||||
|
||||
Ok, here's the theory for how this works...
|
||||
|
||||
- First, any non-keyword args are processed in order.
|
||||
- Then, remaining keywords are processed.
|
||||
|
||||
We do this so that it's backward compatible with older configs.
|
||||
Keyword arguments are sorted before being appended, so that they're predictable,
|
||||
but they will always be applied last so overall it's moot.
|
||||
|
||||
For each of those items we process, it depends on the object type:
|
||||
|
||||
- Strings are executed as is and the pillar depth is determined by the number
|
||||
of fields returned.
|
||||
- A list has the first entry used as the query, the second as the pillar depth.
|
||||
- A mapping uses the keys "query" and "depth" as the tuple
|
||||
|
||||
You can retrieve as many fields as you like, how they get used depends on the
|
||||
exact settings.
|
||||
|
||||
Configuring a sql_base ext_pillar
|
||||
=================================
|
||||
|
||||
The sql_base ext_pillar cannot be used directly, but shares query configuration
|
||||
with its implementations. These examples use a fake 'sql_base' adapter, which
|
||||
should be replaced with the name of the adapter you are using.
|
||||
|
||||
A list of queries can be passed in
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
ext_pillar:
|
||||
- sql_base:
|
||||
- "SELECT pillar,value FROM pillars WHERE minion_id = %s"
|
||||
- "SELECT pillar,value FROM more_pillars WHERE minion_id = %s"
|
||||
|
||||
Or you can pass in a mapping
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
ext_pillar:
|
||||
- sql_base:
|
||||
main: "SELECT pillar,value FROM pillars WHERE minion_id = %s"
|
||||
extras: "SELECT pillar,value FROM more_pillars WHERE minion_id = %s"
|
||||
|
||||
The query can be provided as a string as we have just shown, but they can be
|
||||
provided as lists
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
ext_pillar:
|
||||
- sql_base:
|
||||
- "SELECT pillar,value FROM pillars WHERE minion_id = %s"
|
||||
2
|
||||
|
||||
Or as a mapping
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
ext_pillar:
|
||||
- sql_base:
|
||||
- query: "SELECT pillar,value FROM pillars WHERE minion_id = %s"
|
||||
depth: 2
|
||||
|
||||
The depth defines how the dicts are constructed.
|
||||
Essentially if you query for fields a,b,c,d for each row you'll get:
|
||||
|
||||
- With depth 1: {a: {"b": b, "c": c, "d": d}}
|
||||
- With depth 2: {a: {b: {"c": c, "d": d}}}
|
||||
- With depth 3: {a: {b: {c: d}}}
|
||||
|
||||
Depth greater than 3 wouldn't be different from 3 itself.
|
||||
Depth of 0 translates to the largest depth needed, so 3 in this case.
|
||||
(max depth == key count - 1)
|
||||
|
||||
Then they are merged in a similar way to plain pillar data, in the order
|
||||
returned by the SQL database.
|
||||
|
||||
Thus subsequent results overwrite previous ones when they collide.
|
||||
|
||||
The ignore_null option can be used to change the overwrite behavior so that
|
||||
only non-NULL values in subsequent results will overwrite. This can be used
|
||||
to selectively overwrite default values.
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
ext_pillar:
|
||||
- sql_base:
|
||||
- query: "SELECT pillar,value FROM pillars WHERE minion_id = 'default' and minion_id != %s"
|
||||
depth: 2
|
||||
- query: "SELECT pillar,value FROM pillars WHERE minion_id = %s"
|
||||
depth: 2
|
||||
ignore_null: True
|
||||
|
||||
If you specify `as_list: True` in the mapping expression it will convert
|
||||
collisions to lists.
|
||||
|
||||
If you specify `with_lists: '...'` in the mapping expression it will
|
||||
convert the specified depths to lists. The string provided is a sequence of
numbers that are comma separated. The string '1,3' will result in::
|
||||
|
||||
a,b,c,d,e,1 # field 1 same, field 3 differs
|
||||
a,b,c,f,g,2 # ^^^^
|
||||
a,z,h,y,j,3 # field 1 same, field 3 same
|
||||
a,z,h,y,k,4 # ^^^^
|
||||
^ ^
|
||||
|
||||
These columns define list grouping
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
{a: [
|
||||
{c: [
|
||||
{e: 1},
|
||||
{g: 2}
|
||||
]
|
||||
},
|
||||
{h: [
|
||||
{j: 3, k: 4 }
|
||||
]
|
||||
}
|
||||
]}
|
||||
|
||||
The range for with_lists is 1 to number_of_fields, inclusive.
|
||||
Numbers outside this range are ignored.
|
||||
|
||||
If you specify `as_json: True` in the mapping expression and query only for
|
||||
single value, returned data are considered in JSON format and will be merged
|
||||
directly.
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
ext_pillar:
|
||||
- sql_base:
|
||||
- query: "SELECT json_pillar FROM pillars WHERE minion_id = %s"
|
||||
as_json: True
|
||||
|
||||
The processed JSON entries are recursively merged in a single dictionary.
|
||||
Additionally, if `as_list` is set to `True` the lists will be merged in case of collision.
|
||||
|
||||
For instance the following rows:
|
||||
|
||||
{"a": {"b": [1, 2]}, "c": 3}
|
||||
{"a": {"b": [1, 3]}, "d": 4}
|
||||
|
||||
will result in the following pillar with `as_list=False`
|
||||
|
||||
{"a": {"b": [1, 3], "c": 3, "d": 4}
|
||||
|
||||
and in with `as_list=True`
|
||||
|
||||
{"a": {"b": [1, 2, 3], "c": 3, "d": 4}
|
||||
|
||||
Finally, if you pass the queries in via a mapping, the key will be the
|
||||
first level name where as passing them in as a list will place them in the
|
||||
root. This isolates the query results into their own subtrees.
|
||||
This may be a help or hindrance to your aims and can be used as such.
|
||||
|
||||
You can basically use any SELECT query that gets you the information, you
|
||||
could even do joins or subqueries in case your minion_id is stored elsewhere.
|
||||
It is capable of handling single rows or multiple rows per minion.
|
||||
|
||||
Configuration of the connection depends on the adapter in use.
|
||||
|
||||
.. versionadded:: 3005
|
||||
The *as_json* parameter.
|
||||
|
||||
More complete example for MySQL (to also show configuration)
|
||||
============================================================
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
mysql:
|
||||
user: 'salt'
|
||||
pass: 'super_secret_password'
|
||||
db: 'salt_db'
|
||||
|
||||
ext_pillar:
|
||||
- mysql:
|
||||
fromdb:
|
||||
query: 'SELECT col1,col2,col3,col4,col5,col6,col7
|
||||
FROM some_random_table
|
||||
WHERE minion_pattern LIKE %s'
|
||||
depth: 5
|
||||
as_list: True
|
||||
with_lists: [1,3]
|
||||
"""
|
||||
|
||||
import abc
|
||||
import logging
|
||||
|
||||
from salt.utils.dictupdate import update
|
||||
from salt.utils.odict import OrderedDict
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
# Please don't strip redundant parentheses from this file.
|
||||
# I have added some for clarity.
|
||||
|
||||
# tests/unit/pillar/mysql_test.py may help understand this code.
|
||||
|
||||
|
||||
# This ext_pillar is abstract and cannot be used directly
|
||||
def __virtual__():
|
||||
return False
|
||||
|
||||
|
||||
class SqlBaseExtPillar(metaclass=abc.ABCMeta):
|
||||
"""
|
||||
This class receives and processes the database rows in a database
|
||||
agnostic way.
|
||||
"""
|
||||
|
||||
result = None
|
||||
focus = None
|
||||
field_names = None
|
||||
num_fields = 0
|
||||
depth = 0
|
||||
as_list = False
|
||||
as_json = False
|
||||
with_lists = None
|
||||
ignore_null = False
|
||||
|
||||
def __init__(self):
|
||||
self.result = self.focus = {}
|
||||
|
||||
@classmethod
|
||||
@abc.abstractmethod
|
||||
def _db_name(cls):
|
||||
"""
|
||||
Return a friendly name for the database, e.g. 'MySQL' or 'SQLite'.
|
||||
Used in logging output.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def _get_cursor(self):
|
||||
"""
|
||||
Yield a PEP 249 compliant Cursor as a context manager.
|
||||
"""
|
||||
|
||||
def extract_queries(self, args, kwargs):
|
||||
"""
|
||||
This function normalizes the config block into a set of queries we
|
||||
can use. The return is a list of consistently laid out dicts.
|
||||
"""
|
||||
# Please note the function signature is NOT an error. Neither args, nor
|
||||
# kwargs should have asterisks. We are passing in a list and dict,
|
||||
# rather than receiving variable args. Adding asterisks WILL BREAK the
|
||||
# function completely.
|
||||
|
||||
# First, this is the query buffer. Contains lists of [base,sql]
|
||||
qbuffer = []
|
||||
|
||||
# Add on the non-keywords...
|
||||
qbuffer.extend([[None, s] for s in args])
|
||||
|
||||
# And then the keywords...
|
||||
# They aren't in definition order, but they can't conflict with each other.
|
||||
klist = list(kwargs.keys())
|
||||
klist.sort()
|
||||
qbuffer.extend([[k, kwargs[k]] for k in klist])
|
||||
|
||||
# Filter out values that don't have queries.
|
||||
qbuffer = [
|
||||
x
|
||||
for x in qbuffer
|
||||
if (
|
||||
(isinstance(x[1], str) and len(x[1]))
|
||||
or (isinstance(x[1], (list, tuple)) and (len(x[1]) > 0) and x[1][0])
|
||||
or (isinstance(x[1], dict) and "query" in x[1] and len(x[1]["query"]))
|
||||
)
|
||||
]
|
||||
|
||||
# Next, turn the whole buffer into full dicts.
|
||||
for qb in qbuffer:
|
||||
defaults = {
|
||||
"query": "",
|
||||
"depth": 0,
|
||||
"as_list": False,
|
||||
"as_json": False,
|
||||
"with_lists": None,
|
||||
"ignore_null": False,
|
||||
}
|
||||
if isinstance(qb[1], str):
|
||||
defaults["query"] = qb[1]
|
||||
elif isinstance(qb[1], (list, tuple)):
|
||||
defaults["query"] = qb[1][0]
|
||||
if len(qb[1]) > 1:
|
||||
defaults["depth"] = qb[1][1]
|
||||
# May set 'as_list' from qb[1][2].
|
||||
else:
|
||||
defaults.update(qb[1])
|
||||
if defaults["with_lists"] and isinstance(defaults["with_lists"], str):
|
||||
defaults["with_lists"] = [
|
||||
int(i) for i in defaults["with_lists"].split(",")
|
||||
]
|
||||
qb[1] = defaults
|
||||
|
||||
return qbuffer
|
||||
|
||||
def enter_root(self, root):
|
||||
"""
|
||||
Set self.focus for kwarg queries
|
||||
"""
|
||||
# There is no collision protection on root name isolation
|
||||
if root:
|
||||
self.result[root] = self.focus = {}
|
||||
else:
|
||||
self.focus = self.result
|
||||
|
||||
def process_fields(self, field_names, depth):
|
||||
"""
|
||||
The primary purpose of this function is to store the sql field list
|
||||
and the depth to which we process.
|
||||
"""
|
||||
# List of field names in correct order.
|
||||
self.field_names = field_names
|
||||
# number of fields.
|
||||
self.num_fields = len(field_names)
|
||||
# Constrain depth.
|
||||
if (depth == 0) or (depth >= self.num_fields):
|
||||
self.depth = self.num_fields - 1
|
||||
else:
|
||||
self.depth = depth
|
||||
|
||||
def process_results(self, rows):
|
||||
"""
|
||||
This function takes a list of database results and iterates over,
|
||||
merging them into a dict form.
|
||||
"""
|
||||
listify = OrderedDict()
|
||||
listify_dicts = OrderedDict()
|
||||
for ret in rows:
|
||||
# crd is the Current Return Data level, to make this non-recursive.
|
||||
crd = self.focus
|
||||
|
||||
# We have just one field without any key, assume returned row is already a dict
|
||||
# aka JSON storage
|
||||
if self.as_json and self.num_fields == 1:
|
||||
crd = update(crd, ret[0], merge_lists=self.as_list)
|
||||
continue
|
||||
|
||||
# Walk and create dicts above the final layer
|
||||
for i in range(0, self.depth - 1):
|
||||
# At the end we'll use listify to find values to make a list of
|
||||
if i + 1 in self.with_lists:
|
||||
if id(crd) not in listify:
|
||||
listify[id(crd)] = []
|
||||
listify_dicts[id(crd)] = crd
|
||||
if ret[i] not in listify[id(crd)]:
|
||||
listify[id(crd)].append(ret[i])
|
||||
if ret[i] not in crd:
|
||||
# Key missing
|
||||
crd[ret[i]] = {}
|
||||
crd = crd[ret[i]]
|
||||
else:
|
||||
# Check type of collision
|
||||
ty = type(crd[ret[i]])
|
||||
if ty is list:
|
||||
# Already made list
|
||||
temp = {}
|
||||
crd[ret[i]].append(temp)
|
||||
crd = temp
|
||||
elif ty is not dict:
|
||||
# Not a list, not a dict
|
||||
if self.as_list:
|
||||
# Make list
|
||||
temp = {}
|
||||
crd[ret[i]] = [crd[ret[i]], temp]
|
||||
crd = temp
|
||||
else:
|
||||
# Overwrite
|
||||
crd[ret[i]] = {}
|
||||
crd = crd[ret[i]]
|
||||
else:
|
||||
# dict, descend.
|
||||
crd = crd[ret[i]]
|
||||
|
||||
# If this test is true, the penultimate field is the key
|
||||
if self.depth == self.num_fields - 1:
|
||||
nk = self.num_fields - 2 # Aka, self.depth-1
|
||||
# Should we and will we have a list at the end?
|
||||
if (self.as_list and (ret[nk] in crd)) or (nk + 1 in self.with_lists):
|
||||
if ret[nk] in crd:
|
||||
if not isinstance(crd[ret[nk]], list):
|
||||
crd[ret[nk]] = [crd[ret[nk]]]
|
||||
# if it's already a list, do nothing
|
||||
else:
|
||||
crd[ret[nk]] = []
|
||||
crd[ret[nk]].append(ret[self.num_fields - 1])
|
||||
else:
|
||||
if not self.ignore_null or ret[self.num_fields - 1] is not None:
|
||||
crd[ret[nk]] = ret[self.num_fields - 1]
|
||||
else:
|
||||
# Otherwise, the field name is the key but we have a spare.
|
||||
# The spare results because of {c: d} vs {c: {"d": d, "e": e }}
|
||||
# So, make that last dict
|
||||
if ret[self.depth - 1] not in crd:
|
||||
crd[ret[self.depth - 1]] = {}
|
||||
# This bit doesn't escape listify
|
||||
if self.depth in self.with_lists:
|
||||
if id(crd) not in listify:
|
||||
listify[id(crd)] = []
|
||||
listify_dicts[id(crd)] = crd
|
||||
if ret[self.depth - 1] not in listify[id(crd)]:
|
||||
listify[id(crd)].append(ret[self.depth - 1])
|
||||
crd = crd[ret[self.depth - 1]]
|
||||
# Now for the remaining keys, we put them into the dict
|
||||
for i in range(self.depth, self.num_fields):
|
||||
nk = self.field_names[i]
|
||||
# Listify
|
||||
if i + 1 in self.with_lists:
|
||||
if id(crd) not in listify:
|
||||
listify[id(crd)] = []
|
||||
listify_dicts[id(crd)] = crd
|
||||
if nk not in listify[id(crd)]:
|
||||
listify[id(crd)].append(nk)
|
||||
# Collision detection
|
||||
if self.as_list and (nk in crd):
|
||||
# Same as before...
|
||||
if isinstance(crd[nk], list):
|
||||
crd[nk].append(ret[i])
|
||||
else:
|
||||
crd[nk] = [crd[nk], ret[i]]
|
||||
else:
|
||||
if not self.ignore_null or ret[i] is not None:
|
||||
crd[nk] = ret[i]
|
||||
# Get key list and work backwards. This is inner-out processing
|
||||
ks = list(listify_dicts.keys())
|
||||
ks.reverse()
|
||||
for i in ks:
|
||||
d = listify_dicts[i]
|
||||
for k in listify[i]:
|
||||
if isinstance(d[k], dict):
|
||||
d[k] = list(d[k].values())
|
||||
elif isinstance(d[k], list):
|
||||
d[k] = [d[k]]
|
||||
|
||||
def fetch(self, minion_id, pillar, *args, **kwargs): # pylint: disable=W0613
|
||||
"""
|
||||
Execute queries, merge and return as a dict.
|
||||
"""
|
||||
db_name = self._db_name()
|
||||
log.info("Querying %s for information for %s", db_name, minion_id)
|
||||
#
|
||||
# log.debug('ext_pillar %s args: %s', db_name, args)
|
||||
# log.debug('ext_pillar %s kwargs: %s', db_name, kwargs)
|
||||
#
|
||||
# Most of the heavy lifting is in this class for ease of testing.
|
||||
qbuffer = self.extract_queries(args, kwargs)
|
||||
with self._get_cursor() as cursor:
|
||||
for root, details in qbuffer:
|
||||
# Run the query
|
||||
cursor.execute(details["query"], (minion_id,))
|
||||
|
||||
# Extract the field names the db has returned and process them
|
||||
self.process_fields(
|
||||
[row[0] for row in cursor.description], details["depth"]
|
||||
)
|
||||
self.enter_root(root)
|
||||
self.as_list = details["as_list"]
|
||||
self.as_json = details["as_json"]
|
||||
if details["with_lists"]:
|
||||
self.with_lists = details["with_lists"]
|
||||
else:
|
||||
self.with_lists = []
|
||||
self.ignore_null = details["ignore_null"]
|
||||
self.process_results(cursor.fetchall())
|
||||
|
||||
log.debug("ext_pillar %s: Return data: %s", db_name, self)
|
||||
return self.result
|
||||
|
||||
|
||||
# To extend this module you must define a top level ext_pillar procedure
|
||||
# See mysql.py for an example
|
275
salt/proxy/dummy.py
Normal file
|
@ -0,0 +1,275 @@
|
|||
"""
|
||||
This is a dummy proxy-minion designed for testing the proxy minion subsystem.
|
||||
"""
|
||||
|
||||
import copy
|
||||
import logging
|
||||
import os
|
||||
import pprint
|
||||
from contextlib import contextmanager
|
||||
|
||||
import salt.utils.files
|
||||
import salt.utils.msgpack
|
||||
from salt.exceptions import CommandExecutionError, MinionError
|
||||
|
||||
# This must be present or the Salt loader won't load this module
|
||||
__proxyenabled__ = ["dummy"]
|
||||
|
||||
log = logging.getLogger(__file__)
|
||||
|
||||
|
||||
# This does nothing, it's here just as an example and to provide a log
|
||||
# entry when the module is loaded.
|
||||
def __virtual__():
|
||||
"""
|
||||
Only return if all the modules are available
|
||||
"""
|
||||
log.debug("dummy proxy __virtual__() called...")
|
||||
return True
|
||||
|
||||
|
||||
def _save_state(opts, details):
|
||||
_id = __context__["dummy_proxy"]["id"]
|
||||
cachefile = os.path.join(opts["cachedir"], "dummy-proxy-{}.cache".format(_id))
|
||||
with salt.utils.files.fopen(cachefile, "wb") as pck:
|
||||
pck.write(salt.utils.msgpack.packb(details, use_bin_type=True))
|
||||
log.warning("Dummy Proxy Saved State(%s):\n%s", cachefile, pprint.pformat(details))
|
||||
|
||||
|
||||
def _load_state(opts):
|
||||
_id = __context__["dummy_proxy"]["id"]
|
||||
cachefile = os.path.join(opts["cachedir"], "dummy-proxy-{}.cache".format(_id))
|
||||
try:
|
||||
with salt.utils.files.fopen(cachefile, "rb") as pck:
|
||||
state = salt.utils.msgpack.unpackb(pck.read(), raw=False)
|
||||
except FileNotFoundError:
|
||||
state = _initial_state()
|
||||
_save_state(opts, state)
|
||||
except Exception as exc: # pylint: disable=broad-except
|
||||
log.exception("Failed to load state: %s", exc, exc_info=True)
|
||||
state = _initial_state()
|
||||
_save_state(opts, state)
|
||||
log.warning("Dummy Proxy Loaded State(%s):\n%s", cachefile, pprint.pformat(state))
|
||||
return state
|
||||
|
||||
|
||||
@contextmanager
|
||||
def _loaded_state(opts):
|
||||
state = _load_state(opts)
|
||||
original = copy.deepcopy(state)
|
||||
try:
|
||||
yield state
|
||||
finally:
|
||||
if state != original:
|
||||
_save_state(opts, state)
|
||||
|
||||
|
||||
def _initial_state():
|
||||
return {
|
||||
"services": {"apache": "running", "ntp": "running", "samba": "stopped"},
|
||||
"packages": {
|
||||
"coreutils": "1.0",
|
||||
"apache": "2.4",
|
||||
"tinc": "1.4",
|
||||
"redbull": "999.99",
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
# Every proxy module needs an 'init', though you can
|
||||
# just put DETAILS['initialized'] = True here if nothing
|
||||
# else needs to be done.
|
||||
|
||||
|
||||
def init(opts):
|
||||
"""
|
||||
Required.
|
||||
Can be used to initialize the server connection.
|
||||
"""
|
||||
# Added to test situation when a proxy minion throws
|
||||
# an exception during init.
|
||||
if opts["proxy"].get("raise_minion_error"):
|
||||
raise MinionError(message="Raising A MinionError.")
|
||||
if opts["proxy"].get("raise_commandexec_error"):
|
||||
raise CommandExecutionError(message="Raising A CommandExecutionError.")
|
||||
__context__["dummy_proxy"] = {"id": opts["id"]}
|
||||
log.debug("dummy proxy init() called...")
|
||||
with _loaded_state(opts) as state:
|
||||
state["initialized"] = True
|
||||
|
||||
|
||||
def initialized():
|
||||
"""
|
||||
Since grains are loaded in many different places and some of those
|
||||
places occur before the proxy can be initialized, return whether
|
||||
our init() function has been called
|
||||
"""
|
||||
with _loaded_state(__opts__) as state:
|
||||
return state.get("initialized", False)
|
||||
|
||||
|
||||
def grains():
|
||||
"""
|
||||
Make up some grains
|
||||
"""
|
||||
with _loaded_state(__opts__) as state:
|
||||
if "grains_cache" not in state:
|
||||
state["grains_cache"] = {
|
||||
"dummy_grain_1": "one",
|
||||
"dummy_grain_2": "two",
|
||||
"dummy_grain_3": "three",
|
||||
}
|
||||
return state["grains_cache"]
|
||||
|
||||
|
||||
def grains_refresh():
|
||||
"""
|
||||
Refresh the grains
|
||||
"""
|
||||
with _loaded_state(__opts__) as state:
|
||||
if "grains_cache" in state:
|
||||
state.pop("grains_cache")
|
||||
return grains()
|
||||
|
||||
|
||||
def fns():
|
||||
"""
|
||||
Method called by grains module.
|
||||
"""
|
||||
return {
|
||||
"details": (
|
||||
"This key is here because a function in "
|
||||
"grains/rest_sample.py called fns() here in the proxymodule."
|
||||
)
|
||||
}
|
||||
|
||||
|
||||
def service_start(name):
|
||||
"""
|
||||
Start a "service" on the dummy server
|
||||
"""
|
||||
with _loaded_state(__opts__) as state:
|
||||
state["services"][name] = "running"
|
||||
return "running"
|
||||
|
||||
|
||||
def service_stop(name):
|
||||
"""
|
||||
Stop a "service" on the dummy server
|
||||
"""
|
||||
with _loaded_state(__opts__) as state:
|
||||
state["services"][name] = "stopped"
|
||||
return "stopped"
|
||||
|
||||
|
||||
def service_restart(name):
|
||||
"""
|
||||
Restart a "service" on the REST server
|
||||
"""
|
||||
return True
|
||||
|
||||
|
||||
def service_list():
|
||||
"""
|
||||
List "services" on the REST server
|
||||
"""
|
||||
with _loaded_state(__opts__) as state:
|
||||
return list(state["services"])
|
||||
|
||||
|
||||
def service_status(name):
|
||||
"""
|
||||
Check if a service is running on the REST server
|
||||
"""
|
||||
with _loaded_state(__opts__) as state:
|
||||
if state["services"][name] == "running":
|
||||
return {"comment": "running"}
|
||||
else:
|
||||
return {"comment": "stopped"}
|
||||
|
||||
|
||||
def package_list():
|
||||
"""
|
||||
List "packages" installed on the REST server
|
||||
"""
|
||||
with _loaded_state(__opts__) as state:
|
||||
return state["packages"]
|
||||
|
||||
|
||||
def package_install(name, **kwargs):
|
||||
"""
|
||||
Install a "package" on the REST server
|
||||
"""
|
||||
if kwargs.get("version", False):
|
||||
version = kwargs["version"]
|
||||
else:
|
||||
version = "1.0"
|
||||
with _loaded_state(__opts__) as state:
|
||||
state["packages"][name] = version
|
||||
return {name: version}
|
||||
|
||||
|
||||
def upgrade():
|
||||
"""
|
||||
"Upgrade" packages
|
||||
"""
|
||||
with _loaded_state(__opts__) as state:
|
||||
for p in state["packages"]:
|
||||
version_float = float(state["packages"][p])
|
||||
version_float = version_float + 1.0
|
||||
state["packages"][p] = str(version_float)
|
||||
return state["packages"]
|
||||
|
||||
|
||||
def uptodate():
|
||||
"""
|
||||
Call the REST endpoint to see if the packages on the "server" are up to date.
|
||||
"""
|
||||
with _loaded_state(__opts__) as state:
|
||||
return state["packages"]
|
||||
|
||||
|
||||
def package_remove(name):
|
||||
"""
|
||||
Remove a "package" on the REST server
|
||||
"""
|
||||
__context__["dummy_proxy"]["foo"] = "bar"
|
||||
with _loaded_state(__opts__) as state:
|
||||
state["packages"].pop(name)
|
||||
return state["packages"]
|
||||
|
||||
|
||||
def package_status(name):
|
||||
"""
|
||||
Check the installation status of a package on the REST server
|
||||
"""
|
||||
with _loaded_state(__opts__) as state:
|
||||
if name in state["packages"]:
|
||||
return {name: state["packages"][name]}
|
||||
|
||||
|
||||
def ping():
|
||||
"""
|
||||
Degenerate ping
|
||||
"""
|
||||
log.debug("dummy proxy returning ping")
|
||||
return True
|
||||
|
||||
|
||||
def shutdown(opts):
|
||||
"""
|
||||
For this proxy shutdown is a no-op
|
||||
"""
|
||||
log.debug("dummy proxy shutdown() called...")
|
||||
with _loaded_state(__opts__) as state:
|
||||
if "filename" in state:
|
||||
os.unlink(state["filename"])
|
||||
|
||||
|
||||
def test_from_state():
|
||||
"""
|
||||
Test function so we have something to call from a state
|
||||
:return:
|
||||
"""
|
||||
log.debug("test_from_state called")
|
||||
return "testvalue"
|
90
salt/serializers/configparser.py
Normal file
|
@ -0,0 +1,90 @@
|
|||
"""
|
||||
salt.serializers.configparser
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
.. versionadded:: 2016.3.0
|
||||
|
||||
Implements a configparser serializer.
|
||||
"""
|
||||
|
||||
import configparser
|
||||
import io
|
||||
|
||||
from salt.serializers import DeserializationError, SerializationError
|
||||
|
||||
__all__ = ["deserialize", "serialize", "available"]
|
||||
|
||||
available = True
|
||||
|
||||
|
||||
def deserialize(stream_or_string, **options):
|
||||
"""
|
||||
Deserialize any string or stream like object into a Python data structure.
|
||||
|
||||
:param stream_or_string: stream or string to deserialize.
|
||||
:param options: options given to lower configparser module.
|
||||
"""
|
||||
|
||||
cp = configparser.ConfigParser(**options)
|
||||
|
||||
try:
|
||||
if not isinstance(stream_or_string, (bytes, str)):
|
||||
cp.read_file(stream_or_string)
|
||||
else:
|
||||
cp.read_file(io.StringIO(stream_or_string))
|
||||
data = {}
|
||||
for section_name in cp.sections():
|
||||
section = {}
|
||||
for k, v in cp.items(section_name):
|
||||
section[k] = v
|
||||
data[section_name] = section
|
||||
return data
|
||||
except Exception as error: # pylint: disable=broad-except
|
||||
raise DeserializationError(error)
|
||||
|
||||
|
||||
def serialize(obj, **options):
|
||||
"""
|
||||
Serialize Python data to a configparser formatted string or file.
|
||||
|
||||
:param obj: the data structure to serialize
|
||||
:param options: options given to lower configparser module.
|
||||
"""
|
||||
|
||||
try:
|
||||
if not isinstance(obj, dict):
|
||||
raise TypeError(
|
||||
f"configparser can only serialize dictionaries, not {type(obj)}"
|
||||
)
|
||||
fp = options.pop("fp", None)
|
||||
cp = configparser.ConfigParser(**options)
|
||||
_read_dict(cp, obj)
|
||||
|
||||
if fp:
|
||||
return cp.write(fp)
|
||||
else:
|
||||
s = io.StringIO()
|
||||
cp.write(s)
|
||||
return s.getvalue()
|
||||
except Exception as error: # pylint: disable=broad-except
|
||||
raise SerializationError(error)
|
||||
|
||||
|
||||
def _is_defaultsect(section_name):
|
||||
return section_name == configparser.DEFAULTSECT
|
||||
|
||||
|
||||
def _read_dict(cp, dictionary):
|
||||
"""
|
||||
Cribbed from python3's ConfigParser.read_dict function.
|
||||
"""
|
||||
for section, keys in dictionary.items():
|
||||
section = str(section)
|
||||
if not _is_defaultsect(section):
|
||||
cp.add_section(section)
|
||||
|
||||
for key, value in keys.items():
|
||||
key = cp.optionxform(str(key))
|
||||
if value is not None:
|
||||
value = str(value)
|
||||
cp.set(section, key, value)
|
|
@ -1,149 +0,0 @@
|
|||
"""
|
||||
Interface to Red Hat tuned-adm module
|
||||
|
||||
:maintainer: Syed Ali <alicsyed@gmail.com>
|
||||
:maturity: new
|
||||
:depends: cmd.run
|
||||
:platform: Linux
|
||||
"""
|
||||
|
||||
import salt.exceptions
|
||||
from salt.modules.tuned import TUNED_OFF_RETURN_NAME
|
||||
|
||||
|
||||
def profile(name):
|
||||
"""
|
||||
This state module allows you to modify system tuned parameters
|
||||
|
||||
Example tuned.sls file to set profile to virtual-guest
|
||||
|
||||
tuned:
|
||||
tuned.profile
|
||||
- name: virtual-guest
|
||||
|
||||
name
|
||||
tuned profile name to set the system to
|
||||
|
||||
To see a valid list of states call execution module:
|
||||
:py:func:`tuned.list <salt.modules.tuned.list_>`
|
||||
"""
|
||||
|
||||
# create data-structure to return with default value
|
||||
ret = {"name": "", "changes": {}, "result": False, "comment": ""}
|
||||
|
||||
ret[name] = name
|
||||
profile = name
|
||||
|
||||
# get the current state of tuned-adm
|
||||
current_state_dict = __salt__["tuned.active"]()
|
||||
|
||||
# Off is returned as retcode 1, stdout == TUNED_OFF_RETURN_NAME
|
||||
# any other type of error will be returned here
|
||||
if (
|
||||
current_state_dict["retcode"] != 0
|
||||
and current_state_dict["stdout"] != TUNED_OFF_RETURN_NAME
|
||||
):
|
||||
ret["comment"] = current_state_dict["stderr"]
|
||||
return ret
|
||||
|
||||
# if current state is same as requested state, return without doing much
|
||||
if current_state_dict["retcode"] == 0 and profile == current_state_dict["stdout"]:
|
||||
ret["result"] = True
|
||||
ret["comment"] = "System already in the correct state"
|
||||
return ret
|
||||
|
||||
# test mode
|
||||
if __opts__["test"] is True:
|
||||
# Only perform the valid profile test if it is a test,
|
||||
# tuned-adm command will fail and return message
|
||||
valid_profiles = __salt__["tuned.list"]()
|
||||
if profile not in valid_profiles:
|
||||
raise salt.exceptions.SaltInvocationError("Invalid Profile Name")
|
||||
ret["comment"] = 'The state of "{}" will be changed.'.format(
|
||||
current_state_dict["stdout"]
|
||||
)
|
||||
ret["changes"] = {
|
||||
"old": current_state_dict["stdout"],
|
||||
"new": f"Profile will be set to {profile}",
|
||||
}
|
||||
# return None when testing
|
||||
ret["result"] = None
|
||||
return ret
|
||||
|
||||
# we come to this stage if current state was determined and is different that requested state
|
||||
# we there have to set the new state request
|
||||
new_state_dict = __salt__["tuned.profile"](profile)
|
||||
|
||||
if new_state_dict["retcode"] != 0:
|
||||
ret["comment"] = new_state_dict["stderr"]
|
||||
else:
|
||||
# create the comment data structure
|
||||
ret["comment"] = f'Tunings changed to "{profile}"'
|
||||
# add the changes specifics
|
||||
ret["changes"] = {
|
||||
"old": current_state_dict["stdout"],
|
||||
"new": new_state_dict["stdout"],
|
||||
}
|
||||
ret["result"] = True
|
||||
|
||||
# return with the dictionary data structure
|
||||
return ret
|
||||
|
||||
|
||||
def off(name=None):
|
||||
"""
|
||||
|
||||
Turns 'tuned' off.
|
||||
Example tuned.sls file for turning tuned off:
|
||||
|
||||
tuned:
|
||||
tuned.off: []
|
||||
|
||||
|
||||
To see a valid list of states call execution module:
|
||||
:py:func:`tuned.list <salt.modules.tuned.list_>`
|
||||
"""
|
||||
|
||||
# create data-structure to return with default value
|
||||
ret = {"name": "off", "changes": {}, "result": False, "comment": "off"}
|
||||
|
||||
# check the current state of tuned
|
||||
current_state_dict = __salt__["tuned.active"]()
|
||||
|
||||
# Off is returned as retcode 1, stdout == TUNED_OFF_RETURN_NAME
|
||||
if current_state_dict["retcode"] != 0:
|
||||
if current_state_dict["stdout"] == TUNED_OFF_RETURN_NAME:
|
||||
ret["result"] = True
|
||||
ret["comment"] = "System already in the correct state"
|
||||
return ret
|
||||
ret["comment"] = current_state_dict["stderr"]
|
||||
return ret
|
||||
|
||||
# test mode
|
||||
if __opts__["test"] is True:
|
||||
ret["comment"] = 'The state of "{}" will be turned off.'.format(
|
||||
current_state_dict["stdout"]
|
||||
)
|
||||
ret["changes"] = {
|
||||
"old": current_state_dict["stdout"],
|
||||
"new": "Profile will be set to off",
|
||||
}
|
||||
# return None when testing
|
||||
ret["result"] = None
|
||||
return ret
|
||||
|
||||
# execute the tuned.off module
|
||||
off_result_dict = __salt__["tuned.off"]()
|
||||
|
||||
if off_result_dict["retcode"] != 0:
|
||||
ret["comment"] = off_result_dict["stderr"]
|
||||
else:
|
||||
ret["comment"] = "Tunings have been turned off"
|
||||
ret["changes"] = {
|
||||
"old": current_state_dict["stdout"],
|
||||
"new": "off",
|
||||
}
|
||||
ret["result"] = True
|
||||
|
||||
# return with the dictionary data structure
|
||||
return ret
|
|
@ -47,9 +47,6 @@ salt/modules/*apache.py:
|
|||
- pytests.unit.states.apache.test_module
|
||||
- pytests.unit.states.apache.test_site
|
||||
|
||||
salt/modules/augeas_cfg.py:
|
||||
- pytests.unit.states.test_augeas
|
||||
|
||||
salt/modules/cp.py:
|
||||
- pytests.functional.modules.file.test_replace
|
||||
- pytests.unit.modules.file.test_file_basics
|
||||
|
@ -65,28 +62,9 @@ salt/modules/cp.py:
|
|||
- integration.modules.test_file
|
||||
- pytests.functional.states.file.test_copy
|
||||
|
||||
salt/modules/dockermod.py:
|
||||
- pytests.unit.states.test_docker_image
|
||||
- pytests.unit.states.test_docker_volume
|
||||
- unit.utils.test_dockermod
|
||||
- pytests.functional.states.test_docker_container
|
||||
- pytests.functional.states.test_docker_network
|
||||
|
||||
salt/modules/file.py:
|
||||
- pytests.integration.states.test_cron
|
||||
|
||||
salt/modules/influxdb08mod.py:
|
||||
- pytests.unit.states.test_influxdb08_database
|
||||
- pytests.unit.states.test_influxdb08_user
|
||||
|
||||
salt/modules/mysql.py:
|
||||
- pytests.unit.states.mysql.test_user
|
||||
- pytests.unit.states.mysql.test_query
|
||||
- pytests.unit.states.mysql.test_grants
|
||||
|
||||
salt/modules/openvswitch.py:
|
||||
- pytests.unit.states.test_openvswitch_port
|
||||
|
||||
salt/(states|modules)/.*postgres.py:
|
||||
- pytests.unit.states.postgresql.test_cluster
|
||||
- pytests.unit.states.postgresql.test_database
|
||||
|
@ -286,9 +264,6 @@ salt/(cli/run\.py|runner\.py):
|
|||
- pytests.integration.cli.test_salt_run
|
||||
- integration.runners.test_runner_returns
|
||||
|
||||
salt/runners/venafiapi.py:
|
||||
- integration.externalapi.test_venafiapi
|
||||
|
||||
salt/serializers/*:
|
||||
- pytests.unit.serializers.test_serializers
|
||||
|
||||
|
@ -338,9 +313,6 @@ salt/(minion\.py|channel/.+|transport/.+):
|
|||
tests/support/mock.py:
|
||||
- unit.test_mock
|
||||
|
||||
tests/support/virt.py:
|
||||
- pytests.integration.modules.test_virt
|
||||
|
||||
tests/support/pytest/mysql.py:
|
||||
- pytests.functional.states.test_mysql
|
||||
- pytests.functional.modules.test_mysql
|
||||
|
|
|
@ -1,140 +0,0 @@
|
|||
"""
|
||||
Tests for the salt-run command
|
||||
"""
|
||||
|
||||
import functools
|
||||
import random
|
||||
import string
|
||||
import tempfile
|
||||
|
||||
import pytest
|
||||
from cryptography import x509
|
||||
from cryptography.hazmat.backends import default_backend
|
||||
from cryptography.hazmat.primitives import serialization
|
||||
from cryptography.x509.oid import NameOID
|
||||
|
||||
from tests.support.case import ShellCase
|
||||
|
||||
|
||||
def _random_name(prefix=""):
|
||||
ret = prefix
|
||||
for _ in range(8):
|
||||
ret += random.choice(string.ascii_lowercase)
|
||||
return ret
|
||||
|
||||
|
||||
def with_random_name(func):
|
||||
"""
|
||||
generate a randomized name for a container
|
||||
"""
|
||||
|
||||
@functools.wraps(func)
|
||||
def wrapper(self, *args, **kwargs):
|
||||
name = _random_name(prefix="salt_")
|
||||
return func(self, _random_name(prefix="salt-test-"), *args, **kwargs)
|
||||
|
||||
return wrapper
|
||||
|
||||
|
||||
class VenafiTest(ShellCase):
|
||||
"""
|
||||
Test the venafi runner
|
||||
"""
|
||||
|
||||
@with_random_name
|
||||
@pytest.mark.slow_test
|
||||
@pytest.mark.skip_on_fips_enabled_platform
|
||||
def test_request(self, name):
|
||||
cn = f"{name}.example.com"
|
||||
|
||||
ret = self.run_run_plus(
|
||||
fun="venafi.request",
|
||||
minion_id=cn,
|
||||
dns_name=cn,
|
||||
key_password="secretPassword",
|
||||
zone="fake",
|
||||
)
|
||||
cert_output = ret["return"][0]
|
||||
assert cert_output is not None, "venafi_certificate not found in `output_value`"
|
||||
|
||||
cert = x509.load_pem_x509_certificate(cert_output.encode(), default_backend())
|
||||
assert isinstance(cert, x509.Certificate)
|
||||
assert cert.subject.get_attributes_for_oid(NameOID.COMMON_NAME) == [
|
||||
x509.NameAttribute(NameOID.COMMON_NAME, cn)
|
||||
]
|
||||
|
||||
pkey_output = ret["return"][1]
|
||||
assert pkey_output is not None, "venafi_private key not found in output_value"
|
||||
|
||||
pkey = serialization.load_pem_private_key(
|
||||
pkey_output.encode(), password=b"secretPassword", backend=default_backend()
|
||||
)
|
||||
|
||||
pkey_public_key_pem = pkey.public_key().public_bytes(
|
||||
encoding=serialization.Encoding.PEM,
|
||||
format=serialization.PublicFormat.SubjectPublicKeyInfo,
|
||||
)
|
||||
cert_public_key_pem = cert.public_key().public_bytes(
|
||||
encoding=serialization.Encoding.PEM,
|
||||
format=serialization.PublicFormat.SubjectPublicKeyInfo,
|
||||
)
|
||||
assert pkey_public_key_pem == cert_public_key_pem
|
||||
|
||||
@with_random_name
|
||||
@pytest.mark.slow_test
|
||||
def test_sign(self, name):
|
||||
|
||||
csr_pem = """-----BEGIN CERTIFICATE REQUEST-----
|
||||
MIIFbDCCA1QCAQAwgbQxCzAJBgNVBAYTAlVTMQ0wCwYDVQQIDARVdGFoMRIwEAYD
|
||||
VQQHDAlTYWx0IExha2UxFDASBgNVBAoMC1ZlbmFmaSBJbmMuMRQwEgYDVQQLDAtJ
|
||||
bnRlZ3JhdGlvbjEnMCUGCSqGSIb3DQEJARYYZW1haWxAdmVuYWZpLmV4YW1wbGUu
|
||||
Y29tMS0wKwYDVQQDDCR0ZXN0LWNzci0zMjMxMzEzMS52ZW5hZmkuZXhhbXBsZS5j
|
||||
b20wggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC4T0bdjq+mF+DABhF+
|
||||
XWCwOXXUWbPNWa72VVhxoelbyTS0iIeZEe64AvNGykytFdOuT/F9pdkZa+Io07R1
|
||||
ZMp6Ak8dp2Wjt4c5rayVZus6ZK+0ZwBRJO7if/cqhEpxy8Wz1RMfVLf2AE1u/xZS
|
||||
QSYY0BTRWGmPqrFJrIGbnyQfvmGVPk3cA0RfdrwYJZXtZ2/4QNrbNCoSoSmqTHzt
|
||||
NAtZhvT2dPU9U48Prx4b2460x+ck3xA1OdJNXV7n5u53QbxOIcjdGT0lJ62ml70G
|
||||
5gvEHmdPcg+t5cw/Sm5cfDSUEDtNEXvD4oJXfP98ty6f1cYsZpcrgxRwk9RfGain
|
||||
hvoweXhZP3NWnU5nRdn2nOfExv+xMeQOyB/rYv98zqzK6LvwKhwI5UB1l/n9KTpg
|
||||
jgaNCP4x/KAsrPecbHK91oiqGSbPn4wtTYOmPkDxSzATN317u7fE20iqvVAUy/O+
|
||||
7SCNNKEDPX2NP9LLz0IPK0roQxLiwd2CVyN6kEXuzs/3psptkNRMSlhyeAZdfrOE
|
||||
CNOp46Pam9f9HGBqzXxxoIlfzLqHHL584kgFlBm7qmivVrgp6zdLPDa+UayXEl2N
|
||||
O17SnGS8nkOTmfg3cez7lzX/LPLO9X/Y1xKYqx5hoGZhh754K8mzDWCVCYThWgou
|
||||
yBOYY8uNXiX6ldqzQUHpbxxQgwIDAQABoHIwcAYJKoZIhvcNAQkOMWMwYTBfBgNV
|
||||
HREEWDBWgilhbHQxLXRlc3QtY3NyLTMyMzEzMTMxLnZlbmFmaS5leGFtcGxlLmNv
|
||||
bYIpYWx0Mi10ZXN0LWNzci0zMjMxMzEzMS52ZW5hZmkuZXhhbXBsZS5jb20wDQYJ
|
||||
KoZIhvcNAQELBQADggIBAJd87BIdeh0WWoyQ4IX+ENpNqmm/sLmdfmUB/hj9NpBL
|
||||
qbr2UTWaSr1jadoZ+mrDxtm1Z0YJDTTIrEWxkBOW5wQ039lYZNe2tfDXSJZwJn7u
|
||||
2keaXtWQ2SdduK1wOPDO9Hra6WnH7aEq5D1AyoghvPsZwTqZkNynt/A1BZW5C/ha
|
||||
J9/mwgWfL4qXBGBOhLwKN5GUo3erUkJIdH0TlMqI906D/c/YAuJ86SRdQtBYci6X
|
||||
bJ7C+OnoiV6USn1HtQE6dfOMeS8voJuixpSIvHZ/Aim6kSAN1Za1f6FQAkyqbF+o
|
||||
oKTJHDS1CPWikCeLdpPUcOCDIbsiISTsMZkEvIkzZ7dKBIlIugauxw3vaEpk47jN
|
||||
Wq09r639RbSv/Qs8D6uY66m1IpL4zHm4lTAknrjM/BqihPxc8YiN76ssajvQ4SFT
|
||||
DHPrDweEVe4KL1ENw8nv4wdkIFKwJTDarV5ZygbETzIhfa2JSBZFTdN+Wmd2Mh5h
|
||||
OTu+vuHrJF2TO8g1G48EB/KWGt+yvVUpWAanRMwldnFX80NcUlM7GzNn6IXTeE+j
|
||||
BttIbvAAVJPG8rVCP8u3DdOf+vgm5macj9oLoVP8RBYo/z0E3e+H50nXv3uS6JhN
|
||||
xlAKgaU6i03jOm5+sww5L2YVMi1eeBN+kx7o94ogpRemC/EUidvl1PUJ6+e7an9V
|
||||
-----END CERTIFICATE REQUEST-----
|
||||
"""
|
||||
|
||||
with tempfile.NamedTemporaryFile("w+") as f:
|
||||
f.write(csr_pem)
|
||||
f.flush()
|
||||
csr_path = f.name
|
||||
cn = "test-csr-32313131.venafi.example.com"
|
||||
|
||||
ret = self.run_run_plus(
|
||||
fun="venafi.request", minion_id=cn, csr_path=csr_path, zone="fake"
|
||||
)
|
||||
cert_output = ret["return"][0]
|
||||
assert (
|
||||
cert_output is not None
|
||||
), "venafi_certificate not found in `output_value`"
|
||||
|
||||
cert = x509.load_pem_x509_certificate(
|
||||
cert_output.encode(), default_backend()
|
||||
)
|
||||
assert isinstance(cert, x509.Certificate)
|
||||
assert cert.subject.get_attributes_for_oid(NameOID.COMMON_NAME) == [
|
||||
x509.NameAttribute(NameOID.COMMON_NAME, cn)
|
||||
]
|
|
@ -4,7 +4,6 @@ import logging
|
|||
import pytest
|
||||
|
||||
import salt.serializers.configparser
|
||||
import salt.serializers.plist
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
@ -87,6 +86,7 @@ def test_serializer_deserializer_opts(file, tmp_path):
|
|||
assert serialized_data["foo"]["bar"] == merged["foo"]["bar"]
|
||||
|
||||
|
||||
@pytest.mark.skip("Great module migration")
|
||||
def test_serializer_plist_binary_file_open(file, tmp_path):
|
||||
"""
|
||||
Test the serialization and deserialization of plists which should include
|
||||
|
@ -122,6 +122,7 @@ def test_serializer_plist_binary_file_open(file, tmp_path):
|
|||
assert serialized_data["foo"] == merged["foo"]
|
||||
|
||||
|
||||
@pytest.mark.skip("Great module migration")
|
||||
def test_serializer_plist_file_open(file, tmp_path):
|
||||
"""
|
||||
Test the serialization and deserialization of non binary plists with
|
||||
|
|
|
@ -1,92 +0,0 @@
|
|||
import textwrap
|
||||
|
||||
import pytest
|
||||
|
||||
import salt.utils.platform
|
||||
from tests.support.runtests import RUNTIME_VARS
|
||||
|
||||
pytestmark = [
|
||||
pytest.mark.skip_on_windows(reason="salt-ssh not available on Windows"),
|
||||
pytest.mark.slow_test,
|
||||
]
|
||||
|
||||
|
||||
@pytest.fixture(scope="module")
|
||||
def minion_id():
|
||||
return "terraform_ssh_minion"
|
||||
|
||||
|
||||
@pytest.fixture(scope="module")
|
||||
def terraform_roster_file(sshd_server, salt_master, tmp_path_factory, minion_id):
|
||||
darwin_addon = ""
|
||||
if salt.utils.platform.is_darwin():
|
||||
darwin_addon = ',\n "set_path": "$PATH:/usr/local/bin/"\n'
|
||||
roster_contents = textwrap.dedent(
|
||||
""" {{
|
||||
"version": 4,
|
||||
"terraform_version": "1.4.3",
|
||||
"serial": 1,
|
||||
"outputs": {{}},
|
||||
"resources": [
|
||||
{{
|
||||
"mode": "managed",
|
||||
"type": "salt_host",
|
||||
"name": "{minion}",
|
||||
"instances": [
|
||||
{{
|
||||
"schema_version": 0,
|
||||
"attributes": {{
|
||||
"cmd_umask": null,
|
||||
"host": "localhost",
|
||||
"id": "{minion}",
|
||||
"minion_opts": null,
|
||||
"passwd": "",
|
||||
"port": {port},
|
||||
"priv": null,
|
||||
"salt_id": "{minion}",
|
||||
"sudo": null,
|
||||
"sudo_user": null,
|
||||
"thin_dir": null,
|
||||
"timeout": null,
|
||||
"tty": null,
|
||||
"user": "{user}"{darwin_addon}
|
||||
}}
|
||||
}}
|
||||
]
|
||||
}}
|
||||
],
|
||||
"check_results": null
|
||||
}}
|
||||
"""
|
||||
).format(
|
||||
minion=minion_id,
|
||||
port=sshd_server.listen_port,
|
||||
user=RUNTIME_VARS.RUNNING_TESTS_USER,
|
||||
darwin_addon=darwin_addon,
|
||||
)
|
||||
roster_file = tmp_path_factory.mktemp("terraform_roster") / "terraform.tfstate"
|
||||
roster_file.write_text(roster_contents)
|
||||
yield roster_file
|
||||
roster_file.unlink()
|
||||
|
||||
|
||||
@pytest.fixture(scope="module")
|
||||
def salt_ssh_cli(salt_master, terraform_roster_file, sshd_config_dir):
|
||||
"""
|
||||
The ``salt-ssh`` CLI as a fixture against the running master
|
||||
"""
|
||||
assert salt_master.is_running()
|
||||
return salt_master.salt_ssh_cli(
|
||||
roster_file=terraform_roster_file,
|
||||
target_host="*",
|
||||
client_key=str(sshd_config_dir / "client_key"),
|
||||
base_script_args=["--ignore-host-keys"],
|
||||
)
|
||||
|
||||
|
||||
def test_terraform_roster(salt_ssh_cli, minion_id):
|
||||
"""
|
||||
Test that the terraform roster operates as intended
|
||||
"""
|
||||
ret = salt_ssh_cli.run("--roster=terraform", "test.ping")
|
||||
assert ret.data.get(minion_id) is True
|
|
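# A rough sketch of the command the salt_ssh_cli fixture above boils down to; the
# real test drives salt_master.salt_ssh_cli rather than a raw subprocess, and the
# roster-file path shown here is illustrative.
import subprocess

result = subprocess.run(
    [
        "salt-ssh",
        "--roster=terraform",
        "--roster-file=/tmp/terraform_roster/terraform.tfstate",  # illustrative path
        "--ignore-host-keys",
        "*",
        "test.ping",
    ],
    capture_output=True,
    text=True,
    check=False,
)
print(result.stdout)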
@@ -50,6 +50,7 @@ def pkg_tests_account_environ(pkg_tests_account):
    return environ


@pytest.mark.skip("Great module migration")
def test_pip_install(salt_call_cli, install_salt, shell):
    """
    Test pip.install and ensure module can use installed library
@@ -2,8 +2,6 @@ import pytest

import salt.loader
import salt.loader.lazy
import salt.modules.boto_vpc
import salt.modules.virt


@pytest.fixture
@@ -12,9 +10,7 @@ def minion_mods(minion_opts):
    return salt.loader.minion_mods(minion_opts, utils=utils)


@pytest.mark.skipif(
    not salt.modules.boto_vpc.HAS_BOTO, reason="boto must be installed."
)
@pytest.mark.skip("Great module migration")
def test_load_boto_vpc(minion_mods):
    func = None
    try:
@@ -25,9 +21,7 @@ def test_load_boto_vpc(minion_mods):
    assert isinstance(func, salt.loader.lazy.LoadedFunc)


@pytest.mark.skipif(
    not salt.modules.virt.HAS_LIBVIRT, reason="libvirt-python must be installed."
)
@pytest.mark.skip("Great module migration")
def test_load_virt(minion_mods):
    func = None
    try:
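# A hedged sketch of what these lazy-loader tests exercise: building the minion
# module mapping and resolving a function through it. Using
# salt.config.minion_config(None) for default opts is an assumption here; the real
# tests rely on the minion_opts fixture, and "test.ping" stands in for the
# boto_vpc/virt functions they actually look up.
import salt.config
import salt.loader
import salt.loader.lazy

opts = salt.config.minion_config(None)
utils = salt.loader.utils(opts)
funcs = salt.loader.minion_mods(opts, utils=utils)

# Indexing the loader returns a lazily wrapped callable, not a bare function.
func = funcs["test.ping"]
assert isinstance(func, salt.loader.lazy.LoadedFunc)
assert func() is True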
File diff suppressed because it is too large
@ -1,85 +0,0 @@
|
|||
import logging
|
||||
|
||||
import pytest
|
||||
|
||||
import salt.modules.dockermod as docker_mod
|
||||
from tests.support.mock import MagicMock, patch
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def configure_loader_modules():
|
||||
return {
|
||||
docker_mod: {
|
||||
"__utils__": {
|
||||
"state.get_sls_opts": MagicMock(
|
||||
return_value={
|
||||
"pillarenv": MagicMock(),
|
||||
"pillar": {},
|
||||
"grains": {},
|
||||
}
|
||||
),
|
||||
"args.clean_kwargs": lambda **x: x,
|
||||
},
|
||||
"__salt__": {
|
||||
"config.option": MagicMock(return_value=None),
|
||||
"cmd.run": fake_run,
|
||||
},
|
||||
"__opts__": {"id": "dockermod-unit-test"},
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
def fake_run(*args, **kwargs):
|
||||
log.debug("Fake run call args: %s, kwargs: %s", args, kwargs)
|
||||
return "{}"
|
||||
|
||||
|
||||
def test_trans_tar_should_have_grains_in_sls_opts_including_pillar_override():
|
||||
container_name = "fnord"
|
||||
expected_grains = {
|
||||
"roscivs": "bottia",
|
||||
"fnord": "dronf",
|
||||
"salt": "NaCl",
|
||||
}
|
||||
expected_pillars = {
|
||||
"this": {"is": {"my": {"pillar": "data"}}},
|
||||
}
|
||||
extra_pillar_data = {"some": "extras"}
|
||||
fake_trans_tar = MagicMock(return_value=b"hi")
|
||||
patch_trans_tar = patch(
|
||||
"salt.modules.dockermod._prepare_trans_tar",
|
||||
fake_trans_tar,
|
||||
)
|
||||
patch_call = patch(
|
||||
"salt.modules.dockermod.call",
|
||||
MagicMock(return_value=expected_grains),
|
||||
)
|
||||
fake_get_pillar = MagicMock()
|
||||
fake_get_pillar.compile_pillar.return_value = expected_pillars
|
||||
patch_pillar = patch(
|
||||
"salt.modules.dockermod.salt.pillar.get_pillar",
|
||||
MagicMock(return_value=fake_get_pillar),
|
||||
)
|
||||
patch_run_all = patch(
|
||||
"salt.modules.dockermod.run_all",
|
||||
MagicMock(return_value={"retcode": 1, "stderr": "early exit test"}),
|
||||
)
|
||||
with patch_trans_tar, patch_call, patch_pillar, patch_run_all:
|
||||
docker_mod.sls(container_name, pillar=extra_pillar_data)
|
||||
# TODO: It would be fine if we could make this test require less magic numbers -W. Werner, 2019-08-27
|
||||
actual_sls_opts = fake_trans_tar.call_args[0][1]
|
||||
for (
|
||||
key,
|
||||
value,
|
||||
) in expected_grains.items():
|
||||
assert key in actual_sls_opts["grains"]
|
||||
assert value == actual_sls_opts["grains"][key]
|
||||
expected_pillars.update(extra_pillar_data)
|
||||
for (
|
||||
key,
|
||||
value,
|
||||
) in expected_pillars.items():
|
||||
assert key in actual_sls_opts["pillar"]
|
||||
assert value == actual_sls_opts["pillar"][key]
|
|
@ -1,67 +0,0 @@
|
|||
"""
|
||||
Tests for salt.modules.zfs on Solaris
|
||||
"""
|
||||
|
||||
import pytest
|
||||
|
||||
import salt.loader
|
||||
import salt.modules.zfs as zfs
|
||||
import salt.utils.zfs
|
||||
from tests.support.mock import MagicMock, patch
|
||||
from tests.support.zfs import ZFSMockData
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def utils_patch():
|
||||
return ZFSMockData().get_patched_utils()
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def configure_loader_modules(minion_opts):
|
||||
utils = salt.loader.utils(minion_opts, whitelist=["zfs"])
|
||||
zfs_obj = {
|
||||
zfs: {
|
||||
"__opts__": minion_opts,
|
||||
"__grains__": {
|
||||
"osarch": "sparcv9",
|
||||
"os_family": "Solaris",
|
||||
"osmajorrelease": 10,
|
||||
"kernel": "SunOS",
|
||||
"kernelrelease": 5.10,
|
||||
},
|
||||
"__utils__": utils,
|
||||
}
|
||||
}
|
||||
|
||||
return zfs_obj
|
||||
|
||||
|
||||
@pytest.mark.skip_unless_on_sunos
|
||||
def test_get_success_solaris():
|
||||
"""
|
||||
Tests zfs get success
|
||||
"""
|
||||
|
||||
cmd_out = {
|
||||
"pid": 7278,
|
||||
"retcode": 0,
|
||||
"stdout": "testpool\tmountpoint\t/testpool\tdefault",
|
||||
"stderr": "",
|
||||
}
|
||||
|
||||
run_all_mock = MagicMock(return_value=cmd_out)
|
||||
patches = {
|
||||
"cmd.run_all": run_all_mock,
|
||||
}
|
||||
with patch.dict(zfs.__salt__, patches):
|
||||
with patch("sys.platform", MagicMock(return_value="sunos5")):
|
||||
result = zfs.get("testpool", type="filesystem", properties="mountpoint")
|
||||
assert result == {
|
||||
"testpool": {
|
||||
"mountpoint": {"value": "/testpool", "source": "default"},
|
||||
},
|
||||
}
|
||||
run_all_mock.assert_called_once_with(
|
||||
"/usr/sbin/zfs get -H -o name,property,value,source mountpoint testpool",
|
||||
python_shell=False,
|
||||
)
|
|
@ -1,67 +0,0 @@
|
|||
"""
|
||||
Tests for salt.modules.zfs on Solaris
|
||||
"""
|
||||
|
||||
import pytest
|
||||
|
||||
import salt.loader
|
||||
import salt.modules.zfs as zfs
|
||||
import salt.utils.zfs
|
||||
from tests.support.mock import MagicMock, patch
|
||||
from tests.support.zfs import ZFSMockData
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def utils_patch():
|
||||
return ZFSMockData().get_patched_utils()
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def configure_loader_modules(minion_opts):
|
||||
utils = salt.loader.utils(minion_opts, whitelist=["zfs"])
|
||||
zfs_obj = {
|
||||
zfs: {
|
||||
"__opts__": minion_opts,
|
||||
"__grains__": {
|
||||
"osarch": "sparcv9",
|
||||
"os_family": "Solaris",
|
||||
"osmajorrelease": 11,
|
||||
"kernel": "SunOS",
|
||||
"kernelrelease": 5.11,
|
||||
},
|
||||
"__utils__": utils,
|
||||
}
|
||||
}
|
||||
|
||||
return zfs_obj
|
||||
|
||||
|
||||
@pytest.mark.skip_unless_on_sunos
|
||||
def test_get_success_solaris():
|
||||
"""
|
||||
Tests zfs get success
|
||||
"""
|
||||
|
||||
cmd_out = {
|
||||
"pid": 7278,
|
||||
"retcode": 0,
|
||||
"stdout": "testpool\tmountpoint\t/testpool\tdefault",
|
||||
"stderr": "",
|
||||
}
|
||||
|
||||
run_all_mock = MagicMock(return_value=cmd_out)
|
||||
patches = {
|
||||
"cmd.run_all": run_all_mock,
|
||||
}
|
||||
with patch.dict(zfs.__salt__, patches):
|
||||
with patch("sys.platform", MagicMock(return_value="sunos5")):
|
||||
result = zfs.get("testpool", type="filesystem", properties="mountpoint")
|
||||
assert result == {
|
||||
"testpool": {
|
||||
"mountpoint": {"value": "/testpool", "source": "default"},
|
||||
},
|
||||
}
|
||||
run_all_mock.assert_called_once_with(
|
||||
"/usr/sbin/zfs get -H -o name,property,value,source mountpoint testpool",
|
||||
python_shell=False,
|
||||
)
|
|
@ -1,401 +0,0 @@
|
|||
import xml.etree.ElementTree as ET
|
||||
|
||||
import pytest
|
||||
|
||||
import salt.modules.config as config
|
||||
import salt.modules.virt as virt
|
||||
from tests.support.mock import MagicMock
|
||||
|
||||
|
||||
class LibvirtMock(MagicMock): # pylint: disable=too-many-ancestors
|
||||
"""
|
||||
Libvirt library mock
|
||||
"""
|
||||
|
||||
class virDomain(MagicMock):
|
||||
"""
|
||||
virDomain mock
|
||||
"""
|
||||
|
||||
class libvirtError(Exception):
|
||||
"""
|
||||
libvirtError mock
|
||||
"""
|
||||
|
||||
def __init__(self, msg):
|
||||
super().__init__(msg)
|
||||
self.msg = msg
|
||||
|
||||
def get_error_message(self):
|
||||
return self.msg
|
||||
|
||||
|
||||
class MappedResultMock(MagicMock):
|
||||
"""
|
||||
Mock class consistently return the same mock object based on the first argument.
|
||||
"""
|
||||
|
||||
_instances = {}
|
||||
|
||||
def __init__(self):
|
||||
def mapped_results(*args, **kwargs):
|
||||
if args[0] not in self._instances:
|
||||
raise virt.libvirt.libvirtError(f"Not found: {args[0]}")
|
||||
return self._instances[args[0]]
|
||||
|
||||
super().__init__(side_effect=mapped_results)
|
||||
|
||||
def add(self, name, value=None):
|
||||
self._instances[name] = value or MagicMock()
|
||||
|
||||
|
||||
def loader_modules_config():
|
||||
# Create libvirt mock and connection mock
|
||||
mock_libvirt = LibvirtMock()
|
||||
mock_conn = MagicMock()
|
||||
mock_conn.getStoragePoolCapabilities.return_value = "<storagepoolCapabilities/>"
|
||||
|
||||
mock_libvirt.openAuth.return_value = mock_conn
|
||||
return {
|
||||
virt: {
|
||||
"libvirt": mock_libvirt,
|
||||
"__salt__": {"config.get": config.get, "config.option": config.option},
|
||||
},
|
||||
config: {},
|
||||
}
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def make_mock_vm():
|
||||
def _make_mock_vm(xml_def=None, running=False, inactive_def=None):
|
||||
mocked_conn = virt.libvirt.openAuth.return_value
|
||||
|
||||
desc = xml_def
|
||||
if not desc:
|
||||
desc = """
|
||||
<domain type='kvm' id='7'>
|
||||
<name>my_vm</name>
|
||||
<memory unit='KiB'>1048576</memory>
|
||||
<currentMemory unit='KiB'>1048576</currentMemory>
|
||||
<vcpu placement='auto'>1</vcpu>
|
||||
<on_reboot>restart</on_reboot>
|
||||
<os>
|
||||
<type arch='x86_64' machine='pc-i440fx-2.6'>hvm</type>
|
||||
</os>
|
||||
</domain>
|
||||
"""
|
||||
doc = ET.fromstring(desc)
|
||||
name = doc.find("name").text
|
||||
os_type = "hvm"
|
||||
os_type_node = doc.find("os/type")
|
||||
if os_type_node is not None:
|
||||
os_type = os_type_node.text
|
||||
|
||||
mocked_conn.listDefinedDomains.return_value = [name]
|
||||
|
||||
# Configure the mocked domain
|
||||
if not isinstance(mocked_conn.lookupByName, MappedResultMock):
|
||||
mocked_conn.lookupByName = MappedResultMock()
|
||||
mocked_conn.lookupByName.add(name)
|
||||
domain_mock = mocked_conn.lookupByName(name)
|
||||
|
||||
domain_mock.XMLDesc = MappedResultMock()
|
||||
domain_mock.XMLDesc.add(0, desc)
|
||||
domain_mock.XMLDesc.add(
|
||||
virt.libvirt.VIR_DOMAIN_XML_INACTIVE, inactive_def or desc
|
||||
)
|
||||
domain_mock.OSType.return_value = os_type
|
||||
|
||||
# Return state as shutdown
|
||||
domain_mock.info.return_value = [
|
||||
0 if running else 4,
|
||||
2048 * 1024,
|
||||
1024 * 1024,
|
||||
2,
|
||||
1234,
|
||||
]
|
||||
domain_mock.ID.return_value = 1
|
||||
domain_mock.name.return_value = name
|
||||
|
||||
domain_mock.attachDevice.return_value = 0
|
||||
domain_mock.detachDevice.return_value = 0
|
||||
domain_mock.setMemoryFlags.return_value = 0
|
||||
domain_mock.setVcpusFlags.return_value = 0
|
||||
|
||||
domain_mock.connect.return_value = mocked_conn
|
||||
|
||||
return domain_mock
|
||||
|
||||
return _make_mock_vm
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def make_mock_storage_pool():
|
||||
def _make_mock_storage_pool(name, type, volumes, source=None):
|
||||
mocked_conn = virt.libvirt.openAuth.return_value
|
||||
|
||||
# Append the pool name to the list of known mocked pools
|
||||
all_pools = mocked_conn.listStoragePools.return_value
|
||||
if not isinstance(all_pools, list):
|
||||
all_pools = []
|
||||
all_pools.append(name)
|
||||
mocked_conn.listStoragePools.return_value = all_pools
|
||||
|
||||
# Ensure we have mapped results for the pools
|
||||
if not isinstance(mocked_conn.storagePoolLookupByName, MappedResultMock):
|
||||
mocked_conn.storagePoolLookupByName = MappedResultMock()
|
||||
|
||||
# Configure the pool
|
||||
mocked_conn.storagePoolLookupByName.add(name)
|
||||
mocked_pool = mocked_conn.storagePoolLookupByName(name)
|
||||
source_def = source
|
||||
if not source and type == "disk":
|
||||
source = f"<device path='/dev/{name}'/>"
|
||||
pool_path = f"/path/to/{name}"
|
||||
mocked_pool.XMLDesc.return_value = """
|
||||
<pool type='{}'>
|
||||
<source>
|
||||
{}
|
||||
</source>
|
||||
<target>
|
||||
<path>{}</path>
|
||||
</target>
|
||||
</pool>
|
||||
""".format(
|
||||
type, source, pool_path
|
||||
)
|
||||
mocked_pool.name.return_value = name
|
||||
mocked_pool.info.return_value = [
|
||||
virt.libvirt.VIR_STORAGE_POOL_RUNNING,
|
||||
]
|
||||
|
||||
# Append the pool to the listAllStoragePools list
|
||||
all_pools_obj = mocked_conn.listAllStoragePools.return_value
|
||||
if not isinstance(all_pools_obj, list):
|
||||
all_pools_obj = []
|
||||
all_pools_obj.append(mocked_pool)
|
||||
mocked_conn.listAllStoragePools.return_value = all_pools_obj
|
||||
|
||||
# Configure the volumes
|
||||
if not isinstance(mocked_pool.storageVolLookupByName, MappedResultMock):
|
||||
mocked_pool.storageVolLookupByName = MappedResultMock()
|
||||
mocked_pool.listVolumes.return_value = volumes
|
||||
|
||||
all_volumes = []
|
||||
for volume in volumes:
|
||||
mocked_pool.storageVolLookupByName.add(volume)
|
||||
mocked_vol = mocked_pool.storageVolLookupByName(volume)
|
||||
vol_path = f"{pool_path}/{volume}"
|
||||
mocked_vol.XMLDesc.return_value = """
|
||||
<volume>
|
||||
<target>
|
||||
<path>{}</path>
|
||||
</target>
|
||||
</volume>
|
||||
""".format(
|
||||
vol_path,
|
||||
)
|
||||
mocked_vol.path.return_value = vol_path
|
||||
mocked_vol.name.return_value = volume
|
||||
|
||||
mocked_vol.info.return_value = [
|
||||
0,
|
||||
1234567,
|
||||
12345,
|
||||
]
|
||||
all_volumes.append(mocked_vol)
|
||||
|
||||
# Set the listAllVolumes return_value
|
||||
mocked_pool.listAllVolumes.return_value = all_volumes
|
||||
return mocked_pool
|
||||
|
||||
return _make_mock_storage_pool
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def make_capabilities():
|
||||
def _make_capabilities():
|
||||
mocked_conn = virt.libvirt.openAuth.return_value
|
||||
mocked_conn.getCapabilities.return_value = """
|
||||
<capabilities>
|
||||
<host>
|
||||
<uuid>44454c4c-3400-105a-8033-b3c04f4b344a</uuid>
|
||||
<cpu>
|
||||
<arch>x86_64</arch>
|
||||
<model>Nehalem</model>
|
||||
<vendor>Intel</vendor>
|
||||
<microcode version='25'/>
|
||||
<topology sockets='1' cores='4' threads='2'/>
|
||||
<feature name='vme'/>
|
||||
<feature name='ds'/>
|
||||
<feature name='acpi'/>
|
||||
<pages unit='KiB' size='4'/>
|
||||
<pages unit='KiB' size='2048'/>
|
||||
</cpu>
|
||||
<power_management>
|
||||
<suspend_mem/>
|
||||
<suspend_disk/>
|
||||
<suspend_hybrid/>
|
||||
</power_management>
|
||||
<migration_features>
|
||||
<live/>
|
||||
<uri_transports>
|
||||
<uri_transport>tcp</uri_transport>
|
||||
<uri_transport>rdma</uri_transport>
|
||||
</uri_transports>
|
||||
</migration_features>
|
||||
<topology>
|
||||
<cells num='1'>
|
||||
<cell id='0'>
|
||||
<memory unit='KiB'>12367120</memory>
|
||||
<pages unit='KiB' size='4'>3091780</pages>
|
||||
<pages unit='KiB' size='2048'>0</pages>
|
||||
<distances>
|
||||
<sibling id='0' value='10'/>
|
||||
</distances>
|
||||
<cpus num='8'>
|
||||
<cpu id='0' socket_id='0' core_id='0' siblings='0,4'/>
|
||||
<cpu id='1' socket_id='0' core_id='1' siblings='1,5'/>
|
||||
<cpu id='2' socket_id='0' core_id='2' siblings='2,6'/>
|
||||
<cpu id='3' socket_id='0' core_id='3' siblings='3,7'/>
|
||||
<cpu id='4' socket_id='0' core_id='0' siblings='0,4'/>
|
||||
<cpu id='5' socket_id='0' core_id='1' siblings='1,5'/>
|
||||
<cpu id='6' socket_id='0' core_id='2' siblings='2,6'/>
|
||||
<cpu id='7' socket_id='0' core_id='3' siblings='3,7'/>
|
||||
</cpus>
|
||||
</cell>
|
||||
</cells>
|
||||
</topology>
|
||||
<cache>
|
||||
<bank id='0' level='3' type='both' size='8' unit='MiB' cpus='0-7'/>
|
||||
</cache>
|
||||
<secmodel>
|
||||
<model>apparmor</model>
|
||||
<doi>0</doi>
|
||||
</secmodel>
|
||||
<secmodel>
|
||||
<model>dac</model>
|
||||
<doi>0</doi>
|
||||
<baselabel type='kvm'>+487:+486</baselabel>
|
||||
<baselabel type='qemu'>+487:+486</baselabel>
|
||||
</secmodel>
|
||||
</host>
|
||||
|
||||
<guest>
|
||||
<os_type>hvm</os_type>
|
||||
<arch name='i686'>
|
||||
<wordsize>32</wordsize>
|
||||
<emulator>/usr/bin/qemu-system-i386</emulator>
|
||||
<machine maxCpus='255'>pc-i440fx-2.6</machine>
|
||||
<machine canonical='pc-i440fx-2.6' maxCpus='255'>pc</machine>
|
||||
<machine maxCpus='255'>pc-0.12</machine>
|
||||
<domain type='qemu'/>
|
||||
<domain type='kvm'>
|
||||
<emulator>/usr/bin/qemu-kvm</emulator>
|
||||
<machine maxCpus='255'>pc-i440fx-2.6</machine>
|
||||
<machine canonical='pc-i440fx-2.6' maxCpus='255'>pc</machine>
|
||||
<machine maxCpus='255'>pc-0.12</machine>
|
||||
</domain>
|
||||
</arch>
|
||||
<features>
|
||||
<cpuselection/>
|
||||
<deviceboot/>
|
||||
<disksnapshot default='on' toggle='no'/>
|
||||
<acpi default='on' toggle='yes'/>
|
||||
<apic default='on' toggle='no'/>
|
||||
<pae/>
|
||||
<nonpae/>
|
||||
</features>
|
||||
</guest>
|
||||
|
||||
<guest>
|
||||
<os_type>hvm</os_type>
|
||||
<arch name='x86_64'>
|
||||
<wordsize>64</wordsize>
|
||||
<emulator>/usr/bin/qemu-system-x86_64</emulator>
|
||||
<machine maxCpus='255'>pc-i440fx-2.6</machine>
|
||||
<machine canonical='pc-i440fx-2.6' maxCpus='255'>pc</machine>
|
||||
<machine maxCpus='255'>pc-0.12</machine>
|
||||
<domain type='qemu'/>
|
||||
<domain type='kvm'>
|
||||
<emulator>/usr/bin/qemu-kvm</emulator>
|
||||
<machine maxCpus='255'>pc-i440fx-2.6</machine>
|
||||
<machine canonical='pc-i440fx-2.6' maxCpus='255'>pc</machine>
|
||||
<machine maxCpus='255'>pc-0.12</machine>
|
||||
</domain>
|
||||
</arch>
|
||||
<features>
|
||||
<cpuselection/>
|
||||
<deviceboot/>
|
||||
<disksnapshot default='on' toggle='no'/>
|
||||
<acpi default='on' toggle='yes'/>
|
||||
<apic default='on' toggle='no'/>
|
||||
</features>
|
||||
</guest>
|
||||
|
||||
</capabilities>"""
|
||||
|
||||
return _make_capabilities
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def make_mock_network():
|
||||
def _make_mock_net(xml_def):
|
||||
mocked_conn = virt.libvirt.openAuth.return_value
|
||||
|
||||
doc = ET.fromstring(xml_def)
|
||||
name = doc.find("name").text
|
||||
|
||||
if not isinstance(mocked_conn.networkLookupByName, MappedResultMock):
|
||||
mocked_conn.networkLookupByName = MappedResultMock()
|
||||
mocked_conn.networkLookupByName.add(name)
|
||||
net_mock = mocked_conn.networkLookupByName(name)
|
||||
net_mock.XMLDesc.return_value = xml_def
|
||||
|
||||
# libvirt defaults the autostart to unset
|
||||
net_mock.autostart.return_value = 0
|
||||
|
||||
# Append the network to listAllNetworks return value
|
||||
all_nets = mocked_conn.listAllNetworks.return_value
|
||||
if not isinstance(all_nets, list):
|
||||
all_nets = []
|
||||
all_nets.append(net_mock)
|
||||
mocked_conn.listAllNetworks.return_value = all_nets
|
||||
|
||||
return net_mock
|
||||
|
||||
return _make_mock_net
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def make_mock_device():
|
||||
"""
|
||||
Create a mock host device
|
||||
"""
|
||||
|
||||
def _make_mock_device(xml_def):
|
||||
mocked_conn = virt.libvirt.openAuth.return_value
|
||||
if not isinstance(mocked_conn.nodeDeviceLookupByName, MappedResultMock):
|
||||
mocked_conn.nodeDeviceLookupByName = MappedResultMock()
|
||||
|
||||
doc = ET.fromstring(xml_def)
|
||||
name = doc.find("./name").text
|
||||
|
||||
mocked_conn.nodeDeviceLookupByName.add(name)
|
||||
mocked_device = mocked_conn.nodeDeviceLookupByName(name)
|
||||
mocked_device.name.return_value = name
|
||||
mocked_device.XMLDesc.return_value = xml_def
|
||||
mocked_device.listCaps.return_value = [
|
||||
cap.get("type") for cap in doc.findall("./capability")
|
||||
]
|
||||
return mocked_device
|
||||
|
||||
return _make_mock_device
|
||||
|
||||
|
||||
@pytest.fixture(params=[True, False], ids=["test", "notest"])
|
||||
def test(request):
|
||||
"""
|
||||
Run the test with both True and False test values
|
||||
"""
|
||||
return request.param
|
|
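# A self-contained sketch of the MappedResultMock idea used by the conftest above:
# one stable mock per lookup name. The class name and the KeyError for unknown
# names are illustrative; the real fixture raises libvirtError instead.
from unittest.mock import MagicMock


class NamedLookupMock(MagicMock):
    _instances = {}  # shared mapping, mirroring the fixture's class attribute

    def __init__(self):
        def mapped(name, *args, **kwargs):
            return self._instances[name]  # unknown names raise KeyError

        super().__init__(side_effect=mapped)

    def add(self, name, value=None):
        self._instances[name] = value or MagicMock()


lookup = NamedLookupMock()
lookup.add("my_vm")
assert lookup("my_vm") is lookup("my_vm")  # the same object on every call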
@ -1,66 +0,0 @@
|
|||
import pytest
|
||||
|
||||
import salt.utils.json
|
||||
from salt.modules import http
|
||||
from salt.pillar import http_json
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def configure_loader_modules():
|
||||
return {
|
||||
http_json: {
|
||||
"__salt__": {
|
||||
"http.query": http.query,
|
||||
},
|
||||
},
|
||||
http: {
|
||||
"__opts__": {},
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
@pytest.mark.requires_network
|
||||
@pytest.mark.parametrize("backend", ["requests", "tornado", "urllib2"])
|
||||
def test_ext_pillar_can_take_http_query_kwargs(backend, httpserver):
|
||||
response = {
|
||||
"dict": {
|
||||
"backend": backend,
|
||||
"pillar_type": "http_json",
|
||||
},
|
||||
}
|
||||
header_dict = {"custom-backend-header": backend}
|
||||
|
||||
# If the headers in header_dict are not in the request, httpserver will return an empty dictionary, so we know it will fail
|
||||
httpserver.expect_request(
|
||||
f"/http_json_pillar/{backend}",
|
||||
headers={"custom-backend-header": backend},
|
||||
).respond_with_data(salt.utils.json.dumps(response), content_type="text/plain")
|
||||
url = httpserver.url_for(f"/http_json_pillar/{backend}")
|
||||
|
||||
actual = http_json.ext_pillar("test-minion-id", {}, url, header_dict=header_dict)
|
||||
assert actual == response
|
||||
|
||||
|
||||
@pytest.mark.requires_network
|
||||
@pytest.mark.parametrize("backend", ["requests", "tornado", "urllib2"])
|
||||
def test_ext_pillar_namespace(backend, httpserver):
|
||||
response = {
|
||||
"dict": {
|
||||
"backend": backend,
|
||||
"pillar_type": "http_json",
|
||||
},
|
||||
}
|
||||
header_dict = {"custom-backend-header": backend}
|
||||
namespace = "test_namespace"
|
||||
|
||||
# If the headers in header_dict are not in the request, httpserver will return an empty dictionary, so we know it will fail
|
||||
httpserver.expect_request(
|
||||
f"/http_json_pillar/{backend}",
|
||||
headers={"custom-backend-header": backend},
|
||||
).respond_with_data(salt.utils.json.dumps(response), content_type="text/plain")
|
||||
url = httpserver.url_for(f"/http_json_pillar/{backend}")
|
||||
|
||||
actual = http_json.ext_pillar(
|
||||
"test-minion-id", {}, url, header_dict=header_dict, namespace=namespace
|
||||
)
|
||||
assert actual == {namespace: response}
|
|
@@ -1,41 +0,0 @@
import pytest

import salt.utils.json
from salt.modules import http
from salt.pillar import http_yaml


@pytest.fixture
def configure_loader_modules():
    return {
        http_yaml: {
            "__salt__": {
                "http.query": http.query,
            },
        },
        http: {
            "__opts__": {},
        },
    }


@pytest.mark.requires_network
@pytest.mark.parametrize("backend", ["requests", "tornado", "urllib2"])
def test_ext_pillar_can_take_http_query_kwargs(backend, httpserver):
    response = {
        "dict": {
            "backend": backend,
            "pillar_type": "http_yaml",
        },
    }
    header_dict = {"custom-backend-header": backend}

    # If the headers in header_dict are not in the request, httpserver will return an empty dictionary, so we know it will fail
    httpserver.expect_request(
        f"/http_yaml_pillar/{backend}",
        headers={"custom-backend-header": backend},
    ).respond_with_data(salt.utils.json.dumps(response), content_type="text/plain")
    url = httpserver.url_for(f"/http_yaml_pillar/{backend}")

    actual = http_yaml.ext_pillar("test-minion-id", {}, url, header_dict=header_dict)
    assert actual == response
@ -1,118 +0,0 @@
|
|||
"""
|
||||
Unit tests for the splunk returner
|
||||
"""
|
||||
|
||||
import json
|
||||
|
||||
import pytest
|
||||
|
||||
import salt.modules.config as config
|
||||
import salt.returners.splunk as splunk
|
||||
from tests.support.mock import MagicMock, patch
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def configure_loader_modules():
|
||||
opts = {
|
||||
"splunk_http_forwarder": {
|
||||
"token": "TheToken",
|
||||
"indexer": "the.splunk.domain",
|
||||
"index": "TheIndex",
|
||||
"sourcetype": "TheSourceType",
|
||||
}
|
||||
}
|
||||
return {
|
||||
splunk: {"__opts__": opts, "__salt__": {"config.get": config.get}},
|
||||
config: {"__opts__": opts},
|
||||
}
|
||||
|
||||
|
||||
def test_verify_ssl_defaults_to_true():
|
||||
payload = {"some": "payload"}
|
||||
requests_post = MagicMock()
|
||||
ts = 1234565789
|
||||
host = "TheHostName"
|
||||
data = {
|
||||
"time": str(ts),
|
||||
"index": "TheIndex",
|
||||
"sourcetype": "TheSourceType",
|
||||
"event": payload,
|
||||
"host": host,
|
||||
}
|
||||
with patch("salt.returners.splunk.time.time", MagicMock(return_value=ts)), patch(
|
||||
"salt.returners.splunk.socket.gethostname", MagicMock(return_value=host)
|
||||
), patch("requests.post", requests_post):
|
||||
splunk.returner(payload.copy())
|
||||
assert json.loads(requests_post.call_args_list[0][1]["data"]) == data
|
||||
assert requests_post.call_args_list[0][1]["verify"]
|
||||
assert requests_post.call_args_list[0][1]["headers"] == {
|
||||
"Authorization": "Splunk TheToken"
|
||||
}
|
||||
assert (
|
||||
requests_post.call_args_list[0][0][0]
|
||||
== "https://the.splunk.domain:8088/services/collector/event"
|
||||
)
|
||||
|
||||
|
||||
def test_verify_ssl():
|
||||
payload = {"some": "payload"}
|
||||
verify_ssl_values = [True, False, None]
|
||||
payload = {"some": "payload"}
|
||||
ts = 1234565789
|
||||
host = "TheHostName"
|
||||
data = {
|
||||
"time": str(ts),
|
||||
"index": "TheIndex",
|
||||
"sourcetype": "TheSourceType",
|
||||
"event": payload,
|
||||
"host": host,
|
||||
}
|
||||
for verify_ssl in verify_ssl_values:
|
||||
requests_post = MagicMock()
|
||||
with patch(
|
||||
"salt.returners.splunk.time.time", MagicMock(return_value=ts)
|
||||
), patch(
|
||||
"salt.returners.splunk.socket.gethostname", MagicMock(return_value=host)
|
||||
), patch(
|
||||
"requests.post", requests_post
|
||||
), patch.dict(
|
||||
splunk.__opts__["splunk_http_forwarder"], verify_ssl=verify_ssl
|
||||
):
|
||||
splunk.returner(payload.copy())
|
||||
assert json.loads(requests_post.call_args_list[0][1]["data"]) == data
|
||||
assert requests_post.call_args_list[0][1]["verify"] == verify_ssl
|
||||
assert requests_post.call_args_list[0][1]["headers"] == {
|
||||
"Authorization": "Splunk TheToken"
|
||||
}
|
||||
assert (
|
||||
requests_post.call_args_list[0][0][0]
|
||||
== "https://the.splunk.domain:8088/services/collector/event"
|
||||
)
|
||||
|
||||
|
||||
def test_verify_event_returner():
|
||||
payload = [{"some": "payload"}, {"another": "event"}]
|
||||
ts = 1234565789
|
||||
host = "TheHostName"
|
||||
verify_ssl = True
|
||||
|
||||
requests_post = MagicMock()
|
||||
with patch("salt.returners.splunk.time.time", MagicMock(return_value=ts)), patch(
|
||||
"salt.returners.splunk.socket.gethostname", MagicMock(return_value=host)
|
||||
), patch("requests.post", requests_post), patch.dict(
|
||||
splunk.__opts__["splunk_http_forwarder"], verify_ssl=verify_ssl
|
||||
):
|
||||
splunk.event_return(payload)
|
||||
for i in range(len(payload)):
|
||||
assert (
|
||||
json.loads(requests_post.call_args_list[0][1]["data"])["event"]
|
||||
in payload
|
||||
)
|
||||
assert requests_post.call_args_list[0][1]["verify"] == verify_ssl
|
||||
assert requests_post.call_args_list[0][1]["headers"] == {
|
||||
"Authorization": "Splunk TheToken"
|
||||
}
|
||||
assert (
|
||||
requests_post.call_args_list[0][0][0]
|
||||
== "https://the.splunk.domain:8088/services/collector/event"
|
||||
)
|
|
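# A hedged sketch of the HTTP call these returner tests assert on: a POST to the
# Splunk HTTP Event Collector with a "Splunk <token>" Authorization header. The
# indexer, token, index, and sourcetype values come from the fixture above; the
# helper name and timeout are illustrative.
import json
import socket
import time

import requests


def send_to_splunk_hec(payload, verify_ssl=True):
    event = {
        "time": str(int(time.time())),
        "index": "TheIndex",
        "sourcetype": "TheSourceType",
        "event": payload,
        "host": socket.gethostname(),
    }
    return requests.post(
        "https://the.splunk.domain:8088/services/collector/event",
        data=json.dumps(event),
        headers={"Authorization": "Splunk TheToken"},
        verify=verify_ssl,
        timeout=30,
    )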
@ -6,10 +6,7 @@ import yaml as _yaml
|
|||
|
||||
import salt.serializers.configparser as configparser
|
||||
import salt.serializers.json as json
|
||||
import salt.serializers.keyvalue as keyvalue
|
||||
import salt.serializers.msgpack as msgpack
|
||||
import salt.serializers.plist as plist
|
||||
import salt.serializers.python as python
|
||||
import salt.serializers.tomlmod as tomlmod
|
||||
import salt.serializers.yaml as yaml
|
||||
import salt.serializers.yamlex as yamlex
|
||||
|
@ -338,10 +335,10 @@ def test_msgpack():
|
|||
assert deserialized == data, deserialized
|
||||
|
||||
|
||||
@pytest.mark.skipif(python.available is False, reason=SKIP_MESSAGE.format("python"))
|
||||
@pytest.mark.skip("Great module migration")
|
||||
def test_serialize_python():
|
||||
data = {"foo": "bar"}
|
||||
serialized = python.serialize(data)
|
||||
serialized = python.serialize(data) # pylint: disable=undefined-variable
|
||||
expected = repr({"foo": "bar"})
|
||||
assert serialized == expected, serialized
|
||||
|
||||
|
@ -369,10 +366,10 @@ def test_serialize_toml():
|
|||
assert deserialized == data, deserialized
|
||||
|
||||
|
||||
@pytest.mark.skipif(plist.available is False, reason=SKIP_MESSAGE.format("plist"))
|
||||
@pytest.mark.skip("Great module migration")
|
||||
def test_serialize_plist():
|
||||
data = {"foo": "bar"}
|
||||
serialized = plist.serialize(data)
|
||||
serialized = plist.serialize(data) # pylint: disable=undefined-variable
|
||||
expected = (
|
||||
b'<?xml version="1.0" encoding="UTF-8"?>\n'
|
||||
b'<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN"'
|
||||
|
@ -386,46 +383,62 @@ def test_serialize_plist():
|
|||
)
|
||||
assert serialized == expected, serialized
|
||||
|
||||
deserialized = plist.deserialize(serialized)
|
||||
deserialized = plist.deserialize(serialized) # pylint: disable=undefined-variable
|
||||
assert deserialized == data, deserialized
|
||||
|
||||
|
||||
@pytest.mark.skipif(plist.available is False, reason=SKIP_MESSAGE.format("plist"))
|
||||
@pytest.mark.skip("Great module migration")
|
||||
def test_serialize_binary_plist():
|
||||
data = {"foo": "bar"}
|
||||
serialized = plist.serialize(data, fmt="FMT_BINARY")
|
||||
serialized = plist.serialize( # pylint: disable=undefined-variable
|
||||
data, fmt="FMT_BINARY"
|
||||
)
|
||||
|
||||
deserialized = plist.deserialize(serialized)
|
||||
deserialized = plist.deserialize(serialized) # pylint: disable=undefined-variable
|
||||
assert deserialized == data, deserialized
|
||||
|
||||
|
||||
@pytest.mark.skip("Great module migration")
|
||||
def test_serialize_keyvalue():
|
||||
data = {"foo": "bar baz"}
|
||||
serialized = keyvalue.serialize(data)
|
||||
serialized = keyvalue.serialize(data) # pylint: disable=undefined-variable
|
||||
assert serialized == "foo=bar baz", serialized
|
||||
|
||||
deserialized = keyvalue.deserialize(serialized)
|
||||
deserialized = keyvalue.deserialize( # pylint: disable=undefined-variable
|
||||
serialized
|
||||
)
|
||||
assert deserialized == data, deserialized
|
||||
|
||||
|
||||
@pytest.mark.skip("Great module migration")
|
||||
def test_serialize_keyvalue_quoting():
|
||||
data = {"foo": "bar baz"}
|
||||
serialized = keyvalue.serialize(data, quoting=True)
|
||||
serialized = keyvalue.serialize( # pylint: disable=undefined-variable
|
||||
data, quoting=True
|
||||
)
|
||||
assert serialized == "foo='bar baz'", serialized
|
||||
|
||||
deserialized = keyvalue.deserialize(serialized, quoting=False)
|
||||
deserialized = keyvalue.deserialize( # pylint: disable=undefined-variable
|
||||
serialized, quoting=False
|
||||
)
|
||||
assert deserialized == data, deserialized
|
||||
|
||||
|
||||
@pytest.mark.skip("Great module migration")
|
||||
def test_serialize_keyvalue_separator():
|
||||
data = {"foo": "bar baz"}
|
||||
serialized = keyvalue.serialize(data, separator=" = ")
|
||||
serialized = keyvalue.serialize( # pylint: disable=undefined-variable
|
||||
data, separator=" = "
|
||||
)
|
||||
assert serialized == "foo = bar baz", serialized
|
||||
|
||||
deserialized = keyvalue.deserialize(serialized, separator=" = ")
|
||||
deserialized = keyvalue.deserialize( # pylint: disable=undefined-variable
|
||||
serialized, separator=" = "
|
||||
)
|
||||
assert deserialized == data, deserialized
|
||||
|
||||
|
||||
@pytest.mark.skip("Great module migration")
|
||||
def test_serialize_keyvalue_list_of_lists():
|
||||
if salt.utils.platform.is_windows():
|
||||
linend = "\r\n"
|
||||
|
@ -433,8 +446,10 @@ def test_serialize_keyvalue_list_of_lists():
|
|||
linend = "\n"
|
||||
data = [["foo", "bar baz"], ["salt", "rocks"]]
|
||||
expected = {"foo": "bar baz", "salt": "rocks"}
|
||||
serialized = keyvalue.serialize(data)
|
||||
serialized = keyvalue.serialize(data) # pylint: disable=undefined-variable
|
||||
assert serialized == f"foo=bar baz{linend}salt=rocks", serialized
|
||||
|
||||
deserialized = keyvalue.deserialize(serialized)
|
||||
deserialized = keyvalue.deserialize( # pylint: disable=undefined-variable
|
||||
serialized
|
||||
)
|
||||
assert deserialized == expected, deserialized
|
||||
|
|
|
@ -5,8 +5,6 @@ import pytest
|
|||
|
||||
import salt.serializers.json as jsonserializer
|
||||
import salt.serializers.msgpack as msgpackserializer
|
||||
import salt.serializers.plist as plistserializer
|
||||
import salt.serializers.python as pythonserializer
|
||||
import salt.serializers.yaml as yamlserializer
|
||||
import salt.states.file as filestate
|
||||
from salt.exceptions import CommandExecutionError
|
||||
|
@ -24,9 +22,7 @@ def configure_loader_modules():
|
|||
"__serializers__": {
|
||||
"yaml.serialize": yamlserializer.serialize,
|
||||
"yaml.seserialize": yamlserializer.serialize,
|
||||
"python.serialize": pythonserializer.serialize,
|
||||
"json.serialize": jsonserializer.serialize,
|
||||
"plist.serialize": plistserializer.serialize,
|
||||
"msgpack.serialize": msgpackserializer.serialize,
|
||||
},
|
||||
"__opts__": {"test": False, "cachedir": ""},
|
||||
|
|
|
@ -5,8 +5,6 @@ import pytest
|
|||
|
||||
import salt.serializers.json as jsonserializer
|
||||
import salt.serializers.msgpack as msgpackserializer
|
||||
import salt.serializers.plist as plistserializer
|
||||
import salt.serializers.python as pythonserializer
|
||||
import salt.serializers.yaml as yamlserializer
|
||||
import salt.states.file as filestate
|
||||
import salt.utils.files
|
||||
|
@ -28,9 +26,7 @@ def configure_loader_modules():
|
|||
"__serializers__": {
|
||||
"yaml.serialize": yamlserializer.serialize,
|
||||
"yaml.seserialize": yamlserializer.serialize,
|
||||
"python.serialize": pythonserializer.serialize,
|
||||
"json.serialize": jsonserializer.serialize,
|
||||
"plist.serialize": plistserializer.serialize,
|
||||
"msgpack.serialize": msgpackserializer.serialize,
|
||||
},
|
||||
"__opts__": {"test": False, "cachedir": ""},
|
||||
|
|
|
@ -6,8 +6,6 @@ import pytest
|
|||
|
||||
import salt.serializers.json as jsonserializer
|
||||
import salt.serializers.msgpack as msgpackserializer
|
||||
import salt.serializers.plist as plistserializer
|
||||
import salt.serializers.python as pythonserializer
|
||||
import salt.serializers.yaml as yamlserializer
|
||||
import salt.states.file as filestate
|
||||
import salt.utils.files
|
||||
|
@ -29,9 +27,7 @@ def configure_loader_modules():
|
|||
"__serializers__": {
|
||||
"yaml.serialize": yamlserializer.serialize,
|
||||
"yaml.seserialize": yamlserializer.serialize,
|
||||
"python.serialize": pythonserializer.serialize,
|
||||
"json.serialize": jsonserializer.serialize,
|
||||
"plist.serialize": plistserializer.serialize,
|
||||
"msgpack.serialize": msgpackserializer.serialize,
|
||||
},
|
||||
"__opts__": {"test": False, "cachedir": ""},
|
||||
|
|
|
@ -5,8 +5,6 @@ import pytest
|
|||
|
||||
import salt.serializers.json as jsonserializer
|
||||
import salt.serializers.msgpack as msgpackserializer
|
||||
import salt.serializers.plist as plistserializer
|
||||
import salt.serializers.python as pythonserializer
|
||||
import salt.serializers.yaml as yamlserializer
|
||||
import salt.states.file as filestate
|
||||
import salt.utils.files
|
||||
|
@ -29,9 +27,7 @@ def configure_loader_modules():
|
|||
"__serializers__": {
|
||||
"yaml.serialize": yamlserializer.serialize,
|
||||
"yaml.seserialize": yamlserializer.serialize,
|
||||
"python.serialize": pythonserializer.serialize,
|
||||
"json.serialize": jsonserializer.serialize,
|
||||
"plist.serialize": plistserializer.serialize,
|
||||
"msgpack.serialize": msgpackserializer.serialize,
|
||||
},
|
||||
"__opts__": {"test": False, "cachedir": ""},
|
||||
|
|
|
@ -1,15 +1,11 @@
|
|||
import logging
|
||||
import os
|
||||
import plistlib
|
||||
import pprint
|
||||
|
||||
import msgpack
|
||||
import pytest
|
||||
|
||||
import salt.serializers.json as jsonserializer
|
||||
import salt.serializers.msgpack as msgpackserializer
|
||||
import salt.serializers.plist as plistserializer
|
||||
import salt.serializers.python as pythonserializer
|
||||
import salt.serializers.yaml as yamlserializer
|
||||
import salt.states.file as filestate
|
||||
import salt.utils.files
|
||||
|
@ -32,9 +28,7 @@ def configure_loader_modules():
|
|||
"__serializers__": {
|
||||
"yaml.serialize": yamlserializer.serialize,
|
||||
"yaml.seserialize": yamlserializer.serialize,
|
||||
"python.serialize": pythonserializer.serialize,
|
||||
"json.serialize": jsonserializer.serialize,
|
||||
"plist.serialize": plistserializer.serialize,
|
||||
"msgpack.serialize": msgpackserializer.serialize,
|
||||
},
|
||||
"__opts__": {"test": False, "cachedir": ""},
|
||||
|
@ -76,18 +70,6 @@ def test_serialize():
|
|||
filestate.serialize("/tmp", dataset, formatter="json")
|
||||
assert salt.utils.json.loads(returner.returned) == dataset
|
||||
|
||||
# plist
|
||||
filestate.serialize("/tmp", dataset, serializer="plist")
|
||||
assert plistlib.loads(returner.returned) == dataset
|
||||
filestate.serialize("/tmp", dataset, formatter="plist")
|
||||
assert plistlib.loads(returner.returned) == dataset
|
||||
|
||||
# Python
|
||||
filestate.serialize("/tmp", dataset, serializer="python")
|
||||
assert returner.returned == pprint.pformat(dataset) + "\n"
|
||||
filestate.serialize("/tmp", dataset, formatter="python")
|
||||
assert returner.returned == pprint.pformat(dataset) + "\n"
|
||||
|
||||
# msgpack
|
||||
filestate.serialize("/tmp", dataset, serializer="msgpack")
|
||||
assert returner.returned == msgpack.packb(dataset)
|
||||
|
@ -495,11 +477,11 @@ def test_serialize_into_managed_file():
|
|||
assert filestate.serialize(name) == ret
|
||||
|
||||
with patch.object(os.path, "isfile", mock_t):
|
||||
comt = "merge_if_exists is not supported for the python serializer"
|
||||
comt = "merge_if_exists is not supported for the json serializer"
|
||||
ret.update({"comment": comt, "result": False})
|
||||
assert (
|
||||
filestate.serialize(
|
||||
name, dataset=True, merge_if_exists=True, formatter="python"
|
||||
name, dataset=True, merge_if_exists=True, formatter="json"
|
||||
)
|
||||
== ret
|
||||
)
|
||||
|
@ -518,7 +500,7 @@ def test_serialize_into_managed_file():
|
|||
with patch.dict(filestate.__opts__, {"test": True}):
|
||||
comt = f"Dataset will be serialized and stored into {name}"
|
||||
ret.update({"comment": comt, "result": None, "changes": True})
|
||||
assert filestate.serialize(name, dataset=True, formatter="python") == ret
|
||||
assert filestate.serialize(name, dataset=True, formatter="json") == ret
|
||||
|
||||
# __opts__['test']=True without changes
|
||||
with patch.dict(
|
||||
|
@ -527,14 +509,14 @@ def test_serialize_into_managed_file():
|
|||
with patch.dict(filestate.__opts__, {"test": True}):
|
||||
comt = f"The file {name} is in the correct state"
|
||||
ret.update({"comment": comt, "result": True, "changes": False})
|
||||
assert filestate.serialize(name, dataset=True, formatter="python") == ret
|
||||
assert filestate.serialize(name, dataset=True, formatter="json") == ret
|
||||
|
||||
mock = MagicMock(return_value=ret)
|
||||
with patch.dict(filestate.__opts__, {"test": False}):
|
||||
with patch.dict(filestate.__salt__, {"file.manage_file": mock}):
|
||||
comt = f"Dataset will be serialized and stored into {name}"
|
||||
ret.update({"comment": comt, "result": None})
|
||||
assert filestate.serialize(name, dataset=True, formatter="python") == ret
|
||||
assert filestate.serialize(name, dataset=True, formatter="json") == ret
|
||||
|
||||
# merge_if_exists deserialization error
|
||||
mock_exception = MagicMock(side_effect=TypeError("test"))
|
||||
|
|
|
@ -5,8 +5,6 @@ import pytest
|
|||
|
||||
import salt.serializers.json as jsonserializer
|
||||
import salt.serializers.msgpack as msgpackserializer
|
||||
import salt.serializers.plist as plistserializer
|
||||
import salt.serializers.python as pythonserializer
|
||||
import salt.serializers.yaml as yamlserializer
|
||||
import salt.states.file as filestate
|
||||
import salt.utils.files
|
||||
|
@ -29,9 +27,7 @@ def configure_loader_modules():
|
|||
"__serializers__": {
|
||||
"yaml.serialize": yamlserializer.serialize,
|
||||
"yaml.seserialize": yamlserializer.serialize,
|
||||
"python.serialize": pythonserializer.serialize,
|
||||
"json.serialize": jsonserializer.serialize,
|
||||
"plist.serialize": plistserializer.serialize,
|
||||
"msgpack.serialize": msgpackserializer.serialize,
|
||||
},
|
||||
"__opts__": {"test": False, "cachedir": ""},
|
||||
|
|
|
@ -5,8 +5,6 @@ import pytest
|
|||
|
||||
import salt.serializers.json as jsonserializer
|
||||
import salt.serializers.msgpack as msgpackserializer
|
||||
import salt.serializers.plist as plistserializer
|
||||
import salt.serializers.python as pythonserializer
|
||||
import salt.serializers.yaml as yamlserializer
|
||||
import salt.states.file as filestate
|
||||
from tests.support.helpers import dedent
|
||||
|
@ -23,9 +21,7 @@ def configure_loader_modules():
|
|||
"__serializers__": {
|
||||
"yaml.serialize": yamlserializer.serialize,
|
||||
"yaml.seserialize": yamlserializer.serialize,
|
||||
"python.serialize": pythonserializer.serialize,
|
||||
"json.serialize": jsonserializer.serialize,
|
||||
"plist.serialize": plistserializer.serialize,
|
||||
"msgpack.serialize": msgpackserializer.serialize,
|
||||
},
|
||||
"__opts__": {"test": False, "cachedir": ""},
|
||||
|
|
|
@ -5,8 +5,6 @@ import pytest
|
|||
|
||||
import salt.serializers.json as jsonserializer
|
||||
import salt.serializers.msgpack as msgpackserializer
|
||||
import salt.serializers.plist as plistserializer
|
||||
import salt.serializers.python as pythonserializer
|
||||
import salt.serializers.yaml as yamlserializer
|
||||
import salt.states.file as filestate
|
||||
import salt.utils.files
|
||||
|
@ -29,9 +27,7 @@ def configure_loader_modules():
|
|||
"__serializers__": {
|
||||
"yaml.serialize": yamlserializer.serialize,
|
||||
"yaml.seserialize": yamlserializer.serialize,
|
||||
"python.serialize": pythonserializer.serialize,
|
||||
"json.serialize": jsonserializer.serialize,
|
||||
"plist.serialize": plistserializer.serialize,
|
||||
"msgpack.serialize": msgpackserializer.serialize,
|
||||
},
|
||||
"__opts__": {"test": False, "cachedir": ""},
|
||||
|
|
|
@ -5,8 +5,6 @@ import pytest
|
|||
|
||||
import salt.serializers.json as jsonserializer
|
||||
import salt.serializers.msgpack as msgpackserializer
|
||||
import salt.serializers.plist as plistserializer
|
||||
import salt.serializers.python as pythonserializer
|
||||
import salt.serializers.yaml as yamlserializer
|
||||
import salt.states.file as filestate
|
||||
import salt.utils.files
|
||||
|
@ -28,9 +26,7 @@ def configure_loader_modules():
|
|||
"__serializers__": {
|
||||
"yaml.serialize": yamlserializer.serialize,
|
||||
"yaml.seserialize": yamlserializer.serialize,
|
||||
"python.serialize": pythonserializer.serialize,
|
||||
"json.serialize": jsonserializer.serialize,
|
||||
"plist.serialize": plistserializer.serialize,
|
||||
"msgpack.serialize": msgpackserializer.serialize,
|
||||
},
|
||||
"__opts__": {"test": False, "cachedir": ""},
|
||||
|
|
|
@ -6,8 +6,6 @@ import pytest
|
|||
|
||||
import salt.serializers.json as jsonserializer
|
||||
import salt.serializers.msgpack as msgpackserializer
|
||||
import salt.serializers.plist as plistserializer
|
||||
import salt.serializers.python as pythonserializer
|
||||
import salt.serializers.yaml as yamlserializer
|
||||
import salt.states.file as filestate
|
||||
import salt.utils.platform
|
||||
|
@ -25,9 +23,7 @@ def configure_loader_modules():
|
|||
"__serializers__": {
|
||||
"yaml.serialize": yamlserializer.serialize,
|
||||
"yaml.seserialize": yamlserializer.serialize,
|
||||
"python.serialize": pythonserializer.serialize,
|
||||
"json.serialize": jsonserializer.serialize,
|
||||
"plist.serialize": plistserializer.serialize,
|
||||
"msgpack.serialize": msgpackserializer.serialize,
|
||||
},
|
||||
"__opts__": {"test": False, "cachedir": ""},
|
||||
|
|
|
@ -6,8 +6,6 @@ import pytest
|
|||
|
||||
import salt.serializers.json as jsonserializer
|
||||
import salt.serializers.msgpack as msgpackserializer
|
||||
import salt.serializers.plist as plistserializer
|
||||
import salt.serializers.python as pythonserializer
|
||||
import salt.serializers.yaml as yamlserializer
|
||||
import salt.states.file as filestate
|
||||
from tests.support.mock import MagicMock, call, patch
|
||||
|
@ -34,9 +32,7 @@ def configure_loader_modules():
|
|||
"__serializers__": {
|
||||
"yaml.serialize": yamlserializer.serialize,
|
||||
"yaml.seserialize": yamlserializer.serialize,
|
||||
"python.serialize": pythonserializer.serialize,
|
||||
"json.serialize": jsonserializer.serialize,
|
||||
"plist.serialize": plistserializer.serialize,
|
||||
"msgpack.serialize": msgpackserializer.serialize,
|
||||
},
|
||||
"__opts__": {"test": False, "cachedir": ""},
|
||||
|
|
|
@ -5,8 +5,6 @@ import pytest
|
|||
|
||||
import salt.serializers.json as jsonserializer
|
||||
import salt.serializers.msgpack as msgpackserializer
|
||||
import salt.serializers.plist as plistserializer
|
||||
import salt.serializers.python as pythonserializer
|
||||
import salt.serializers.yaml as yamlserializer
|
||||
import salt.states.file as filestate
|
||||
import salt.utils.files
|
||||
|
@ -28,9 +26,7 @@ def configure_loader_modules():
|
|||
"__serializers__": {
|
||||
"yaml.serialize": yamlserializer.serialize,
|
||||
"yaml.seserialize": yamlserializer.serialize,
|
||||
"python.serialize": pythonserializer.serialize,
|
||||
"json.serialize": jsonserializer.serialize,
|
||||
"plist.serialize": plistserializer.serialize,
|
||||
"msgpack.serialize": msgpackserializer.serialize,
|
||||
},
|
||||
"__opts__": {"test": False, "cachedir": ""},
|
||||
|
|
|
@ -1,162 +0,0 @@
|
|||
"""
|
||||
This test checks mysql_database salt state
|
||||
"""
|
||||
|
||||
import pytest
|
||||
|
||||
import salt.states.mysql_database as mysql_database
|
||||
from tests.support.mock import MagicMock, patch
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def configure_loader_modules():
|
||||
return {mysql_database: {}}
|
||||
|
||||
|
||||
def test_present():
|
||||
"""
|
||||
Test to ensure that the named database is present with
|
||||
the specified properties.
|
||||
"""
|
||||
dbname = "my_test"
|
||||
charset = "utf8"
|
||||
collate = "utf8_unicode_ci"
|
||||
|
||||
ret = {"name": dbname, "result": False, "comment": "", "changes": {}}
|
||||
|
||||
mock_result = {
|
||||
"character_set": charset,
|
||||
"collate": collate,
|
||||
"name": dbname,
|
||||
}
|
||||
|
||||
mock_result_alter_db = {True}
|
||||
|
||||
mock = MagicMock(return_value=mock_result)
|
||||
mock_a = MagicMock(return_value=mock_result_alter_db)
|
||||
mock_failed = MagicMock(return_value=False)
|
||||
mock_err = MagicMock(return_value="salt")
|
||||
mock_no_err = MagicMock(return_value=None)
|
||||
mock_create = MagicMock(return_value=True)
|
||||
mock_create_failed = MagicMock(return_value=False)
|
||||
with patch.dict(
|
||||
mysql_database.__salt__, {"mysql.db_get": mock, "mysql.alter_db": mock_a}
|
||||
):
|
||||
mod_charset = "ascii"
|
||||
mod_collate = "ascii_general_ci"
|
||||
with patch.dict(mysql_database.__opts__, {"test": True}):
|
||||
comt = [
|
||||
"Database character set {} != {} needs to be updated".format(
|
||||
mod_charset, charset
|
||||
),
|
||||
f"Database {dbname} is going to be updated",
|
||||
]
|
||||
ret.update({"comment": "\n".join(comt)})
|
||||
ret.update({"result": None})
|
||||
assert mysql_database.present(dbname, character_set=mod_charset) == ret
|
||||
|
||||
with patch.dict(mysql_database.__opts__, {"test": True}):
|
||||
comt = [
|
||||
f"Database {dbname} is already present",
|
||||
"Database collate {} != {} needs to be updated".format(
|
||||
mod_collate, collate
|
||||
),
|
||||
]
|
||||
ret.update({"comment": "\n".join(comt)})
|
||||
ret.update({"result": None})
|
||||
assert (
|
||||
mysql_database.present(
|
||||
dbname, character_set=charset, collate=mod_collate
|
||||
)
|
||||
== ret
|
||||
)
|
||||
|
||||
with patch.dict(mysql_database.__opts__, {}):
|
||||
comt = [
|
||||
"Database character set {} != {} needs to be updated".format(
|
||||
mod_charset, charset
|
||||
),
|
||||
"Database collate {} != {} needs to be updated".format(
|
||||
mod_collate, collate
|
||||
),
|
||||
]
|
||||
ret.update({"comment": "\n".join(comt)})
|
||||
ret.update({"result": True})
|
||||
assert (
|
||||
mysql_database.present(
|
||||
dbname, character_set=mod_charset, collate=mod_collate
|
||||
)
|
||||
== ret
|
||||
)
|
||||
|
||||
with patch.dict(mysql_database.__opts__, {"test": False}):
|
||||
comt = f"Database {dbname} is already present"
|
||||
ret.update({"comment": comt})
|
||||
ret.update({"result": True})
|
||||
assert (
|
||||
mysql_database.present(dbname, character_set=charset, collate=collate)
|
||||
== ret
|
||||
)
|
||||
|
||||
with patch.dict(mysql_database.__salt__, {"mysql.db_get": mock_failed}):
|
||||
with patch.dict(mysql_database.__salt__, {"mysql.db_create": mock_create}):
|
||||
with patch.object(mysql_database, "_get_mysql_error", mock_err):
|
||||
ret.update({"comment": "salt", "result": False})
|
||||
assert mysql_database.present(dbname) == ret
|
||||
|
||||
with patch.object(mysql_database, "_get_mysql_error", mock_no_err):
|
||||
comt = f"The database {dbname} has been created"
|
||||
|
||||
ret.update({"comment": comt, "result": True})
|
||||
ret.update({"changes": {dbname: "Present"}})
|
||||
assert mysql_database.present(dbname) == ret
|
||||
|
||||
with patch.dict(
|
||||
mysql_database.__salt__, {"mysql.db_create": mock_create_failed}
|
||||
):
|
||||
ret["comment"] = ""
|
||||
with patch.object(mysql_database, "_get_mysql_error", mock_no_err):
|
||||
ret.update({"changes": {}})
|
||||
comt = f"Failed to create database {dbname}"
|
||||
ret.update({"comment": comt, "result": False})
|
||||
assert mysql_database.present(dbname) == ret
|
||||
|
||||
|
||||
def test_absent():
|
||||
"""
|
||||
Test to ensure that the named database is absent.
|
||||
"""
|
||||
dbname = "my_test"
|
||||
|
||||
ret = {"name": dbname, "result": True, "comment": "", "changes": {}}
|
||||
|
||||
mock_db_exists = MagicMock(return_value=True)
|
||||
mock_remove = MagicMock(return_value=True)
|
||||
mock_remove_fail = MagicMock(return_value=False)
|
||||
mock_err = MagicMock(return_value="salt")
|
||||
|
||||
with patch.dict(
|
||||
mysql_database.__salt__,
|
||||
{"mysql.db_exists": mock_db_exists, "mysql.db_remove": mock_remove},
|
||||
):
|
||||
with patch.dict(mysql_database.__opts__, {"test": True}):
|
||||
comt = f"Database {dbname} is present and needs to be removed"
|
||||
ret.update({"comment": comt, "result": None})
|
||||
assert mysql_database.absent(dbname) == ret
|
||||
|
||||
with patch.dict(mysql_database.__opts__, {}):
|
||||
comt = f"Database {dbname} has been removed"
|
||||
ret.update({"comment": comt, "result": True})
|
||||
ret.update({"changes": {dbname: "Absent"}})
|
||||
assert mysql_database.absent(dbname) == ret
|
||||
|
||||
with patch.dict(
|
||||
mysql_database.__salt__,
|
||||
{"mysql.db_exists": mock_db_exists, "mysql.db_remove": mock_remove_fail},
|
||||
):
|
||||
with patch.dict(mysql_database.__opts__, {}):
|
||||
with patch.object(mysql_database, "_get_mysql_error", mock_err):
|
||||
ret["changes"] = {}
|
||||
comt = "Unable to remove database {} ({})".format(dbname, "salt")
|
||||
ret.update({"comment": comt, "result": False})
|
||||
assert mysql_database.absent(dbname) == ret
|
|
@@ -1,98 +0,0 @@
"""
    :codeauthor: Jayesh Kariya <jayeshk@saltstack.com>
"""

import pytest

import salt.states.mysql_grants as mysql_grants
from tests.support.mock import MagicMock, patch


@pytest.fixture
def configure_loader_modules():
    return {mysql_grants: {}}


def test_present():
    """
    Test to ensure that the grant is present with the specified properties.
    """
    name = "frank_exampledb"

    ret = {"name": name, "result": True, "comment": "", "changes": {}}

    mock = MagicMock(side_effect=[True, False, False, False])
    mock_t = MagicMock(return_value=True)
    mock_str = MagicMock(return_value="salt")
    mock_none = MagicMock(return_value=None)
    with patch.dict(
        mysql_grants.__salt__,
        {"mysql.grant_exists": mock, "mysql.grant_add": mock_t},
    ):
        comt = "Grant None on None to None@localhost is already present"
        ret.update({"comment": comt})
        assert mysql_grants.present(name) == ret

        with patch.object(mysql_grants, "_get_mysql_error", mock_str):
            ret.update({"comment": "salt", "result": False})
            assert mysql_grants.present(name) == ret

        with patch.object(mysql_grants, "_get_mysql_error", mock_none):
            with patch.dict(mysql_grants.__opts__, {"test": True}):
                comt = "MySQL grant frank_exampledb is set to be created"
                ret.update({"comment": comt, "result": None})
                assert mysql_grants.present(name) == ret

            with patch.dict(mysql_grants.__opts__, {"test": False}):
                comt = "Grant None on None to None@localhost has been added"
                ret.update(
                    {"comment": comt, "result": True, "changes": {name: "Present"}}
                )
                assert mysql_grants.present(name) == ret


def test_absent():
    """
    Test to ensure that the grant is absent.
    """
    name = "frank_exampledb"

    ret = {"name": name, "result": True, "comment": "", "changes": {}}

    mock = MagicMock(side_effect=[True, False])
    mock_t = MagicMock(side_effect=[True, True, True, False, False])
    mock_str = MagicMock(return_value="salt")
    mock_none = MagicMock(return_value=None)
    with patch.dict(
        mysql_grants.__salt__,
        {"mysql.grant_exists": mock_t, "mysql.grant_revoke": mock},
    ):
        with patch.dict(mysql_grants.__opts__, {"test": True}):
            comt = "MySQL grant frank_exampledb is set to be revoked"
            ret.update({"comment": comt, "result": None})
            assert mysql_grants.absent(name) == ret

        with patch.dict(mysql_grants.__opts__, {"test": False}):
            comt = "Grant None on None for None@localhost has been revoked"
            ret.update({"comment": comt, "result": True, "changes": {name: "Absent"}})
            assert mysql_grants.absent(name) == ret

            with patch.object(mysql_grants, "_get_mysql_error", mock_str):
                comt = "Unable to revoke grant None on None for None@localhost (salt)"
                ret.update({"comment": comt, "result": False, "changes": {}})
                assert mysql_grants.absent(name) == ret

                comt = (
                    "Unable to determine if grant None on "
                    "None for None@localhost exists (salt)"
                )
                ret.update({"comment": comt})
                assert mysql_grants.absent(name) == ret

            with patch.object(mysql_grants, "_get_mysql_error", mock_none):
                comt = (
                    "Grant None on None to None@localhost is not present,"
                    " so it cannot be revoked"
                )
                ret.update({"comment": comt, "result": True})
                assert mysql_grants.absent(name) == ret
@@ -1,178 +0,0 @@
"""
    :codeauthor: Jayesh Kariya <jayeshk@saltstack.com>
"""

import logging
import os

import pytest

import salt.modules.mysql as mysql_mod
import salt.states.mysql_query as mysql_query
from tests.support.mock import MagicMock, patch

log = logging.getLogger(__name__)

MySQLdb = pytest.importorskip("MySQLdb")
pymysql = pytest.importorskip("pymysql")

pymysql.install_as_MySQLdb()


class MockMySQLConnect:
    def __init__(self, *args, **kwargs):
        self.args = args
        self.kwargs = kwargs

    def autocommit(self, *args, **kwards):
        return True

    def cursor(self, *args, **kwards):
        return MagicMock()


@pytest.fixture
def configure_loader_modules():
    return {mysql_query: {}, mysql_mod: {}}


def test_run():
    """
    Test to execute an arbitrary query on the specified database.
    """
    name = "query_id"
    database = "my_database"
    query = "SELECT * FROM table;"

    ret = {"name": name, "result": True, "comment": "", "changes": {}}

    mock_t = MagicMock(return_value=True)
    mock_f = MagicMock(return_value=False)
    mock_str = MagicMock(return_value="salt")
    mock_none = MagicMock(return_value=None)
    mock_dict = MagicMock(return_value={"salt": "SALT"})
    mock_lst = MagicMock(return_value=["grain"])
    with patch.dict(mysql_query.__salt__, {"mysql.db_exists": mock_f}):
        with patch.object(mysql_query, "_get_mysql_error", mock_str):
            ret.update({"comment": "salt", "result": False})
            assert mysql_query.run(name, database, query) == ret

        with patch.object(mysql_query, "_get_mysql_error", mock_none):
            comt = f"Database {name} is not present"
            ret.update({"comment": comt, "result": None})
            assert mysql_query.run(name, database, query) == ret

    with patch.dict(
        mysql_query.__salt__,
        {
            "mysql.db_exists": mock_t,
            "grains.ls": mock_lst,
            "grains.get": mock_dict,
            "mysql.query": mock_str,
        },
    ):
        comt = "No execution needed. Grain grain already set"
        ret.update({"comment": comt, "result": True})
        assert (
            mysql_query.run(
                name,
                database,
                query,
                output="grain",
                grain="grain",
                overwrite=False,
            )
            == ret
        )

        with patch.dict(mysql_query.__opts__, {"test": True}):
            comt = "Query would execute, storing result in grain: grain"
            ret.update({"comment": comt, "result": None})
            assert (
                mysql_query.run(name, database, query, output="grain", grain="grain")
                == ret
            )

            comt = "Query would execute, storing result in grain: grain:salt"
            ret.update({"comment": comt})
            assert (
                mysql_query.run(
                    name, database, query, output="grain", grain="grain", key="salt"
                )
                == ret
            )

            comt = "Query would execute, storing result in file: salt"
            ret.update({"comment": comt})
            assert (
                mysql_query.run(name, database, query, output="salt", grain="grain")
                == ret
            )

            comt = "Query would execute, not storing result"
            ret.update({"comment": comt})
            assert mysql_query.run(name, database, query) == ret

            comt = "No execution needed. Grain grain:salt already set"
            ret.update({"comment": comt, "result": True})
            assert (
                mysql_query.run(
                    name,
                    database,
                    query,
                    output="grain",
                    grain="grain",
                    key="salt",
                    overwrite=False,
                )
                == ret
            )

            comt = "Error: output type 'grain' needs the grain parameter\n"
            ret.update({"comment": comt, "result": False})
            assert mysql_query.run(name, database, query, output="grain") == ret

            with patch.object(os.path, "isfile", mock_t):
                comt = "No execution needed. File salt already set"
                ret.update({"comment": comt, "result": True})
                assert (
                    mysql_query.run(
                        name,
                        database,
                        query,
                        output="salt",
                        grain="grain",
                        overwrite=False,
                    )
                    == ret
                )

        with patch.dict(mysql_query.__opts__, {"test": False}):
            ret.update({"comment": "salt", "changes": {"query": "Executed"}})
            assert mysql_query.run(name, database, query) == ret


def test_run_multiple_statements():
    """
    Test to execute an arbitrary query on the specified database
    and ensure that the correct multi_statements flag is passed along
    to MySQLdb.connect.
    """
    name = "query_id"
    database = "my_database"
    query = "SELECT * FROM table; SELECT * from another_table;"

    mock_t = MagicMock(return_value=True)

    with patch.dict(mysql_query.__salt__, {"mysql.db_exists": mock_t}), patch.dict(
        mysql_query.__opts__, {"test": False}
    ), patch.dict(mysql_query.__salt__, {"mysql.query": mysql_mod.query}), patch.dict(
        mysql_query.__salt__, {"mysql._execute": MagicMock()}
    ), patch.dict(
        mysql_mod.__salt__, {"config.option": MagicMock()}
    ), patch(
        "MySQLdb.connect", return_value=MockMySQLConnect()
    ) as mock_connect:
        ret = mysql_query.run(name, database, query, client_flags=["multi_statements"])
        assert 1 == len(mock_connect.mock_calls)
        assert "client_flag=65536" in str(mock_connect.mock_calls[0])
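        # Note: 65536 (1 << 16) is the value of MySQL's CLIENT_MULTI_STATEMENTS
        # capability flag, so passing client_flags=["multi_statements"] is expected
        # to surface as client_flag=65536 in the MySQLdb.connect() call above.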
@@ -1,149 +0,0 @@
"""
    :codeauthor: Jayesh Kariya <jayeshk@saltstack.com>
"""

import pytest

import salt.states.mysql_user as mysql_user
import salt.utils.data
from tests.support.mock import MagicMock, patch


@pytest.fixture
def configure_loader_modules():
    return {mysql_user: {}}


def test_present():
    """
    Test to ensure that the named user is present with
    the specified properties.
    """
    name = "frank"
    password = "bob@cat"

    ret = {"name": name, "result": False, "comment": "", "changes": {}}

    mock = MagicMock(
        side_effect=[
            True,
            False,
            True,
            False,
            False,
            True,
            False,
            False,
            False,
            False,
            False,
            True,
        ]
    )
    mock_t = MagicMock(return_value=True)
    mock_f = MagicMock(return_value=False)
    mock_str = MagicMock(return_value="salt")
    mock_none = MagicMock(return_value=None)
    mock_sn = MagicMock(side_effect=[None, "salt", None, None, None])
    with patch.object(salt.utils.data, "is_true", mock_f):
        comt = (
            "Either password or password_hash must be specified,"
            " unless allow_passwordless is True"
        )
        ret.update({"comment": comt})
        assert mysql_user.present(name) == ret

    with patch.dict(
        mysql_user.__salt__,
        {"mysql.user_exists": mock, "mysql.user_chpass": mock_t},
    ):
        with patch.object(salt.utils.data, "is_true", mock_t):
            comt = "User frank@localhost is already present with passwordless login"
            ret.update({"comment": comt, "result": True})
            assert mysql_user.present(name, allow_passwordless=True) == ret

            with patch.object(mysql_user, "_get_mysql_error", mock_str):
                ret.update({"comment": "salt", "result": False})
                assert mysql_user.present(name) == ret

        with patch.object(mysql_user, "_get_mysql_error", mock_str):
            comt = "User frank@localhost is already present with the desired password"
            ret.update({"comment": comt, "result": True})
            assert mysql_user.present(name, password=password) == ret

            with patch.object(mysql_user, "_get_mysql_error", mock_str):
                ret.update({"comment": "salt", "result": False})
                assert mysql_user.present(name, password=password) == ret

            with patch.object(mysql_user, "_get_mysql_error", mock_none):
                with patch.dict(mysql_user.__opts__, {"test": True}):
                    comt = "Password for user frank@localhost is set to be changed"
                    ret.update({"comment": comt, "result": None})
                    assert mysql_user.present(name, password=password) == ret

        with patch.object(mysql_user, "_get_mysql_error", mock_sn):
            with patch.dict(mysql_user.__opts__, {"test": False}):
                ret.update({"comment": "salt", "result": False})
                assert mysql_user.present(name, password=password) == ret

            with patch.dict(mysql_user.__opts__, {"test": True}):
                comt = "User frank@localhost is set to be added"
                ret.update({"comment": comt, "result": None})
                assert mysql_user.present(name, password=password) == ret

            with patch.dict(mysql_user.__opts__, {"test": False}):
                comt = "Password for user frank@localhost has been changed"
                ret.update(
                    {"comment": comt, "result": True, "changes": {name: "Updated"}}
                )
                assert mysql_user.present(name, password=password) == ret


def test_absent():
    """
    Test to ensure that the named user is absent.
    """
    name = "frank_exampledb"

    ret = {"name": name, "result": True, "comment": "", "changes": {}}

    mock = MagicMock(side_effect=[True, True, True, False, False, False])
    mock_t = MagicMock(side_effect=[True, False])
    mock_str = MagicMock(return_value="salt")
    mock_none = MagicMock(return_value=None)
    with patch.dict(
        mysql_user.__salt__,
        {"mysql.user_exists": mock, "mysql.user_remove": mock_t},
    ):
        with patch.dict(mysql_user.__opts__, {"test": True}):
            comt = "User frank_exampledb@localhost is set to be removed"
            ret.update({"comment": comt, "result": None})
            assert mysql_user.absent(name) == ret

        with patch.dict(mysql_user.__opts__, {"test": False}):
            comt = "User frank_exampledb@localhost has been removed"
            ret.update(
                {
                    "comment": comt,
                    "result": True,
                    "changes": {"frank_exampledb": "Absent"},
                }
            )
            assert mysql_user.absent(name) == ret

            with patch.object(mysql_user, "_get_mysql_error", mock_str):
                comt = "User frank_exampledb@localhost has been removed"
                ret.update({"comment": "salt", "result": False, "changes": {}})
                assert mysql_user.absent(name) == ret

                comt = "User frank_exampledb@localhost has been removed"
                ret.update({"comment": "salt"})
                assert mysql_user.absent(name) == ret

            with patch.object(mysql_user, "_get_mysql_error", mock_none):
                comt = (
                    "User frank_exampledb@localhost is not present,"
                    " so it cannot be removed"
                )
                ret.update({"comment": comt, "result": True, "changes": {}})
                assert mysql_user.absent(name) == ret
@@ -1,301 +0,0 @@
"""
    :codeauthor: :email:`Jakub Sliva <jakub.sliva@ultimum.io>`
"""

import pytest

import salt.states.zabbix_action as zabbix_action
from tests.support.mock import MagicMock, patch


@pytest.fixture
def input_params():
    return {
        "status": "0",
        "filter": {
            "evaltype": "2",
            "conditions": [
                {"operator": "2", "conditiontype": "24", "value": "database"}
            ],
        },
        "eventsource": "2",
        "name": "Auto registration Databases",
        "operations": [{"opgroup": [{"groupid": "6"}], "operationtype": "4"}],
    }


@pytest.fixture
def existing_obj():
    return [
        {
            "status": "0",
            "operations": [
                {
                    "operationtype": "4",
                    "esc_period": "0",
                    "evaltype": "0",
                    "opconditions": [],
                    "esc_step_to": "1",
                    "actionid": "28",
                    "esc_step_from": "1",
                    "opgroup": [{"groupid": "6", "operationid": "92"}],
                    "operationid": "92",
                }
            ],
            "def_shortdata": "",
            "name": "Auto registration Databases",
            "esc_period": "0",
            "def_longdata": "",
            "filter": {
                "formula": "",
                "evaltype": "2",
                "conditions": [
                    {
                        "operator": "2",
                        "conditiontype": "24",
                        "formulaid": "A",
                        "value": "database",
                    }
                ],
                "eval_formula": "A",
            },
            "eventsource": "2",
            "actionid": "28",
            "r_shortdata": "",
            "r_longdata": "",
            "recovery_msg": "0",
        }
    ]


@pytest.fixture
def existing_obj_diff():
    return {
        "status": "0",
        "operations": [
            {
                "operationtype": "4",
                "esc_period": "0",
                "evaltype": "0",
                "opconditions": [],
                "esc_step_to": "1",
                "actionid": "28",
                "esc_step_from": "1",
                "opgroup": [{"groupid": "6", "operationid": "92"}],
                "operationid": "92",
            }
        ],
        "def_shortdata": "",
        "name": "Auto registration Databases",
        "esc_period": "0",
        "def_longdata": "",
        "filter": {
            "formula": "",
            "evaltype": "2",
            "conditions": [
                {
                    "operator": "2",
                    "conditiontype": "24",
                    "formulaid": "A",
                    "value": "SOME OTHER VALUE",
                }
            ],
            "eval_formula": "A",
        },
        "eventsource": "2",
        "actionid": "28",
        "r_shortdata": "",
        "r_longdata": "",
        "recovery_msg": "0",
    }


@pytest.fixture
def diff_params():
    return {
        "filter": {
            "evaltype": "2",
            "conditions": [
                {"operator": "2", "conditiontype": "24", "value": "virtual"}
            ],
        },
        "actionid": "28",
    }


@pytest.fixture
def configure_loader_modules():
    return {zabbix_action: {}}


def test_present_create(input_params):
    """
    Test to ensure that named action is created
    """
    name = "Auto registration Databases"
    ret = {"name": name, "result": False, "comment": "", "changes": {}}

    def side_effect_run_query(*args):
        """
        Differentiate between __salt__ exec module function calls with different parameters.
        """
        if args[0] == "action.get":
            return False
        elif args[0] == "action.create":
            return True

    with patch.dict(zabbix_action.__opts__, {"test": False}):
        with patch.dict(
            zabbix_action.__salt__,
            {
                "zabbix.get_zabbix_id_mapper": MagicMock(
                    return_value={"action": "actionid"}
                ),
                "zabbix.substitute_params": MagicMock(
                    side_effect=[input_params, False]
                ),
                "zabbix.run_query": MagicMock(side_effect=side_effect_run_query),
                "zabbix.compare_params": MagicMock(return_value={}),
            },
        ):
            ret["result"] = True
            ret["comment"] = f'Zabbix Action "{name}" created.'
            ret["changes"] = {
                name: {
                    "old": f'Zabbix Action "{name}" did not exist.',
                    "new": 'Zabbix Action "{}" created according definition.'.format(
                        name
                    ),
                }
            }
            assert zabbix_action.present(name, {}) == ret


def test_present_exists(input_params, existing_obj):
    """
    Test to ensure that named action is present and not changed
    """
    name = "Auto registration Databases"
    ret = {"name": name, "result": False, "comment": "", "changes": {}}

    with patch.dict(zabbix_action.__opts__, {"test": False}):
        with patch.dict(
            zabbix_action.__salt__,
            {
                "zabbix.get_zabbix_id_mapper": MagicMock(
                    return_value={"action": "actionid"}
                ),
                "zabbix.substitute_params": MagicMock(
                    side_effect=[input_params, existing_obj]
                ),
                "zabbix.run_query": MagicMock(return_value=["length of result is 1"]),
                "zabbix.compare_params": MagicMock(return_value={}),
            },
        ):
            ret["result"] = True
            ret["comment"] = (
                'Zabbix Action "{}" already exists and corresponds to a definition.'.format(
                    name
                )
            )
            assert zabbix_action.present(name, {}) == ret


def test_present_update(input_params, existing_obj_diff, diff_params):
    """
    Test to ensure that named action is present but must be updated
    """
    name = "Auto registration Databases"
    ret = {"name": name, "result": False, "comment": "", "changes": {}}

    def side_effect_run_query(*args):
        """
        Differentiate between __salt__ exec module function calls with different parameters.
        """
        if args[0] == "action.get":
            return ["length of result is 1 = action exists"]
        elif args[0] == "action.update":
            return diff_params

    with patch.dict(zabbix_action.__opts__, {"test": False}):
        with patch.dict(
            zabbix_action.__salt__,
            {
                "zabbix.get_zabbix_id_mapper": MagicMock(
                    return_value={"action": "actionid"}
                ),
                "zabbix.substitute_params": MagicMock(
                    side_effect=[input_params, existing_obj_diff]
                ),
                "zabbix.run_query": MagicMock(side_effect=side_effect_run_query),
                "zabbix.compare_params": MagicMock(return_value=diff_params),
            },
        ):
            ret["result"] = True
            ret["comment"] = f'Zabbix Action "{name}" updated.'
            ret["changes"] = {
                name: {
                    "old": (
                        'Zabbix Action "{}" differed '
                        "in following parameters: {}".format(name, diff_params)
                    ),
                    "new": f'Zabbix Action "{name}" fixed.',
                }
            }
            assert zabbix_action.present(name, {}) == ret


def test_absent_test_mode():
    """
    Test to ensure that named action is absent in test mode
    """
    name = "Auto registration Databases"
    ret = {"name": name, "result": False, "comment": "", "changes": {}}
    with patch.dict(zabbix_action.__opts__, {"test": True}):
        with patch.dict(
            zabbix_action.__salt__,
            {"zabbix.get_object_id_by_params": MagicMock(return_value=11)},
        ):
            ret["result"] = True
            ret["comment"] = f'Zabbix Action "{name}" would be deleted.'
            ret["changes"] = {
                name: {
                    "old": f'Zabbix Action "{name}" exists.',
                    "new": f'Zabbix Action "{name}" would be deleted.',
                }
            }
            assert zabbix_action.absent(name) == ret


def test_absent():
    """
    Test to ensure that named action is absent
    """
    name = "Auto registration Databases"
    ret = {"name": name, "result": False, "comment": "", "changes": {}}
    with patch.dict(zabbix_action.__opts__, {"test": False}):
        with patch.dict(
            zabbix_action.__salt__,
            {"zabbix.get_object_id_by_params": MagicMock(return_value=False)},
        ):
            ret["result"] = True
            ret["comment"] = f'Zabbix Action "{name}" does not exist.'
            assert zabbix_action.absent(name) == ret

        with patch.dict(
            zabbix_action.__salt__,
            {"zabbix.get_object_id_by_params": MagicMock(return_value=11)},
        ):
            with patch.dict(
                zabbix_action.__salt__,
                {"zabbix.run_query": MagicMock(return_value=True)},
            ):
                ret["result"] = True
                ret["comment"] = f'Zabbix Action "{name}" deleted.'
                ret["changes"] = {
                    name: {
                        "old": f'Zabbix Action "{name}" existed.',
                        "new": f'Zabbix Action "{name}" deleted.',
                    }
                }
                assert zabbix_action.absent(name) == ret
File diff suppressed because it is too large
@@ -1,331 +0,0 @@
"""
    :codeauthor: :email:`Jakub Sliva <jakub.sliva@ultimum.io>`
"""

import pytest

import salt.states.zabbix_template as zabbix_template
from tests.support.mock import MagicMock, patch


@pytest.fixture
def defined_obj():
    return {
        "macros": [{"macro": "{$CEPH_CLUSTER_NAME}", "value": "ceph"}],
        "host": "A Testing Template",
        "hosts": [{"hostid": "10112"}, {"hostid": "10113"}],
        "description": "Template for Ceph nodes",
        "groups": [{"groupid": "1"}],
    }


@pytest.fixture
def defined_c_list_subs():
    return {
        "applications": [{"name": "Ceph OSD"}],
        "graphs": [],
        "triggers": [],
        "items": [],
        "httpTests": [],
        "screens": [],
        "gitems": [],
        "discoveries": [],
    }


@pytest.fixture
def substitute_params_create(defined_obj, defined_c_list_subs):
    return [
        defined_obj,
        [],
        defined_c_list_subs["applications"],
        [],
        [],
        [],
        [],
        [],
        [],
        [],
        [],
    ]


@pytest.fixture
def existing_obj():
    return [
        {
            "available": "0",
            "tls_connect": "1",
            "maintenance_type": "0",
            "groups": [{"groupid": "1"}],
            "macros": [
                {
                    "macro": "{$CEPH_CLUSTER_NAME}",
                    "hostmacroid": "60",
                    "hostid": "10206",
                    "value": "ceph",
                }
            ],
            "hosts": [{"hostid": "10112"}, {"hostid": "10113"}],
            "status": "3",
            "description": "Template for Ceph nodes",
            "host": "A Testing Template",
            "disable_until": "0",
            "templateid": "10206",
            "name": "A Testing Template",
        }
    ]


@pytest.fixture
def substitute_params_exists(defined_obj, existing_obj):
    return [
        defined_obj,
        existing_obj[0],
        [],
        [],
        [],
        [],
        [],
        [],
        [],
        [],
    ]


@pytest.fixture
def existing_obj_diff():
    return [
        {
            "groups": [{"groupid": "1"}],
            "macros": [
                {
                    "macro": "{$CEPH_CLUSTER_NAME}",
                    "hostmacroid": "60",
                    "hostid": "10206",
                    "value": "ceph",
                }
            ],
            "hosts": [{"hostid": "10112"}, {"hostid": "10113"}],
            "status": "3",
            "templateid": "10206",
            "name": "A Testing Template",
        }
    ]


@pytest.fixture
def substitute_params_update(defined_obj, existing_obj_diff):
    return [
        defined_obj,
        existing_obj_diff[0],
        [],
        [],
        [],
        [],
        [],
        [],
        [],
        [],
    ]


@pytest.fixture
def diff_params():
    return {"old": {}, "new": {"macros": [], "templateid": "10206"}}


@pytest.fixture
def configure_loader_modules():
    return {zabbix_template: {}}


def test_present_create(substitute_params_create):
    """
    Test to ensure that named template is created
    """
    with patch("salt.states.zabbix_template.CHANGE_STACK", []):
        name = "A Testing Template"
        ret = {"name": name, "result": False, "comment": "", "changes": {}}

        def side_effect_run_query(*args):
            """
            Differentiate between __salt__ exec module function calls with different parameters.
            """
            if args[0] in ("template.get", "application.get"):
                return []
            elif args[0] == "template.create":
                return {"templateids": ["10206"]}
            elif args[0] == "application.create":
                return {"applicationids": ["701"]}

        with patch.dict(zabbix_template.__opts__, {"test": False}):
            with patch.dict(
                zabbix_template.__salt__,
                {
                    "zabbix.get_zabbix_id_mapper": MagicMock(
                        return_value={"template": "templateid"}
                    ),
                    "zabbix.substitute_params": MagicMock(
                        side_effect=substitute_params_create
                    ),
                    "zabbix.run_query": MagicMock(side_effect=side_effect_run_query),
                    "zabbix.compare_params": MagicMock(return_value={}),
                },
            ):
                ret["result"] = True
                ret["comment"] = f'Zabbix Template "{name}" created.'
                ret["changes"] = {
                    name: {
                        "old": f'Zabbix Template "{name}" did not exist.',
                        "new": (
                            'Zabbix Template "{}" created according definition.'.format(
                                name
                            )
                        ),
                    }
                }
                assert zabbix_template.present(name, {}) == ret


def test_present_exists(existing_obj, substitute_params_exists):
    """
    Test to ensure that named template is present and not changed
    """
    with patch("salt.states.zabbix_template.CHANGE_STACK", []):
        name = "A Testing Template"
        ret = {"name": name, "result": False, "comment": "", "changes": {}}

        def side_effect_run_query(*args):
            """
            Differentiate between __salt__ exec module function calls with different parameters.
            """
            if args[0] == "template.get":
                return existing_obj
            elif args[0] == "application.get":
                return ["non-empty"]

        with patch.dict(zabbix_template.__opts__, {"test": False}):
            with patch.dict(
                zabbix_template.__salt__,
                {
                    "zabbix.get_zabbix_id_mapper": MagicMock(
                        return_value={"template": "templateid"}
                    ),
                    "zabbix.substitute_params": MagicMock(
                        side_effect=substitute_params_exists
                    ),
                    "zabbix.run_query": MagicMock(side_effect=side_effect_run_query),
                    "zabbix.compare_params": MagicMock(
                        return_value={"new": {}, "old": {}}
                    ),
                },
            ):
                ret["result"] = True
                ret["comment"] = (
                    'Zabbix Template "{}" already exists and corresponds to a'
                    " definition.".format(name)
                )
                assert zabbix_template.present(name, {}) == ret


def test_present_update(diff_params, substitute_params_update):
    """
    Test to ensure that named template is present but must be updated
    """
    with patch("salt.states.zabbix_template.CHANGE_STACK", []):
        name = "A Testing Template"
        ret = {"name": name, "result": False, "comment": "", "changes": {}}

        def side_effect_run_query(*args):
            """
            Differentiate between __salt__ exec module function calls with different parameters.
            """
            if args[0] == "template.get":
                return ["length of result is 1 = template exists"]
            elif args[0] == "template.update":
                return diff_params

        with patch.dict(zabbix_template.__opts__, {"test": False}):
            with patch.dict(
                zabbix_template.__salt__,
                {
                    "zabbix.get_zabbix_id_mapper": MagicMock(
                        return_value={"template": "templateid"}
                    ),
                    "zabbix.substitute_params": MagicMock(
                        side_effect=substitute_params_update
                    ),
                    "zabbix.run_query": MagicMock(side_effect=side_effect_run_query),
                    "zabbix.compare_params": MagicMock(return_value=diff_params),
                },
            ):
                ret["result"] = True
                ret["comment"] = f'Zabbix Template "{name}" updated.'
                ret["changes"] = {
                    name: {
                        "old": f'Zabbix Template "{name}" differed.',
                        "new": (
                            'Zabbix Template "{}" updated according definition.'.format(
                                name
                            )
                        ),
                    }
                }
                assert zabbix_template.present(name, {}) == ret


def test_absent_test_mode():
    """
    Test to ensure that named template is absent in test mode
    """
    name = "A Testing Template"
    ret = {"name": name, "result": False, "comment": "", "changes": {}}
    with patch.dict(zabbix_template.__opts__, {"test": True}):
        with patch.dict(
            zabbix_template.__salt__,
            {"zabbix.get_object_id_by_params": MagicMock(return_value=11)},
        ):
            ret["result"] = True
            ret["comment"] = f'Zabbix Template "{name}" would be deleted.'
            ret["changes"] = {
                name: {
                    "old": f'Zabbix Template "{name}" exists.',
                    "new": f'Zabbix Template "{name}" would be deleted.',
                }
            }
            assert zabbix_template.absent(name) == ret


def test_absent():
    """
    Test to ensure that named template is absent
    """
    name = "A Testing Template"
    ret = {"name": name, "result": False, "comment": "", "changes": {}}
    with patch.dict(zabbix_template.__opts__, {"test": False}):
        with patch.dict(
            zabbix_template.__salt__,
            {"zabbix.get_object_id_by_params": MagicMock(return_value=False)},
        ):
            ret["result"] = True
            ret["comment"] = f'Zabbix Template "{name}" does not exist.'
            assert zabbix_template.absent(name) == ret

        with patch.dict(
            zabbix_template.__salt__,
            {"zabbix.get_object_id_by_params": MagicMock(return_value=11)},
        ):
            with patch.dict(
                zabbix_template.__salt__,
                {"zabbix.run_query": MagicMock(return_value=True)},
            ):
                ret["result"] = True
                ret["comment"] = f'Zabbix Template "{name}" deleted.'
                ret["changes"] = {
                    name: {
                        "old": f'Zabbix Template "{name}" existed.',
                        "new": f'Zabbix Template "{name}" deleted.',
                    }
                }
                assert zabbix_template.absent(name) == ret
@@ -1,235 +0,0 @@
"""
    :codeauthor: :email:`Jakub Sliva <jakub.sliva@ultimum.io>`
"""

import pytest

import salt.states.zabbix_valuemap as zabbix_valuemap
from tests.support.mock import MagicMock, patch


@pytest.fixture
def input_params():
    return {
        "mappings": [
            {"newvalue": "OK", "value": "0h"},
            {"newvalue": "Failure", "value": "1"},
        ],
        "name": "Server HP Health",
    }


@pytest.fixture
def existing_obj():
    return [
        {
            "valuemapid": "21",
            "name": "Server HP Health",
            "mappings": [
                {"newvalue": "OK", "value": "0h"},
                {"newvalue": "Failure", "value": "1"},
            ],
        }
    ]


@pytest.fixture
def existing_obj_diff():
    return {
        "valuemapid": "21",
        "name": "Server HP Health",
        "mappings": [
            {"newvalue": "OK", "value": "0h"},
            {"newvalue": "Failure", "value": "1"},
            {"newvalue": "some", "value": "2"},
        ],
    }


@pytest.fixture
def diff_params():
    return {
        "valuemapid": "21",
        "mappings": [
            {"newvalue": "OK", "value": "0h"},
            {"newvalue": "Failure", "value": "1"},
        ],
    }


@pytest.fixture
def configure_loader_modules():
    return {zabbix_valuemap: {}}


def test_present_create(input_params):
    """
    Test to ensure that named value map is created
    """
    name = "Server HP Health"
    ret = {"name": name, "result": False, "comment": "", "changes": {}}

    def side_effect_run_query(*args):
        """
        Differentiate between __salt__ exec module function calls with different parameters.
        """
        if args[0] == "valuemap.get":
            return False
        elif args[0] == "valuemap.create":
            return True

    with patch.dict(zabbix_valuemap.__opts__, {"test": False}):
        with patch.dict(
            zabbix_valuemap.__salt__,
            {
                "zabbix.get_zabbix_id_mapper": MagicMock(
                    return_value={"valuemap": "valuemapid"}
                ),
                "zabbix.substitute_params": MagicMock(
                    side_effect=[input_params, False]
                ),
                "zabbix.run_query": MagicMock(side_effect=side_effect_run_query),
                "zabbix.compare_params": MagicMock(return_value={}),
            },
        ):
            ret["result"] = True
            ret["comment"] = f'Zabbix Value map "{name}" created.'
            ret["changes"] = {
                name: {
                    "old": f'Zabbix Value map "{name}" did not exist.',
                    "new": 'Zabbix Value map "{}" created according definition.'.format(
                        name
                    ),
                }
            }
            assert zabbix_valuemap.present(name, {}) == ret


def test_present_exists(input_params, existing_obj):
    """
    Test to ensure that named value map is present and not changed
    """
    name = "Server HP Health"
    ret = {"name": name, "result": False, "comment": "", "changes": {}}

    with patch.dict(zabbix_valuemap.__opts__, {"test": False}):
        with patch.dict(
            zabbix_valuemap.__salt__,
            {
                "zabbix.get_zabbix_id_mapper": MagicMock(
                    return_value={"valuemap": "valuemapid"}
                ),
                "zabbix.substitute_params": MagicMock(
                    side_effect=[input_params, existing_obj]
                ),
                "zabbix.run_query": MagicMock(return_value=["length of result is 1"]),
                "zabbix.compare_params": MagicMock(return_value={}),
            },
        ):
            ret["result"] = True
            ret["comment"] = (
                'Zabbix Value map "{}" already exists and corresponds to a definition.'.format(
                    name
                )
            )
            assert zabbix_valuemap.present(name, {}) == ret


def test_present_update(input_params, existing_obj_diff, diff_params):
    """
    Test to ensure that named value map is present but must be updated
    """
    name = "Server HP Health"
    ret = {"name": name, "result": False, "comment": "", "changes": {}}

    def side_effect_run_query(*args):
        """
        Differentiate between __salt__ exec module function calls with different parameters.
        """
        if args[0] == "valuemap.get":
            return ["length of result is 1 = valuemap exists"]
        elif args[0] == "valuemap.update":
            return diff_params

    with patch.dict(zabbix_valuemap.__opts__, {"test": False}):
        with patch.dict(
            zabbix_valuemap.__salt__,
            {
                "zabbix.get_zabbix_id_mapper": MagicMock(
                    return_value={"valuemap": "valuemapid"}
                ),
                "zabbix.substitute_params": MagicMock(
                    side_effect=[input_params, existing_obj_diff]
                ),
                "zabbix.run_query": MagicMock(side_effect=side_effect_run_query),
                "zabbix.compare_params": MagicMock(return_value=diff_params),
            },
        ):
            ret["result"] = True
            ret["comment"] = f'Zabbix Value map "{name}" updated.'
            ret["changes"] = {
                name: {
                    "old": (
                        'Zabbix Value map "{}" differed '
                        "in following parameters: {}".format(name, diff_params)
                    ),
                    "new": f'Zabbix Value map "{name}" fixed.',
                }
            }
            assert zabbix_valuemap.present(name, {}) == ret


def test_absent_test_mode():
    """
    Test to ensure that named value map is absent in test mode
    """
    name = "Server HP Health"
    ret = {"name": name, "result": False, "comment": "", "changes": {}}
    with patch.dict(zabbix_valuemap.__opts__, {"test": True}):
        with patch.dict(
            zabbix_valuemap.__salt__,
            {"zabbix.get_object_id_by_params": MagicMock(return_value=11)},
        ):
            ret["result"] = True
            ret["comment"] = f'Zabbix Value map "{name}" would be deleted.'
            ret["changes"] = {
                name: {
                    "old": f'Zabbix Value map "{name}" exists.',
                    "new": f'Zabbix Value map "{name}" would be deleted.',
                }
            }
            assert zabbix_valuemap.absent(name) == ret


def test_absent():
    """
    Test to ensure that named value map is absent
    """
    name = "Server HP Health"
    ret = {"name": name, "result": False, "comment": "", "changes": {}}
    with patch.dict(zabbix_valuemap.__opts__, {"test": False}):
        with patch.dict(
            zabbix_valuemap.__salt__,
            {"zabbix.get_object_id_by_params": MagicMock(return_value=False)},
        ):
            ret["result"] = True
            ret["comment"] = f'Zabbix Value map "{name}" does not exist.'
            assert zabbix_valuemap.absent(name) == ret

        with patch.dict(
            zabbix_valuemap.__salt__,
            {"zabbix.get_object_id_by_params": MagicMock(return_value=11)},
        ):
            with patch.dict(
                zabbix_valuemap.__salt__,
                {"zabbix.run_query": MagicMock(return_value=True)},
            ):
                ret["result"] = True
                ret["comment"] = f'Zabbix Value map "{name}" deleted.'
                ret["changes"] = {
                    name: {
                        "old": f'Zabbix Value map "{name}" existed.',
                        "new": f'Zabbix Value map "{name}" deleted.',
                    }
                }
                assert zabbix_valuemap.absent(name) == ret
File diff suppressed because it is too large
@@ -1,54 +0,0 @@
"""
    :codeauthor: :email:`Vernon Cole <vernondcole@gmail.com>`
"""

import os

import salt.utils.sdb as sdb
from tests.support.mixins import LoaderModuleMockMixin
from tests.support.runtests import RUNTIME_VARS
from tests.support.unit import TestCase


class SdbTestCase(TestCase, LoaderModuleMockMixin):
    """
    Test cases for salt.modules.sdb
    """

    @classmethod
    def setUpClass(cls):
        cls.TEMP_DATABASE_FILE = os.path.join(RUNTIME_VARS.TMP, "test_sdb.sqlite")
        cls.sdb_opts = {
            "extension_modules": "",
            "optimization_order": [0, 1, 2],
            "test_sdb_data": {
                "driver": "sqlite3",
                "database": cls.TEMP_DATABASE_FILE,
                "table": "sdb",
                "create_table": True,
            },
        }

    @classmethod
    def tearDownClass(cls):
        try:
            os.unlink(cls.TEMP_DATABASE_FILE)
        except OSError:
            pass

    def setup_loader_modules(self):
        return {sdb: {}}

    # test with SQLite database key not present

    def test_sqlite_get_not_found(self):
        what = sdb.sdb_get("sdb://test_sdb_data/thisKeyDoesNotExist", self.sdb_opts)
        self.assertEqual(what, None)

    # test with SQLite database write and read

    def test_sqlite_get_found(self):
        expected = {b"name": b"testone", b"number": 46}
        sdb.sdb_set("sdb://test_sdb_data/test1", expected, self.sdb_opts)
        resp = sdb.sdb_get("sdb://test_sdb_data/test1", self.sdb_opts)
        self.assertEqual(resp, expected)
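        # Note: sdb URIs follow Salt's sdb://<profile>/<key> convention; here
        # "test_sdb_data" is the profile defined in cls.sdb_opts above and the
        # trailing path segment ("test1") is the key stored in the SQLite table.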