Merge branch 'master' into pr/56249

Daniel A. Wozniak 2020-04-20 06:56:47 +00:00
commit 67d7a5a427
2925 changed files with 436618 additions and 367359 deletions

@@ -1,4 +1,4 @@
@Library('salt@master-1.6') _
@Library('salt@master-1.7') _
runDocs(
env: env)

@@ -1,16 +0,0 @@
@Library('salt@master-1.6') _
runTestSuite(
concurrent_builds: 1,
distro_name: 'amazon',
distro_version: '1',
env: env,
golden_images_branch: 'master',
jenkins_slave_label: 'kitchen-slave',
nox_env_name: 'runtests-zeromq',
nox_passthrough_opts: '--ssh-tests',
python_version: 'py2',
testrun_timeout: 7,
use_spot_instances: false)
// vim: ft=groovy

@@ -1,16 +0,0 @@
@Library('salt@master-1.6') _
runTestSuite(
concurrent_builds: 1,
distro_name: 'amazon',
distro_version: '2',
env: env,
golden_images_branch: 'master',
jenkins_slave_label: 'kitchen-slave',
nox_env_name: 'runtests-zeromq',
nox_passthrough_opts: '--ssh-tests',
python_version: 'py2',
testrun_timeout: 6,
use_spot_instances: true)
// vim: ft=groovy

@@ -1,4 +1,4 @@
@Library('salt@master-1.6') _
@Library('salt@master-1.7') _
runTestSuite(
concurrent_builds: 1,

@@ -1,16 +0,0 @@
@Library('salt@master-1.6') _
runTestSuite(
concurrent_builds: 1,
distro_name: 'arch',
distro_version: 'lts',
env: env,
golden_images_branch: 'master',
jenkins_slave_label: 'kitchen-slave',
nox_env_name: 'runtests-zeromq',
nox_passthrough_opts: '-n integration.modules.test_pkg',
python_version: 'py2',
testrun_timeout: 6,
use_spot_instances: true)
// vim: ft=groovy

@@ -1,4 +1,4 @@
@Library('salt@master-1.6') _
@Library('salt@master-1.7') _
runTestSuite(
concurrent_builds: 1,

@@ -1,16 +0,0 @@
@Library('salt@master-1.6') _
runTestSuite(
concurrent_builds: 1,
distro_name: 'centos',
distro_version: '6',
env: env,
golden_images_branch: 'master',
jenkins_slave_label: 'kitchen-slave',
nox_env_name: 'runtests-zeromq',
nox_passthrough_opts: '--ssh-tests',
python_version: 'py2',
testrun_timeout: 6,
use_spot_instances: true)
// vim: ft=groovy

@@ -1,16 +0,0 @@
@Library('salt@master-1.6') _
runTestSuite(
concurrent_builds: 1,
distro_name: 'centos',
distro_version: '7',
env: env,
golden_images_branch: 'master',
jenkins_slave_label: 'kitchen-slave',
nox_env_name: 'runtests-zeromq',
nox_passthrough_opts: '--ssh-tests',
python_version: 'py2',
testrun_timeout: 6,
use_spot_instances: true)
// vim: ft=groovy

@@ -1,17 +0,0 @@
@Library('salt@master-1.6') _
runTestSuite(
concurrent_builds: 0,
distro_name: 'centos',
distro_version: '7',
env: env,
golden_images_branch: 'master',
jenkins_slave_label: 'kitchen-slave',
kitchen_platforms_file: '/var/jenkins/workspace/nox-cloud-platforms.yml',
nox_env_name: 'runtests-cloud',
nox_passthrough_opts: '',
python_version: 'py2',
testrun_timeout: 6,
use_spot_instances: true)
// vim: ft=groovy

@@ -1,16 +0,0 @@
@Library('salt@master-1.6') _
runTestSuite(
concurrent_builds: 1,
distro_name: 'centos',
distro_version: '7',
env: env,
golden_images_branch: 'master',
jenkins_slave_label: 'kitchen-slave',
nox_env_name: 'runtests-zeromq-m2crypto',
nox_passthrough_opts: '--ssh-tests',
python_version: 'py2',
testrun_timeout: 6,
use_spot_instances: true)
// vim: ft=groovy

@@ -1,17 +0,0 @@
@Library('salt@master-1.6') _
runTestSuite(
concurrent_builds: 1,
distro_name: 'centos',
distro_version: '7',
env: env,
extra_codecov_flags: ["proxy"],
golden_images_branch: 'master',
jenkins_slave_label: 'kitchen-slave',
nox_env_name: 'runtests-zeromq',
nox_passthrough_opts: '--proxy',
python_version: 'py2',
testrun_timeout: 6,
use_spot_instances: true)
// vim: ft=groovy

@@ -1,16 +0,0 @@
@Library('salt@master-1.6') _
runTestSuite(
concurrent_builds: 1,
distro_name: 'centos',
distro_version: '7',
env: env,
golden_images_branch: 'master',
jenkins_slave_label: 'kitchen-slave',
nox_env_name: 'runtests-zeromq-pycryptodomex',
nox_passthrough_opts: '--ssh-tests',
python_version: 'py2',
testrun_timeout: 6,
use_spot_instances: true)
// vim: ft=groovy

@@ -1,16 +0,0 @@
@Library('salt@master-1.6') _
runTestSuite(
concurrent_builds: 1,
distro_name: 'centos',
distro_version: '7',
env: env,
golden_images_branch: 'master',
jenkins_slave_label: 'kitchen-slave',
nox_env_name: 'runtests-tcp',
nox_passthrough_opts: '--ssh-tests',
python_version: 'py2',
testrun_timeout: 6,
use_spot_instances: true)
// vim: ft=groovy

@@ -1,16 +0,0 @@
@Library('salt@master-1.6') _
runTestSuite(
concurrent_builds: 1,
distro_name: 'centos',
distro_version: '7',
env: env,
golden_images_branch: 'master',
jenkins_slave_label: 'kitchen-slave',
nox_env_name: 'runtests-tornado',
nox_passthrough_opts: '--ssh-tests',
python_version: 'py2',
testrun_timeout: 6,
use_spot_instances: true)
// vim: ft=groovy

@@ -1,4 +1,4 @@
@Library('salt@master-1.6') _
@Library('salt@master-1.7') _
runTestSuite(
concurrent_builds: 1,

@@ -1,4 +1,4 @@
@Library('salt@master-1.6') _
@Library('salt@master-1.7') _
runTestSuite(
concurrent_builds: 0,

@@ -1,4 +1,4 @@
@Library('salt@master-1.6') _
@Library('salt@master-1.7') _
runTestSuite(
concurrent_builds: 1,

@@ -1,4 +1,4 @@
@Library('salt@master-1.6') _
@Library('salt@master-1.7') _
runTestSuite(
concurrent_builds: 1,

@@ -1,4 +1,4 @@
@Library('salt@master-1.6') _
@Library('salt@master-1.7') _
runTestSuite(
concurrent_builds: 1,

@@ -1,4 +1,4 @@
@Library('salt@master-1.6') _
@Library('salt@master-1.7') _
runTestSuite(
concurrent_builds: 1,

@@ -1,4 +1,4 @@
@Library('salt@master-1.6') _
@Library('salt@master-1.7') _
runTestSuite(
concurrent_builds: 1,

@@ -1,4 +1,4 @@
@Library('salt@master-1.6') _
@Library('salt@master-1.7') _
runTestSuite(
concurrent_builds: 1,

@@ -1,16 +0,0 @@
@Library('salt@master-1.6') _
runTestSuite(
concurrent_builds: 1,
distro_name: 'debian',
distro_version: '8',
env: env,
golden_images_branch: 'master',
jenkins_slave_label: 'kitchen-slave',
nox_env_name: 'runtests-zeromq',
nox_passthrough_opts: '--ssh-tests',
python_version: 'py2',
testrun_timeout: 6,
use_spot_instances: true)
// vim: ft=groovy

@@ -1,16 +0,0 @@
@Library('salt@master-1.6') _
runTestSuite(
concurrent_builds: 1,
distro_name: 'debian',
distro_version: '9',
env: env,
golden_images_branch: 'master',
jenkins_slave_label: 'kitchen-slave',
nox_env_name: 'runtests-zeromq',
nox_passthrough_opts: '--ssh-tests',
python_version: 'py2',
testrun_timeout: 6,
use_spot_instances: true)
// vim: ft=groovy

@@ -1,4 +1,4 @@
@Library('salt@master-1.6') _
@Library('salt@master-1.7') _
runTestSuite(
concurrent_builds: 1,

@@ -1,16 +0,0 @@
@Library('salt@master-1.6') _
runTestSuite(
concurrent_builds: 1,
distro_name: 'fedora',
distro_version: '30',
env: env,
golden_images_branch: 'master',
jenkins_slave_label: 'kitchen-slave',
nox_env_name: 'runtests-zeromq',
nox_passthrough_opts: '--ssh-tests',
python_version: 'py2',
testrun_timeout: 6,
use_spot_instances: true)
// vim: ft=groovy

@@ -1,4 +1,4 @@
@Library('salt@master-1.6') _
@Library('salt@master-1.7') _
runTestSuite(
concurrent_builds: 1,

@@ -1,4 +1,4 @@
@Library('salt@master-1.6') _
@Library('salt@master-1.7') _
runTestSuite(
concurrent_builds: 1,

@@ -1,20 +0,0 @@
@Library('salt@master-1.6') _
// Pre-nox pipeline
runTestSuite(
concurrent_builds: 1,
distro_name: 'macosx',
distro_version: 'highsierra',
env: env,
golden_images_branch: 'master',
jenkins_slave_label: 'kitchen-slave-mac',
kitchen_platforms_file: '/var/jenkins/workspace/pre-golden-nox-platforms.yml',
kitchen_verifier_file: '/var/jenkins/workspace/nox-verifier.yml',
nox_env_name: 'runtests-zeromq',
nox_passthrough_opts: '',
python_version: 'py2',
run_full: params.runFull,
testrun_timeout: 6,
use_spot_instances: false)
// vim: ft=groovy

@@ -1,4 +1,4 @@
@Library('salt@master-1.6') _
@Library('salt@master-1.7') _
// Pre-nox pipeline
runTestSuite(
@@ -11,7 +11,7 @@ runTestSuite(
kitchen_platforms_file: '/var/jenkins/workspace/pre-golden-nox-platforms.yml',
kitchen_verifier_file: '/var/jenkins/workspace/nox-verifier.yml',
nox_env_name: 'runtests-zeromq',
nox_passthrough_opts: '',
nox_passthrough_opts: '--ssh-tests',
python_version: 'py3',
run_full: params.runFull,
testrun_timeout: 6,

@@ -1,20 +0,0 @@
@Library('salt@master-1.6') _
// Pre-nox pipeline
runTestSuite(
concurrent_builds: 1,
distro_name: 'macosx',
distro_version: 'mojave',
env: env,
golden_images_branch: 'master',
jenkins_slave_label: 'kitchen-slave-mac',
kitchen_platforms_file: '/var/jenkins/workspace/pre-golden-nox-platforms.yml',
kitchen_verifier_file: '/var/jenkins/workspace/nox-verifier.yml',
nox_env_name: 'runtests-zeromq',
nox_passthrough_opts: '',
python_version: 'py2',
run_full: params.runFull,
testrun_timeout: 6,
use_spot_instances: false)
// vim: ft=groovy

@@ -1,4 +1,4 @@
@Library('salt@master-1.6') _
@Library('salt@master-1.7') _
// Pre-nox pipeline
runTestSuite(
@@ -11,7 +11,7 @@ runTestSuite(
kitchen_platforms_file: '/var/jenkins/workspace/pre-golden-nox-platforms.yml',
kitchen_verifier_file: '/var/jenkins/workspace/nox-verifier.yml',
nox_env_name: 'runtests-zeromq',
nox_passthrough_opts: '',
nox_passthrough_opts: '--ssh-tests',
python_version: 'py3',
run_full: params.runFull,
testrun_timeout: 6,

@@ -1,20 +0,0 @@
@Library('salt@master-1.6') _
// Pre-nox pipeline
runTestSuite(
concurrent_builds: 1,
distro_name: 'macosx',
distro_version: 'sierra',
env: env,
golden_images_branch: 'master',
jenkins_slave_label: 'kitchen-slave-mac',
kitchen_platforms_file: '/var/jenkins/workspace/pre-golden-nox-platforms.yml',
kitchen_verifier_file: '/var/jenkins/workspace/nox-verifier.yml',
nox_env_name: 'runtests-zeromq',
nox_passthrough_opts: '',
python_version: 'py2',
run_full: params.runFull,
testrun_timeout: 6,
use_spot_instances: false)
// vim: ft=groovy

@@ -1,4 +1,4 @@
@Library('salt@master-1.6') _
@Library('salt@master-1.7') _
// Pre-nox pipeline
runTestSuite(
@@ -11,7 +11,7 @@ runTestSuite(
kitchen_platforms_file: '/var/jenkins/workspace/pre-golden-nox-platforms.yml',
kitchen_verifier_file: '/var/jenkins/workspace/nox-verifier.yml',
nox_env_name: 'runtests-zeromq',
nox_passthrough_opts: '',
nox_passthrough_opts: '--ssh-tests',
python_version: 'py3',
run_full: params.runFull,
testrun_timeout: 6,

@@ -1,16 +0,0 @@
@Library('salt@master-1.6') _
runTestSuite(
concurrent_builds: 1,
distro_name: 'opensuse',
distro_version: '15',
env: env,
golden_images_branch: 'master',
jenkins_slave_label: 'kitchen-slave',
nox_env_name: 'runtests-zeromq',
nox_passthrough_opts: '--ssh-tests',
python_version: 'py2',
testrun_timeout: 6,
use_spot_instances: true)
// vim: ft=groovy

@@ -1,4 +1,4 @@
@Library('salt@master-1.6') _
@Library('salt@master-1.7') _
runTestSuite(
concurrent_builds: 1,

@@ -1,16 +0,0 @@
@Library('salt@master-1.6') _
runTestSuite(
concurrent_builds: 1,
distro_name: 'ubuntu',
distro_version: '1604',
env: env,
golden_images_branch: 'master',
jenkins_slave_label: 'kitchen-slave',
nox_env_name: 'runtests-zeromq',
nox_passthrough_opts: '--ssh-tests',
python_version: 'py2',
testrun_timeout: 6,
use_spot_instances: true)
// vim: ft=groovy

@@ -1,16 +0,0 @@
@Library('salt@master-1.6') _
runTestSuite(
concurrent_builds: 1,
distro_name: 'ubuntu',
distro_version: '1604',
env: env,
golden_images_branch: 'master',
jenkins_slave_label: 'kitchen-slave',
nox_env_name: 'runtests-zeromq-m2crypto',
nox_passthrough_opts: '--ssh-tests',
python_version: 'py2',
testrun_timeout: 6,
use_spot_instances: true)
// vim: ft=groovy

@@ -1,17 +0,0 @@
@Library('salt@master-1.6') _
runTestSuite(
concurrent_builds: 1,
distro_name: 'ubuntu',
distro_version: '1604',
env: env,
extra_codecov_flags: ["proxy"],
golden_images_branch: 'master',
jenkins_slave_label: 'kitchen-slave',
nox_env_name: 'runtests-zeromq',
nox_passthrough_opts: '--proxy',
python_version: 'py2',
testrun_timeout: 6,
use_spot_instances: true)
// vim: ft=groovy

@@ -1,16 +0,0 @@
@Library('salt@master-1.6') _
runTestSuite(
concurrent_builds: 1,
distro_name: 'ubuntu',
distro_version: '1604',
env: env,
golden_images_branch: 'master',
jenkins_slave_label: 'kitchen-slave',
nox_env_name: 'runtests-zeromq-pycryptodomex',
nox_passthrough_opts: '--ssh-tests',
python_version: 'py2',
testrun_timeout: 6,
use_spot_instances: true)
// vim: ft=groovy

@@ -1,16 +0,0 @@
@Library('salt@master-1.6') _
runTestSuite(
concurrent_builds: 1,
distro_name: 'ubuntu',
distro_version: '1604',
env: env,
golden_images_branch: 'master',
jenkins_slave_label: 'kitchen-slave',
nox_env_name: 'runtests-tcp',
nox_passthrough_opts: '--ssh-tests',
python_version: 'py2',
testrun_timeout: 6,
use_spot_instances: true)
// vim: ft=groovy

@@ -1,16 +0,0 @@
@Library('salt@master-1.6') _
runTestSuite(
concurrent_builds: 1,
distro_name: 'ubuntu',
distro_version: '1604',
env: env,
golden_images_branch: 'master',
jenkins_slave_label: 'kitchen-slave',
nox_env_name: 'runtests-tornado',
nox_passthrough_opts: '--ssh-tests',
python_version: 'py2',
testrun_timeout: 6,
use_spot_instances: true)
// vim: ft=groovy

@@ -1,4 +1,4 @@
@Library('salt@master-1.6') _
@Library('salt@master-1.7') _
runTestSuite(
concurrent_builds: 1,

@@ -1,4 +1,4 @@
@Library('salt@master-1.6') _
@Library('salt@master-1.7') _
runTestSuite(
concurrent_builds: 1,

@@ -1,4 +1,4 @@
@Library('salt@master-1.6') _
@Library('salt@master-1.7') _
runTestSuite(
concurrent_builds: 1,

@@ -1,4 +1,4 @@
@Library('salt@master-1.6') _
@Library('salt@master-1.7') _
runTestSuite(
concurrent_builds: 1,

@@ -1,4 +1,4 @@
@Library('salt@master-1.6') _
@Library('salt@master-1.7') _
runTestSuite(
concurrent_builds: 1,

@@ -1,16 +0,0 @@
@Library('salt@master-1.6') _
runTestSuite(
concurrent_builds: 1,
distro_name: 'ubuntu',
distro_version: '1804',
env: env,
golden_images_branch: 'master',
jenkins_slave_label: 'kitchen-slave',
nox_env_name: 'runtests-zeromq',
nox_passthrough_opts: '--ssh-tests',
python_version: 'py2',
testrun_timeout: 6,
use_spot_instances: true)
// vim: ft=groovy

@@ -1,4 +1,4 @@
@Library('salt@master-1.6') _
@Library('salt@master-1.7') _
runTestSuite(
concurrent_builds: 1,

@@ -1,16 +0,0 @@
@Library('salt@master-1.6') _
runTestSuite(
concurrent_builds: 1,
distro_name: 'windows',
distro_version: '2016',
env: env,
golden_images_branch: 'master',
jenkins_slave_label: 'kitchen-slave',
nox_env_name: 'runtests-zeromq',
nox_passthrough_opts: '--unit',
python_version: 'py2',
testrun_timeout: 8,
use_spot_instances: false)
// vim: ft=groovy

@@ -1,4 +1,4 @@
@Library('salt@master-1.6') _
@Library('salt@master-1.7') _
runTestSuite(
concurrent_builds: 1,

@@ -1,16 +0,0 @@
@Library('salt@master-1.6') _
runTestSuite(
concurrent_builds: 1,
distro_name: 'windows',
distro_version: '2019',
env: env,
golden_images_branch: 'master',
jenkins_slave_label: 'kitchen-slave',
nox_env_name: 'runtests-zeromq',
nox_passthrough_opts: '--unit',
python_version: 'py2',
testrun_timeout: 8,
use_spot_instances: false)
// vim: ft=groovy

@@ -1,4 +1,4 @@
@Library('salt@master-1.6') _
@Library('salt@master-1.7') _
runTestSuite(
concurrent_builds: 1,

@@ -1,4 +1,4 @@
@Library('salt@master-1.6') _
@Library('salt@master-1.7') _
runLint(
env: env)

.ci/pre-commit
@@ -0,0 +1,6 @@
@Library('salt@master-1.7') _
runPreCommit(
env: env)
// vim: ft=groovy

@@ -2,21 +2,23 @@ codecov:
ci:
- drone.saltstack.com
- jenkinsci.saltstack.com
# max_report_age: 24 # The age you want coverage reports to expire at, or if you
# # want to disable this check. Expired reports will not be processed by codecov.
# require_ci_to_pass: yes # Less spammy. Only notify on passing builds.
branch: master
# notify:
# require_ci_to_pass: yes # Less spammy. Only notify on passing builds.
# after_n_builds: 46 # Only notify after N builds
# after_n_builds: 25 # Only notify after N builds
# # This value is the output of:
# # sh -c 'echo "$(ls .ci/ | grep kitchen | wc -l)"'
# wait_for_ci: yes # Should Codecov wait for all CI statuses to complete before sending ours.
# # Note: Codecov considers all non-codecov statues to be CI statuses
# Disable Notifications
notify: off
ignore:
- ^*.py$ # python files at the repo root, ie, setup.py
- doc/.* # ignore any code under doc/
- salt/ext/.* # ignore any code under salt/ext
coverage:
round: up
@@ -25,9 +27,13 @@ coverage:
status:
project: # measuring the overall project coverage
default: false # disable the default status that measures entire project
default:
informational: true # Use Codecov in informational mode. Default is false. If true is specified the
# resulting status will pass no matter what the coverage is or what other settings
# are specified. Informational mode is great to use if you want to expose codecov
# information to other developers in your pull request without necessarily gating
# PRs on that information.
salt: # declare a new status context "salt"
enabled: yes # must be yes|true to enable this status
paths: "!tests/" # remove all files in "tests/"
target: auto # will use the coverage from the base commit (pull request base or parent commit) coverage to compare against.
base: auto # will use the pull request base if the commit is on a pull request. If not, the parent commit will be used.
@@ -36,7 +42,6 @@ coverage:
if_not_found: success # if parent is not found report status as success, error, or failure
if_ci_failed: error # if ci fails report status as success, error, or failure
tests: # declare a new status context "tests"
enabled: yes # must be yes|true to enable this status
#target: 100% # we always want 100% coverage here
target: auto # auto while we get this going
base: auto # will use the pull request base if the commit is on a pull request. If not, the parent commit will be used.
@@ -50,7 +55,11 @@ coverage:
# entire pull requests Coverage Diff. Checking if the lines
# adjusted are covered at least X%.
default:
enabled: yes # must be yes|true to enable this status
informational: true # Use Codecov in informational mode. Default is false. If true is specified the
# resulting status will pass no matter what the coverage is or what other settings
# are specified. Informational mode is great to use if you want to expose codecov
# information to other developers in your pull request without necessarily gating
# PRs on that information.
target: 100% # Newly added lines must have 100% coverage
if_no_uploads: error # will post commit status of "error" if no coverage reports were uploaded
# options: success, error, failure
@@ -59,7 +68,11 @@ coverage:
changes: # if there are any unexpected changes in coverage
default:
enabled: yes # must be yes|true to enable this status
informational: true # Use Codecov in informational mode. Default is false. If true is specified the
# resulting status will pass no matter what the coverage is or what other settings
# are specified. Informational mode is great to use if you want to expose codecov
# information to other developers in your pull request without necessarily gating
# PRs on that information.
if_no_uploads: error
if_not_found: success
if_ci_failed: error
@@ -68,9 +81,11 @@ flags:
salt:
paths:
- salt/
carryforward: true # https://docs.codecov.io/docs/carryforward-flags
tests:
paths:
- tests/
carryforward: true
#comment:
# layout: "reach, diff, flags, files"

@@ -0,0 +1,2 @@
# Blacken Salt
0b2a5613b345f17339cb90e60b407199b3d26980

@@ -67,19 +67,19 @@ Fixing issues
=============
If you wish to help us fix the issue you're reporting,
[Salt's documentation](http://docs.saltstack.com/en/latest/index.html)
[Salt's documentation](http://docs.saltstack.com/en/master/index.html)
already includes information to help you setup a development environment,
under [Developing Salt](http://docs.saltstack.com/en/latest/topics/development/hacking.html).
under [Developing Salt](http://docs.saltstack.com/en/master/topics/development/hacking.html).
[SaltStack's Contributing documentation](https://docs.saltstack.com/en/latest/topics/development/contributing.html)
[SaltStack's Contributing documentation](https://docs.saltstack.com/en/master/topics/development/contributing.html)
is also helpful, as it explains sending in pull requests, keeping your
salt branches in sync, and knowing
[which branch](https://docs.saltstack.com/en/latest/topics/development/contributing.html#which-salt-branch)
[which branch](https://docs.saltstack.com/en/master/topics/development/contributing.html#which-salt-branch)
new features or bug fixes should be submitted against.
Fix the issue you have in hand and, if possible, also add a test case to Salt's
testing suite. Then, create a
[pull request](http://docs.saltstack.com/en/latest/topics/development/contributing.html#sending-a-github-pull-request),
[pull request](http://docs.saltstack.com/en/master/topics/development/contributing.html#sending-a-github-pull-request),
and **that's it**!
Salt's development team will review your fix and if everything is OK, your fix

.github/ISSUE_TEMPLATE/bug_report.md
@@ -0,0 +1,35 @@
---
name: Bug report
about: Create a report to help us improve
title: "[BUG]"
labels: Bug
assignees: ''
---
**Description**
A clear and concise description of what the bug is.
**Setup**
(Please provide relevant configs and/or SLS files (be sure to remove sensitive info).
**Steps to Reproduce the behavior**
(Include debug logs if possible and relevant)
**Expected behavior**
A clear and concise description of what you expected to happen.
**Screenshots**
If applicable, add screenshots to help explain your problem.
**Versions Report**
<details><summary>salt --versions-report</summary>
(Provided by running salt --versions-report. Please also mention any differences in master/minion versions.)
```
PASTE HERE
```
</details>
**Additional context**
Add any other context about the problem here.

.github/ISSUE_TEMPLATE/config.yml
@@ -0,0 +1,14 @@
blank_issues_enabled: true
contact_links:
- name: Salt Community Slack
url: https://saltstackcommunity.slack.com/
about: Please ask and answer questions here.
- name: Salt-Users Forum
url: https://groups.google.com/forum/#!forum/salt-users
about: Please ask and answer questions here.
- name: Salt on Freenode
url: http://webchat.freenode.net/?channels=salt&uio=Mj10cnVlJjk9dHJ1ZSYxMD10cnVl83
about: Please ask and answer questions here.
- name: Security vulnerabilities
email: security@saltstack.com
about: Please report security vulnerabilities here.

@@ -0,0 +1,23 @@
---
name: Feature request
about: Suggest an idea for this project
title: "[FEATURE REQUEST]"
labels: Feature
assignees: ''
---
**Is your feature request related to a problem? Please describe.**
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
**Describe the solution you'd like**
A clear and concise description of what you want to happen.
**Describe alternatives you've considered**
A clear and concise description of any alternative solutions or features you've considered.
**Additional context**
Add any other context or screenshots about the feature request here.
**Please Note**
If this feature request would be considered a substantial change or addition, this should go through a SEP process here https://github.com/saltstack/salt-enhancement-proposals, instead of a feature request.

@@ -1,6 +1,7 @@
### What does this PR do?
### What issues does this PR fix or reference?
Fixes:
### Previous Behavior
Remove this section if not relevant
@@ -8,16 +9,16 @@ Remove this section if not relevant
### New Behavior
Remove this section if not relevant
### Tests written?
### Merge requirements satisfied?
**[NOTICE] Bug fixes or features added to Salt require tests.**
Please review the [test documentation](https://docs.saltstack.com/en/latest/topics/tutorials/writing_tests.html) for details on how to implement tests into Salt's test suite.
Yes/No
<!-- Please review the [test documentation](https://docs.saltstack.com/en/master/topics/tutorials/writing_tests.html) for details on how to implement tests into Salt's test suite. -->
- [ ] Docs
- [ ] Changelog - https://docs.saltstack.com/en/latest/topics/development/changelog.html
- [ ] Tests written/updated
### Commits signed with GPG?
Yes/No
Please review [Salt's Contributing Guide](https://docs.saltstack.com/en/latest/topics/development/contributing.html) for best practices.
Please review [Salt's Contributing Guide](https://docs.saltstack.com/en/master/topics/development/contributing.html) for best practices.
See GitHub's [page on GPG signing](https://help.github.com/articles/signing-commits-using-gpg/) for more information about signing commits with GPG.

.github/stale.yml
@@ -2,10 +2,10 @@
# Number of days of inactivity before an issue becomes stale
# 600 is approximately 1 year and 8 months
daysUntilStale: 30
daysUntilStale: 90
# Number of days of inactivity before a stale issue is closed
daysUntilClose: 7
daysUntilClose: false
# Issues with these labels will never be considered stale
exemptLabels:
@@ -17,6 +17,7 @@ exemptLabels:
- Bug
- Feature
- Test Failure
- ZD
# Label to use when marking an issue as stale
staleLabel: stale
@@ -36,4 +37,3 @@ closeComment: false
# Limit to only `issues` or `pulls`
only: issues

.gitignore
@@ -114,3 +114,4 @@ kitchen.local.yml
.bundle/
Gemfile.lock
/artifacts/
requirements/static/py*/*.log

.gitlab-ci.yml
@@ -0,0 +1,108 @@
---
stages:
- lint
- test
include:
- local: 'cicd/kitchen_template.yml'
- local: 'cicd/kitchen_testruns.yml'
# pre-commit-run-all:
# image:
# name: registry.gitlab.com/saltstack/pop/cicd/containers/ubuntu1804:latest
# entrypoint: [""]
# stage: lint
# variables:
# PRE_COMMIT_HOME: "${CI_PROJECT_DIR}/pre-commit-cache"
# only:
# refs:
# - merge_requests
# cache:
# key: pre-commit-cache
# paths:
# - pre-commit-cache/
# script:
# - pip3 install pre-commit
# - pre-commit run -a -v --color always
lint-salt-full:
image: registry.gitlab.com/saltstack/pop/cicd/containers/ubuntu1804:latest
stage: lint
tags:
- saltstack-internal
cache:
key: nox-lint-cache
paths:
- .nox
only:
refs:
- schedules
script:
- python --version
- pip3 install -U nox-py2==2019.6.25
- nox --version
- nox --install-only -e lint-salt
- EC=254
- export PYLINT_REPORT=pylint-report-salt-full.log
- nox -e lint-salt
- EC=$?
- exit $EC
lint-tests-full:
image: registry.gitlab.com/saltstack/pop/cicd/containers/ubuntu1804:latest
stage: lint
tags:
- saltstack-internal
cache:
key: nox-lint-cache
paths:
- .nox
only:
refs:
- schedules
script:
- python --version
- pip3 install -U nox-py2==2019.6.25
- nox --version
- nox --install-only -e lint-tests
- EC=254
- export PYLINT_REPORT=pylint-report-tests-full.log
- nox -e lint-tests
- EC=$?
- exit $EC
docs-build-html:
image: registry.gitlab.com/saltstack/pop/cicd/containers/ubuntu1804:latest
stage: test
tags:
- saltstack-internal
cache:
key: nox-docs-cache
paths:
- .nox
only:
refs:
- schedules
script:
- python --version
- pip install -U nox-py2==2019.6.25
- nox --version
- nox -e 'docs-html(compress=True)'
docs-build-man-pages:
image: registry.gitlab.com/saltstack/pop/cicd/containers/ubuntu1804:latest
stage: test
tags:
- saltstack-internal
cache:
key: nox-docs-cache
paths:
- .nox
only:
refs:
- schedules
script:
- python --version
- pip install -U nox-py2==2019.6.25
- nox --version
- nox -e 'docs-man(compress=True, update=False)'

@@ -1,120 +1,11 @@
default_language_version:
python: python3
exclude: ^(doc/_static/.*|doc/_themes/.*)$
repos:
- repo: https://github.com/saltstack/pip-tools-compile-impersonate
rev: master
hooks:
- id: pip-tools-compile
alias: compile-linux-py2.7-zmq-requirements
name: Linux Py2.7 ZeroMQ Requirements
files: ^requirements/((base|zeromq|pytest)\.txt|static/linux\.in)$
exclude: ^requirements/static/(lint|cloud|docs|darwin|windows)\.in$
args:
- -v
- --py-version=2.7
- --platform=linux
- --include=requirements/base.txt
- --include=requirements/zeromq.txt
- --include=requirements/pytest.txt
- id: pip-tools-compile
alias: compile-darwin-py2.7-zmq-requirements
name: Darwin Py2.7 ZeroMQ Requirements
files: ^(pkg/osx/(req|req_ext)\.txt|requirements/((base|zeromq|pytest)\.txt|static/darwin\.in))$
args:
- -v
- --py-version=2.7
- --platform=darwin
- --include=pkg/osx/req.txt
- --include=pkg/osx/req_ext.txt
- --include=requirements/base.txt
- --include=requirements/zeromq.txt
- --include=requirements/pytest.txt
- --passthrough-line-from-input=^pyobjc(.*)$
- id: pip-tools-compile
alias: compile-windows-py2.7-zmq-requirements
name: Windows Py2.7 ZeroMQ Requirements
files: ^(pkg/windows/(req|req_win)\.txt|requirements/((base|zeromq|pytest)\.txt|static/windows\.in))$
args:
- -v
- --py-version=2.7
- --platform=windows
- --include=pkg/windows/req.txt
- --include=pkg/windows/req_win.txt
- --include=requirements/base.txt
- --include=requirements/zeromq.txt
- --include=requirements/pytest.txt
- id: pip-tools-compile
alias: compile-cloud-py2.7-requirements
name: Cloud Py2.7 Requirements
files: ^requirements/(static/cloud\.in)$
args:
- -v
- --py-version=2.7
- id: pip-tools-compile
alias: compile-linux-crypto-py2.7-requirements
name: Linux Py2.7 Crypto Requirements
files: ^requirements/(crypto\.txt|static/crypto\.in)$
args:
- -v
- --py-version=2.7
- --platform=linux
- --out-prefix=linux
- id: pip-tools-compile
alias: compile-darwin-crypto-py2.7-requirements
name: Darwin Py2.7 Crypto Requirements
files: ^requirements/(crypto\.txt|static/crypto\.in)$
args:
- -v
- --py-version=2.7
- --platform=darwin
- --out-prefix=darwin
- id: pip-tools-compile
alias: compile-windows-crypto-py2.7-requirements
name: Windows Py2.7 Crypto Requirements
files: ^requirements/(crypto\.txt|static/crypto\.in)$
args:
- -v
- --py-version=2.7
- --platform=windows
- --out-prefix=windows
- id: pip-tools-compile
alias: compile-linux-py3.4-zmq-requirements
name: Linux Py3.4 ZeroMQ Requirements
files: ^requirements/((base|zeromq|pytest)\.txt|static/linux\.in)$
exclude: ^requirements/static/(centos-6|amzn-2018\.03|lint|cloud|docs|darwin|windows)\.in$
args:
- -v
- --py-version=3.4
- --platform=linux
- --include=requirements/base.txt
- --include=requirements/zeromq.txt
- --include=requirements/pytest.txt
- id: pip-tools-compile
alias: compile-cloud-py3.4-requirements
name: Cloud Py3.4 Requirements
files: ^requirements/(static/cloud\.in)$
args:
- -v
- --py-version=3.4
- id: pip-tools-compile
alias: compile-linux-crypto-py3.4-requirements
name: Linux Py3.4 Crypto Requirements
files: ^requirements/(crypto\.txt|static/crypto\.in)$
args:
- -v
- --py-version=3.4
- --platform=linux
- --out-prefix=linux
- id: pip-tools-compile
alias: compile-linux-py3.5-zmq-requirements
@@ -132,13 +23,14 @@ repos:
- id: pip-tools-compile
alias: compile-darwin-py3.5-zmq-requirements
name: Darwin Py3.5 ZeroMQ Requirements
files: ^(pkg/osx/(req|req_ext)\.txt|requirements/((base|zeromq|pytest)\.txt|static/darwin\.in))$
files: ^(pkg/osx/(req|req_ext|req_pyobjc)\.txt|requirements/((base|zeromq|pytest)\.txt|static/darwin\.in))$
args:
- -v
- --py-version=3.5
- --platform=darwin
- --include=pkg/osx/req.txt
- --include=pkg/osx/req_ext.txt
- --include=pkg/osx/req_pyobjc.txt
- --include=requirements/base.txt
- --include=requirements/zeromq.txt
- --include=requirements/pytest.txt
@@ -175,6 +67,15 @@ repos:
- --py-version=3.5
- --platform=linux
- id: pip-tools-compile
alias: compile-changelog-requirements
name: Changelog Py3.5 Requirements
files: ^requirements/static/changelog\.in$
args:
- -v
- --py-version=3.5
- --platform=linux
- id: pip-tools-compile
alias: compile-linux-crypto-py3.5-requirements
name: Linux Py3.5 Crypto Requirements
@@ -214,7 +115,6 @@ repos:
- --py-version=3.5
- --platform=linux
- id: pip-tools-compile
alias: compile-linux-py3.6-zmq-requirements
name: Linux Py3.6 ZeroMQ Requirements
@@ -231,13 +131,14 @@ repos:
- id: pip-tools-compile
alias: compile-darwin-py3.6-zmq-requirements
name: Darwin Py3.6 ZeroMQ Requirements
files: ^(pkg/osx/(req|req_ext)\.txt|requirements/((base|zeromq|pytest)\.txt|static/darwin\.in))$
files: ^(pkg/osx/(req|req_ext|req_pyobjc)\.txt|requirements/((base|zeromq|pytest)\.txt|static/darwin\.in))$
args:
- -v
- --py-version=3.6
- --platform=darwin
- --include=pkg/osx/req.txt
- --include=pkg/osx/req_ext.txt
- --include=pkg/osx/req_pyobjc.txt
- --include=requirements/base.txt
- --include=requirements/zeromq.txt
- --include=requirements/pytest.txt
@@ -274,6 +175,15 @@ repos:
- --py-version=3.6
- --platform=linux
- id: pip-tools-compile
alias: compile-changelog-requirements
name: Changelog Py3.6 Requirements
files: ^requirements/static/changelog\.in$
args:
- -v
- --py-version=3.6
- --platform=linux
- id: pip-tools-compile
alias: compile-linux-crypto-py3.6-requirements
name: Linux Py3.6 Crypto Requirements
@@ -313,7 +223,6 @@ repos:
- --py-version=3.6
- --platform=linux
- id: pip-tools-compile
alias: compile-linux-py3.7-zmq-requirements
name: Linux Py3.7 ZeroMQ Requirements
@@ -330,13 +239,14 @@ repos:
- id: pip-tools-compile
alias: compile-darwin-py3.7-zmq-requirements
name: Darwin Py3.7 ZeroMQ Requirements
files: ^(pkg/osx/(req|req_ext)\.txt|requirements/((base|zeromq|pytest)\.txt|static/darwin\.in))$
files: ^(pkg/osx/(req|req_ext|req_pyobjc)\.txt|requirements/((base|zeromq|pytest)\.txt|static/darwin\.in))$
args:
- -v
- --py-version=3.7
- --platform=darwin
- --include=pkg/osx/req.txt
- --include=pkg/osx/req_ext.txt
- --include=pkg/osx/req_pyobjc.txt
- --include=requirements/base.txt
- --include=requirements/zeromq.txt
- --include=requirements/pytest.txt
@@ -373,6 +283,15 @@ repos:
- --py-version=3.7
- --platform=linux
- id: pip-tools-compile
alias: compile-changelog-requirements
name: Changelog Py3.7 Requirements
files: ^requirements/static/changelog\.in$
args:
- -v
- --py-version=3.7
- --platform=linux
- id: pip-tools-compile
alias: compile-linux-crypto-py3.7-requirements
name: Linux Py3.7 Crypto Requirements
@@ -412,6 +331,32 @@ repos:
- --py-version=3.7
- --platform=linux
- repo: https://github.com/timothycrosley/isort
rev: "1e78a9acf3110e1f9721feb591f89a451fc9876a"
hooks:
- id: isort
additional_dependencies: ['toml']
# This tells pre-commit not to pass files to isort.
# This should be kept in sync with pyproject.toml
exclude: >
(?x)^(
templates/.*|
salt/ext/.*|
tests/kitchen/.*
)$
- repo: https://github.com/psf/black
rev: 19.10b0
hooks:
- id: black
# This tells pre-commit not to pass files to black.
# This should be kept in sync with pyproject.toml
exclude: >
(?x)^(
templates/.*|
salt/ext/.*|
tests/kitchen/.*
)$
- repo: https://github.com/saltstack/salt-nox-pre-commit
rev: master

@@ -8,11 +8,11 @@ extension-pkg-whitelist=
# Add files or directories to the blacklist. They should be base names, not
# paths.
ignore=CVS,
ext
ext,
# Add files or directories matching the regex patterns to the blacklist. The
# regex matches against base names, not paths.
ignore-patterns=
ignore-patterns=salt.ext.*
# Python code to execute, usually for sys.path manipulation such as
# pygtk.require().
@@ -77,6 +77,7 @@ disable=R,
no-member,
unsubscriptable-object,
un-indexed-curly-braces-error,
whitespace-before-colon,
indentation-is-not-a-multiple-of-four-comment,
blacklisted-name,
invalid-name,
@@ -126,7 +127,9 @@ disable=R,
str-format-in-logging,
import-outside-toplevel,
deprecated-method,
keyword-arg-before-vararg
repr-flag-used-in-string,
keyword-arg-before-vararg,
incompatible-py3-code
# Enable the message, report, category or checker with the given id(s). You can
# either give multiple identifier separated by comma (,) or put this option
@@ -396,6 +399,7 @@ init-import=no
# List of qualified module names which can have objects that can redefine
# builtins.
redefining-builtins-modules=six.moves,
salt.ext.six.moves,
past.builtins,
future.builtins,
builtins,
@@ -472,7 +476,9 @@ ignored-classes=SQLObject
# (useful for modules/projects where namespaces are manipulated during runtime
# and thus existing member attributes cannot be deduced by static analysis). It
# supports qualified module names, as well as Unix pattern matching.
ignored-modules=
ignored-modules=salt.ext.six.moves,
six.moves,
_MovedItems,
# Show a hint with possible names when a member name was not found. The aspect
# of finding the hint is based on edit distance.
@@ -511,7 +517,7 @@ min-similarity-lines=4
fileperms-default=0644
# File paths to ignore file permission. Glob patterns allowed.
fileperms-ignore-paths=setup.py,noxfile.py,tests/runtests.py,tests/jenkins*.py,tests/saltsh.py,tests/buildpackage.py
fileperms-ignore-paths=setup.py,noxfile.py,tests/runtests.py,tests/jenkins*.py,tests/saltsh.py,tests/buildpackage.py,tests/unit/files/rosters/ansible/roster.py
[MODERNIZE]

@@ -1,4 +1,3 @@
# Changelog
All notable changes to Salt will be documented in this file.
This changelog follows [keepachangelog](https://keepachangelog.com/en/1.0.0/) format, and is intended for human consumption.
@@ -6,7 +5,9 @@ This changelog follows [keepachangelog](https://keepachangelog.com/en/1.0.0/) fo
This project versioning is _similar_ to [Semantic Versioning](https://semver.org), and is documented in [SEP 14](https://github.com/saltstack/salt-enhancement-proposals/pull/20/files).
Versions are `MAJOR.PATCH`.
### 3000.1
# Changelog
## 3001 - Sodium
### Removed
@@ -15,6 +16,22 @@ Versions are `MAJOR.PATCH`.
### Changed
### Fixed
- [#56237](https://github.com/saltstack/salt/pull/56237) - Fix alphabetical ordering and remove duplicates across all documentation indexes - [@myii](https://github.com/myii)
- [#56325](https://github.com/saltstack/salt/pull/56325) - Fix hyperlinks to `salt.serializers` and other documentation issues - [@myii](https://github.com/myii)
### Added
- [#56627](https://github.com/saltstack/salt/pull/56627) - Add new salt-ssh set_path option
## 3000.1
### Removed
### Deprecated
### Changed
### Fixed
- [#56082](https://github.com/saltstack/salt/pull/56082) - Fix saltversioninfo grain for new version
- [#56143](https://github.com/saltstack/salt/pull/56143) - Use encoding when caching pillar data
- [#56172](https://github.com/saltstack/salt/pull/56172) - Only change mine data if using new allow_tgt feature
@@ -40,6 +57,11 @@ Versions are `MAJOR.PATCH`.
- [#56310](https://github.com/saltstack/salt/pull/56310) - Only process ADMX files when loading policies
- [#56327](https://github.com/saltstack/salt/pull/56327) - keep cache_copied_files variable a list
- [#56360](https://github.com/saltstack/salt/pull/56360) - dont require virtualenv.virtualenv_version call, removed in 20.0.10
- [#56378](https://github.com/saltstack/salt/pull/56378) - Include _version.py if building wheel
- [#56376](https://github.com/saltstack/salt/pull/56376) - Fix win deps
- [#56418](https://github.com/saltstack/salt/pull/56418) - Ensure version.py included before we install
- [#56435](https://github.com/saltstack/salt/pull/56435) - Update mac build scripts
### Added

changelog/55836.added
@@ -0,0 +1 @@
Add towncrier tool to the Salt project to help manage CHANGELOG.md file.

cicd/kitchen_template.yml
@@ -0,0 +1,50 @@
.run-kitchen:
image: ruby:2.6.3
stage: test
tags:
- saltstack-kitchen
only:
refs:
- schedules
variables:
FORCE_FULL: 'true'
GOLDEN_IMAGES_CI_BRANCH: master
NOX_ENABLE_FROM_FILENAMES: 'true'
NOX_ENV_NAME: runtests-zeromq
NOX_PASSTHROUGH_OPTS: '--ssh-tests'
SALT_KITCHEN_DRIVER: kitchen-conf/driver.yml
SALT_KITCHEN_PLATFORMS: kitchen-conf/nox-platforms.yml
SALT_KITCHEN_VERIFIER: kitchen-conf/nox-verifier.yml
TEST_SUITE: py3
USE_SPOT_INSTANCES: 'true'
script:
- apt update
- apt -y install moreutils rsync dos2unix
- mkdir -p ~/.ssh
- echo "${KITCHEN_SSHKEY}" | tr -d '\r' > ~/.ssh/kitchen.pem
- chmod 700 ~/.ssh/
- chmod 600 ~/.ssh/kitchen.pem
- git clone https://gitlab.com/saltstack/open/cicd/kitchen-conf.git
- bundle install --with ec2 windows --without docker vagrant
- t=$(shuf -i 30-150 -n 1); echo "Sleeping $t seconds"; sleep $t
- if [ "${USE_SPOT_INSTANCES}" == "true" ]; then cp -f kitchen-conf/spot.yml .kitchen.local.yml; fi
- 'bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM)'
- bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM > kitchen-diagnose-info.txt
- grep 'image_id:' kitchen-diagnose-info.txt
- grep 'instance_type:' -A5 kitchen-diagnose-info.txt
- rm -f kitchen-diagnose-info.txt
- rm -f .kitchen.local.yml
- ssh-agent /bin/bash -xc 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM'
- TEST_EXIT_CODE=0
- 'DONT_DOWNLOAD_ARTEFACTS=1 bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM|ts -s || TEST_EXIT_CODE=$?'
- 'ONLY_DOWNLOAD_ARTEFACTS=1 bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM|ts -s || true'
- bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM
- exit $TEST_EXIT_CODE
artifacts:
when: always
paths:
- artifacts/
- .kitchen/
expire_in: 6 months

cicd/kitchen_testruns.yml
@@ -0,0 +1,123 @@
kitchen-amazon2-py3:
extends: .run-kitchen
variables:
TEST_PLATFORM: amazon-2
kitchen-archlts-py3:
extends: .run-kitchen
variables:
TEST_PLATFORM: arch-lts
kitchen-archlts-py3:
extends: .run-kitchen
variables:
TEST_PLATFORM: arch-lts
NOX_PASSTHROUGH_OPTS: '-n integration.modules.test_pkg'
kitchen-centos7-py3-m2crypto:
extends: .run-kitchen
variables:
TEST_PLATFORM: centos-7
NOX_ENV_NAME: runtests-zeromq-m2crypto
kitchen-centos7-py3-proxy:
extends: .run-kitchen
variables:
TEST_PLATFORM: centos-7
NOX_PASSTHROUGH_OPTS: '--proxy'
kitchen-centos7-py3-pycryptodomex:
extends: .run-kitchen
variables:
TEST_PLATFORM: centos-7
NOX_ENV_NAME: runtests-zeromq-pycryptodomex
kitchen-centos7-py3:
extends: .run-kitchen
variables:
TEST_PLATFORM: centos-7
kitchen-centos7-py3-tcp:
extends: .run-kitchen
variables:
TEST_PLATFORM: centos-7
NOX_ENV_NAME: runtests-tcp
kitchen-centos8-py3:
extends: .run-kitchen
variables:
TEST_PLATFORM: centos-8
kitchen-debian9-py3:
extends: .run-kitchen
variables:
TEST_PLATFORM: debian-9
kitchen-debian10-py3:
extends: .run-kitchen
variables:
TEST_PLATFORM: debian-10
kitchen-fedora30-py3:
extends: .run-kitchen
variables:
TEST_PLATFORM: fedora-30
kitchen-fedora31-py3:
extends: .run-kitchen
variables:
TEST_PLATFORM: fedora-31
kitchen-opensuse15-py3:
extends: .run-kitchen
variables:
TEST_PLATFORM: opensuse-15
kitchen-ubuntu1604-py3-m2crypto:
extends: .run-kitchen
variables:
TEST_PLATFORM: ubuntu-1604
NOX_ENV_NAME: runtests-zeromq-m2crypto
kitchen-ubuntu1604-py3-proxy:
extends: .run-kitchen
variables:
TEST_PLATFORM: ubuntu-1604
NOX_PASSTHROUGH_OPTS: '--proxy'
kitchen-ubuntu1604-py3-pycryptodomex:
extends: .run-kitchen
variables:
TEST_PLATFORM: ubuntu-1604
NOX_ENV_NAME: runtests-zeromq-pycryptodomex
kitchen-ubuntu1604-py3:
extends: .run-kitchen
variables:
TEST_PLATFORM: ubuntu-1604
kitchen-ubuntu1604-py3-tcp:
extends: .run-kitchen
variables:
TEST_PLATFORM: ubuntu-1604
NOX_ENV_NAME: runtests-tcp
kitchen-ubuntu1804-py3:
extends: .run-kitchen
variables:
TEST_PLATFORM: ubuntu-1804
kitchen-windows2016-py3:
extends: .run-kitchen
variables:
TEST_PLATFORM: windows-2016
NOX_PASSTHROUGH_OPTS: '--unit'
USE_SPOT_INSTANCES: 'false'
kitchen-windows2019-py3:
extends: .run-kitchen
variables:
TEST_PLATFORM: windows-2019
NOX_PASSTHROUGH_OPTS: '--unit'
USE_SPOT_INSTANCES: 'false'

@@ -506,6 +506,12 @@
# Boolean to run command via sudo.
#ssh_sudo: False
# Boolean to run ssh_pre_flight script defined in roster. By default
# the script will only run if the thin_dir does not exist on the targeted
# minion. This forces the script to run regardless of the thin dir existing
# or not.
#ssh_run_pre_flight: True
# Number of seconds to wait for a response when establishing an SSH connection.
#ssh_timeout: 60

@@ -1,6 +1,6 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
"""
:codeauthor: Pedro Algarvio (pedro@algarvio.me)
@@ -8,54 +8,49 @@
~~~~~~~~~~~~~~~~~~~~~~
Setup the Transifex client configuration file
'''
"""
import getpass
# Import python libs
import os
import sys
import getpass
import ConfigParser
HOST = 'https://www.transifex.com'
HOST = "https://www.transifex.com"
RCFILE = os.path.abspath(
os.environ.get(
'TRANSIFEX_RC',
os.path.expanduser('~/.transifexrc')
)
os.environ.get("TRANSIFEX_RC", os.path.expanduser("~/.transifexrc"))
)
def main():
'''
"""
Run the setup code
'''
"""
print(
'This script will setup a Transifex client configuration file, or, '
'if it already exists, make some minimal checks to see if it\'s '
'properly configured\n'
"This script will setup a Transifex client configuration file, or, "
"if it already exists, make some minimal checks to see if it's "
"properly configured\n"
)
if not os.path.exists(RCFILE):
while True:
username = os.environ.get('TRANSIFEX_USER', None)
username = os.environ.get("TRANSIFEX_USER", None)
if username is not None:
break
try:
username = raw_input(
'What is your username on Transifex.com? '
)
username = raw_input("What is your username on Transifex.com? ")
if username:
break
except KeyboardInterrupt:
print
sys.exit(1)
while True:
password = os.environ.get('TRANSIFEX_PASS', None)
password = os.environ.get("TRANSIFEX_PASS", None)
if password is not None:
break
try:
password = getpass.getpass(
'What is your password on Transifex.com? '
)
password = getpass.getpass("What is your password on Transifex.com? ")
if password:
break
except KeyboardInterrupt:
@@ -64,16 +59,16 @@ def main():
config = ConfigParser.SafeConfigParser()
config.add_section(HOST)
config.set(HOST, 'token', '')
config.set(HOST, 'hostname', HOST)
config.set(HOST, 'username', username)
config.set(HOST, 'password', password)
config.set(HOST, "token", "")
config.set(HOST, "hostname", HOST)
config.set(HOST, "username", username)
config.set(HOST, "password", password)
config.write(open(RCFILE, 'w'))
print('username and password stored in \'{0}\''.format(RCFILE))
config.write(open(RCFILE, "w"))
print("username and password stored in '{0}'".format(RCFILE))
os.chmod(RCFILE, 0600)
print('Secured the permissions on \'{0}\' to 0600'.format(RCFILE))
print("Secured the permissions on '{0}' to 0600".format(RCFILE))
sys.exit(0)
@@ -82,24 +77,30 @@ def main():
config.read([RCFILE])
if not config.has_section(HOST):
print('\'~/.transifexrc\' is not properly configured, it\'s missing '
'the {0} section'.format(HOST))
print(
"'~/.transifexrc' is not properly configured, it's missing "
"the {0} section".format(HOST)
)
for setting in ('username', 'password', 'hostname', 'token'):
for setting in ("username", "password", "hostname", "token"):
if not config.has_option(HOST, setting):
print('\'~/.transifexrc\' is not properly configured, it\'s '
'missing the {0} option'.format(setting))
print(
"'~/.transifexrc' is not properly configured, it's "
"missing the {0} option".format(setting)
)
sys.exit(1)
if setting == 'token':
if setting == "token":
# Token should be left empty
continue
if not config.get(HOST, setting):
print('\'~/.transifexrc\' is not properly configured, it\'s '
'missing a value for the {0} option'.format(setting))
print(
"'~/.transifexrc' is not properly configured, it's "
"missing a value for the {0} option".format(setting)
)
sys.exit(1)
if __name__ == '__main__':
if __name__ == "__main__":
main()

@@ -13,18 +13,16 @@ import re
from docutils import nodes
from docutils.parsers.rst.roles import set_classes
from pygments.lexer import RegexLexer, bygroups
from pygments.lexers import get_lexer_by_name
from pygments.token import Literal, Text, Operator, Keyword, Name, Number
from pygments.token import Keyword, Literal, Name, Number, Operator, Text
from pygments.util import ClassNotFound
from sphinx import addnodes
from sphinx.roles import XRefRole
from sphinx.domains import Domain, ObjType, Index
from sphinx.directives import ObjectDescription
from sphinx.util.nodes import make_refnode
from sphinx.domains import Domain, Index, ObjType
from sphinx.roles import XRefRole
from sphinx.util.docfields import GroupedField, TypedField
from sphinx.util.nodes import make_refnode
class DocRef(object):
@@ -44,252 +42,275 @@ class DocRef(object):
location of the RFC which defines some HTTP method.
"""
return '{0}#{1}{2}'.format(self.base_url, self.anchor, self.section)
return "{0}#{1}{2}".format(self.base_url, self.anchor, self.section)
#: The URL of the HTTP/1.1 RFC which defines the HTTP methods OPTIONS, GET,
#: HEAD, POST, PUT, DELETE, TRACE, and CONNECT.
RFC2616 = 'http://www.w3.org/Protocols/rfc2616/rfc2616-sec9.html'
RFC2616 = "http://www.w3.org/Protocols/rfc2616/rfc2616-sec9.html"
#: The name to use for section anchors in RFC2616.
RFC2616ANCHOR = 'sec'
RFC2616ANCHOR = "sec"
#: The URL of the RFC which defines the HTTP PATCH method.
RFC5789 = 'http://tools.ietf.org/html/rfc5789'
RFC5789 = "http://tools.ietf.org/html/rfc5789"
#: The name to use for section anchors in RFC5789.
RFC5789ANCHOR = 'section-'
RFC5789ANCHOR = "section-"
#: Mapping from lowercase HTTP method name to :class:`DocRef` object which
#: maintains the URL which points to the section of the RFC which defines that
#: HTTP method.
DOCREFS = {
'patch': DocRef(RFC5789, RFC5789ANCHOR, 2),
'options': DocRef(RFC2616, RFC2616ANCHOR, 9.2),
'get': DocRef(RFC2616, RFC2616ANCHOR, 9.3),
'head': DocRef(RFC2616, RFC2616ANCHOR, 9.4),
'post': DocRef(RFC2616, RFC2616ANCHOR, 9.5),
'put': DocRef(RFC2616, RFC2616ANCHOR, 9.6),
'delete': DocRef(RFC2616, RFC2616ANCHOR, 9.7),
'trace': DocRef(RFC2616, RFC2616ANCHOR, 9.8),
'connect': DocRef(RFC2616, RFC2616ANCHOR, 9.9)
"patch": DocRef(RFC5789, RFC5789ANCHOR, 2),
"options": DocRef(RFC2616, RFC2616ANCHOR, 9.2),
"get": DocRef(RFC2616, RFC2616ANCHOR, 9.3),
"head": DocRef(RFC2616, RFC2616ANCHOR, 9.4),
"post": DocRef(RFC2616, RFC2616ANCHOR, 9.5),
"put": DocRef(RFC2616, RFC2616ANCHOR, 9.6),
"delete": DocRef(RFC2616, RFC2616ANCHOR, 9.7),
"trace": DocRef(RFC2616, RFC2616ANCHOR, 9.8),
"connect": DocRef(RFC2616, RFC2616ANCHOR, 9.9),
}
HTTP_STATUS_CODES = {
100: 'Continue',
101: 'Switching Protocols',
102: 'Processing',
200: 'OK',
201: 'Created',
202: 'Accepted',
203: 'Non Authoritative Information',
204: 'No Content',
205: 'Reset Content',
206: 'Partial Content',
207: 'Multi Status',
226: 'IM Used', # see RFC 3229
300: 'Multiple Choices',
301: 'Moved Permanently',
302: 'Found',
303: 'See Other',
304: 'Not Modified',
305: 'Use Proxy',
307: 'Temporary Redirect',
400: 'Bad Request',
401: 'Unauthorized',
402: 'Payment Required', # unused
403: 'Forbidden',
404: 'Not Found',
405: 'Method Not Allowed',
406: 'Not Acceptable',
407: 'Proxy Authentication Required',
408: 'Request Timeout',
409: 'Conflict',
410: 'Gone',
411: 'Length Required',
412: 'Precondition Failed',
413: 'Request Entity Too Large',
414: 'Request URI Too Long',
415: 'Unsupported Media Type',
416: 'Requested Range Not Satisfiable',
417: 'Expectation Failed',
418: "I'm a teapot", # see RFC 2324
422: 'Unprocessable Entity',
423: 'Locked',
424: 'Failed Dependency',
426: 'Upgrade Required',
449: 'Retry With', # proprietary MS extension
500: 'Internal Server Error',
501: 'Not Implemented',
502: 'Bad Gateway',
503: 'Service Unavailable',
504: 'Gateway Timeout',
505: 'HTTP Version Not Supported',
507: 'Insufficient Storage',
510: 'Not Extended'
100: "Continue",
101: "Switching Protocols",
102: "Processing",
200: "OK",
201: "Created",
202: "Accepted",
203: "Non Authoritative Information",
204: "No Content",
205: "Reset Content",
206: "Partial Content",
207: "Multi Status",
226: "IM Used", # see RFC 3229
300: "Multiple Choices",
301: "Moved Permanently",
302: "Found",
303: "See Other",
304: "Not Modified",
305: "Use Proxy",
307: "Temporary Redirect",
400: "Bad Request",
401: "Unauthorized",
402: "Payment Required", # unused
403: "Forbidden",
404: "Not Found",
405: "Method Not Allowed",
406: "Not Acceptable",
407: "Proxy Authentication Required",
408: "Request Timeout",
409: "Conflict",
410: "Gone",
411: "Length Required",
412: "Precondition Failed",
413: "Request Entity Too Large",
414: "Request URI Too Long",
415: "Unsupported Media Type",
416: "Requested Range Not Satisfiable",
417: "Expectation Failed",
418: "I'm a teapot", # see RFC 2324
422: "Unprocessable Entity",
423: "Locked",
424: "Failed Dependency",
426: "Upgrade Required",
449: "Retry With", # proprietary MS extension
500: "Internal Server Error",
501: "Not Implemented",
502: "Bad Gateway",
503: "Service Unavailable",
504: "Gateway Timeout",
505: "HTTP Version Not Supported",
507: "Insufficient Storage",
510: "Not Extended",
}
http_sig_param_re = re.compile(r'\((?:(?P<type>[^:)]+):)?(?P<name>[\w_]+)\)',
re.VERBOSE)
http_sig_param_re = re.compile(
r"\((?:(?P<type>[^:)]+):)?(?P<name>[\w_]+)\)", re.VERBOSE
)
def http_resource_anchor(method, path):
path = re.sub(r'[<>:/]', '-', path)
return method.lower() + '-' + path
path = re.sub(r"[<>:/]", "-", path)
return method.lower() + "-" + path
class HTTPResource(ObjectDescription):
doc_field_types = [
TypedField('parameter', label='Parameters',
names=('param', 'parameter', 'arg', 'argument'),
typerolename='obj', typenames=('paramtype', 'type')),
TypedField('jsonparameter', label='JSON Parameters',
names=('jsonparameter', 'jsonparam', 'json'),
typerolename='obj', typenames=('jsonparamtype', 'jsontype')),
TypedField('queryparameter', label='Query Parameters',
names=('queryparameter', 'queryparam', 'qparam', 'query'),
typerolename='obj', typenames=('queryparamtype', 'querytype', 'qtype')),
GroupedField('formparameter', label='Form Parameters',
names=('formparameter', 'formparam', 'fparam', 'form')),
GroupedField('requestheader', label='Request Headers',
rolename='mailheader',
names=('reqheader', 'requestheader')),
GroupedField('responseheader', label='Response Headers',
rolename='mailheader',
names=('resheader', 'responseheader')),
GroupedField('statuscode', label='Status Codes',
rolename='statuscode',
names=('statuscode', 'status', 'code'))
TypedField(
"parameter",
label="Parameters",
names=("param", "parameter", "arg", "argument"),
typerolename="obj",
typenames=("paramtype", "type"),
),
TypedField(
"jsonparameter",
label="JSON Parameters",
names=("jsonparameter", "jsonparam", "json"),
typerolename="obj",
typenames=("jsonparamtype", "jsontype"),
),
TypedField(
"queryparameter",
label="Query Parameters",
names=("queryparameter", "queryparam", "qparam", "query"),
typerolename="obj",
typenames=("queryparamtype", "querytype", "qtype"),
),
GroupedField(
"formparameter",
label="Form Parameters",
names=("formparameter", "formparam", "fparam", "form"),
),
GroupedField(
"requestheader",
label="Request Headers",
rolename="mailheader",
names=("reqheader", "requestheader"),
),
GroupedField(
"responseheader",
label="Response Headers",
rolename="mailheader",
names=("resheader", "responseheader"),
),
GroupedField(
"statuscode",
label="Status Codes",
rolename="statuscode",
names=("statuscode", "status", "code"),
),
]
method = NotImplemented
def handle_signature(self, sig, signode):
method = self.method.upper() + ' '
method = self.method.upper() + " "
signode += addnodes.desc_name(method, method)
offset = 0
for match in http_sig_param_re.finditer(sig):
path = sig[offset:match.start()]
path = sig[offset : match.start()]
signode += addnodes.desc_name(path, path)
params = addnodes.desc_parameterlist()
typ = match.group('type')
typ = match.group("type")
if typ:
typ = typ + ': '
typ = typ + ": "
params += addnodes.desc_annotation(typ, typ)
name = match.group('name')
name = match.group("name")
params += addnodes.desc_parameter(name, name)
signode += params
offset = match.end()
if offset < len(sig):
path = sig[offset:len(sig)]
path = sig[offset : len(sig)]
signode += addnodes.desc_name(path, path)
fullname = self.method.upper() + ' ' + path
signode['method'] = self.method
signode['path'] = sig
signode['fullname'] = fullname
fullname = self.method.upper() + " " + path
signode["method"] = self.method
signode["path"] = sig
signode["fullname"] = fullname
return (fullname, self.method, sig)
def needs_arglist(self):
return False
def add_target_and_index(self, name_cls, sig, signode):
signode['ids'].append(http_resource_anchor(*name_cls[1:]))
self.env.domaindata['http'][self.method][sig] = (self.env.docname, '')
signode["ids"].append(http_resource_anchor(*name_cls[1:]))
self.env.domaindata["http"][self.method][sig] = (self.env.docname, "")
def get_index_text(self, modname, name):
return ''
return ""
class HTTPOptions(HTTPResource):
method = 'options'
method = "options"
class HTTPHead(HTTPResource):
method = 'head'
method = "head"
class HTTPPatch(HTTPResource):
method = 'patch'
method = "patch"
class HTTPPost(HTTPResource):
method = 'post'
method = "post"
class HTTPGet(HTTPResource):
method = 'get'
method = "get"
class HTTPPut(HTTPResource):
method = 'put'
method = "put"
class HTTPDelete(HTTPResource):
method = 'delete'
method = "delete"
class HTTPTrace(HTTPResource):
method = 'trace'
method = "trace"
def http_statuscode_role(name, rawtext, text, lineno, inliner,
options={}, content=[]):
def http_statuscode_role(name, rawtext, text, lineno, inliner, options={}, content=[]):
if text.isdigit():
code = int(text)
try:
status = HTTP_STATUS_CODES[code]
except KeyError:
msg = inliner.reporter.error('%d is invalid HTTP status code'
% code, lineno=lineno)
msg = inliner.reporter.error(
"%d is invalid HTTP status code" % code, lineno=lineno
)
prb = inliner.problematic(rawtext, rawtext, msg)
return [prb], [msg]
else:
try:
code, status = re.split(r'\s', text.strip(), 1)
code, status = re.split(r"\s", text.strip(), 1)
code = int(code)
except ValueError:
msg = inliner.reporter.error(
'HTTP status code must be an integer (e.g. `200`) or '
'start with an integer (e.g. `200 OK`); %r is invalid' %
text,
line=lineno
"HTTP status code must be an integer (e.g. `200`) or "
"start with an integer (e.g. `200 OK`); %r is invalid" % text,
line=lineno,
)
prb = inliner.problematic(rawtext, rawtext, msg)
return [prb], [msg]
nodes.reference(rawtext)
if code == 226:
url = 'http://www.ietf.org/rfc/rfc3229.txt'
url = "http://www.ietf.org/rfc/rfc3229.txt"
if code == 418:
url = 'http://www.ietf.org/rfc/rfc2324.txt'
url = "http://www.ietf.org/rfc/rfc2324.txt"
if code == 449:
url = 'http://msdn.microsoft.com/en-us/library' \
'/dd891478(v=prot.10).aspx'
url = "http://msdn.microsoft.com/en-us/library" "/dd891478(v=prot.10).aspx"
elif code in HTTP_STATUS_CODES:
url = 'http://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html' \
'#sec10.' + ('%d.%d' % (code // 100, 1 + code % 100))
url = "http://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html" "#sec10." + (
"%d.%d" % (code // 100, 1 + code % 100)
)
else:
url = ''
url = ""
set_classes(options)
node = nodes.reference(rawtext, '%d %s' % (code, status),
refuri=url, **options)
node = nodes.reference(rawtext, "%d %s" % (code, status), refuri=url, **options)
return [node], []
def http_method_role(name, rawtext, text, lineno, inliner,
options={}, content=[]):
def http_method_role(name, rawtext, text, lineno, inliner, options={}, content=[]):
method = str(text).lower()
if method not in DOCREFS:
msg = inliner.reporter.error('%s is not valid HTTP method' % method,
lineno=lineno)
msg = inliner.reporter.error(
"%s is not valid HTTP method" % method, lineno=lineno
)
prb = inliner.problematic(rawtext, rawtext, msg)
return [prb], [msg]
url = str(DOCREFS[method])
@@ -298,51 +319,61 @@ def http_method_role(name, rawtext, text, lineno, inliner,
class HTTPXRefRole(XRefRole):
def __init__(self, method, **kwargs):
XRefRole.__init__(self, **kwargs)
self.method = method
def process_link(self, env, refnode, has_explicit_title, title, target):
if not target.startswith('/'):
if not target.startswith("/"):
pass
if not has_explicit_title:
title = self.method.upper() + ' ' + title
title = self.method.upper() + " " + title
return title, target
class HTTPIndex(Index):
name = 'routingtable'
localname = 'HTTP Routing Table'
shortname = 'routing table'
name = "routingtable"
localname = "HTTP Routing Table"
shortname = "routing table"
def __init__(self, *args, **kwargs):
super(HTTPIndex, self).__init__(*args, **kwargs)
self.ignore = [[l for l in x.split('/') if l]
for x in self.domain.env.config['http_index_ignore_prefixes']]
self.ignore = [
[l for l in x.split("/") if l]
for x in self.domain.env.config["http_index_ignore_prefixes"]
]
self.ignore.sort(key=lambda x: -len(x))
def grouping_prefix(self, path):
letters = [x for x in path.split('/') if x]
letters = [x for x in path.split("/") if x]
for prefix in self.ignore:
if letters[:len(prefix)] == prefix:
return '/' + '/'.join(letters[:len(prefix) + 1])
return '/%s' % (letters[0] if letters else '',)
if letters[: len(prefix)] == prefix:
return "/" + "/".join(letters[: len(prefix) + 1])
return "/%s" % (letters[0] if letters else "",)
def generate(self, docnames=None):
content = {}
items = ((method, path, info)
items = (
(method, path, info)
for method, routes in self.domain.routes.items()
for path, info in routes.items())
for path, info in routes.items()
)
items = sorted(items, key=lambda item: item[1])
for method, path, info in items:
entries = content.setdefault(self.grouping_prefix(path), [])
entries.append([
method.upper() + ' ' + path, 0, info[0],
http_resource_anchor(method, path), '', '', info[1]
])
entries.append(
[
method.upper() + " " + path,
0,
info[0],
http_resource_anchor(method, path),
"",
"",
info[1],
]
)
content = sorted(content.items(), key=lambda k: k[0])
return (content, True)
@@ -350,53 +381,53 @@ class HTTPIndex(Index):
class HTTPDomain(Domain):
"""HTTP domain."""
name = 'http'
label = 'HTTP'
name = "http"
label = "HTTP"
object_types = {
'options': ObjType('options', 'options', 'obj'),
'head': ObjType('head', 'head', 'obj'),
'post': ObjType('post', 'post', 'obj'),
'get': ObjType('get', 'get', 'obj'),
'put': ObjType('put', 'put', 'obj'),
'patch': ObjType('patch', 'patch', 'obj'),
'delete': ObjType('delete', 'delete', 'obj'),
'trace': ObjType('trace', 'trace', 'obj')
"options": ObjType("options", "options", "obj"),
"head": ObjType("head", "head", "obj"),
"post": ObjType("post", "post", "obj"),
"get": ObjType("get", "get", "obj"),
"put": ObjType("put", "put", "obj"),
"patch": ObjType("patch", "patch", "obj"),
"delete": ObjType("delete", "delete", "obj"),
"trace": ObjType("trace", "trace", "obj"),
}
directives = {
'options': HTTPOptions,
'head': HTTPHead,
'post': HTTPPost,
'get': HTTPGet,
'put': HTTPPut,
'patch': HTTPPatch,
'delete': HTTPDelete,
'trace': HTTPTrace
"options": HTTPOptions,
"head": HTTPHead,
"post": HTTPPost,
"get": HTTPGet,
"put": HTTPPut,
"patch": HTTPPatch,
"delete": HTTPDelete,
"trace": HTTPTrace,
}
roles = {
'options': HTTPXRefRole('options'),
'head': HTTPXRefRole('head'),
'post': HTTPXRefRole('post'),
'get': HTTPXRefRole('get'),
'put': HTTPXRefRole('put'),
'patch': HTTPXRefRole('patch'),
'delete': HTTPXRefRole('delete'),
'trace': HTTPXRefRole('trace'),
'statuscode': http_statuscode_role,
'method': http_method_role
"options": HTTPXRefRole("options"),
"head": HTTPXRefRole("head"),
"post": HTTPXRefRole("post"),
"get": HTTPXRefRole("get"),
"put": HTTPXRefRole("put"),
"patch": HTTPXRefRole("patch"),
"delete": HTTPXRefRole("delete"),
"trace": HTTPXRefRole("trace"),
"statuscode": http_statuscode_role,
"method": http_method_role,
}
initial_data = {
'options': {}, # path: (docname, synopsis)
'head': {},
'post': {},
'get': {},
'put': {},
'patch': {},
'delete': {},
'trace': {}
"options": {}, # path: (docname, synopsis)
"head": {},
"post": {},
"get": {},
"put": {},
"patch": {},
"delete": {},
"trace": {},
}
# indices = [HTTPIndex]
@@ -412,17 +443,15 @@ class HTTPDomain(Domain):
if info[0] == docname:
del routes[path]
def resolve_xref(self, env, fromdocname, builder, typ, target,
node, contnode):
def resolve_xref(self, env, fromdocname, builder, typ, target, node, contnode):
try:
info = self.data[str(typ)][target]
except KeyError:
return
else:
anchor = http_resource_anchor(typ, target)
title = typ.upper() + ' ' + target
return make_refnode(builder, fromdocname, info[0], anchor,
contnode, title)
title = typ.upper() + " " + target
return make_refnode(builder, fromdocname, info[0], anchor, contnode, title)
def get_objects(self):
for method, routes in self.routes.items():
@@ -434,16 +463,16 @@ class HTTPDomain(Domain):
class HTTPLexer(RegexLexer):
"""Lexer for HTTP sessions."""
name = 'HTTP'
aliases = ['http']
name = "HTTP"
aliases = ["http"]
flags = re.DOTALL
def header_callback(self, match):
if match.group(1).lower() == 'content-type':
if match.group(1).lower() == "content-type":
content_type = match.group(5).strip()
if ';' in content_type:
content_type = content_type[:content_type.find(';')].strip()
if ";" in content_type:
content_type = content_type[: content_type.find(";")].strip()
self.content_type = content_type
yield match.start(1), Name.Attribute, match.group(1)
yield match.start(2), Text, match.group(2)
@@ -458,11 +487,12 @@ class HTTPLexer(RegexLexer):
yield match.start(3), Text, match.group(3)
def content_callback(self, match):
content_type = getattr(self, 'content_type', None)
content_type = getattr(self, "content_type", None)
content = match.group()
offset = match.start()
if content_type:
from pygments.lexers import get_lexer_for_mimetype
try:
lexer = get_lexer_for_mimetype(content_type)
except ClassNotFound:
@@ -474,33 +504,50 @@ class HTTPLexer(RegexLexer):
yield offset, Text, content
tokens = {
'root': [
(r'(GET|POST|PUT|PATCH|DELETE|HEAD|OPTIONS|TRACE)( +)([^ ]+)( +)'
r'(HTTPS?)(/)(1\.[01])(\r?\n|$)',
bygroups(Name.Function, Text, Name.Namespace, Text,
Keyword.Reserved, Operator, Number, Text),
'headers'),
(r'(HTTPS?)(/)(1\.[01])( +)(\d{3})( +)([^\r\n]+)(\r?\n|$)',
bygroups(Keyword.Reserved, Operator, Number, Text, Number,
Text, Name.Exception, Text),
'headers'),
"root": [
(
r"(GET|POST|PUT|PATCH|DELETE|HEAD|OPTIONS|TRACE)( +)([^ ]+)( +)"
r"(HTTPS?)(/)(1\.[01])(\r?\n|$)",
bygroups(
Name.Function,
Text,
Name.Namespace,
Text,
Keyword.Reserved,
Operator,
Number,
Text,
),
"headers",
),
(
r"(HTTPS?)(/)(1\.[01])( +)(\d{3})( +)([^\r\n]+)(\r?\n|$)",
bygroups(
Keyword.Reserved,
Operator,
Number,
Text,
Number,
Text,
Name.Exception,
Text,
),
"headers",
),
],
'headers': [
(r'([^\s:]+)( *)(:)( *)([^\r\n]+)(\r?\n|$)', header_callback),
(r'([\t ]+)([^\r\n]+)(\r?\n|$)', continuous_header_callback),
(r'\r?\n', Text, 'content')
"headers": [
(r"([^\s:]+)( *)(:)( *)([^\r\n]+)(\r?\n|$)", header_callback),
(r"([\t ]+)([^\r\n]+)(\r?\n|$)", continuous_header_callback),
(r"\r?\n", Text, "content"),
],
'content': [
(r'.+', content_callback)
]
"content": [(r".+", content_callback)],
}
def setup(app):
app.add_domain(HTTPDomain)
try:
get_lexer_by_name('http')
get_lexer_by_name("http")
except ClassNotFound:
app.add_lexer('http', HTTPLexer())
app.add_config_value('http_index_ignore_prefixes', [], None)
app.add_lexer("http", HTTPLexer())
app.add_config_value("http_index_ignore_prefixes", [], None)

View file

@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
'''
"""
:codeauthor: Pedro Algarvio (pedro@algarvio.me)
@@ -7,23 +7,23 @@
~~~~~~~~~~~~~~
Properly handle ``__func_alias__``
'''
"""
# Import Sphinx libs
from sphinx.ext.autodoc import FunctionDocumenter as FunctionDocumenter
class SaltFunctionDocumenter(FunctionDocumenter):
'''
"""
Simple override of sphinx.ext.autodoc.FunctionDocumenter to properly render
salt's aliased function names.
'''
"""
def format_name(self):
'''
"""
Format the function name
'''
if not hasattr(self.module, '__func_alias__'):
"""
if not hasattr(self.module, "__func_alias__"):
# Resume normal sphinx.ext.autodoc operation
return super(FunctionDocumenter, self).format_name()
@@ -46,4 +46,4 @@ def setup(app):
# add_autodocumenter() must be called after the initial setup and the
# 'builder-inited' event, as sphinx.ext.autosummary will restore the
# original documenter on 'builder-inited'
app.connect('env-before-read-docs', add_documenter)
app.connect("env-before-read-docs", add_documenter)

View file

@@ -2,70 +2,66 @@ import itertools
import os
import re
import salt
from docutils import nodes
from docutils.parsers.rst import Directive
from docutils.statemachine import ViewList
from sphinx import addnodes
from sphinx.directives import ObjectDescription
from sphinx.domains import Domain, ObjType
from sphinx.domains import python as python_domain
from sphinx.domains.python import PyObject
from sphinx.locale import _
from sphinx.roles import XRefRole
from sphinx.util.nodes import make_refnode
from sphinx.util.nodes import nested_parse_with_titles
from sphinx.util.nodes import set_source_info
from sphinx.domains import python as python_domain
import salt
from sphinx.util.nodes import make_refnode, nested_parse_with_titles, set_source_info
class Event(PyObject):
'''
"""
Document Salt events
'''
domain = 'salt'
"""
domain = "salt"
class LiterateCoding(Directive):
'''
"""
Auto-doc SLS files using literate-style comment/code separation
'''
"""
has_content = False
required_arguments = 1
optional_arguments = 0
final_argument_whitespace = False
def parse_file(self, fpath):
'''
"""
Read a file on the file system (relative to salt's base project dir)
:returns: A file-like object.
:raises IOError: If the file cannot be found or read.
'''
sdir = os.path.abspath(os.path.join(os.path.dirname(salt.__file__),
os.pardir))
with open(os.path.join(sdir, fpath), 'rb') as f:
"""
sdir = os.path.abspath(os.path.join(os.path.dirname(salt.__file__), os.pardir))
with open(os.path.join(sdir, fpath), "rb") as f:
return f.readlines()
def parse_lit(self, lines):
'''
"""
Parse a string line-by-line delineating comments and code
:returns: An tuple of boolean/list-of-string pairs. True designates a
comment; False designates code.
'''
comment_char = '#' # TODO: move this into a directive option
comment = re.compile(r'^\s*{0}[ \n]'.format(comment_char))
"""
comment_char = "#" # TODO: move this into a directive option
comment = re.compile(r"^\s*{0}[ \n]".format(comment_char))
section_test = lambda val: bool(comment.match(val))
sections = []
for is_doc, group in itertools.groupby(lines, section_test):
if is_doc:
text = [comment.sub('', i).rstrip('\r\n') for i in group]
text = [comment.sub("", i).rstrip("\r\n") for i in group]
else:
text = [i.rstrip('\r\n') for i in group]
text = [i.rstrip("\r\n") for i in group]
sections.append((is_doc, text))
@@ -79,33 +75,33 @@ class LiterateCoding(Directive):
return [document.reporter.warning(str(exc), line=self.lineno)]
node = nodes.container()
node['classes'] = ['lit-container']
node["classes"] = ["lit-container"]
node.document = self.state.document
enum = nodes.enumerated_list()
enum['classes'] = ['lit-docs']
enum["classes"] = ["lit-docs"]
node.append(enum)
# make first list item
list_item = nodes.list_item()
list_item['classes'] = ['lit-item']
list_item["classes"] = ["lit-item"]
for is_doc, line in lines:
if is_doc and line == ['']:
if is_doc and line == [""]:
continue
section = nodes.section()
if is_doc:
section['classes'] = ['lit-annotation']
section["classes"] = ["lit-annotation"]
nested_parse_with_titles(self.state, ViewList(line), section)
else:
section['classes'] = ['lit-content']
section["classes"] = ["lit-content"]
code = '\n'.join(line)
code = "\n".join(line)
literal = nodes.literal_block(code, code)
literal['language'] = 'yaml'
literal["language"] = "yaml"
set_source_info(self, literal)
section.append(literal)
@@ -116,42 +112,41 @@ class LiterateCoding(Directive):
if len(list_item.children) == 2:
enum.append(list_item)
list_item = nodes.list_item()
list_item['classes'] = ['lit-item']
list_item["classes"] = ["lit-item"]
# Non-semantic div for styling
bg = nodes.container()
bg['classes'] = ['lit-background']
bg["classes"] = ["lit-background"]
node.append(bg)
return [node]
class LiterateFormula(LiterateCoding):
'''
"""
Customizations to handle finding and parsing SLS files
'''
"""
def parse_file(self, sls_path):
'''
"""
Given a typical Salt SLS path (e.g.: apache.vhosts.standard), find the
file on the file system and parse it
'''
"""
config = self.state.document.settings.env.config
formulas_dirs = config.formulas_dirs
fpath = sls_path.replace('.', '/')
fpath = sls_path.replace(".", "/")
name_options = (
'{0}.sls'.format(fpath),
os.path.join(fpath, 'init.sls')
)
name_options = ("{0}.sls".format(fpath), os.path.join(fpath, "init.sls"))
paths = [os.path.join(fdir, fname)
for fname in name_options
for fdir in formulas_dirs]
paths = [
os.path.join(fdir, fname)
for fname in name_options
for fdir in formulas_dirs
]
for i in paths:
try:
with open(i, 'rb') as f:
with open(i, "rb") as f:
return f.readlines()
except IOError:
pass
@@ -160,7 +155,7 @@ class LiterateFormula(LiterateCoding):
class CurrentFormula(Directive):
domain = 'salt'
domain = "salt"
has_content = False
required_arguments = 1
optional_arguments = 0
@@ -170,15 +165,15 @@ class CurrentFormula(Directive):
def run(self):
env = self.state.document.settings.env
modname = self.arguments[0].strip()
if modname == 'None':
env.temp_data['salt:formula'] = None
if modname == "None":
env.temp_data["salt:formula"] = None
else:
env.temp_data['salt:formula'] = modname
env.temp_data["salt:formula"] = modname
return []
class Formula(Directive):
domain = 'salt'
domain = "salt"
has_content = True
required_arguments = 1
@@ -186,30 +181,31 @@ class Formula(Directive):
env = self.state.document.settings.env
formname = self.arguments[0].strip()
env.temp_data['salt:formula'] = formname
env.temp_data["salt:formula"] = formname
if 'noindex' in self.options:
if "noindex" in self.options:
return []
env.domaindata['salt']['formulas'][formname] = (
env.docname,
self.options.get('synopsis', ''),
self.options.get('platform', ''),
'deprecated' in self.options)
env.domaindata["salt"]["formulas"][formname] = (
env.docname,
self.options.get("synopsis", ""),
self.options.get("platform", ""),
"deprecated" in self.options,
)
targetnode = nodes.target('', '', ids=['module-' + formname],
ismod=True)
targetnode = nodes.target("", "", ids=["module-" + formname], ismod=True)
self.state.document.note_explicit_target(targetnode)
indextext = u'{0}-formula)'.format(formname)
inode = addnodes.index(entries=[('single', indextext,
'module-' + formname, '')])
indextext = u"{0}-formula)".format(formname)
inode = addnodes.index(
entries=[("single", indextext, "module-" + formname, "")]
)
return [targetnode, inode]
class State(Directive):
domain = 'salt'
domain = "salt"
has_content = True
required_arguments = 1
@@ -217,19 +213,18 @@ class State(Directive):
env = self.state.document.settings.env
statename = self.arguments[0].strip()
if 'noindex' in self.options:
if "noindex" in self.options:
return []
targetnode = nodes.target('', '', ids=['module-' + statename],
ismod=True)
targetnode = nodes.target("", "", ids=["module-" + statename], ismod=True)
self.state.document.note_explicit_target(targetnode)
formula = env.temp_data.get('salt:formula')
formula = env.temp_data.get("salt:formula")
indextext = u'{1} ({0}-formula)'.format(formula, statename)
inode = addnodes.index(entries=[
('single', indextext, 'module-{0}'.format(statename), ''),
])
indextext = u"{1} ({0}-formula)".format(formula, statename)
inode = addnodes.index(
entries=[("single", indextext, "module-{0}".format(statename), ""),]
)
return [targetnode, inode]
@@ -239,55 +234,56 @@ class SLSXRefRole(XRefRole):
class SaltModuleIndex(python_domain.PythonModuleIndex):
name = 'modindex'
localname = _('Salt Module Index')
shortname = _('all salt modules')
name = "modindex"
localname = _("Salt Module Index")
shortname = _("all salt modules")
class SaltDomain(python_domain.PythonDomain):
name = 'salt'
label = 'Salt'
name = "salt"
label = "Salt"
data_version = 2
object_types = python_domain.PythonDomain.object_types
object_types.update({
'state': ObjType(_('state'), 'state'),
})
object_types.update(
{"state": ObjType(_("state"), "state"),}
)
directives = python_domain.PythonDomain.directives
directives.update({
'event': Event,
'state': State,
'formula': LiterateFormula,
'currentformula': CurrentFormula,
'saltconfig': LiterateCoding,
})
directives.update(
{
"event": Event,
"state": State,
"formula": LiterateFormula,
"currentformula": CurrentFormula,
"saltconfig": LiterateCoding,
}
)
roles = python_domain.PythonDomain.roles
roles.update({
'formula': SLSXRefRole(),
})
roles.update(
{"formula": SLSXRefRole(),}
)
initial_data = python_domain.PythonDomain.initial_data
initial_data.update({
'formulas': {},
})
initial_data.update(
{"formulas": {},}
)
indices = [
SaltModuleIndex,
]
def resolve_xref(self, env, fromdocname, builder, type, target, node,
contnode):
if type == 'formula' and target in self.data['formulas']:
doc, _, _, _ = self.data['formulas'].get(target, (None, None))
def resolve_xref(self, env, fromdocname, builder, type, target, node, contnode):
if type == "formula" and target in self.data["formulas"]:
doc, _, _, _ = self.data["formulas"].get(target, (None, None))
if doc:
return make_refnode(builder, fromdocname, doc, target,
contnode, target)
return make_refnode(builder, fromdocname, doc, target, contnode, target)
else:
super(SaltDomain, self).resolve_xref(env, fromdocname, builder,
type, target, node, contnode)
super(SaltDomain, self).resolve_xref(
env, fromdocname, builder, type, target, node, contnode
)
# Monkey-patch the Python domain remove the python module index
python_domain.PythonDomain.indices = [SaltModuleIndex]
@@ -296,18 +292,34 @@ python_domain.PythonDomain.indices = [SaltModuleIndex]
def setup(app):
app.add_domain(SaltDomain)
formulas_path = 'templates/formulas'
formulas_dir = os.path.join(os.path.abspath(os.path.dirname(salt.__file__)),
formulas_path)
app.add_config_value('formulas_dirs', [formulas_dir], 'env')
formulas_path = "templates/formulas"
formulas_dir = os.path.join(
os.path.abspath(os.path.dirname(salt.__file__)), formulas_path
)
app.add_config_value("formulas_dirs", [formulas_dir], "env")
app.add_crossref_type(directivename="conf_master", rolename="conf_master",
indextemplate="pair: %s; conf/master")
app.add_crossref_type(directivename="conf_minion", rolename="conf_minion",
indextemplate="pair: %s; conf/minion")
app.add_crossref_type(directivename="conf_proxy", rolename="conf_proxy",
indextemplate="pair: %s; conf/proxy")
app.add_crossref_type(directivename="conf_log", rolename="conf_log",
indextemplate="pair: %s; conf/logging")
app.add_crossref_type(directivename="jinja_ref", rolename="jinja_ref",
indextemplate="pair: %s; jinja filters")
app.add_crossref_type(
directivename="conf_master",
rolename="conf_master",
indextemplate="pair: %s; conf/master",
)
app.add_crossref_type(
directivename="conf_minion",
rolename="conf_minion",
indextemplate="pair: %s; conf/minion",
)
app.add_crossref_type(
directivename="conf_proxy",
rolename="conf_proxy",
indextemplate="pair: %s; conf/proxy",
)
app.add_crossref_type(
directivename="conf_log",
rolename="conf_log",
indextemplate="pair: %s; conf/logging",
)
app.add_crossref_type(
directivename="jinja_ref",
rolename="jinja_ref",
indextemplate="pair: %s; jinja filters",
)

View file

@@ -1,24 +1,24 @@
# -*- coding: utf-8 -*-
'''
"""
saltrepo
~~~~~~~~
SaltStack Repository Sphinx directives
'''
"""
def source_read_handler(app, docname, source):
if '|repo_primary_branch|' in source[0]:
if "|repo_primary_branch|" in source[0]:
source[0] = source[0].replace(
'|repo_primary_branch|',
app.config.html_context['repo_primary_branch']
"|repo_primary_branch|", app.config.html_context["repo_primary_branch"]
)
def setup(app):
app.connect('source-read', source_read_handler)
app.connect("source-read", source_read_handler)
return {
'version': 'builtin',
'parallel_read_safe': True,
'parallel_write_safe': True,
"version": "builtin",
"parallel_read_safe": True,
"parallel_write_safe": True,
}

View file

@@ -1,22 +1,24 @@
'''
"""
Short-URL redirects
'''
"""
import json
import os
import sphinx.ext.intersphinx
DOCS_URL = 'http://docs.saltstack.com/en/latest/'
DOCS_URL = "http://docs.saltstack.com/en/latest/"
def write_urls_index(app, exc):
'''
"""
Generate a JSON file to serve as an index for short-URL lookups
'''
inventory = os.path.join(app.builder.outdir, 'objects.inv')
"""
inventory = os.path.join(app.builder.outdir, "objects.inv")
objects = sphinx.ext.intersphinx.fetch_inventory(app, DOCS_URL, inventory)
with open(os.path.join(app.builder.outdir, 'shorturls.json'), 'w') as f:
with open(os.path.join(app.builder.outdir, "shorturls.json"), "w") as f:
json.dump(objects, f)
def setup(app):
app.connect('build-finished', write_urls_index)
app.connect("build-finished", write_urls_index)

View file

@@ -36,8 +36,10 @@
from __future__ import division
import re
from docutils import nodes
from docutils.parsers.rst import directives
try:
from sphinx.util.compat import Directive
except ImportError:
@@ -140,7 +142,9 @@ class YouTube(Directive):
aspect = None
width = get_size(self.options, "width")
height = get_size(self.options, "height")
return [youtube(id=self.arguments[0], aspect=aspect, width=width, height=height)]
return [
youtube(id=self.arguments[0], aspect=aspect, width=width, height=height)
]
def setup(app):

View file

@@ -1,19 +1,19 @@
# -*- coding: utf-8 -*-
# pylint: disable=C0103,W0622
'''
"""
Sphinx documentation for Salt
'''
import sys
"""
import os
import re
import types
import sys
import time
import types
from sphinx.directives import TocTree
from sphinx.directives.other import TocTree
class Mock(object):
'''
"""
Mock out specified imports.
This allows autodoc to do its thing without having oodles of req'd
@@ -22,8 +22,11 @@ class Mock(object):
This Mock class can be configured to return specific values at specific names, if required.
http://read-the-docs.readthedocs.org/en/latest/faq.html#i-get-import-errors-on-libraries-that-depend-on-c-modules
'''
def __init__(self, mapping=None, *args, **kwargs): # pylint: disable=unused-argument
"""
def __init__(
self, mapping=None, *args, **kwargs
): # pylint: disable=unused-argument
"""
Mapping allows autodoc to bypass the Mock object, but actually assign
a specific value, expected by a specific attribute returned.
@@ -41,9 +44,9 @@ class Mock(object):
def __getattr__(self, name):
if name in self.__mapping:
data = self.__mapping.get(name)
elif name in ('__file__', '__path__'):
data = '/dev/null'
elif name in ('__mro_entries__', '__qualname__'):
elif name in ("__file__", "__path__"):
data = "/dev/null"
elif name in ("__mro_entries__", "__qualname__"):
raise AttributeError("'Mock' object has no attribute '%s'" % (name))
else:
data = Mock(mapping=self.__mapping)
@@ -61,7 +64,7 @@ class Mock(object):
def mock_decorator_with_params(*oargs, **okwargs): # pylint: disable=unused-argument
'''
"""
Optionally mock a decorator that takes parameters
E.g.:
@@ -69,153 +72,144 @@ def mock_decorator_with_params(*oargs, **okwargs):  # pylint: disable=unused-arg
@blah(stuff=True)
def things():
pass
'''
"""
def inner(fn, *iargs, **ikwargs): # pylint: disable=unused-argument
if hasattr(fn, '__call__'):
if hasattr(fn, "__call__"):
return fn
return Mock()
return inner
MOCK_MODULES = [
# Python stdlib
'user',
"user",
# salt core
'concurrent',
'Crypto',
'Crypto.Signature',
'Crypto.Cipher',
'Crypto.Hash',
'Crypto.PublicKey',
'Crypto.Random',
'Crypto.Signature',
'Crypto.Signature.PKCS1_v1_5',
'M2Crypto',
'msgpack',
'yaml',
'yaml.constructor',
'yaml.nodes',
'yaml.parser',
'yaml.scanner',
'zmq',
'zmq.eventloop',
'zmq.eventloop.ioloop',
"Crypto",
"Crypto.Signature",
"Crypto.Cipher",
"Crypto.Hash",
"Crypto.PublicKey",
"Crypto.Random",
"Crypto.Signature",
"Crypto.Signature.PKCS1_v1_5",
"M2Crypto",
"msgpack",
"yaml",
"yaml.constructor",
"yaml.nodes",
"yaml.parser",
"yaml.scanner",
"zmq",
"zmq.eventloop",
"zmq.eventloop.ioloop",
# third-party libs for cloud modules
'libcloud',
'libcloud.compute',
'libcloud.compute.base',
'libcloud.compute.deployment',
'libcloud.compute.providers',
'libcloud.compute.types',
'libcloud.loadbalancer',
'libcloud.loadbalancer.types',
'libcloud.loadbalancer.providers',
'libcloud.common',
'libcloud.common.google',
"libcloud",
"libcloud.compute",
"libcloud.compute.base",
"libcloud.compute.deployment",
"libcloud.compute.providers",
"libcloud.compute.types",
"libcloud.loadbalancer",
"libcloud.loadbalancer.types",
"libcloud.loadbalancer.providers",
"libcloud.common",
"libcloud.common.google",
# third-party libs for netapi modules
'cherrypy',
'cherrypy.lib',
'cherrypy.process',
'cherrypy.wsgiserver',
'cherrypy.wsgiserver.ssl_builtin',
'tornado',
'tornado.concurrent',
'tornado.escape',
'tornado.gen',
'tornado.httpclient',
'tornado.httpserver',
'tornado.httputil',
'tornado.ioloop',
'tornado.iostream',
'tornado.netutil',
'tornado.simple_httpclient',
'tornado.stack_context',
'tornado.web',
'tornado.websocket',
'tornado.locks',
'ws4py',
'ws4py.server',
'ws4py.server.cherrypyserver',
'ws4py.websocket',
"cherrypy",
"cherrypy.lib",
"cherrypy.process",
"cherrypy.wsgiserver",
"cherrypy.wsgiserver.ssl_builtin",
"tornado",
"tornado.concurrent",
"tornado.escape",
"tornado.gen",
"tornado.httpclient",
"tornado.httpserver",
"tornado.httputil",
"tornado.ioloop",
"tornado.iostream",
"tornado.netutil",
"tornado.simple_httpclient",
"tornado.stack_context",
"tornado.web",
"tornado.websocket",
"tornado.locks",
"ws4py",
"ws4py.server",
"ws4py.server.cherrypyserver",
"ws4py.websocket",
# modules, renderers, states, returners, et al
'ClusterShell',
'ClusterShell.NodeSet',
'MySQLdb',
'MySQLdb.cursors',
'OpenSSL',
'avahi',
'boto.regioninfo',
'concurrent',
'dbus',
'django',
'dns',
'dns.resolver',
'dson',
'hjson',
'jnpr',
'jnpr.junos',
'jnpr.junos.utils',
'jnpr.junos.utils.config',
'jnpr.junos.utils.sw',
'keyring',
'libvirt',
'lxml',
'lxml.etree',
'msgpack',
'nagios_json',
'napalm',
'netaddr',
'netaddr.IPAddress',
'netaddr.core',
'netaddr.core.AddrFormatError',
'ntsecuritycon',
'psutil',
'pycassa',
'pyconnman',
'pyiface',
'pymongo',
'pyroute2',
'pyroute2.ipdb',
'rabbitmq_server',
'redis',
'rpm',
'rpmUtils',
'rpmUtils.arch',
'salt.ext.six.moves.winreg',
'twisted',
'twisted.internet',
'twisted.internet.protocol',
'twisted.internet.protocol.DatagramProtocol',
'win32security',
'yum',
'zfs',
"ClusterShell",
"ClusterShell.NodeSet",
"MySQLdb",
"MySQLdb.cursors",
"OpenSSL",
"avahi",
"boto.regioninfo",
"dbus",
"django",
"dns",
"dns.resolver",
"dson",
"hjson",
"jnpr",
"jnpr.junos",
"jnpr.junos.utils",
"jnpr.junos.utils.config",
"jnpr.junos.utils.sw",
"keyring",
"libvirt",
"lxml",
"lxml.etree",
"msgpack",
"nagios_json",
"napalm",
"netaddr",
"netaddr.IPAddress",
"netaddr.core",
"netaddr.core.AddrFormatError",
"ntsecuritycon",
"psutil",
"pycassa",
"pyconnman",
"pyiface",
"pymongo",
"pyroute2",
"pyroute2.ipdb",
"rabbitmq_server",
"redis",
"rpm",
"rpmUtils",
"rpmUtils.arch",
"salt.ext.six.moves.winreg",
"twisted",
"twisted.internet",
"twisted.internet.protocol",
"twisted.internet.protocol.DatagramProtocol",
"win32security",
"yum",
"zfs",
]
MOCK_MODULES_MAPPING = {
'cherrypy': {'config': mock_decorator_with_params},
'ntsecuritycon': {
'STANDARD_RIGHTS_REQUIRED': 0,
'SYNCHRONIZE': 0,
},
'psutil': {'total': 0}, # Otherwise it will crash Sphinx
"cherrypy": {"config": mock_decorator_with_params},
"ntsecuritycon": {"STANDARD_RIGHTS_REQUIRED": 0, "SYNCHRONIZE": 0,},
"psutil": {"total": 0}, # Otherwise it will crash Sphinx
}
for mod_name in MOCK_MODULES:
sys.modules[mod_name] = Mock(mapping=MOCK_MODULES_MAPPING.get(mod_name))
# Define a fake version attribute for the following libs.
sys.modules['libcloud'].__version__ = '0.0.0'
sys.modules['msgpack'].version = (1, 0, 0)
sys.modules['psutil'].version_info = (3, 0, 0)
sys.modules['pymongo'].version = '0.0.0'
sys.modules['tornado'].version_info = (0, 0, 0)
sys.modules['boto.regioninfo']._load_json_file = {'endpoints': None}
sys.modules["libcloud"].__version__ = "0.0.0"
sys.modules["msgpack"].version = (1, 0, 0)
sys.modules["psutil"].version_info = (3, 0, 0)
sys.modules["pymongo"].version = "0.0.0"
sys.modules["tornado"].version_info = (0, 0, 0)
sys.modules["boto.regioninfo"]._load_json_file = {"endpoints": None}
# -- Add paths to PYTHONPATH ---------------------------------------------------
@@ -224,102 +218,114 @@ try:
except NameError:
# sphinx-intl and six execute some code which will raise this NameError
# assume we're in the doc/ directory
docs_basepath = os.path.abspath(os.path.dirname('.'))
docs_basepath = os.path.abspath(os.path.dirname("."))
addtl_paths = (
os.pardir, # salt itself (for autodoc)
'_ext', # custom Sphinx extensions
"_ext", # custom Sphinx extensions
)
for addtl_path in addtl_paths:
sys.path.insert(0, os.path.abspath(os.path.join(docs_basepath, addtl_path)))
# We're now able to import salt
import salt.version
import salt.version # isort:skip
formulas_dir = os.path.join(os.pardir, docs_basepath, 'formulas')
formulas_dir = os.path.join(os.pardir, docs_basepath, "formulas")
# ----- Intersphinx Settings ------------------------------------------------>
intersphinx_mapping = {
'python': ('https://docs.python.org/3', None)
}
intersphinx_mapping = {"python": ("https://docs.python.org/3", None)}
# <---- Intersphinx Settings -------------------------------------------------
# -- General Configuration -----------------------------------------------------
# Set a var if we're building docs for the live site or not
on_saltstack = 'SALT_ON_SALTSTACK' in os.environ
on_saltstack = "SALT_ON_SALTSTACK" in os.environ
project = 'Salt'
repo_primary_branch = 'master' # This is the default branch on GitHub for the Salt project
project = "Salt"
repo_primary_branch = (
"master" # This is the default branch on GitHub for the Salt project
)
version = salt.version.__version__
latest_release = os.environ.get('LATEST_RELEASE', 'latest_release') # latest release (2019.2.3)
previous_release = os.environ.get('PREVIOUS_RELEASE', 'previous_release') # latest release from previous branch (2018.3.5)
previous_release_dir = os.environ.get('PREVIOUS_RELEASE_DIR', 'previous_release_dir') # path on web server for previous branch (2018.3)
next_release = '' # next release
next_release_dir = '' # path on web server for next release branch
latest_release = os.environ.get(
"LATEST_RELEASE", "latest_release"
) # latest release (2019.2.3)
previous_release = os.environ.get(
"PREVIOUS_RELEASE", "previous_release"
) # latest release from previous branch (2018.3.5)
previous_release_dir = os.environ.get(
"PREVIOUS_RELEASE_DIR", "previous_release_dir"
) # path on web server for previous branch (2018.3)
next_release = "" # next release
next_release_dir = "" # path on web server for next release branch
today = ''
copyright = ''
today = ""
copyright = ""
if on_saltstack:
today = "Generated on " + time.strftime("%B %d, %Y") + " at " + time.strftime("%X %Z") + "."
today = (
"Generated on "
+ time.strftime("%B %d, %Y")
+ " at "
+ time.strftime("%X %Z")
+ "."
)
copyright = time.strftime("%Y")
# < --- START do not merge these settings to other branches START ---> #
build_type = os.environ.get('BUILD_TYPE', repo_primary_branch) # latest, previous, master, next
build_type = os.environ.get(
"BUILD_TYPE", repo_primary_branch
) # latest, previous, master, next
# < --- END do not merge these settings to other branches END ---> #
# Set google custom search engine
if build_type == repo_primary_branch:
release = latest_release
search_cx = '011515552685726825874:v1had6i279q' # master
#search_cx = '011515552685726825874:x17j5zl74g8' # develop
elif build_type == 'next':
search_cx = "011515552685726825874:v1had6i279q" # master
# search_cx = '011515552685726825874:x17j5zl74g8' # develop
elif build_type == "next":
release = next_release
search_cx = '011515552685726825874:ht0p8miksrm' # latest
elif build_type == 'previous':
search_cx = "011515552685726825874:ht0p8miksrm" # latest
elif build_type == "previous":
release = previous_release
if release.startswith('3000'):
search_cx = '011515552685726825874:3skhaozjtyn' # 3000
elif release.startswith('2019.2'):
search_cx = '011515552685726825874:huvjhlpptnm' # 2019.2
elif release.startswith('2018.3'):
search_cx = '011515552685726825874:vadptdpvyyu' # 2018.3
elif release.startswith('2017.7'):
search_cx = '011515552685726825874:w-hxmnbcpou' # 2017.7
elif release.startswith('2016.11'):
search_cx = '011515552685726825874:dlsj745pvhq' # 2016.11
if release.startswith("3000"):
search_cx = "011515552685726825874:3skhaozjtyn" # 3000
elif release.startswith("2019.2"):
search_cx = "011515552685726825874:huvjhlpptnm" # 2019.2
elif release.startswith("2018.3"):
search_cx = "011515552685726825874:vadptdpvyyu" # 2018.3
elif release.startswith("2017.7"):
search_cx = "011515552685726825874:w-hxmnbcpou" # 2017.7
elif release.startswith("2016.11"):
search_cx = "011515552685726825874:dlsj745pvhq" # 2016.11
else:
search_cx = '011515552685726825874:ht0p8miksrm' # latest
else: # latest or something else
search_cx = "011515552685726825874:ht0p8miksrm" # latest
else: # latest or something else
release = latest_release
search_cx = '011515552685726825874:ht0p8miksrm' # latest
search_cx = "011515552685726825874:ht0p8miksrm" # latest
needs_sphinx = '1.3'
needs_sphinx = "1.3"
spelling_lang = 'en_US'
language = 'en'
spelling_lang = "en_US"
language = "en"
locale_dirs = [
'_locale',
"_locale",
]
master_doc = 'contents'
templates_path = ['_templates']
exclude_patterns = ['_build', '_incl/*', 'ref/cli/_includes/*.rst']
master_doc = "contents"
templates_path = ["_templates"]
exclude_patterns = ["_build", "_incl/*", "ref/cli/_includes/*.rst"]
extensions = [
'saltdomain', # Must come early
'sphinx.ext.autodoc',
'sphinx.ext.napoleon',
'sphinx.ext.autosummary',
'sphinx.ext.extlinks',
'sphinx.ext.intersphinx',
'httpdomain',
'youtube',
'saltrepo'
"saltdomain", # Must come early
"sphinx.ext.autodoc",
"sphinx.ext.napoleon",
"sphinx.ext.autosummary",
"sphinx.ext.extlinks",
"sphinx.ext.intersphinx",
"httpdomain",
"youtube",
"saltrepo"
#'saltautodoc', # Must be AFTER autodoc
#'shorturls',
]
@@ -329,14 +335,14 @@ try:
except ImportError:
pass
else:
extensions += ['sphinxcontrib.spelling']
extensions += ["sphinxcontrib.spelling"]
modindex_common_prefix = ['salt.']
modindex_common_prefix = ["salt."]
autosummary_generate = True
# strip git rev as there won't necessarily be a release based on it
stripped_release = re.sub(r'-\d+-g[0-9a-f]+$', '', release)
stripped_release = re.sub(r"-\d+-g[0-9a-f]+$", "", release)
# Define a substitution for linking to the latest release tarball
rst_prolog = """\
@@ -374,87 +380,91 @@ rst_prolog = """\
<p>x86_64: <a href="https://repo.saltstack.com/osx/salt-{release}-py3-x86_64.pkg"><strong>salt-{release}-py3-x86_64.pkg</strong></a>
| <a href="https://repo.saltstack.com/osx/salt-{release}-py3-x86_64.pkg.md5"><strong>md5</strong></a></p>
""".format(release=stripped_release)
""".format(
release=stripped_release
)
# A shortcut for linking to tickets on the GitHub issue tracker
extlinks = {
'blob': ('https://github.com/saltstack/salt/blob/%s/%%s' % repo_primary_branch, None),
'issue': ('https://github.com/saltstack/salt/issues/%s', 'issue #'),
'pull': ('https://github.com/saltstack/salt/pull/%s', 'PR #'),
'formula_url': ('https://github.com/saltstack-formulas/%s', ''),
"blob": (
"https://github.com/saltstack/salt/blob/%s/%%s" % repo_primary_branch,
None,
),
"issue": ("https://github.com/saltstack/salt/issues/%s", "issue #"),
"pull": ("https://github.com/saltstack/salt/pull/%s", "PR #"),
"formula_url": ("https://github.com/saltstack-formulas/%s", ""),
}
# ----- Localization -------------------------------------------------------->
locale_dirs = ['locale/']
locale_dirs = ["locale/"]
gettext_compact = False
# <---- Localization ---------------------------------------------------------
### HTML options
# set 'HTML_THEME=saltstack' to use previous theme
html_theme = os.environ.get('HTML_THEME', 'saltstack2')
html_theme_path = ['_themes']
html_title = u''
html_short_title = 'Salt'
html_theme = os.environ.get("HTML_THEME", "saltstack2")
html_theme_path = ["_themes"]
html_title = u""
html_short_title = "Salt"
html_static_path = ['_static']
html_logo = None # specified in the theme layout.html
html_favicon = 'favicon.ico'
html_static_path = ["_static"]
html_logo = None # specified in the theme layout.html
html_favicon = "favicon.ico"
smartquotes = False
# Use Google customized search or use Sphinx built-in JavaScript search
if on_saltstack:
html_search_template = 'googlesearch.html'
html_search_template = "googlesearch.html"
else:
html_search_template = 'searchbox.html'
html_search_template = "searchbox.html"
html_additional_pages = {
'404': '404.html',
"404": "404.html",
}
html_default_sidebars = [
html_search_template,
'version.html',
'localtoc.html',
'relations.html',
'sourcelink.html',
'saltstack.html',
"version.html",
"localtoc.html",
"relations.html",
"sourcelink.html",
"saltstack.html",
]
html_sidebars = {
'ref/**/all/salt.*': [
"ref/**/all/salt.*": [
html_search_template,
'version.html',
'modules-sidebar.html',
'localtoc.html',
'relations.html',
'sourcelink.html',
'saltstack.html',
],
'ref/formula/all/*': [
"version.html",
"modules-sidebar.html",
"localtoc.html",
"relations.html",
"sourcelink.html",
"saltstack.html",
],
"ref/formula/all/*": [],
}
html_context = {
'on_saltstack': on_saltstack,
'html_default_sidebars': html_default_sidebars,
'github_base': 'https://github.com/saltstack/salt',
'github_issues': 'https://github.com/saltstack/salt/issues',
'github_downloads': 'https://github.com/saltstack/salt/downloads',
'latest_release': latest_release,
'previous_release': previous_release,
'previous_release_dir': previous_release_dir,
'next_release': next_release,
'next_release_dir': next_release_dir,
'search_cx': search_cx,
'build_type': build_type,
'today': today,
'copyright': copyright,
'repo_primary_branch': repo_primary_branch
"on_saltstack": on_saltstack,
"html_default_sidebars": html_default_sidebars,
"github_base": "https://github.com/saltstack/salt",
"github_issues": "https://github.com/saltstack/salt/issues",
"github_downloads": "https://github.com/saltstack/salt/downloads",
"latest_release": latest_release,
"previous_release": previous_release,
"previous_release_dir": previous_release_dir,
"next_release": next_release,
"next_release_dir": next_release_dir,
"search_cx": search_cx,
"build_type": build_type,
"today": today,
"copyright": copyright,
"repo_primary_branch": repo_primary_branch,
}
html_use_index = True
html_last_updated_fmt = '%b %d, %Y'
html_last_updated_fmt = "%b %d, %Y"
html_show_sourcelink = False
html_show_sphinx = True
html_show_copyright = True
@@ -462,20 +472,20 @@ html_show_copyright = True
### Latex options
latex_documents = [
('contents', 'Salt.tex', 'Salt Documentation', 'SaltStack, Inc.', 'manual'),
("contents", "Salt.tex", "Salt Documentation", "SaltStack, Inc.", "manual"),
]
latex_logo = '_static/salt-logo.png'
latex_logo = "_static/salt-logo.png"
latex_elements = {
'inputenc': '', # use XeTeX instead of the inputenc LaTeX package.
'utf8extra': '',
'preamble': r'''
"inputenc": "", # use XeTeX instead of the inputenc LaTeX package.
"utf8extra": "",
"preamble": r"""
\usepackage{fontspec}
\setsansfont{Linux Biolinum O}
\setromanfont{Linux Libertine O}
\setmonofont{Source Code Pro}
''',
""",
}
### Linux Biolinum, Linux Libertine: http://www.linuxlibertine.org/
### Source Code Pro: https://github.com/adobe-fonts/source-code-pro/releases
@@ -483,34 +493,34 @@ latex_elements = {
### Linkcheck options
linkcheck_ignore = [
r'http://127.0.0.1',
r'http://salt:\d+',
r'http://local:\d+',
r'https://console.aws.amazon.com',
r'http://192.168.33.10',
r'http://domain:\d+',
r'http://123.456.789.012:\d+',
r'http://localhost',
r'https://groups.google.com/forum/#!forum/salt-users',
r'http://logstash.net/docs/latest/inputs/udp',
r'http://logstash.net/docs/latest/inputs/zeromq',
r'http://www.youtube.com/saltstack',
r'https://raven.readthedocs.io',
r'https://getsentry.com',
r'https://salt-cloud.readthedocs.io',
r'https://salt.readthedocs.io',
r'http://www.pip-installer.org/',
r'http://www.windowsazure.com/',
r'https://github.com/watching',
r'dash-feed://',
r'https://github.com/saltstack/salt/',
r'http://bootstrap.saltstack.org',
r'https://bootstrap.saltstack.com',
r'https://raw.githubusercontent.com/saltstack/salt-bootstrap/stable/bootstrap-salt.sh',
r'media.readthedocs.org/dash/salt/latest/salt.xml',
r'https://portal.aws.amazon.com/gp/aws/securityCredentials',
r'https://help.github.com/articles/fork-a-repo',
r'dash-feed://https%3A//media.readthedocs.org/dash/salt/latest/salt.xml',
r"http://127.0.0.1",
r"http://salt:\d+",
r"http://local:\d+",
r"https://console.aws.amazon.com",
r"http://192.168.33.10",
r"http://domain:\d+",
r"http://123.456.789.012:\d+",
r"http://localhost",
r"https://groups.google.com/forum/#!forum/salt-users",
r"http://logstash.net/docs/latest/inputs/udp",
r"http://logstash.net/docs/latest/inputs/zeromq",
r"http://www.youtube.com/saltstack",
r"https://raven.readthedocs.io",
r"https://getsentry.com",
r"https://salt-cloud.readthedocs.io",
r"https://salt.readthedocs.io",
r"http://www.pip-installer.org/",
r"http://www.windowsazure.com/",
r"https://github.com/watching",
r"dash-feed://",
r"https://github.com/saltstack/salt/",
r"http://bootstrap.saltstack.org",
r"https://bootstrap.saltstack.com",
r"https://raw.githubusercontent.com/saltstack/salt-bootstrap/stable/bootstrap-salt.sh",
r"media.readthedocs.org/dash/salt/latest/salt.xml",
r"https://portal.aws.amazon.com/gp/aws/securityCredentials",
r"https://help.github.com/articles/fork-a-repo",
r"dash-feed://https%3A//media.readthedocs.org/dash/salt/latest/salt.xml",
]
linkcheck_anchors = False
@@ -519,53 +529,53 @@ linkcheck_anchors = False
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
authors = [
'Thomas S. Hatch <thatch45@gmail.com> and many others, please see the Authors file',
"Thomas S. Hatch <thatch45@gmail.com> and many others, please see the Authors file",
]
man_pages = [
('contents', 'salt', 'Salt Documentation', authors, 7),
('ref/cli/salt', 'salt', 'salt', authors, 1),
('ref/cli/salt-master', 'salt-master', 'salt-master Documentation', authors, 1),
('ref/cli/salt-minion', 'salt-minion', 'salt-minion Documentation', authors, 1),
('ref/cli/salt-key', 'salt-key', 'salt-key Documentation', authors, 1),
('ref/cli/salt-cp', 'salt-cp', 'salt-cp Documentation', authors, 1),
('ref/cli/salt-call', 'salt-call', 'salt-call Documentation', authors, 1),
('ref/cli/salt-proxy', 'salt-proxy', 'salt-proxy Documentation', authors, 1),
('ref/cli/salt-syndic', 'salt-syndic', 'salt-syndic Documentation', authors, 1),
('ref/cli/salt-run', 'salt-run', 'salt-run Documentation', authors, 1),
('ref/cli/salt-ssh', 'salt-ssh', 'salt-ssh Documentation', authors, 1),
('ref/cli/salt-cloud', 'salt-cloud', 'Salt Cloud Command', authors, 1),
('ref/cli/salt-api', 'salt-api', 'salt-api Command', authors, 1),
('ref/cli/salt-unity', 'salt-unity', 'salt-unity Command', authors, 1),
('ref/cli/spm', 'spm', 'Salt Package Manager Command', authors, 1),
("contents", "salt", "Salt Documentation", authors, 7),
("ref/cli/salt", "salt", "salt", authors, 1),
("ref/cli/salt-master", "salt-master", "salt-master Documentation", authors, 1),
("ref/cli/salt-minion", "salt-minion", "salt-minion Documentation", authors, 1),
("ref/cli/salt-key", "salt-key", "salt-key Documentation", authors, 1),
("ref/cli/salt-cp", "salt-cp", "salt-cp Documentation", authors, 1),
("ref/cli/salt-call", "salt-call", "salt-call Documentation", authors, 1),
("ref/cli/salt-proxy", "salt-proxy", "salt-proxy Documentation", authors, 1),
("ref/cli/salt-syndic", "salt-syndic", "salt-syndic Documentation", authors, 1),
("ref/cli/salt-run", "salt-run", "salt-run Documentation", authors, 1),
("ref/cli/salt-ssh", "salt-ssh", "salt-ssh Documentation", authors, 1),
("ref/cli/salt-cloud", "salt-cloud", "Salt Cloud Command", authors, 1),
("ref/cli/salt-api", "salt-api", "salt-api Command", authors, 1),
("ref/cli/salt-unity", "salt-unity", "salt-unity Command", authors, 1),
("ref/cli/spm", "spm", "Salt Package Manager Command", authors, 1),
]
### epub options
epub_title = 'Salt Documentation'
epub_author = 'SaltStack, Inc.'
epub_title = "Salt Documentation"
epub_author = "SaltStack, Inc."
epub_publisher = epub_author
epub_copyright = copyright
epub_scheme = 'URL'
epub_identifier = 'http://saltstack.com/'
epub_scheme = "URL"
epub_identifier = "http://saltstack.com/"
epub_tocdup = False
#epub_tocdepth = 3
# epub_tocdepth = 3
def skip_mod_init_member(app, what, name, obj, skip, options):
# pylint: disable=too-many-arguments,unused-argument
if name.startswith('_'):
if name.startswith("_"):
return True
if isinstance(obj, types.FunctionType) and obj.__name__ == 'mod_init':
if isinstance(obj, types.FunctionType) and obj.__name__ == "mod_init":
return True
return False
def _normalize_version(args):
_, path = args
return '.'.join([x.zfill(4) for x in (path.split('/')[-1].split('.'))])
return ".".join([x.zfill(4) for x in (path.split("/")[-1].split("."))])
class ReleasesTree(TocTree):
@@ -573,12 +583,12 @@ class ReleasesTree(TocTree):
def run(self):
rst = super(ReleasesTree, self).run()
entries = rst[0][0]['entries'][:]
entries = rst[0][0]["entries"][:]
entries.sort(key=_normalize_version, reverse=True)
rst[0][0]['entries'][:] = entries
rst[0][0]["entries"][:] = entries
return rst
def setup(app):
app.add_directive('releasestree', ReleasesTree)
app.connect('autodoc-skip-member', skip_mod_init_member)
app.add_directive("releasestree", ReleasesTree)
app.connect("autodoc-skip-member", skip_mod_init_member)

View file

@@ -104,7 +104,7 @@ Glossary
or stored externally.
Job ID
A unique identifier to represent a given :term:`job`. This is often
A unique identifier to represent a given :term:`job <Job>`. This is often
shortened to JID.
Low State
@@ -227,7 +227,7 @@ Glossary
Contains a set of :term:`state declarations <State Declaration>`.
State Compiler
Translates :term:`highdata` into lowdata.
Translates :term:`highdata <Highdata>` into lowdata.
State Declaration
A data structure which contains a unique ID and describes one or more

View file

@@ -11,16 +11,18 @@ beacon modules
:template: autosummary.rst.tmpl
adb
aix_account
avahi_announce
bonjour_announce
btmp
cert_info
diskusage
glxinfo
haproxy
inotify
journald
load
log
log_beacon
memusage
napalm_beacon
network_info
@@ -32,7 +34,10 @@ beacon modules
sensehat
service
sh
smartos_imgadm
smartos_vmadm
status
telegram_bot_msg
twilio_txt_msg
watchdog
wtmp

View file

@@ -0,0 +1,5 @@
salt.beacons.aix_account module
===============================
.. automodule:: salt.beacons.aix_account
:members:

View file

@@ -0,0 +1,6 @@
======================
salt.beacons.cert_info
======================
.. automodule:: salt.beacons.cert_info
:members:

View file

@@ -0,0 +1,5 @@
salt.beacons.smartos_imgadm module
==================================
.. automodule:: salt.beacons.smartos_imgadm
:members:

View file

@@ -0,0 +1,5 @@
salt.beacons.smartos_vmadm module
=================================
.. automodule:: salt.beacons.smartos_vmadm
:members:

View file

@@ -0,0 +1,5 @@
salt.beacons.watchdog module
============================
.. automodule:: salt.beacons.watchdog
:members:

View file

@@ -2,6 +2,12 @@
Command Line Reference
======================
salt-api
========
.. toctree::
salt-api
salt-call
=========
.. toctree::
@@ -80,12 +86,6 @@ salt-unity
salt-unity
salt-api
========
.. toctree::
salt-api
spm
===
.. toctree::

View file

@@ -105,6 +105,14 @@ Options
Pass a JID to be used instead of generating one.
.. option:: --pre-flight
Run the ssh_pre_flight script defined in the roster.
By default this script only runs if the thin dir
does not exist on the target minion. This option
forces the script to run regardless of whether the
thin dir exists.
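For example, a roster entry that defines such a script might look like the
following sketch; the minion name, host, user, and script path are
placeholders:

.. code-block:: yaml

    minion1:
      host: 192.0.2.10
      user: root
      ssh_pre_flight: /srv/salt/pre_flight.sh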
Authentication Options
----------------------

View file

@@ -12,6 +12,7 @@ cloud modules
aliyun
azurearm
clc
cloudstack
digitalocean
dimensiondata
@@ -19,12 +20,14 @@ cloud modules
gce
gogrid
joyent
libvirt
linode
lxc
msazure
oneandone
opennebula
openstack
packet
parallels
profitbricks
proxmox

View file

@@ -0,0 +1,6 @@
=====================
salt.cloud.clouds.clc
=====================
.. automodule:: salt.cloud.clouds.clc
:members:

View file

@@ -0,0 +1,6 @@
=========================
salt.cloud.clouds.libvirt
=========================
.. automodule:: salt.cloud.clouds.libvirt
:members:

View file

@@ -0,0 +1,6 @@
========================
salt.cloud.clouds.packet
========================
.. automodule:: salt.cloud.clouds.packet
:members:

View file

@@ -5,8 +5,8 @@ Configuring Salt
================
Salt configuration is very simple. The default configuration for the
:term:`master` will work for most installations and the only requirement for
setting up a :term:`minion` is to set the location of the master in the minion
:term:`master <Master>` will work for most installations and the only requirement for
setting up a :term:`minion <Minion>` is to set the location of the master in the minion
configuration file.
The configuration files will be installed to :file:`/etc/salt` and are named

View file

@@ -484,6 +484,22 @@ grains for the master.
enable_gpu_grains: True
.. conf_master:: skip_grains
``skip_grains``
---------------------
Default: ``False``
MasterMinions should omit grains. A MasterMinion is "a minion function object
for generic use on the master" that omits pillar. A RunnerClient creates a
MasterMinion omitting states and renderer. Setting this to ``True`` can
improve master performance.
.. code-block:: yaml
skip_grains: True
.. conf_master:: job_cache
``job_cache``
@@ -1341,6 +1357,15 @@ salt-ssh.
groupA: minion1,minion2
groupB: minion1,minion3
.. conf_master:: ssh_run_pre_flight
``ssh_run_pre_flight``
----------------------
Default: ``False``
Run the ssh_pre_flight script defined in the salt-ssh roster. By default
the script only runs when the thin dir does not exist on the targeted
minion. Setting this to ``True`` forces the script to run without first
checking whether the thin dir exists.
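A minimal sketch of enabling this behavior in the master configuration; the
value shown is illustrative:

.. code-block:: yaml

    ssh_run_pre_flight: True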
.. conf_master:: thin_extra_mods
``thin_extra_mods``
@@ -4008,7 +4033,7 @@ ext_pillar keys to override those from :conf_master:`pillar_roots`.
ext_pillar_first: False
.. conf_minion:: pillarenv_from_saltenv
.. conf_master:: pillarenv_from_saltenv
``pillarenv_from_saltenv``
--------------------------

View file

@@ -710,7 +710,7 @@ This directory may contain sensitive data and should be protected accordingly.
cachedir: /var/cache/salt/minion
.. conf_master:: color_theme
.. conf_minion:: color_theme
``color_theme``
---------------
@@ -831,12 +831,28 @@ Default: ``False``
The minion can locally cache grain data instead of refreshing the data
each time the grain is referenced. By default this feature is disabled,
to enable set grains_cache to ``True``.
to enable it, set ``grains_cache`` to ``True``.
.. code-block:: yaml
grains_cache: False
.. conf_minion:: grains_cache_expiration
``grains_cache_expiration``
---------------------------
Default: ``300``
Grains cache expiration, in seconds. If the cache file is older than this number
of seconds then the grains cache will be dumped and fully re-populated with
fresh data. Defaults to 5 minutes. Will have no effect if
:conf_minion:`grains_cache` is not enabled.
.. code-block:: yaml
grains_cache_expiration: 300
.. conf_minion:: grains_deep_merge
``grains_deep_merge``
@@ -2207,6 +2223,9 @@ auto-loading modules when states run, set this value to ``False``.
.. conf_minion:: clean_dynamic_modules
``clean_dynamic_modules``
-------------------------
Default: ``True``
clean_dynamic_modules keeps the dynamic modules on the minion in sync with
@@ -2700,7 +2719,7 @@ minion to clean the keys.
Default: ``''``
Fingerprint of the master public key to validate the identity of your Salt master
before the initial key exchange. The master fingerprint can be found by running
before the initial key exchange. The master fingerprint can be found as ``master.pub`` by running
"salt-key -F master" on the Salt master.
.. code-block:: yaml

View file

@@ -15,11 +15,13 @@ engine modules
http_logstash
ircbot
junos_syslog
libvirt_events
logentries
logstash_engine
napalm_syslog
reactor
redis_sentinel
script
slack
sqs_events
stalekey

View file

@@ -0,0 +1,6 @@
salt.engines.libvirt_events module
==================================
.. automodule:: salt.engines.libvirt_events
:members:
:undoc-members:

View file

@@ -0,0 +1,6 @@
salt.engines.script module
==========================
.. automodule:: salt.engines.script
:members:
:undoc-members:
Some files were not shown because too many files have changed in this diff Show more