Merge branch 'master' into docs/fix-links-to-serializers

commit a6ae9a8dce
Frode Gundersen, 2020-04-14 09:39:08 -06:00 (committed via GitHub)
2792 changed files with 429696 additions and 366061 deletions

@@ -1,4 +1,4 @@
@Library('salt@master-1.6') _
@Library('salt@master-1.7') _
runDocs(
env: env)

@@ -1,16 +0,0 @@
@Library('salt@master-1.6') _
runTestSuite(
concurrent_builds: 1,
distro_name: 'amazon',
distro_version: '1',
env: env,
golden_images_branch: 'master',
jenkins_slave_label: 'kitchen-slave',
nox_env_name: 'runtests-zeromq',
nox_passthrough_opts: '--ssh-tests',
python_version: 'py2',
testrun_timeout: 7,
use_spot_instances: false)
// vim: ft=groovy

@@ -1,16 +0,0 @@
@Library('salt@master-1.6') _
runTestSuite(
concurrent_builds: 1,
distro_name: 'amazon',
distro_version: '2',
env: env,
golden_images_branch: 'master',
jenkins_slave_label: 'kitchen-slave',
nox_env_name: 'runtests-zeromq',
nox_passthrough_opts: '--ssh-tests',
python_version: 'py2',
testrun_timeout: 6,
use_spot_instances: true)
// vim: ft=groovy

@@ -1,4 +1,4 @@
@Library('salt@master-1.6') _
@Library('salt@master-1.7') _
runTestSuite(
concurrent_builds: 1,

@@ -1,16 +0,0 @@
@Library('salt@master-1.6') _
runTestSuite(
concurrent_builds: 1,
distro_name: 'arch',
distro_version: 'lts',
env: env,
golden_images_branch: 'master',
jenkins_slave_label: 'kitchen-slave',
nox_env_name: 'runtests-zeromq',
nox_passthrough_opts: '-n integration.modules.test_pkg',
python_version: 'py2',
testrun_timeout: 6,
use_spot_instances: true)
// vim: ft=groovy

@@ -1,4 +1,4 @@
@Library('salt@master-1.6') _
@Library('salt@master-1.7') _
runTestSuite(
concurrent_builds: 1,

@@ -1,16 +0,0 @@
@Library('salt@master-1.6') _
runTestSuite(
concurrent_builds: 1,
distro_name: 'centos',
distro_version: '6',
env: env,
golden_images_branch: 'master',
jenkins_slave_label: 'kitchen-slave',
nox_env_name: 'runtests-zeromq',
nox_passthrough_opts: '--ssh-tests',
python_version: 'py2',
testrun_timeout: 6,
use_spot_instances: true)
// vim: ft=groovy

@@ -1,16 +0,0 @@
@Library('salt@master-1.6') _
runTestSuite(
concurrent_builds: 1,
distro_name: 'centos',
distro_version: '7',
env: env,
golden_images_branch: 'master',
jenkins_slave_label: 'kitchen-slave',
nox_env_name: 'runtests-zeromq',
nox_passthrough_opts: '--ssh-tests',
python_version: 'py2',
testrun_timeout: 6,
use_spot_instances: true)
// vim: ft=groovy

@@ -1,17 +0,0 @@
@Library('salt@master-1.6') _
runTestSuite(
concurrent_builds: 0,
distro_name: 'centos',
distro_version: '7',
env: env,
golden_images_branch: 'master',
jenkins_slave_label: 'kitchen-slave',
kitchen_platforms_file: '/var/jenkins/workspace/nox-cloud-platforms.yml',
nox_env_name: 'runtests-cloud',
nox_passthrough_opts: '',
python_version: 'py2',
testrun_timeout: 6,
use_spot_instances: true)
// vim: ft=groovy

@@ -1,16 +0,0 @@
@Library('salt@master-1.6') _
runTestSuite(
concurrent_builds: 1,
distro_name: 'centos',
distro_version: '7',
env: env,
golden_images_branch: 'master',
jenkins_slave_label: 'kitchen-slave',
nox_env_name: 'runtests-zeromq-m2crypto',
nox_passthrough_opts: '--ssh-tests',
python_version: 'py2',
testrun_timeout: 6,
use_spot_instances: true)
// vim: ft=groovy

@@ -1,17 +0,0 @@
@Library('salt@master-1.6') _
runTestSuite(
concurrent_builds: 1,
distro_name: 'centos',
distro_version: '7',
env: env,
extra_codecov_flags: ["proxy"],
golden_images_branch: 'master',
jenkins_slave_label: 'kitchen-slave',
nox_env_name: 'runtests-zeromq',
nox_passthrough_opts: '--proxy',
python_version: 'py2',
testrun_timeout: 6,
use_spot_instances: true)
// vim: ft=groovy

@@ -1,16 +0,0 @@
@Library('salt@master-1.6') _
runTestSuite(
concurrent_builds: 1,
distro_name: 'centos',
distro_version: '7',
env: env,
golden_images_branch: 'master',
jenkins_slave_label: 'kitchen-slave',
nox_env_name: 'runtests-zeromq-pycryptodomex',
nox_passthrough_opts: '--ssh-tests',
python_version: 'py2',
testrun_timeout: 6,
use_spot_instances: true)
// vim: ft=groovy

@@ -1,16 +0,0 @@
@Library('salt@master-1.6') _
runTestSuite(
concurrent_builds: 1,
distro_name: 'centos',
distro_version: '7',
env: env,
golden_images_branch: 'master',
jenkins_slave_label: 'kitchen-slave',
nox_env_name: 'runtests-tcp',
nox_passthrough_opts: '--ssh-tests',
python_version: 'py2',
testrun_timeout: 6,
use_spot_instances: true)
// vim: ft=groovy

@@ -1,16 +0,0 @@
@Library('salt@master-1.6') _
runTestSuite(
concurrent_builds: 1,
distro_name: 'centos',
distro_version: '7',
env: env,
golden_images_branch: 'master',
jenkins_slave_label: 'kitchen-slave',
nox_env_name: 'runtests-tornado',
nox_passthrough_opts: '--ssh-tests',
python_version: 'py2',
testrun_timeout: 6,
use_spot_instances: true)
// vim: ft=groovy

@@ -1,4 +1,4 @@
@Library('salt@master-1.6') _
@Library('salt@master-1.7') _
runTestSuite(
concurrent_builds: 1,

@@ -1,4 +1,4 @@
@Library('salt@master-1.6') _
@Library('salt@master-1.7') _
runTestSuite(
concurrent_builds: 0,

@@ -1,4 +1,4 @@
@Library('salt@master-1.6') _
@Library('salt@master-1.7') _
runTestSuite(
concurrent_builds: 1,

@@ -1,4 +1,4 @@
@Library('salt@master-1.6') _
@Library('salt@master-1.7') _
runTestSuite(
concurrent_builds: 1,

@@ -1,4 +1,4 @@
@Library('salt@master-1.6') _
@Library('salt@master-1.7') _
runTestSuite(
concurrent_builds: 1,

@@ -1,4 +1,4 @@
@Library('salt@master-1.6') _
@Library('salt@master-1.7') _
runTestSuite(
concurrent_builds: 1,

@@ -1,4 +1,4 @@
@Library('salt@master-1.6') _
@Library('salt@master-1.7') _
runTestSuite(
concurrent_builds: 1,

@@ -1,4 +1,4 @@
@Library('salt@master-1.6') _
@Library('salt@master-1.7') _
runTestSuite(
concurrent_builds: 1,

@@ -1,16 +0,0 @@
@Library('salt@master-1.6') _
runTestSuite(
concurrent_builds: 1,
distro_name: 'debian',
distro_version: '8',
env: env,
golden_images_branch: 'master',
jenkins_slave_label: 'kitchen-slave',
nox_env_name: 'runtests-zeromq',
nox_passthrough_opts: '--ssh-tests',
python_version: 'py2',
testrun_timeout: 6,
use_spot_instances: true)
// vim: ft=groovy

@@ -1,16 +0,0 @@
@Library('salt@master-1.6') _
runTestSuite(
concurrent_builds: 1,
distro_name: 'debian',
distro_version: '9',
env: env,
golden_images_branch: 'master',
jenkins_slave_label: 'kitchen-slave',
nox_env_name: 'runtests-zeromq',
nox_passthrough_opts: '--ssh-tests',
python_version: 'py2',
testrun_timeout: 6,
use_spot_instances: true)
// vim: ft=groovy

@@ -1,4 +1,4 @@
@Library('salt@master-1.6') _
@Library('salt@master-1.7') _
runTestSuite(
concurrent_builds: 1,

@@ -1,16 +0,0 @@
@Library('salt@master-1.6') _
runTestSuite(
concurrent_builds: 1,
distro_name: 'fedora',
distro_version: '30',
env: env,
golden_images_branch: 'master',
jenkins_slave_label: 'kitchen-slave',
nox_env_name: 'runtests-zeromq',
nox_passthrough_opts: '--ssh-tests',
python_version: 'py2',
testrun_timeout: 6,
use_spot_instances: true)
// vim: ft=groovy

@@ -1,4 +1,4 @@
@Library('salt@master-1.6') _
@Library('salt@master-1.7') _
runTestSuite(
concurrent_builds: 1,

@@ -1,4 +1,4 @@
@Library('salt@master-1.6') _
@Library('salt@master-1.7') _
runTestSuite(
concurrent_builds: 1,

@@ -1,20 +0,0 @@
@Library('salt@master-1.6') _
// Pre-nox pipeline
runTestSuite(
concurrent_builds: 1,
distro_name: 'macosx',
distro_version: 'highsierra',
env: env,
golden_images_branch: 'master',
jenkins_slave_label: 'kitchen-slave-mac',
kitchen_platforms_file: '/var/jenkins/workspace/pre-golden-nox-platforms.yml',
kitchen_verifier_file: '/var/jenkins/workspace/nox-verifier.yml',
nox_env_name: 'runtests-zeromq',
nox_passthrough_opts: '',
python_version: 'py2',
run_full: params.runFull,
testrun_timeout: 6,
use_spot_instances: false)
// vim: ft=groovy

@@ -1,4 +1,4 @@
@Library('salt@master-1.6') _
@Library('salt@master-1.7') _
// Pre-nox pipeline
runTestSuite(

@@ -1,20 +0,0 @@
@Library('salt@master-1.6') _
// Pre-nox pipeline
runTestSuite(
concurrent_builds: 1,
distro_name: 'macosx',
distro_version: 'mojave',
env: env,
golden_images_branch: 'master',
jenkins_slave_label: 'kitchen-slave-mac',
kitchen_platforms_file: '/var/jenkins/workspace/pre-golden-nox-platforms.yml',
kitchen_verifier_file: '/var/jenkins/workspace/nox-verifier.yml',
nox_env_name: 'runtests-zeromq',
nox_passthrough_opts: '',
python_version: 'py2',
run_full: params.runFull,
testrun_timeout: 6,
use_spot_instances: false)
// vim: ft=groovy

@@ -1,4 +1,4 @@
@Library('salt@master-1.6') _
@Library('salt@master-1.7') _
// Pre-nox pipeline
runTestSuite(

@@ -1,20 +0,0 @@
@Library('salt@master-1.6') _
// Pre-nox pipeline
runTestSuite(
concurrent_builds: 1,
distro_name: 'macosx',
distro_version: 'sierra',
env: env,
golden_images_branch: 'master',
jenkins_slave_label: 'kitchen-slave-mac',
kitchen_platforms_file: '/var/jenkins/workspace/pre-golden-nox-platforms.yml',
kitchen_verifier_file: '/var/jenkins/workspace/nox-verifier.yml',
nox_env_name: 'runtests-zeromq',
nox_passthrough_opts: '',
python_version: 'py2',
run_full: params.runFull,
testrun_timeout: 6,
use_spot_instances: false)
// vim: ft=groovy

@@ -1,4 +1,4 @@
@Library('salt@master-1.6') _
@Library('salt@master-1.7') _
// Pre-nox pipeline
runTestSuite(

@@ -1,16 +0,0 @@
@Library('salt@master-1.6') _
runTestSuite(
concurrent_builds: 1,
distro_name: 'opensuse',
distro_version: '15',
env: env,
golden_images_branch: 'master',
jenkins_slave_label: 'kitchen-slave',
nox_env_name: 'runtests-zeromq',
nox_passthrough_opts: '--ssh-tests',
python_version: 'py2',
testrun_timeout: 6,
use_spot_instances: true)
// vim: ft=groovy

@@ -1,4 +1,4 @@
@Library('salt@master-1.6') _
@Library('salt@master-1.7') _
runTestSuite(
concurrent_builds: 1,

@@ -1,16 +0,0 @@
@Library('salt@master-1.6') _
runTestSuite(
concurrent_builds: 1,
distro_name: 'ubuntu',
distro_version: '1604',
env: env,
golden_images_branch: 'master',
jenkins_slave_label: 'kitchen-slave',
nox_env_name: 'runtests-zeromq',
nox_passthrough_opts: '--ssh-tests',
python_version: 'py2',
testrun_timeout: 6,
use_spot_instances: true)
// vim: ft=groovy

@@ -1,16 +0,0 @@
@Library('salt@master-1.6') _
runTestSuite(
concurrent_builds: 1,
distro_name: 'ubuntu',
distro_version: '1604',
env: env,
golden_images_branch: 'master',
jenkins_slave_label: 'kitchen-slave',
nox_env_name: 'runtests-zeromq-m2crypto',
nox_passthrough_opts: '--ssh-tests',
python_version: 'py2',
testrun_timeout: 6,
use_spot_instances: true)
// vim: ft=groovy

@@ -1,17 +0,0 @@
@Library('salt@master-1.6') _
runTestSuite(
concurrent_builds: 1,
distro_name: 'ubuntu',
distro_version: '1604',
env: env,
extra_codecov_flags: ["proxy"],
golden_images_branch: 'master',
jenkins_slave_label: 'kitchen-slave',
nox_env_name: 'runtests-zeromq',
nox_passthrough_opts: '--proxy',
python_version: 'py2',
testrun_timeout: 6,
use_spot_instances: true)
// vim: ft=groovy

@@ -1,16 +0,0 @@
@Library('salt@master-1.6') _
runTestSuite(
concurrent_builds: 1,
distro_name: 'ubuntu',
distro_version: '1604',
env: env,
golden_images_branch: 'master',
jenkins_slave_label: 'kitchen-slave',
nox_env_name: 'runtests-zeromq-pycryptodomex',
nox_passthrough_opts: '--ssh-tests',
python_version: 'py2',
testrun_timeout: 6,
use_spot_instances: true)
// vim: ft=groovy

@@ -1,16 +0,0 @@
@Library('salt@master-1.6') _
runTestSuite(
concurrent_builds: 1,
distro_name: 'ubuntu',
distro_version: '1604',
env: env,
golden_images_branch: 'master',
jenkins_slave_label: 'kitchen-slave',
nox_env_name: 'runtests-tcp',
nox_passthrough_opts: '--ssh-tests',
python_version: 'py2',
testrun_timeout: 6,
use_spot_instances: true)
// vim: ft=groovy

@@ -1,16 +0,0 @@
@Library('salt@master-1.6') _
runTestSuite(
concurrent_builds: 1,
distro_name: 'ubuntu',
distro_version: '1604',
env: env,
golden_images_branch: 'master',
jenkins_slave_label: 'kitchen-slave',
nox_env_name: 'runtests-tornado',
nox_passthrough_opts: '--ssh-tests',
python_version: 'py2',
testrun_timeout: 6,
use_spot_instances: true)
// vim: ft=groovy

@@ -1,4 +1,4 @@
@Library('salt@master-1.6') _
@Library('salt@master-1.7') _
runTestSuite(
concurrent_builds: 1,

@@ -1,4 +1,4 @@
@Library('salt@master-1.6') _
@Library('salt@master-1.7') _
runTestSuite(
concurrent_builds: 1,

@@ -1,4 +1,4 @@
@Library('salt@master-1.6') _
@Library('salt@master-1.7') _
runTestSuite(
concurrent_builds: 1,

@@ -1,4 +1,4 @@
@Library('salt@master-1.6') _
@Library('salt@master-1.7') _
runTestSuite(
concurrent_builds: 1,

@@ -1,4 +1,4 @@
@Library('salt@master-1.6') _
@Library('salt@master-1.7') _
runTestSuite(
concurrent_builds: 1,

@@ -1,16 +0,0 @@
@Library('salt@master-1.6') _
runTestSuite(
concurrent_builds: 1,
distro_name: 'ubuntu',
distro_version: '1804',
env: env,
golden_images_branch: 'master',
jenkins_slave_label: 'kitchen-slave',
nox_env_name: 'runtests-zeromq',
nox_passthrough_opts: '--ssh-tests',
python_version: 'py2',
testrun_timeout: 6,
use_spot_instances: true)
// vim: ft=groovy

@@ -1,4 +1,4 @@
@Library('salt@master-1.6') _
@Library('salt@master-1.7') _
runTestSuite(
concurrent_builds: 1,

@@ -1,16 +0,0 @@
@Library('salt@master-1.6') _
runTestSuite(
concurrent_builds: 1,
distro_name: 'windows',
distro_version: '2016',
env: env,
golden_images_branch: 'master',
jenkins_slave_label: 'kitchen-slave',
nox_env_name: 'runtests-zeromq',
nox_passthrough_opts: '--unit',
python_version: 'py2',
testrun_timeout: 8,
use_spot_instances: false)
// vim: ft=groovy

@@ -1,4 +1,4 @@
@Library('salt@master-1.6') _
@Library('salt@master-1.7') _
runTestSuite(
concurrent_builds: 1,

@@ -1,16 +0,0 @@
@Library('salt@master-1.6') _
runTestSuite(
concurrent_builds: 1,
distro_name: 'windows',
distro_version: '2019',
env: env,
golden_images_branch: 'master',
jenkins_slave_label: 'kitchen-slave',
nox_env_name: 'runtests-zeromq',
nox_passthrough_opts: '--unit',
python_version: 'py2',
testrun_timeout: 8,
use_spot_instances: false)
// vim: ft=groovy

@@ -1,4 +1,4 @@
@Library('salt@master-1.6') _
@Library('salt@master-1.7') _
runTestSuite(
concurrent_builds: 1,

@@ -1,4 +1,4 @@
@Library('salt@master-1.6') _
@Library('salt@master-1.7') _
runLint(
env: env)

.ci/pre-commit (new file)
@@ -0,0 +1,6 @@
@Library('salt@master-1.7') _
runPreCommit(
env: env)
// vim: ft=groovy

@@ -2,21 +2,24 @@ codecov:
ci:
- drone.saltstack.com
- jenkinsci.saltstack.com
# max_report_age: 24 # The age you want coverage reports to expire at, or if you
# # want to disable this check. Expired reports will not be processed by codecov.
# require_ci_to_pass: yes # Less spammy. Only notify on passing builds.
branch: master
notify: off # Disable Notifications
# notify:
# require_ci_to_pass: yes # Less spammy. Only notify on passing builds.
# after_n_builds: 46 # Only notify after N builds
# # This value is the output of:
# # sh -c 'echo "$(ls .ci/ | grep kitchen | wc -l)"'
# wait_for_ci: yes # Should Codecov wait for all CI statuses to complete before sending ours.
# # Note: Codecov considers all non-codecov statues to be CI statuses
# Disable Notifications
notify: off
ignore:
- ^*.py$ # python files at the repo root, ie, setup.py
- doc/.* # ignore any code under doc/
- salt/ext/.* # ignore any code under salt/ext
coverage:
round: up
@@ -27,7 +30,7 @@ coverage:
project: # measuring the overall project coverage
default: false # disable the default status that measures entire project
salt: # declare a new status context "salt"
enabled: yes # must be yes|true to enable this status
enabled: no # must be yes|true to enable this status
paths: "!tests/" # remove all files in "tests/"
target: auto # will use the coverage from the base commit (pull request base or parent commit) coverage to compare against.
base: auto # will use the pull request base if the commit is on a pull request. If not, the parent commit will be used.
@@ -36,7 +39,7 @@ coverage:
if_not_found: success # if parent is not found report status as success, error, or failure
if_ci_failed: error # if ci fails report status as success, error, or failure
tests: # declare a new status context "tests"
enabled: yes # must be yes|true to enable this status
enabled: no # must be yes|true to enable this status
#target: 100% # we always want 100% coverage here
target: auto # auto while we get this going
base: auto # will use the pull request base if the commit is on a pull request. If not, the parent commit will be used.
@@ -50,7 +53,7 @@ coverage:
# entire pull requests Coverage Diff. Checking if the lines
# adjusted are covered at least X%.
default:
enabled: yes # must be yes|true to enable this status
enabled: no # must be yes|true to enable this status
target: 100% # Newly added lines must have 100% coverage
if_no_uploads: error # will post commit status of "error" if no coverage reports were uploaded
# options: success, error, failure
@@ -59,7 +62,7 @@ coverage:
changes: # if there are any unexpected changes in coverage
default:
enabled: yes # must be yes|true to enable this status
enabled: no # must be yes|true to enable this status
if_no_uploads: error
if_not_found: success
if_ci_failed: error
@@ -68,9 +71,11 @@ flags:
salt:
paths:
- salt/
carryforward: true # https://docs.codecov.io/docs/carryforward-flags
tests:
paths:
- tests/
carryforward: true
#comment:
# layout: "reach, diff, flags, files"

@@ -0,0 +1,2 @@
# Blacken Salt
0b2a5613b345f17339cb90e60b407199b3d26980

.github/ISSUE_TEMPLATE/bug_report.md (new file)
@@ -0,0 +1,35 @@
---
name: Bug report
about: Create a report to help us improve
title: "[BUG]"
labels: bug
assignees: ''
---
**Description**
A clear and concise description of what the bug is.
**Setup**
(Please provide relevant configs and/or SLS files (be sure to remove sensitive info).
**Steps to Reproduce the behavior**
(Include debug logs if possible and relevant)
**Expected behavior**
A clear and concise description of what you expected to happen.
**Screenshots**
If applicable, add screenshots to help explain your problem.
**Versions Report**
<details><summary>salt --versions-report</summary>
(Provided by running salt --versions-report. Please also mention any differences in master/minion versions.)
```
PASTE HERE
```
</details>
**Additional context**
Add any other context about the problem here.

.github/ISSUE_TEMPLATE/config.yml (new file)
@@ -0,0 +1,14 @@
blank_issues_enabled: true
contact_links:
- name: Salt Community Slack
url: https://saltstackcommunity.slack.com/
about: Please ask and answer questions here.
- name: Salt-Users Forum
url: https://groups.google.com/forum/#!forum/salt-users
about: Please ask and answer questions here.
- name: Salt on Freenode
url: http://webchat.freenode.net/?channels=salt&uio=Mj10cnVlJjk9dHJ1ZSYxMD10cnVl83
about: Please ask and answer questions here.
- name: Security vulnerabilities
email: security@saltstack.com
about: Please report security vulnerabilities here.

@@ -0,0 +1,23 @@
---
name: Feature request
about: Suggest an idea for this project
title: "[FEATURE REQUEST]"
labels: Feature
assignees: ''
---
**Is your feature request related to a problem? Please describe.**
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
**Describe the solution you'd like**
A clear and concise description of what you want to happen.
**Describe alternatives you've considered**
A clear and concise description of any alternative solutions or features you've considered.
**Additional context**
Add any other context or screenshots about the feature request here.
**Please Note**
If this feature request would be considered a substantial change or addition, this should go through a SEP process here https://github.com/saltstack/salt-enhancement-proposals, instead of a feature request.

@@ -1,6 +1,7 @@
### What does this PR do?
### What issues does this PR fix or reference?
Fixes:
### Previous Behavior
Remove this section if not relevant
@@ -8,16 +9,16 @@ Remove this section if not relevant
### New Behavior
Remove this section if not relevant
### Tests written?
### Merge requirements satisfied?
**[NOTICE] Bug fixes or features added to Salt require tests.**
Please review the [test documentation](https://docs.saltstack.com/en/latest/topics/tutorials/writing_tests.html) for details on how to implement tests into Salt's test suite.
Yes/No
<!-- Please review the [test documentation](https://docs.saltstack.com/en/master/topics/tutorials/writing_tests.html) for details on how to implement tests into Salt's test suite. -->
- [ ] Docs
- [ ] Changelog
- [ ] Tests written/updated
### Commits signed with GPG?
Yes/No
Please review [Salt's Contributing Guide](https://docs.saltstack.com/en/latest/topics/development/contributing.html) for best practices.
Please review [Salt's Contributing Guide](https://docs.saltstack.com/en/master/topics/development/contributing.html) for best practices.
See GitHub's [page on GPG signing](https://help.github.com/articles/signing-commits-using-gpg/) for more information about signing commits with GPG.

.github/stale.yml
@@ -2,10 +2,10 @@
# Number of days of inactivity before an issue becomes stale
# 600 is approximately 1 year and 8 months
daysUntilStale: 30
daysUntilStale: 90
# Number of days of inactivity before a stale issue is closed
daysUntilClose: 7
daysUntilClose: false
# Issues with these labels will never be considered stale
exemptLabels:
@@ -17,6 +17,7 @@ exemptLabels:
- Bug
- Feature
- Test Failure
- ZD
# Label to use when marking an issue as stale
staleLabel: stale
@@ -36,4 +37,3 @@ closeComment: false
# Limit to only `issues` or `pulls`
only: issues

.gitignore
@@ -114,3 +114,4 @@ kitchen.local.yml
.bundle/
Gemfile.lock
/artifacts/
requirements/static/py*/*.log

@@ -1,120 +1,11 @@
default_language_version:
python: python3
exclude: ^(doc/_static/.*|doc/_themes/.*)$
repos:
- repo: https://github.com/saltstack/pip-tools-compile-impersonate
rev: master
hooks:
- id: pip-tools-compile
alias: compile-linux-py2.7-zmq-requirements
name: Linux Py2.7 ZeroMQ Requirements
files: ^requirements/((base|zeromq|pytest)\.txt|static/linux\.in)$
exclude: ^requirements/static/(lint|cloud|docs|darwin|windows)\.in$
args:
- -v
- --py-version=2.7
- --platform=linux
- --include=requirements/base.txt
- --include=requirements/zeromq.txt
- --include=requirements/pytest.txt
- id: pip-tools-compile
alias: compile-darwin-py2.7-zmq-requirements
name: Darwin Py2.7 ZeroMQ Requirements
files: ^(pkg/osx/(req|req_ext)\.txt|requirements/((base|zeromq|pytest)\.txt|static/darwin\.in))$
args:
- -v
- --py-version=2.7
- --platform=darwin
- --include=pkg/osx/req.txt
- --include=pkg/osx/req_ext.txt
- --include=requirements/base.txt
- --include=requirements/zeromq.txt
- --include=requirements/pytest.txt
- --passthrough-line-from-input=^pyobjc(.*)$
- id: pip-tools-compile
alias: compile-windows-py2.7-zmq-requirements
name: Windows Py2.7 ZeroMQ Requirements
files: ^(pkg/windows/(req|req_win)\.txt|requirements/((base|zeromq|pytest)\.txt|static/windows\.in))$
args:
- -v
- --py-version=2.7
- --platform=windows
- --include=pkg/windows/req.txt
- --include=pkg/windows/req_win.txt
- --include=requirements/base.txt
- --include=requirements/zeromq.txt
- --include=requirements/pytest.txt
- id: pip-tools-compile
alias: compile-cloud-py2.7-requirements
name: Cloud Py2.7 Requirements
files: ^requirements/(static/cloud\.in)$
args:
- -v
- --py-version=2.7
- id: pip-tools-compile
alias: compile-linux-crypto-py2.7-requirements
name: Linux Py2.7 Crypto Requirements
files: ^requirements/(crypto\.txt|static/crypto\.in)$
args:
- -v
- --py-version=2.7
- --platform=linux
- --out-prefix=linux
- id: pip-tools-compile
alias: compile-darwin-crypto-py2.7-requirements
name: Darwin Py2.7 Crypto Requirements
files: ^requirements/(crypto\.txt|static/crypto\.in)$
args:
- -v
- --py-version=2.7
- --platform=darwin
- --out-prefix=darwin
- id: pip-tools-compile
alias: compile-windows-crypto-py2.7-requirements
name: Windows Py2.7 Crypto Requirements
files: ^requirements/(crypto\.txt|static/crypto\.in)$
args:
- -v
- --py-version=2.7
- --platform=windows
- --out-prefix=windows
- id: pip-tools-compile
alias: compile-linux-py3.4-zmq-requirements
name: Linux Py3.4 ZeroMQ Requirements
files: ^requirements/((base|zeromq|pytest)\.txt|static/linux\.in)$
exclude: ^requirements/static/(centos-6|amzn-2018\.03|lint|cloud|docs|darwin|windows)\.in$
args:
- -v
- --py-version=3.4
- --platform=linux
- --include=requirements/base.txt
- --include=requirements/zeromq.txt
- --include=requirements/pytest.txt
- id: pip-tools-compile
alias: compile-cloud-py3.4-requirements
name: Cloud Py3.4 Requirements
files: ^requirements/(static/cloud\.in)$
args:
- -v
- --py-version=3.4
- id: pip-tools-compile
alias: compile-linux-crypto-py3.4-requirements
name: Linux Py3.4 Crypto Requirements
files: ^requirements/(crypto\.txt|static/crypto\.in)$
args:
- -v
- --py-version=3.4
- --platform=linux
- --out-prefix=linux
- id: pip-tools-compile
alias: compile-linux-py3.5-zmq-requirements
@@ -132,13 +23,14 @@ repos:
- id: pip-tools-compile
alias: compile-darwin-py3.5-zmq-requirements
name: Darwin Py3.5 ZeroMQ Requirements
files: ^(pkg/osx/(req|req_ext)\.txt|requirements/((base|zeromq|pytest)\.txt|static/darwin\.in))$
files: ^(pkg/osx/(req|req_ext|req_pyobjc)\.txt|requirements/((base|zeromq|pytest)\.txt|static/darwin\.in))$
args:
- -v
- --py-version=3.5
- --platform=darwin
- --include=pkg/osx/req.txt
- --include=pkg/osx/req_ext.txt
- --include=pkg/osx/req_pyobjc.txt
- --include=requirements/base.txt
- --include=requirements/zeromq.txt
- --include=requirements/pytest.txt
@@ -231,13 +123,14 @@ repos:
- id: pip-tools-compile
alias: compile-darwin-py3.6-zmq-requirements
name: Darwin Py3.6 ZeroMQ Requirements
files: ^(pkg/osx/(req|req_ext)\.txt|requirements/((base|zeromq|pytest)\.txt|static/darwin\.in))$
files: ^(pkg/osx/(req|req_ext|req_pyobjc)\.txt|requirements/((base|zeromq|pytest)\.txt|static/darwin\.in))$
args:
- -v
- --py-version=3.6
- --platform=darwin
- --include=pkg/osx/req.txt
- --include=pkg/osx/req_ext.txt
- --include=pkg/osx/req_pyobjc.txt
- --include=requirements/base.txt
- --include=requirements/zeromq.txt
- --include=requirements/pytest.txt
@@ -330,13 +223,14 @@ repos:
- id: pip-tools-compile
alias: compile-darwin-py3.7-zmq-requirements
name: Darwin Py3.7 ZeroMQ Requirements
files: ^(pkg/osx/(req|req_ext)\.txt|requirements/((base|zeromq|pytest)\.txt|static/darwin\.in))$
files: ^(pkg/osx/(req|req_ext|req_pyobjc)\.txt|requirements/((base|zeromq|pytest)\.txt|static/darwin\.in))$
args:
- -v
- --py-version=3.7
- --platform=darwin
- --include=pkg/osx/req.txt
- --include=pkg/osx/req_ext.txt
- --include=pkg/osx/req_pyobjc.txt
- --include=requirements/base.txt
- --include=requirements/zeromq.txt
- --include=requirements/pytest.txt
@@ -412,6 +306,32 @@ repos:
- --py-version=3.7
- --platform=linux
- repo: https://github.com/timothycrosley/isort
rev: '1e78a9acf3110e1f9721feb591f89a451fc9876a'
hooks:
- id: isort
additional_dependencies: ['toml']
# This tells pre-commit not to pass files to isort.
# This should be kept in sync with pyproject.toml
exclude: >
(?x)^(
templates/.*|
salt/ext/.*|
tests/kitchen/.*
)$
- repo: https://github.com/psf/black
rev: stable
hooks:
- id: black
# This tells pre-commit not to pass files to black.
# This should be kept in sync with pyproject.toml
exclude: >
(?x)^(
templates/.*|
salt/ext/.*|
tests/kitchen/.*
)$
- repo: https://github.com/saltstack/salt-nox-pre-commit
rev: master

@@ -8,11 +8,11 @@ extension-pkg-whitelist=
# Add files or directories to the blacklist. They should be base names, not
# paths.
ignore=CVS,
ext
ext,
# Add files or directories matching the regex patterns to the blacklist. The
# regex matches against base names, not paths.
ignore-patterns=
ignore-patterns=salt.ext.*
# Python code to execute, usually for sys.path manipulation such as
# pygtk.require().
@@ -77,6 +77,7 @@ disable=R,
no-member,
unsubscriptable-object,
un-indexed-curly-braces-error,
whitespace-before-colon,
indentation-is-not-a-multiple-of-four-comment,
blacklisted-name,
invalid-name,
@@ -126,6 +127,7 @@ disable=R,
str-format-in-logging,
import-outside-toplevel,
deprecated-method,
repr-flag-used-in-string,
keyword-arg-before-vararg
# Enable the message, report, category or checker with the given id(s). You can
@@ -396,6 +398,7 @@ init-import=no
# List of qualified module names which can have objects that can redefine
# builtins.
redefining-builtins-modules=six.moves,
salt.ext.six.moves,
past.builtins,
future.builtins,
builtins,
@@ -472,7 +475,9 @@ ignored-classes=SQLObject
# (useful for modules/projects where namespaces are manipulated during runtime
# and thus existing member attributes cannot be deduced by static analysis). It
# supports qualified module names, as well as Unix pattern matching.
ignored-modules=
ignored-modules=salt.ext.six.moves,
six.moves,
_MovedItems,
# Show a hint with possible names when a member name was not found. The aspect
# of finding the hint is based on edit distance.
@@ -511,7 +516,7 @@ min-similarity-lines=4
fileperms-default=0644
# File paths to ignore file permission. Glob patterns allowed.
fileperms-ignore-paths=setup.py,noxfile.py,tests/runtests.py,tests/jenkins*.py,tests/saltsh.py,tests/buildpackage.py
fileperms-ignore-paths=setup.py,noxfile.py,tests/runtests.py,tests/jenkins*.py,tests/saltsh.py,tests/buildpackage.py,tests/unit/files/rosters/ansible/roster.py
[MODERNIZE]

@@ -15,7 +15,9 @@ Versions are `MAJOR.PATCH`.
### Changed
### Fixed
- [#56325](https://github.com/saltstack/salt/pull/56325) - Fix hyperlinks to `salt.serializers` and other documentation issues - [@myii](https://github.com/myii)
- [#56237](https://github.com/saltstack/salt/pull/56237) - Fix alphabetical ordering and remove duplicates across all documentation indexes - [@myii](https://github.com/myii)
- [#56082](https://github.com/saltstack/salt/pull/56082) - Fix saltversioninfo grain for new version
- [#56143](https://github.com/saltstack/salt/pull/56143) - Use encoding when caching pillar data
- [#56172](https://github.com/saltstack/salt/pull/56172) - Only change mine data if using new allow_tgt feature
@@ -41,6 +43,10 @@ Versions are `MAJOR.PATCH`.
- [#56310](https://github.com/saltstack/salt/pull/56310) - Only process ADMX files when loading policies
- [#56327](https://github.com/saltstack/salt/pull/56327) - keep cache_copied_files variable a list
- [#56360](https://github.com/saltstack/salt/pull/56360) - dont require virtualenv.virtualenv_version call, removed in 20.0.10
- [#56378](https://github.com/saltstack/salt/pull/56378) - Include _version.py if building wheel
- [#56376](https://github.com/saltstack/salt/pull/56376) - Fix win deps
- [#56418](https://github.com/saltstack/salt/pull/56418) - Ensure version.py included before we install
- [#56435](https://github.com/saltstack/salt/pull/56435) - Update mac build scripts
### Added

@@ -506,6 +506,12 @@
# Boolean to run command via sudo.
#ssh_sudo: False
# Boolean to run ssh_pre_flight script defined in roster. By default
# the script will only run if the thin_dir does not exist on the targeted
# minion. This forces the script to run regardless of the thin dir existing
# or not.
#ssh_run_pre_flight: True
# Number of seconds to wait for a response when establishing an SSH connection.
#ssh_timeout: 60
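
The ssh_run_pre_flight comment added above refers to the roster-side ssh_pre_flight key; a minimal sketch of how the two settings fit together (hypothetical target name, IP, and script path — not part of this commit):

```yaml
# /etc/salt/roster -- per-target pre-flight script (sketch)
minion1:
  host: 203.0.113.10
  user: root
  ssh_pre_flight: /srv/salt/pre_flight.sh

# /etc/salt/master -- force the script to run even when thin_dir already exists
ssh_run_pre_flight: True
```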

@@ -1,6 +1,6 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
"""
:codeauthor: Pedro Algarvio (pedro@algarvio.me)
@@ -8,54 +8,49 @@
~~~~~~~~~~~~~~~~~~~~~~
Setup the Transifex client configuration file
'''
"""
import getpass
# Import python libs
import os
import sys
import getpass
import ConfigParser
HOST = 'https://www.transifex.com'
HOST = "https://www.transifex.com"
RCFILE = os.path.abspath(
os.environ.get(
'TRANSIFEX_RC',
os.path.expanduser('~/.transifexrc')
)
os.environ.get("TRANSIFEX_RC", os.path.expanduser("~/.transifexrc"))
)
def main():
'''
"""
Run the setup code
'''
"""
print(
'This script will setup a Transifex client configuration file, or, '
'if it already exists, make some minimal checks to see if it\'s '
'properly configured\n'
"This script will setup a Transifex client configuration file, or, "
"if it already exists, make some minimal checks to see if it's "
"properly configured\n"
)
if not os.path.exists(RCFILE):
while True:
username = os.environ.get('TRANSIFEX_USER', None)
username = os.environ.get("TRANSIFEX_USER", None)
if username is not None:
break
try:
username = raw_input(
'What is your username on Transifex.com? '
)
username = raw_input("What is your username on Transifex.com? ")
if username:
break
except KeyboardInterrupt:
print
sys.exit(1)
while True:
password = os.environ.get('TRANSIFEX_PASS', None)
password = os.environ.get("TRANSIFEX_PASS", None)
if password is not None:
break
try:
password = getpass.getpass(
'What is your password on Transifex.com? '
)
password = getpass.getpass("What is your password on Transifex.com? ")
if password:
break
except KeyboardInterrupt:
@@ -64,16 +59,16 @@ def main():
config = ConfigParser.SafeConfigParser()
config.add_section(HOST)
config.set(HOST, 'token', '')
config.set(HOST, 'hostname', HOST)
config.set(HOST, 'username', username)
config.set(HOST, 'password', password)
config.set(HOST, "token", "")
config.set(HOST, "hostname", HOST)
config.set(HOST, "username", username)
config.set(HOST, "password", password)
config.write(open(RCFILE, 'w'))
print('username and password stored in \'{0}\''.format(RCFILE))
config.write(open(RCFILE, "w"))
print("username and password stored in '{0}'".format(RCFILE))
os.chmod(RCFILE, 0600)
print('Secured the permissions on \'{0}\' to 0600'.format(RCFILE))
print("Secured the permissions on '{0}' to 0600".format(RCFILE))
sys.exit(0)
@@ -82,24 +77,30 @@ def main():
config.read([RCFILE])
if not config.has_section(HOST):
print('\'~/.transifexrc\' is not properly configured, it\'s missing '
'the {0} section'.format(HOST))
print(
"'~/.transifexrc' is not properly configured, it's missing "
"the {0} section".format(HOST)
)
for setting in ('username', 'password', 'hostname', 'token'):
for setting in ("username", "password", "hostname", "token"):
if not config.has_option(HOST, setting):
print('\'~/.transifexrc\' is not properly configured, it\'s '
'missing the {0} option'.format(setting))
print(
"'~/.transifexrc' is not properly configured, it's "
"missing the {0} option".format(setting)
)
sys.exit(1)
if setting == 'token':
if setting == "token":
# Token should be left empty
continue
if not config.get(HOST, setting):
print('\'~/.transifexrc\' is not properly configured, it\'s '
'missing a value for the {0} option'.format(setting))
print(
"'~/.transifexrc' is not properly configured, it's "
"missing a value for the {0} option".format(setting)
)
sys.exit(1)
if __name__ == '__main__':
if __name__ == "__main__":
main()

@@ -13,18 +13,16 @@ import re
from docutils import nodes
from docutils.parsers.rst.roles import set_classes
from pygments.lexer import RegexLexer, bygroups
from pygments.lexers import get_lexer_by_name
from pygments.token import Literal, Text, Operator, Keyword, Name, Number
from pygments.token import Keyword, Literal, Name, Number, Operator, Text
from pygments.util import ClassNotFound
from sphinx import addnodes
from sphinx.roles import XRefRole
from sphinx.domains import Domain, ObjType, Index
from sphinx.directives import ObjectDescription
from sphinx.util.nodes import make_refnode
from sphinx.domains import Domain, Index, ObjType
from sphinx.roles import XRefRole
from sphinx.util.docfields import GroupedField, TypedField
from sphinx.util.nodes import make_refnode
class DocRef(object):
@@ -44,252 +42,275 @@ class DocRef(object):
location of the RFC which defines some HTTP method.
"""
return '{0}#{1}{2}'.format(self.base_url, self.anchor, self.section)
return "{0}#{1}{2}".format(self.base_url, self.anchor, self.section)
#: The URL of the HTTP/1.1 RFC which defines the HTTP methods OPTIONS, GET,
#: HEAD, POST, PUT, DELETE, TRACE, and CONNECT.
RFC2616 = 'http://www.w3.org/Protocols/rfc2616/rfc2616-sec9.html'
RFC2616 = "http://www.w3.org/Protocols/rfc2616/rfc2616-sec9.html"
#: The name to use for section anchors in RFC2616.
RFC2616ANCHOR = 'sec'
RFC2616ANCHOR = "sec"
#: The URL of the RFC which defines the HTTP PATCH method.
RFC5789 = 'http://tools.ietf.org/html/rfc5789'
RFC5789 = "http://tools.ietf.org/html/rfc5789"
#: The name to use for section anchors in RFC5789.
RFC5789ANCHOR = 'section-'
RFC5789ANCHOR = "section-"
#: Mapping from lowercase HTTP method name to :class:`DocRef` object which
#: maintains the URL which points to the section of the RFC which defines that
#: HTTP method.
DOCREFS = {
'patch': DocRef(RFC5789, RFC5789ANCHOR, 2),
'options': DocRef(RFC2616, RFC2616ANCHOR, 9.2),
'get': DocRef(RFC2616, RFC2616ANCHOR, 9.3),
'head': DocRef(RFC2616, RFC2616ANCHOR, 9.4),
'post': DocRef(RFC2616, RFC2616ANCHOR, 9.5),
'put': DocRef(RFC2616, RFC2616ANCHOR, 9.6),
'delete': DocRef(RFC2616, RFC2616ANCHOR, 9.7),
'trace': DocRef(RFC2616, RFC2616ANCHOR, 9.8),
'connect': DocRef(RFC2616, RFC2616ANCHOR, 9.9)
"patch": DocRef(RFC5789, RFC5789ANCHOR, 2),
"options": DocRef(RFC2616, RFC2616ANCHOR, 9.2),
"get": DocRef(RFC2616, RFC2616ANCHOR, 9.3),
"head": DocRef(RFC2616, RFC2616ANCHOR, 9.4),
"post": DocRef(RFC2616, RFC2616ANCHOR, 9.5),
"put": DocRef(RFC2616, RFC2616ANCHOR, 9.6),
"delete": DocRef(RFC2616, RFC2616ANCHOR, 9.7),
"trace": DocRef(RFC2616, RFC2616ANCHOR, 9.8),
"connect": DocRef(RFC2616, RFC2616ANCHOR, 9.9),
}
HTTP_STATUS_CODES = {
100: 'Continue',
101: 'Switching Protocols',
102: 'Processing',
200: 'OK',
201: 'Created',
202: 'Accepted',
203: 'Non Authoritative Information',
204: 'No Content',
205: 'Reset Content',
206: 'Partial Content',
207: 'Multi Status',
226: 'IM Used', # see RFC 3229
300: 'Multiple Choices',
301: 'Moved Permanently',
302: 'Found',
303: 'See Other',
304: 'Not Modified',
305: 'Use Proxy',
307: 'Temporary Redirect',
400: 'Bad Request',
401: 'Unauthorized',
402: 'Payment Required', # unused
403: 'Forbidden',
404: 'Not Found',
405: 'Method Not Allowed',
406: 'Not Acceptable',
407: 'Proxy Authentication Required',
408: 'Request Timeout',
409: 'Conflict',
410: 'Gone',
411: 'Length Required',
412: 'Precondition Failed',
413: 'Request Entity Too Large',
414: 'Request URI Too Long',
415: 'Unsupported Media Type',
416: 'Requested Range Not Satisfiable',
417: 'Expectation Failed',
418: "I'm a teapot", # see RFC 2324
422: 'Unprocessable Entity',
423: 'Locked',
424: 'Failed Dependency',
426: 'Upgrade Required',
449: 'Retry With', # proprietary MS extension
500: 'Internal Server Error',
501: 'Not Implemented',
502: 'Bad Gateway',
503: 'Service Unavailable',
504: 'Gateway Timeout',
505: 'HTTP Version Not Supported',
507: 'Insufficient Storage',
510: 'Not Extended'
100: "Continue",
101: "Switching Protocols",
102: "Processing",
200: "OK",
201: "Created",
202: "Accepted",
203: "Non Authoritative Information",
204: "No Content",
205: "Reset Content",
206: "Partial Content",
207: "Multi Status",
226: "IM Used", # see RFC 3229
300: "Multiple Choices",
301: "Moved Permanently",
302: "Found",
303: "See Other",
304: "Not Modified",
305: "Use Proxy",
307: "Temporary Redirect",
400: "Bad Request",
401: "Unauthorized",
402: "Payment Required", # unused
403: "Forbidden",
404: "Not Found",
405: "Method Not Allowed",
406: "Not Acceptable",
407: "Proxy Authentication Required",
408: "Request Timeout",
409: "Conflict",
410: "Gone",
411: "Length Required",
412: "Precondition Failed",
413: "Request Entity Too Large",
414: "Request URI Too Long",
415: "Unsupported Media Type",
416: "Requested Range Not Satisfiable",
417: "Expectation Failed",
418: "I'm a teapot", # see RFC 2324
422: "Unprocessable Entity",
423: "Locked",
424: "Failed Dependency",
426: "Upgrade Required",
449: "Retry With", # proprietary MS extension
500: "Internal Server Error",
501: "Not Implemented",
502: "Bad Gateway",
503: "Service Unavailable",
504: "Gateway Timeout",
505: "HTTP Version Not Supported",
507: "Insufficient Storage",
510: "Not Extended",
}
http_sig_param_re = re.compile(r'\((?:(?P<type>[^:)]+):)?(?P<name>[\w_]+)\)',
re.VERBOSE)
http_sig_param_re = re.compile(
r"\((?:(?P<type>[^:)]+):)?(?P<name>[\w_]+)\)", re.VERBOSE
)
def http_resource_anchor(method, path):
path = re.sub(r'[<>:/]', '-', path)
return method.lower() + '-' + path
path = re.sub(r"[<>:/]", "-", path)
return method.lower() + "-" + path
class HTTPResource(ObjectDescription):
doc_field_types = [
TypedField('parameter', label='Parameters',
names=('param', 'parameter', 'arg', 'argument'),
typerolename='obj', typenames=('paramtype', 'type')),
TypedField('jsonparameter', label='JSON Parameters',
names=('jsonparameter', 'jsonparam', 'json'),
typerolename='obj', typenames=('jsonparamtype', 'jsontype')),
TypedField('queryparameter', label='Query Parameters',
names=('queryparameter', 'queryparam', 'qparam', 'query'),
typerolename='obj', typenames=('queryparamtype', 'querytype', 'qtype')),
GroupedField('formparameter', label='Form Parameters',
names=('formparameter', 'formparam', 'fparam', 'form')),
GroupedField('requestheader', label='Request Headers',
rolename='mailheader',
names=('reqheader', 'requestheader')),
GroupedField('responseheader', label='Response Headers',
rolename='mailheader',
names=('resheader', 'responseheader')),
GroupedField('statuscode', label='Status Codes',
rolename='statuscode',
names=('statuscode', 'status', 'code'))
TypedField(
"parameter",
label="Parameters",
names=("param", "parameter", "arg", "argument"),
typerolename="obj",
typenames=("paramtype", "type"),
),
TypedField(
"jsonparameter",
label="JSON Parameters",
names=("jsonparameter", "jsonparam", "json"),
typerolename="obj",
typenames=("jsonparamtype", "jsontype"),
),
TypedField(
"queryparameter",
label="Query Parameters",
names=("queryparameter", "queryparam", "qparam", "query"),
typerolename="obj",
typenames=("queryparamtype", "querytype", "qtype"),
),
GroupedField(
"formparameter",
label="Form Parameters",
names=("formparameter", "formparam", "fparam", "form"),
),
GroupedField(
"requestheader",
label="Request Headers",
rolename="mailheader",
names=("reqheader", "requestheader"),
),
GroupedField(
"responseheader",
label="Response Headers",
rolename="mailheader",
names=("resheader", "responseheader"),
),
GroupedField(
"statuscode",
label="Status Codes",
rolename="statuscode",
names=("statuscode", "status", "code"),
),
]
method = NotImplemented
def handle_signature(self, sig, signode):
method = self.method.upper() + ' '
method = self.method.upper() + " "
signode += addnodes.desc_name(method, method)
offset = 0
for match in http_sig_param_re.finditer(sig):
path = sig[offset:match.start()]
path = sig[offset : match.start()]
signode += addnodes.desc_name(path, path)
params = addnodes.desc_parameterlist()
typ = match.group('type')
typ = match.group("type")
if typ:
typ = typ + ': '
typ = typ + ": "
params += addnodes.desc_annotation(typ, typ)
name = match.group('name')
name = match.group("name")
params += addnodes.desc_parameter(name, name)
signode += params
offset = match.end()
if offset < len(sig):
path = sig[offset:len(sig)]
path = sig[offset : len(sig)]
signode += addnodes.desc_name(path, path)
fullname = self.method.upper() + ' ' + path
signode['method'] = self.method
signode['path'] = sig
signode['fullname'] = fullname
fullname = self.method.upper() + " " + path
signode["method"] = self.method
signode["path"] = sig
signode["fullname"] = fullname
return (fullname, self.method, sig)
def needs_arglist(self):
return False
def add_target_and_index(self, name_cls, sig, signode):
signode['ids'].append(http_resource_anchor(*name_cls[1:]))
self.env.domaindata['http'][self.method][sig] = (self.env.docname, '')
signode["ids"].append(http_resource_anchor(*name_cls[1:]))
self.env.domaindata["http"][self.method][sig] = (self.env.docname, "")
def get_index_text(self, modname, name):
return ''
return ""
class HTTPOptions(HTTPResource):
method = 'options'
method = "options"
class HTTPHead(HTTPResource):
method = 'head'
method = "head"
class HTTPPatch(HTTPResource):
method = 'patch'
method = "patch"
class HTTPPost(HTTPResource):
method = 'post'
method = "post"
class HTTPGet(HTTPResource):
method = 'get'
method = "get"
class HTTPPut(HTTPResource):
method = 'put'
method = "put"
class HTTPDelete(HTTPResource):
method = 'delete'
method = "delete"
class HTTPTrace(HTTPResource):
method = 'trace'
method = "trace"
def http_statuscode_role(name, rawtext, text, lineno, inliner,
options={}, content=[]):
def http_statuscode_role(name, rawtext, text, lineno, inliner, options={}, content=[]):
if text.isdigit():
code = int(text)
try:
status = HTTP_STATUS_CODES[code]
except KeyError:
msg = inliner.reporter.error('%d is invalid HTTP status code'
% code, lineno=lineno)
msg = inliner.reporter.error(
"%d is invalid HTTP status code" % code, lineno=lineno
)
prb = inliner.problematic(rawtext, rawtext, msg)
return [prb], [msg]
else:
try:
code, status = re.split(r'\s', text.strip(), 1)
code, status = re.split(r"\s", text.strip(), 1)
code = int(code)
except ValueError:
msg = inliner.reporter.error(
'HTTP status code must be an integer (e.g. `200`) or '
'start with an integer (e.g. `200 OK`); %r is invalid' %
text,
line=lineno
"HTTP status code must be an integer (e.g. `200`) or "
"start with an integer (e.g. `200 OK`); %r is invalid" % text,
line=lineno,
)
prb = inliner.problematic(rawtext, rawtext, msg)
return [prb], [msg]
nodes.reference(rawtext)
if code == 226:
url = 'http://www.ietf.org/rfc/rfc3229.txt'
url = "http://www.ietf.org/rfc/rfc3229.txt"
if code == 418:
url = 'http://www.ietf.org/rfc/rfc2324.txt'
url = "http://www.ietf.org/rfc/rfc2324.txt"
if code == 449:
url = 'http://msdn.microsoft.com/en-us/library' \
'/dd891478(v=prot.10).aspx'
url = "http://msdn.microsoft.com/en-us/library" "/dd891478(v=prot.10).aspx"
elif code in HTTP_STATUS_CODES:
url = 'http://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html' \
'#sec10.' + ('%d.%d' % (code // 100, 1 + code % 100))
url = "http://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html" "#sec10." + (
"%d.%d" % (code // 100, 1 + code % 100)
)
else:
url = ''
url = ""
set_classes(options)
node = nodes.reference(rawtext, '%d %s' % (code, status),
refuri=url, **options)
node = nodes.reference(rawtext, "%d %s" % (code, status), refuri=url, **options)
return [node], []
def http_method_role(name, rawtext, text, lineno, inliner,
options={}, content=[]):
def http_method_role(name, rawtext, text, lineno, inliner, options={}, content=[]):
method = str(text).lower()
if method not in DOCREFS:
msg = inliner.reporter.error('%s is not valid HTTP method' % method,
lineno=lineno)
msg = inliner.reporter.error(
"%s is not valid HTTP method" % method, lineno=lineno
)
prb = inliner.problematic(rawtext, rawtext, msg)
return [prb], [msg]
url = str(DOCREFS[method])
@@ -298,51 +319,61 @@ def http_method_role(name, rawtext, text, lineno, inliner,
class HTTPXRefRole(XRefRole):
def __init__(self, method, **kwargs):
XRefRole.__init__(self, **kwargs)
self.method = method
def process_link(self, env, refnode, has_explicit_title, title, target):
if not target.startswith('/'):
if not target.startswith("/"):
pass
if not has_explicit_title:
title = self.method.upper() + ' ' + title
title = self.method.upper() + " " + title
return title, target
class HTTPIndex(Index):
name = 'routingtable'
localname = 'HTTP Routing Table'
shortname = 'routing table'
name = "routingtable"
localname = "HTTP Routing Table"
shortname = "routing table"
def __init__(self, *args, **kwargs):
super(HTTPIndex, self).__init__(*args, **kwargs)
self.ignore = [[l for l in x.split('/') if l]
for x in self.domain.env.config['http_index_ignore_prefixes']]
self.ignore = [
[l for l in x.split("/") if l]
for x in self.domain.env.config["http_index_ignore_prefixes"]
]
self.ignore.sort(key=lambda x: -len(x))
def grouping_prefix(self, path):
letters = [x for x in path.split('/') if x]
letters = [x for x in path.split("/") if x]
for prefix in self.ignore:
if letters[:len(prefix)] == prefix:
return '/' + '/'.join(letters[:len(prefix) + 1])
return '/%s' % (letters[0] if letters else '',)
if letters[: len(prefix)] == prefix:
return "/" + "/".join(letters[: len(prefix) + 1])
return "/%s" % (letters[0] if letters else "",)
def generate(self, docnames=None):
content = {}
items = ((method, path, info)
items = (
(method, path, info)
for method, routes in self.domain.routes.items()
for path, info in routes.items())
for path, info in routes.items()
)
items = sorted(items, key=lambda item: item[1])
for method, path, info in items:
entries = content.setdefault(self.grouping_prefix(path), [])
entries.append([
method.upper() + ' ' + path, 0, info[0],
http_resource_anchor(method, path), '', '', info[1]
])
entries.append(
[
method.upper() + " " + path,
0,
info[0],
http_resource_anchor(method, path),
"",
"",
info[1],
]
)
content = sorted(content.items(), key=lambda k: k[0])
return (content, True)
@@ -350,53 +381,53 @@ class HTTPIndex(Index):
class HTTPDomain(Domain):
"""HTTP domain."""
name = 'http'
label = 'HTTP'
name = "http"
label = "HTTP"
object_types = {
'options': ObjType('options', 'options', 'obj'),
'head': ObjType('head', 'head', 'obj'),
'post': ObjType('post', 'post', 'obj'),
'get': ObjType('get', 'get', 'obj'),
'put': ObjType('put', 'put', 'obj'),
'patch': ObjType('patch', 'patch', 'obj'),
'delete': ObjType('delete', 'delete', 'obj'),
'trace': ObjType('trace', 'trace', 'obj')
"options": ObjType("options", "options", "obj"),
"head": ObjType("head", "head", "obj"),
"post": ObjType("post", "post", "obj"),
"get": ObjType("get", "get", "obj"),
"put": ObjType("put", "put", "obj"),
"patch": ObjType("patch", "patch", "obj"),
"delete": ObjType("delete", "delete", "obj"),
"trace": ObjType("trace", "trace", "obj"),
}
directives = {
'options': HTTPOptions,
'head': HTTPHead,
'post': HTTPPost,
'get': HTTPGet,
'put': HTTPPut,
'patch': HTTPPatch,
'delete': HTTPDelete,
'trace': HTTPTrace
"options": HTTPOptions,
"head": HTTPHead,
"post": HTTPPost,
"get": HTTPGet,
"put": HTTPPut,
"patch": HTTPPatch,
"delete": HTTPDelete,
"trace": HTTPTrace,
}
roles = {
'options': HTTPXRefRole('options'),
'head': HTTPXRefRole('head'),
'post': HTTPXRefRole('post'),
'get': HTTPXRefRole('get'),
'put': HTTPXRefRole('put'),
'patch': HTTPXRefRole('patch'),
'delete': HTTPXRefRole('delete'),
'trace': HTTPXRefRole('trace'),
'statuscode': http_statuscode_role,
'method': http_method_role
"options": HTTPXRefRole("options"),
"head": HTTPXRefRole("head"),
"post": HTTPXRefRole("post"),
"get": HTTPXRefRole("get"),
"put": HTTPXRefRole("put"),
"patch": HTTPXRefRole("patch"),
"delete": HTTPXRefRole("delete"),
"trace": HTTPXRefRole("trace"),
"statuscode": http_statuscode_role,
"method": http_method_role,
}
initial_data = {
'options': {}, # path: (docname, synopsis)
'head': {},
'post': {},
'get': {},
'put': {},
'patch': {},
'delete': {},
'trace': {}
"options": {}, # path: (docname, synopsis)
"head": {},
"post": {},
"get": {},
"put": {},
"patch": {},
"delete": {},
"trace": {},
}
# indices = [HTTPIndex]
@@ -412,17 +443,15 @@ class HTTPDomain(Domain):
if info[0] == docname:
del routes[path]
def resolve_xref(self, env, fromdocname, builder, typ, target,
node, contnode):
def resolve_xref(self, env, fromdocname, builder, typ, target, node, contnode):
try:
info = self.data[str(typ)][target]
except KeyError:
return
else:
anchor = http_resource_anchor(typ, target)
title = typ.upper() + ' ' + target
return make_refnode(builder, fromdocname, info[0], anchor,
contnode, title)
title = typ.upper() + " " + target
return make_refnode(builder, fromdocname, info[0], anchor, contnode, title)
def get_objects(self):
for method, routes in self.routes.items():
@@ -434,16 +463,16 @@ class HTTPDomain(Domain):
class HTTPLexer(RegexLexer):
"""Lexer for HTTP sessions."""
name = 'HTTP'
aliases = ['http']
name = "HTTP"
aliases = ["http"]
flags = re.DOTALL
def header_callback(self, match):
if match.group(1).lower() == 'content-type':
if match.group(1).lower() == "content-type":
content_type = match.group(5).strip()
if ';' in content_type:
content_type = content_type[:content_type.find(';')].strip()
if ";" in content_type:
content_type = content_type[: content_type.find(";")].strip()
self.content_type = content_type
yield match.start(1), Name.Attribute, match.group(1)
yield match.start(2), Text, match.group(2)
@@ -458,11 +487,12 @@ class HTTPLexer(RegexLexer):
yield match.start(3), Text, match.group(3)
def content_callback(self, match):
content_type = getattr(self, 'content_type', None)
content_type = getattr(self, "content_type", None)
content = match.group()
offset = match.start()
if content_type:
from pygments.lexers import get_lexer_for_mimetype
try:
lexer = get_lexer_for_mimetype(content_type)
except ClassNotFound:
@@ -474,33 +504,50 @@ class HTTPLexer(RegexLexer):
yield offset, Text, content
tokens = {
'root': [
(r'(GET|POST|PUT|PATCH|DELETE|HEAD|OPTIONS|TRACE)( +)([^ ]+)( +)'
r'(HTTPS?)(/)(1\.[01])(\r?\n|$)',
bygroups(Name.Function, Text, Name.Namespace, Text,
Keyword.Reserved, Operator, Number, Text),
'headers'),
(r'(HTTPS?)(/)(1\.[01])( +)(\d{3})( +)([^\r\n]+)(\r?\n|$)',
bygroups(Keyword.Reserved, Operator, Number, Text, Number,
Text, Name.Exception, Text),
'headers'),
"root": [
(
r"(GET|POST|PUT|PATCH|DELETE|HEAD|OPTIONS|TRACE)( +)([^ ]+)( +)"
r"(HTTPS?)(/)(1\.[01])(\r?\n|$)",
bygroups(
Name.Function,
Text,
Name.Namespace,
Text,
Keyword.Reserved,
Operator,
Number,
Text,
),
"headers",
),
(
r"(HTTPS?)(/)(1\.[01])( +)(\d{3})( +)([^\r\n]+)(\r?\n|$)",
bygroups(
Keyword.Reserved,
Operator,
Number,
Text,
Number,
Text,
Name.Exception,
Text,
),
"headers",
),
],
'headers': [
(r'([^\s:]+)( *)(:)( *)([^\r\n]+)(\r?\n|$)', header_callback),
(r'([\t ]+)([^\r\n]+)(\r?\n|$)', continuous_header_callback),
(r'\r?\n', Text, 'content')
"headers": [
(r"([^\s:]+)( *)(:)( *)([^\r\n]+)(\r?\n|$)", header_callback),
(r"([\t ]+)([^\r\n]+)(\r?\n|$)", continuous_header_callback),
(r"\r?\n", Text, "content"),
],
'content': [
(r'.+', content_callback)
]
"content": [(r".+", content_callback)],
}
def setup(app):
app.add_domain(HTTPDomain)
try:
get_lexer_by_name('http')
get_lexer_by_name("http")
except ClassNotFound:
app.add_lexer('http', HTTPLexer())
app.add_config_value('http_index_ignore_prefixes', [], None)
app.add_lexer("http", HTTPLexer())
app.add_config_value("http_index_ignore_prefixes", [], None)

@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
'''
"""
:codeauthor: Pedro Algarvio (pedro@algarvio.me)
@@ -7,23 +7,23 @@
~~~~~~~~~~~~~~
Properly handle ``__func_alias__``
'''
"""
# Import Sphinx libs
from sphinx.ext.autodoc import FunctionDocumenter as FunctionDocumenter
class SaltFunctionDocumenter(FunctionDocumenter):
'''
"""
Simple override of sphinx.ext.autodoc.FunctionDocumenter to properly render
salt's aliased function names.
'''
"""
def format_name(self):
'''
"""
Format the function name
'''
if not hasattr(self.module, '__func_alias__'):
"""
if not hasattr(self.module, "__func_alias__"):
# Resume normal sphinx.ext.autodoc operation
return super(FunctionDocumenter, self).format_name()
@@ -46,4 +46,4 @@ def setup(app):
# add_autodocumenter() must be called after the initial setup and the
# 'builder-inited' event, as sphinx.ext.autosummary will restore the
# original documenter on 'builder-inited'
app.connect('env-before-read-docs', add_documenter)
app.connect("env-before-read-docs", add_documenter)

@@ -2,70 +2,66 @@ import itertools
import os
import re
import salt
from docutils import nodes
from docutils.parsers.rst import Directive
from docutils.statemachine import ViewList
from sphinx import addnodes
from sphinx.directives import ObjectDescription
from sphinx.domains import Domain, ObjType
from sphinx.domains import python as python_domain
from sphinx.domains.python import PyObject
from sphinx.locale import _
from sphinx.roles import XRefRole
from sphinx.util.nodes import make_refnode
from sphinx.util.nodes import nested_parse_with_titles
from sphinx.util.nodes import set_source_info
from sphinx.domains import python as python_domain
import salt
from sphinx.util.nodes import make_refnode, nested_parse_with_titles, set_source_info
class Event(PyObject):
'''
"""
Document Salt events
'''
domain = 'salt'
"""
domain = "salt"
class LiterateCoding(Directive):
'''
"""
Auto-doc SLS files using literate-style comment/code separation
'''
"""
has_content = False
required_arguments = 1
optional_arguments = 0
final_argument_whitespace = False
def parse_file(self, fpath):
'''
"""
Read a file on the file system (relative to salt's base project dir)
:returns: A file-like object.
:raises IOError: If the file cannot be found or read.
'''
sdir = os.path.abspath(os.path.join(os.path.dirname(salt.__file__),
os.pardir))
with open(os.path.join(sdir, fpath), 'rb') as f:
"""
sdir = os.path.abspath(os.path.join(os.path.dirname(salt.__file__), os.pardir))
with open(os.path.join(sdir, fpath), "rb") as f:
return f.readlines()
def parse_lit(self, lines):
'''
"""
Parse a string line-by-line delineating comments and code
:returns: A tuple of boolean/list-of-string pairs. True designates a
comment; False designates code.
'''
comment_char = '#' # TODO: move this into a directive option
comment = re.compile(r'^\s*{0}[ \n]'.format(comment_char))
"""
comment_char = "#" # TODO: move this into a directive option
comment = re.compile(r"^\s*{0}[ \n]".format(comment_char))
section_test = lambda val: bool(comment.match(val))
sections = []
for is_doc, group in itertools.groupby(lines, section_test):
if is_doc:
text = [comment.sub('', i).rstrip('\r\n') for i in group]
text = [comment.sub("", i).rstrip("\r\n") for i in group]
else:
text = [i.rstrip('\r\n') for i in group]
text = [i.rstrip("\r\n") for i in group]
sections.append((is_doc, text))
@ -79,33 +75,33 @@ class LiterateCoding(Directive):
return [document.reporter.warning(str(exc), line=self.lineno)]
node = nodes.container()
node['classes'] = ['lit-container']
node["classes"] = ["lit-container"]
node.document = self.state.document
enum = nodes.enumerated_list()
enum['classes'] = ['lit-docs']
enum["classes"] = ["lit-docs"]
node.append(enum)
# make first list item
list_item = nodes.list_item()
list_item['classes'] = ['lit-item']
list_item["classes"] = ["lit-item"]
for is_doc, line in lines:
if is_doc and line == ['']:
if is_doc and line == [""]:
continue
section = nodes.section()
if is_doc:
section['classes'] = ['lit-annotation']
section["classes"] = ["lit-annotation"]
nested_parse_with_titles(self.state, ViewList(line), section)
else:
section['classes'] = ['lit-content']
section["classes"] = ["lit-content"]
code = '\n'.join(line)
code = "\n".join(line)
literal = nodes.literal_block(code, code)
literal['language'] = 'yaml'
literal["language"] = "yaml"
set_source_info(self, literal)
section.append(literal)
@ -116,42 +112,41 @@ class LiterateCoding(Directive):
if len(list_item.children) == 2:
enum.append(list_item)
list_item = nodes.list_item()
list_item['classes'] = ['lit-item']
list_item["classes"] = ["lit-item"]
# Non-semantic div for styling
bg = nodes.container()
bg['classes'] = ['lit-background']
bg["classes"] = ["lit-background"]
node.append(bg)
return [node]
class LiterateFormula(LiterateCoding):
'''
"""
Customizations to handle finding and parsing SLS files
'''
"""
def parse_file(self, sls_path):
'''
"""
Given a typical Salt SLS path (e.g.: apache.vhosts.standard), find the
file on the file system and parse it
'''
"""
config = self.state.document.settings.env.config
formulas_dirs = config.formulas_dirs
fpath = sls_path.replace('.', '/')
fpath = sls_path.replace(".", "/")
name_options = (
'{0}.sls'.format(fpath),
os.path.join(fpath, 'init.sls')
)
name_options = ("{0}.sls".format(fpath), os.path.join(fpath, "init.sls"))
paths = [os.path.join(fdir, fname)
for fname in name_options
for fdir in formulas_dirs]
paths = [
os.path.join(fdir, fname)
for fname in name_options
for fdir in formulas_dirs
]
for i in paths:
try:
with open(i, 'rb') as f:
with open(i, "rb") as f:
return f.readlines()
except IOError:
pass
@ -160,7 +155,7 @@ class LiterateFormula(LiterateCoding):
class CurrentFormula(Directive):
domain = 'salt'
domain = "salt"
has_content = False
required_arguments = 1
optional_arguments = 0
@ -170,15 +165,15 @@ class CurrentFormula(Directive):
def run(self):
env = self.state.document.settings.env
modname = self.arguments[0].strip()
if modname == 'None':
env.temp_data['salt:formula'] = None
if modname == "None":
env.temp_data["salt:formula"] = None
else:
env.temp_data['salt:formula'] = modname
env.temp_data["salt:formula"] = modname
return []
class Formula(Directive):
domain = 'salt'
domain = "salt"
has_content = True
required_arguments = 1
@ -186,30 +181,31 @@ class Formula(Directive):
env = self.state.document.settings.env
formname = self.arguments[0].strip()
env.temp_data['salt:formula'] = formname
env.temp_data["salt:formula"] = formname
if 'noindex' in self.options:
if "noindex" in self.options:
return []
env.domaindata['salt']['formulas'][formname] = (
env.docname,
self.options.get('synopsis', ''),
self.options.get('platform', ''),
'deprecated' in self.options)
env.domaindata["salt"]["formulas"][formname] = (
env.docname,
self.options.get("synopsis", ""),
self.options.get("platform", ""),
"deprecated" in self.options,
)
targetnode = nodes.target('', '', ids=['module-' + formname],
ismod=True)
targetnode = nodes.target("", "", ids=["module-" + formname], ismod=True)
self.state.document.note_explicit_target(targetnode)
indextext = u'{0}-formula)'.format(formname)
inode = addnodes.index(entries=[('single', indextext,
'module-' + formname, '')])
indextext = u"{0}-formula)".format(formname)
inode = addnodes.index(
entries=[("single", indextext, "module-" + formname, "")]
)
return [targetnode, inode]
class State(Directive):
domain = 'salt'
domain = "salt"
has_content = True
required_arguments = 1
@ -217,19 +213,18 @@ class State(Directive):
env = self.state.document.settings.env
statename = self.arguments[0].strip()
if 'noindex' in self.options:
if "noindex" in self.options:
return []
targetnode = nodes.target('', '', ids=['module-' + statename],
ismod=True)
targetnode = nodes.target("", "", ids=["module-" + statename], ismod=True)
self.state.document.note_explicit_target(targetnode)
formula = env.temp_data.get('salt:formula')
formula = env.temp_data.get("salt:formula")
indextext = u'{1} ({0}-formula)'.format(formula, statename)
inode = addnodes.index(entries=[
('single', indextext, 'module-{0}'.format(statename), ''),
])
indextext = u"{1} ({0}-formula)".format(formula, statename)
inode = addnodes.index(
entries=[("single", indextext, "module-{0}".format(statename), ""),]
)
return [targetnode, inode]
@ -239,55 +234,56 @@ class SLSXRefRole(XRefRole):
class SaltModuleIndex(python_domain.PythonModuleIndex):
name = 'modindex'
localname = _('Salt Module Index')
shortname = _('all salt modules')
name = "modindex"
localname = _("Salt Module Index")
shortname = _("all salt modules")
class SaltDomain(python_domain.PythonDomain):
name = 'salt'
label = 'Salt'
name = "salt"
label = "Salt"
data_version = 2
object_types = python_domain.PythonDomain.object_types
object_types.update({
'state': ObjType(_('state'), 'state'),
})
object_types.update(
{"state": ObjType(_("state"), "state"),}
)
directives = python_domain.PythonDomain.directives
directives.update({
'event': Event,
'state': State,
'formula': LiterateFormula,
'currentformula': CurrentFormula,
'saltconfig': LiterateCoding,
})
directives.update(
{
"event": Event,
"state": State,
"formula": LiterateFormula,
"currentformula": CurrentFormula,
"saltconfig": LiterateCoding,
}
)
roles = python_domain.PythonDomain.roles
roles.update({
'formula': SLSXRefRole(),
})
roles.update(
{"formula": SLSXRefRole(),}
)
initial_data = python_domain.PythonDomain.initial_data
initial_data.update({
'formulas': {},
})
initial_data.update(
{"formulas": {},}
)
indices = [
SaltModuleIndex,
]
def resolve_xref(self, env, fromdocname, builder, type, target, node,
contnode):
if type == 'formula' and target in self.data['formulas']:
doc, _, _, _ = self.data['formulas'].get(target, (None, None))
def resolve_xref(self, env, fromdocname, builder, type, target, node, contnode):
if type == "formula" and target in self.data["formulas"]:
doc, _, _, _ = self.data["formulas"].get(target, (None, None))
if doc:
return make_refnode(builder, fromdocname, doc, target,
contnode, target)
return make_refnode(builder, fromdocname, doc, target, contnode, target)
else:
super(SaltDomain, self).resolve_xref(env, fromdocname, builder,
type, target, node, contnode)
super(SaltDomain, self).resolve_xref(
env, fromdocname, builder, type, target, node, contnode
)
# Monkey-patch the Python domain to remove the python module index
python_domain.PythonDomain.indices = [SaltModuleIndex]
@ -296,18 +292,34 @@ python_domain.PythonDomain.indices = [SaltModuleIndex]
def setup(app):
app.add_domain(SaltDomain)
formulas_path = 'templates/formulas'
formulas_dir = os.path.join(os.path.abspath(os.path.dirname(salt.__file__)),
formulas_path)
app.add_config_value('formulas_dirs', [formulas_dir], 'env')
formulas_path = "templates/formulas"
formulas_dir = os.path.join(
os.path.abspath(os.path.dirname(salt.__file__)), formulas_path
)
app.add_config_value("formulas_dirs", [formulas_dir], "env")
app.add_crossref_type(directivename="conf_master", rolename="conf_master",
indextemplate="pair: %s; conf/master")
app.add_crossref_type(directivename="conf_minion", rolename="conf_minion",
indextemplate="pair: %s; conf/minion")
app.add_crossref_type(directivename="conf_proxy", rolename="conf_proxy",
indextemplate="pair: %s; conf/proxy")
app.add_crossref_type(directivename="conf_log", rolename="conf_log",
indextemplate="pair: %s; conf/logging")
app.add_crossref_type(directivename="jinja_ref", rolename="jinja_ref",
indextemplate="pair: %s; jinja filters")
app.add_crossref_type(
directivename="conf_master",
rolename="conf_master",
indextemplate="pair: %s; conf/master",
)
app.add_crossref_type(
directivename="conf_minion",
rolename="conf_minion",
indextemplate="pair: %s; conf/minion",
)
app.add_crossref_type(
directivename="conf_proxy",
rolename="conf_proxy",
indextemplate="pair: %s; conf/proxy",
)
app.add_crossref_type(
directivename="conf_log",
rolename="conf_log",
indextemplate="pair: %s; conf/logging",
)
app.add_crossref_type(
directivename="jinja_ref",
rolename="jinja_ref",
indextemplate="pair: %s; jinja filters",
)

View file

@ -1,24 +1,24 @@
# -*- coding: utf-8 -*-
'''
"""
saltrepo
~~~~~~~~
SaltStack Repository Sphinx directives
'''
"""
def source_read_handler(app, docname, source):
if '|repo_primary_branch|' in source[0]:
if "|repo_primary_branch|" in source[0]:
source[0] = source[0].replace(
'|repo_primary_branch|',
app.config.html_context['repo_primary_branch']
"|repo_primary_branch|", app.config.html_context["repo_primary_branch"]
)
def setup(app):
app.connect('source-read', source_read_handler)
app.connect("source-read", source_read_handler)
return {
'version': 'builtin',
'parallel_read_safe': True,
'parallel_write_safe': True,
"version": "builtin",
"parallel_read_safe": True,
"parallel_write_safe": True,
}

View file

@ -1,22 +1,24 @@
'''
"""
Short-URL redirects
'''
"""
import json
import os
import sphinx.ext.intersphinx
DOCS_URL = 'http://docs.saltstack.com/en/latest/'
DOCS_URL = "http://docs.saltstack.com/en/latest/"
def write_urls_index(app, exc):
'''
"""
Generate a JSON file to serve as an index for short-URL lookups
'''
inventory = os.path.join(app.builder.outdir, 'objects.inv')
"""
inventory = os.path.join(app.builder.outdir, "objects.inv")
objects = sphinx.ext.intersphinx.fetch_inventory(app, DOCS_URL, inventory)
with open(os.path.join(app.builder.outdir, 'shorturls.json'), 'w') as f:
with open(os.path.join(app.builder.outdir, "shorturls.json"), "w") as f:
json.dump(objects, f)
def setup(app):
app.connect('build-finished', write_urls_index)
app.connect("build-finished", write_urls_index)

View file

@ -36,8 +36,10 @@
from __future__ import division
import re
from docutils import nodes
from docutils.parsers.rst import directives
try:
from sphinx.util.compat import Directive
except ImportError:
@ -140,7 +142,9 @@ class YouTube(Directive):
aspect = None
width = get_size(self.options, "width")
height = get_size(self.options, "height")
return [youtube(id=self.arguments[0], aspect=aspect, width=width, height=height)]
return [
youtube(id=self.arguments[0], aspect=aspect, width=width, height=height)
]
def setup(app):

View file

@ -1,19 +1,19 @@
# -*- coding: utf-8 -*-
# pylint: disable=C0103,W0622
'''
"""
Sphinx documentation for Salt
'''
import sys
"""
import os
import re
import types
import sys
import time
import types
from sphinx.directives import TocTree
class Mock(object):
'''
"""
Mock out specified imports.
This allows autodoc to do its thing without having oodles of req'd
@ -22,8 +22,11 @@ class Mock(object):
This Mock class can be configured to return specific values at specific names, if required.
http://read-the-docs.readthedocs.org/en/latest/faq.html#i-get-import-errors-on-libraries-that-depend-on-c-modules
'''
def __init__(self, mapping=None, *args, **kwargs): # pylint: disable=unused-argument
"""
def __init__(
self, mapping=None, *args, **kwargs
): # pylint: disable=unused-argument
"""
Mapping allows autodoc to bypass the Mock object, but actually assign
a specific value, expected by a specific attribute returned.
@ -41,9 +44,9 @@ class Mock(object):
def __getattr__(self, name):
if name in self.__mapping:
data = self.__mapping.get(name)
elif name in ('__file__', '__path__'):
data = '/dev/null'
elif name in ('__mro_entries__', '__qualname__'):
elif name in ("__file__", "__path__"):
data = "/dev/null"
elif name in ("__mro_entries__", "__qualname__"):
raise AttributeError("'Mock' object has no attribute '%s'" % (name))
else:
data = Mock(mapping=self.__mapping)
@ -61,7 +64,7 @@ class Mock(object):
def mock_decorator_with_params(*oargs, **okwargs): # pylint: disable=unused-argument
'''
"""
Optionally mock a decorator that takes parameters
E.g.:
@ -69,153 +72,146 @@ def mock_decorator_with_params(*oargs, **okwargs): # pylint: disable=unused-arg
@blah(stuff=True)
def things():
pass
'''
"""
def inner(fn, *iargs, **ikwargs): # pylint: disable=unused-argument
if hasattr(fn, '__call__'):
if hasattr(fn, "__call__"):
return fn
return Mock()
return inner
MOCK_MODULES = [
# Python stdlib
'user',
"user",
# salt core
'concurrent',
'Crypto',
'Crypto.Signature',
'Crypto.Cipher',
'Crypto.Hash',
'Crypto.PublicKey',
'Crypto.Random',
'Crypto.Signature',
'Crypto.Signature.PKCS1_v1_5',
'M2Crypto',
'msgpack',
'yaml',
'yaml.constructor',
'yaml.nodes',
'yaml.parser',
'yaml.scanner',
'zmq',
'zmq.eventloop',
'zmq.eventloop.ioloop',
"concurrent",
"Crypto",
"Crypto.Signature",
"Crypto.Cipher",
"Crypto.Hash",
"Crypto.PublicKey",
"Crypto.Random",
"Crypto.Signature",
"Crypto.Signature.PKCS1_v1_5",
"M2Crypto",
"msgpack",
"yaml",
"yaml.constructor",
"yaml.nodes",
"yaml.parser",
"yaml.scanner",
"zmq",
"zmq.eventloop",
"zmq.eventloop.ioloop",
# third-party libs for cloud modules
'libcloud',
'libcloud.compute',
'libcloud.compute.base',
'libcloud.compute.deployment',
'libcloud.compute.providers',
'libcloud.compute.types',
'libcloud.loadbalancer',
'libcloud.loadbalancer.types',
'libcloud.loadbalancer.providers',
'libcloud.common',
'libcloud.common.google',
"libcloud",
"libcloud.compute",
"libcloud.compute.base",
"libcloud.compute.deployment",
"libcloud.compute.providers",
"libcloud.compute.types",
"libcloud.loadbalancer",
"libcloud.loadbalancer.types",
"libcloud.loadbalancer.providers",
"libcloud.common",
"libcloud.common.google",
# third-party libs for netapi modules
'cherrypy',
'cherrypy.lib',
'cherrypy.process',
'cherrypy.wsgiserver',
'cherrypy.wsgiserver.ssl_builtin',
'tornado',
'tornado.concurrent',
'tornado.escape',
'tornado.gen',
'tornado.httpclient',
'tornado.httpserver',
'tornado.httputil',
'tornado.ioloop',
'tornado.iostream',
'tornado.netutil',
'tornado.simple_httpclient',
'tornado.stack_context',
'tornado.web',
'tornado.websocket',
'tornado.locks',
'ws4py',
'ws4py.server',
'ws4py.server.cherrypyserver',
'ws4py.websocket',
"cherrypy",
"cherrypy.lib",
"cherrypy.process",
"cherrypy.wsgiserver",
"cherrypy.wsgiserver.ssl_builtin",
"tornado",
"tornado.concurrent",
"tornado.escape",
"tornado.gen",
"tornado.httpclient",
"tornado.httpserver",
"tornado.httputil",
"tornado.ioloop",
"tornado.iostream",
"tornado.netutil",
"tornado.simple_httpclient",
"tornado.stack_context",
"tornado.web",
"tornado.websocket",
"tornado.locks",
"ws4py",
"ws4py.server",
"ws4py.server.cherrypyserver",
"ws4py.websocket",
# modules, renderers, states, returners, et al
'ClusterShell',
'ClusterShell.NodeSet',
'MySQLdb',
'MySQLdb.cursors',
'OpenSSL',
'avahi',
'boto.regioninfo',
'concurrent',
'dbus',
'django',
'dns',
'dns.resolver',
'dson',
'hjson',
'jnpr',
'jnpr.junos',
'jnpr.junos.utils',
'jnpr.junos.utils.config',
'jnpr.junos.utils.sw',
'keyring',
'libvirt',
'lxml',
'lxml.etree',
'msgpack',
'nagios_json',
'napalm',
'netaddr',
'netaddr.IPAddress',
'netaddr.core',
'netaddr.core.AddrFormatError',
'ntsecuritycon',
'psutil',
'pycassa',
'pyconnman',
'pyiface',
'pymongo',
'pyroute2',
'pyroute2.ipdb',
'rabbitmq_server',
'redis',
'rpm',
'rpmUtils',
'rpmUtils.arch',
'salt.ext.six.moves.winreg',
'twisted',
'twisted.internet',
'twisted.internet.protocol',
'twisted.internet.protocol.DatagramProtocol',
'win32security',
'yum',
'zfs',
"ClusterShell",
"ClusterShell.NodeSet",
"MySQLdb",
"MySQLdb.cursors",
"OpenSSL",
"avahi",
"boto.regioninfo",
"concurrent",
"dbus",
"django",
"dns",
"dns.resolver",
"dson",
"hjson",
"jnpr",
"jnpr.junos",
"jnpr.junos.utils",
"jnpr.junos.utils.config",
"jnpr.junos.utils.sw",
"keyring",
"libvirt",
"lxml",
"lxml.etree",
"msgpack",
"nagios_json",
"napalm",
"netaddr",
"netaddr.IPAddress",
"netaddr.core",
"netaddr.core.AddrFormatError",
"ntsecuritycon",
"psutil",
"pycassa",
"pyconnman",
"pyiface",
"pymongo",
"pyroute2",
"pyroute2.ipdb",
"rabbitmq_server",
"redis",
"rpm",
"rpmUtils",
"rpmUtils.arch",
"salt.ext.six.moves.winreg",
"twisted",
"twisted.internet",
"twisted.internet.protocol",
"twisted.internet.protocol.DatagramProtocol",
"win32security",
"yum",
"zfs",
]
MOCK_MODULES_MAPPING = {
'cherrypy': {'config': mock_decorator_with_params},
'ntsecuritycon': {
'STANDARD_RIGHTS_REQUIRED': 0,
'SYNCHRONIZE': 0,
},
'psutil': {'total': 0}, # Otherwise it will crash Sphinx
"cherrypy": {"config": mock_decorator_with_params},
"ntsecuritycon": {"STANDARD_RIGHTS_REQUIRED": 0, "SYNCHRONIZE": 0,},
"psutil": {"total": 0}, # Otherwise it will crash Sphinx
}
for mod_name in MOCK_MODULES:
sys.modules[mod_name] = Mock(mapping=MOCK_MODULES_MAPPING.get(mod_name))
# Define a fake version attribute for the following libs.
sys.modules['libcloud'].__version__ = '0.0.0'
sys.modules['msgpack'].version = (1, 0, 0)
sys.modules['psutil'].version_info = (3, 0, 0)
sys.modules['pymongo'].version = '0.0.0'
sys.modules['tornado'].version_info = (0, 0, 0)
sys.modules['boto.regioninfo']._load_json_file = {'endpoints': None}
sys.modules["libcloud"].__version__ = "0.0.0"
sys.modules["msgpack"].version = (1, 0, 0)
sys.modules["psutil"].version_info = (3, 0, 0)
sys.modules["pymongo"].version = "0.0.0"
sys.modules["tornado"].version_info = (0, 0, 0)
sys.modules["boto.regioninfo"]._load_json_file = {"endpoints": None}
# -- Add paths to PYTHONPATH ---------------------------------------------------
@ -224,102 +220,114 @@ try:
except NameError:
# sphinx-intl and six execute some code which will raise this NameError
# assume we're in the doc/ directory
docs_basepath = os.path.abspath(os.path.dirname('.'))
docs_basepath = os.path.abspath(os.path.dirname("."))
addtl_paths = (
os.pardir, # salt itself (for autodoc)
'_ext', # custom Sphinx extensions
"_ext", # custom Sphinx extensions
)
for addtl_path in addtl_paths:
sys.path.insert(0, os.path.abspath(os.path.join(docs_basepath, addtl_path)))
# We're now able to import salt
import salt.version
import salt.version # isort:skip
formulas_dir = os.path.join(os.pardir, docs_basepath, 'formulas')
formulas_dir = os.path.join(os.pardir, docs_basepath, "formulas")
# ----- Intersphinx Settings ------------------------------------------------>
intersphinx_mapping = {
'python': ('https://docs.python.org/3', None)
}
intersphinx_mapping = {"python": ("https://docs.python.org/3", None)}
# <---- Intersphinx Settings -------------------------------------------------
# -- General Configuration -----------------------------------------------------
# Set a var if we're building docs for the live site or not
on_saltstack = 'SALT_ON_SALTSTACK' in os.environ
on_saltstack = "SALT_ON_SALTSTACK" in os.environ
project = 'Salt'
repo_primary_branch = 'master' # This is the default branch on GitHub for the Salt project
project = "Salt"
repo_primary_branch = (
"master" # This is the default branch on GitHub for the Salt project
)
version = salt.version.__version__
latest_release = os.environ.get('LATEST_RELEASE', 'latest_release') # latest release (2019.2.3)
previous_release = os.environ.get('PREVIOUS_RELEASE', 'previous_release') # latest release from previous branch (2018.3.5)
previous_release_dir = os.environ.get('PREVIOUS_RELEASE_DIR', 'previous_release_dir') # path on web server for previous branch (2018.3)
next_release = '' # next release
next_release_dir = '' # path on web server for next release branch
latest_release = os.environ.get(
"LATEST_RELEASE", "latest_release"
) # latest release (2019.2.3)
previous_release = os.environ.get(
"PREVIOUS_RELEASE", "previous_release"
) # latest release from previous branch (2018.3.5)
previous_release_dir = os.environ.get(
"PREVIOUS_RELEASE_DIR", "previous_release_dir"
) # path on web server for previous branch (2018.3)
next_release = "" # next release
next_release_dir = "" # path on web server for next release branch
today = ''
copyright = ''
today = ""
copyright = ""
if on_saltstack:
today = "Generated on " + time.strftime("%B %d, %Y") + " at " + time.strftime("%X %Z") + "."
today = (
"Generated on "
+ time.strftime("%B %d, %Y")
+ " at "
+ time.strftime("%X %Z")
+ "."
)
copyright = time.strftime("%Y")
# < --- START do not merge these settings to other branches START ---> #
build_type = os.environ.get('BUILD_TYPE', repo_primary_branch) # latest, previous, master, next
build_type = os.environ.get(
"BUILD_TYPE", repo_primary_branch
) # latest, previous, master, next
# < --- END do not merge these settings to other branches END ---> #
# Set google custom search engine
if build_type == repo_primary_branch:
release = latest_release
search_cx = '011515552685726825874:v1had6i279q' # master
#search_cx = '011515552685726825874:x17j5zl74g8' # develop
elif build_type == 'next':
search_cx = "011515552685726825874:v1had6i279q" # master
# search_cx = '011515552685726825874:x17j5zl74g8' # develop
elif build_type == "next":
release = next_release
search_cx = '011515552685726825874:ht0p8miksrm' # latest
elif build_type == 'previous':
search_cx = "011515552685726825874:ht0p8miksrm" # latest
elif build_type == "previous":
release = previous_release
if release.startswith('3000'):
search_cx = '011515552685726825874:3skhaozjtyn' # 3000
elif release.startswith('2019.2'):
search_cx = '011515552685726825874:huvjhlpptnm' # 2019.2
elif release.startswith('2018.3'):
search_cx = '011515552685726825874:vadptdpvyyu' # 2018.3
elif release.startswith('2017.7'):
search_cx = '011515552685726825874:w-hxmnbcpou' # 2017.7
elif release.startswith('2016.11'):
search_cx = '011515552685726825874:dlsj745pvhq' # 2016.11
if release.startswith("3000"):
search_cx = "011515552685726825874:3skhaozjtyn" # 3000
elif release.startswith("2019.2"):
search_cx = "011515552685726825874:huvjhlpptnm" # 2019.2
elif release.startswith("2018.3"):
search_cx = "011515552685726825874:vadptdpvyyu" # 2018.3
elif release.startswith("2017.7"):
search_cx = "011515552685726825874:w-hxmnbcpou" # 2017.7
elif release.startswith("2016.11"):
search_cx = "011515552685726825874:dlsj745pvhq" # 2016.11
else:
search_cx = '011515552685726825874:ht0p8miksrm' # latest
else: # latest or something else
search_cx = "011515552685726825874:ht0p8miksrm" # latest
else: # latest or something else
release = latest_release
search_cx = '011515552685726825874:ht0p8miksrm' # latest
search_cx = "011515552685726825874:ht0p8miksrm" # latest
needs_sphinx = '1.3'
needs_sphinx = "1.3"
spelling_lang = 'en_US'
language = 'en'
spelling_lang = "en_US"
language = "en"
locale_dirs = [
'_locale',
"_locale",
]
master_doc = 'contents'
templates_path = ['_templates']
exclude_patterns = ['_build', '_incl/*', 'ref/cli/_includes/*.rst']
master_doc = "contents"
templates_path = ["_templates"]
exclude_patterns = ["_build", "_incl/*", "ref/cli/_includes/*.rst"]
extensions = [
'saltdomain', # Must come early
'sphinx.ext.autodoc',
'sphinx.ext.napoleon',
'sphinx.ext.autosummary',
'sphinx.ext.extlinks',
'sphinx.ext.intersphinx',
'httpdomain',
'youtube',
'saltrepo'
"saltdomain", # Must come early
"sphinx.ext.autodoc",
"sphinx.ext.napoleon",
"sphinx.ext.autosummary",
"sphinx.ext.extlinks",
"sphinx.ext.intersphinx",
"httpdomain",
"youtube",
"saltrepo"
#'saltautodoc', # Must be AFTER autodoc
#'shorturls',
]
@ -329,14 +337,14 @@ try:
except ImportError:
pass
else:
extensions += ['sphinxcontrib.spelling']
extensions += ["sphinxcontrib.spelling"]
modindex_common_prefix = ['salt.']
modindex_common_prefix = ["salt."]
autosummary_generate = True
# strip git rev as there won't necessarily be a release based on it
stripped_release = re.sub(r'-\d+-g[0-9a-f]+$', '', release)
stripped_release = re.sub(r"-\d+-g[0-9a-f]+$", "", release)
# Define a substitution for linking to the latest release tarball
rst_prolog = """\
@ -374,87 +382,91 @@ rst_prolog = """\
<p>x86_64: <a href="https://repo.saltstack.com/osx/salt-{release}-py3-x86_64.pkg"><strong>salt-{release}-py3-x86_64.pkg</strong></a>
| <a href="https://repo.saltstack.com/osx/salt-{release}-py3-x86_64.pkg.md5"><strong>md5</strong></a></p>
""".format(release=stripped_release)
""".format(
release=stripped_release
)
# A shortcut for linking to tickets on the GitHub issue tracker
extlinks = {
'blob': ('https://github.com/saltstack/salt/blob/%s/%%s' % repo_primary_branch, None),
'issue': ('https://github.com/saltstack/salt/issues/%s', 'issue #'),
'pull': ('https://github.com/saltstack/salt/pull/%s', 'PR #'),
'formula_url': ('https://github.com/saltstack-formulas/%s', ''),
"blob": (
"https://github.com/saltstack/salt/blob/%s/%%s" % repo_primary_branch,
None,
),
"issue": ("https://github.com/saltstack/salt/issues/%s", "issue #"),
"pull": ("https://github.com/saltstack/salt/pull/%s", "PR #"),
"formula_url": ("https://github.com/saltstack-formulas/%s", ""),
}
# ----- Localization -------------------------------------------------------->
locale_dirs = ['locale/']
locale_dirs = ["locale/"]
gettext_compact = False
# <---- Localization ---------------------------------------------------------
### HTML options
# set 'HTML_THEME=saltstack' to use previous theme
html_theme = os.environ.get('HTML_THEME', 'saltstack2')
html_theme_path = ['_themes']
html_title = u''
html_short_title = 'Salt'
html_theme = os.environ.get("HTML_THEME", "saltstack2")
html_theme_path = ["_themes"]
html_title = u""
html_short_title = "Salt"
html_static_path = ['_static']
html_logo = None # specified in the theme layout.html
html_favicon = 'favicon.ico'
html_static_path = ["_static"]
html_logo = None # specified in the theme layout.html
html_favicon = "favicon.ico"
smartquotes = False
# Use Google customized search or use Sphinx built-in JavaScript search
if on_saltstack:
html_search_template = 'googlesearch.html'
html_search_template = "googlesearch.html"
else:
html_search_template = 'searchbox.html'
html_search_template = "searchbox.html"
html_additional_pages = {
'404': '404.html',
"404": "404.html",
}
html_default_sidebars = [
html_search_template,
'version.html',
'localtoc.html',
'relations.html',
'sourcelink.html',
'saltstack.html',
"version.html",
"localtoc.html",
"relations.html",
"sourcelink.html",
"saltstack.html",
]
html_sidebars = {
'ref/**/all/salt.*': [
"ref/**/all/salt.*": [
html_search_template,
'version.html',
'modules-sidebar.html',
'localtoc.html',
'relations.html',
'sourcelink.html',
'saltstack.html',
],
'ref/formula/all/*': [
"version.html",
"modules-sidebar.html",
"localtoc.html",
"relations.html",
"sourcelink.html",
"saltstack.html",
],
"ref/formula/all/*": [],
}
html_context = {
'on_saltstack': on_saltstack,
'html_default_sidebars': html_default_sidebars,
'github_base': 'https://github.com/saltstack/salt',
'github_issues': 'https://github.com/saltstack/salt/issues',
'github_downloads': 'https://github.com/saltstack/salt/downloads',
'latest_release': latest_release,
'previous_release': previous_release,
'previous_release_dir': previous_release_dir,
'next_release': next_release,
'next_release_dir': next_release_dir,
'search_cx': search_cx,
'build_type': build_type,
'today': today,
'copyright': copyright,
'repo_primary_branch': repo_primary_branch
"on_saltstack": on_saltstack,
"html_default_sidebars": html_default_sidebars,
"github_base": "https://github.com/saltstack/salt",
"github_issues": "https://github.com/saltstack/salt/issues",
"github_downloads": "https://github.com/saltstack/salt/downloads",
"latest_release": latest_release,
"previous_release": previous_release,
"previous_release_dir": previous_release_dir,
"next_release": next_release,
"next_release_dir": next_release_dir,
"search_cx": search_cx,
"build_type": build_type,
"today": today,
"copyright": copyright,
"repo_primary_branch": repo_primary_branch,
}
html_use_index = True
html_last_updated_fmt = '%b %d, %Y'
html_last_updated_fmt = "%b %d, %Y"
html_show_sourcelink = False
html_show_sphinx = True
html_show_copyright = True
@ -462,20 +474,20 @@ html_show_copyright = True
### Latex options
latex_documents = [
('contents', 'Salt.tex', 'Salt Documentation', 'SaltStack, Inc.', 'manual'),
("contents", "Salt.tex", "Salt Documentation", "SaltStack, Inc.", "manual"),
]
latex_logo = '_static/salt-logo.png'
latex_logo = "_static/salt-logo.png"
latex_elements = {
'inputenc': '', # use XeTeX instead of the inputenc LaTeX package.
'utf8extra': '',
'preamble': r'''
"inputenc": "", # use XeTeX instead of the inputenc LaTeX package.
"utf8extra": "",
"preamble": r"""
\usepackage{fontspec}
\setsansfont{Linux Biolinum O}
\setromanfont{Linux Libertine O}
\setmonofont{Source Code Pro}
''',
""",
}
### Linux Biolinum, Linux Libertine: http://www.linuxlibertine.org/
### Source Code Pro: https://github.com/adobe-fonts/source-code-pro/releases
@ -483,34 +495,34 @@ latex_elements = {
### Linkcheck options
linkcheck_ignore = [
r'http://127.0.0.1',
r'http://salt:\d+',
r'http://local:\d+',
r'https://console.aws.amazon.com',
r'http://192.168.33.10',
r'http://domain:\d+',
r'http://123.456.789.012:\d+',
r'http://localhost',
r'https://groups.google.com/forum/#!forum/salt-users',
r'http://logstash.net/docs/latest/inputs/udp',
r'http://logstash.net/docs/latest/inputs/zeromq',
r'http://www.youtube.com/saltstack',
r'https://raven.readthedocs.io',
r'https://getsentry.com',
r'https://salt-cloud.readthedocs.io',
r'https://salt.readthedocs.io',
r'http://www.pip-installer.org/',
r'http://www.windowsazure.com/',
r'https://github.com/watching',
r'dash-feed://',
r'https://github.com/saltstack/salt/',
r'http://bootstrap.saltstack.org',
r'https://bootstrap.saltstack.com',
r'https://raw.githubusercontent.com/saltstack/salt-bootstrap/stable/bootstrap-salt.sh',
r'media.readthedocs.org/dash/salt/latest/salt.xml',
r'https://portal.aws.amazon.com/gp/aws/securityCredentials',
r'https://help.github.com/articles/fork-a-repo',
r'dash-feed://https%3A//media.readthedocs.org/dash/salt/latest/salt.xml',
r"http://127.0.0.1",
r"http://salt:\d+",
r"http://local:\d+",
r"https://console.aws.amazon.com",
r"http://192.168.33.10",
r"http://domain:\d+",
r"http://123.456.789.012:\d+",
r"http://localhost",
r"https://groups.google.com/forum/#!forum/salt-users",
r"http://logstash.net/docs/latest/inputs/udp",
r"http://logstash.net/docs/latest/inputs/zeromq",
r"http://www.youtube.com/saltstack",
r"https://raven.readthedocs.io",
r"https://getsentry.com",
r"https://salt-cloud.readthedocs.io",
r"https://salt.readthedocs.io",
r"http://www.pip-installer.org/",
r"http://www.windowsazure.com/",
r"https://github.com/watching",
r"dash-feed://",
r"https://github.com/saltstack/salt/",
r"http://bootstrap.saltstack.org",
r"https://bootstrap.saltstack.com",
r"https://raw.githubusercontent.com/saltstack/salt-bootstrap/stable/bootstrap-salt.sh",
r"media.readthedocs.org/dash/salt/latest/salt.xml",
r"https://portal.aws.amazon.com/gp/aws/securityCredentials",
r"https://help.github.com/articles/fork-a-repo",
r"dash-feed://https%3A//media.readthedocs.org/dash/salt/latest/salt.xml",
]
linkcheck_anchors = False
@ -519,53 +531,53 @@ linkcheck_anchors = False
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
authors = [
'Thomas S. Hatch <thatch45@gmail.com> and many others, please see the Authors file',
"Thomas S. Hatch <thatch45@gmail.com> and many others, please see the Authors file",
]
man_pages = [
('contents', 'salt', 'Salt Documentation', authors, 7),
('ref/cli/salt', 'salt', 'salt', authors, 1),
('ref/cli/salt-master', 'salt-master', 'salt-master Documentation', authors, 1),
('ref/cli/salt-minion', 'salt-minion', 'salt-minion Documentation', authors, 1),
('ref/cli/salt-key', 'salt-key', 'salt-key Documentation', authors, 1),
('ref/cli/salt-cp', 'salt-cp', 'salt-cp Documentation', authors, 1),
('ref/cli/salt-call', 'salt-call', 'salt-call Documentation', authors, 1),
('ref/cli/salt-proxy', 'salt-proxy', 'salt-proxy Documentation', authors, 1),
('ref/cli/salt-syndic', 'salt-syndic', 'salt-syndic Documentation', authors, 1),
('ref/cli/salt-run', 'salt-run', 'salt-run Documentation', authors, 1),
('ref/cli/salt-ssh', 'salt-ssh', 'salt-ssh Documentation', authors, 1),
('ref/cli/salt-cloud', 'salt-cloud', 'Salt Cloud Command', authors, 1),
('ref/cli/salt-api', 'salt-api', 'salt-api Command', authors, 1),
('ref/cli/salt-unity', 'salt-unity', 'salt-unity Command', authors, 1),
('ref/cli/spm', 'spm', 'Salt Package Manager Command', authors, 1),
("contents", "salt", "Salt Documentation", authors, 7),
("ref/cli/salt", "salt", "salt", authors, 1),
("ref/cli/salt-master", "salt-master", "salt-master Documentation", authors, 1),
("ref/cli/salt-minion", "salt-minion", "salt-minion Documentation", authors, 1),
("ref/cli/salt-key", "salt-key", "salt-key Documentation", authors, 1),
("ref/cli/salt-cp", "salt-cp", "salt-cp Documentation", authors, 1),
("ref/cli/salt-call", "salt-call", "salt-call Documentation", authors, 1),
("ref/cli/salt-proxy", "salt-proxy", "salt-proxy Documentation", authors, 1),
("ref/cli/salt-syndic", "salt-syndic", "salt-syndic Documentation", authors, 1),
("ref/cli/salt-run", "salt-run", "salt-run Documentation", authors, 1),
("ref/cli/salt-ssh", "salt-ssh", "salt-ssh Documentation", authors, 1),
("ref/cli/salt-cloud", "salt-cloud", "Salt Cloud Command", authors, 1),
("ref/cli/salt-api", "salt-api", "salt-api Command", authors, 1),
("ref/cli/salt-unity", "salt-unity", "salt-unity Command", authors, 1),
("ref/cli/spm", "spm", "Salt Package Manager Command", authors, 1),
]
### epub options
epub_title = 'Salt Documentation'
epub_author = 'SaltStack, Inc.'
epub_title = "Salt Documentation"
epub_author = "SaltStack, Inc."
epub_publisher = epub_author
epub_copyright = copyright
epub_scheme = 'URL'
epub_identifier = 'http://saltstack.com/'
epub_scheme = "URL"
epub_identifier = "http://saltstack.com/"
epub_tocdup = False
#epub_tocdepth = 3
# epub_tocdepth = 3
def skip_mod_init_member(app, what, name, obj, skip, options):
# pylint: disable=too-many-arguments,unused-argument
if name.startswith('_'):
if name.startswith("_"):
return True
if isinstance(obj, types.FunctionType) and obj.__name__ == 'mod_init':
if isinstance(obj, types.FunctionType) and obj.__name__ == "mod_init":
return True
return False
def _normalize_version(args):
_, path = args
return '.'.join([x.zfill(4) for x in (path.split('/')[-1].split('.'))])
return ".".join([x.zfill(4) for x in (path.split("/")[-1].split("."))])
class ReleasesTree(TocTree):
@ -573,12 +585,12 @@ class ReleasesTree(TocTree):
def run(self):
rst = super(ReleasesTree, self).run()
entries = rst[0][0]['entries'][:]
entries = rst[0][0]["entries"][:]
entries.sort(key=_normalize_version, reverse=True)
rst[0][0]['entries'][:] = entries
rst[0][0]["entries"][:] = entries
return rst
def setup(app):
app.add_directive('releasestree', ReleasesTree)
app.connect('autodoc-skip-member', skip_mod_init_member)
app.add_directive("releasestree", ReleasesTree)
app.connect("autodoc-skip-member", skip_mod_init_member)

View file

@ -2,6 +2,12 @@
Command Line Reference
======================
salt-api
========
.. toctree::
salt-api
salt-call
=========
.. toctree::
@ -80,12 +86,6 @@ salt-unity
salt-unity
salt-api
========
.. toctree::
salt-api
spm
===
.. toctree::

View file

@ -105,6 +105,14 @@ Options
Pass a JID to be used instead of generating one.
.. option:: --pre-flight
Run the ssh_pre_flight script defined in the roster.
By default this script only runs when the thin dir
does not exist on the target minion. This option forces
the script to run regardless of whether the thin dir
already exists.
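For reference, a roster entry defining such a pre-flight script might look like the following sketch (the host name, address, and script path are placeholders):
.. code-block:: yaml
    # /etc/salt/roster (illustrative)
    web1:
      host: 192.168.0.42
      user: root
      # script copied to and run on the target before the thin dir is deployed
      ssh_pre_flight: /srv/salt/pre_flight.sh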
Authentication Options
----------------------

View file

@ -19,6 +19,7 @@ cloud modules
gce
gogrid
joyent
libvirt
linode
lxc
msazure

View file

@ -0,0 +1,6 @@
=========================
salt.cloud.clouds.libvirt
=========================
.. automodule:: salt.cloud.clouds.libvirt
:members:

View file

@ -484,6 +484,22 @@ grains for the master.
enable_gpu_grains: True
.. conf_master:: skip_grains
``skip_grains``
---------------------
Default: ``False``
MasterMinions should omit grains. A MasterMinion is "a minion function object
for generic use on the master" that omits pillar. A RunnerClient creates a
MasterMinion that also omits states and the renderer. Setting this to ``True``
can improve master performance.
.. code-block:: yaml
skip_grains: True
.. conf_master:: job_cache
``job_cache``
@ -1341,6 +1357,15 @@ salt-ssh.
groupA: minion1,minion2
groupB: minion1,minion3
.. conf_master:: ssh_run_pre_flight
``ssh_run_pre_flight``
----------------------
Default: ``False``
Run the ssh_pre_flight script defined in the salt-ssh roster. By default
the script only runs when the thin dir does not exist on the targeted
minion. Setting this option to ``True`` forces the script to run without
first checking whether the thin dir exists.
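A minimal sketch of enabling this in the master configuration:
.. code-block:: yaml
    # /etc/salt/master (illustrative)
    ssh_run_pre_flight: True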
.. conf_master:: thin_extra_mods
``thin_extra_mods``

View file

@ -831,12 +831,28 @@ Default: ``False``
The minion can locally cache grain data instead of refreshing the data
each time the grain is referenced. By default this feature is disabled,
to enable set grains_cache to ``True``.
to enable it, set ``grains_cache`` to ``True``.
.. code-block:: yaml
grains_cache: False
.. conf_minion:: grains_cache_expiration
``grains_cache_expiration``
---------------------------
Default: ``300``
Grains cache expiration, in seconds. If the cache file is older than this number
of seconds then the grains cache will be dumped and fully re-populated with
fresh data. Defaults to 5 minutes. Will have no effect if
:conf_minion:`grains_cache` is not enabled.
.. code-block:: yaml
grains_cache_expiration: 300
.. conf_minion:: grains_deep_merge
``grains_deep_merge``
@ -2207,6 +2223,9 @@ auto-loading modules when states run, set this value to ``False``.
.. conf_minion:: clean_dynamic_modules
``clean_dynamic_modules``
-------------------------
Default: ``True``
clean_dynamic_modules keeps the dynamic modules on the minion in sync with
@ -2700,7 +2719,7 @@ minion to clean the keys.
Default: ``''``
Fingerprint of the master public key to validate the identity of your Salt master
before the initial key exchange. The master fingerprint can be found by running
before the initial key exchange. The master fingerprint can be found as ``master.pub`` by running
"salt-key -F master" on the Salt master.
.. code-block:: yaml

View file

@ -10,8 +10,8 @@ grains modules
:toctree:
:template: autosummary.rst.tmpl
cimc
chronos
cimc
core
disks
esxi

View file

@ -13,6 +13,7 @@ execution modules
salt.modules.pkg
salt.modules.service
salt.modules.shadow
salt.modules.sysctl
salt.modules.user
.. currentmodule:: salt.modules
@ -112,10 +113,10 @@ execution modules
ddns
deb_apache
deb_postgres
debuild_pkgbuild
debconfmod
debian_ip
debian_service
debuild_pkgbuild
defaults
devmap
dig
@ -174,12 +175,12 @@ execution modules
hg
hosts
http
icinga2
ifttt
ilo
icinga2
incron
influxdbmod
influxdb08mod
influxdbmod
infoblox
ini_manage
inspectlib
@ -237,11 +238,9 @@ execution modules
lxd
mac_assistive
mac_brew_pkg
macdefaults
mac_desktop
mac_group
mac_keychain
macpackage
mac_pkgutil
mac_portspkg
mac_power
@ -253,6 +252,8 @@ execution modules
mac_timezone
mac_user
mac_xattr
macdefaults
macpackage
makeconf
mandrill
marathon
@ -276,16 +277,15 @@ execution modules
nacl
nagios
nagios_rpc
namecheap_domains_dns
namecheap_domains
namecheap_domains_dns
namecheap_domains_ns
namecheap_ssl
namecheap_users
napalm_mod
napalm_netacl
napalm_bgp
napalm_formula
napalm_mod
napalm_netacl
napalm_network
napalm_ntp
napalm_probes
@ -413,6 +413,7 @@ execution modules
smbios
smf_service
smtp
snapper
solaris_fmadm
solaris_group
solaris_shadow
@ -428,7 +429,6 @@ execution modules
ssh
ssh_pkg
ssh_service
snapper
state
status
statuspage
@ -449,8 +449,8 @@ execution modules
telemetry
temp
test
testinframod
test_virtual
testinframod
textfsm_mod
timezone
tls
@ -496,7 +496,6 @@ execution modules
win_pki
win_powercfg
win_psget
winrepo
win_servermanager
win_service
win_shadow
@ -509,6 +508,7 @@ execution modules
win_useradd
win_wua
win_wusa
winrepo
x509
xapi_virt
xbpspkg

View file

@ -4,4 +4,4 @@ salt.modules.saltcheck
.. automodule:: salt.modules.saltcheck
:members:
:exclude-members: SaltCheck, StateTestLoader
:exclude-members: SaltCheck, StateTestLoader, run_state_tests_ssh

View file

@ -0,0 +1,20 @@
.. _virtual-sysctl:
===================
salt.modules.sysctl
===================
.. py:module:: salt.modules.sysctl
:synopsis: A virtual module for managing sysctl parameters
``sysctl`` is a virtual module that is fulfilled by one of the following modules:
============================================ ========================================
Execution Module Used for
============================================ ========================================
:py:mod:`~salt.modules.freebsd_sysctl` FreeBSD
:py:mod:`~salt.modules.linux_sysctl` Linux
:py:mod:`~salt.modules.mac_sysctl` macOS
:py:mod:`~salt.modules.netbsd_sysctl` NetBSD
:py:mod:`~salt.modules.openbsd_sysctl` OpenBSD
============================================ ========================================
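If the automatically selected implementation ever needs to be overridden, the minion ``providers`` option can pin a concrete module; a hedged sketch, assuming the Linux implementation is wanted:
.. code-block:: yaml
    # /etc/salt/minion (illustrative)
    providers:
      sysctl: linux_sysctl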

View file

@ -11,7 +11,6 @@ proxy modules
:template: autosummary.rst.tmpl
arista_pyeapi
cimc
chronos
cimc
cisconso

View file

@ -90,9 +90,9 @@ state modules
elasticsearch_index_template
environ
eselect
esxi
etcd_mod
ethtool
esxi
event
file
firewall
@ -168,10 +168,10 @@ state modules
lvs_service
lxc
mac_assistive
macdefaults
mac_keychain
macpackage
mac_xattr
macdefaults
macpackage
makeconf
marathon_app
mdadm_raid
@ -188,13 +188,13 @@ state modules
mysql_grants
mysql_query
mysql_user
net_napalm_yang
netacl
netconfig
netntp
netsnmp
netusers
network
net_napalm_yang
neutron_network
neutron_secgroup
neutron_secgroup_rule
@ -215,8 +215,8 @@ state modules
pagerduty_user
panos
pcs
pecl
pdbedit
pecl
pip_state
pkg
pkgbuild
@ -329,8 +329,8 @@ state modules
zabbix_valuemap
zcbuildout
zenoss
zk_concurrency
zfs
zk_concurrency
zone
zookeeper
zpool

View file

@ -68,7 +68,7 @@ first line in the stanza) or the ``- name`` parameter.
- require:
- pkg: vim
Glog matching in requisites
Glob matching in requisites
~~~~~~~~~~~~~~~~~~~~~~~~~~~
.. versionadded:: 0.9.8
@ -1039,8 +1039,8 @@ if the gluster commands return a 0 ret value.
- /etc/crontab
- 'entry1'
runas
~~~~~
listen
~~~~~~
.. versionadded:: 2014.7.0

View file

@ -1,8 +1,8 @@
.. _all-salt.tokens:
============
auth modules
============
=============
token modules
=============
.. currentmodule:: salt.tokens

View file

@ -13,7 +13,6 @@ Security disclosure policy
.. code-block:: text
-----BEGIN PGP PUBLIC KEY BLOCK-----
Version: GnuPG/MacGPG2 v2.0.22 (Darwin)
mQINBFO15mMBEADa3CfQwk5ED9wAQ8fFDku277CegG3U1hVGdcxqKNvucblwoKCb
hRK6u9ihgaO9V9duV2glwgjytiBI/z6lyWqdaD37YXG/gTL+9Md+qdSDeaOa/9eg
@ -39,30 +38,43 @@ Security disclosure policy
El1BLAg+m+1UpE1L7zJT1il6PqVyEFAWBxW46wXCCkGssFsvz2yRp0PDX8A6u4yq
rTkt09uYht1is61joLDJ/kq3+6k8gJWkDOW+2NMrmf+/qcdYCMYXmrtOpg/wF27W
GMNAkbdyzgeX/MbUBCGCMdzhevRuivOI5bu4vT5s3KdshG+yhzV45bapKRd5VN+1
mZRquQINBFO15mMBEAC5UuLii9ZLz6qHfIJp35IOW9U8SOf7QFhzXR7NZ3DmJsd3
f6Nb/habQFIHjm3K9wbpj+FvaW2oWRlFVvYdzjUq6c82GUUjW1dnqgUvFwdmM835
1n0YQ2TonmyaF882RvsRZrbJ65uvy7SQxlouXaAYOdqwLsPxBEOyOnMPSktW5V2U
IWyxsNP3sADchWIGq9p5D3Y/loyIMsS1dj+TjoQZOKSj7CuRT98+8yhGAY8YBEXu
9r3I9o6mDkuPpAljuMc8r09Im6az2egtK/szKt4Hy1bpSSBZU4W/XR7XwQNywmb3
wxjmYT6Od3Mwj0jtzc3gQiH8hcEy3+BO+NNmyzFVyIwOLziwjmEcw62S57wYKUVn
HD2nglMsQa8Ve0e6ABBMEY7zGEGStva59rfgeh0jUMJiccGiUDTMs0tdkC6knYKb
u/fdRqNYFoNuDcSeLEw4DdCuP01l2W4yY+fiK6hAcL25amjzc+yYo9eaaqTn6RAT
bzdhHQZdpAMxY+vNT0+NhP1Zo5gYBMR65Zp/VhFsf67ijb03FUtdw9N8dHwiR2m8
vVA8kO/gCD6wS2p9RdXqrJ9JhnHYWjiVuXR+f755ZAndyQfRtowMdQIoiXuJEXYw
6XN+/BX81gJaynJYc0uw0MnxWQX+A5m8HqEsbIFUXBYXPgbwXTm7c4IHGgXXdwAR
AQABiQIlBBgBAgAPBQJTteZjAhsMBQkHhh+AAAoJENVw8tNOoHk91rcQAIhxLv4g
duF/J1Cyf6Wixz4rqslBQ7DgNztdIUMjCThg3eB6pvIzY5d3DNROmwU5JvGP1rEw
hNiJhgBDFaB0J/y28uSci+orhKDTHb/cn30IxfuAuqrv9dujvmlgM7JUswOtLZhs
5FYGa6v1RORRWhUx2PQsF6ORg22QAaagc7OlaO3BXBoiE/FWsnEQCUsc7GnnPqi7
um45OJl/pJntsBUKvivEU20fj7j1UpjmeWz56NcjXoKtEvGh99gM5W2nSMLE3aPw
vcKhS4yRyLjOe19NfYbtID8m8oshUDji0XjQ1z5NdGcf2V1YNGHU5xyK6zwyGxgV
xZqaWnbhDTu1UnYBna8BiUobkuqclb4T9k2WjbrUSmTwKixokCOirFDZvqISkgmN
r6/g3w2TRi11/LtbUciF0FN2pd7rj5mWrOBPEFYJmrB6SQeswWNhr5RIsXrQd/Ho
zvNm0HnUNEe6w5YBfA6sXQy8B0Zs6pcgLogkFB15TuHIIIpxIsVRv5z8SlEnB7HQ
Io9hZT58yjhekJuzVQB9loU0C/W0lzci/pXTt6fd9puYQe1DG37pSifRG6kfHxrR
if6nRyrfdTlawqbqdkoqFDmEybAM9/hv3BqriGahGGH/hgplNQbYoXfNwYMYaHuB
aSkJvrOQW8bpuAzgVyd7TyNFv+t1kLlfaRYJ
=wBTJ
mZRqiQJVBBMBAgA/AhsDBgsJCAcDAgYVCAIJCgsEFgIDAQIeAQIXgBYhBIq+Tvzw
9LJL/yqvkNVw8tNOoHk9BQJb0e5rBQkL3m8IAAoJENVw8tNOoHk9fzMP/ApQtkQD
BmoYEBTF6BH1bywzDw5OHpnBSLbuoYtA3gkhnm/83MzFDcGn22pgo2Fv0MuHltWI
G2oExzje7szmcM6Xg3ZTKapJ3/p2J+P33tkJA1LWpg+DdgdQlqrjlXKwEnikszuB
9IMhbjoPeBzwiUtsBQmcwbVgwMzbscwoV5DJ/gLDCkgF4rp2uKEYAcBi8s9NGX6p
zQsb9Sb0/bKdCrszAcvUn4WYB6WbAPttvutYHtg/nZfXEeX/SgBueXo3lO9vzFlO
r3Zgk7WeucsEqa9Qo0VLOq28HykixM5mEJKsAQrNIqM1DqXgfDch8RJAHzgMBHFH
Qi9hJXk1/6OA2FPXQGcA9Td5Dt0i1Z7wMrAUMj3s9gNMVCD0hQqEKfUtpyV7KBAj
AO5j8Wr8KafnRm6czBCkcV0SRzHQSHdYyncozWwPgWOaRC9AY9fEDz8lBaSoB/C+
dyO/xZMTWoaWqkHozVoHIrCc4CAtZTye/5mxFhq15Q1Iy/NjelrMTCD1kql1dNIP
oOgfOYl1xLMQIBwrrCrgeRIvxEgKRf9KOLbSrS7+3vOKoxf+LD4AQfLci8dFyH+I
t0Z43nk93yTOI82RTdz5GwUXIKcvGhsJ8bgNlGTxM1R/Sl8Sg8diE2PRAp/fk7+g
CwOM8VkeyrDM2k1cy64d8USkbR7YtT3otyFQuQINBFO15mMBEAC5UuLii9ZLz6qH
fIJp35IOW9U8SOf7QFhzXR7NZ3DmJsd3f6Nb/habQFIHjm3K9wbpj+FvaW2oWRlF
VvYdzjUq6c82GUUjW1dnqgUvFwdmM8351n0YQ2TonmyaF882RvsRZrbJ65uvy7SQ
xlouXaAYOdqwLsPxBEOyOnMPSktW5V2UIWyxsNP3sADchWIGq9p5D3Y/loyIMsS1
dj+TjoQZOKSj7CuRT98+8yhGAY8YBEXu9r3I9o6mDkuPpAljuMc8r09Im6az2egt
K/szKt4Hy1bpSSBZU4W/XR7XwQNywmb3wxjmYT6Od3Mwj0jtzc3gQiH8hcEy3+BO
+NNmyzFVyIwOLziwjmEcw62S57wYKUVnHD2nglMsQa8Ve0e6ABBMEY7zGEGStva5
9rfgeh0jUMJiccGiUDTMs0tdkC6knYKbu/fdRqNYFoNuDcSeLEw4DdCuP01l2W4y
Y+fiK6hAcL25amjzc+yYo9eaaqTn6RATbzdhHQZdpAMxY+vNT0+NhP1Zo5gYBMR6
5Zp/VhFsf67ijb03FUtdw9N8dHwiR2m8vVA8kO/gCD6wS2p9RdXqrJ9JhnHYWjiV
uXR+f755ZAndyQfRtowMdQIoiXuJEXYw6XN+/BX81gJaynJYc0uw0MnxWQX+A5m8
HqEsbIFUXBYXPgbwXTm7c4IHGgXXdwARAQABiQI8BBgBAgAmAhsMFiEEir5O/PD0
skv/Kq+Q1XDy006geT0FAlvR7oMFCQvebyAACgkQ1XDy006geT2Hxw//Zha8j8Uc
4B+DmHhZIvPmHp9aFI4DWhC7CBDrYKztBz42H6eX+UsBu4p+uBDKdW9xJH+Qt/zF
nf/zB5Bhc/wFceVRCAkWxPdiIQeo5XQGjZeORjle7E9iunTko+5q1q9I7IgqWYrn
jRmulDvRhO7AoUrqGACDrV6t0F1/XPB8seR2i6axFmFlt1qBHasRq11yksdgNYiD
KXaovf7csDGPGOCWEKMX7BFGpdK/dWdNYfH0Arfom0U5TqNfvGtP4yRPx2bcs7/1
VXPj7IqhBgOtA9pwtMjFki8HGkqj7bB2ErFBOnSwqqNnNcbnhiO6D74SHVGAHhKZ
whaMPDg76EvjAezoLHg7KWYOyUkWJSLa+YoM9r4+PJuEuW/XuaZCNbrAhek+p3pD
ywhElvZe/2UFk619qKzwSbTzk7a90rxLQ2wwtd0vxAW/GyjWl4/kOMZhI5+LAk1l
REucE0fSQxzCTeXu2ObvFR9ic02IYGH3Koz8CrGReEI1J05041Y5IhKxdsvGOD2W
e7ymcblYW4Gz8eYFlLeNJkj/38R7qmNZ028XHzAZDCAWDiTFrnCoglyk+U0JRHfg
HTsdvoc8mBdT/s24LhnfAbpLizlrZZquuOF6NLQSkbuLtmIwf+h9ynEEJxEkGGWg
7JqB1tMjNHLkRpveO/DTYB+iffpba1nCguk=
=OgRY
-----END PGP PUBLIC KEY BLOCK-----
The SaltStack Security Team is available at security@saltstack.com for

View file

@ -71,20 +71,20 @@ The profile can be realized now with a salt command:
.. code-block:: bash
# salt-cloud -p centos7 my-centos7-clone
salt-cloud -p centos7 my-centos7-clone
This will create an instance named ``my-centos7-clone`` on the cloud host. Also
the minion id will be set to ``my-centos7-clone``.
If the command was executed on the salt-master, its Salt key will automatically
be signed on the master.
be accepted on the master.
Once the instance has been created with salt-minion installed, connectivity to
it can be verified with Salt:
.. code-block:: bash
# salt my-centos7-clone test.version
salt my-centos7-clone test.version
Required Settings
@ -97,26 +97,76 @@ The following settings are always required for libvirt:
provider: local-kvm
# the domain to clone
base_domain: base-centos7-64
# how to obtain the IP address of the cloned instance
# ip-learning or qemu-agent
ip_source: ip-learning
The ``ip_source`` setting controls how the IP address of the cloned instance is determined.
When using ``ip-learning`` the IP is requested from libvirt. This needs a recent libvirt
version and may only work for NAT networks. Another option is ``qemu-agent``; this requires
that the qemu-agent is installed and configured to run at startup in the base domain.
SSH Key Authentication
======================
Instead of specifying a password, an authorized key can be used for the minion setup. Ensure that
the ssh user of your base image has the public key you want to use in ~/.ssh/authorized_keys. If
you want to use a non-root user, you will likely want to configure salt-cloud to use sudo.
An example using root:
.. code-block:: yaml
centos7:
provider: local-kvm
# the domain to clone
base_domain: base-centos7-64
ssh_username: root
private_key: /path/to/private/key
An example using a non-root user:
.. code-block:: yaml
centos7:
provider: local-kvm
# the domain to clone
base_domain: base-centos7-64
ssh_username: centos
private_key: /path/to/private/key
sudo: True
sudo_password: "--redacted--"
Optional Settings
=================
.. code-block:: yaml
# Username and password
ssh_username: root
password: my-secret-password
centos7:
# ssh settings
# use forwarded agent instead of a local key
ssh_agent: True
ssh_port: 4910
# Cloning strategy: full or quick
clone_strategy: quick
# credentials
ssh_username: root
# password will be used for sudo if defined, use sudo_password if using ssh keys
password: my-secret-password
private_key: /path/to/private/key
sudo: True
sudo_password: "--redacted--"
# bootstrap options
deploy_command: sh /tmp/.saltcloud/deploy.sh
script_args: -F
# minion config
grains:
sushi: more tasty
# point at the another master at another port
minion:
master: 192.168.16.1
master_port: 5506
# libvirt settings
# clone_strategy: [ quick | full ] # default is full
clone_strategy: quick
# ip_source: [ ip-learning | qemu-agent ] # default is ip-learning
ip_source: qemu-agent
# validate_xml: [ false | true ] # default is true
validate_xml: false
The ``clone_strategy`` setting controls how the clone is done. In the case of ``full``, the disks
are copied, creating a standalone clone. If ``quick`` is used, the disks of the base domain
@ -126,3 +176,13 @@ the expense of slower write performance. The quick strategy has a number of requ
* The disks must be of type qcow2
* The base domain must be turned off
* The base domain must not change after creating the clone
The ``ip_source`` setting controls how the IP address of the cloned instance is determined.
When using ``ip-learning`` the IP is requested from libvirt. This needs a recent libvirt
version and may only work for NAT/routed networks where libvirt runs the dhcp server.
Another option is ``qemu-agent``; this requires that the qemu-agent is installed and
configured to run at startup in the base domain.
The ``validate_xml`` setting is available to disable xml validation by libvirt when cloning.
See also :mod:`salt.cloud.clouds.libvirt`

View file

@ -259,7 +259,7 @@ Fork a Repo Guide_>`_ and is well worth reading.
take a couple of hours. Depending on your GitHub notification settings
you may also receive an email message about the test results.
Test progress and results can be found at http://jenkins.saltstack.com/.
Test progress and results can be found at https://jenkinsci.saltstack.com/.
.. _which-salt-branch:

View file

@ -2,8 +2,8 @@
Salt Release Process
====================
The goal for Salt projects is to cut a new feature release every six
months. This document outlines the process for these releases, and the
The goal for Salt projects is to cut a new feature release every three to
four months. This document outlines the process for these releases, and the
subsequent bug fix releases which follow.
@ -11,44 +11,21 @@ Feature Release Process
=======================
When a new release is ready to be cut, the person responsible for cutting the
release will follow the following steps (written using the 2019.2.0 release as an
release will follow the following steps (written using the 3000 release as an
example):
#. Create first public draft of release notes with major features.
#. Remove any deprecations for the upcoming release.
#. Notify salt-users and salt-announce google groups when the feature freeze
branch creation will occur so they can try to get their work merged.
#. Create QA test plan. Review features planned for the release and determine if
there is sufficient test coverage.
#. Ensure all required features are merged.
#. Complete one last merge forward from the previous branch.
#. Create feature release branch with the name of the release. (ex. fluorine)
#. Create issue to start the process of deprecating for the next feature release.
#. Create jenkins jobs to test the new feature release branch.
#. Inform salt-users and salt-announce google groups feature branch and
freeze is complete.
#. Add new feature branch to salt-jenkins repo and the kitchen yaml file.
#. Fix tests failing in jenkins test runs.
#. Finalize QA test plan and add all required tests.
#. Run through a manual test run based off of the head of the feature branch.
#. Convert the feature release branch to the version number. For example (v2019.2)
This is based off of the year and month that is planned to release.
#. Migrate both the jenkins test jobs and salt-jenkins repo to the new branch number.
#. Notify salt-users and salt-announce google groups of the new version branch
number and migrate any PRs to the new branch.
#. Delete old feature release branch name (ex. fluorine)
#. Update all name references to version number in the docs. For example
all fluorine references in the docs needs to be moved to v2019.2.0
#. Create RC release branch. (ex. 2019.2.0.rc)
#. Create new jenkins test jobs with new RC release branch
#. Notify salt-users and salt-announce google groups of the new RC branch.
#. Fix tests failing in jenkins test runs.
all neon references in the docs need to be moved to v3000
#. Review the release notes with major features.
#. Generate the new man pages for the release.
#. Create internal RC tag for testing.
#. Create internal RC tag for testing from the head of the master branch.
#. Build latest windows, mac, ubuntu, debian and redhat packages.
#. Run manual and package tests against new RC packages.
#. Update release candidate docs with the new version. (ex. 2019.2.0rc1)
#. Push the internal tag live to salt's repo.
#. Publish release archive to pypi based off tag.
#. Push the RC packages live.
@ -56,15 +33,14 @@ example):
#. Triage incoming issues based on the new RC release.
#. Fix RC issues once they are categorized as a release blocker.
#. Depending on the issues found during the RC process make a decision
on whether to release based off the RC or go through another RC process,
repeating the steps starting at ensuring the tests are not failing.
on whether to release based off the RC or go through another RC process
#. If a RC is categorized as stable, build all required packages.
#. Test all release packages.
#. Test links from `repo.saltstack.com`_.
#. Update installation instructions with new release number at `repo.saltstack.com`_.
#. Update and build docs to include new version (2019.2) as the latest.
#. Update and build docs to include new version (3000) as the latest.
#. Pre-announce on salt-users google group that we are about to update our repo.
#. Publish release (v2019.2.0) archive to pypi based off tag.
#. Publish release (v3000) archive to pypi based off tag.
#. Publish all packages live to repo.
#. Publish the docs.
#. Create release at `github`_
@ -73,39 +49,39 @@ example):
community channel.
Maintenance and Bugfix Releases
===============================
Bugfix Releases
===============
Once a feature release branch has been cut from ``develop``, the branch moves
into a "feature freeze" state. The new release branch enters the ``merge-forward``
chain and only bugfixes should be applied against the new branch. Once major bugs
have been fixed, a bugfix release can be cut:
Once a feature release branch has been cut from the ``master`` branch, if
a serious bug or a CVE is found in the most recent release, a bugfix release
will need to be cut. A temporary branch will be created based off of the previous
release tag. For example, if it is determined that a 3000.1 release needs to occur
a 3000.1 branch will be created based off of the v3000 tag. The fixes that need
to go into 3000.1 will be added and merged into this branch. Here are the steps
for a bugfix release.
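Cutting that temporary branch can be sketched roughly as follows (the remote
name and the way fixes land on the branch are assumptions):

.. code-block:: bash

    # Create the 3000.1 bugfix branch from the v3000 release tag
    git fetch upstream --tags
    git checkout -b 3000.1 v3000
    # ...merge or cherry-pick the required fixes into this branch...
    git push upstream 3000.1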
#. Ensure all required bug fixes are merged.
#. Inform salt-users and salt-announce we are going to branch for the release.
#. Complete one last merge forward from the previous branch.
#. Create release branch with the version of the release. (ex. 3000.1)
#. Create jenkins jobs that test the new release branch.
#. Fix tests failing in jenkins test runs.
#. Run through a manual test run based off of the head of the branch.
#. Generate the new man pages for the release.
#. Create internal tag for testing. (ex. v3000.1)
#. Build all release packages.
#. Run manual and package tests against new packages.
#. Update installation instructions with new release number at `repo.saltstack.com`_.
#. Update and build docs to include new version. (ex. 3000.1)
#. Pre-announce on salt-users google groups that we are about to update our repo.
#. Push the internal tag live to salt's repo.
#. Publish release archive to pypi based off tag.
#. Push the packages live.
#. Publish release (v3000.1) archive to pypi based off tag.
#. Publish all packages live to repo.
#. Publish the docs.
#. Create release at `github`_
#. Update win-repo-ng with new salt versions.
#. Announce release is live to irc, salt-users, salt-announce and release slack channel.
For more information about the difference between the ``master`` branch and
bugfix release branches, please refer to the :ref:`Which Salt Branch?
<which-salt-branch>` section of Salt's :ref:`Contributing <contributing>`
documentation.
@@ -359,7 +359,7 @@ As soon as the pull request is merged, the changes will be added to the
next branch test run on Jenkins.
For a full list of currently running test environments, go to
https://jenkinsci.saltstack.com.
Using Salt-Cloud on Jenkins
@@ -120,7 +120,7 @@ Salt should run on any Unix-like platform so long as the dependencies are met.
Salt defaults to the `ZeroMQ`_ transport. The ``--salt-transport`` installation
option is available, but currently only supports the ``zeromq`` option. This
may be expanded in the future.
.. code-block:: bash
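    # Hypothetical invocation of the installation option described above;
    # the exact command in the full installation document may differ.
    python setup.py --salt-transport=zeromq install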
@@ -134,20 +134,10 @@ For openSUSE 15.1 run the following as root:
SUSE Linux Enterprise
---------------------
For SLE 15 and above run the following as root:

.. code-block:: bash

    zypper addrepo http://download.opensuse.org/repositories/systemsmanagement:/saltstack/SLE_12/systemsmanagement:saltstack.repo
    zypper refresh
    zypper install salt salt-minion salt-master
Now go to the :ref:`Configuring Salt<configuring-salt>` page.
@@ -9,17 +9,192 @@ Version 3000.1 is a bugfix release for :ref:`3000 <release-3000>`.
Statistics
==========
- Total Merges: **53**
- Total Issue References: **15**
- Total PR References: **54**
- Contributors: **16** (`Ch3LL`_, `UtahDave`_, `bryceml`_, `cmcmarrow`_, `dwoz`_, `frogunder`_, `garethgreenaway`_, `lorengordon`_, `mchugh19`_, `oeuftete`_, `raddessi`_, `s0undt3ch`_, `sjorge`_, `terminalmage`_, `twangboy`_, `waynew`_)
Changelog for v3000..v3000.1
============================
*Generated at: 2020-03-27 16:48:41 UTC*
* **PR** `#56455`_: (`s0undt3ch`_) Fix gitpython windows requirement
@ *2020-03-27 16:31:57 UTC*
* c5a700e01e Merge pull request `#56455`_ from s0undt3ch/hotfix/gitpython
* d9791c393f Revert and fix windows requirements
* 4b573c1c94 Revert "Fix win deps"
* **PR** `#56446`_: (`frogunder`_) 3000.1 releasenotes updates
@ *2020-03-24 20:28:23 UTC*
* 7ba36325d9 Merge pull request `#56446`_ from frogunder/releasenotes_3000.1_updates
* 6b47f474af 3000.1 releasenotes updates
* **PR** `#56435`_: (`twangboy`_) Update mac build scripts
@ *2020-03-24 19:47:40 UTC*
* 4d2bc7004d Merge pull request `#56435`_ from twangboy/update_mac_build_scripts
* 5d244b0aa6 Merge branch 'master' into update_mac_build_scripts
* 8fdf52a243 Update gitpython to 2.1.15
* afcedc5232 Remove --ignore-installed
* e6cc2a6192 Separate pyobjc requirements
* 262ad2e98a Compile static requirements
* 224f72d0f2 Update noxfile and .precommit
* 68a36f2e37 Add req_ext.txt
* 5851a5c2cd Roll back changes, fix dependencies
* **ISSUE** `#56433`_: (`Ch3LL`_) integration.states.test_pip_state.PipStateTest.test_pip_installed_specific_env failing on MAC (refs: `#56436`_)
* **PR** `#56436`_: (`Ch3LL`_) Fix `#56433`_ - test_pip_installed_specific_env
@ *2020-03-24 19:47:20 UTC*
* 8a8ae8ebe4 Merge pull request `#56436`_ from Ch3LL/pip_custom_pypi
* 55655ff96c Merge branch 'master' into pip_custom_pypi
* **PR** `#56423`_: (`Ch3LL`_) Update changelog with package fixes
@ *2020-03-24 16:42:17 UTC*
* 3a993d73a9 Merge pull request `#56423`_ from Ch3LL/changelog_again
* 963c16e1a7 update pr number for mac build changes in changelog
* 83e22b77c7 Update changelog with package fixes
* **PR** `#56417`_: (`twangboy`_) Update mac build scripts
@ *2020-03-23 18:45:34 UTC*
* d2a5bd8add Merge pull request `#56417`_ from twangboy/update_mac_build_scripts
* 3bda8ddb82 Update noxfile and pre-commit
* de58c52d66 Merge branch 'update_mac_build_scripts' of https://github.com/twangboy/salt into update_mac_build_scripts
* e7f08d5349 Update static requirements
* a53977de5b Merge branch 'update_mac_build_scripts' of github.com:twangboy/salt into update_mac_build_scripts
* 04e5cde9dd `pkg/osx/req_ext.txt` no longer exists
* be4a272d07 Update static requirements
* 02dfe4119c Add pyobjc in its own requirements file
* 6b2ac2be64 Remove pyobjc, enforce pip install location
* 30ca5d04ec Remove cryptography, it gets installed by pyopenssl
* fda8abf4a8 Update static requirements
* 732ecc83c1 Update pre-commit config
* 1fa9864e3d Don't set a version for pyopenssl
* 461b198978 Update dependencies, remove req_ext.txt
* **PR** `#56418`_: (`Ch3LL`_) Ensure version.py included before we install
@ *2020-03-23 18:27:46 UTC*
* 74575a6993 Merge pull request `#56418`_ from Ch3LL/egg_version
* 85d7c784ef Fix integration setup egg test
* a8a22a4040 Ensure version.py included before we install
* 86fe450c82 Fix `#56433`_ - test_pip_installed_specific_env
* **PR** `#56403`_: (`frogunder`_) update 3000.1 releasenotes
@ *2020-03-17 23:50:21 UTC*
* 249367b462 Merge pull request `#56403`_ from frogunder/update_releasenotes_3000.1
* 9d972c96e8 update 3000.1 releasenotes
* **PR** `#56398`_: (`Ch3LL`_) Add additional PRs to 3000.1 changelog
@ *2020-03-17 18:08:15 UTC*
* 0de5c1e136 Merge pull request `#56398`_ from Ch3LL/change_3000.1
* 79c337b3db Add additional PRs to 3000.1 changelog
* **PR** `#56376`_: (`twangboy`_) Fix win deps
@ *2020-03-17 17:02:46 UTC*
* 5ac09decb9 Merge pull request `#56376`_ from twangboy/fix_win_deps
* 6c83beeb9e Fix win deps
* **PR** `#56378`_: (`Ch3LL`_) Include _version.py if building wheel
@ *2020-03-17 17:01:33 UTC*
* e72a8d2cbc Merge pull request `#56378`_ from Ch3LL/wheel_version
* 22cccd2107 Use virtualenv test helper that already exists and fix setup.py
* 293b1fddf2 cleanup code
* ddfb065bfb add bdist_wheel test
* fceff0287c ensure name is included in new version
* 0cbf6d3246 Ensure SaltStackVersion attributes in _version.py correct
* 39cdf5382d Include _version.py if building wheel
* **PR** `#56387`_: (`bryceml`_) update gpg key expiration date
@ *2020-03-17 16:59:30 UTC*
* 6a9326fb70 Merge pull request `#56387`_ from bryceml/update_gpg_key
* b74b26ccf3 update gpg key expiration date
* **PR** `#55822`_: (`cmcmarrow`_) fix_Indefinitely_code
@ *2020-03-16 17:34:11 UTC*
* fcee692c4a Merge pull request `#55822`_ from cmcmarrow/win_task_repeat
* e257fb2804 Merge branch 'master' into win_task_repeat
* 07cada0f8f Merge branch 'master' into win_task_repeat
* 4b80301338 Merge branch 'master' into win_task_repeat
* 9df94569c4 Merge branch 'master' into win_task_repeat
* 22a2d0b9ec Merge branch 'master' into win_task_repeat
* dee9c134e7 Merge branch 'master' into win_task_repeat
* beaf05d514 Update test_win_task.py
* 6e923f75fc Merge branch 'master' into win_task_repeat
* 5839da81ee add test
* 2aa6338ed0 fix Indefinitely code
* **PR** `#56373`_: (`frogunder`_) update 3000.1 releasenotes
@ *2020-03-13 18:58:43 UTC*
* c11ef89200 Merge pull request `#56373`_ from frogunder/releasenotes_3000.1
* a5ca8b5277 Fix doc test errors
* 47e483187e update 3000.1 releasenotes
* **PR** `#56365`_: (`Ch3LL`_) Update 3000.1 changelog
@ *2020-03-13 17:21:02 UTC*
@@ -405,7 +580,7 @@ Changelog for v3000..v3000.1
* **ISSUE** `#56121`_: (`githubcdr`_) salt-minion broken after upgrade to 3000 (refs: `#56143`_)
* **ISSUE** `#51854`_: (`Oloremo`_) Fluorine: minion_pillar_cache: True leads to exception (refs: `#56143`_, `#52195`_)
* **PR** `#56143`_: (`waynew`_) Use encoding when caching pillar data
@ *2020-03-10 01:33:37 UTC*
@@ -478,6 +653,7 @@ Changelog for v3000..v3000.1
.. _`#52195`: https://github.com/saltstack/salt/pull/52195
.. _`#53152`: https://github.com/saltstack/salt/issues/53152
.. _`#55185`: https://github.com/saltstack/salt/issues/55185
.. _`#55822`: https://github.com/saltstack/salt/pull/55822
.. _`#55888`: https://github.com/saltstack/salt/pull/55888
.. _`#55894`: https://github.com/saltstack/salt/pull/55894
.. _`#55906`: https://github.com/saltstack/salt/pull/55906
@@ -528,9 +704,24 @@ Changelog for v3000..v3000.1
.. _`#56358`: https://github.com/saltstack/salt/pull/56358
.. _`#56360`: https://github.com/saltstack/salt/pull/56360
.. _`#56365`: https://github.com/saltstack/salt/pull/56365
.. _`#56373`: https://github.com/saltstack/salt/pull/56373
.. _`#56376`: https://github.com/saltstack/salt/pull/56376
.. _`#56378`: https://github.com/saltstack/salt/pull/56378
.. _`#56387`: https://github.com/saltstack/salt/pull/56387
.. _`#56398`: https://github.com/saltstack/salt/pull/56398
.. _`#56403`: https://github.com/saltstack/salt/pull/56403
.. _`#56417`: https://github.com/saltstack/salt/pull/56417
.. _`#56418`: https://github.com/saltstack/salt/pull/56418
.. _`#56423`: https://github.com/saltstack/salt/pull/56423
.. _`#56433`: https://github.com/saltstack/salt/issues/56433
.. _`#56435`: https://github.com/saltstack/salt/pull/56435
.. _`#56436`: https://github.com/saltstack/salt/pull/56436
.. _`#56446`: https://github.com/saltstack/salt/pull/56446
.. _`#56455`: https://github.com/saltstack/salt/pull/56455
.. _`Ch3LL`: https://github.com/Ch3LL
.. _`Oloremo`: https://github.com/Oloremo
.. _`UtahDave`: https://github.com/UtahDave
.. _`bryceml`: https://github.com/bryceml
.. _`cmcmarrow`: https://github.com/cmcmarrow
.. _`dwoz`: https://github.com/dwoz
.. _`finalduty`: https://github.com/finalduty
@@ -16,3 +16,47 @@ also support the syntax used in :py:mod:`module.run <salt.states.module.run>`.
The old syntax for the mine_function - as a dict, or as a list with dicts that
contain more than exactly one key - is still supported but discouraged in favor
of the more uniform syntax of module.run.
New Grains
==========
systempath
----------
This grain provides the same information as the ``path`` grain, only formatted
as a list of directories.
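For example, the two grains can be compared side by side (the minion id below
is hypothetical):

.. code-block:: bash

    # 'path' returns the PATH environment variable as a single string,
    # while 'systempath' returns the same directories as a list
    salt 'minion1' grains.item path systempath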
================
Salt-SSH updates
================
A new Salt-SSH roster option `ssh_pre_flight` has been added. This enables you to run a
script before Salt-SSH tries to run any commands. You can set this option in the roster
for a specific minion or use the `roster_defaults` to set it for all minions.
Example for setting `ssh_pre_flight` for a specific host in the roster file:
.. code-block:: yaml
minion1:
host: localhost
user: root
passwd: P@ssword
ssh_pre_flight: /srv/salt/pre_flight.sh
Example for setting `ssh_pre_flight` using roster_defaults, so all minions
run this script.
.. code-block:: yaml
roster_defaults:
ssh_pre_flight: /srv/salt/pre_flight.sh
The `ssh_pre_flight` script will only run if the thin dir is not currently on the
minion. If you want to force the script to run, you have the following options
(see the example below):
* Wipe the thin dir on the targeted minion using the -w arg.
* Set ssh_run_pre_flight to True in the config.
* Run salt-ssh with the --pre-flight arg.
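For example (the minion id and the function being run are hypothetical):

.. code-block:: bash

    # Force the pre-flight script to run by wiping the thin dir first
    salt-ssh -w 'minion1' test.ping

    # Or force it explicitly with the --pre-flight flag
    salt-ssh --pre-flight 'minion1' test.ping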
@@ -242,6 +242,59 @@ Boolean-style options should be specified in their YAML representation.
Lastly, you can create ``~/.salt/Saltfile`` and ``salt-ssh``
will automatically load it by default.
Advanced options with salt-ssh
==============================
Salt's support for custom grains and custom modules also applies to salt-ssh.
This works by first packing the custom grains into the thin tarball before it
is deployed on the target system. For this to happen, the ``config`` file must
be explicit enough to indicate where the custom grains are located on the
machine, like so:
.. code-block:: yaml
file_client: local
file_roots:
base:
- /home/user/.salt
- /home/user/.salt/_states
- /home/user/.salt/_grains
module_dirs:
- /home/user/.salt
pillar_roots:
base:
- /home/user/.salt/_pillar
root_dir: /tmp/.salt-root
It's better to be explicit rather than implicit in this situation. This will
allow URLs under `salt://` to be resolved, such as `salt://_grains/custom_grain.py`.
One can confirm this by executing a properly set up salt-ssh minion with
`salt-ssh minion grains.items`. During this process, a `saltutil.sync_all` is
run to discover the contents of the thin tarball, which is then consumed.
Output similar to the following indicates a successful sync with custom grains.
.. code-block:: yaml
local:
----------
...
executors:
grains:
- grains.custom_grain
log_handlers:
...
This is especially important when using a custom `file_roots` that differs from
`/etc/salt/`.
.. note::
Please see https://docs.saltstack.com/en/latest/topics/grains/ for more
information on grains and custom grains.
Debugging salt-ssh
==================
@@ -61,6 +61,27 @@ The information which can be stored in a roster ``target`` is the following:
# components. Defaults to /tmp/salt-<hash>.
cmd_umask: # umask to enforce for the salt-call command. Should be in
# octal (so for 0o077 in YAML you would do 0077, or 63)
ssh_pre_flight: # Path to a script that will run before all other salt-ssh
# commands. Will only run the first time when the thin dir
# does not exist, unless --pre-flight is passed to salt-ssh
# command or ssh_run_pre_flight is set to true in the config
# Added in Sodium Release.
.. _ssh_pre_flight:
ssh_pre_flight
--------------
A Salt-SSH roster option `ssh_pre_flight` was added in the Sodium release. This enables
you to run a script before Salt-SSH tries to run any commands. You can set this option
in the roster for a specific minion or use the `roster_defaults` to set it for all minions.
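For illustration, a minimal pre-flight script might look like the following (the
contents are entirely hypothetical; anything that prepares the target, such as
installing a Python interpreter, is a typical use):

.. code-block:: bash

    #!/bin/sh
    # Hypothetical pre-flight script: make sure a Python interpreter exists
    # before Salt-SSH deploys and runs the thin tarball on the target
    if ! command -v python3 >/dev/null 2>&1; then
        yum install -y python3 2>/dev/null || apt-get install -y python3
    fi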
This script will only run if the thin dir is not currently on the minion. This means it will
only run on the first run of salt-ssh or if you have recently wiped out your thin dir. If
you want to intentionally run the script again you have a couple of options:
* Wipe out your thin dir by using the -w salt-ssh arg.
* Set ssh_run_pre_flight to True in the config.
* Run salt-ssh with the --pre-flight arg.
.. _roster_defaults: