Mirror of https://github.com/saltstack/salt.git (synced 2025-04-17 10:10:20 +00:00)

Commit 962daf491c: Merge branch 'master' into master-port-49955
160 changed files with 3952 additions and 1205 deletions
@@ -304,6 +304,207 @@ repos:
          - --py-version=3.7
          - --platform=linux

      - id: pip-tools-compile
        alias: compile-linux-py3.8-zmq-requirements
        name: Linux Py3.8 ZeroMQ Requirements
        files: ^requirements/((base|zeromq|pytest)\.txt|static/linux\.in)$
        exclude: ^requirements/static/(centos-6|amzn-2018\.03|lint|cloud|docs|darwin|windows)\.in$
        args:
          - -v
          - --py-version=3.8
          - --platform=linux
          - --include=requirements/base.txt
          - --include=requirements/zeromq.txt
          - --include=requirements/pytest.txt

      - id: pip-tools-compile
        alias: compile-darwin-py3.8-zmq-requirements
        name: Darwin Py3.8 ZeroMQ Requirements
        files: ^(pkg/osx/(req|req_ext)\.txt|requirements/((base|zeromq|pytest)\.txt|static/darwin\.in))$
        args:
          - -v
          - --py-version=3.8
          - --platform=darwin
          - --include=pkg/osx/req.txt
          - --include=pkg/osx/req_ext.txt
          - --include=requirements/base.txt
          - --include=requirements/zeromq.txt
          - --include=requirements/pytest.txt
          - --passthrough-line-from-input=^pyobjc(.*)$

      # Commented out since pywin32 and pymssql do not have packages or support for Py >= 3.8
      # - id: pip-tools-compile
      #   alias: compile-windows-py3.8-zmq-requirements
      #   name: Windows Py3.8 ZeroMQ Requirements
      #   files: ^(pkg/windows/(req|req_win)\.txt|requirements/((base|zeromq|pytest)\.txt|static/windows\.in))$
      #   args:
      #     - -v
      #     - --py-version=3.8
      #     - --platform=windows
      #     - --include=pkg/windows/req.txt
      #     - --include=pkg/windows/req_win.txt
      #     - --include=requirements/base.txt
      #     - --include=requirements/zeromq.txt
      #     - --include=requirements/pytest.txt

      - id: pip-tools-compile
        alias: compile-cloud-py3.8-requirements
        name: Cloud Py3.8 Requirements
        files: ^requirements/(static/cloud\.in)$
        args:
          - -v
          - --py-version=3.8

      - id: pip-tools-compile
        alias: compile-doc-requirements
        name: Docs Py3.8 Requirements
        files: ^requirements/((base|zeromq|pytest)\.txt|static/docs\.in)$
        args:
          - -v
          - --py-version=3.8
          - --platform=linux

      - id: pip-tools-compile
        alias: compile-linux-crypto-py3.8-requirements
        name: Linux Py3.8 Crypto Requirements
        files: ^requirements/(crypto\.txt|static/crypto\.in)$
        args:
          - -v
          - --py-version=3.8
          - --platform=linux
          - --out-prefix=linux

      - id: pip-tools-compile
        alias: compile-darwin-crypto-py3.8-requirements
        name: Darwin Py3.8 Crypto Requirements
        files: ^requirements/(crypto\.txt|static/crypto\.in)$
        args:
          - -v
          - --py-version=3.8
          - --platform=darwin
          - --out-prefix=darwin

      # Commented out since pywin32 and pymssql do not have packages or support for Py >= 3.8
      # - id: pip-tools-compile
      #   alias: compile-windows-crypto-py3.8-requirements
      #   name: Windows Py3.8 Crypto Requirements
      #   files: ^requirements/(crypto\.txt|static/crypto\.in)$
      #   args:
      #     - -v
      #     - --py-version=3.8
      #     - --platform=windows
      #     - --out-prefix=windows

      - id: pip-tools-compile
        alias: compile-lint-py3.8-requirements
        name: Lint Py3.8 Requirements
        files: ^requirements/static/lint\.in$
        args:
          - -v
          - --py-version=3.8
          - --platform=linux


      - id: pip-tools-compile
        alias: compile-linux-py3.9-zmq-requirements
        name: Linux Py3.9 ZeroMQ Requirements
        files: ^requirements/((base|zeromq|pytest)\.txt|static/linux\.in)$
        exclude: ^requirements/static/(centos-6|amzn-2018\.03|lint|cloud|docs|darwin|windows)\.in$
        args:
          - -v
          - --py-version=3.9
          - --platform=linux
          - --include=requirements/base.txt
          - --include=requirements/zeromq.txt
          - --include=requirements/pytest.txt

      - id: pip-tools-compile
        alias: compile-darwin-py3.9-zmq-requirements
        name: Darwin Py3.9 ZeroMQ Requirements
        files: ^(pkg/osx/(req|req_ext)\.txt|requirements/((base|zeromq|pytest)\.txt|static/darwin\.in))$
        args:
          - -v
          - --py-version=3.9
          - --platform=darwin
          - --include=pkg/osx/req.txt
          - --include=pkg/osx/req_ext.txt
          - --include=requirements/base.txt
          - --include=requirements/zeromq.txt
          - --include=requirements/pytest.txt
          - --passthrough-line-from-input=^pyobjc(.*)$

      # Commented out since pywin32 and pymssql do not have packages or support for Py >= 3.8
      # - id: pip-tools-compile
      #   alias: compile-windows-py3.9-zmq-requirements
      #   name: Windows Py3.9 ZeroMQ Requirements
      #   files: ^(pkg/windows/(req|req_win)\.txt|requirements/((base|zeromq|pytest)\.txt|static/windows\.in))$
      #   args:
      #     - -v
      #     - --py-version=3.9
      #     - --platform=windows
      #     - --include=pkg/windows/req.txt
      #     - --include=pkg/windows/req_win.txt
      #     - --include=requirements/base.txt
      #     - --include=requirements/zeromq.txt
      #     - --include=requirements/pytest.txt

      - id: pip-tools-compile
        alias: compile-cloud-py3.9-requirements
        name: Cloud Py3.9 Requirements
        files: ^requirements/(static/cloud\.in)$
        args:
          - -v
          - --py-version=3.9

      - id: pip-tools-compile
        alias: compile-doc-requirements
        name: Docs Py3.9 Requirements
        files: ^requirements/((base|zeromq|pytest)\.txt|static/docs\.in)$
        args:
          - -v
          - --py-version=3.9
          - --platform=linux

      - id: pip-tools-compile
        alias: compile-linux-crypto-py3.9-requirements
        name: Linux Py3.9 Crypto Requirements
        files: ^requirements/(crypto\.txt|static/crypto\.in)$
        args:
          - -v
          - --py-version=3.9
          - --platform=linux
          - --out-prefix=linux

      - id: pip-tools-compile
        alias: compile-darwin-crypto-py3.9-requirements
        name: Darwin Py3.9 Crypto Requirements
        files: ^requirements/(crypto\.txt|static/crypto\.in)$
        args:
          - -v
          - --py-version=3.9
          - --platform=darwin
          - --out-prefix=darwin

      # Commented out since pywin32 and pymssql do not have packages or support for Py >= 3.8
      # - id: pip-tools-compile
      #   alias: compile-windows-crypto-py3.9-requirements
      #   name: Windows Py3.9 Crypto Requirements
      #   files: ^requirements/(crypto\.txt|static/crypto\.in)$
      #   args:
      #     - -v
      #     - --py-version=3.9
      #     - --platform=windows
      #     - --out-prefix=windows

      - id: pip-tools-compile
        alias: compile-lint-py3.9-requirements
        name: Lint Py3.9 Requirements
        files: ^requirements/static/lint\.in$
        args:
          - -v
          - --py-version=3.9
          - --platform=linux

  - repo: https://github.com/timothycrosley/isort
    rev: "1e78a9acf3110e1f9721feb591f89a451fc9876a"
    hooks:

@@ -319,7 +520,7 @@ repos:
        )$

  - repo: https://github.com/psf/black
    rev: 19.10b0
    rev: stable
    hooks:
      - id: black
        # This tells pre-commit not to pass files to black.

@@ -331,6 +532,7 @@ repos:
            tests/kitchen/.*
        )$


  - repo: https://github.com/saltstack/salt-nox-pre-commit
    rev: master
    hooks:
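Each of the new hooks is gated by its ``files``/``exclude`` regexes: pre-commit matches changed paths against ``files`` and then drops anything that matches ``exclude``. A quick, illustrative way to check which paths would trigger, say, the "Linux Py3.8 ZeroMQ Requirements" hook (the two patterns are copied verbatim from the config above; the sample paths and the snippet itself are not part of the change):

    import re

    FILES = re.compile(r"^requirements/((base|zeromq|pytest)\.txt|static/linux\.in)$")
    EXCLUDE = re.compile(
        r"^requirements/static/(centos-6|amzn-2018\.03|lint|cloud|docs|darwin|windows)\.in$"
    )

    for path in (
        "requirements/base.txt",
        "requirements/static/linux.in",
        "requirements/static/lint.in",
    ):
        # A path triggers the hook only if it matches FILES and not EXCLUDE.
        selected = bool(FILES.search(path)) and not EXCLUDE.search(path)
        print(path, "->", "hook runs" if selected else "skipped")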
@@ -33,6 +33,7 @@ Versions are `MAJOR.PATCH`.
### Deprecated

### Changed
- [#56730](https://github.com/saltstack/salt/pull/56730) - Backport #52992

### Fixed
COPYING (158 lines, deleted)
@@ -1,158 +0,0 @@
Format: http://www.debian.org/doc/packaging-manuals/copyright-format/1.0/
|
||||
Upstream-Name: salt
|
||||
Upstream-Contact: salt-users@googlegroups.com
|
||||
Source: https://github.com/saltstack/salt
|
||||
|
||||
Files: *
|
||||
Copyright: 2014 SaltStack Team
|
||||
License: Apache-2.0
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
.
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
.
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
.
|
||||
On Debian systems, the full text of the Apache License, Version 2.0 can be
|
||||
found in the file
|
||||
`/usr/share/common-licenses/Apache-2.0'.
|
||||
|
||||
Files: debian/*
|
||||
Copyright: 2013 Joe Healy <joehealy@gmail.com>
|
||||
2012 Michael Prokop <mika@debian.org>
|
||||
2012 Christian Hofstaedtler <christian@hofstaedtler.name>
|
||||
2012 Ulrich Dangel <mru@spamt.net>
|
||||
2012 Corey Quinn <corey@sequestered.net>
|
||||
2011 Aaron Toponce <aaron.toponce@gmail.com>
|
||||
License: Apache-2.0
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
.
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
.
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
.
|
||||
On Debian systems, the full text of the Apache License, Version 2.0 can be
|
||||
found in the file
|
||||
`/usr/share/common-licenses/Apache-2.0'.
|
||||
|
||||
Files: salt/auth/pam.py
|
||||
Copyright: 2007 Chris AtLee <chris@atlee.ca>
|
||||
License: MIT License
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
.
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
.
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
THE SOFTWARE.
|
||||
|
||||
Files: doc/_ext/youtube.py
|
||||
Copyright: 2009 Chris Pickel <sfiera@gmail.com>
|
||||
License: BSD-2-clause
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions are
|
||||
met:
|
||||
.
|
||||
* Redistributions of source code must retain the above copyright
|
||||
notice, this list of conditions and the following disclaimer.
|
||||
.
|
||||
* Redistributions in binary form must reproduce the above copyright
|
||||
notice, this list of conditions and the following disclaimer in the
|
||||
documentation and/or other materials provided with the distribution.
|
||||
.
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
Files: salt/ext/six.py
|
||||
Copyright: 2010-2014 Benjamin Peterson
|
||||
License: MIT License
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
.
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
.
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
THE SOFTWARE.
|
||||
|
||||
Files: doc/_ext/images
|
||||
Copyright: 2013 SaltStack Team
|
||||
License: Apache-2.0
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
.
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
.
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
.
|
||||
On Debian systems, the full text of the Apache License, Version 2.0 can be
|
||||
found in the file
|
||||
`/usr/share/common-licenses/Apache-2.0'.
|
||||
.
|
||||
Files in this directory were created in-house.
|
||||
|
||||
Files: tests/utils/cptestcase.py
|
||||
Copyright: (c) 2014 Adam Hajari
|
||||
The MIT License (MIT)
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
LICENSE (353 lines changed)
@@ -1,6 +1,192 @@
Salt - Remote execution system
|
||||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
Copyright 2014-2019 SaltStack Team
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
APPENDIX: How to apply the Apache License to your work.
|
||||
|
||||
To apply the Apache License to your work, attach the following
|
||||
boilerplate notice, with the fields enclosed by brackets "{}"
|
||||
replaced with your own identifying information. (Don't include
|
||||
the brackets!) The text should be enclosed in the appropriate
|
||||
comment syntax for the file format. We also recommend that a
|
||||
file or class name and description of purpose be included on the
|
||||
same "printed page" as the copyright notice for easier
|
||||
identification within third-party archives.
|
||||
|
||||
Copyright {yyyy} {name of copyright owner}
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
|
@@ -14,3 +200,166 @@
|
|||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
|
||||
===========================================================================
|
||||
|
||||
Below is a summary of the licensing used by external modules that are
|
||||
bundled with SaltStack.
|
||||
|
||||
Format: http://www.debian.org/doc/packaging-manuals/copyright-format/1.0/
|
||||
Upstream-Name: salt
|
||||
Upstream-Contact: salt-users@googlegroups.com
|
||||
Source: https://github.com/saltstack/salt
|
||||
|
||||
Files: *
|
||||
Copyright: 2014 SaltStack Team
|
||||
License: Apache-2.0
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
.
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
.
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
.
|
||||
On Debian systems, the full text of the Apache License, Version 2.0 can be
|
||||
found in the file
|
||||
`/usr/share/common-licenses/Apache-2.0'.
|
||||
|
||||
Files: debian/*
|
||||
Copyright: 2013 Joe Healy <joehealy@gmail.com>
|
||||
2012 Michael Prokop <mika@debian.org>
|
||||
2012 Christian Hofstaedtler <christian@hofstaedtler.name>
|
||||
2012 Ulrich Dangel <mru@spamt.net>
|
||||
2012 Corey Quinn <corey@sequestered.net>
|
||||
2011 Aaron Toponce <aaron.toponce@gmail.com>
|
||||
License: Apache-2.0
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
.
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
.
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
.
|
||||
On Debian systems, the full text of the Apache License, Version 2.0 can be
|
||||
found in the file
|
||||
`/usr/share/common-licenses/Apache-2.0'.
|
||||
|
||||
Files: salt/auth/pam.py
|
||||
Copyright: 2007 Chris AtLee <chris@atlee.ca>
|
||||
License: MIT License
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
.
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
.
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
THE SOFTWARE.
|
||||
|
||||
Files: doc/_ext/youtube.py
|
||||
Copyright: 2009 Chris Pickel <sfiera@gmail.com>
|
||||
License: BSD-2-clause
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions are
|
||||
met:
|
||||
.
|
||||
* Redistributions of source code must retain the above copyright
|
||||
notice, this list of conditions and the following disclaimer.
|
||||
.
|
||||
* Redistributions in binary form must reproduce the above copyright
|
||||
notice, this list of conditions and the following disclaimer in the
|
||||
documentation and/or other materials provided with the distribution.
|
||||
.
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
Files: salt/ext/six.py
|
||||
Copyright: 2010-2014 Benjamin Peterson
|
||||
License: MIT License
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
.
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
.
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
THE SOFTWARE.
|
||||
|
||||
Files: doc/_ext/images
|
||||
Copyright: 2013 SaltStack Team
|
||||
License: Apache-2.0
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
.
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
.
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
.
|
||||
On Debian systems, the full text of the Apache License, Version 2.0 can be
|
||||
found in the file
|
||||
`/usr/share/common-licenses/Apache-2.0'.
|
||||
.
|
||||
Files in this directory were created in-house.
|
||||
|
||||
Files: tests/utils/cptestcase.py
|
||||
Copyright: (c) 2014 Adam Hajari
|
||||
The MIT License (MIT)
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
|
|
@@ -78,3 +78,10 @@ services`_ offerings.
.. _PyCryptodome: https://pypi.org/project/pycryptodome/
.. _Issue #52674: https://github.com/saltstack/salt/issues/52674
.. _Issue #54115: https://github.com/saltstack/salt/issues/54115

License
=======

SaltStack is licensed by the SaltStack Team under the Apache 2.0 license. Please see
the LICENSE file for the full text of the Apache license, followed by a full summary
of the licensing used by external modules.
changelog/56186.fixed (new file, 1 line)
@@ -0,0 +1 @@
Pillar data is correctly included from `init.sls` file.

changelog/56844.fixed (new file, 1 line)
@@ -0,0 +1 @@
Allow correct failure information to show up when calling `win_interfaces`

changelog/7424.added (new file, 1 line)
@@ -0,0 +1 @@
Added `validate` to tls module.

changelog/8875.added (new file, 1 line)
@@ -0,0 +1 @@
Pillar relative includes.
@@ -378,6 +378,63 @@ exactly like the ``require`` requisite (the watching state will execute if

.. note::

   If the watching state's ``changes`` key contains values, then ``mod_watch``
   will not be called. If you're using ``watch`` or ``watch_in``, it's a good
   idea to have a state that only enforces one attribute, such as splitting
   ``service.running`` out into its own state and having ``service.enabled``
   in another.

One common source of confusion is expecting ``mod_watch`` to be called for
every necessary change. You might be tempted to write something like this:

.. code-block:: yaml

    httpd:
      service.running:
        - enable: True
        - watch:
          - file: httpd-config

    httpd-config:
      file.managed:
        - name: /etc/httpd/conf/httpd.conf
        - source: salt://httpd/files/apache.conf

If your service is already running but not enabled, you might expect that Salt
will be able to tell that, since the config file changed, your service needs to
be restarted. This is not the case. Because the service needs to be enabled,
that change will be made and ``mod_watch`` will never be triggered. In this
case, changes to your ``apache.conf`` will fail to be loaded. If you want to
ensure that your service always reloads, the correct way to handle this is
either to ensure that your service is not running before applying your state,
or simply to make sure that ``service.running`` is in a state on its own:

.. code-block:: yaml

    enable-httpd:
      service.enabled:
        - name: httpd

    start-httpd:
      service.running:
        - name: httpd
        - watch:
          - file: httpd-config

    httpd-config:
      file.managed:
        - name: /etc/httpd/conf/httpd.conf
        - source: salt://httpd/files/apache.conf

Now that ``service.running`` is its own state, changes to ``service.enabled``
will no longer prevent ``mod_watch`` from being triggered, so your ``httpd``
service will get restarted as you want.

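For reference, ``mod_watch`` is simply a function a state module may define; per the note above, it is invoked when a watched state reports changes and the watching state's own run produced no changes. The following is a minimal illustrative sketch of a custom state module, not part of this documentation change; the module layout and the ``_restart`` helper are assumptions used only to show the shape of the convention.

.. code-block:: python

    def running(name):
        """Regular state function; used when there is nothing to react to."""
        return {"name": name, "changes": {}, "result": True, "comment": "already running"}


    def mod_watch(name, **kwargs):
        """Invoked for this state when a watched requisite reports changes
        (and this state's own run produced no changes of its own)."""
        restarted = _restart(name)  # hypothetical restart helper
        return {
            "name": name,
            "changes": {"restarted": restarted} if restarted else {},
            "result": restarted,
            "comment": "restarted because a watched state changed",
        }


    def _restart(name):
        # Placeholder for real restart logic (e.g. shelling out to the init system).
        return True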
.. _requisites-listen:

listen
~~~~~~

Not all state modules contain ``mod_watch``. If ``mod_watch`` is absent
from the watching state module, the ``watch`` requisite behaves exactly
like a ``require`` requisite.

noxfile.py (14 lines changed)

@@ -45,7 +45,7 @@ SITECUSTOMIZE_DIR = os.path.join(REPO_ROOT, "tests", "support", "coverage")
IS_DARWIN = sys.platform.lower().startswith("darwin")
IS_WINDOWS = sys.platform.lower().startswith("win")
# Python versions to run against
_PYTHON_VERSIONS = ("2", "2.7", "3", "3.4", "3.5", "3.6", "3.7")
_PYTHON_VERSIONS = ("2", "2.7", "3", "3.4", "3.5", "3.6", "3.7", "3.8", "3.9")

# Nox options
# Reuse existing virtualenvs
@@ -889,9 +889,11 @@ def _pytest(session, coverage, cmd_args):

    try:
        if coverage is True:
            _run_with_coverage(session, "coverage", "run", "-m", "py.test", *cmd_args)
            _run_with_coverage(
                session, "python", "-m", "coverage", "run", "-m", "pytest", *cmd_args
            )
        else:
            session.run("py.test", *cmd_args, env=env)
            session.run("python", "-m", "pytest", *cmd_args, env=env)
    except CommandFailed:  # pylint: disable=try-except-raise
        # Not rerunning failed tests for now
        raise
@@ -905,9 +907,11 @@ def _pytest(session, coverage, cmd_args):
        cmd_args[idx] = parg.replace(".xml", "-rerun-failed.xml")
    cmd_args.append("--lf")
    if coverage is True:
        _run_with_coverage(session, "coverage", "run", "-m", "py.test", *cmd_args)
        _run_with_coverage(
            session, "python", "-m", "coverage", "run", "-m", "pytest", *cmd_args
        )
    else:
        session.run("py.test", *cmd_args, env=env)
        session.run("python", "-m", "pytest", *cmd_args, env=env)
    # pylint: enable=unreachable
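The noxfile change above swaps the bare ``py.test`` and ``coverage`` console scripts for ``python -m ...`` invocations, so both tools are guaranteed to run under the session's own interpreter. A small, self-contained sketch of that pattern (the session name ``tests`` and the ``tests/`` path are illustrative and not taken from Salt's noxfile):

    import nox


    @nox.session(python=["3.8", "3.9"])
    def tests(session):
        # Install the tools into the session's virtualenv...
        session.install("pytest", "coverage")
        # ...and invoke them through that virtualenv's interpreter, rather than
        # relying on whichever console script happens to be first on PATH.
        session.run("python", "-m", "coverage", "run", "-m", "pytest", "tests/")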
@@ -1,6 +1,6 @@
-r req_win.txt
backports-abc==0.5; python_version < '3.0'
backports.ssl-match-hostname==3.7.0.1
backports.ssl-match-hostname==3.7.0.1; python_version < '3.7'
certifi
cffi==1.12.2
CherryPy==17.4.1
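The only functional change in the hunk above is the ``python_version < '3.7'`` environment marker added to the ``backports.ssl-match-hostname`` pin. A quick way to see what such a marker does, using the ``packaging`` library (this snippet is illustrative and not part of the diff):

    from packaging.markers import Marker

    marker = Marker("python_version < '3.7'")
    # Overriding python_version shows when pip would install the pinned package.
    print(marker.evaluate({"python_version": "3.6"}))  # True  -> installed
    print(marker.evaluate({"python_version": "3.8"}))  # False -> skipped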
@@ -1,12 +0,0 @@
mock
boto
boto3
moto
SaltPyLint>=v2017.6.22
apache-libcloud
virtualenv

# Needed for archive, which is gated for Redhat
# rarfile
# Needed for keystone
# python-keystoneclient
@@ -1,2 +1,2 @@
pywin32==224
pywin32==227
WMI==1.4.9
@@ -10,7 +10,7 @@ atomicwrites==1.3.0  # via pytest
attrs==19.1.0  # via pytest
aws-xray-sdk==0.95  # via moto
backports.functools-lru-cache==1.5  # via cheroot
backports.ssl-match-hostname==3.7.0.1
backports.ssl-match-hostname==3.7.0.1 ; python_version < "3.7"
boto3==1.9.132
boto==2.49.0
botocore==1.12.132  # via boto3, moto, s3transfer
@@ -88,7 +88,7 @@ python-jose==2.0.2  # via moto
pythonnet==2.3.0
pytz==2019.1  # via moto, tempora
pyvmomi==6.7.1.2018.12
pywin32==224
pywin32==227
pyyaml==5.1.2
pyzmq==18.0.1 ; python_version != "3.4"
requests==2.21.0
@@ -10,7 +10,7 @@ atomicwrites==1.3.0  # via pytest
attrs==19.1.0  # via pytest
aws-xray-sdk==0.95  # via moto
backports.functools-lru-cache==1.5  # via cheroot
backports.ssl-match-hostname==3.7.0.1
backports.ssl-match-hostname==3.7.0.1 ; python_version < "3.7"
boto3==1.9.132
boto==2.49.0
botocore==1.12.132  # via boto3, moto, s3transfer
@@ -87,7 +87,7 @@ python-jose==2.0.2  # via moto
pythonnet==2.3.0
pytz==2019.1  # via moto, tempora
pyvmomi==6.7.1.2018.12
pywin32==224
pywin32==227
pyyaml==5.1.2
pyzmq==18.0.1 ; python_version != "3.4"
requests==2.21.0
@@ -10,7 +10,6 @@ atomicwrites==1.3.0  # via pytest
attrs==19.1.0  # via pytest
aws-xray-sdk==0.95  # via moto
backports.functools-lru-cache==1.5  # via cheroot
backports.ssl-match-hostname==3.7.0.1
boto3==1.9.132
boto==2.49.0
botocore==1.12.132  # via boto3, moto, s3transfer
@@ -87,7 +86,7 @@ python-jose==2.0.2  # via moto
pythonnet==2.3.0
pytz==2019.1  # via moto, tempora
pyvmomi==6.7.1.2018.12
pywin32==224
pywin32==227
pyyaml==5.1.2
pyzmq==18.0.1 ; python_version != "3.4"
requests==2.21.0
requirements/static/py3.8/cloud.txt (new file, 115 lines)

@@ -0,0 +1,115 @@
#
|
||||
# This file is autogenerated by pip-compile
|
||||
# To update, run:
|
||||
#
|
||||
# pip-compile -o requirements/static/py3.8/cloud.txt -v requirements/static/cloud.in
|
||||
#
|
||||
adal==1.2.1 # via azure-datalake-store, msrestazure
|
||||
asn1crypto==0.24.0 # via cryptography
|
||||
azure-applicationinsights==0.1.0 # via azure
|
||||
azure-batch==4.1.3 # via azure
|
||||
azure-common==1.1.18 # via azure-applicationinsights, azure-batch, azure-cosmosdb-table, azure-eventgrid, azure-graphrbac, azure-keyvault, azure-loganalytics, azure-mgmt-advisor, azure-mgmt-applicationinsights, azure-mgmt-authorization, azure-mgmt-batch, azure-mgmt-batchai, azure-mgmt-billing, azure-mgmt-cdn, azure-mgmt-cognitiveservices, azure-mgmt-commerce, azure-mgmt-compute, azure-mgmt-consumption, azure-mgmt-containerinstance, azure-mgmt-containerregistry, azure-mgmt-containerservice, azure-mgmt-cosmosdb, azure-mgmt-datafactory, azure-mgmt-datalake-analytics, azure-mgmt-datalake-store, azure-mgmt-datamigration, azure-mgmt-devspaces, azure-mgmt-devtestlabs, azure-mgmt-dns, azure-mgmt-eventgrid, azure-mgmt-eventhub, azure-mgmt-hanaonazure, azure-mgmt-iotcentral, azure-mgmt-iothub, azure-mgmt-iothubprovisioningservices, azure-mgmt-keyvault, azure-mgmt-loganalytics, azure-mgmt-logic, azure-mgmt-machinelearningcompute, azure-mgmt-managementgroups, azure-mgmt-managementpartner, azure-mgmt-maps, azure-mgmt-marketplaceordering, azure-mgmt-media, azure-mgmt-monitor, azure-mgmt-msi, azure-mgmt-network, azure-mgmt-notificationhubs, azure-mgmt-policyinsights, azure-mgmt-powerbiembedded, azure-mgmt-rdbms, azure-mgmt-recoveryservices, azure-mgmt-recoveryservicesbackup, azure-mgmt-redis, azure-mgmt-relay, azure-mgmt-reservations, azure-mgmt-resource, azure-mgmt-scheduler, azure-mgmt-search, azure-mgmt-servicebus, azure-mgmt-servicefabric, azure-mgmt-signalr, azure-mgmt-sql, azure-mgmt-storage, azure-mgmt-subscription, azure-mgmt-trafficmanager, azure-mgmt-web, azure-servicebus, azure-servicefabric, azure-servicemanagement-legacy, azure-storage-blob, azure-storage-common, azure-storage-file, azure-storage-queue
|
||||
azure-cosmosdb-nspkg==2.0.2 # via azure-cosmosdb-table
|
||||
azure-cosmosdb-table==1.0.5 # via azure
|
||||
azure-datalake-store==0.0.44 # via azure
|
||||
azure-eventgrid==1.2.0 # via azure
|
||||
azure-graphrbac==0.40.0 # via azure
|
||||
azure-keyvault==1.1.0 # via azure
|
||||
azure-loganalytics==0.1.0 # via azure
|
||||
azure-mgmt-advisor==1.0.1 # via azure-mgmt
|
||||
azure-mgmt-applicationinsights==0.1.1 # via azure-mgmt
|
||||
azure-mgmt-authorization==0.50.0 # via azure-mgmt
|
||||
azure-mgmt-batch==5.0.1 # via azure-mgmt
|
||||
azure-mgmt-batchai==2.0.0 # via azure-mgmt
|
||||
azure-mgmt-billing==0.2.0 # via azure-mgmt
|
||||
azure-mgmt-cdn==3.1.0 # via azure-mgmt
|
||||
azure-mgmt-cognitiveservices==3.0.0 # via azure-mgmt
|
||||
azure-mgmt-commerce==1.0.1 # via azure-mgmt
|
||||
azure-mgmt-compute==4.6.0 # via azure-mgmt
|
||||
azure-mgmt-consumption==2.0.0 # via azure-mgmt
|
||||
azure-mgmt-containerinstance==1.4.1 # via azure-mgmt
|
||||
azure-mgmt-containerregistry==2.7.0 # via azure-mgmt
|
||||
azure-mgmt-containerservice==4.4.0 # via azure-mgmt
|
||||
azure-mgmt-cosmosdb==0.4.1 # via azure-mgmt
|
||||
azure-mgmt-datafactory==0.6.0 # via azure-mgmt
|
||||
azure-mgmt-datalake-analytics==0.6.0 # via azure-mgmt
|
||||
azure-mgmt-datalake-nspkg==3.0.1 # via azure-mgmt-datalake-analytics, azure-mgmt-datalake-store
|
||||
azure-mgmt-datalake-store==0.5.0 # via azure-mgmt
|
||||
azure-mgmt-datamigration==1.0.0 # via azure-mgmt
|
||||
azure-mgmt-devspaces==0.1.0 # via azure-mgmt
|
||||
azure-mgmt-devtestlabs==2.2.0 # via azure-mgmt
|
||||
azure-mgmt-dns==2.1.0 # via azure-mgmt
|
||||
azure-mgmt-eventgrid==1.0.0 # via azure-mgmt
|
||||
azure-mgmt-eventhub==2.5.0 # via azure-mgmt
|
||||
azure-mgmt-hanaonazure==0.1.1 # via azure-mgmt
|
||||
azure-mgmt-iotcentral==0.1.0 # via azure-mgmt
|
||||
azure-mgmt-iothub==0.5.0 # via azure-mgmt
|
||||
azure-mgmt-iothubprovisioningservices==0.2.0 # via azure-mgmt
|
||||
azure-mgmt-keyvault==1.1.0 # via azure-mgmt
|
||||
azure-mgmt-loganalytics==0.2.0 # via azure-mgmt
|
||||
azure-mgmt-logic==3.0.0 # via azure-mgmt
|
||||
azure-mgmt-machinelearningcompute==0.4.1 # via azure-mgmt
|
||||
azure-mgmt-managementgroups==0.1.0 # via azure-mgmt
|
||||
azure-mgmt-managementpartner==0.1.0 # via azure-mgmt
|
||||
azure-mgmt-maps==0.1.0 # via azure-mgmt
|
||||
azure-mgmt-marketplaceordering==0.1.0 # via azure-mgmt
|
||||
azure-mgmt-media==1.0.0 # via azure-mgmt
|
||||
azure-mgmt-monitor==0.5.2 # via azure-mgmt
|
||||
azure-mgmt-msi==0.2.0 # via azure-mgmt
|
||||
azure-mgmt-network==2.6.0 # via azure-mgmt
|
||||
azure-mgmt-notificationhubs==2.0.0 # via azure-mgmt
|
||||
azure-mgmt-nspkg==3.0.2 # via azure-mgmt-advisor, azure-mgmt-applicationinsights, azure-mgmt-authorization, azure-mgmt-batch, azure-mgmt-batchai, azure-mgmt-billing, azure-mgmt-cognitiveservices, azure-mgmt-commerce, azure-mgmt-consumption, azure-mgmt-cosmosdb, azure-mgmt-datafactory, azure-mgmt-datalake-nspkg, azure-mgmt-datamigration, azure-mgmt-devspaces, azure-mgmt-devtestlabs, azure-mgmt-dns, azure-mgmt-eventgrid, azure-mgmt-hanaonazure, azure-mgmt-iotcentral, azure-mgmt-iothub, azure-mgmt-iothubprovisioningservices, azure-mgmt-keyvault, azure-mgmt-loganalytics, azure-mgmt-logic, azure-mgmt-machinelearningcompute, azure-mgmt-managementgroups, azure-mgmt-managementpartner, azure-mgmt-maps, azure-mgmt-marketplaceordering, azure-mgmt-monitor, azure-mgmt-msi, azure-mgmt-notificationhubs, azure-mgmt-policyinsights, azure-mgmt-powerbiembedded, azure-mgmt-recoveryservices, azure-mgmt-recoveryservicesbackup, azure-mgmt-redis, azure-mgmt-relay, azure-mgmt-reservations, azure-mgmt-scheduler, azure-mgmt-search, azure-mgmt-servicefabric, azure-mgmt-signalr, azure-mgmt-sql, azure-mgmt-storage, azure-mgmt-subscription, azure-mgmt-trafficmanager, azure-mgmt-web
|
||||
azure-mgmt-policyinsights==0.1.0 # via azure-mgmt
|
||||
azure-mgmt-powerbiembedded==2.0.0 # via azure-mgmt
|
||||
azure-mgmt-rdbms==1.8.0 # via azure-mgmt
|
||||
azure-mgmt-recoveryservices==0.3.0 # via azure-mgmt
|
||||
azure-mgmt-recoveryservicesbackup==0.3.0 # via azure-mgmt
|
||||
azure-mgmt-redis==5.0.0 # via azure-mgmt
|
||||
azure-mgmt-relay==0.1.0 # via azure-mgmt
|
||||
azure-mgmt-reservations==0.2.1 # via azure-mgmt
|
||||
azure-mgmt-resource==2.1.0 # via azure-mgmt
|
||||
azure-mgmt-scheduler==2.0.0 # via azure-mgmt
|
||||
azure-mgmt-search==2.0.0 # via azure-mgmt
|
||||
azure-mgmt-servicebus==0.5.3 # via azure-mgmt
|
||||
azure-mgmt-servicefabric==0.2.0 # via azure-mgmt
|
||||
azure-mgmt-signalr==0.1.1 # via azure-mgmt
|
||||
azure-mgmt-sql==0.9.1 # via azure-mgmt
|
||||
azure-mgmt-storage==2.0.0 # via azure-mgmt
|
||||
azure-mgmt-subscription==0.2.0 # via azure-mgmt
|
||||
azure-mgmt-trafficmanager==0.50.0 # via azure-mgmt
|
||||
azure-mgmt-web==0.35.0 # via azure-mgmt
|
||||
azure-mgmt==4.0.0 # via azure
|
||||
azure-nspkg==3.0.2 # via azure-applicationinsights, azure-batch, azure-cosmosdb-nspkg, azure-eventgrid, azure-graphrbac, azure-keyvault, azure-loganalytics, azure-mgmt-nspkg, azure-servicebus, azure-servicefabric, azure-servicemanagement-legacy
|
||||
azure-servicebus==0.21.1 # via azure
|
||||
azure-servicefabric==6.3.0.0 # via azure
|
||||
azure-servicemanagement-legacy==0.20.6 # via azure
|
||||
azure-storage-blob==1.5.0 # via azure
|
||||
azure-storage-common==1.4.0 # via azure-cosmosdb-table, azure-storage-blob, azure-storage-file, azure-storage-queue
|
||||
azure-storage-file==1.4.0 # via azure
|
||||
azure-storage-queue==1.4.0 # via azure
|
||||
azure==4.0.0
|
||||
certifi==2019.3.9 # via msrest, requests
|
||||
cffi==1.12.2 # via azure-datalake-store, cryptography
|
||||
chardet==3.0.4 # via requests
|
||||
cryptography==2.6.1 # via adal, azure-cosmosdb-table, azure-keyvault, azure-storage-common, requests-ntlm, smbprotocol
|
||||
idna==2.8 # via requests
|
||||
isodate==0.6.0 # via msrest
|
||||
msrest==0.6.6 # via azure-applicationinsights, azure-eventgrid, azure-keyvault, azure-loganalytics, azure-mgmt-cdn, azure-mgmt-compute, azure-mgmt-containerinstance, azure-mgmt-containerregistry, azure-mgmt-containerservice, azure-mgmt-dns, azure-mgmt-eventhub, azure-mgmt-keyvault, azure-mgmt-media, azure-mgmt-network, azure-mgmt-rdbms, azure-mgmt-resource, azure-mgmt-servicebus, azure-mgmt-servicefabric, azure-mgmt-signalr, azure-servicefabric, msrestazure
|
||||
msrestazure==0.6.0 # via azure-batch, azure-eventgrid, azure-graphrbac, azure-keyvault, azure-mgmt-advisor, azure-mgmt-applicationinsights, azure-mgmt-authorization, azure-mgmt-batch, azure-mgmt-batchai, azure-mgmt-billing, azure-mgmt-cdn, azure-mgmt-cognitiveservices, azure-mgmt-commerce, azure-mgmt-compute, azure-mgmt-consumption, azure-mgmt-containerinstance, azure-mgmt-containerregistry, azure-mgmt-containerservice, azure-mgmt-cosmosdb, azure-mgmt-datafactory, azure-mgmt-datalake-analytics, azure-mgmt-datalake-store, azure-mgmt-datamigration, azure-mgmt-devspaces, azure-mgmt-devtestlabs, azure-mgmt-dns, azure-mgmt-eventgrid, azure-mgmt-eventhub, azure-mgmt-hanaonazure, azure-mgmt-iotcentral, azure-mgmt-iothub, azure-mgmt-iothubprovisioningservices, azure-mgmt-keyvault, azure-mgmt-loganalytics, azure-mgmt-logic, azure-mgmt-machinelearningcompute, azure-mgmt-managementgroups, azure-mgmt-managementpartner, azure-mgmt-maps, azure-mgmt-marketplaceordering, azure-mgmt-media, azure-mgmt-monitor, azure-mgmt-msi, azure-mgmt-network, azure-mgmt-notificationhubs, azure-mgmt-policyinsights, azure-mgmt-powerbiembedded, azure-mgmt-rdbms, azure-mgmt-recoveryservices, azure-mgmt-recoveryservicesbackup, azure-mgmt-redis, azure-mgmt-relay, azure-mgmt-reservations, azure-mgmt-resource, azure-mgmt-scheduler, azure-mgmt-search, azure-mgmt-servicebus, azure-mgmt-servicefabric, azure-mgmt-signalr, azure-mgmt-sql, azure-mgmt-storage, azure-mgmt-subscription, azure-mgmt-trafficmanager, azure-mgmt-web
|
||||
netaddr==0.7.19
|
||||
ntlm-auth==1.3.0 # via requests-ntlm, smbprotocol
|
||||
oauthlib==3.0.1 # via requests-oauthlib
|
||||
profitbricks==4.1.3
|
||||
pyasn1==0.4.5 # via smbprotocol
|
||||
pycparser==2.19 # via cffi
|
||||
pyjwt==1.7.1 # via adal
|
||||
pypsexec==0.1.0
|
||||
python-dateutil==2.8.0 # via adal, azure-cosmosdb-table, azure-storage-common
|
||||
pywinrm==0.3.0
|
||||
requests-ntlm==1.1.0 # via pywinrm
|
||||
requests-oauthlib==1.2.0 # via msrest
|
||||
requests==2.21.0 # via adal, azure-cosmosdb-table, azure-datalake-store, azure-keyvault, azure-servicebus, azure-servicemanagement-legacy, azure-storage-common, msrest, profitbricks, pywinrm, requests-ntlm, requests-oauthlib
|
||||
six==1.12.0 # via cryptography, isodate, profitbricks, pypsexec, python-dateutil, pywinrm, smbprotocol
|
||||
smbprotocol==0.1.1 # via pypsexec
|
||||
urllib3==1.24.2 # via requests
|
||||
xmltodict==0.12.0 # via pywinrm
|
requirements/static/py3.8/darwin-crypto.txt (new file, 8 lines)

@@ -0,0 +1,8 @@
#
# This file is autogenerated by pip-compile
# To update, run:
#
# pip-compile -o requirements/static/py3.8/darwin-crypto.txt -v requirements/static/crypto.in
#
m2crypto==0.35.2
pycryptodomex==3.9.0
requirements/static/py3.8/darwin.txt (new file, 123 lines)

@@ -0,0 +1,123 @@
#
|
||||
# This file is autogenerated by pip-compile
|
||||
# To update, run:
|
||||
#
|
||||
# pip-compile -o requirements/static/py3.8/darwin.txt -v pkg/osx/req.txt pkg/osx/req_ext.txt requirements/base.txt requirements/zeromq.txt requirements/pytest.txt requirements/static/darwin.in
|
||||
#
|
||||
apache-libcloud==2.4.0
|
||||
appdirs==1.4.3 # via virtualenv
|
||||
argh==0.26.2 # via watchdog
|
||||
asn1crypto==1.3.0 # via certvalidator, cryptography, oscrypto
|
||||
atomicwrites==1.3.0 # via pytest
|
||||
attrs==19.1.0 # via pytest
|
||||
aws-xray-sdk==0.95 # via moto
|
||||
backports.functools-lru-cache==1.5 # via cheroot
|
||||
backports.ssl_match_hostname==3.7.0.1
|
||||
bcrypt==3.1.6 # via paramiko
|
||||
boto3==1.9.132
|
||||
boto==2.49.0
|
||||
botocore==1.12.132 # via boto3, moto, s3transfer
|
||||
cachetools==3.1.0 # via google-auth
|
||||
certifi==2019.3.9
|
||||
certvalidator==0.11.1 # via vcert
|
||||
cffi==1.12.2
|
||||
chardet==3.0.4 # via requests
|
||||
cheetah3==3.1.0
|
||||
cheroot==6.5.5 # via cherrypy
|
||||
cherrypy==17.4.1
|
||||
click==7.0
|
||||
clustershell==1.8.1
|
||||
contextlib2==0.5.5 # via cherrypy
|
||||
croniter==0.3.29
|
||||
cryptography==2.6.1
|
||||
distlib==0.3.0 # via virtualenv
|
||||
dnspython==1.16.0
|
||||
docker-pycreds==0.4.0 # via docker
|
||||
docker==3.7.2
|
||||
docutils==0.14 # via botocore
|
||||
ecdsa==0.13.3 # via python-jose
|
||||
enum34==1.1.6
|
||||
filelock==3.0.12 # via virtualenv
|
||||
future==0.17.1 # via python-jose
|
||||
genshi==0.7.3
|
||||
gitdb2==2.0.5 # via gitpython
|
||||
gitpython==2.1.15
|
||||
google-auth==1.6.3 # via kubernetes
|
||||
idna==2.8
|
||||
ipaddress==1.0.22
|
||||
jaraco.functools==2.0 # via tempora
|
||||
jinja2==2.10.1
|
||||
jmespath==0.9.4
|
||||
jsondiff==1.1.1 # via moto
|
||||
jsonpickle==1.1 # via aws-xray-sdk
|
||||
jsonschema==2.6.0
|
||||
junos-eznc==2.2.0
|
||||
jxmlease==1.0.1
|
||||
keyring==5.7.1
|
||||
kubernetes==3.0.0
|
||||
linode-python==1.1.1
|
||||
lxml==4.3.3 # via junos-eznc, ncclient
|
||||
mako==1.0.7
|
||||
markupsafe==1.1.1
|
||||
mock==3.0.5
|
||||
more-itertools==5.0.0
|
||||
moto==1.3.7
|
||||
msgpack-python==0.5.6
|
||||
msgpack==0.5.6
|
||||
ncclient==0.6.4 # via junos-eznc
|
||||
netaddr==0.7.19 # via junos-eznc
|
||||
oscrypto==1.2.0 # via certvalidator
|
||||
packaging==19.2 # via pytest
|
||||
paramiko==2.4.2 # via junos-eznc, ncclient, scp
|
||||
pathtools==0.1.2 # via watchdog
|
||||
pluggy==0.13.1 # via pytest
|
||||
portend==2.4 # via cherrypy
|
||||
psutil==5.6.6
|
||||
py==1.8.0 # via pytest
|
||||
pyaml==19.4.1 # via moto
|
||||
pyasn1-modules==0.2.4 # via google-auth
|
||||
pyasn1==0.4.5
|
||||
pycparser==2.19
|
||||
pycryptodome==3.8.1
|
||||
pynacl==1.3.0 # via paramiko
|
||||
pyopenssl==19.0.0
|
||||
pyparsing==2.4.5 # via packaging
|
||||
pyserial==3.4 # via junos-eznc
|
||||
pytest-helpers-namespace==2019.1.8
|
||||
pytest-salt-runtests-bridge==2019.7.10
|
||||
pytest-salt==2019.12.27
|
||||
pytest-tempdir==2019.10.12
|
||||
pytest==4.6.6
|
||||
python-dateutil==2.8.0
|
||||
python-etcd==0.4.5
|
||||
python-gnupg==0.4.4
|
||||
python-jose==2.0.2 # via moto
|
||||
pytz==2019.1 # via moto, tempora
|
||||
pyvmomi==6.7.1.2018.12
|
||||
pyyaml==5.1.2
|
||||
pyzmq==18.0.1 ; python_version != "3.4"
|
||||
requests==2.21.0
|
||||
responses==0.10.6 # via moto
|
||||
rfc3987==1.3.8
|
||||
rsa==4.0 # via google-auth
|
||||
s3transfer==0.2.0 # via boto3
|
||||
salttesting==2017.6.1
|
||||
scp==0.13.2 # via junos-eznc
|
||||
setproctitle==1.1.10
|
||||
six==1.12.0 # via bcrypt, cheroot, cherrypy, cryptography, docker, docker-pycreds, google-auth, junos-eznc, kubernetes, mock, more-itertools, moto, ncclient, packaging, pynacl, pyopenssl, pytest, python-dateutil, python-jose, pyvmomi, responses, salttesting, tempora, vcert, virtualenv, websocket-client
|
||||
smmap2==2.0.5 # via gitdb2
|
||||
strict-rfc3339==0.7
|
||||
tempora==1.14.1 # via portend
|
||||
timelib==0.2.4
|
||||
urllib3==1.24.2 # via botocore, kubernetes, python-etcd, requests
|
||||
vcert==0.7.3
|
||||
virtualenv==20.0.10
|
||||
vultr==1.0.1
|
||||
watchdog==0.9.0
|
||||
wcwidth==0.1.7 # via pytest
|
||||
websocket-client==0.40.0 # via docker, kubernetes
|
||||
werkzeug==0.15.6 # via moto
|
||||
wrapt==1.11.1 # via aws-xray-sdk
|
||||
xmltodict==0.12.0 # via moto
|
||||
yamlordereddictloader==0.4.0
|
||||
zc.lockfile==1.4 # via cherrypy
30  requirements/static/py3.8/docs.txt  Normal file
@ -0,0 +1,30 @@
#
# This file is autogenerated by pip-compile
# To update, run:
#
# pip-compile -o requirements/static/py3.8/docs.txt -v requirements/static/docs.in
#
alabaster==0.7.12 # via sphinx
babel==2.7.0 # via sphinx
certifi==2019.3.9 # via requests
chardet==3.0.4 # via requests
docutils==0.14 # via sphinx
idna==2.8 # via requests
imagesize==1.1.0 # via sphinx
jinja2==2.10.1 # via sphinx
markupsafe==1.1.1 # via jinja2
packaging==19.0 # via sphinx
pygments==2.4.2 # via sphinx
pyparsing==2.4.0 # via packaging
pytz==2019.1 # via babel
requests==2.22.0 # via sphinx
six==1.12.0 # via packaging
snowballstemmer==1.2.1 # via sphinx
sphinx==2.0.1
sphinxcontrib-applehelp==1.0.1 # via sphinx
sphinxcontrib-devhelp==1.0.1 # via sphinx
sphinxcontrib-htmlhelp==1.0.2 # via sphinx
sphinxcontrib-jsmath==1.0.1 # via sphinx
sphinxcontrib-qthelp==1.0.2 # via sphinx
sphinxcontrib-serializinghtml==1.1.3 # via sphinx
urllib3==1.25.3 # via requests
16  requirements/static/py3.8/lint.txt  Normal file
@ -0,0 +1,16 @@
#
# This file is autogenerated by pip-compile
# To update, run:
#
# pip-compile -o requirements/static/py3.8/lint.txt -v requirements/static/lint.in
#
astroid==2.3.3 # via pylint
isort==4.3.17 # via pylint
lazy-object-proxy==1.4.3 # via astroid
mccabe==0.6.1 # via pylint
modernize==0.5 # via saltpylint
pycodestyle==2.5.0 # via saltpylint
pylint==2.4.4
saltpylint==2019.11.14
six==1.12.0 # via astroid
wrapt==1.11.1 # via astroid
8  requirements/static/py3.8/linux-crypto.txt  Normal file
@ -0,0 +1,8 @@
#
# This file is autogenerated by pip-compile
# To update, run:
#
# pip-compile -o requirements/static/py3.8/linux-crypto.txt -v requirements/static/crypto.in
#
m2crypto==0.35.2
pycryptodomex==3.9.3
119  requirements/static/py3.8/linux.txt  Normal file
@ -0,0 +1,119 @@
#
|
||||
# This file is autogenerated by pip-compile
|
||||
# To update, run:
|
||||
#
|
||||
# pip-compile -o requirements/static/py3.8/linux.txt -v requirements/base.txt requirements/zeromq.txt requirements/pytest.txt requirements/static/linux.in
|
||||
#
|
||||
apache-libcloud==2.0.0
|
||||
argh==0.26.2 # via watchdog
|
||||
asn1crypto==1.3.0 # via certvalidator, cryptography, oscrypto
|
||||
atomicwrites==1.3.0 # via pytest
|
||||
attrs==19.1.0 # via pytest
|
||||
aws-xray-sdk==0.95 # via moto
|
||||
backports.functools-lru-cache==1.5 # via cheroot
|
||||
bcrypt==3.1.6 # via paramiko
|
||||
boto3==1.9.132
|
||||
boto==2.49.0
|
||||
botocore==1.12.132 # via boto3, moto, s3transfer
|
||||
cachetools==3.1.0 # via google-auth
|
||||
certifi==2019.3.9
|
||||
certvalidator==0.11.1 # via vcert
|
||||
cffi==1.12.2
|
||||
chardet==3.0.4 # via requests
|
||||
cheetah3==3.1.0
|
||||
cheroot==6.5.4 # via cherrypy
|
||||
cherrypy==17.3.0
|
||||
contextlib2==0.5.5 # via cherrypy
|
||||
croniter==0.3.29
|
||||
cryptography==2.6.1 # via moto, paramiko, pyopenssl, vcert
|
||||
dnspython==1.16.0
|
||||
docker-pycreds==0.4.0 # via docker
|
||||
docker==3.7.2
|
||||
docutils==0.14 # via botocore
|
||||
ecdsa==0.13.3 # via python-jose
|
||||
future==0.17.1 # via python-jose
|
||||
genshi==0.7.3
|
||||
gitdb2==2.0.5 # via gitpython
|
||||
gitpython==2.1.11
|
||||
google-auth==1.6.3 # via kubernetes
|
||||
hgtools==8.1.1
|
||||
idna==2.8 # via requests
|
||||
ipaddress==1.0.22 # via kubernetes
|
||||
jaraco.functools==2.0 # via tempora
|
||||
jinja2==2.10.1
|
||||
jmespath==0.9.4
|
||||
jsondiff==1.1.1 # via moto
|
||||
jsonpickle==1.1 # via aws-xray-sdk
|
||||
jsonschema==2.6.0
|
||||
junos-eznc==2.2.0
|
||||
jxmlease==1.0.1
|
||||
kazoo==2.6.1
|
||||
keyring==5.7.1
|
||||
kubernetes==3.0.0
|
||||
libnacl==1.7.1
|
||||
lxml==4.3.3 # via junos-eznc, ncclient
|
||||
mako==1.1.0
|
||||
markupsafe==1.1.1
|
||||
mock==3.0.5
|
||||
more-itertools==5.0.0
|
||||
moto==1.3.7
|
||||
msgpack==0.5.6
|
||||
ncclient==0.6.4 # via junos-eznc
|
||||
netaddr==0.7.19 # via junos-eznc
|
||||
oscrypto==1.2.0 # via certvalidator
|
||||
packaging==19.2 # via pytest
|
||||
paramiko==2.4.2
|
||||
pathtools==0.1.2 # via watchdog
|
||||
pluggy==0.13.0 # via pytest
|
||||
portend==2.4 # via cherrypy
|
||||
psutil==5.6.1
|
||||
py==1.8.0 # via pytest
|
||||
pyaml==19.4.1 # via moto
|
||||
pyasn1-modules==0.2.4 # via google-auth
|
||||
pyasn1==0.4.5 # via paramiko, pyasn1-modules, rsa
|
||||
pycparser==2.19 # via cffi
|
||||
pycrypto==2.6.1 ; sys_platform not in "win32,darwin"
|
||||
pycryptodome==3.8.1 # via python-jose
|
||||
pygit2==0.28.2
|
||||
pyinotify==0.9.6
|
||||
pynacl==1.3.0 # via paramiko
|
||||
pyopenssl==19.0.0
|
||||
pyparsing==2.4.5 # via packaging
|
||||
pyserial==3.4 # via junos-eznc
|
||||
pytest-helpers-namespace==2019.1.8
|
||||
pytest-salt-runtests-bridge==2019.7.10
|
||||
pytest-salt==2019.12.27
|
||||
pytest-tempdir==2019.10.12
|
||||
pytest==4.6.6
|
||||
python-dateutil==2.8.0 # via botocore, croniter, kubernetes, moto, vcert
|
||||
python-etcd==0.4.5
|
||||
python-gnupg==0.4.4
|
||||
python-jose==2.0.2 # via moto
|
||||
pytz==2019.1 # via moto, tempora
|
||||
pyvmomi==6.7.1.2018.12
|
||||
pyyaml==5.1.2
|
||||
pyzmq==18.0.1 ; python_version != "3.4"
|
||||
requests==2.21.0
|
||||
responses==0.10.6 # via moto
|
||||
rfc3987==1.3.8
|
||||
rsa==4.0 # via google-auth
|
||||
s3transfer==0.2.0 # via boto3
|
||||
salttesting==2017.6.1
|
||||
scp==0.13.2 # via junos-eznc
|
||||
setproctitle==1.1.10
|
||||
setuptools-scm==3.2.0
|
||||
six==1.12.0 # via bcrypt, cheroot, cherrypy, cryptography, docker, docker-pycreds, google-auth, junos-eznc, kazoo, kubernetes, mock, more-itertools, moto, ncclient, packaging, pygit2, pynacl, pyopenssl, pytest, python-dateutil, python-jose, pyvmomi, responses, salttesting, tempora, vcert, websocket-client
|
||||
smmap2==2.0.5 # via gitdb2
|
||||
strict-rfc3339==0.7
|
||||
tempora==1.14.1 # via portend
|
||||
timelib==0.2.4
|
||||
urllib3==1.24.2 # via botocore, kubernetes, python-etcd, requests
|
||||
vcert==0.7.3
|
||||
virtualenv==16.4.3
|
||||
watchdog==0.9.0
|
||||
wcwidth==0.1.7 # via pytest
|
||||
websocket-client==0.40.0 # via docker, kubernetes
|
||||
werkzeug==0.15.6 # via moto
|
||||
wrapt==1.11.1 # via aws-xray-sdk
|
||||
xmltodict==0.12.0 # via moto
|
||||
zc.lockfile==1.4 # via cherrypy
115  requirements/static/py3.9/cloud.txt  Normal file
@ -0,0 +1,115 @@
#
|
||||
# This file is autogenerated by pip-compile
|
||||
# To update, run:
|
||||
#
|
||||
# pip-compile -o requirements/static/py3.9/cloud.txt -v requirements/static/cloud.in
|
||||
#
|
||||
adal==1.2.1 # via azure-datalake-store, msrestazure
|
||||
asn1crypto==0.24.0 # via cryptography
|
||||
azure-applicationinsights==0.1.0 # via azure
|
||||
azure-batch==4.1.3 # via azure
|
||||
azure-common==1.1.18 # via azure-applicationinsights, azure-batch, azure-cosmosdb-table, azure-eventgrid, azure-graphrbac, azure-keyvault, azure-loganalytics, azure-mgmt-advisor, azure-mgmt-applicationinsights, azure-mgmt-authorization, azure-mgmt-batch, azure-mgmt-batchai, azure-mgmt-billing, azure-mgmt-cdn, azure-mgmt-cognitiveservices, azure-mgmt-commerce, azure-mgmt-compute, azure-mgmt-consumption, azure-mgmt-containerinstance, azure-mgmt-containerregistry, azure-mgmt-containerservice, azure-mgmt-cosmosdb, azure-mgmt-datafactory, azure-mgmt-datalake-analytics, azure-mgmt-datalake-store, azure-mgmt-datamigration, azure-mgmt-devspaces, azure-mgmt-devtestlabs, azure-mgmt-dns, azure-mgmt-eventgrid, azure-mgmt-eventhub, azure-mgmt-hanaonazure, azure-mgmt-iotcentral, azure-mgmt-iothub, azure-mgmt-iothubprovisioningservices, azure-mgmt-keyvault, azure-mgmt-loganalytics, azure-mgmt-logic, azure-mgmt-machinelearningcompute, azure-mgmt-managementgroups, azure-mgmt-managementpartner, azure-mgmt-maps, azure-mgmt-marketplaceordering, azure-mgmt-media, azure-mgmt-monitor, azure-mgmt-msi, azure-mgmt-network, azure-mgmt-notificationhubs, azure-mgmt-policyinsights, azure-mgmt-powerbiembedded, azure-mgmt-rdbms, azure-mgmt-recoveryservices, azure-mgmt-recoveryservicesbackup, azure-mgmt-redis, azure-mgmt-relay, azure-mgmt-reservations, azure-mgmt-resource, azure-mgmt-scheduler, azure-mgmt-search, azure-mgmt-servicebus, azure-mgmt-servicefabric, azure-mgmt-signalr, azure-mgmt-sql, azure-mgmt-storage, azure-mgmt-subscription, azure-mgmt-trafficmanager, azure-mgmt-web, azure-servicebus, azure-servicefabric, azure-servicemanagement-legacy, azure-storage-blob, azure-storage-common, azure-storage-file, azure-storage-queue
|
||||
azure-cosmosdb-nspkg==2.0.2 # via azure-cosmosdb-table
|
||||
azure-cosmosdb-table==1.0.5 # via azure
|
||||
azure-datalake-store==0.0.44 # via azure
|
||||
azure-eventgrid==1.2.0 # via azure
|
||||
azure-graphrbac==0.40.0 # via azure
|
||||
azure-keyvault==1.1.0 # via azure
|
||||
azure-loganalytics==0.1.0 # via azure
|
||||
azure-mgmt-advisor==1.0.1 # via azure-mgmt
|
||||
azure-mgmt-applicationinsights==0.1.1 # via azure-mgmt
|
||||
azure-mgmt-authorization==0.50.0 # via azure-mgmt
|
||||
azure-mgmt-batch==5.0.1 # via azure-mgmt
|
||||
azure-mgmt-batchai==2.0.0 # via azure-mgmt
|
||||
azure-mgmt-billing==0.2.0 # via azure-mgmt
|
||||
azure-mgmt-cdn==3.1.0 # via azure-mgmt
|
||||
azure-mgmt-cognitiveservices==3.0.0 # via azure-mgmt
|
||||
azure-mgmt-commerce==1.0.1 # via azure-mgmt
|
||||
azure-mgmt-compute==4.6.0 # via azure-mgmt
|
||||
azure-mgmt-consumption==2.0.0 # via azure-mgmt
|
||||
azure-mgmt-containerinstance==1.4.1 # via azure-mgmt
|
||||
azure-mgmt-containerregistry==2.7.0 # via azure-mgmt
|
||||
azure-mgmt-containerservice==4.4.0 # via azure-mgmt
|
||||
azure-mgmt-cosmosdb==0.4.1 # via azure-mgmt
|
||||
azure-mgmt-datafactory==0.6.0 # via azure-mgmt
|
||||
azure-mgmt-datalake-analytics==0.6.0 # via azure-mgmt
|
||||
azure-mgmt-datalake-nspkg==3.0.1 # via azure-mgmt-datalake-analytics, azure-mgmt-datalake-store
|
||||
azure-mgmt-datalake-store==0.5.0 # via azure-mgmt
|
||||
azure-mgmt-datamigration==1.0.0 # via azure-mgmt
|
||||
azure-mgmt-devspaces==0.1.0 # via azure-mgmt
|
||||
azure-mgmt-devtestlabs==2.2.0 # via azure-mgmt
|
||||
azure-mgmt-dns==2.1.0 # via azure-mgmt
|
||||
azure-mgmt-eventgrid==1.0.0 # via azure-mgmt
|
||||
azure-mgmt-eventhub==2.5.0 # via azure-mgmt
|
||||
azure-mgmt-hanaonazure==0.1.1 # via azure-mgmt
|
||||
azure-mgmt-iotcentral==0.1.0 # via azure-mgmt
|
||||
azure-mgmt-iothub==0.5.0 # via azure-mgmt
|
||||
azure-mgmt-iothubprovisioningservices==0.2.0 # via azure-mgmt
|
||||
azure-mgmt-keyvault==1.1.0 # via azure-mgmt
|
||||
azure-mgmt-loganalytics==0.2.0 # via azure-mgmt
|
||||
azure-mgmt-logic==3.0.0 # via azure-mgmt
|
||||
azure-mgmt-machinelearningcompute==0.4.1 # via azure-mgmt
|
||||
azure-mgmt-managementgroups==0.1.0 # via azure-mgmt
|
||||
azure-mgmt-managementpartner==0.1.0 # via azure-mgmt
|
||||
azure-mgmt-maps==0.1.0 # via azure-mgmt
|
||||
azure-mgmt-marketplaceordering==0.1.0 # via azure-mgmt
|
||||
azure-mgmt-media==1.0.0 # via azure-mgmt
|
||||
azure-mgmt-monitor==0.5.2 # via azure-mgmt
|
||||
azure-mgmt-msi==0.2.0 # via azure-mgmt
|
||||
azure-mgmt-network==2.6.0 # via azure-mgmt
|
||||
azure-mgmt-notificationhubs==2.0.0 # via azure-mgmt
|
||||
azure-mgmt-nspkg==3.0.2 # via azure-mgmt-advisor, azure-mgmt-applicationinsights, azure-mgmt-authorization, azure-mgmt-batch, azure-mgmt-batchai, azure-mgmt-billing, azure-mgmt-cognitiveservices, azure-mgmt-commerce, azure-mgmt-consumption, azure-mgmt-cosmosdb, azure-mgmt-datafactory, azure-mgmt-datalake-nspkg, azure-mgmt-datamigration, azure-mgmt-devspaces, azure-mgmt-devtestlabs, azure-mgmt-dns, azure-mgmt-eventgrid, azure-mgmt-hanaonazure, azure-mgmt-iotcentral, azure-mgmt-iothub, azure-mgmt-iothubprovisioningservices, azure-mgmt-keyvault, azure-mgmt-loganalytics, azure-mgmt-logic, azure-mgmt-machinelearningcompute, azure-mgmt-managementgroups, azure-mgmt-managementpartner, azure-mgmt-maps, azure-mgmt-marketplaceordering, azure-mgmt-monitor, azure-mgmt-msi, azure-mgmt-notificationhubs, azure-mgmt-policyinsights, azure-mgmt-powerbiembedded, azure-mgmt-recoveryservices, azure-mgmt-recoveryservicesbackup, azure-mgmt-redis, azure-mgmt-relay, azure-mgmt-reservations, azure-mgmt-scheduler, azure-mgmt-search, azure-mgmt-servicefabric, azure-mgmt-signalr, azure-mgmt-sql, azure-mgmt-storage, azure-mgmt-subscription, azure-mgmt-trafficmanager, azure-mgmt-web
|
||||
azure-mgmt-policyinsights==0.1.0 # via azure-mgmt
|
||||
azure-mgmt-powerbiembedded==2.0.0 # via azure-mgmt
|
||||
azure-mgmt-rdbms==1.8.0 # via azure-mgmt
|
||||
azure-mgmt-recoveryservices==0.3.0 # via azure-mgmt
|
||||
azure-mgmt-recoveryservicesbackup==0.3.0 # via azure-mgmt
|
||||
azure-mgmt-redis==5.0.0 # via azure-mgmt
|
||||
azure-mgmt-relay==0.1.0 # via azure-mgmt
|
||||
azure-mgmt-reservations==0.2.1 # via azure-mgmt
|
||||
azure-mgmt-resource==2.1.0 # via azure-mgmt
|
||||
azure-mgmt-scheduler==2.0.0 # via azure-mgmt
|
||||
azure-mgmt-search==2.0.0 # via azure-mgmt
|
||||
azure-mgmt-servicebus==0.5.3 # via azure-mgmt
|
||||
azure-mgmt-servicefabric==0.2.0 # via azure-mgmt
|
||||
azure-mgmt-signalr==0.1.1 # via azure-mgmt
|
||||
azure-mgmt-sql==0.9.1 # via azure-mgmt
|
||||
azure-mgmt-storage==2.0.0 # via azure-mgmt
|
||||
azure-mgmt-subscription==0.2.0 # via azure-mgmt
|
||||
azure-mgmt-trafficmanager==0.50.0 # via azure-mgmt
|
||||
azure-mgmt-web==0.35.0 # via azure-mgmt
|
||||
azure-mgmt==4.0.0 # via azure
|
||||
azure-nspkg==3.0.2 # via azure-applicationinsights, azure-batch, azure-cosmosdb-nspkg, azure-eventgrid, azure-graphrbac, azure-keyvault, azure-loganalytics, azure-mgmt-nspkg, azure-servicebus, azure-servicefabric, azure-servicemanagement-legacy
|
||||
azure-servicebus==0.21.1 # via azure
|
||||
azure-servicefabric==6.3.0.0 # via azure
|
||||
azure-servicemanagement-legacy==0.20.6 # via azure
|
||||
azure-storage-blob==1.5.0 # via azure
|
||||
azure-storage-common==1.4.0 # via azure-cosmosdb-table, azure-storage-blob, azure-storage-file, azure-storage-queue
|
||||
azure-storage-file==1.4.0 # via azure
|
||||
azure-storage-queue==1.4.0 # via azure
|
||||
azure==4.0.0
|
||||
certifi==2019.3.9 # via msrest, requests
|
||||
cffi==1.12.2 # via azure-datalake-store, cryptography
|
||||
chardet==3.0.4 # via requests
|
||||
cryptography==2.6.1 # via adal, azure-cosmosdb-table, azure-keyvault, azure-storage-common, requests-ntlm, smbprotocol
|
||||
idna==2.8 # via requests
|
||||
isodate==0.6.0 # via msrest
|
||||
msrest==0.6.6 # via azure-applicationinsights, azure-eventgrid, azure-keyvault, azure-loganalytics, azure-mgmt-cdn, azure-mgmt-compute, azure-mgmt-containerinstance, azure-mgmt-containerregistry, azure-mgmt-containerservice, azure-mgmt-dns, azure-mgmt-eventhub, azure-mgmt-keyvault, azure-mgmt-media, azure-mgmt-network, azure-mgmt-rdbms, azure-mgmt-resource, azure-mgmt-servicebus, azure-mgmt-servicefabric, azure-mgmt-signalr, azure-servicefabric, msrestazure
|
||||
msrestazure==0.6.0 # via azure-batch, azure-eventgrid, azure-graphrbac, azure-keyvault, azure-mgmt-advisor, azure-mgmt-applicationinsights, azure-mgmt-authorization, azure-mgmt-batch, azure-mgmt-batchai, azure-mgmt-billing, azure-mgmt-cdn, azure-mgmt-cognitiveservices, azure-mgmt-commerce, azure-mgmt-compute, azure-mgmt-consumption, azure-mgmt-containerinstance, azure-mgmt-containerregistry, azure-mgmt-containerservice, azure-mgmt-cosmosdb, azure-mgmt-datafactory, azure-mgmt-datalake-analytics, azure-mgmt-datalake-store, azure-mgmt-datamigration, azure-mgmt-devspaces, azure-mgmt-devtestlabs, azure-mgmt-dns, azure-mgmt-eventgrid, azure-mgmt-eventhub, azure-mgmt-hanaonazure, azure-mgmt-iotcentral, azure-mgmt-iothub, azure-mgmt-iothubprovisioningservices, azure-mgmt-keyvault, azure-mgmt-loganalytics, azure-mgmt-logic, azure-mgmt-machinelearningcompute, azure-mgmt-managementgroups, azure-mgmt-managementpartner, azure-mgmt-maps, azure-mgmt-marketplaceordering, azure-mgmt-media, azure-mgmt-monitor, azure-mgmt-msi, azure-mgmt-network, azure-mgmt-notificationhubs, azure-mgmt-policyinsights, azure-mgmt-powerbiembedded, azure-mgmt-rdbms, azure-mgmt-recoveryservices, azure-mgmt-recoveryservicesbackup, azure-mgmt-redis, azure-mgmt-relay, azure-mgmt-reservations, azure-mgmt-resource, azure-mgmt-scheduler, azure-mgmt-search, azure-mgmt-servicebus, azure-mgmt-servicefabric, azure-mgmt-signalr, azure-mgmt-sql, azure-mgmt-storage, azure-mgmt-subscription, azure-mgmt-trafficmanager, azure-mgmt-web
|
||||
netaddr==0.7.19
|
||||
ntlm-auth==1.3.0 # via requests-ntlm, smbprotocol
|
||||
oauthlib==3.0.1 # via requests-oauthlib
|
||||
profitbricks==4.1.3
|
||||
pyasn1==0.4.5 # via smbprotocol
|
||||
pycparser==2.19 # via cffi
|
||||
pyjwt==1.7.1 # via adal
|
||||
pypsexec==0.1.0
|
||||
python-dateutil==2.8.0 # via adal, azure-cosmosdb-table, azure-storage-common
|
||||
pywinrm==0.3.0
|
||||
requests-ntlm==1.1.0 # via pywinrm
|
||||
requests-oauthlib==1.2.0 # via msrest
|
||||
requests==2.21.0 # via adal, azure-cosmosdb-table, azure-datalake-store, azure-keyvault, azure-servicebus, azure-servicemanagement-legacy, azure-storage-common, msrest, profitbricks, pywinrm, requests-ntlm, requests-oauthlib
|
||||
six==1.12.0 # via cryptography, isodate, profitbricks, pypsexec, python-dateutil, pywinrm, smbprotocol
|
||||
smbprotocol==0.1.1 # via pypsexec
|
||||
urllib3==1.24.2 # via requests
|
||||
xmltodict==0.12.0 # via pywinrm
8  requirements/static/py3.9/darwin-crypto.txt  Normal file
@ -0,0 +1,8 @@
#
# This file is autogenerated by pip-compile
# To update, run:
#
# pip-compile -o requirements/static/py3.9/darwin-crypto.txt -v requirements/static/crypto.in
#
m2crypto==0.35.2
pycryptodomex==3.9.0
123  requirements/static/py3.9/darwin.txt  Normal file
@ -0,0 +1,123 @@
#
|
||||
# This file is autogenerated by pip-compile
|
||||
# To update, run:
|
||||
#
|
||||
# pip-compile -o requirements/static/py3.9/darwin.txt -v pkg/osx/req.txt pkg/osx/req_ext.txt requirements/base.txt requirements/zeromq.txt requirements/pytest.txt requirements/static/darwin.in
|
||||
#
|
||||
apache-libcloud==2.4.0
|
||||
appdirs==1.4.3 # via virtualenv
|
||||
argh==0.26.2 # via watchdog
|
||||
asn1crypto==1.3.0 # via certvalidator, cryptography, oscrypto
|
||||
atomicwrites==1.3.0 # via pytest
|
||||
attrs==19.1.0 # via pytest
|
||||
aws-xray-sdk==0.95 # via moto
|
||||
backports.functools-lru-cache==1.5 # via cheroot
|
||||
backports.ssl_match_hostname==3.7.0.1
|
||||
bcrypt==3.1.6 # via paramiko
|
||||
boto3==1.9.132
|
||||
boto==2.49.0
|
||||
botocore==1.12.132 # via boto3, moto, s3transfer
|
||||
cachetools==3.1.0 # via google-auth
|
||||
certifi==2019.3.9
|
||||
certvalidator==0.11.1 # via vcert
|
||||
cffi==1.12.2
|
||||
chardet==3.0.4 # via requests
|
||||
cheetah3==3.1.0
|
||||
cheroot==6.5.5 # via cherrypy
|
||||
cherrypy==17.4.1
|
||||
click==7.0
|
||||
clustershell==1.8.1
|
||||
contextlib2==0.5.5 # via cherrypy
|
||||
croniter==0.3.29
|
||||
cryptography==2.6.1
|
||||
distlib==0.3.0 # via virtualenv
|
||||
dnspython==1.16.0
|
||||
docker-pycreds==0.4.0 # via docker
|
||||
docker==3.7.2
|
||||
docutils==0.14 # via botocore
|
||||
ecdsa==0.13.3 # via python-jose
|
||||
enum34==1.1.6
|
||||
filelock==3.0.12 # via virtualenv
|
||||
future==0.17.1 # via python-jose
|
||||
genshi==0.7.3
|
||||
gitdb2==2.0.5 # via gitpython
|
||||
gitpython==2.1.15
|
||||
google-auth==1.6.3 # via kubernetes
|
||||
idna==2.8
|
||||
ipaddress==1.0.22
|
||||
jaraco.functools==2.0 # via tempora
|
||||
jinja2==2.10.1
|
||||
jmespath==0.9.4
|
||||
jsondiff==1.1.1 # via moto
|
||||
jsonpickle==1.1 # via aws-xray-sdk
|
||||
jsonschema==2.6.0
|
||||
junos-eznc==2.2.0
|
||||
jxmlease==1.0.1
|
||||
keyring==5.7.1
|
||||
kubernetes==3.0.0
|
||||
linode-python==1.1.1
|
||||
lxml==4.3.3 # via junos-eznc, ncclient
|
||||
mako==1.0.7
|
||||
markupsafe==1.1.1
|
||||
mock==3.0.5
|
||||
more-itertools==5.0.0
|
||||
moto==1.3.7
|
||||
msgpack-python==0.5.6
|
||||
msgpack==0.5.6
|
||||
ncclient==0.6.4 # via junos-eznc
|
||||
netaddr==0.7.19 # via junos-eznc
|
||||
oscrypto==1.2.0 # via certvalidator
|
||||
packaging==19.2 # via pytest
|
||||
paramiko==2.4.2 # via junos-eznc, ncclient, scp
|
||||
pathtools==0.1.2 # via watchdog
|
||||
pluggy==0.13.1 # via pytest
|
||||
portend==2.4 # via cherrypy
|
||||
psutil==5.6.6
|
||||
py==1.8.0 # via pytest
|
||||
pyaml==19.4.1 # via moto
|
||||
pyasn1-modules==0.2.4 # via google-auth
|
||||
pyasn1==0.4.5
|
||||
pycparser==2.19
|
||||
pycryptodome==3.8.1
|
||||
pynacl==1.3.0 # via paramiko
|
||||
pyopenssl==19.0.0
|
||||
pyparsing==2.4.5 # via packaging
|
||||
pyserial==3.4 # via junos-eznc
|
||||
pytest-helpers-namespace==2019.1.8
|
||||
pytest-salt-runtests-bridge==2019.7.10
|
||||
pytest-salt==2019.12.27
|
||||
pytest-tempdir==2019.10.12
|
||||
pytest==4.6.6
|
||||
python-dateutil==2.8.0
|
||||
python-etcd==0.4.5
|
||||
python-gnupg==0.4.4
|
||||
python-jose==2.0.2 # via moto
|
||||
pytz==2019.1 # via moto, tempora
|
||||
pyvmomi==6.7.1.2018.12
|
||||
pyyaml==5.1.2
|
||||
pyzmq==18.0.1 ; python_version != "3.4"
|
||||
requests==2.21.0
|
||||
responses==0.10.6 # via moto
|
||||
rfc3987==1.3.8
|
||||
rsa==4.0 # via google-auth
|
||||
s3transfer==0.2.0 # via boto3
|
||||
salttesting==2017.6.1
|
||||
scp==0.13.2 # via junos-eznc
|
||||
setproctitle==1.1.10
|
||||
six==1.12.0 # via bcrypt, cheroot, cherrypy, cryptography, docker, docker-pycreds, google-auth, junos-eznc, kubernetes, mock, more-itertools, moto, ncclient, packaging, pynacl, pyopenssl, pytest, python-dateutil, python-jose, pyvmomi, responses, salttesting, tempora, vcert, virtualenv, websocket-client
|
||||
smmap2==2.0.5 # via gitdb2
|
||||
strict-rfc3339==0.7
|
||||
tempora==1.14.1 # via portend
|
||||
timelib==0.2.4
|
||||
urllib3==1.24.2 # via botocore, kubernetes, python-etcd, requests
|
||||
vcert==0.7.3
|
||||
virtualenv==20.0.10
|
||||
vultr==1.0.1
|
||||
watchdog==0.9.0
|
||||
wcwidth==0.1.7 # via pytest
|
||||
websocket-client==0.40.0 # via docker, kubernetes
|
||||
werkzeug==0.15.6 # via moto
|
||||
wrapt==1.11.1 # via aws-xray-sdk
|
||||
xmltodict==0.12.0 # via moto
|
||||
yamlordereddictloader==0.4.0
|
||||
zc.lockfile==1.4 # via cherrypy
30  requirements/static/py3.9/docs.txt  Normal file
@ -0,0 +1,30 @@
#
# This file is autogenerated by pip-compile
# To update, run:
#
# pip-compile -o requirements/static/py3.9/docs.txt -v requirements/static/docs.in
#
alabaster==0.7.12 # via sphinx
babel==2.7.0 # via sphinx
certifi==2019.3.9 # via requests
chardet==3.0.4 # via requests
docutils==0.14 # via sphinx
idna==2.8 # via requests
imagesize==1.1.0 # via sphinx
jinja2==2.10.1 # via sphinx
markupsafe==1.1.1 # via jinja2
packaging==19.0 # via sphinx
pygments==2.4.2 # via sphinx
pyparsing==2.4.0 # via packaging
pytz==2019.1 # via babel
requests==2.22.0 # via sphinx
six==1.12.0 # via packaging
snowballstemmer==1.2.1 # via sphinx
sphinx==2.0.1
sphinxcontrib-applehelp==1.0.1 # via sphinx
sphinxcontrib-devhelp==1.0.1 # via sphinx
sphinxcontrib-htmlhelp==1.0.2 # via sphinx
sphinxcontrib-jsmath==1.0.1 # via sphinx
sphinxcontrib-qthelp==1.0.2 # via sphinx
sphinxcontrib-serializinghtml==1.1.3 # via sphinx
urllib3==1.25.3 # via requests
16  requirements/static/py3.9/lint.txt  Normal file
@ -0,0 +1,16 @@
#
# This file is autogenerated by pip-compile
# To update, run:
#
# pip-compile -o requirements/static/py3.9/lint.txt -v requirements/static/lint.in
#
astroid==2.3.3 # via pylint
isort==4.3.17 # via pylint
lazy-object-proxy==1.4.3 # via astroid
mccabe==0.6.1 # via pylint
modernize==0.5 # via saltpylint
pycodestyle==2.5.0 # via saltpylint
pylint==2.4.4
saltpylint==2019.11.14
six==1.12.0 # via astroid
wrapt==1.11.1 # via astroid
8  requirements/static/py3.9/linux-crypto.txt  Normal file
@ -0,0 +1,8 @@
#
# This file is autogenerated by pip-compile
# To update, run:
#
# pip-compile -o requirements/static/py3.9/linux-crypto.txt -v requirements/static/crypto.in
#
m2crypto==0.35.2
pycryptodomex==3.9.3
119  requirements/static/py3.9/linux.txt  Normal file
@ -0,0 +1,119 @@
#
|
||||
# This file is autogenerated by pip-compile
|
||||
# To update, run:
|
||||
#
|
||||
# pip-compile -o requirements/static/py3.9/linux.txt -v requirements/base.txt requirements/zeromq.txt requirements/pytest.txt requirements/static/linux.in
|
||||
#
|
||||
apache-libcloud==2.0.0
|
||||
argh==0.26.2 # via watchdog
|
||||
asn1crypto==1.3.0 # via certvalidator, cryptography, oscrypto
|
||||
atomicwrites==1.3.0 # via pytest
|
||||
attrs==19.1.0 # via pytest
|
||||
aws-xray-sdk==0.95 # via moto
|
||||
backports.functools-lru-cache==1.5 # via cheroot
|
||||
bcrypt==3.1.6 # via paramiko
|
||||
boto3==1.9.132
|
||||
boto==2.49.0
|
||||
botocore==1.12.132 # via boto3, moto, s3transfer
|
||||
cachetools==3.1.0 # via google-auth
|
||||
certifi==2019.3.9
|
||||
certvalidator==0.11.1 # via vcert
|
||||
cffi==1.12.2
|
||||
chardet==3.0.4 # via requests
|
||||
cheetah3==3.1.0
|
||||
cheroot==6.5.4 # via cherrypy
|
||||
cherrypy==17.3.0
|
||||
contextlib2==0.5.5 # via cherrypy
|
||||
croniter==0.3.29
|
||||
cryptography==2.6.1 # via moto, paramiko, pyopenssl, vcert
|
||||
dnspython==1.16.0
|
||||
docker-pycreds==0.4.0 # via docker
|
||||
docker==3.7.2
|
||||
docutils==0.14 # via botocore
|
||||
ecdsa==0.13.3 # via python-jose
|
||||
future==0.17.1 # via python-jose
|
||||
genshi==0.7.3
|
||||
gitdb2==2.0.5 # via gitpython
|
||||
gitpython==2.1.11
|
||||
google-auth==1.6.3 # via kubernetes
|
||||
hgtools==8.1.1
|
||||
idna==2.8 # via requests
|
||||
ipaddress==1.0.22 # via kubernetes
|
||||
jaraco.functools==2.0 # via tempora
|
||||
jinja2==2.10.1
|
||||
jmespath==0.9.4
|
||||
jsondiff==1.1.1 # via moto
|
||||
jsonpickle==1.1 # via aws-xray-sdk
|
||||
jsonschema==2.6.0
|
||||
junos-eznc==2.2.0
|
||||
jxmlease==1.0.1
|
||||
kazoo==2.6.1
|
||||
keyring==5.7.1
|
||||
kubernetes==3.0.0
|
||||
libnacl==1.7.1
|
||||
lxml==4.3.3 # via junos-eznc, ncclient
|
||||
mako==1.1.0
|
||||
markupsafe==1.1.1
|
||||
mock==3.0.5
|
||||
more-itertools==5.0.0
|
||||
moto==1.3.7
|
||||
msgpack==0.5.6
|
||||
ncclient==0.6.4 # via junos-eznc
|
||||
netaddr==0.7.19 # via junos-eznc
|
||||
oscrypto==1.2.0 # via certvalidator
|
||||
packaging==19.2 # via pytest
|
||||
paramiko==2.4.2
|
||||
pathtools==0.1.2 # via watchdog
|
||||
pluggy==0.13.0 # via pytest
|
||||
portend==2.4 # via cherrypy
|
||||
psutil==5.6.1
|
||||
py==1.8.0 # via pytest
|
||||
pyaml==19.4.1 # via moto
|
||||
pyasn1-modules==0.2.4 # via google-auth
|
||||
pyasn1==0.4.5 # via paramiko, pyasn1-modules, rsa
|
||||
pycparser==2.19 # via cffi
|
||||
pycrypto==2.6.1 ; sys_platform not in "win32,darwin"
|
||||
pycryptodome==3.8.1 # via python-jose
|
||||
pygit2==0.28.2
|
||||
pyinotify==0.9.6
|
||||
pynacl==1.3.0 # via paramiko
|
||||
pyopenssl==19.0.0
|
||||
pyparsing==2.4.5 # via packaging
|
||||
pyserial==3.4 # via junos-eznc
|
||||
pytest-helpers-namespace==2019.1.8
|
||||
pytest-salt-runtests-bridge==2019.7.10
|
||||
pytest-salt==2019.12.27
|
||||
pytest-tempdir==2019.10.12
|
||||
pytest==4.6.6
|
||||
python-dateutil==2.8.0 # via botocore, croniter, kubernetes, moto, vcert
|
||||
python-etcd==0.4.5
|
||||
python-gnupg==0.4.4
|
||||
python-jose==2.0.2 # via moto
|
||||
pytz==2019.1 # via moto, tempora
|
||||
pyvmomi==6.7.1.2018.12
|
||||
pyyaml==5.1.2
|
||||
pyzmq==18.0.1 ; python_version != "3.4"
|
||||
requests==2.21.0
|
||||
responses==0.10.6 # via moto
|
||||
rfc3987==1.3.8
|
||||
rsa==4.0 # via google-auth
|
||||
s3transfer==0.2.0 # via boto3
|
||||
salttesting==2017.6.1
|
||||
scp==0.13.2 # via junos-eznc
|
||||
setproctitle==1.1.10
|
||||
setuptools-scm==3.2.0
|
||||
six==1.12.0 # via bcrypt, cheroot, cherrypy, cryptography, docker, docker-pycreds, google-auth, junos-eznc, kazoo, kubernetes, mock, more-itertools, moto, ncclient, packaging, pygit2, pynacl, pyopenssl, pytest, python-dateutil, python-jose, pyvmomi, responses, salttesting, tempora, vcert, websocket-client
|
||||
smmap2==2.0.5 # via gitdb2
|
||||
strict-rfc3339==0.7
|
||||
tempora==1.14.1 # via portend
|
||||
timelib==0.2.4
|
||||
urllib3==1.24.2 # via botocore, kubernetes, python-etcd, requests
|
||||
vcert==0.7.3
|
||||
virtualenv==16.4.3
|
||||
watchdog==0.9.0
|
||||
wcwidth==0.1.7 # via pytest
|
||||
websocket-client==0.40.0 # via docker, kubernetes
|
||||
werkzeug==0.15.6 # via moto
|
||||
wrapt==1.11.1 # via aws-xray-sdk
|
||||
xmltodict==0.12.0 # via moto
|
||||
zc.lockfile==1.4 # via cherrypy
@ -1,6 +1,6 @@
# coding: utf-8 -*-
'''
This directory contains external modules shipping with Salt. They are governed
under their respective licenses. See the COPYING file included with this
under their respective licenses. See the LICENSE file included with this
distribution for more information.
'''
@ -46,7 +46,7 @@ from zope.interface import implementer  # type: ignore
from salt.ext.tornado.concurrent import Future
from salt.ext.tornado.escape import utf8
from salt.ext.tornado import gen
import tornado.ioloop
import salt.ext.tornado.ioloop
from salt.ext.tornado.log import app_log
from salt.ext.tornado.netutil import Resolver
from salt.ext.tornado.stack_context import NullContext, wrap
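The hunks in this vendored copy of Tornado, here and in the test files below, apply one mechanical change: every bare tornado.* import or attribute reference becomes salt.ext.tornado.*, so Salt always runs against its bundled copy instead of whatever Tornado happens to be installed system-wide. A minimal, hypothetical sketch of the consuming side, assuming only that salt.ext.tornado is importable:

# Hypothetical usage sketch: import the bundled Tornado under its salt.ext
# namespace and keep a short local alias so call sites stay readable.
import salt.ext.tornado.ioloop as ioloop

loop = ioloop.IOLoop.current()  # same Tornado API, served by the vendored module
# schedules a callback for the next iteration of the (not yet started) loop
loop.add_callback(lambda: print("running on the vendored IOLoop"))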
@ -128,7 +128,7 @@ class TornadoReactor(PosixReactorBase):
|
|||
"""
|
||||
def __init__(self, io_loop=None):
|
||||
if not io_loop:
|
||||
io_loop = tornado.ioloop.IOLoop.current()
|
||||
io_loop = salt.ext.tornado.ioloop.IOLoop.current()
|
||||
self._io_loop = io_loop
|
||||
self._readers = {} # map of reader objects to fd
|
||||
self._writers = {} # map of writer objects to fd
|
||||
|
@ -352,7 +352,7 @@ def install(io_loop=None):
|
|||
|
||||
"""
|
||||
if not io_loop:
|
||||
io_loop = tornado.ioloop.IOLoop.current()
|
||||
io_loop = salt.ext.tornado.ioloop.IOLoop.current()
|
||||
reactor = TornadoReactor(io_loop)
|
||||
from twisted.internet.main import installReactor # type: ignore
|
||||
installReactor(reactor)
|
||||
|
@ -374,22 +374,22 @@ class _FD(object):
|
|||
|
||||
def doRead(self):
|
||||
if not self.lost:
|
||||
self.handler(self.fileobj, tornado.ioloop.IOLoop.READ)
|
||||
self.handler(self.fileobj, salt.ext.tornado.ioloop.IOLoop.READ)
|
||||
|
||||
def doWrite(self):
|
||||
if not self.lost:
|
||||
self.handler(self.fileobj, tornado.ioloop.IOLoop.WRITE)
|
||||
self.handler(self.fileobj, salt.ext.tornado.ioloop.IOLoop.WRITE)
|
||||
|
||||
def connectionLost(self, reason):
|
||||
if not self.lost:
|
||||
self.handler(self.fileobj, tornado.ioloop.IOLoop.ERROR)
|
||||
self.handler(self.fileobj, salt.ext.tornado.ioloop.IOLoop.ERROR)
|
||||
self.lost = True
|
||||
|
||||
def logPrefix(self):
|
||||
return ''
|
||||
|
||||
|
||||
class TwistedIOLoop(tornado.ioloop.IOLoop):
|
||||
class TwistedIOLoop(salt.ext.tornado.ioloop.IOLoop):
|
||||
"""IOLoop implementation that runs on Twisted.
|
||||
|
||||
`TwistedIOLoop` implements the Tornado IOLoop interface on top of
|
||||
|
@ -434,16 +434,16 @@ class TwistedIOLoop(tornado.ioloop.IOLoop):
|
|||
raise ValueError('fd %s added twice' % fd)
|
||||
fd, fileobj = self.split_fd(fd)
|
||||
self.fds[fd] = _FD(fd, fileobj, wrap(handler))
|
||||
if events & tornado.ioloop.IOLoop.READ:
|
||||
if events & salt.ext.tornado.ioloop.IOLoop.READ:
|
||||
self.fds[fd].reading = True
|
||||
self.reactor.addReader(self.fds[fd])
|
||||
if events & tornado.ioloop.IOLoop.WRITE:
|
||||
if events & salt.ext.tornado.ioloop.IOLoop.WRITE:
|
||||
self.fds[fd].writing = True
|
||||
self.reactor.addWriter(self.fds[fd])
|
||||
|
||||
def update_handler(self, fd, events):
|
||||
fd, fileobj = self.split_fd(fd)
|
||||
if events & tornado.ioloop.IOLoop.READ:
|
||||
if events & salt.ext.tornado.ioloop.IOLoop.READ:
|
||||
if not self.fds[fd].reading:
|
||||
self.fds[fd].reading = True
|
||||
self.reactor.addReader(self.fds[fd])
|
||||
|
@ -451,7 +451,7 @@ class TwistedIOLoop(tornado.ioloop.IOLoop):
|
|||
if self.fds[fd].reading:
|
||||
self.fds[fd].reading = False
|
||||
self.reactor.removeReader(self.fds[fd])
|
||||
if events & tornado.ioloop.IOLoop.WRITE:
|
||||
if events & salt.ext.tornado.ioloop.IOLoop.WRITE:
|
||||
if not self.fds[fd].writing:
|
||||
self.fds[fd].writing = True
|
||||
self.reactor.addWriter(self.fds[fd])
|
||||
|
@ -534,7 +534,7 @@ class TwistedResolver(Resolver):
|
|||
self.io_loop = io_loop or IOLoop.current()
|
||||
# partial copy of twisted.names.client.createResolver, which doesn't
|
||||
# allow for a reactor to be passed in.
|
||||
self.reactor = tornado.platform.twisted.TornadoReactor(io_loop)
|
||||
self.reactor = salt.ext.tornado.platform.twisted.TornadoReactor(io_loop)
|
||||
|
||||
host_resolver = twisted.names.hosts.Resolver('/etc/hosts')
|
||||
cache_resolver = twisted.names.cache.CacheResolver(reactor=self.reactor)
|
||||
|
|
|
@ -3,7 +3,7 @@
|
|||
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
import tornado.escape
|
||||
import salt.ext.tornado.escape
|
||||
|
||||
from salt.ext.tornado.escape import utf8, xhtml_escape, xhtml_unescape, url_escape, url_unescape, to_unicode, json_decode, json_encode, squeeze, recursive_unicode
|
||||
from salt.ext.tornado.util import unicode_type
|
||||
|
@ -136,7 +136,7 @@ linkify_tests = [
|
|||
class EscapeTestCase(unittest.TestCase):
|
||||
def test_linkify(self):
|
||||
for text, kwargs, html in linkify_tests:
|
||||
linked = tornado.escape.linkify(text, **kwargs)
|
||||
linked = salt.ext.tornado.escape.linkify(text, **kwargs)
|
||||
self.assertEqual(linked, html)
|
||||
|
||||
def test_xhtml_escape(self):
|
||||
|
|
|
@ -9,32 +9,32 @@ class ImportTest(unittest.TestCase):
|
|||
# Some of our modules are not otherwise tested. Import them
|
||||
# all (unless they have external dependencies) here to at
|
||||
# least ensure that there are no syntax errors.
|
||||
import tornado.auth
|
||||
import tornado.autoreload
|
||||
import tornado.concurrent
|
||||
import tornado.escape
|
||||
import tornado.gen
|
||||
import tornado.http1connection
|
||||
import tornado.httpclient
|
||||
import tornado.httpserver
|
||||
import tornado.httputil
|
||||
import tornado.ioloop
|
||||
import tornado.iostream
|
||||
import tornado.locale
|
||||
import tornado.log
|
||||
import tornado.netutil
|
||||
import tornado.options
|
||||
import tornado.process
|
||||
import tornado.simple_httpclient
|
||||
import tornado.stack_context
|
||||
import tornado.tcpserver
|
||||
import tornado.tcpclient
|
||||
import tornado.template
|
||||
import tornado.testing
|
||||
import tornado.util
|
||||
import tornado.web
|
||||
import tornado.websocket
|
||||
import tornado.wsgi
|
||||
import salt.ext.tornado.auth
|
||||
import salt.ext.tornado.autoreload
|
||||
import salt.ext.tornado.concurrent
|
||||
import salt.ext.tornado.escape
|
||||
import salt.ext.tornado.gen
|
||||
import salt.ext.tornado.http1connection
|
||||
import salt.ext.tornado.httpclient
|
||||
import salt.ext.tornado.httpserver
|
||||
import salt.ext.tornado.httputil
|
||||
import salt.ext.tornado.ioloop
|
||||
import salt.ext.tornado.iostream
|
||||
import salt.ext.tornado.locale
|
||||
import salt.ext.tornado.log
|
||||
import salt.ext.tornado.netutil
|
||||
import salt.ext.tornado.options
|
||||
import salt.ext.tornado.process
|
||||
import salt.ext.tornado.simple_httpclient
|
||||
import salt.ext.tornado.stack_context
|
||||
import salt.ext.tornado.tcpserver
|
||||
import salt.ext.tornado.tcpclient
|
||||
import salt.ext.tornado.template
|
||||
import salt.ext.tornado.testing
|
||||
import salt.ext.tornado.util
|
||||
import salt.ext.tornado.web
|
||||
import salt.ext.tornado.websocket
|
||||
import salt.ext.tornado.wsgi
|
||||
|
||||
# for modules with dependencies, if those dependencies can be loaded,
|
||||
# load them too.
|
||||
|
@ -45,4 +45,4 @@ class ImportTest(unittest.TestCase):
|
|||
except ImportError:
|
||||
pass
|
||||
else:
|
||||
import tornado.curl_httpclient
|
||||
import salt.ext.tornado.curl_httpclient
|
||||
|
|
|
@ -6,7 +6,7 @@ import os
|
|||
import shutil
|
||||
import tempfile
|
||||
|
||||
import tornado.locale
|
||||
import salt.ext.tornado.locale
|
||||
from salt.ext.tornado.escape import utf8, to_unicode
|
||||
from salt.ext.tornado.test.util import unittest, skipOnAppEngine
|
||||
from salt.ext.tornado.util import unicode_type
|
||||
|
@ -17,25 +17,25 @@ class TranslationLoaderTest(unittest.TestCase):
|
|||
SAVE_VARS = ['_translations', '_supported_locales', '_use_gettext']
|
||||
|
||||
def clear_locale_cache(self):
|
||||
if hasattr(tornado.locale.Locale, '_cache'):
|
||||
del tornado.locale.Locale._cache
|
||||
if hasattr(salt.ext.tornado.locale.Locale, '_cache'):
|
||||
del salt.ext.tornado.locale.Locale._cache
|
||||
|
||||
def setUp(self):
|
||||
self.saved = {}
|
||||
for var in TranslationLoaderTest.SAVE_VARS:
|
||||
self.saved[var] = getattr(tornado.locale, var)
|
||||
self.saved[var] = getattr(salt.ext.tornado.locale, var)
|
||||
self.clear_locale_cache()
|
||||
|
||||
def tearDown(self):
|
||||
for k, v in self.saved.items():
|
||||
setattr(tornado.locale, k, v)
|
||||
setattr(salt.ext.tornado.locale, k, v)
|
||||
self.clear_locale_cache()
|
||||
|
||||
def test_csv(self):
|
||||
tornado.locale.load_translations(
|
||||
salt.ext.tornado.locale.load_translations(
|
||||
os.path.join(os.path.dirname(__file__), 'csv_translations'))
|
||||
locale = tornado.locale.get("fr_FR")
|
||||
self.assertTrue(isinstance(locale, tornado.locale.CSVLocale))
|
||||
locale = salt.ext.tornado.locale.get("fr_FR")
|
||||
self.assertTrue(isinstance(locale, salt.ext.tornado.locale.CSVLocale))
|
||||
self.assertEqual(locale.translate("school"), u"\u00e9cole")
|
||||
|
||||
# tempfile.mkdtemp is not available on app engine.
|
||||
|
@ -53,19 +53,19 @@ class TranslationLoaderTest(unittest.TestCase):
|
|||
try:
|
||||
with open(os.path.join(tmpdir, 'fr_FR.csv'), 'wb') as f:
|
||||
f.write(char_data.encode(encoding))
|
||||
tornado.locale.load_translations(tmpdir)
|
||||
locale = tornado.locale.get('fr_FR')
|
||||
self.assertIsInstance(locale, tornado.locale.CSVLocale)
|
||||
salt.ext.tornado.locale.load_translations(tmpdir)
|
||||
locale = salt.ext.tornado.locale.get('fr_FR')
|
||||
self.assertIsInstance(locale, salt.ext.tornado.locale.CSVLocale)
|
||||
self.assertEqual(locale.translate("school"), u"\u00e9cole")
|
||||
finally:
|
||||
shutil.rmtree(tmpdir)
|
||||
|
||||
def test_gettext(self):
|
||||
tornado.locale.load_gettext_translations(
|
||||
salt.ext.tornado.locale.load_gettext_translations(
|
||||
os.path.join(os.path.dirname(__file__), 'gettext_translations'),
|
||||
"tornado_test")
|
||||
locale = tornado.locale.get("fr_FR")
|
||||
self.assertTrue(isinstance(locale, tornado.locale.GettextLocale))
|
||||
locale = salt.ext.tornado.locale.get("fr_FR")
|
||||
self.assertTrue(isinstance(locale, salt.ext.tornado.locale.GettextLocale))
|
||||
self.assertEqual(locale.translate("school"), u"\u00e9cole")
|
||||
self.assertEqual(locale.pgettext("law", "right"), u"le droit")
|
||||
self.assertEqual(locale.pgettext("good", "right"), u"le bien")
|
||||
|
@ -77,7 +77,7 @@ class TranslationLoaderTest(unittest.TestCase):
|
|||
|
||||
class LocaleDataTest(unittest.TestCase):
|
||||
def test_non_ascii_name(self):
|
||||
name = tornado.locale.LOCALE_NAMES['es_LA']['name']
|
||||
name = salt.ext.tornado.locale.LOCALE_NAMES['es_LA']['name']
|
||||
self.assertTrue(isinstance(name, unicode_type))
|
||||
self.assertEqual(name, u'Espa\u00f1ol')
|
||||
self.assertEqual(utf8(name), b'Espa\xc3\xb1ol')
|
||||
|
@ -85,7 +85,7 @@ class LocaleDataTest(unittest.TestCase):
|
|||
|
||||
class EnglishTest(unittest.TestCase):
|
||||
def test_format_date(self):
|
||||
locale = tornado.locale.get('en_US')
|
||||
locale = salt.ext.tornado.locale.get('en_US')
|
||||
date = datetime.datetime(2013, 4, 28, 18, 35)
|
||||
self.assertEqual(locale.format_date(date, full_format=True),
|
||||
'April 28, 2013 at 6:35 pm')
|
||||
|
@ -114,18 +114,18 @@ class EnglishTest(unittest.TestCase):
|
|||
'%s %d, %d' % (locale._months[date.month - 1], date.day, date.year))
|
||||
|
||||
def test_friendly_number(self):
|
||||
locale = tornado.locale.get('en_US')
|
||||
locale = salt.ext.tornado.locale.get('en_US')
|
||||
self.assertEqual(locale.friendly_number(1000000), '1,000,000')
|
||||
|
||||
def test_list(self):
|
||||
locale = tornado.locale.get('en_US')
|
||||
locale = salt.ext.tornado.locale.get('en_US')
|
||||
self.assertEqual(locale.list([]), '')
|
||||
self.assertEqual(locale.list(['A']), 'A')
|
||||
self.assertEqual(locale.list(['A', 'B']), 'A and B')
|
||||
self.assertEqual(locale.list(['A', 'B', 'C']), 'A, B and C')
|
||||
|
||||
def test_format_day(self):
|
||||
locale = tornado.locale.get('en_US')
|
||||
locale = salt.ext.tornado.locale.get('en_US')
|
||||
date = datetime.datetime(2013, 4, 28, 18, 35)
|
||||
self.assertEqual(locale.format_day(date=date, dow=True), 'Sunday, April 28')
|
||||
self.assertEqual(locale.format_day(date=date, dow=False), 'April 28')
|
||||
|
|
|
@ -165,7 +165,7 @@ def main():
|
|||
add_parse_callback(
|
||||
lambda: logging.getLogger().handlers[0].addFilter(log_counter))
|
||||
|
||||
import tornado.testing
|
||||
import salt.ext.tornado.testing
|
||||
kwargs = {}
|
||||
if sys.version_info >= (3, 2):
|
||||
# HACK: unittest.main will make its own changes to the warning
|
||||
|
@ -176,7 +176,7 @@ def main():
|
|||
kwargs['warnings'] = False
|
||||
kwargs['testRunner'] = TornadoTextTestRunner
|
||||
try:
|
||||
tornado.testing.main(**kwargs)
|
||||
salt.ext.tornado.testing.main(**kwargs)
|
||||
finally:
|
||||
# The tests should run clean; consider it a failure if they logged
|
||||
# any warnings or errors. We'd like to ban info logs too, but
|
||||
|
|
|
@ -5,7 +5,7 @@ import re
|
|||
import sys
|
||||
import datetime
|
||||
|
||||
import tornado.escape
|
||||
import salt.ext.tornado.escape
|
||||
from salt.ext.tornado.escape import utf8
|
||||
from salt.ext.tornado.util import raise_exc_info, Configurable, exec_in, ArgReplacer, timedelta_to_seconds, import_object, re_unescape, is_finalizing, PY3
|
||||
from salt.ext.tornado.test.util import unittest
|
||||
|
@ -194,13 +194,13 @@ class ImportObjectTest(unittest.TestCase):
|
|||
self.assertIs(import_object(u'tornado.escape.utf8'), utf8)
|
||||
|
||||
def test_import_module(self):
|
||||
self.assertIs(import_object('tornado.escape'), tornado.escape)
|
||||
self.assertIs(import_object('tornado.escape'), salt.ext.tornado.escape)
|
||||
|
||||
def test_import_module_unicode(self):
|
||||
# The internal implementation of __import__ differs depending on
|
||||
# whether the thing being imported is a module or not.
|
||||
# This variant requires a byte string in python 2.
|
||||
self.assertIs(import_object(u'tornado.escape'), tornado.escape)
|
||||
self.assertIs(import_object(u'tornado.escape'), salt.ext.tornado.escape)
|
||||
|
||||
|
||||
class ReUnescapeTest(unittest.TestCase):
|
||||
|
|
|
@ -15,7 +15,7 @@ from salt.ext.tornado.test.util import unittest, skipBefore35, exec_test
|
|||
from salt.ext.tornado.web import Application, RequestHandler
|
||||
|
||||
try:
|
||||
import tornado.websocket # noqa
|
||||
import salt.ext.tornado.websocket # noqa
|
||||
from salt.ext.tornado.util import _websocket_mask_python
|
||||
except ImportError:
|
||||
# The unittest module presents misleading errors on ImportError
|
||||
|
|
|
@ -75,12 +75,12 @@ import stat
|
|||
import sys
|
||||
import threading
|
||||
import time
|
||||
import salt.ext.tornado as tornado
|
||||
import traceback
|
||||
import types
|
||||
from inspect import isclass
|
||||
from io import BytesIO
|
||||
|
||||
import salt.ext.tornado
|
||||
from salt.ext.tornado.concurrent import Future
|
||||
from salt.ext.tornado import escape
|
||||
from salt.ext.tornado import gen
|
||||
|
@ -288,7 +288,7 @@ class RequestHandler(object):
|
|||
def clear(self):
|
||||
"""Resets all headers and content for this response."""
|
||||
self._headers = httputil.HTTPHeaders({
|
||||
"Server": "TornadoServer/%s" % tornado.version,
|
||||
"Server": "TornadoServer/%s" % salt.ext.tornado.version,
|
||||
"Content-Type": "text/html; charset=UTF-8",
|
||||
"Date": httputil.format_timestamp(time.time()),
|
||||
})
|
||||
|
|
|
@ -296,7 +296,7 @@ class WSGIContainer(object):
|
|||
if "content-type" not in header_set:
|
||||
headers.append(("Content-Type", "text/html; charset=UTF-8"))
|
||||
if "server" not in header_set:
|
||||
headers.append(("Server", "TornadoServer/%s" % tornado.version))
|
||||
headers.append(("Server", "TornadoServer/%s" % salt.ext.tornado.version))
|
||||
|
||||
start_line = httputil.ResponseStartLine("HTTP/1.1", status_code, reason)
|
||||
header_obj = httputil.HTTPHeaders()
@ -38,6 +38,7 @@ import salt.log
# of the modules are loaded and are generally available for any usage.
import salt.modules.cmdmod
import salt.modules.smbios
import salt.utils.args
import salt.utils.dns
import salt.utils.files
import salt.utils.network
@ -280,7 +281,7 @@ def _linux_gpu_data():

    gpus = []
    for gpu in devs:
        vendor_strings = gpu["Vendor"].lower().split()
        vendor_strings = re.split("[^A-Za-z0-9]", gpu["Vendor"].lower())
        # default vendor to 'unknown', overwrite if we match a known one
        vendor = "unknown"
        for name in known_vendors:
@ -3061,3 +3062,26 @@ def default_gateway():
        except Exception:  # pylint: disable=broad-except
            continue
    return grains


def kernelparams():
    """
    Return the kernel boot parameters
    """
    try:
        with salt.utils.files.fopen("/proc/cmdline", "r") as fhr:
            cmdline = fhr.read()
            grains = {"kernelparams": []}
            for data in [
                item.split("=") for item in salt.utils.args.shlex_split(cmdline)
            ]:
                value = None
                if len(data) == 2:
                    value = data[1].strip('"')

                grains["kernelparams"] += [(data[0], value)]
    except IOError as exc:
        grains = {}
        log.debug("Failed to read /proc/cmdline: %s", exc)

    return grains
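The new kernelparams grain splits /proc/cmdline into (key, value) tuples, treating bare flags as keys with a None value. A self-contained illustration of the same parsing, run on a made-up sample command line and using the standard-library shlex.split as a stand-in for salt.utils.args.shlex_split (an assumption for this sketch only):

# Illustration only: mirrors the parsing logic of the kernelparams grain above.
import shlex

sample_cmdline = 'BOOT_IMAGE=/vmlinuz-5.3.0 root=/dev/sda1 ro quiet console="ttyS0,115200"'

params = []
for data in [item.split("=") for item in shlex.split(sample_cmdline)]:
    value = None
    if len(data) == 2:
        value = data[1].strip('"')
    params.append((data[0], value))

print(params)
# [('BOOT_IMAGE', '/vmlinuz-5.3.0'), ('root', '/dev/sda1'), ('ro', None),
#  ('quiet', None), ('console', 'ttyS0,115200')]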
@ -223,9 +223,7 @@ def install(name=None, refresh=False, pkgs=None, version=None, test=False, **kwa
        return {}

    if pkgs:
        log.debug(
            "Removing these fileset(s)/rpm package(s) {0}: {1}".format(name, targets)
        )
        log.debug("Removing these fileset(s)/rpm package(s) %s: %s", name, targets)

    # Get a list of the currently installed pkgs.
    old = list_pkgs()
@ -310,9 +308,7 @@ def remove(name=None, pkgs=None, **kwargs):
        return {}

    if pkgs:
        log.debug(
            "Removing these fileset(s)/rpm package(s) {0}: {1}".format(name, targets)
        )
        log.debug("Removing these fileset(s)/rpm package(s) %s: %s", name, targets)

    errors = []

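Both hunks above swap a str.format() call inside log.debug() for the logger's own %s placeholders. The difference is purely about when the message is built: with .format() the string is rendered even when DEBUG is filtered out, while passing the arguments through lets the logging framework skip formatting entirely. A small stand-alone comparison (the fileset name and target list are made-up example values):

import logging

logging.basicConfig(level=logging.INFO)
log = logging.getLogger(__name__)

name = "bos.rte.printers"            # example values only
targets = ["bos.rte.printers_7.2"]

# eager: formatted even though DEBUG is disabled above
log.debug("Removing these fileset(s)/rpm package(s) {0}: {1}".format(name, targets))

# lazy: the logging framework formats only if the record is actually emitted
log.debug("Removing these fileset(s)/rpm package(s) %s: %s", name, targets)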
@ -177,7 +177,7 @@ def latest_version(*names, **kwargs):
    """
    refresh = salt.utils.data.is_true(kwargs.pop("refresh", True))

    if len(names) == 0:
    if not names:
        return ""

    ret = {}

@ -270,7 +270,7 @@ def latest_version(*names, **kwargs):
    fromrepo = kwargs.pop("fromrepo", None)
    cache_valid_time = kwargs.pop("cache_valid_time", 0)

    if len(names) == 0:
    if not names:
        return ""
    ret = {}
    # Initialize the dict with empty strings
@ -634,7 +634,7 @@ def install(
    if not fromrepo and repo:
        fromrepo = repo

    if pkg_params is None or len(pkg_params) == 0:
    if not pkg_params:
        return {}

    cmd_prefix = []

@ -121,7 +121,7 @@ def _check_load_paths(load_path):
        else:
            log.info("Invalid augeas_cfg load_path entry: %s removed", _path)

    if len(_paths) == 0:
    if not _paths:
        return None

    return ":".join(_paths)

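The hunks above replace explicit emptiness tests such as len(names) == 0 and "pkg_params is None or len(pkg_params) == 0" with plain truthiness checks: empty sequences and None are both falsy, so "if not x:" covers the same cases in one readable expression. A quick illustration with a hypothetical stand-in for the latest_version() signature shown above:

def latest_version_stub(*names):
    # hypothetical stand-in for the real latest_version() above
    if not names:           # replaces: if len(names) == 0:
        return ""
    return {name: "1.0-1" for name in names}

print(latest_version_stub())                  # '' (no names were passed)
print(latest_version_stub("zsh", "tmux"))     # {'zsh': '1.0-1', 'tmux': '1.0-1'}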
@ -238,8 +238,7 @@ def delete_queue(name, region, opts=None, user=None):
    queues = list_queues(region, opts, user)
    url_map = _parse_queue_list(queues)

    logger = logging.getLogger(__name__)
    logger.debug("map %s", six.text_type(url_map))
    log.debug("map %s", url_map)
    if name in url_map:
        delete = {"queue-url": url_map[name]}

@ -191,7 +191,7 @@ def default_security_rule_get(name, security_group, resource_group, **kwargs):
                "error": "Unable to find {0} in {1}!".format(name, security_group)
            }
    except KeyError as exc:
        log.error("Unable to find {0} in {1}!".format(name, security_group))
        log.error("Unable to find %s in %s!", name, security_group)
        result = {"error": str(exc)}

    return result
@ -227,7 +227,7 @@ def default_security_rules_list(security_group, resource_group, **kwargs):
    try:
        result = secgroup["default_security_rules"]
    except KeyError as exc:
        log.error("No default security rules found for {0}!".format(security_group))
        log.error("No default security rules found for %s!", security_group)
        result = {"error": str(exc)}

    return result
@ -362,9 +362,7 @@ def security_rule_create_or_update(
    # pylint: disable=eval-used
    if not eval(params[0]) and not eval(params[1]):
        log.error(
            "Either the {0} or {1} parameter must be provided!".format(
                params[0], params[1]
            )
            "Either the %s or %s parameter must be provided!", params[0], params[1]
        )
        return False
    # pylint: disable=eval-used

@ -105,10 +105,7 @@ def attach_(dev=None):
            if "cache" in data:
                res[dev] = attach_(dev)

        if res:
            return res
        else:
            return None
        return res if res else None

    bcache = uuid(dev)
    if bcache:
@ -158,10 +155,7 @@ def detach(dev=None):
            if "cache" in data:
                res[dev] = detach(dev)

        if res:
            return res
        else:
            return None
        return res if res else None

    log.debug("Detaching %s", dev)
    if not _bcsys(dev, "detach", "goaway", "error", "Error detaching {0}".format(dev)):
@ -737,7 +731,7 @@ def _bdev(dev=None):
    if not dev:
        return False
    else:
        return _devbase(os.path.realpath(os.path.join(dev, "../")))
        return _devbase(os.path.dirname(dev))


def _bcpath(dev):

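In both bcache hunks the four-line if/else that returned the accumulated result dict, or None when it was empty, collapses into a single conditional expression. A throwaway illustration of the equivalence:

def result_or_none(res):
    # equivalent to: if res: return res / else: return None
    return res if res else None

print(result_or_none({"bcache0": "attached"}))   # {'bcache0': 'attached'}
print(result_or_none({}))                        # None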
@ -163,7 +163,7 @@ def add(name, beacon_data, **kwargs):
    else:
        beacon_name = name

    if beacon_name not in list_available(return_yaml=False):
    if beacon_name not in list_available(return_yaml=False, **kwargs):
        ret["comment"] = 'Beacon "{0}" is not available.'.format(beacon_name)
        return ret

@ -201,7 +201,9 @@ def add(name, beacon_data, **kwargs):
                return ret
        except KeyError:
            # Effectively a no-op, since we can't really return without an event system
            ret["result"] = False
            ret["comment"] = "Event module not available. Beacon add failed."
            return ret

        try:
            with salt.utils.event.get_event(
@ -234,6 +236,7 @@ def add(name, beacon_data, **kwargs):
                    return ret
        except KeyError:
            # Effectively a no-op, since we can't really return without an event system
            ret["result"] = False
            ret["comment"] = "Event module not available. Beacon add failed."
            return ret

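The add() hunks above, and the modify/delete/enable/disable hunks that follow, converge on the same hardening: when the event function cannot be looked up, the KeyError is caught and the call reports a failed result with an explicit comment instead of leaving ret half-filled. A rough sketch of that pattern, with the function lookup faked by a plain dict (the real module pulls it from __salt__, which is not reproduced here):

def fire_beacon_event(funcs, ret):
    try:
        funcs["event.fire"]({"func": "add"}, "manage_beacons")  # KeyError if missing
        ret["result"] = True
    except KeyError:
        # Effectively a no-op, since we can't really return without an event system
        ret["result"] = False
        ret["comment"] = "Event module not available. Beacon add failed."
    return ret

print(fire_beacon_event({}, {"comment": "", "result": False}))
# {'comment': 'Event module not available. Beacon add failed.', 'result': False}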
@ -262,7 +265,7 @@ def modify(name, beacon_data, **kwargs):

    if "test" in kwargs and kwargs["test"]:
        ret["result"] = True
        ret["comment"] = "Beacon: {0} would be added.".format(name)
        ret["comment"] = "Beacon: {0} would be modified.".format(name)
    else:
        try:
            # Attempt to load the beacon module so we have access to the validate function
@ -289,13 +292,15 @@ def modify(name, beacon_data, **kwargs):
                ret["result"] = False
                ret["comment"] = (
                    "Beacon {0} configuration invalid, "
                    "not adding.\n{1}".format(name, vcomment)
                    "not modifying.\n{1}".format(name, vcomment)
                )
                return ret

        except KeyError:
            # Effectively a no-op, since we can't really return without an event system
            ret["result"] = False
            ret["comment"] = "Event module not available. Beacon modify failed."
            return ret

        if not valid:
            ret["result"] = False
@ -364,7 +369,8 @@ def modify(name, beacon_data, **kwargs):
                    return ret
        except KeyError:
            # Effectively a no-op, since we can't really return without an event system
            ret["comment"] = "Event module not available. Beacon add failed."
            ret["result"] = False
            ret["comment"] = "Event module not available. Beacon modify failed."
            return ret


@ -421,13 +427,14 @@ def delete(name, **kwargs):
            )
        except KeyError:
            # Effectively a no-op, since we can't really return without an event system
            ret["result"] = False
            ret["comment"] = "Event module not available. Beacon add failed."
            return ret


def save(**kwargs):
    """
    Save all beacons on the minion
    Save all configured beacons to the minion config

    :return: Boolean and status message on success or failure of save.

@ -461,7 +468,7 @@ def save(**kwargs):
    except (IOError, OSError):
        ret[
            "comment"
        ] = "Unable to write to beacons file at {0}. Check " "permissions.".format(sfn)
        ] = "Unable to write to beacons file at {0}. Check permissions.".format(sfn)
        ret["result"] = False
        return ret

@ -513,6 +520,7 @@ def enable(**kwargs):
            return ret
        except KeyError:
            # Effectively a no-op, since we can't really return without an event system
            ret["result"] = False
            ret["comment"] = "Event module not available. Beacons enable job failed."
            return ret

@ -564,6 +572,7 @@ def disable(**kwargs):
            return ret
        except KeyError:
            # Effectively a no-op, since we can't really return without an event system
            ret["result"] = False
            ret["comment"] = "Event module not available. Beacons enable job failed."
            return ret

@ -650,13 +659,14 @@ def enable_beacon(name, **kwargs):
            return ret
        except KeyError:
            # Effectively a no-op, since we can't really return without an event system
            ret["result"] = False
            ret["comment"] = "Event module not available. Beacon enable job failed."
            return ret


def disable_beacon(name, **kwargs):
    """
    Disable beacon on the minion
    Disable a beacon on the minion

    :name: Name of the beacon to disable.
    :return: Boolean and status message on success or failure of disable.
@ -676,7 +686,7 @@ def disable_beacon(name, **kwargs):
        return ret

    if "test" in kwargs and kwargs["test"]:
        ret["comment"] = "Beacons would be enabled."
        ret["comment"] = "Beacons would be disabled."
    else:
        _beacons = list_(return_yaml=False, **kwargs)
        if name not in _beacons:
@ -724,6 +734,7 @@ def disable_beacon(name, **kwargs):
            return ret
        except KeyError:
            # Effectively a no-op, since we can't really return without an event system
            ret["result"] = False
            ret["comment"] = "Event module not available. Beacon disable job failed."
            return ret

@ -762,9 +773,14 @@ def reset(**kwargs):
            if ret is not None:
                ret["comment"] = event_ret["comment"]
|
||||
else:
|
||||
ret["comment"] = "Beacon reset event never received"
|
||||
ret[
|
||||
"comment"
|
||||
] = "Did not receive the beacon reset event before the timeout of {}s".format(
|
||||
kwargs.get("timeout", default_event_wait)
|
||||
)
|
||||
return ret
|
||||
except KeyError:
|
||||
# Effectively a no-op, since we can't really return without an event system
|
||||
ret["comment"] = "Event module not available. Beacon disable job failed."
|
||||
ret["result"] = False
|
||||
ret["comment"] = "Event module not available. Beacon reset job failed."
|
||||
return ret
|
||||
|
|
|
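The beacons hunks above all follow the module's return convention: a ret dictionary carrying result and comment, an early return when test=True is passed, and a KeyError fallback that flags failure when no event system is available. A rough, hypothetical sketch of that shape (not the real Salt implementation, which fires events on the minion bus):

    def modify_beacon_sketch(name, beacon_data, test=False, event_bus=None):
        # Hypothetical helper; the real module reads these from **kwargs/__opts__.
        ret = {"result": True, "comment": ""}

        if test:
            ret["comment"] = "Beacon: {0} would be modified.".format(name)
            return ret

        if event_bus is None:
            # Mirrors the KeyError branches above: without an event system
            # there is nothing to do, so report the failure instead of raising.
            ret["result"] = False
            ret["comment"] = "Event module not available. Beacon modify failed."
            return ret

        event_bus.fire_event({"name": name, "beacon_data": beacon_data}, "manage_beacons")
        ret["comment"] = "Modified beacon: {0}.".format(name)
        return ret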
@@ -191,7 +191,7 @@ def _delete_resource(
orig_wait = wait
while wait > 0:
r = s(name=name, conn=conn)
-if not r or (r and r[0].get(status_param) == status_gone):
+if not r or r[0].get(status_param) == status_gone:
log.info("%s %s deleted.", desc.title(), name)
return True
sleep = wait if wait % 60 == wait else 60

@@ -485,7 +485,7 @@ def update(
_asg.resume_processes()
# suspend any that are specified. Note that the boto default of empty
# list suspends all; don't do that.
-if suspended_processes is not None and len(suspended_processes) > 0:
+if suspended_processes:
_asg.suspend_processes(suspended_processes)
log.info("Updated ASG %s", name)
# ### scaling policies

@@ -220,7 +220,7 @@ def describe(Name, region=None, key=None, keyid=None, profile=None):
try:
conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
trails = conn.describe_trails(trailNameList=[Name])
-if trails and len(trails.get("trailList", [])) > 0:
+if trails and trails.get("trailList"):
keys = (
"Name",
"S3BucketName",

@@ -95,7 +95,7 @@ def get_alarm(name, region=None, key=None, keyid=None, profile=None):
conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
alarms = conn.describe_alarms(alarm_names=[name])
-if len(alarms) == 0:
+if not alarms:
return None
if len(alarms) > 1:
log.error("multiple alarms matched name '%s'", name)

@@ -102,7 +102,7 @@ def exists(Name, region=None, key=None, keyid=None, profile=None):
try:
events = conn.list_rules(NamePrefix=Name)
-if len(events) == 0:
+if not events:
return {"exists": False}
for rule in events.get("Rules", []):
if rule.get("Name", None) == Name:

@@ -339,7 +339,7 @@ def extract_index(index_data, global_index=False):
"read": parsed_data["read_capacity_units"],
"write": parsed_data["write_capacity_units"],
}
-if parsed_data["name"] and len(keys) > 0:
+if parsed_data["name"] and keys:
if global_index:
if parsed_data.get("keys_only") and parsed_data.get("includes"):
raise SaltInvocationError(

@@ -942,7 +942,7 @@ def get_tags(instance_id=None, keyid=None, key=None, profile=None, region=None):
tags = []
client = _get_conn(key=key, keyid=keyid, profile=profile, region=region)
result = client.get_all_tags(filters={"resource-id": instance_id})
-if len(result) > 0:
+if result:
for tag in result:
tags.append({tag.name: tag.value})
else:

@@ -1538,7 +1538,7 @@ def get_attribute(
if len(instances) > 1:
log.error("Found more than one EC2 instance matching the criteria.")
return False
-elif len(instances) < 1:
+elif not instances:
log.error("Found no EC2 instance matching the criteria.")
return False
instance_id = instances[0]

@@ -1188,9 +1188,8 @@ def describe_event_source_mapping(
salt myminion boto_lambda.describe_event_source_mapping uuid
"""
ids = _get_ids(UUID, EventSourceArn=EventSourceArn, FunctionName=FunctionName)
-if len(ids) < 1:
+if not ids:
return {"event_source_mapping": None}
UUID = ids[0]

@@ -2948,7 +2948,7 @@ def route_exists(
"vpc_peering_connection_id": vpc_peering_connection_id,
}
route_comp = set(route_dict.items()) ^ set(route_check.items())
-if len(route_comp) == 0:
+if not route_comp:
log.info("Route %s exists.", destination_cidr_block)
return {"exists": True}
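The boto hunks above (and several later ones) all make the same simplification: empty Python containers are falsy, so len(x) == 0, len(x) < 1 and len(x) > 0 collapse to not x and x. A short standalone illustration with made-up values:

    alarms = []          # e.g. a describe call that matched nothing
    instances = ["i-1"]  # illustrative only, not taken from the diff

    # Before: explicit length comparisons.
    if len(alarms) == 0:
        print("no alarms found")
    if len(instances) > 0:
        print("instances found")

    # After: rely on container truthiness, as the rewritten lines do.
    if not alarms:
        print("no alarms found")
    if instances:
        print("instances found")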
@@ -207,7 +207,7 @@ def _import_platform_generator(platform):
The generator class is identified looking under the <platform> module
for a class inheriting the `ACLGenerator` class.
"""
-log.debug("Using platform: {plat}".format(plat=platform))
+log.debug("Using platform: %s", platform)
for mod_name, mod_obj in inspect.getmembers(capirca.aclgen):
if mod_name == platform and inspect.ismodule(mod_obj):
for plat_obj_name, plat_obj in inspect.getmembers(

@@ -216,15 +216,9 @@ def _import_platform_generator(platform):
if inspect.isclass(plat_obj) and issubclass(
plat_obj, capirca.lib.aclgenerator.ACLGenerator
):
-log.debug(
-"Identified Capirca class {cls} for {plat}".format(
-cls=plat_obj, plat=platform
-)
-)
+log.debug("Identified Capirca class %s for %s", plat_obj, platform)
return plat_obj
-log.error(
-"Unable to identify any Capirca plaform class for {plat}".format(plat=platform)
-)
+log.error("Unable to identify any Capirca plaform class for %s", platform)
def _get_services_mapping():

@@ -267,9 +261,8 @@ def _get_services_mapping():
log.error("Did not read that properly:")
log.error(line)
log.error(
-"Please report the above error: {port} does not seem a valid port value!".format(
-port=port
-)
+"Please report the above error: %s does not seem a valid port value!",
+port,
)
_SERVICES[srv_name]["protocol"].append(protocol)
return _SERVICES

@@ -501,11 +494,7 @@ def _get_term_object(
"""
Return an instance of the ``_Term`` class given the term options.
"""
-log.debug(
-"Generating config for term {tname} under filter {fname}".format(
-tname=term_name, fname=filter_name
-)
-)
+log.debug("Generating config for term %s under filter %s", term_name, filter_name)
term = _Term()
term.name = term_name
term_opts = {}

@@ -588,7 +577,7 @@ def _get_policy_object(
log.debug(six.text_type(policy))
platform_generator = _import_platform_generator(platform)
policy_config = platform_generator(policy, 2)
-log.debug("Generating policy config for {platform}:".format(platform=platform))
+log.debug("Generating policy config for %s:", platform)
log.debug(six.text_type(policy_config))
return policy_config

@@ -215,6 +215,6 @@ def _exec_cmd(*args, **kwargs):
]
)
cmd_exec = "{0}{1}".format(cmd_args, cmd_kwargs)
-log.debug("Chef command: {0}".format(cmd_exec))
+log.debug("Chef command: %s", cmd_exec)
return __salt__["cmd.run_all"](cmd_exec, python_shell=False)

@@ -16,12 +16,12 @@ import tempfile
# Import salt libs
import salt.utils.data
import salt.utils.platform
-from requests.structures import CaseInsensitiveDict
from salt.exceptions import (
CommandExecutionError,
CommandNotFoundError,
SaltInvocationError,
)
+from salt.utils.data import CaseInsensitiveDict
from salt.utils.versions import LooseVersion as _LooseVersion
log = logging.getLogger(__name__)

@@ -852,11 +852,11 @@ def _run(
)
log.error(log_callback(msg))
if ret["stdout"]:
-log.log(output_loglevel, "stdout: {0}".format(log_callback(ret["stdout"])))
+log.log(output_loglevel, "stdout: %s", log_callback(ret["stdout"]))
if ret["stderr"]:
-log.log(output_loglevel, "stderr: {0}".format(log_callback(ret["stderr"])))
+log.log(output_loglevel, "stderr: %s", log_callback(ret["stderr"]))
if ret["retcode"]:
-log.log(output_loglevel, "retcode: {0}".format(ret["retcode"]))
+log.log(output_loglevel, "retcode: %s", ret["retcode"])
return ret

@@ -3339,7 +3339,7 @@ def shell_info(shell, list_modules=False):
hive="HKEY_LOCAL_MACHINE", key="Software\\Microsoft\\PowerShell"
)
pw_keys.sort(key=int)
-if len(pw_keys) == 0:
+if not pw_keys:
return {
"error": "Unable to locate 'powershell' Reason: Cannot be "
"found in registry.",

@@ -485,8 +485,7 @@ def get(
else:
if merge not in ("recurse", "overwrite"):
log.warning(
-"Unsupported merge strategy '{0}'. Falling back "
-"to 'recurse'.".format(merge)
+"Unsupported merge strategy '%s'. Falling back " "to 'recurse'.", merge
)
merge = "recurse"

@@ -553,7 +552,7 @@ def gather_bootstrap_script(bootstrap=None):
if not HAS_CLOUD:
return False, "config.gather_bootstrap_script is unavailable"
ret = salt.utils.cloud.update_bootstrap(__opts__, url=bootstrap)
-if "Success" in ret and len(ret["Success"]["Files updated"]) > 0:
+if "Success" in ret and ret["Success"]["Files updated"]:
return ret["Success"]["Files updated"][0]

@@ -394,17 +394,11 @@ def copy_to(
# Before we try to replace the file, compare checksums.
source_md5 = __salt__["file.get_sum"](local_file, "md5")
if source_md5 == _get_md5(name, dest, run_all):
-log.debug(
-"{0} and {1}:{2} are the same file, skipping copy".format(
-source, name, dest
-)
-)
+log.debug("%s and %s:%s are the same file, skipping copy", source, name, dest)
return True
log.debug(
-"Copying {0} to {1} container '{2}' as {3}".format(
-source, container_type, name, dest
-)
+"Copying %s to %s container '%s' as %s", source, container_type, name, dest
)
# Using cat here instead of opening the file, reading it into memory,

@@ -113,7 +113,7 @@ def remove(module, details=False):
for file_ in files:
if file_ in rm_details:
continue
-log.trace("Removing {0}".format(file_))
+log.trace("Removing %s", file_)
if __salt__["file.remove"](file_):
rm_details[file_] = "removed"
else:

@@ -355,7 +355,7 @@ def raw_cron(user):
)
).splitlines(True)
-if len(lines) != 0 and lines[0].startswith(
+if lines and lines[0].startswith(
"# DO NOT EDIT THIS FILE - edit the master and reinstall."
):
del lines[0:3]

@@ -233,8 +233,7 @@ def _csf_to_list(option):
def split_option(option):
-l = re.split("(?: +)?\=(?: +)?", option)  # pylint: disable=W1401
-return l
+return re.split(r"(?: +)?\=(?: +)?", option)
def get_option(option):

@@ -68,7 +68,7 @@ def _check_cygwin_installed(cyg_arch="x86_64"):
path_to_cygcheck = os.sep.join(
["C:", _get_cyg_dir(cyg_arch), "bin", "cygcheck.exe"]
)
-LOG.debug("Path to cygcheck.exe: {0}".format(path_to_cygcheck))
+LOG.debug("Path to cygcheck.exe: %s", path_to_cygcheck)
if not os.path.exists(path_to_cygcheck):
LOG.debug("Could not find cygcheck.exe")
return False

@@ -122,7 +122,7 @@ def check_valid_package(package, cyg_arch="x86_64", mirrors=None):
if mirrors is None:
mirrors = [{DEFAULT_MIRROR: DEFAULT_MIRROR_KEY}]
-LOG.debug("Checking Valid Mirrors: {0}".format(mirrors))
+LOG.debug("Checking Valid Mirrors: %s", mirrors)
for mirror in mirrors:
for mirror_url, key in mirror.items():

@@ -251,7 +251,7 @@ def uninstall(packages, cyg_arch="x86_64", mirrors=None):
args = []
if packages is not None:
args.append("--remove-packages {pkgs}".format(pkgs=packages))
-LOG.debug("args: {0}".format(args))
+LOG.debug("args: %s", args)
if not _check_cygwin_installed(cyg_arch):
LOG.debug("We're convinced cygwin isn't installed")
return True

@@ -279,12 +279,7 @@ def update(cyg_arch="x86_64", mirrors=None):
# Can't update something that isn't installed
if not _check_cygwin_installed(cyg_arch):
-LOG.debug(
-"Cygwin ({0}) not installed,\
-could not update".format(
-cyg_arch
-)
-)
+LOG.debug("Cygwin (%s) not installed, could not update", cyg_arch)
return False
return _run_silent_cygwin(cyg_arch=cyg_arch, args=args, mirrors=mirrors)

@@ -235,7 +235,7 @@ def enabled(name, **kwargs):
salt '*' daemontools.enabled <service name>
"""
if not available(name):
-log.error("Service {0} not found".format(name))
+log.error("Service %s not found", name)
return False
run_file = os.path.join(SERVICE_DIR, name, "run")

@@ -86,9 +86,7 @@ def cluster_create(
cmdstr = " ".join([pipes.quote(c) for c in cmd])
ret = __salt__["cmd.run_all"](cmdstr, python_shell=False)
if ret.get("retcode", 0) != 0:
-log.error(
-"Error creating a Postgresql" " cluster {0}/{1}".format(version, name)
-)
+log.error("Error creating a Postgresql cluster %s/%s", version, name)
return False
return ret

@@ -154,9 +152,7 @@ def cluster_remove(version, name="main", stop=False):
ret = __salt__["cmd.run_all"](cmdstr, python_shell=False)
# FIXME - return Boolean ?
if ret.get("retcode", 0) != 0:
-log.error(
-"Error removing a Postgresql" " cluster {0}/{1}".format(version, name)
-)
+log.error("Error removing a Postgresql cluster %s/%s", version, name)
else:
ret["changes"] = ("Successfully removed" " cluster {0}/{1}").format(
version, name

@@ -180,8 +180,9 @@ def _error_msg_routes(iface, option, expected):
def _log_default_iface(iface, opt, value):
-msg = "Using default option -- Interface: {0} Option: {1} Value: {2}"
-log.info(msg.format(iface, opt, value))
+log.info(
+"Using default option -- Interface: %s Option: %s Value: %s", iface, opt, value
+)
def _error_msg_network(option, expected):

@@ -194,8 +195,7 @@ def _error_msg_network(option, expected):
def _log_default_network(opt, value):
-msg = "Using existing setting -- Setting: {0} Value: {1}"
-log.info(msg.format(opt, value))
+log.info("Using existing setting -- Setting: %s Value: %s", opt, value)
def _raise_error_iface(iface, option, expected):

@@ -829,30 +829,27 @@ def _parse_settings_bond(opts, iface):
}
if opts["mode"] in ["balance-rr", "0"]:
-log.info("Device: {0} Bonding Mode: load balancing (round-robin)".format(iface))
+log.info("Device: %s Bonding Mode: load balancing (round-robin)", iface)
return _parse_settings_bond_0(opts, iface, bond_def)
elif opts["mode"] in ["active-backup", "1"]:
-log.info(
-"Device: {0} Bonding Mode: fault-tolerance (active-backup)".format(iface)
-)
+log.info("Device: %s Bonding Mode: fault-tolerance (active-backup)", iface)
return _parse_settings_bond_1(opts, iface, bond_def)
elif opts["mode"] in ["balance-xor", "2"]:
-log.info("Device: {0} Bonding Mode: load balancing (xor)".format(iface))
+log.info("Device: %s Bonding Mode: load balancing (xor)", iface)
return _parse_settings_bond_2(opts, iface, bond_def)
elif opts["mode"] in ["broadcast", "3"]:
-log.info("Device: {0} Bonding Mode: fault-tolerance (broadcast)".format(iface))
+log.info("Device: %s Bonding Mode: fault-tolerance (broadcast)", iface)
return _parse_settings_bond_3(opts, iface, bond_def)
elif opts["mode"] in ["802.3ad", "4"]:
log.info(
-"Device: {0} Bonding Mode: IEEE 802.3ad Dynamic link "
-"aggregation".format(iface)
+"Device: %s Bonding Mode: IEEE 802.3ad Dynamic link " "aggregation", iface
)
return _parse_settings_bond_4(opts, iface, bond_def)
elif opts["mode"] in ["balance-tlb", "5"]:
-log.info("Device: {0} Bonding Mode: transmit load balancing".format(iface))
+log.info("Device: %s Bonding Mode: transmit load balancing", iface)
return _parse_settings_bond_5(opts, iface, bond_def)
elif opts["mode"] in ["balance-alb", "6"]:
-log.info("Device: {0} Bonding Mode: adaptive load balancing".format(iface))
+log.info("Device: %s Bonding Mode: adaptive load balancing", iface)
return _parse_settings_bond_6(opts, iface, bond_def)
else:
valid = [

@@ -513,7 +513,7 @@ def build(
dscs = make_src_pkg(dsc_dir, spec, sources, env, saltenv, runas)
except Exception as exc:  # pylint: disable=broad-except
shutil.rmtree(dsc_dir)
-log.error("Failed to make src package, exception '{0}'".format(exc))
+log.error("Failed to make src package, exception '%s'", exc)
return ret
root_user = "root"

@@ -586,7 +586,7 @@ def build(
ret.setdefault("Packages", []).append(bdist)
except Exception as exc:  # pylint: disable=broad-except
-log.error("Error building from '{0}', execption '{1}'".format(dsc, exc))
+log.error("Error building from '%s', execption '%s'", dsc, exc)
# remove any Packages file created for local dependency processing
for pkgzfile in os.listdir(dest_dir):

@@ -96,8 +96,8 @@ def A(host, nameserver=None):
# In this case, 0 is not the same as False
if cmd["retcode"] != 0:
log.warning(
-"dig returned exit code '{0}'. Returning empty list as "
-"fallback.".format(cmd["retcode"])
+"dig returned exit code '%s'. Returning empty list as fallback.",
+cmd["retcode"],
)
return []

@@ -126,8 +126,8 @@ def AAAA(host, nameserver=None):
# In this case, 0 is not the same as False
if cmd["retcode"] != 0:
log.warning(
-"dig returned exit code '{0}'. Returning empty list as "
-"fallback.".format(cmd["retcode"])
+"dig returned exit code '%s'. Returning empty list as fallback.",
+cmd["retcode"],
)
return []

@@ -156,8 +156,8 @@ def NS(domain, resolve=True, nameserver=None):
# In this case, 0 is not the same as False
if cmd["retcode"] != 0:
log.warning(
-"dig returned exit code '{0}'. Returning empty list as "
-"fallback.".format(cmd["retcode"])
+"dig returned exit code '%s'. Returning empty list as fallback.",
+cmd["retcode"],
)
return []

@@ -195,9 +195,8 @@ def SPF(domain, record="SPF", nameserver=None):
# In this case, 0 is not the same as False
if result["retcode"] != 0:
log.warning(
-"dig returned exit code '{0}'. Returning empty list as fallback.".format(
-result["retcode"]
-)
+"dig returned exit code '%s'. Returning empty list as fallback.",
+result["retcode"],
)
return []

@@ -207,7 +206,7 @@ def SPF(domain, record="SPF", nameserver=None):
return SPF(domain, "TXT", nameserver)
sections = re.sub('"', "", result["stdout"]).split()
-if len(sections) == 0 or sections[0] != "v=spf1":
+if not sections or sections[0] != "v=spf1":
return []
if sections[1].startswith("redirect="):

@@ -253,8 +252,8 @@ def MX(domain, resolve=False, nameserver=None):
# In this case, 0 is not the same as False
if cmd["retcode"] != 0:
log.warning(
-"dig returned exit code '{0}'. Returning empty list as "
-"fallback.".format(cmd["retcode"])
+"dig returned exit code '%s'. Returning empty list as fallback.",
+cmd["retcode"],
)
return []

@@ -287,8 +286,8 @@ def TXT(host, nameserver=None):
if cmd["retcode"] != 0:
log.warning(
-"dig returned exit code '{0}'. Returning empty list as "
-"fallback.".format(cmd["retcode"])
+"dig returned exit code '%s'. Returning empty list as fallback.",
+cmd["retcode"],
)
return []

@@ -611,7 +611,7 @@ def hdparms(disks, args=None):
disk_data = {}
for line in _hdparm("-{0} {1}".format(args, disk), False).splitlines():
line = line.strip()
-if len(line) == 0 or line == disk + ":":
+if not line or line == disk + ":":
continue
if ":" in line:

@@ -647,7 +647,7 @@ def hdparms(disks, args=None):
rvals.append(val)
if valdict:
rvals.append(valdict)
-if len(rvals) == 0:
+if not rvals:
continue
elif len(rvals) == 1:
rvals = rvals[0]

@@ -1506,6 +1506,86 @@ def login(*registries):
return ret
+def logout(*registries):
+"""
+.. versionadded:: 3001
+
+Performs a ``docker logout`` to remove the saved authentication details for
+one or more configured repositories.
+
+Multiple registry URLs (matching those configured in Pillar) can be passed,
+and Salt will attempt to logout of *just* those registries. If no registry
+URLs are provided, Salt will attempt to logout of *all* configured
+registries.
+
+**RETURN DATA**
+
+A dictionary containing the following keys:
+
+- ``Results`` - A dictionary mapping registry URLs to the authentication
+result. ``True`` means a successful logout, ``False`` means a failed
+logout.
+- ``Errors`` - A list of errors encountered during the course of this
+function.
+
+CLI Example:
+
+.. code-block:: bash
+
+salt myminion docker.logout
+salt myminion docker.logout hub
+salt myminion docker.logout hub https://mydomain.tld/registry/
+"""
+# NOTE: This function uses the "docker logout" CLI command to remove
+# authentication information from config.json. docker-py does not support
+# this usecase (see https://github.com/docker/docker-py/issues/1091)
+
+# To logout of all known (to Salt) docker registries, they have to be collected first
+registry_auth = __salt__["config.get"]("docker-registries", {})
+ret = {"retcode": 0}
+errors = ret.setdefault("Errors", [])
+if not isinstance(registry_auth, dict):
+errors.append("'docker-registries' Pillar value must be a dictionary")
+registry_auth = {}
+for reg_name, reg_conf in six.iteritems(
+__salt__["config.option"]("*-docker-registries", wildcard=True)
+):
+try:
+registry_auth.update(reg_conf)
+except TypeError:
+errors.append(
+"Docker registry '{0}' was not specified as a "
+"dictionary".format(reg_name)
+)
+
+# If no registries passed, we will logout of all known registries
+if not registries:
+registries = list(registry_auth)
+
+results = ret.setdefault("Results", {})
+for registry in registries:
+if registry not in registry_auth:
+errors.append("No match found for registry '{0}'".format(registry))
+continue
+else:
+cmd = ["docker", "logout"]
+if registry.lower() != "hub":
+cmd.append(registry)
+log.debug("Attempting to logout of docker registry '%s'", registry)
+logout_cmd = __salt__["cmd.run_all"](
+cmd, python_shell=False, output_loglevel="quiet",
+)
+results[registry] = logout_cmd["retcode"] == 0
+if not results[registry]:
+if logout_cmd["stderr"]:
+errors.append(logout_cmd["stderr"])
+elif logout_cmd["stdout"]:
+errors.append(logout_cmd["stdout"])
+if errors:
+ret["retcode"] = 1
+return ret
# Functions for information gathering
def depends(name):
"""
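The docker.logout function added above shells out to the Docker CLI and reports per-registry results plus any errors. Assuming it is loaded on a minion, a caller might consume it roughly like this; the wrapper and values are hypothetical, only the return shape is taken from the code above:

    import logging

    log = logging.getLogger(__name__)

    def logged_out_cleanly(salt_funcs, registry="hub"):
        # salt_funcs stands in for the __salt__ loader dict that execution
        # modules receive; on the CLI this is `salt myminion docker.logout hub`.
        result = salt_funcs["docker.logout"](registry)
        # Expected shape: {"retcode": 0, "Errors": [], "Results": {"hub": True}}
        if not result["Results"].get(registry, False):
            log.error("Logout from '%s' failed: %s", registry, result["Errors"])
        return result["retcode"] == 0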
@@ -36,7 +36,7 @@ def __parse_drac(output):
section = ""
for i in output.splitlines():
-if len(i.rstrip()) > 0 and "=" in i:
+if i.rstrip() and "=" in i:
if section in drac:
drac[section].update(dict([[prop.strip() for prop in i.split("=")]]))
else:

@@ -54,7 +54,7 @@ def __execute_cmd(command):
cmd = __salt__["cmd.run_all"]("racadm {0}".format(command))
if cmd["retcode"] != 0:
-log.warning("racadm return an exit code '{0}'.".format(cmd["retcode"]))
+log.warning("racadm return an exit code '%s'.", cmd["retcode"])
return False
return True

@@ -73,7 +73,7 @@ def system_info():
cmd = __salt__["cmd.run_all"]("racadm getsysinfo")
if cmd["retcode"] != 0:
-log.warning("racadm return an exit code '{0}'.".format(cmd["retcode"]))
+log.warning("racadm return an exit code '%s'.", cmd["retcode"])
return __parse_drac(cmd["stdout"])

@@ -92,7 +92,7 @@ def network_info():
cmd = __salt__["cmd.run_all"]("racadm getniccfg")
if cmd["retcode"] != 0:
-log.warning("racadm return an exit code '{0}'.".format(cmd["retcode"]))
+log.warning("racadm return an exit code '%s'.", cmd["retcode"])
return __parse_drac(cmd["stdout"])

@@ -197,7 +197,7 @@ def list_users():
)
if cmd["retcode"] != 0:
-log.warning("racadm return an exit code '{0}'.".format(cmd["retcode"]))
+log.warning("racadm return an exit code '%s'.", cmd["retcode"])
for user in cmd["stdout"].splitlines():
if not user.startswith("cfg"):

@@ -242,7 +242,7 @@ def delete_user(username, uid=None):
)
else:
-log.warning("'{0}' does not exist".format(username))
+log.warning("'%s' does not exist", username)
return False
return True

@@ -271,7 +271,7 @@ def change_password(username, password, uid=None):
)
)
else:
-log.warning("'{0}' does not exist".format(username))
+log.warning("'%s' does not exist", username)
return False
return True

@@ -305,7 +305,7 @@ def create_user(username, password, permissions, users=None):
users = list_users()
if username in users:
-log.warning("'{0}' already exists".format(username))
+log.warning("'%s' already exists", username)
return False
for idx in six.iterkeys(users):

@@ -54,7 +54,7 @@ def __parse_drac(output):
if i.strip().endswith(":") and "=" not in i:
section = i[0:-1]
drac[section] = {}
-if len(i.rstrip()) > 0 and "=" in i:
+if i.rstrip() and "=" in i:
if section in drac:
drac[section].update(dict([[prop.strip() for prop in i.split("=")]]))
else:

@@ -137,7 +137,7 @@ def __execute_ret(
if l.startswith("Continuing execution"):
continue
-if len(l.strip()) == 0:
+if not l.strip():
continue
fmtlines.append(l)
if "=" in l:

@@ -418,7 +418,7 @@ def list_users(host=None, admin_username=None, admin_password=None, module=None)
else:
break
else:
-if len(_username) > 0:
+if _username:
users[_username].update({key: val})
return users

@@ -1223,7 +1223,7 @@ def inventory(host=None, admin_username=None, admin_password=None):
in_chassis = True
continue
-if len(l) < 1:
+if not l:
continue
line = re.split(" +", l.strip())

@@ -203,9 +203,7 @@ def check_db(*names, **kwargs):
ret = {}
for name in names:
if name in ret:
-log.warning(
-"pkg.check_db: Duplicate package name '{0}' " "submitted".format(name)
-)
+log.warning("pkg.check_db: Duplicate package name '%s' submitted", name)
continue
if "/" not in name:
ret.setdefault(name, {})["found"] = False

@@ -265,7 +263,7 @@ def latest_version(*names, **kwargs):
"""
refresh = salt.utils.data.is_true(kwargs.pop("refresh", True))
-if len(names) == 0:
+if not names:
return ""
# Refresh before looking for the latest version available

@@ -499,7 +497,8 @@ def refresh_db():
if now - timestamp < day:
log.info(
"Did not sync package tree since last sync was done at"
-" {0}, less than 1 day ago".format(timestamp)
+" %s, less than 1 day ago",
+timestamp,
)
return False

@@ -654,16 +653,15 @@ def install(
'new': '<new-version>'}}
"""
log.debug(
-"Called modules.pkg.install: {0}".format(
-{
-"name": name,
-"refresh": refresh,
-"pkgs": pkgs,
-"sources": sources,
-"kwargs": kwargs,
-"binhost": binhost,
-}
-)
+"Called modules.pkg.install: %s",
+{
+"name": name,
+"refresh": refresh,
+"pkgs": pkgs,
+"sources": sources,
+"kwargs": kwargs,
+"binhost": binhost,
+},
)
if salt.utils.data.is_true(refresh):
refresh_db()

@@ -688,7 +686,7 @@ def install(
version_num += "[{0}]".format(",".join(uses))
pkg_params = {name: version_num}
-if pkg_params is None or len(pkg_params) == 0:
+if not pkg_params:
return {}
elif pkg_type == "file":
emerge_opts = ["tbz2file"]

@@ -1256,7 +1254,7 @@ def check_extra_requirements(pkgname, pkgver):
try:
cpv = _porttree().dbapi.xmatch("bestmatch-visible", atom)
except portage.exception.InvalidAtom as iae:
-log.error("Unable to find a matching package for {0}: ({1})".format(atom, iae))
+log.error("Unable to find a matching package for %s: (%s)", atom, iae)
return False
if cpv == "":

@@ -72,20 +72,20 @@ def setval(key, val, false_unsets=False, permanent=False):
)
if not isinstance(key, six.string_types):
-log.debug(
-"{0}: 'key' argument is not a string type: '{1}'".format(__name__, key)
-)
+log.debug("%s: 'key' argument is not a string type: '%s'", __name__, key)
if val is False:
if false_unsets is True:
try:
os.environ.pop(key, None)
if permanent and is_windows:
-__salt__["reg.delete_value"](permanent_hive, permanent_key, key)
+__utils__["reg.delete_value"](permanent_hive, permanent_key, key)
return None
except Exception as exc:  # pylint: disable=broad-except
log.error(
-"{0}: Exception occurred when unsetting "
-"environ key '{1}': '{2}'".format(__name__, key, exc)
+"%s: Exception occurred when unsetting " "environ key '%s': '%s'",
+__name__,
+key,
+exc,
)
return False
else:

@@ -94,18 +94,22 @@ def setval(key, val, false_unsets=False, permanent=False):
try:
os.environ[key] = val
if permanent and is_windows:
-__salt__["reg.set_value"](permanent_hive, permanent_key, key, val)
+__utils__["reg.set_value"](permanent_hive, permanent_key, key, val)
return os.environ[key]
except Exception as exc:  # pylint: disable=broad-except
log.error(
-"{0}: Exception occurred when setting"
-"environ key '{1}': '{2}'".format(__name__, key, exc)
+"%s: Exception occurred when setting" "environ key '%s': '%s'",
+__name__,
+key,
+exc,
)
return False
else:
log.debug(
-"{0}: 'val' argument for key '{1}' is not a string "
-"or False: '{2}'".format(__name__, key, val)
+"%s: 'val' argument for key '%s' is not a string " "or False: '%s'",
+__name__,
+key,
+val,
)
return False

@@ -161,9 +165,7 @@ def setenv(
"""
ret = {}
if not isinstance(environ, dict):
-log.debug(
-"{0}: 'environ' argument is not a dict: '{1}'".format(__name__, environ)
-)
+log.debug("%s: 'environ' argument is not a dict: '%s'", __name__, environ)
return False
if clear_all is True:
# Unset any keys not defined in 'environ' dict supplied by user

@@ -177,8 +179,10 @@ def setenv(
ret[key] = setval(key, val, false_unsets, permanent=permanent)
else:
log.debug(
-"{0}: 'val' argument for key '{1}' is not a string "
-"or False: '{2}'".format(__name__, key, val)
+"%s: 'val' argument for key '%s' is not a string " "or False: '%s'",
+__name__,
+key,
+val,
)
return False

@@ -215,9 +219,7 @@ def get(key, default=""):
salt '*' environ.get baz default=False
"""
if not isinstance(key, six.string_types):
-log.debug(
-"{0}: 'key' argument is not a string type: '{1}'".format(__name__, key)
-)
+log.debug("%s: 'key' argument is not a string type: '%s'", __name__, key)
return False
return os.environ.get(key, default)

@@ -242,9 +244,7 @@ def has_value(key, value=None):
salt '*' environ.has_value foo
"""
if not isinstance(key, six.string_types):
-log.debug(
-"{0}: 'key' argument is not a string type: '{1}'".format(__name__, key)
-)
+log.debug("%s: 'key' argument is not a string type: '%s'", __name__, key)
return False
try:
cur_val = os.environ[key]

@@ -286,9 +286,7 @@ def item(keys, default=""):
key_list = keys
else:
log.debug(
-"{0}: 'keys' argument is not a string or list type: '{1}'".format(
-__name__, keys
-)
+"%s: 'keys' argument is not a string or list type: '%s'", __name__, keys
)
for key in key_list:
ret[key] = os.environ.get(key, default)
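Besides the %-style logging cleanup, the environ hunks above move the Windows registry helpers from __salt__["reg.*"] to __utils__["reg.*"] for permanent values. Ignoring that platform-specific branch, setval behaves roughly like this simplified sketch (a hypothetical stand-in, not the real module):

    import os

    def setval_sketch(key, val, false_unsets=False):
        # Process-local model of environ.setval; the real function can also
        # persist values to the Windows registry when permanent=True.
        if not isinstance(key, str):
            return False
        if val is False:
            if false_unsets:
                os.environ.pop(key, None)   # unset the variable entirely
                return None
            val = ""                        # otherwise store an empty string
        if not isinstance(val, str):
            return False
        os.environ[key] = val
        return os.environ[key]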
@@ -67,7 +67,7 @@ def exec_action(
if state_only:
return True
-if len(out) < 1:
+if not out:
return False
if len(out) == 1 and not out[0].strip():

@@ -204,7 +204,7 @@ def set_target(module, target, module_parameter=None, action_parameter=None):
# get list of available modules
if module not in get_modules():
-log.error("Module {0} not available".format(module))
+log.error("Module %s not available", module)
return False
exec_result = exec_action(

@@ -2005,7 +2005,7 @@ def line(
match = _regex_to_static(body, match)
if os.stat(path).st_size == 0 and mode in ("delete", "replace"):
-log.warning("Cannot find text to {0}. File '{1}' is empty.".format(mode, path))
+log.warning("Cannot find text to %s. File '%s' is empty.", mode, path)
body = []
elif mode == "delete" and match:
body = [line for line in body if line != match[0]]

@@ -5037,7 +5037,7 @@ def check_file_meta(
try:
differences = get_diff(name, tmp, show_filenames=False)
except CommandExecutionError as exc:
-log.error("Failed to diff files: {0}".format(exc))
+log.error("Failed to diff files: %s", exc)
differences = exc.strerror
__clean_tmp(tmp)
if differences:

@@ -5859,9 +5859,11 @@ def mknod_chrdev(name, major, minor, user=None, group=None, mode="0660"):
ret = {"name": name, "changes": {}, "comment": "", "result": False}
log.debug(
-"Creating character device name:{0} major:{1} minor:{2} mode:{3}".format(
-name, major, minor, mode
-)
+"Creating character device name:%s major:%s minor:%s mode:%s",
+name,
+major,
+minor,
+mode,
)
try:
if __opts__["test"]:

@@ -5930,9 +5932,11 @@ def mknod_blkdev(name, major, minor, user=None, group=None, mode="0660"):
ret = {"name": name, "changes": {}, "comment": "", "result": False}
log.debug(
-"Creating block device name:{0} major:{1} minor:{2} mode:{3}".format(
-name, major, minor, mode
-)
+"Creating block device name:%s major:%s minor:%s mode:%s",
+name,
+major,
+minor,
+mode,
)
try:
if __opts__["test"]:

@@ -6000,7 +6004,7 @@ def mknod_fifo(name, user=None, group=None, mode="0660"):
name = os.path.expanduser(name)
ret = {"name": name, "changes": {}, "comment": "", "result": False}
-log.debug("Creating FIFO name: {0}".format(name))
+log.debug("Creating FIFO name: %s", name)
try:
if __opts__["test"]:
ret["changes"] = {"new": "Fifo pipe {0} created.".format(name)}

@@ -82,7 +82,7 @@ def reload_rules():
.. code-block:: bash
-salt '*' firewalld.reload
+salt '*' firewalld.reload_rules
"""
return __firewall_cmd("--reload")

@@ -289,7 +289,7 @@ def showconfig(name, default=False, dict_return=False):
try:
pkg = output[0].split()[-1].rstrip(":")
except (IndexError, AttributeError, TypeError) as exc:
-log.error("Unable to get pkg-version string: {0}".format(exc))
+log.error("Unable to get pkg-version string: %s", exc)
return {}
ret = {pkg: {}}

@@ -172,10 +172,10 @@ def bootstrap(
_mkpart(root, fs_format, fs_opts, mount_dir)
loop1 = __salt__["cmd.run"]("losetup -f")
-log.debug("First loop device is {0}".format(loop1))
+log.debug("First loop device is %s", loop1)
__salt__["cmd.run"]("losetup {0} {1}".format(loop1, root))
loop2 = __salt__["cmd.run"]("losetup -f")
-log.debug("Second loop device is {0}".format(loop2))
+log.debug("Second loop device is %s", loop2)
start = six.text_type(2048 * 2048)
__salt__["cmd.run"]("losetup -o {0} {1} {2}".format(start, loop2, loop1))
__salt__["mount.mount"](mount_dir, loop2)

@@ -231,7 +231,7 @@ def _mkpart(root, fs_format, fs_opts, mount_dir):
"""
__salt__["partition.mklabel"](root, "msdos")
loop1 = __salt__["cmd.run"]("losetup -f")
-log.debug("First loop device is {0}".format(loop1))
+log.debug("First loop device is %s", loop1)
__salt__["cmd.run"]("losetup {0} {1}".format(loop1, root))
part_info = __salt__["partition.list"](loop1)
start = six.text_type(2048 * 2048) + "B"

@@ -240,7 +240,7 @@ def _mkpart(root, fs_format, fs_opts, mount_dir):
__salt__["partition.set"](loop1, "1", "boot", "on")
part_info = __salt__["partition.list"](loop1)
loop2 = __salt__["cmd.run"]("losetup -f")
-log.debug("Second loop device is {0}".format(loop2))
+log.debug("Second loop device is %s", loop2)
start = start.rstrip("B")
__salt__["cmd.run"]("losetup -o {0} {1} {2}".format(start, loop2, loop1))
_mkfs(loop2, fs_format, fs_opts)

@@ -415,9 +415,7 @@ def _bootstrap_deb(
return False
if static_qemu and not salt.utils.validate.path.is_executable(static_qemu):
-log.error(
-"Required tool qemu not " "present/readable at: {0}".format(static_qemu)
-)
+log.error("Required tool qemu not present/readable at: %s", static_qemu)
return False
if isinstance(pkgs, (list, tuple)):

@@ -48,7 +48,7 @@ def __virtual__():
def _ret_code(cmd, ignore_retcode=False):
-log.debug("executing [{0}]".format(cmd))
+log.debug("executing [%s]", cmd)
sts = __salt__["cmd.retcode"](
cmd, python_shell=False, ignore_retcode=ignore_retcode
)

@@ -359,7 +359,11 @@ def image_show(id=None, name=None, profile=None):  # pylint: disable=C0103
except exc.HTTPNotFound:
return {"result": False, "comment": "No image with ID {0}".format(id)}
-pformat = pprint.PrettyPrinter(indent=4).pformat
-log.debug("Properties of image {0}:\n{1}".format(image.name, pformat(image)))
+log.debug(
+"Properties of image %s:\n%s",
+image.name,
+pprint.PrettyPrinter(indent=4).pformat(image),
+)
schema = image_schema(profile=profile)
if len(schema.keys()) == 1:

@@ -398,7 +402,7 @@ def image_list(id=None, profile=None, name=None):  # pylint: disable=C0103
'name "{0}"'.format(name),
}
_add_image(ret, image)
-log.debug("Returning images: {0}".format(ret))
+log.debug("Returning images: %s", ret)
return ret

@@ -441,7 +445,7 @@ def image_update(id=None, name=None, profile=None, **kwargs):  # pylint: disable
img_list = image_list(name=name, profile=profile)
if img_list is dict and "result" in img_list:
return img_list
-elif len(img_list) == 0:
+elif not img_list:
return {
"result": False,
"comment": "No image with name '{0}' " "found.".format(name),

@@ -453,13 +457,13 @@ def image_update(id=None, name=None, profile=None, **kwargs):  # pylint: disable
image = img_list[name]
else:
raise SaltInvocationError
-log.debug("Found image:\n{0}".format(image))
+log.debug("Found image:\n%s", image)
to_update = {}
for key, value in kwargs.items():
if key.startswith("_"):
continue
if key not in image or image[key] != value:
-log.debug("add <{0}={1}> to to_update".format(key, value))
+log.debug("add <%s=%s> to to_update", key, value)
to_update[key] = value
g_client = _auth(profile)
updated = g_client.images.update(image["id"], **to_update)

@@ -481,11 +485,14 @@ def schema_get(name, profile=None):
salt '*' glance.schema_get name=f16-jeos
"""
g_client = _auth(profile)
-pformat = pprint.PrettyPrinter(indent=4).pformat
schema_props = {}
for prop in g_client.schemas.get(name).properties:
schema_props[prop.name] = prop.description
-log.debug("Properties of schema {0}:\n{1}".format(name, pformat(schema_props)))
+log.debug(
+"Properties of schema %s:\n%s",
+name,
+pprint.PrettyPrinter(indent=4).pformat(schema_props),
+)
return {name: schema_props}

@@ -588,7 +588,7 @@ def add_volume_bricks(name, bricks):
else:
new_bricks.append(brick)
-if len(new_bricks) > 0:
+if new_bricks:
for brick in new_bricks:
cmd += " {0}".format(brick)
return _gluster(cmd)

@@ -264,7 +264,7 @@ def _poll_for_events(
event_args={"sort_dir": "asc", "marker": marker},
)
-if len(events) == 0:
+if not events:
no_event_polls += 1
else:
no_event_polls = 0

@@ -419,7 +419,7 @@ def delete_stack(name=None, poll=0, timeout=60, profile=None):
ret["comment"] = "Deleted stack {0}.".format(name)
return ret
except Exception as ex:  # pylint: disable=W0703
-log.exception("Delete failed {0}".format(ex))
+log.exception("Delete failed %s", ex)
ret["result"] = False
ret["comment"] = "{0}".format(ex)
return ret

@@ -551,7 +551,7 @@ def create_stack(
try:
h_client.stacks.validate(**kwargs)
except Exception as ex:  # pylint: disable=W0703
-log.exception("Template not valid {0}".format(ex))
+log.exception("Template not valid %s", ex)
ret["result"] = False
ret["comment"] = "Template not valid {0}".format(ex)
return ret

@@ -623,7 +623,7 @@ def create_stack(
try:
h_client.stacks.create(**fields)
except Exception as ex:  # pylint: disable=W0703
-log.exception("Create failed {0}".format(ex))
+log.exception("Create failed %s", ex)
ret["result"] = False
ret["comment"] = "{0}".format(ex)
return ret

@@ -762,7 +762,7 @@ def update_stack(
try:
h_client.stacks.validate(**kwargs)
except Exception as ex:  # pylint: disable=W0703
-log.exception("Template not valid {0}".format(ex))
+log.exception("Template not valid %s", ex)
ret["result"] = False
ret["comment"] = "Template not valid {0}".format(ex)
return ret

@@ -832,7 +832,7 @@ def update_stack(
try:
h_client.stacks.update(name, **fields)
except Exception as ex:  # pylint: disable=W0703
-log.exception("Update failed {0}".format(ex))
+log.exception("Update failed %s", ex)
ret["result"] = False
ret["comment"] = "Update failed {0}".format(ex)
return ret

@@ -515,7 +515,7 @@ def proccess_lowstates(**kwargs):
"ERROR: to see details run: [salt-call state.show_lowstate] <-----***-SEE-***"
)
else:
-if len(ls) > 0:
+if ls:
if not isinstance(ls[0], dict):
raise Exception(
"ERROR: to see details run: [salt-call state.show_lowstate] <-----***-SEE-***"

@@ -557,7 +557,7 @@ def _state_data_to_yaml_string(data, whitelist=None, blacklist=None):
kset &= set(whitelist)
for k in kset:
y[k] = data[k]
-if len(y) == 0:
+if not y:
return None
return salt.utils.yaml.safe_dump(y, default_flow_style=False)

@@ -724,7 +724,7 @@ def proccesser_markdown(lowstate_item, config, **kwargs):
details += _format_markdown_system_file(s["name"], config)
# if no state doc is created use default state as yaml
-if len(details) == 0:
+if not details:
y = _state_data_to_yaml_string(s)
if y:
details += "```\n{0}```\n".format(y)

@@ -168,7 +168,7 @@ def raw_system_incron():
salt '*' incron.raw_system_incron
"""
-log.debug("read_file {0}".format(_read_file(_INCRON_SYSTEM_TAB, "salt")))
+log.debug("read_file %s", _read_file(_INCRON_SYSTEM_TAB, "salt"))
return "".join(_read_file(_INCRON_SYSTEM_TAB, "salt"))

@@ -203,7 +203,7 @@ def list_tab(user):
data = raw_system_incron()
else:
data = raw_incron(user)
-log.debug("user data {0}".format(data))
+log.debug("user data %s", data)
ret = {"crons": [], "pre": []}
flag = False
for line in data.splitlines():

@@ -359,9 +359,7 @@ def get_host_domainname(name, domains=None, **api_opts):
d = d.lower().rstrip(".")
if name.endswith(d) and len(d) > len(match):
match = d
-if len(match) > 0:
-return match
-return None
+return match if match else None
def get_host_hostname(name, domains=None, **api_opts):

@@ -669,8 +667,6 @@ def delete_a(name=None, ipv4addr=None, allow_array=False, **api_opts):
r = get_a(name, ipv4addr, allow_array=False, **api_opts)
if not r:
return True
-if len(r) == 0:
-return True
if len(r) > 1 and not allow_array:
raise Exception("More than one result, use allow_array to override")
ret = []

@@ -427,7 +427,7 @@ def list_sets(family="ipv4"):
sets = []
sets.append({})
for item in _tmp:
-if len(item) == 0:
+if not item:
count = count + 1
sets.append({})
continue

@@ -514,7 +514,7 @@ def add(setname=None, entry=None, family="ipv4", **kwargs):
cmd = "{0} add -exist {1} {2}".format(_ipset_cmd(), setname, cmd)
out = __salt__["cmd.run"](cmd, python_shell=False)
-if len(out) == 0:
+if not out:
return "Success"
return "Error: {0}".format(out)

@@ -543,7 +543,7 @@ def delete(set=None, entry=None, family="ipv4", **kwargs):
cmd = "{0} del {1} {2}".format(_ipset_cmd(), set, entry)
out = __salt__["cmd.run"](cmd, python_shell=False)
-if len(out) == 0:
+if not out:
return "Success"
return "Error: {0}".format(out)

@@ -664,10 +664,7 @@ def flush(set=None, family="ipv4"):
cmd = "{0} flush".format(_ipset_cmd())
out = __salt__["cmd.run"](cmd, python_shell=False)
-if len(out) == 0:
-return True
-else:
-return False
+return not out
def _find_set_members(set):

@@ -167,7 +167,7 @@ def _regex_iptables_save(cmd_output, filters=None):
log.warning("Skipping regex rule: '%s': %s", pattern, e)
continue
-if len(__context__["iptables.save_filters"]) > 0:
+if __context__["iptables.save_filters"]:
# line by line get rid of any regex matches
_filtered_cmd_output = [
line

@@ -718,7 +718,7 @@ def save(filename=None, family="ipv4"):
ipt = __salt__["cmd.run"](cmd)
# regex out the output if configured with filters
-if len(_conf_save_filters()) > 0:
+if _conf_save_filters():
ipt = _regex_iptables_save(ipt)
out = __salt__["file.write"](filename, ipt)

@@ -902,10 +902,7 @@ def append(table="filter", chain=None, rule=None, family="ipv4"):
_iptables_cmd(family), wait, table, chain, rule
)
out = __salt__["cmd.run"](cmd)
-if len(out) == 0:
-return True
-else:
-return False
+return not out
def insert(table="filter", chain=None, position=None, rule=None, family="ipv4"):

@@ -788,7 +788,7 @@ def create_secret(
return ret
data[sname] = encoded
-log.trace("secret data is: {0}".format(data))
+log.trace("secret data is: %s", data)
if secret and update:
if not data:

@@ -58,7 +58,7 @@ def add(overlay):
# If we did not have any overlays before and we successfully added
# a new one. We need to ensure the make.conf is sourcing layman's
# make.conf so emerge can see the overlays
-if len(old_overlays) == 0 and len(new_overlays) > 0:
+if not old_overlays and new_overlays:
srcline = "source /var/lib/layman/make.conf"
makeconf = _get_makeconf()
if not __salt__["file.contains"](makeconf, "layman"):

@@ -91,7 +91,7 @@ def delete(overlay):
# If we now have no overlays added, We need to ensure that the make.conf
# does not source layman's make.conf, as it will break emerge
-if len(new_overlays) == 0:
+if not new_overlays:
srcline = "source /var/lib/layman/make.conf"
makeconf = _get_makeconf()
if __salt__["file.contains"](makeconf, "layman"):

@@ -143,7 +143,7 @@ def list_sizes(profile, location_id=None, **libcloud_kwargs):
libcloud_kwargs = salt.utils.args.clean_kwargs(**libcloud_kwargs)
if location_id is not None:
locations = [loc for loc in conn.list_locations() if loc.id == location_id]
-if len(locations) == 0:
+if not locations:
raise ValueError("Location not found")
else:
sizes = conn.list_sizes(location=locations[0], **libcloud_kwargs)

@@ -791,7 +791,7 @@ def _get_by_id(collection, id):
Get item from a list by the id field
"""
matches = [item for item in collection if item.id == id]
-if len(matches) == 0:
+if not matches:
raise ValueError("Could not find a matching item")
elif len(matches) > 1:
raise ValueError("The id matched {0} items, not 1".format(len(matches)))

@@ -385,7 +385,7 @@ def balancer_detach_member(balancer_id, member_id, profile, **libcloud_kwargs):
match = [member for member in members if member.id == member_id]
if len(match) > 1:
raise ValueError("Ambiguous argument, found mulitple records")
-elif len(match) == 0:
+elif not match:
raise ValueError("Bad argument, found no records")
else:
member = match[0]

@@ -44,7 +44,7 @@ def version():
def _raise_on_no_files(*args):
-if len(args) == 0:
+if not args:
raise CommandExecutionError(
"You need to specify at least one file or directory to work with!"
)

@@ -96,7 +96,7 @@ def _parse_options(entry, options, include_unset=True):
"""
log_cfg = {}
options = shlex.split(options)
-if len(options) == 0:
+if not options:
return None
## identifier is entry or log?
Some files were not shown because too many files have changed in this diff.