diff --git a/.pylintrc b/.pylintrc index 52fa3c7a7ed..071e0339b1c 100644 --- a/.pylintrc +++ b/.pylintrc @@ -74,7 +74,7 @@ confidence= # can either give multiple identifiers separated by comma (,) or put this # option multiple times (only on the command line, not in the configuration # file where it should appear only once).You can also use "--disable=all" to -# disable everything first and then reenable specific checks. For example, if +# disable everything first and then re-enable specific checks. For example, if # you want to run only the similarities checker, you can use "--disable=all # --enable=similarities". If you want to run only the classes checker, but have # no Warning level messages displayed, use"--disable=all --enable=classes diff --git a/.testing.pylintrc b/.testing.pylintrc index f2af4adaaa8..9305180f432 100644 --- a/.testing.pylintrc +++ b/.testing.pylintrc @@ -71,7 +71,7 @@ confidence= # can either give multiple identifiers separated by comma (,) or put this # option multiple times (only on the command line, not in the configuration # file where it should appear only once).You can also use "--disable=all" to -# disable everything first and then reenable specific checks. For example, if +# disable everything first and then re-enable specific checks. For example, if # you want to run only the similarities checker, you can use "--disable=all # --enable=similarities". If you want to run only the classes checker, but have # no Warning level messages displayed, use"--disable=all --enable=classes diff --git a/doc/Makefile b/doc/Makefile index beaeb67bc63..39af8918fb9 100644 --- a/doc/Makefile +++ b/doc/Makefile @@ -9,11 +9,6 @@ BUILDDIR = _build SPHINXLANG = XELATEX = xelatex -# User-friendly check for sphinx-build -ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1) -$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/) -endif - # ----- Translations Support ------------------------------------------------> # If language is set, also set translation options ifeq ($(shell [ "x$(SPHINXLANG)" != "x" ] && echo 0 || echo 1), 0) @@ -36,7 +31,7 @@ ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(TRANSLATIONOPTS # the i18n builder cannot share the environment and doctrees with the others I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . -.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext translations download-translations +.PHONY: help clean check_sphinx-build html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext translations download-translations help: @echo "Please use \`make ' where is one of" @@ -69,38 +64,42 @@ clean: rm -rf $(BUILDDIR)/* test -d 'locale' && find locale/ -name *.mo -exec rm {} \; || true -html: translations +# User-friendly check for sphinx-build +check_sphinx-build: + @which $(SPHINXBUILD) >/dev/null 2>&1 || (echo "The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. 
If you don't have Sphinx installed, grab it from http://sphinx-doc.org/)" >&2; false) + +html: check_sphinx-build translations $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html @echo @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." -dirhtml: translations +dirhtml: check_sphinx-build translations $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml @echo @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." -singlehtml: translations +singlehtml: check_sphinx-build translations $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml @echo @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." -pickle: translations +pickle: check_sphinx-build translations $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle @echo @echo "Build finished; now you can process the pickle files." -json: translations +json: check_sphinx-build translations $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json @echo @echo "Build finished; now you can process the JSON files." -htmlhelp: translations +htmlhelp: check_sphinx-build translations $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp @echo @echo "Build finished; now you can run HTML Help Workshop with the" \ ".hhp project file in $(BUILDDIR)/htmlhelp." -qthelp: translations +qthelp: check_sphinx-build translations $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp @echo @echo "Build finished; now you can run "qcollectiongenerator" with the" \ @@ -109,7 +108,7 @@ qthelp: translations @echo "To view the help file:" @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/Salt.qhc" -devhelp: translations +devhelp: check_sphinx-build translations $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp @echo @echo "Build finished." @@ -118,31 +117,31 @@ devhelp: translations @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/Salt" @echo "# devhelp" -epub: translations +epub: check_sphinx-build translations $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub @echo @echo "Build finished. The epub file is in $(BUILDDIR)/epub." -latex: translations +latex: check_sphinx-build translations $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." @echo "Run \`make' in that directory to run these through (pdf)latex" \ "(use \`make latexpdf' here to do that automatically)." -latexpdf: translations +latexpdf: check_sphinx-build translations $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo "Running LaTeX files through pdflatex..." $(MAKE) -C $(BUILDDIR)/latex all-pdf @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." -latexpdfja: translations +latexpdfja: check_sphinx-build translations $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo "Running LaTeX files through platex and dvipdfmx..." $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." -pdf: translations +pdf: check_sphinx-build translations @if [ "$(XELATEX)" = "xelatex" ] || [ "x$(XELATEX)" = "x" ]; then \ echo "The '$(XELATEX)' command was not found."; \ fi @@ -157,62 +156,62 @@ cheatsheet: translations cd cheatsheet && xelatex salt.tex && cp salt.pdf ../salt-cheatsheet.pdf @echo "./salt-cheatsheet.pdf created." -text: translations +text: check_sphinx-build translations $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text @echo @echo "Build finished. The text files are in $(BUILDDIR)/text." 
-man: translations +man: check_sphinx-build translations $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man @echo @echo "Build finished. The manual pages are in $(BUILDDIR)/man." -texinfo: translations +texinfo: check_sphinx-build translations $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo @echo @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." @echo "Run \`make' in that directory to run these through makeinfo" \ "(use \`make info' here to do that automatically)." -info: translations +info: check_sphinx-build translations $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo @echo "Running Texinfo files through makeinfo..." make -C $(BUILDDIR)/texinfo info @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." -gettext: +gettext: check_sphinx-build $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale @echo @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale" -changes: translations +changes: check_sphinx-build translations $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes @echo @echo "The overview file is in $(BUILDDIR)/changes." -spelling: +spelling: check_sphinx-build $(SPHINXBUILD) -b spelling $(ALLSPHINXOPTS) $(BUILDDIR)/spelling @echo @echo "Spell check complete; look for any errors in the above output " \ "or in $(BUILDDIR)/spelling/output.txt." -linkcheck: +linkcheck: check_sphinx-build $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck @echo @echo "Link check complete; look for any errors in the above output " \ "or in $(BUILDDIR)/linkcheck/output.txt." -doctest: +doctest: check_sphinx-build $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest @echo "Testing of doctests in the sources finished, look at the " \ "results in $(BUILDDIR)/doctest/output.txt." -xml: translations +xml: check_sphinx-build translations $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml @echo @echo "Build finished. The XML files are in $(BUILDDIR)/xml." -pseudoxml: translations +pseudoxml: check_sphinx-build translations $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml @echo @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." diff --git a/doc/conf.py b/doc/conf.py index 488bc688b3b..90b9317193f 100644 --- a/doc/conf.py +++ b/doc/conf.py @@ -26,7 +26,7 @@ class Mock(object): ''' def __init__(self, mapping=None, *args, **kwargs): """ - Mapping allows to bypass the Mock object, but actually assign + Mapping allows autodoc to bypass the Mock object, but actually assign a specific value, expected by a specific attribute returned. """ self.__mapping = mapping or {} diff --git a/doc/faq.rst b/doc/faq.rst index 00571047869..a81cd45dfdf 100644 --- a/doc/faq.rst +++ b/doc/faq.rst @@ -260,9 +260,9 @@ service. But restarting the service while in the middle of a state run interrupts the process of the Minion running states and sending results back to the Master. A common way to workaround that is to schedule restarting of the Minion service using :ref:`masterless mode ` after all -other states have been applied. This allows to keep Minion to Master connection -alive for the Minion to report the final results to the Master, while the -service is restarting in the background. +other states have been applied. This allows the minion to keep Minion to Master +connection alive for the Minion to report the final results to the Master, while +the service is restarting in the background. 
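The masterless-mode restart described in the paragraph above is typically expressed as a state; a minimal sketch, assuming a hypothetical ``upgrade_salt_minion`` pkg state to hang ``onchanges`` on and using the ``bg`` kwarg of ``cmd.run`` (the state IDs are illustrative only):

.. code-block:: yaml

    restart_salt_minion:
      cmd.run:
        # Masterless call so the result of the in-flight run still reaches the Master
        - name: 'salt-call --local service.restart salt-minion'
        - bg: True
        - onchanges:
          - pkg: upgrade_salt_minion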
Upgrade without automatic restart ********************************* diff --git a/doc/topics/releases/2014.7.6.rst b/doc/topics/releases/2014.7.6.rst index 412c1da4cb4..0b0c091a25b 100644 --- a/doc/topics/releases/2014.7.6.rst +++ b/doc/topics/releases/2014.7.6.rst @@ -760,7 +760,7 @@ Extended Changelog Courtesy of Todd Stansell (https://github.com/tjstansell/salt - **PR** `#22925`_: (*rallytime*) Backport `#22895`_ to 2014.7 | refs: `#23113`_ - - **PR** `#22895`_: (*aletourneau*) pam_tally counter was not reset to 0 after a succesfull login + - **PR** `#22895`_: (*aletourneau*) pam_tally counter was not reset to 0 after a successful login | refs: `#22925`_ * dfe2066 Merge pull request `#23113`_ from saltstack/revert-22925-`bp-22895`_ * b957ea8 Revert "Backport `#22895`_ to 2014.7" @@ -921,7 +921,7 @@ Extended Changelog Courtesy of Todd Stansell (https://github.com/tjstansell/salt | refs: `#23113`_ @ *2015-04-22T02:30:26Z* - - **PR** `#22895`_: (*aletourneau*) pam_tally counter was not reset to 0 after a succesfull login + - **PR** `#22895`_: (*aletourneau*) pam_tally counter was not reset to 0 after a successful login | refs: `#22925`_ * 6890752 Merge pull request `#22925`_ from rallytime/`bp-22895`_ * 3852d96 Pylint fix @@ -930,7 +930,7 @@ Extended Changelog Courtesy of Todd Stansell (https://github.com/tjstansell/salt * 5ebf159 Cleaned up pull request - * a08ac47 pam_tally counter was not reset to 0 after a succesfull login + * a08ac47 pam_tally counter was not reset to 0 after a successful login - **PR** `#22914`_: (*cachedout*) Call proper returner function in jobs.list_jobs @ *2015-04-22T00:49:01Z* diff --git a/doc/topics/releases/2015.8.4.rst b/doc/topics/releases/2015.8.4.rst index 56d84195c05..2cd072ad783 100644 --- a/doc/topics/releases/2015.8.4.rst +++ b/doc/topics/releases/2015.8.4.rst @@ -403,7 +403,7 @@ Changes: - **PR** `#29708`_: (*lagesag*) Fix test=True for file.directory with recurse ignore_files/ignore_dirs. -- **PR** `#29642`_: (*cachedout*) Correctly restart deamonized minions on failure +- **PR** `#29642`_: (*cachedout*) Correctly restart daemonized minions on failure - **PR** `#29599`_: (*cachedout*) Clean up minion shutdown diff --git a/doc/topics/releases/2016.11.0.rst b/doc/topics/releases/2016.11.0.rst index 335f80688bc..05bc8fabf59 100644 --- a/doc/topics/releases/2016.11.0.rst +++ b/doc/topics/releases/2016.11.0.rst @@ -264,7 +264,7 @@ Runner Changes ``salt://_utils/``) are now able to be synced to the master, making it easier to use them in custom runners. A :py:mod:`saltutil.sync_utils ` function has been added to the - :py:mod:`saltutil runner ` to faciliate the syncing of + :py:mod:`saltutil runner ` to facilitate the syncing of utility modules to the master. Pillar Changes @@ -291,7 +291,7 @@ Junos Module Changes Network Automation: NAPALM ========================== -Beginning with 2016.11.0, network automation is inclued by default in the core +Beginning with 2016.11.0, network automation is included by default in the core of Salt. It is based on a the `NAPALM `_ library and provides facilities to manage the configuration and retrieve data from network devices running widely used operating systems such: JunOS, IOS-XR, @@ -720,7 +720,7 @@ Runner Module Deprecations - The ``fileserver`` runner no longer accepts the ``outputter`` keyword argument. Users will need to specify an outputter using the ``--out`` option. -- The ``jobs`` runner no longer accepts the ``ouputter`` keyword argument. 
Users will need to +- The ``jobs`` runner no longer accepts the ``outputter`` keyword argument. Users will need to specify an outputter using the ``--out`` option. - ``virt`` runner module: diff --git a/doc/topics/releases/2016.11.1.rst b/doc/topics/releases/2016.11.1.rst index 8011108cdab..479b54985ba 100644 --- a/doc/topics/releases/2016.11.1.rst +++ b/doc/topics/releases/2016.11.1.rst @@ -873,7 +873,7 @@ Changes: - **PR** `#37827`_: (*silenius*) add missing chloginclass - **PR** `#37826`_: (*rallytime*) Update branch refs to more relevant branch - **PR** `#37823`_: (*rallytime*) Add "names" option to file state docs: point users to highstate doc examples - - **PR** `#37822`_: (*laleocen*) add documenation for multiline encryption using nacl + - **PR** `#37822`_: (*laleocen*) add documentation for multiline encryption using nacl | refs: `#37826`_ - **PR** `#37821`_: (*rallytime*) Clarify keystone.user_present password state docs with default behavior - **PR** `#37820`_: (*rallytime*) Add some dependency documentation to libvirt docs diff --git a/doc/topics/releases/2016.11.2.rst b/doc/topics/releases/2016.11.2.rst index a3725478586..aff3a91a8be 100644 --- a/doc/topics/releases/2016.11.2.rst +++ b/doc/topics/releases/2016.11.2.rst @@ -189,9 +189,9 @@ Changes: * fd2ee7d Add some simple unit tests for salt.config.api_config function - * 3d2fefc Make sure the pidfile and log_file values are overriden by api opts + * 3d2fefc Make sure the pidfile and log_file values are overridden by api opts - * 1f6b540 Make sure the pidfile and log_file values are overriden by api opts + * 1f6b540 Make sure the pidfile and log_file values are overridden by api opts * 04d307f salt-api no longer forces the default timeout @@ -844,7 +844,7 @@ Changes: * 68d5475 Fixing Snapper unit tests for SUBVOLUME support - * e9919a9 Removing posible double '/' from the file paths + * e9919a9 Removing possible double '/' from the file paths * 8b4f87f Updating and fixing the documentation diff --git a/doc/topics/releases/2016.11.3.rst b/doc/topics/releases/2016.11.3.rst index 2adeda6ac99..ee25da393fd 100644 --- a/doc/topics/releases/2016.11.3.rst +++ b/doc/topics/releases/2016.11.3.rst @@ -367,7 +367,7 @@ Changes: * 5244041 Merge pull request `#39221`_ from lvg01/fix-bug-39220 - * e8a41d6 Removes to early content stripping (stripping is allready done when needed with ident:true), fixes `#39220`_ + * e8a41d6 Removes to early content stripping (stripping is already done when needed with ident:true), fixes `#39220`_ * a4b169e Fixed wrong logic, fixes `#39220`_ @@ -482,7 +482,7 @@ Changes: - **PR** `#39276`_: (*gtmanfred*) _device_mismatch_ignored will never be True @ *2017-02-09T17:05:28Z* - - **ISSUE** `#39269`_: (*alexharrington*) Remount forced with lizardfs fuse filesystem due to device missmatch + - **ISSUE** `#39269`_: (*alexharrington*) Remount forced with lizardfs fuse filesystem due to device mismatch | refs: `#39276`_ - **ISSUE** `#39106`_: (*carsten-AEI*) CVMFS fuse mount gets remounted every time | refs: `#39276`_ @@ -688,7 +688,7 @@ Changes: - **ISSUE** `#1`_: (*thatch45*) Enable regex on the salt cli - **PR** `#39146`_: (*gtmanfred*) update vmware getting started doc - **PR** `#39145`_: (*garethgreenaway*) [2016.3] Fix when targeting via pillar with Salt syndic - - **PR** `#39131`_: (*bobrik*) Clarify ipv6 option for minion and inteface for master, closes `#39118`_ + - **PR** `#39131`_: (*bobrik*) Clarify ipv6 option for minion and interface for master, closes `#39118`_ - **PR** `#39116`_: (*terminalmage*) Don't 
abort pillar.get with merge=True if default is None - **PR** `#39077`_: (*terminalmage*) Apply fix from `#38705`_ to 2016.3 branch - **PR** `#38804`_: (*alexbleotu*) Second attempt to fix prepending of root_dir to paths @@ -717,7 +717,7 @@ Changes: * 97521b3 Second attempt to fix prepending of root_dir to paths - * 6ffeda3 Clarify ipv6 option for minion and inteface for master, closes `#39118`_ (`#39131`_) + * 6ffeda3 Clarify ipv6 option for minion and interface for master, closes `#39118`_ (`#39131`_) * 646b9ea Don't abort pillar.get with merge=True if default is None (`#39116`_) @@ -978,7 +978,7 @@ Changes: - **PR** `#39039`_: (*rallytime*) Update 2016.11.2 release notes -* a7fc02e Ungate the status.py module and raise unsupported errors in functions not executeable on Windows. (`#39005`_) +* a7fc02e Ungate the status.py module and raise unsupported errors in functions not executable on Windows. (`#39005`_) - **PR** `#39005`_: (*cro*) Ungate the status.py module and raise unsupported errors in functions not executable on Windows. | refs: `#39536`_ @@ -1214,7 +1214,7 @@ Changes: | refs: `#38875`_ - **PR** `#38890`_: (*cro*) Backport `#38887`_ to 2016.3: Enable resetting a VM via salt-cloud & VMware driver - **PR** `#38883`_: (*techhat*) Don't require text_out path to exist - - **PR** `#38875`_: (*terminalmage*) Reactor: fix traceback when salt:// path is nonexistant + - **PR** `#38875`_: (*terminalmage*) Reactor: fix traceback when salt:// path is nonexistent - **PR** `#38867`_: (*mchugh19*) Touch deploy.sh before use | refs: `#38883`_ - **PR** `#38851`_: (*terminalmage*) Support docker-py 2.0 in dockerng @@ -1237,7 +1237,7 @@ Changes: * fbc4d2a reactor: ensure glob_ref is a string - * 2e443d7 cp.cache_file: add note re: return for nonexistant salt:// path + * 2e443d7 cp.cache_file: add note re: return for nonexistent salt:// path * e9ebec4 Merge pull request `#38890`_ from cro/vmware_reset_vm_20163 diff --git a/doc/topics/releases/2016.11.4.rst b/doc/topics/releases/2016.11.4.rst index 6838f682991..6a3411994f9 100644 --- a/doc/topics/releases/2016.11.4.rst +++ b/doc/topics/releases/2016.11.4.rst @@ -832,7 +832,7 @@ Changes: * 2febd05 Merge pull request `#40372`_ from zer0def/pip-cache-fixes * d68067f Merge remote-tracking branch 'main/2016.11' into pip-cache-fixes - * 4f23a23 Fixed the `test_install_download_cache_argument_in_resulting_command` to accomodate introduced cache directory argument fixes and renamed it to `test_install_download_cache_dir_arguments_in_resulting_command`. + * 4f23a23 Fixed the `test_install_download_cache_argument_in_resulting_command` to accommodate introduced cache directory argument fixes and renamed it to `test_install_download_cache_dir_arguments_in_resulting_command`. * 9d0f94e Fixed unnecessary API changes introduced with suggested changes. 
@@ -1345,7 +1345,7 @@ Changes: @ *2017-03-17T15:17:08Z* - **PR** `#40090`_: (*rallytime*) Back-port `#40056`_ to 2016.3 - - **PR** `#40059`_: (*terminalmage*) Fix traceback when virtualenv.managed is invoked with nonexistant user + - **PR** `#40059`_: (*terminalmage*) Fix traceback when virtualenv.managed is invoked with nonexistent user - **PR** `#40057`_: (*cachedout*) More mentionbot blacklists - **PR** `#40056`_: (*thatch45*) update mention bot blacklist | refs: `#40090`_ @@ -1354,7 +1354,7 @@ Changes: * 116201f Merge pull request `#40059`_ from terminalmage/fix-virtualenv-traceback - * e3cfd29 Fix traceback when virtualenv.managed is invoked with nonexistant user + * e3cfd29 Fix traceback when virtualenv.managed is invoked with nonexistent user * a01b52b Merge pull request `#40090`_ from rallytime/`bp-40056`_ @@ -1386,7 +1386,7 @@ Changes: - **PR** `#40053`_: (*gtmanfred*) Update rh_ip.py - **PR** `#40041`_: (*terminalmage*) Fix transposed lines in salt.utils.process - **PR** `#40038`_: (*velom*) correctly parse "pkg_name===version" from pip freeze - - **PR** `#40018`_: (*meaksh*) Allows overriding 'timeout' and 'gather_job_timeout' to 'manage.up' runner call + - **PR** `#40018`_: (*meaksh*) Allow overriding 'timeout' and 'gather_job_timeout' to 'manage.up' runner call | refs: `#40072`_ * b12720a Merge pull request `#40088`_ from rallytime/merge-2016.11 * 626bd03 Merge branch '2016.3' into '2016.11' @@ -1397,9 +1397,9 @@ Changes: * 8dcffc7 Merge pull request `#40018`_ from meaksh/2016.3-handling-timeouts-for-manage.up-runner - * 9f5c3b7 Allows to set custom timeouts for 'manage.up' and 'manage.status' + * 9f5c3b7 Allow setting custom timeouts for 'manage.up' and 'manage.status' - * 2102d9c Allows to set 'timeout' and 'gather_job_timeout' via kwargs + * 2102d9c Allow setting 'timeout' and 'gather_job_timeout' via kwargs * 22fc529 Merge pull request `#40038`_ from velom/fix-pip-freeze-parsing @@ -1419,15 +1419,15 @@ Changes: * 703ab23 Merge pull request `#40055`_ from rallytime/doc-build-warnings * 72d16c9 Update "yaml" code-block references with "jinja" where needed -- **PR** `#40072`_: (*meaksh*) [2016.11] Allows overriding 'timeout' and 'gather_job_timeout' to 'manage.up' runner call +- **PR** `#40072`_: (*meaksh*) [2016.11] Allow overriding 'timeout' and 'gather_job_timeout' to 'manage.up' runner call @ *2017-03-16T15:31:46Z* - - **PR** `#40018`_: (*meaksh*) Allows overriding 'timeout' and 'gather_job_timeout' to 'manage.up' runner call + - **PR** `#40018`_: (*meaksh*) Allow overriding 'timeout' and 'gather_job_timeout' to 'manage.up' runner call | refs: `#40072`_ * e73a1d0 Merge pull request `#40072`_ from meaksh/2016.11-handling-timeouts-for-manage.up-runner - * 40246d3 Allows to set custom timeouts for 'manage.up' and 'manage.status' + * 40246d3 Allow setting custom timeouts for 'manage.up' and 'manage.status' - * ad232fd Allows to set 'timeout' and 'gather_job_timeout' via kwargs + * ad232fd Allow setting 'timeout' and 'gather_job_timeout' via kwargs - **PR** `#40045`_: (*terminalmage*) Fix error when chhome is invoked by user.present state in Windows @ *2017-03-15T19:00:41Z* @@ -1458,7 +1458,7 @@ Changes: - **PR** `#40016`_: (*terminalmage*) Attempt to fix failing grains tests in 2016.3 - **PR** `#39994`_: (*rallytime*) Add a versionadded tag for dockerng ulimits addition - **PR** `#39988`_: (*terminalmage*) Add comment explaining change from `#39973`_ - - **PR** `#39980`_: (*vutny*) [2016.3] Allow to use `bg` kwarg for `cmd.run` state function + - **PR** `#39980`_: (*vutny*) 
[2016.3] Allow using `bg` kwarg for `cmd.run` state function - **PR** `#39973`_: (*terminalmage*) Don't use docker.Client instance from context if missing attributes * 277bd17 Merge pull request `#40025`_ from rallytime/merge-2016.11 * 029f28b Merge branch '2016.3' into '2016.11' @@ -1475,7 +1475,7 @@ Changes: * 0c61d06 Merge pull request `#39980`_ from vutny/cmd-run-state-bg - * a81dc9d [2016.3] Allow to use `bg` kwarg for `cmd.run` state function + * a81dc9d [2016.3] Allow using `bg` kwarg for `cmd.run` state function * b042484 Merge pull request `#39994`_ from rallytime/ulimits-dockerng-version @@ -1834,7 +1834,7 @@ Changes: * 9f70ad7 Merge pull request `#39472`_ from whiteinge/_reformat_low-update * d11f538 Add RunnerClient test for old/new-style arg/kwarg parsing - * ec377ab Reenable skipped RunnerClient tests + * ec377ab Re-enable skipped RunnerClient tests * 27f7fd9 Update _reformat_low to run arg through parse_input @@ -2022,13 +2022,13 @@ Changes: * e63cbba Merge pull request `#39653`_ from cachedout/26_odict * 91eb721 Use salt's ordereddict for comparison -- **PR** `#39609`_: (*gtmanfred*) intialize the Client stuff in FSClient +- **PR** `#39609`_: (*gtmanfred*) initialize the Client stuff in FSClient @ *2017-02-24T18:50:55Z* - **ISSUE** `#38836`_: (*toanctruong*) file.managed with S3 Source errors out with obscure message | refs: `#39589`_ `#39609`_ * 0bc6027 Merge pull request `#39609`_ from gtmanfred/2016.11 - * 0820620 intialize the Client stuff in FSClient + * 0820620 initialize the Client stuff in FSClient - **PR** `#39615`_: (*skizunov*) Bonjour/Avahi beacons: Make sure TXT record length is valid @ *2017-02-24T18:47:05Z* diff --git a/doc/topics/releases/2016.3.0.rst b/doc/topics/releases/2016.3.0.rst index 4d9e9cbe731..d7ee082aea6 100644 --- a/doc/topics/releases/2016.3.0.rst +++ b/doc/topics/releases/2016.3.0.rst @@ -30,7 +30,7 @@ Backwards-incompatible Changes It has been moved one directory down, into the master cachedir. On most platforms, this is ``/var/cache/salt/master/extmods``. Most users won't have to worry about this, but those who have been manually placing custom runners - into ``/var/cache/salt/extmods/runners``, or ouputters into + into ``/var/cache/salt/extmods/runners``, or outputters into ``/var/cache/salt/extmods/output``, etc. will be affected by this. 
To transition, it is recommended not to simply move the extmods directory into ``/var/cache/salt/master``, but to copy the custom modules into the salt diff --git a/doc/topics/releases/2016.3.5.rst b/doc/topics/releases/2016.3.5.rst index 236ef0bee86..d77a3a65e1f 100644 --- a/doc/topics/releases/2016.3.5.rst +++ b/doc/topics/releases/2016.3.5.rst @@ -149,9 +149,9 @@ Changes: * fd2ee7d Add some simple unit tests for salt.config.api_config function - * 3d2fefc Make sure the pidfile and log_file values are overriden by api opts + * 3d2fefc Make sure the pidfile and log_file values are overridden by api opts - * 1f6b540 Make sure the pidfile and log_file values are overriden by api opts + * 1f6b540 Make sure the pidfile and log_file values are overridden by api opts * 04d307f salt-api no longer forces the default timeout @@ -1046,7 +1046,7 @@ Changes: * 0e74bad Update branch refs to more relevant branch (`#37826`_) - **PR** `#37826`_: (*rallytime*) Update branch refs to more relevant branch - - **PR** `#37822`_: (*laleocen*) add documenation for multiline encryption using nacl + - **PR** `#37822`_: (*laleocen*) add documentation for multiline encryption using nacl | refs: `#37826`_ * 6a9b49c Add "names" option to file state docs: point users to highstate doc examples (`#37823`_) diff --git a/doc/topics/releases/2016.3.6.rst b/doc/topics/releases/2016.3.6.rst index 838f0375d77..5e74300126b 100644 --- a/doc/topics/releases/2016.3.6.rst +++ b/doc/topics/releases/2016.3.6.rst @@ -127,11 +127,11 @@ Changes: * 35ddb79 Merge pull request `#40141`_ from bobrik/fallback-resolve * af1545d Use the first address if cannot connect to any -- **PR** `#40059`_: (*terminalmage*) Fix traceback when virtualenv.managed is invoked with nonexistant user +- **PR** `#40059`_: (*terminalmage*) Fix traceback when virtualenv.managed is invoked with nonexistent user @ *2017-03-16T20:46:43Z* * 116201f Merge pull request `#40059`_ from terminalmage/fix-virtualenv-traceback - * e3cfd29 Fix traceback when virtualenv.managed is invoked with nonexistant user + * e3cfd29 Fix traceback when virtualenv.managed is invoked with nonexistent user - **PR** `#40090`_: (*rallytime*) Back-port `#40056`_ to 2016.3 @ *2017-03-16T19:42:58Z* @@ -153,13 +153,13 @@ Changes: * d36bdb1 Merge pull request `#40070`_ from Ch3LL/2016.3.6_release * a1f8b49 update 2016.3.6 release notes with additional PR's -- **PR** `#40018`_: (*meaksh*) Allows overriding 'timeout' and 'gather_job_timeout' to 'manage.up' runner call +- **PR** `#40018`_: (*meaksh*) Allow overriding 'timeout' and 'gather_job_timeout' to 'manage.up' runner call @ *2017-03-15T19:43:01Z* * 8dcffc7 Merge pull request `#40018`_ from meaksh/2016.3-handling-timeouts-for-manage.up-runner - * 9f5c3b7 Allows to set custom timeouts for 'manage.up' and 'manage.status' + * 9f5c3b7 Allow setting custom timeouts for 'manage.up' and 'manage.status' - * 2102d9c Allows to set 'timeout' and 'gather_job_timeout' via kwargs + * 2102d9c Allow setting 'timeout' and 'gather_job_timeout' via kwargs - **PR** `#40038`_: (*velom*) correctly parse "pkg_name===version" from pip freeze @ *2017-03-15T19:30:03Z* @@ -197,11 +197,11 @@ Changes: * 5d84b40 Attempt to fix failing grains tests in 2016.3 -- **PR** `#39980`_: (*vutny*) [2016.3] Allow to use `bg` kwarg for `cmd.run` state function +- **PR** `#39980`_: (*vutny*) [2016.3] Allow using `bg` kwarg for `cmd.run` state function @ *2017-03-14T17:16:14Z* * 0c61d06 Merge pull request `#39980`_ from vutny/cmd-run-state-bg - * a81dc9d [2016.3] Allow to use `bg` kwarg for 
`cmd.run` state function + * a81dc9d [2016.3] Allow using `bg` kwarg for `cmd.run` state function - **PR** `#39994`_: (*rallytime*) Add a versionadded tag for dockerng ulimits addition @ *2017-03-13T20:58:02Z* @@ -658,7 +658,7 @@ Changes: - **ISSUE** `#39220`_: (*lvg01*) state file.line skips leading spaces in content with mode:ensure and indent:False | refs: `#39221`_ `#39221`_ `#39221`_ `#39221`_ * 5244041 Merge pull request `#39221`_ from lvg01/fix-bug-39220 - * e8a41d6 Removes to early content stripping (stripping is allready done when needed with ident:true), fixes `#39220`_ + * e8a41d6 Removes to early content stripping (stripping is already done when needed with ident:true), fixes `#39220`_ * a4b169e Fixed wrong logic, fixes `#39220`_ @@ -807,11 +807,11 @@ Changes: * 97521b3 Second attempt to fix prepending of root_dir to paths -* 6ffeda3 Clarify ipv6 option for minion and inteface for master, closes `#39118`_ (`#39131`_) +* 6ffeda3 Clarify ipv6 option for minion and interface for master, closes `#39118`_ (`#39131`_) - **ISSUE** `#39118`_: (*bobrik*) Minion ipv6 option is not documented | refs: `#39289`_ - - **PR** `#39131`_: (*bobrik*) Clarify ipv6 option for minion and inteface for master, closes `#39118`_ + - **PR** `#39131`_: (*bobrik*) Clarify ipv6 option for minion and interface for master, closes `#39118`_ * 646b9ea Don't abort pillar.get with merge=True if default is None (`#39116`_) @@ -1013,7 +1013,7 @@ Changes: * e40fac5 Catch MinionError in file.source_list -- **PR** `#38875`_: (*terminalmage*) Reactor: fix traceback when salt:// path is nonexistant +- **PR** `#38875`_: (*terminalmage*) Reactor: fix traceback when salt:// path is nonexistent @ *2017-01-24T15:23:39Z* - **ISSUE** `#36121`_: (*Ashald*) TemplateNotFound/Unable to cache file @@ -1021,7 +1021,7 @@ Changes: * b5df104 Merge pull request `#38875`_ from terminalmage/issue36121 * fbc4d2a reactor: ensure glob_ref is a string - * 2e443d7 cp.cache_file: add note re: return for nonexistant salt:// path + * 2e443d7 cp.cache_file: add note re: return for nonexistent salt:// path - **PR** `#38890`_: (*cro*) Backport `#38887`_ to 2016.3: Enable resetting a VM via salt-cloud & VMware driver @ *2017-01-24T15:15:35Z* diff --git a/doc/topics/sdb/index.rst b/doc/topics/sdb/index.rst index 23a0e90f9e9..c4d94120e2c 100644 --- a/doc/topics/sdb/index.rst +++ b/doc/topics/sdb/index.rst @@ -98,7 +98,7 @@ a new value using a command like: Deleting values (if supported by the driver) is done pretty much the same way as getting them. Provided that you have a profile called ``mykvstore`` that uses -a driver allowing to delete values you would delete a value as shown bellow: +a driver allowing to delete values you would delete a value as shown below: .. code-block:: bash diff --git a/doc/topics/transports/tcp.rst b/doc/topics/transports/tcp.rst index d12bde129e4..b4bf3e6f80e 100644 --- a/doc/topics/transports/tcp.rst +++ b/doc/topics/transports/tcp.rst @@ -66,7 +66,7 @@ Specific options can be sent to the minion also, as defined in the Python .. note:: - While setting the ssl_version is not required, we recomend it. Some older + While setting the ssl_version is not required, we recommend it. Some older versions of python do not support the latest TLS protocol and if this is the case for your version of python we strongly recommend upgrading your version of Python. 
diff --git a/pkg/salt-api.service b/pkg/salt-api.service index c3e67d510ce..7ca582dfb4e 100644 --- a/pkg/salt-api.service +++ b/pkg/salt-api.service @@ -1,5 +1,6 @@ [Unit] Description=The Salt API +Documentation=man:salt-api(1) file:///usr/share/doc/salt/html/contents.html https://docs.saltstack.com/en/latest/contents.html After=network.target [Service] diff --git a/pkg/salt-master.service b/pkg/salt-master.service index 1f4650f8728..4996b969560 100644 --- a/pkg/salt-master.service +++ b/pkg/salt-master.service @@ -1,5 +1,6 @@ [Unit] Description=The Salt Master Server +Documentation=man:salt-master(1) file:///usr/share/doc/salt/html/contents.html https://docs.saltstack.com/en/latest/contents.html After=network.target [Service] diff --git a/pkg/salt-minion.service b/pkg/salt-minion.service index d007b9f626f..dc53d4e4762 100644 --- a/pkg/salt-minion.service +++ b/pkg/salt-minion.service @@ -1,5 +1,6 @@ [Unit] Description=The Salt Minion +Documentation=man:salt-minion(1) file:///usr/share/doc/salt/html/contents.html https://docs.saltstack.com/en/latest/contents.html After=network.target salt-master.service [Service] diff --git a/pkg/salt-proxy@.service b/pkg/salt-proxy@.service index f97120a0558..e0633a408d6 100644 --- a/pkg/salt-proxy@.service +++ b/pkg/salt-proxy@.service @@ -1,5 +1,6 @@ [Unit] Description=salt-proxy service +Documentation=man:salt-proxy(1) file:///usr/share/doc/salt/html/contents.html https://docs.saltstack.com/en/latest/contents.html After=network.target [Service] diff --git a/pkg/salt-syndic.service b/pkg/salt-syndic.service index 5068394635f..9c9a1e09404 100644 --- a/pkg/salt-syndic.service +++ b/pkg/salt-syndic.service @@ -1,5 +1,6 @@ [Unit] Description=The Salt Master Server +Documentation=man:salt-syndic(1) file:///usr/share/doc/salt/html/contents.html https://docs.saltstack.com/en/latest/contents.html After=network.target [Service] diff --git a/pkg/suse/salt.changes b/pkg/suse/salt.changes index 874c78d5b4d..7e6ac7ee29d 100644 --- a/pkg/suse/salt.changes +++ b/pkg/suse/salt.changes @@ -130,7 +130,7 @@ Wed Oct 7 19:15:54 UTC 2015 - mrueckert@suse.de 2. the only part of the package which would really benefit from it would be the doc package. but given we only install the files via %doc, we can not use it for that either. -- reenable completions on distros newer than sle11 +- re-enable completions on distros newer than sle11 - do not use _datarootdir, use _datadir instead. ------------------------------------------------------------------- diff --git a/salt/beacons/avahi_announce.py b/salt/beacons/avahi_announce.py index 6039c7069dd..ee4d9a7bb75 100644 --- a/salt/beacons/avahi_announce.py +++ b/salt/beacons/avahi_announce.py @@ -91,7 +91,7 @@ def beacon(config): ''' Broadcast values via zeroconf - If the announced values are static, it is adviced to set run_once: True + If the announced values are static, it is advised to set run_once: True (do not poll) on the beacon configuration. The following are required configuration settings: diff --git a/salt/beacons/bonjour_announce.py b/salt/beacons/bonjour_announce.py index 2c9eb695585..fce5211464f 100644 --- a/salt/beacons/bonjour_announce.py +++ b/salt/beacons/bonjour_announce.py @@ -84,7 +84,7 @@ def beacon(config): ''' Broadcast values via zeroconf - If the announced values are static, it is adviced to set run_once: True + If the announced values are static, it is advised to set run_once: True (do not poll) on the beacon configuration. 
The following are required configuration settings: diff --git a/salt/client/mixins.py b/salt/client/mixins.py index 7e815ce8b18..5aa7dba5ad6 100644 --- a/salt/client/mixins.py +++ b/salt/client/mixins.py @@ -538,7 +538,7 @@ class AsyncClientMixin(object): # if this is a ret, we have our own set of rules if suffix == 'ret': - # Check if ouputter was passed in the return data. If this is the case, + # Check if outputter was passed in the return data. If this is the case, # then the return data will be a dict two keys: 'data' and 'outputter' if isinstance(event.get('return'), dict) \ and set(event['return']) == set(('data', 'outputter')): diff --git a/salt/cloud/clouds/ec2.py b/salt/cloud/clouds/ec2.py index cec92968211..3138bec4a0d 100644 --- a/salt/cloud/clouds/ec2.py +++ b/salt/cloud/clouds/ec2.py @@ -4342,7 +4342,7 @@ def delete_keypair(kwargs=None, call=None): return False params = {'Action': 'DeleteKeyPair', - 'KeyName.1': kwargs['keyname']} + 'KeyName': kwargs['keyname']} data = aws.query(params, return_url=True, diff --git a/salt/cloud/clouds/vmware.py b/salt/cloud/clouds/vmware.py index 921f48707be..aceafd82ba3 100644 --- a/salt/cloud/clouds/vmware.py +++ b/salt/cloud/clouds/vmware.py @@ -2657,7 +2657,7 @@ def create(vm_): log.info("Creating {0} from {1}({2})".format(vm_['name'], clone_type, vm_['clonefrom'])) if datastore and not datastore_ref and datastore_cluster_ref: - # datastore cluster has been specified so apply Storage DRS recomendations + # datastore cluster has been specified so apply Storage DRS recommendations pod_spec = vim.storageDrs.PodSelectionSpec(storagePod=datastore_cluster_ref) storage_spec = vim.storageDrs.StoragePlacementSpec( diff --git a/salt/cloud/deploy/bootstrap-salt.sh b/salt/cloud/deploy/bootstrap-salt.sh index cc1c4ffb8ef..ededd585088 100755 --- a/salt/cloud/deploy/bootstrap-salt.sh +++ b/salt/cloud/deploy/bootstrap-salt.sh @@ -18,7 +18,7 @@ #====================================================================================================================== set -o nounset # Treat unset variables as an error -__ScriptVersion="2017.01.10" +__ScriptVersion="2017.05.24" __ScriptName="bootstrap-salt.sh" __ScriptFullName="$0" @@ -239,6 +239,9 @@ _CUSTOM_REPO_URL="null" _CUSTOM_MASTER_CONFIG="null" _CUSTOM_MINION_CONFIG="null" _QUIET_GIT_INSTALLATION=$BS_FALSE +_REPO_URL="repo.saltstack.com" +_PY_EXE="" +_INSTALL_PY="$BS_FALSE" #--- FUNCTION ------------------------------------------------------------------------------------------------------- # NAME: __usage @@ -342,7 +345,7 @@ __usage() { points to a repository that mirrors Salt packages located at repo.saltstack.com. The option passed with -R replaces the "repo.saltstack.com". If -R is passed, -r is also set. Currently only - works on CentOS/RHEL based distributions. + works on CentOS/RHEL and Debian based distributions. -J Replace the Master config file with data passed in as a JSON string. If a Master config file is found, a reasonable effort will be made to save the file with a ".bak" extension. If used in conjunction with -C or -F, @@ -354,12 +357,22 @@ __usage() { no ".bak" file will be created as either of those options will force a complete overwrite of the file. -q Quiet salt installation from git (setup.py install -q) + -x Changes the python version used to install a git version of salt. Currently + this is considered experimental and has only been tested on Centos 6. This + only works for git installations. + -y Installs a different python version on host. 
Currently this has only been + tested with Centos 6 and is considered experimental. This will install the + ius repo on the box if disable repo is false. This must be used in conjunction + with -x . For example: + sh bootstrap.sh -P -y -x python2.7 git v2016.11.3 + The above will install python27 and install the git version of salt using the + python2.7 executable. This only works for git and pip installations. EOT } # ---------- end of function __usage ---------- -while getopts ':hvnDc:g:Gwk:s:MSNXCPFUKIA:i:Lp:dH:ZbflV:J:j:rR:aq' opt +while getopts ':hvnDc:g:Gyx:wk:s:MSNXCPFUKIA:i:Lp:dH:ZbflV:J:j:rR:aq' opt do case "${opt}" in @@ -424,6 +437,8 @@ do J ) _CUSTOM_MASTER_CONFIG=$OPTARG ;; j ) _CUSTOM_MINION_CONFIG=$OPTARG ;; q ) _QUIET_GIT_INSTALLATION=$BS_TRUE ;; + x ) _PY_EXE="$OPTARG" ;; + y ) _INSTALL_PY="$BS_TRUE" ;; \?) echo echoerror "Option does not exist : $OPTARG" @@ -524,14 +539,14 @@ elif [ "$ITYPE" = "stable" ]; then else __check_unparsed_options "$*" - if [ "$(echo "$1" | egrep '^(latest|1\.6|1\.7|2014\.1|2014\.7|2015\.5|2015\.8|2016\.3)$')" != "" ]; then + if [ "$(echo "$1" | egrep '^(latest|1\.6|1\.7|2014\.1|2014\.7|2015\.5|2015\.8|2016\.3|2016\.11)$')" != "" ]; then STABLE_REV="$1" shift elif [ "$(echo "$1" | egrep '^([0-9]*\.[0-9]*\.[0-9]*)$')" != "" ]; then STABLE_REV="archive/$1" shift else - echo "Unknown stable version: $1 (valid: 1.6, 1.7, 2014.1, 2014.7, 2015.5, 2015.8, 2016.3, latest, \$MAJOR.\$MINOR.\$PATCH)" + echo "Unknown stable version: $1 (valid: 1.6, 1.7, 2014.1, 2014.7, 2015.5, 2015.8, 2016.3, 2016.11, latest, \$MAJOR.\$MINOR.\$PATCH)" exit 1 fi fi @@ -549,10 +564,15 @@ if [ "$ITYPE" != "git" ]; then fi fi -# Check for -r if -R is being passed. Set -r with a warning. -if [ "$_CUSTOM_REPO_URL" != "null" ] && [ "$_DISABLE_REPOS" -eq $BS_FALSE ]; then - echowarn "Detected -R option. No other repositories will be configured when -R is used. Setting -r option to True." - _DISABLE_REPOS=$BS_TRUE +# Set the _REPO_URL value based on if -R was passed or not. Defaults to repo.saltstack.com. +if [ "$_CUSTOM_REPO_URL" != "null" ]; then + _REPO_URL="$_CUSTOM_REPO_URL" + + # Check for -r since -R is being passed. Set -r with a warning. + if [ "$_DISABLE_REPOS" -eq $BS_FALSE ]; then + echowarn "Detected -R option. No other repositories will be configured when -R is used. Setting -r option to True." + _DISABLE_REPOS=$BS_TRUE + fi fi # Check for any unparsed arguments. Should be an error. @@ -876,13 +896,13 @@ __strip_duplicates() { # enough. 
#---------------------------------------------------------------------------------------------------------------------- __sort_release_files() { - KNOWN_RELEASE_FILES=$(echo "(arch|centos|debian|ubuntu|fedora|redhat|suse|\ - mandrake|mandriva|gentoo|slackware|turbolinux|unitedlinux|lsb|system|\ + KNOWN_RELEASE_FILES=$(echo "(arch|alpine|centos|debian|ubuntu|fedora|redhat|suse|\ + mandrake|mandriva|gentoo|slackware|turbolinux|unitedlinux|void|lsb|system|\ oracle|os)(-|_)(release|version)" | sed -r 's:[[:space:]]::g') primary_release_files="" secondary_release_files="" # Sort know VS un-known files first - for release_file in $(echo "${@}" | sed -r 's:[[:space:]]:\n:g' | sort --unique --ignore-case); do + for release_file in $(echo "${@}" | sed -r 's:[[:space:]]:\n:g' | sort -f | uniq); do match=$(echo "$release_file" | egrep -i "${KNOWN_RELEASE_FILES}") if [ "${match}" != "" ]; then primary_release_files="${primary_release_files} ${release_file}" @@ -952,13 +972,11 @@ __gather_linux_system_info() { DISTRO_NAME="Oracle Linux" elif [ "${DISTRO_NAME}" = "AmazonAMI" ]; then DISTRO_NAME="Amazon Linux AMI" + elif [ "${DISTRO_NAME}" = "ManjaroLinux" ]; then + DISTRO_NAME="Arch Linux" elif [ "${DISTRO_NAME}" = "Arch" ]; then DISTRO_NAME="Arch Linux" return - elif [ "${DISTRO_NAME}" = "Raspbian" ]; then - DISTRO_NAME="Debian" - elif [ "${DISTRO_NAME}" = "Cumulus Linux" ]; then - DISTRO_NAME="Debian" fi rv=$(lsb_release -sr) [ "${rv}" != "" ] && DISTRO_VERSION=$(__parse_version_string "$rv") @@ -973,7 +991,6 @@ __gather_linux_system_info() { # We already have the distribution name and version return fi - # shellcheck disable=SC2035,SC2086 for rsource in $(__sort_release_files "$( cd /etc && /bin/ls *[_-]release *[_-]version 2>/dev/null | env -i sort | \ @@ -1006,6 +1023,7 @@ __gather_linux_system_info() { fi ;; arch ) n="Arch Linux" ;; + alpine ) n="Alpine Linux" ;; centos ) n="CentOS" ;; debian ) n="Debian" ;; ubuntu ) n="Ubuntu" ;; @@ -1016,6 +1034,7 @@ __gather_linux_system_info() { slackware ) n="Slackware" ;; turbolinux ) n="TurboLinux" ;; unitedlinux ) n="UnitedLinux" ;; + void ) n="VoidLinux" ;; oracle ) n="Oracle Linux" ;; system ) while read -r line; do @@ -1032,6 +1051,10 @@ __gather_linux_system_info() { rv="$(__unquote_string "$(grep '^VERSION_ID=' /etc/os-release | sed -e 's/^VERSION_ID=\(.*\)$/\1/g')")" [ "${rv}" != "" ] && v=$(__parse_version_string "$rv") || v="" case $(echo "${nn}" | tr '[:upper:]' '[:lower:]') in + alpine ) + n="Alpine Linux" + v="${rv}" + ;; amzn ) # Amazon AMI's after 2014.09 match here n="Amazon Linux AMI" @@ -1065,6 +1088,51 @@ __gather_linux_system_info() { } +#--- FUNCTION ------------------------------------------------------------------------------------------------------- +# NAME: __install_python_and_deps() +# DESCRIPTION: Install a different version of python and its dependencies on a host. Currently this has only been +# tested on Centos 6 and is considered experimental. 
+#---------------------------------------------------------------------------------------------------------------------- +__install_python_and_deps() { + if [ "$_PY_EXE" = "" ]; then + echoerror "Must specify -x with -y to install a specific python version" + exit 1 + fi + + PY_PKG_V=$(echo "$_PY_EXE" | sed -r "s/\.//g") + __PACKAGES="${PY_PKG_V}" + + + if [ $_DISABLE_REPOS -eq $BS_FALSE ]; then + echoinfo "Attempting to install a repo to help provide a separate python package" + echoinfo "$DISTRO_NAME_L" + case "$DISTRO_NAME_L" in + "red_hat"|"centos") + __PYTHON_REPO_URL="https://centos${DISTRO_MAJOR_VERSION}.iuscommunity.org/ius-release.rpm" + ;; + *) + echoerror "Installing a repo to provide a python package is only supported on Redhat/CentOS. + If a repo is already available please try running script with -r" + exit 1 + ;; + esac + + echoinfo "Installing IUS repo" + __yum_install_noinput "${__PYTHON_REPO_URL}" || return 1 + fi + + echoinfo "Installing ${__PACKAGES}" + __yum_install_noinput "${__PACKAGES}" || return 1 + + _PIP_PACKAGES="tornado PyYAML msgpack-python jinja2 pycrypto zmq" + if [ "$_INSTALL_CLOUD" -eq $BS_TRUE ]; then + _PIP_PACKAGES="${_PIP_PACKAGES} apache-libcloud" + fi + + __install_pip_pkgs "${_PIP_PACKAGES}" "${_PY_EXE}" || return 1 +} + + #--- FUNCTION ------------------------------------------------------------------------------------------------------- # NAME: __gather_sunos_system_info # DESCRIPTION: Discover SunOS system info @@ -1174,6 +1242,7 @@ __gather_system_info() { } + #--- FUNCTION ------------------------------------------------------------------------------------------------------- # NAME: __get_dpkg_architecture # DESCRIPTION: Determine primary architecture for packages to install on Debian and derivatives @@ -1189,6 +1258,7 @@ __get_dpkg_architecture() { return 0 } + #--- FUNCTION ------------------------------------------------------------------------------------------------------- # NAME: __ubuntu_derivatives_translation # DESCRIPTION: Map Ubuntu derivatives to their Ubuntu base versions. @@ -1197,7 +1267,7 @@ __get_dpkg_architecture() { #---------------------------------------------------------------------------------------------------------------------- # shellcheck disable=SC2034 __ubuntu_derivatives_translation() { - UBUNTU_DERIVATIVES="(trisquel|linuxmint|linaro|elementary_os)" + UBUNTU_DERIVATIVES="(trisquel|linuxmint|linaro|elementary_os|neon)" # Mappings trisquel_6_ubuntu_base="12.04" linuxmint_13_ubuntu_base="12.04" @@ -1205,6 +1275,7 @@ __ubuntu_derivatives_translation() { linuxmint_18_ubuntu_base="16.04" linaro_12_ubuntu_base="12.04" elementary_os_02_ubuntu_base="12.04" + neon_16_ubuntu_base="16.04" # Translate Ubuntu derivatives to their base Ubuntu version match=$(echo "$DISTRO_NAME_L" | egrep ${UBUNTU_DERIVATIVES}) @@ -1233,6 +1304,7 @@ __ubuntu_derivatives_translation() { fi } + #--- FUNCTION ------------------------------------------------------------------------------------------------------- # NAME: __ubuntu_codename_translation # DESCRIPTION: Map Ubuntu major versions to their corresponding codenames @@ -1265,12 +1337,18 @@ __ubuntu_codename_translation() { DISTRO_CODENAME="yakkety" fi ;; + "17") + if [ "$_april" ]; then + DISTRO_CODENAME="zesty" + fi + ;; *) DISTRO_CODENAME="trusty" ;; esac } + #--- FUNCTION ------------------------------------------------------------------------------------------------------- # NAME: __debian_derivatives_translation # DESCRIPTION: Map Debian derivatives to their Debian base versions. 
@@ -1279,25 +1357,26 @@ __ubuntu_codename_translation() { #---------------------------------------------------------------------------------------------------------------------- # shellcheck disable=SC2034 __debian_derivatives_translation() { - # If the file does not exist, return [ ! -f /etc/os-release ] && return - DEBIAN_DERIVATIVES="(kali|linuxmint|cumulus-linux)" + DEBIAN_DERIVATIVES="(cumulus_.+|kali|linuxmint|raspbian)" # Mappings - kali_1_debian_base="7.0" - linuxmint_1_debian_base="8.0" cumulus_2_debian_base="7.0" cumulus_3_debian_base="8.0" - - # Detect derivates, Cumulus Linux, Kali and LinuxMint *only* for now - rv=$(grep ^ID= /etc/os-release | sed -e 's/.*=//') + kali_1_debian_base="7.0" + linuxmint_1_debian_base="8.0" + raspbian_8_debian_base="8.0" # Translate Debian derivatives to their base Debian version - match=$(echo "$rv" | egrep ${DEBIAN_DERIVATIVES}) + match=$(echo "$DISTRO_NAME_L" | egrep ${DEBIAN_DERIVATIVES}) if [ "${match}" != "" ]; then case $match in + cumulus_*) + _major=$(echo "$DISTRO_VERSION" | sed 's/^\([0-9]*\).*/\1/g') + _debian_derivative="cumulus" + ;; kali) _major=$(echo "$DISTRO_VERSION" | sed 's/^\([0-9]*\).*/\1/g') _debian_derivative="kali" @@ -1306,9 +1385,9 @@ __debian_derivatives_translation() { _major=$(echo "$DISTRO_VERSION" | sed 's/^\([0-9]*\).*/\1/g') _debian_derivative="linuxmint" ;; - cumulus-linux) + raspbian) _major=$(echo "$DISTRO_VERSION" | sed 's/^\([0-9]*\).*/\1/g') - _debian_derivative="cumulus" + _debian_derivative="raspbian" ;; esac @@ -1338,6 +1417,7 @@ __check_and_refresh_suse_pkg_repo() { fi } + __gather_system_info echo @@ -1424,7 +1504,7 @@ __ubuntu_codename_translation if ([ "${DISTRO_NAME_L}" != "ubuntu" ] && [ "$ITYPE" = "daily" ]); then echoerror "${DISTRO_NAME} does not have daily packages support" exit 1 -elif ([ "$(echo "${DISTRO_NAME_L}" | egrep '(debian|ubuntu|centos|red_hat|oracle|scientific)')" = "" ] && [ "$ITYPE" = "stable" ] && [ "$STABLE_REV" != "latest" ]); then +elif ([ "$(echo "${DISTRO_NAME_L}" | egrep '(debian|ubuntu|centos|red_hat|oracle|scientific|amazon)')" = "" ] && [ "$ITYPE" = "stable" ] && [ "$STABLE_REV" != "latest" ]); then echoerror "${DISTRO_NAME} does not have major version pegged packages support" exit 1 fi @@ -1452,12 +1532,13 @@ fi # Starting from Ubuntu 16.10, gnupg-curl has been renamed to gnupg1-curl. 
GNUPG_CURL="gnupg-curl" -if ([ "${DISTRO_NAME_L}" = "ubuntu" ] && [ "${DISTRO_VERSION}" = "16.10" ]); then - GNUPG_CURL="gnupg1-curl" +if [ "${DISTRO_NAME_L}" = "ubuntu" ]; then + if [ "${DISTRO_VERSION}" = "16.10" ] || [ "$DISTRO_MAJOR_VERSION" -gt 16 ]; then + GNUPG_CURL="gnupg1-curl" + fi fi - #--- FUNCTION ------------------------------------------------------------------------------------------------------- # NAME: __function_defined # DESCRIPTION: Checks if a function is defined within this scripts scope @@ -1543,7 +1624,7 @@ __rpm_import_gpg() { __yum_install_noinput() { ENABLE_EPEL_CMD="" - if [ $_DISABLE_REPOS -eq $BS_TRUE ]; then + if [ $_DISABLE_REPOS -eq $BS_FALSE ]; then ENABLE_EPEL_CMD="--enablerepo=${_EPEL_REPO}" fi @@ -1571,6 +1652,15 @@ __git_clone_and_checkout() { export GIT_SSL_NO_VERIFY=1 fi + case ${OS_NAME_L} in + openbsd|freebsd|netbsd ) + __TAG_REGEX_MATCH=$(echo "${GIT_REV}" | sed -E 's/^(v?[0-9]{1,4}\.[0-9]{1,2})(\.[0-9]{1,2})?.*$/MATCH/') + ;; + * ) + __TAG_REGEX_MATCH=$(echo "${GIT_REV}" | sed 's/^.*\(v\?[[:digit:]]\{1,4\}\.[[:digit:]]\{1,2\}\)\(\.[[:digit:]]\{1,2\}\)\?.*$/MATCH/') + ;; + esac + __SALT_GIT_CHECKOUT_PARENT_DIR=$(dirname "${_SALT_GIT_CHECKOUT_DIR}" 2>/dev/null) __SALT_GIT_CHECKOUT_PARENT_DIR="${__SALT_GIT_CHECKOUT_PARENT_DIR:-/tmp/git}" __SALT_CHECKOUT_REPONAME="$(basename "${_SALT_GIT_CHECKOUT_DIR}" 2>/dev/null)" @@ -1616,7 +1706,7 @@ __git_clone_and_checkout() { if [ "$_FORCE_SHALLOW_CLONE" -eq "${BS_TRUE}" ]; then echoinfo "Forced shallow cloning of git repository." __SHALLOW_CLONE=$BS_TRUE - elif [ "$(echo "$GIT_REV" | sed 's/^.*\(v\?[[:digit:]]\{1,4\}\.[[:digit:]]\{1,2\}\)\(\.[[:digit:]]\{1,2\}\)\?.*$/MATCH/')" = "MATCH" ]; then + elif [ "$__TAG_REGEX_MATCH" = "MATCH" ]; then echoinfo "Git revision matches a Salt version tag, shallow cloning enabled." __SHALLOW_CLONE=$BS_TRUE else @@ -1750,7 +1840,7 @@ __check_end_of_life_versions() { centos) # CentOS versions lower than 5 are no longer supported - if [ "$DISTRO_MAJOR_VERSION" -lt 5 ]; then + if [ "$DISTRO_MAJOR_VERSION" -lt 6 ]; then echoerror "End of life distributions are not supported." echoerror "Please consider upgrading to the next stable. See:" echoerror " http://wiki.centos.org/Download" @@ -1760,7 +1850,7 @@ __check_end_of_life_versions() { red_hat*linux) # Red Hat (Enterprise) Linux versions lower than 5 are no longer supported - if [ "$DISTRO_MAJOR_VERSION" -lt 5 ]; then + if [ "$DISTRO_MAJOR_VERSION" -lt 6 ]; then echoerror "End of life distributions are not supported." echoerror "Please consider upgrading to the next stable. See:" echoerror " https://access.redhat.com/support/policy/updates/errata/" @@ -1768,6 +1858,36 @@ __check_end_of_life_versions() { fi ;; + oracle*linux) + # Oracle Linux versions lower than 5 are no longer supported + if [ "$DISTRO_MAJOR_VERSION" -lt 6 ]; then + echoerror "End of life distributions are not supported." + echoerror "Please consider upgrading to the next stable. See:" + echoerror " http://www.oracle.com/us/support/library/elsp-lifetime-069338.pdf" + exit 1 + fi + ;; + + scientific*linux) + # Scientific Linux versions lower than 5 are no longer supported + if [ "$DISTRO_MAJOR_VERSION" -lt 6 ]; then + echoerror "End of life distributions are not supported." + echoerror "Please consider upgrading to the next stable. 
See:" + echoerror " https://www.scientificlinux.org/downloads/sl-versions/" + exit 1 + fi + ;; + + cloud*linux) + # Cloud Linux versions lower than 5 are no longer supported + if [ "$DISTRO_MAJOR_VERSION" -lt 6 ]; then + echoerror "End of life distributions are not supported." + echoerror "Please consider upgrading to the next stable. See:" + echoerror " https://docs.cloudlinux.com/index.html?cloudlinux_life-cycle.html" + exit 1 + fi + ;; + amazon*linux*ami) # Amazon Linux versions lower than 2012.0X no longer supported if [ "$DISTRO_MAJOR_VERSION" -lt 2012 ]; then @@ -2136,6 +2256,32 @@ __check_services_openbsd() { fi } # ---------- end of function __check_services_openbsd ---------- +#--- FUNCTION ------------------------------------------------------------------------------------------------------- +# NAME: __check_services_alpine +# DESCRIPTION: Return 0 or 1 in case the service is enabled or not +# PARAMETERS: servicename +#---------------------------------------------------------------------------------------------------------------------- +__check_services_alpine() { + if [ $# -eq 0 ]; then + echoerror "You need to pass a service name to check!" + exit 1 + elif [ $# -ne 1 ]; then + echoerror "You need to pass a service name to check as the single argument to the function" + fi + + servicename=$1 + echodebug "Checking if service ${servicename} is enabled" + + # shellcheck disable=SC2086,SC2046,SC2144 + if rc-status $(rc-status -r) | tail -n +2 | grep -q "\<$servicename\>"; then + echodebug "Service ${servicename} is enabled" + return 0 + else + echodebug "Service ${servicename} is NOT enabled" + return 1 + fi +} # ---------- end of function __check_services_openbsd ---------- + #--- FUNCTION ------------------------------------------------------------------------------------------------------- # NAME: __create_virtualenv @@ -2171,6 +2317,36 @@ __activate_virtualenv() { return 0 } # ---------- end of function __activate_virtualenv ---------- +#--- FUNCTION ------------------------------------------------------------------------------------------------------- +# NAME: __install_pip_pkgs +# DESCRIPTION: Return 0 or 1 if successfully able to install pip packages. Can provide a different python version to +# install pip packages with. If $py_ver is not specified it will use the default python version. +# PARAMETERS: pkgs, py_ver +#---------------------------------------------------------------------------------------------------------------------- + +__install_pip_pkgs() { + _pip_pkgs="$1" + _py_exe="$2" + _py_pkg=$(echo "$_py_exe" | sed -r "s/\.//g") + _pip_cmd="${_py_exe} -m pip" + + if [ "${_py_exe}" = "" ]; then + _py_exe='python' + fi + + __check_pip_allowed + + # Install pip and pip dependencies + if ! __check_command_exists "${_pip_cmd} --version"; then + __PACKAGES="${_py_pkg}-setuptools ${_py_pkg}-pip gcc ${_py_pkg}-devel" + # shellcheck disable=SC2086 + __yum_install_noinput ${__PACKAGES} || return 1 + fi + + echoinfo "Installing pip packages: ${_pip_pkgs} using ${_py_exe}" + # shellcheck disable=SC2086 + ${_pip_cmd} install ${_pip_pkgs} || return 1 +} #--- FUNCTION ------------------------------------------------------------------------------------------------------- # NAME: __install_pip_deps @@ -2393,12 +2569,12 @@ install_ubuntu_stable_deps() { __REPO_ARCH="$DPKG_ARCHITECTURE" if [ "$DPKG_ARCHITECTURE" = "i386" ]; then - echoerror "repo.saltstack.com likely doesn't have all required 32-bit packages for Ubuntu $DISTRO_MAJOR_VERSION (yet?)." 
+ echoerror "$_REPO_URL likely doesn't have all required 32-bit packages for Ubuntu $DISTRO_MAJOR_VERSION (yet?)." # amd64 is just a part of repository URI, 32-bit pkgs are hosted under the same location __REPO_ARCH="amd64" elif [ "$DPKG_ARCHITECTURE" != "amd64" ]; then - echoerror "repo.saltstack.com doesn't have packages for your system architecture: $DPKG_ARCHITECTURE." + echoerror "$_REPO_URL doesn't have packages for your system architecture: $DPKG_ARCHITECTURE." if [ "$ITYPE" != "git" ]; then echoerror "You can try git installation mode, i.e.: sh ${__ScriptName} git v2016.3.1" exit 1 @@ -2408,17 +2584,17 @@ install_ubuntu_stable_deps() { # Versions starting with 2015.5.6, 2015.8.1 and 2016.3.0 are hosted at repo.saltstack.com if [ "$(echo "$STABLE_REV" | egrep '^(2015\.5|2015\.8|2016\.3|2016\.11|latest|archive\/)')" != "" ]; then # Workaround for latest non-LTS ubuntu - if [ "$DISTRO_VERSION" = "16.10" ]; then + if [ "$DISTRO_VERSION" = "16.10" ] || [ "$DISTRO_MAJOR_VERSION" -gt 16 ]; then echowarn "Non-LTS Ubuntu detected, but stable packages requested. Trying packages from latest LTS release. You may experience problems." UBUNTU_VERSION=16.04 - UBUNTU_CODENAME=xenial + UBUNTU_CODENAME="xenial" else UBUNTU_VERSION=$DISTRO_VERSION UBUNTU_CODENAME=$DISTRO_CODENAME fi # SaltStack's stable Ubuntu repository: - SALTSTACK_UBUNTU_URL="${HTTP_VAL}://repo.saltstack.com/apt/ubuntu/${UBUNTU_VERSION}/${__REPO_ARCH}/${STABLE_REV}" + SALTSTACK_UBUNTU_URL="${HTTP_VAL}://${_REPO_URL}/apt/ubuntu/${UBUNTU_VERSION}/${__REPO_ARCH}/${STABLE_REV}" echo "deb $SALTSTACK_UBUNTU_URL $UBUNTU_CODENAME main" > /etc/apt/sources.list.d/saltstack.list # Make sure https transport is available @@ -2794,7 +2970,7 @@ install_debian_7_deps() { __REPO_ARCH="$DPKG_ARCHITECTURE" if [ "$DPKG_ARCHITECTURE" = "i386" ]; then - echoerror "repo.saltstack.com likely doesn't have all required 32-bit packages for Debian $DISTRO_MAJOR_VERSION (yet?)." + echoerror "$_REPO_URL likely doesn't have all required 32-bit packages for Debian $DISTRO_MAJOR_VERSION (yet?)." if [ "$ITYPE" != "git" ]; then echoerror "You can try git installation mode, i.e.: sh ${__ScriptName} git v2016.3.1" @@ -2803,14 +2979,14 @@ install_debian_7_deps() { # amd64 is just a part of repository URI, 32-bit pkgs are hosted under the same location __REPO_ARCH="amd64" elif [ "$DPKG_ARCHITECTURE" != "amd64" ]; then - echoerror "repo.saltstack.com doesn't have packages for your system architecture: $DPKG_ARCHITECTURE." + echoerror "$_REPO_URL doesn't have packages for your system architecture: $DPKG_ARCHITECTURE." exit 1 fi # Versions starting with 2015.8.7 and 2016.3.0 are hosted at repo.saltstack.com if [ "$(echo "$STABLE_REV" | egrep '^(2015\.8|2016\.3|2016\.11|latest|archive\/201[5-6]\.)')" != "" ]; then # amd64 is just a part of repository URI, 32-bit pkgs are hosted under the same location - SALTSTACK_DEBIAN_URL="${HTTP_VAL}://repo.saltstack.com/apt/debian/${DISTRO_MAJOR_VERSION}/${__REPO_ARCH}/${STABLE_REV}" + SALTSTACK_DEBIAN_URL="${HTTP_VAL}://${_REPO_URL}/apt/debian/${DISTRO_MAJOR_VERSION}/${__REPO_ARCH}/${STABLE_REV}" echo "deb $SALTSTACK_DEBIAN_URL wheezy main" > "/etc/apt/sources.list.d/saltstack.list" if [ "$HTTP_VAL" = "https" ] ; then @@ -2826,7 +3002,7 @@ install_debian_7_deps() { apt-get update else - echowarn "Packages from repo.saltstack.com are required to install Salt version 2015.8 or higher on Debian $DISTRO_MAJOR_VERSION." 
+ echowarn "Packages from $_REPO_URL are required to install Salt version 2015.8 or higher on Debian $DISTRO_MAJOR_VERSION." fi # Additionally install procps and pciutils which allows for Docker bootstraps. See 366#issuecomment-39666813 @@ -2873,7 +3049,7 @@ install_debian_8_deps() { __REPO_ARCH="$DPKG_ARCHITECTURE" if [ "$DPKG_ARCHITECTURE" = "i386" ]; then - echoerror "repo.saltstack.com likely doesn't have all required 32-bit packages for Debian $DISTRO_MAJOR_VERSION (yet?)." + echoerror "$_REPO_URL likely doesn't have all required 32-bit packages for Debian $DISTRO_MAJOR_VERSION (yet?)." if [ "$ITYPE" != "git" ]; then echoerror "You can try git installation mode, i.e.: sh ${__ScriptName} git v2016.3.1" @@ -2882,7 +3058,7 @@ install_debian_8_deps() { # amd64 is just a part of repository URI, 32-bit pkgs are hosted under the same location __REPO_ARCH="amd64" elif [ "$DPKG_ARCHITECTURE" != "amd64" ] && [ "$DPKG_ARCHITECTURE" != "armhf" ]; then - echoerror "repo.saltstack.com doesn't have packages for your system architecture: $DPKG_ARCHITECTURE." + echoerror "$_REPO_URL doesn't have packages for your system architecture: $DPKG_ARCHITECTURE." echoerror "Try git installation mode with pip and disable SaltStack apt repository, for example:" echoerror " sh ${__ScriptName} -r -P git v2016.3.1" @@ -2891,7 +3067,7 @@ install_debian_8_deps() { # Versions starting with 2015.5.6, 2015.8.1 and 2016.3.0 are hosted at repo.saltstack.com if [ "$(echo "$STABLE_REV" | egrep '^(2015\.5|2015\.8|2016\.3|2016\.11|latest|archive\/201[5-6]\.)')" != "" ]; then - SALTSTACK_DEBIAN_URL="${HTTP_VAL}://repo.saltstack.com/apt/debian/${DISTRO_MAJOR_VERSION}/${__REPO_ARCH}/${STABLE_REV}" + SALTSTACK_DEBIAN_URL="${HTTP_VAL}://${_REPO_URL}/apt/debian/${DISTRO_MAJOR_VERSION}/${__REPO_ARCH}/${STABLE_REV}" echo "deb $SALTSTACK_DEBIAN_URL jessie main" > "/etc/apt/sources.list.d/saltstack.list" if [ "$HTTP_VAL" = "https" ] ; then @@ -3396,18 +3572,10 @@ __install_epel_repository() { # Download latest 'epel-release' package for the distro version directly epel_repo_url="${HTTP_VAL}://dl.fedoraproject.org/pub/epel/epel-release-latest-${DISTRO_MAJOR_VERSION}.noarch.rpm" - if [ "$DISTRO_MAJOR_VERSION" -eq 5 ]; then - __fetch_url /tmp/epel-release.rpm "$epel_repo_url" || return 1 - rpm -Uvh --force /tmp/epel-release.rpm || return 1 - rm -f /tmp/epel-release.rpm - elif [ "$DISTRO_MAJOR_VERSION" -ge 6 ]; then - rpm -Uvh --force "$epel_repo_url" || return 1 - else - echoerror "Failed add EPEL repository support." - return 1 - fi + rpm -Uvh --force "$epel_repo_url" || return 1 _EPEL_REPOS_INSTALLED=$BS_TRUE + return 0 } @@ -3432,34 +3600,17 @@ __install_saltstack_rhel_repository() { repo_rev="latest" fi - # Check if a custom repo URL was passed with -R. If not, use repo.salstack.com. 
- if [ "$_CUSTOM_REPO_URL" != "null" ]; then - repo_url="$_CUSTOM_REPO_URL" - else - repo_url="repo.saltstack.com" - fi - - # Cloud Linux $releasever = 7.x, which doesn't exist in repo.saltstack.com, we need this to be "7" - if [ "${DISTRO_NAME}" = "Cloud Linux" ] && [ "${DISTRO_MAJOR_VERSION}" = "7" ]; then - base_url="${HTTP_VAL}://${repo_url}/yum/redhat/${DISTRO_MAJOR_VERSION}/\$basearch/${repo_rev}/" - else - base_url="${HTTP_VAL}://${repo_url}/yum/redhat/\$releasever/\$basearch/${repo_rev}/" - fi - - fetch_url="${HTTP_VAL}://${repo_url}/yum/redhat/${DISTRO_MAJOR_VERSION}/${CPU_ARCH_L}/${repo_rev}/" - - if [ "${DISTRO_MAJOR_VERSION}" -eq 5 ]; then - gpg_key="SALTSTACK-EL5-GPG-KEY.pub" - else - gpg_key="SALTSTACK-GPG-KEY.pub" - fi - + # Avoid using '$releasever' variable for yum. + # Instead, this should work correctly on all RHEL variants. + base_url="${HTTP_VAL}://${_REPO_URL}/yum/redhat/${DISTRO_MAJOR_VERSION}/\$basearch/${repo_rev}/" + gpg_key="SALTSTACK-GPG-KEY.pub" repo_file="/etc/yum.repos.d/saltstack.repo" + if [ ! -s "$repo_file" ]; then cat <<_eof > "$repo_file" [saltstack] name=SaltStack ${repo_rev} Release Channel for RHEL/CentOS \$releasever -baseurl=$base_url +baseurl=${base_url} skip_if_unavailable=True gpgcheck=1 gpgkey=${base_url}${gpg_key} @@ -3467,16 +3618,10 @@ enabled=1 enabled_metadata=1 _eof + fetch_url="${HTTP_VAL}://${_REPO_URL}/yum/redhat/${DISTRO_MAJOR_VERSION}/${CPU_ARCH_L}/${repo_rev}/" __rpm_import_gpg "${fetch_url}${gpg_key}" || return 1 fi - if [ "$DISTRO_MAJOR_VERSION" -eq 7 ] && ([ "$repo_rev" = "latest" ] || [ "$repo_rev" = "2015.8" ]); then - # Import CentOS 7 GPG key on RHEL for installing base dependencies from - # Salt corporate repository - rpm -qa gpg-pubkey\* --qf "%{name}-%{version}\n" | grep -q ^gpg-pubkey-f4a80eb5$ || \ - __rpm_import_gpg "${HTTP_VAL}://${repo_url}/yum/redhat/7/x86_64/${repo_rev}/base/RPM-GPG-KEY-CentOS-7" || return 1 - fi - return 0 } @@ -3505,11 +3650,6 @@ install_centos_stable_deps() { yum -y update || return 1 fi - if [ "$DISTRO_MAJOR_VERSION" -eq 5 ]; then - # Install curl which is not included in @core CentOS 5 installation - __check_command_exists curl || yum -y install "curl.${CPU_ARCH_L}" || return 1 - fi - if [ $_DISABLE_REPOS -eq $BS_FALSE ]; then __install_epel_repository || return 1 __install_saltstack_rhel_repository || return 1 @@ -3522,14 +3662,8 @@ install_centos_stable_deps() { __install_saltstack_rhel_repository || return 1 fi - __PACKAGES="yum-utils chkconfig" - # YAML module is used for generating custom master/minion configs - if [ "$DISTRO_MAJOR_VERSION" -eq 5 ]; then - __PACKAGES="${__PACKAGES} python26-PyYAML" - else - __PACKAGES="${__PACKAGES} PyYAML" - fi + __PACKAGES="yum-utils chkconfig PyYAML" # shellcheck disable=SC2086 __yum_install_noinput ${__PACKAGES} || return 1 @@ -3598,31 +3732,19 @@ install_centos_stable_post() { } install_centos_git_deps() { - if [ "$_INSECURE_DL" -eq $BS_FALSE ] && [ "${_SALT_REPO_URL%%://*}" = "https" ]; then - if [ "$DISTRO_MAJOR_VERSION" -gt 5 ]; then - __yum_install_noinput ca-certificates || return 1 - else - __yum_install_noinput "openssl.${CPU_ARCH_L}" || return 1 - fi - fi - install_centos_stable_deps || return 1 + if [ "$_INSECURE_DL" -eq $BS_FALSE ] && [ "${_SALT_REPO_URL%%://*}" = "https" ]; then + __yum_install_noinput ca-certificates || return 1 + fi + if ! 
__check_command_exists git; then __yum_install_noinput git || return 1 fi __git_clone_and_checkout || return 1 - __PACKAGES="" - - if [ "$DISTRO_MAJOR_VERSION" -eq 5 ]; then - __PACKAGES="${__PACKAGES} python26 python26-crypto python26-jinja2 python26-msgpack python26-requests" - __PACKAGES="${__PACKAGES} python26-tornado python26-zmq" - else - __PACKAGES="${__PACKAGES} python-crypto python-futures python-msgpack python-zmq python-jinja2" - __PACKAGES="${__PACKAGES} python-requests python-tornado" - fi + __PACKAGES="python-crypto python-futures python-msgpack python-zmq python-jinja2 python-requests python-tornado" if [ "$DISTRO_MAJOR_VERSION" -ge 7 ]; then __PACKAGES="${__PACKAGES} systemd-python" @@ -3632,8 +3754,12 @@ install_centos_git_deps() { __PACKAGES="${__PACKAGES} python-libcloud" fi - # shellcheck disable=SC2086 - __yum_install_noinput ${__PACKAGES} || return 1 + if [ "${_INSTALL_PY}" = "${BS_TRUE}" ]; then + __install_python_and_deps || return 1 + else + # shellcheck disable=SC2086 + __yum_install_noinput ${__PACKAGES} || return 1 + fi # Let's trigger config_salt() if [ "$_TEMP_CONFIG_DIR" = "null" ]; then @@ -3645,10 +3771,11 @@ install_centos_git_deps() { } install_centos_git() { - if [ "$DISTRO_MAJOR_VERSION" -eq 5 ]; then - _PYEXE=python2.6 + if [ "${_PY_EXE}" != "" ]; then + _PYEXE=${_PY_EXE} + echoinfo "Using the following python version: ${_PY_EXE} to install salt" else - _PYEXE=python2 + _PYEXE='python2' fi if [ -f "${_SALT_GIT_CHECKOUT_DIR}/salt/syspaths.py" ]; then @@ -3724,9 +3851,9 @@ install_centos_restart_daemons() { return 1 fi elif [ -f /etc/init.d/salt-$fname ]; then - # Still in SysV init!? - /etc/init.d/salt-$fname stop > /dev/null 2>&1 - /etc/init.d/salt-$fname start + # Disable stdin to fix shell session hang on killing tee pipe + service salt-$fname stop < /dev/null > /dev/null 2>&1 + service salt-$fname start < /dev/null elif [ -f /usr/bin/systemctl ]; then # CentOS 7 uses systemd /usr/bin/systemctl stop salt-$fname > /dev/null 2>&1 @@ -4158,12 +4285,189 @@ install_cloud_linux_check_services() { # ####################################################################################################################### +####################################################################################################################### +# +# Alpine Linux Install Functions +# +install_alpine_linux_stable_deps() { + if ! grep -q '^[^#].\+alpine/.\+/community' /etc/apk/repositories; then + # Add community repository entry based on the "main" repo URL + __REPO=$(grep '^[^#].\+alpine/.\+/main\>' /etc/apk/repositories) + echo "${__REPO}" | sed -e 's/main/community/' >> /etc/apk/repositories + fi + + apk update + + # Get latest root CA certs + apk -U add ca-certificates + + if ! __check_command_exists openssl; then + # Install OpenSSL to be able to pull from https:// URLs + apk -U add openssl + fi +} + +install_alpine_linux_git_deps() { + install_alpine_linux_stable_deps || return 1 + + apk -U add python2 py-virtualenv py2-crypto py2-setuptools \ + py2-jinja2 py2-yaml py2-markupsafe py2-msgpack py2-psutil \ + py2-zmq zeromq py2-requests || return 1 + + if ! 
__check_command_exists git; then + apk -U add git || return 1 + fi + + __git_clone_and_checkout || return 1 + + if [ -f "${_SALT_GIT_CHECKOUT_DIR}/requirements/base.txt" ]; then + # We're on the develop branch, install whichever tornado is on the requirements file + __REQUIRED_TORNADO="$(grep tornado "${_SALT_GIT_CHECKOUT_DIR}/requirements/base.txt")" + if [ "${__REQUIRED_TORNADO}" != "" ]; then + apk -U add py2-tornado || return 1 + fi + fi + + # Let's trigger config_salt() + if [ "$_TEMP_CONFIG_DIR" = "null" ]; then + _TEMP_CONFIG_DIR="${_SALT_GIT_CHECKOUT_DIR}/conf/" + CONFIG_SALT_FUNC="config_salt" + fi +} + +install_alpine_linux_stable() { + __PACKAGES="salt" + + if [ "$_INSTALL_CLOUD" -eq $BS_TRUE ];then + __PACKAGES="${__PACKAGES} salt-cloud" + fi + if [ "$_INSTALL_MASTER" -eq $BS_TRUE ]; then + __PACKAGES="${__PACKAGES} salt-master" + fi + if [ "$_INSTALL_MINION" -eq $BS_TRUE ]; then + __PACKAGES="${__PACKAGES} salt-minion" + fi + if [ "$_INSTALL_SYNDIC" -eq $BS_TRUE ]; then + __PACKAGES="${__PACKAGES} salt-syndic" + fi + + # shellcheck disable=SC2086 + apk -U add ${__PACKAGES} || return 1 + return 0 +} + +install_alpine_linux_git() { + if [ -f "${_SALT_GIT_CHECKOUT_DIR}/salt/syspaths.py" ]; then + python2 setup.py --salt-config-dir="$_SALT_ETC_DIR" --salt-cache-dir="${_SALT_CACHE_DIR}" ${SETUP_PY_INSTALL_ARGS} install || return 1 + else + python2 setup.py ${SETUP_PY_INSTALL_ARGS} install || return 1 + fi +} + +install_alpine_linux_post() { + for fname in api master minion syndic; do + # Skip if not meant to be installed + [ $fname = "api" ] && \ + ([ "$_INSTALL_MASTER" -eq $BS_FALSE ] || ! __check_command_exists "salt-${fname}") && continue + [ $fname = "master" ] && [ "$_INSTALL_MASTER" -eq $BS_FALSE ] && continue + [ $fname = "minion" ] && [ "$_INSTALL_MINION" -eq $BS_FALSE ] && continue + [ $fname = "syndic" ] && [ "$_INSTALL_SYNDIC" -eq $BS_FALSE ] && continue + + if [ -f /sbin/rc-update ]; then + script_url="${_SALTSTACK_REPO_URL%.git}/raw/develop/pkg/alpine/salt-$fname" + [ -f "/etc/init.d/salt-$fname" ] || __fetch_url "/etc/init.d/salt-$fname" "$script_url" + + if [ $? -eq 0 ]; then + chmod +x "/etc/init.d/salt-$fname" + else + echoerror "Failed to get OpenRC init script for $OS_NAME from $script_url." 
+ return 1 + fi + + # Skip salt-api since the service should be opt-in and not necessarily started on boot + [ $fname = "api" ] && continue + + /sbin/rc-update add "salt-$fname" > /dev/null 2>&1 || return 1 + fi + done +} + +install_alpine_linux_restart_daemons() { + [ "${_START_DAEMONS}" -eq $BS_FALSE ] && return + + for fname in api master minion syndic; do + # Skip salt-api since the service should be opt-in and not necessarily started on boot + [ $fname = "api" ] && continue + + # Skip if not meant to be installed + [ $fname = "master" ] && [ "$_INSTALL_MASTER" -eq $BS_FALSE ] && continue + [ $fname = "minion" ] && [ "$_INSTALL_MINION" -eq $BS_FALSE ] && continue + + # Disable stdin to fix shell session hang on killing tee pipe + /sbin/rc-service salt-$fname stop < /dev/null > /dev/null 2>&1 + /sbin/rc-service salt-$fname start < /dev/null || return 1 + done +} + +install_alpine_linux_check_services() { + for fname in api master minion syndic; do + # Skip salt-api since the service should be opt-in and not necessarily started on boot + [ $fname = "api" ] && continue + + # Skip if not meant to be installed + [ $fname = "master" ] && [ "$_INSTALL_MASTER" -eq $BS_FALSE ] && continue + [ $fname = "minion" ] && [ "$_INSTALL_MINION" -eq $BS_FALSE ] && continue + + __check_services_alpine salt-$fname || return 1 + done + + return 0 +} + +daemons_running_alpine_linux() { + [ "${_START_DAEMONS}" -eq $BS_FALSE ] && return + + FAILED_DAEMONS=0 + for fname in api master minion syndic; do + # Skip salt-api since the service should be opt-in and not necessarily started on boot + [ $fname = "api" ] && continue + + # Skip if not meant to be installed + [ $fname = "minion" ] && [ "$_INSTALL_MINION" -eq $BS_FALSE ] && continue + [ $fname = "master" ] && [ "$_INSTALL_MASTER" -eq $BS_FALSE ] && continue + + # shellcheck disable=SC2009 + if [ "$(ps wwwaux | grep -v grep | grep salt-$fname)" = "" ]; then + echoerror "salt-$fname was not found running" + FAILED_DAEMONS=$((FAILED_DAEMONS + 1)) + fi + done + + return $FAILED_DAEMONS +} + +# +# Ended Alpine Linux Install Functions +# +####################################################################################################################### + + ####################################################################################################################### # # Amazon Linux AMI Install Functions # install_amazon_linux_ami_deps() { + # Shim to figure out if we're using old (rhel) or new (aws) rpms. + _USEAWS=$BS_FALSE + + repo_rev="$(echo "${STABLE_REV}" | sed 's|.*\/||g')" + + if echo "$repo_rev" | egrep -q '^(latest|2016\.11)$'; then + _USEAWS=$BS_TRUE + elif echo "$repo_rev" | egrep -q '^[0-9]+$' && [ "$(echo "$repo_rev" | cut -c1-4)" -gt 2016 ]; then + _USEAWS=$BS_TRUE + fi # We need to install yum-utils before doing anything else when installing on # Amazon Linux ECS-optimized images. See issue #974. @@ -4183,15 +4487,31 @@ install_amazon_linux_ami_deps() { __REPO_FILENAME="saltstack-repo.repo" + # Set a few vars to make life easier. 
+ if [ $_USEAWS -eq $BS_TRUE ]; then + base_url="$HTTP_VAL://repo.saltstack.com/yum/amazon/latest/\$basearch/$repo_rev/" + gpg_key="${base_url}SALTSTACK-GPG-KEY.pub" + repo_name="SaltStack repo for Amazon Linux" + pkg_append="python27" + else + base_url="$HTTP_VAL://repo.saltstack.com/yum/redhat/6/\$basearch/$repo_rev/" + gpg_key="${base_url}SALTSTACK-GPG-KEY.pub" + repo_name="SaltStack repo for RHEL/CentOS 6" + pkg_append="python" + fi + + # This should prob be refactored to use __install_saltstack_rhel_repository() + # With args passed in to do the right thing. Reformatted to be more like the + # amazon linux yum file. if [ ! -s "/etc/yum.repos.d/${__REPO_FILENAME}" ]; then cat <<_eof > "/etc/yum.repos.d/${__REPO_FILENAME}" [saltstack-repo] -disabled=False -name=SaltStack repo for RHEL/CentOS 6 +name=$repo_name +failovermethod=priority +priority=10 gpgcheck=1 -gpgkey=$HTTP_VAL://repo.saltstack.com/yum/redhat/6/\$basearch/$STABLE_REV/SALTSTACK-GPG-KEY.pub -baseurl=$HTTP_VAL://repo.saltstack.com/yum/redhat/6/\$basearch/$STABLE_REV/ -humanname=SaltStack repo for RHEL/CentOS 6 +gpgkey=$gpg_key +baseurl=$base_url _eof fi @@ -4199,8 +4519,9 @@ _eof yum -y update || return 1 fi fi - - __PACKAGES="PyYAML python-crypto python-msgpack python-zmq python26-ordereddict python-jinja2 python-requests" + #ordereddict removed. + #Package python-ordereddict-1.1-2.el6.noarch is obsoleted by python26-2.6.9-2.88.amzn1.x86_64 which is already installed + __PACKAGES="${pkg_append}-PyYAML ${pkg_append}-crypto ${pkg_append}-msgpack ${pkg_append}-zmq ${pkg_append}-jinja2 ${pkg_append}-requests" # shellcheck disable=SC2086 yum -y install ${__PACKAGES} ${ENABLE_EPEL_CMD} || return 1 @@ -4217,6 +4538,15 @@ install_amazon_linux_ami_git_deps() { yum -y install ca-certificates || return 1 fi + PIP_EXE='pip' + if __check_command_exists python2.7; then + if ! 
__check_command_exists pip2.7; then + /usr/bin/easy_install-2.7 pip || return 1 + fi + PIP_EXE='/usr/local/bin/pip2.7' + _PY_EXE='python2.7' + fi + install_amazon_linux_ami_deps || return 1 ENABLE_EPEL_CMD="" @@ -4243,7 +4573,7 @@ install_amazon_linux_ami_git_deps() { # We're on the develop branch, install whichever tornado is on the requirements file __REQUIRED_TORNADO="$(grep tornado "${_SALT_GIT_CHECKOUT_DIR}/requirements/base.txt")" if [ "${__REQUIRED_TORNADO}" != "" ]; then - __PACKAGES="${__PACKAGES} python-tornado" + __PACKAGES="${__PACKAGES} ${pkg_append}-tornado" fi fi @@ -4254,7 +4584,7 @@ install_amazon_linux_ami_git_deps() { if [ "${__PIP_PACKAGES}" != "" ]; then # shellcheck disable=SC2086 - pip-python install ${__PIP_PACKAGES} || return 1 + ${PIP_EXE} install ${__PIP_PACKAGES} || return 1 fi # Let's trigger config_salt() @@ -4314,29 +4644,29 @@ install_arch_linux_stable_deps() { pacman-key --init && pacman-key --populate archlinux || return 1 fi - pacman -Sy --noconfirm --needed archlinux-keyring || return 1 + # Pacman does not resolve dependencies on outdated versions + # They always need to be updated + pacman -Syy --noconfirm - pacman -Sy --noconfirm --needed pacman || return 1 + pacman -S --noconfirm --needed archlinux-keyring || return 1 + + pacman -Su --noconfirm --needed pacman || return 1 if __check_command_exists pacman-db-upgrade; then pacman-db-upgrade || return 1 fi # YAML module is used for generating custom master/minion configs - pacman -Sy --noconfirm --needed python2-yaml - - if [ "$_UPGRADE_SYS" -eq $BS_TRUE ]; then - pacman -Syyu --noconfirm --needed || return 1 - fi + pacman -Su --noconfirm --needed python2-yaml if [ "$_INSTALL_CLOUD" -eq $BS_TRUE ]; then - pacman -Sy --noconfirm --needed apache-libcloud || return 1 + pacman -Su --noconfirm --needed apache-libcloud || return 1 fi if [ "${_EXTRA_PACKAGES}" != "" ]; then echoinfo "Installing the following extra packages as requested: ${_EXTRA_PACKAGES}" # shellcheck disable=SC2086 - pacman -Sy --noconfirm --needed ${_EXTRA_PACKAGES} || return 1 + pacman -Su --noconfirm --needed ${_EXTRA_PACKAGES} || return 1 fi } @@ -4348,7 +4678,7 @@ install_arch_linux_git_deps() { pacman -Sy --noconfirm --needed git || return 1 fi pacman -R --noconfirm python2-distribute - pacman -Sy --noconfirm --needed python2-crypto python2-setuptools python2-jinja \ + pacman -Su --noconfirm --needed python2-crypto python2-setuptools python2-jinja \ python2-markupsafe python2-msgpack python2-psutil \ python2-pyzmq zeromq python2-requests python2-systemd || return 1 @@ -4358,7 +4688,7 @@ install_arch_linux_git_deps() { # We're on the develop branch, install whichever tornado is on the requirements file __REQUIRED_TORNADO="$(grep tornado "${_SALT_GIT_CHECKOUT_DIR}/requirements/base.txt")" if [ "${__REQUIRED_TORNADO}" != "" ]; then - pacman -Sy --noconfirm --needed python2-tornado + pacman -Su --noconfirm --needed python2-tornado fi fi @@ -4373,7 +4703,11 @@ install_arch_linux_git_deps() { } install_arch_linux_stable() { - pacman -Sy --noconfirm --needed pacman || return 1 + # Pacman does not resolve dependencies on outdated versions + # They always need to be updated + pacman -Syy --noconfirm + + pacman -Su --noconfirm --needed pacman || return 1 # See https://mailman.archlinux.org/pipermail/arch-dev-public/2013-June/025043.html # to know why we're ignoring below. 
pacman -Syu --noconfirm --ignore filesystem,bash || return 1 @@ -4575,6 +4909,8 @@ __configure_freebsd_pkg_details() { ## ensure future ports builds use pkgng echo "WITH_PKGNG= yes" >> /etc/make.conf + + /usr/local/sbin/pkg update -f || return 1 } install_freebsd_9_stable_deps() { @@ -4630,6 +4966,11 @@ install_freebsd_11_stable_deps() { install_freebsd_git_deps() { install_freebsd_9_stable_deps || return 1 + # shellcheck disable=SC2086 + SALT_DEPENDENCIES=$(/usr/local/sbin/pkg search ${FROM_FREEBSD} -R -d sysutils/py-salt | grep -i origin | sed -e 's/^[[:space:]]*//' | tail -n +2 | awk -F\" '{print $2}' | tr '\n' ' ') + # shellcheck disable=SC2086 + /usr/local/sbin/pkg install ${FROM_FREEBSD} -y ${SALT_DEPENDENCIES} || return 1 + if ! __check_command_exists git; then /usr/local/sbin/pkg install -y git || return 1 fi @@ -4691,7 +5032,9 @@ install_freebsd_9_stable() { } install_freebsd_10_stable() { - install_freebsd_9_stable + # shellcheck disable=SC2086 + /usr/local/sbin/pkg install ${FROM_FREEBSD} -y sysutils/py-salt || return 1 + return 0 } install_freebsd_11_stable() { @@ -4705,40 +5048,37 @@ install_freebsd_11_stable() { } install_freebsd_git() { - # shellcheck disable=SC2086 - /usr/local/sbin/pkg install ${FROM_SALTSTACK} -y sysutils/py-salt || return 1 - # Let's keep the rc.d files before deleting the package - mkdir /tmp/rc-scripts || return 1 - cp /usr/local/etc/rc.d/salt* /tmp/rc-scripts || return 1 - - # Let's delete the package - /usr/local/sbin/pkg delete -y sysutils/py-salt || return 1 + # /usr/local/bin/python2 in FreeBSD is a symlink to /usr/local/bin/python2.7 + __PYTHON_PATH=$(readlink -f "$(which python2)") + __ESCAPED_PYTHON_PATH=$(echo "${__PYTHON_PATH}" | sed 's/\//\\\//g') # Install from git if [ ! -f salt/syspaths.py ]; then # We still can't provide the system paths, salt 0.16.x - /usr/local/bin/python2 setup.py ${SETUP_PY_INSTALL_ARGS} install || return 1 + ${__PYTHON_PATH} setup.py ${SETUP_PY_INSTALL_ARGS} install || return 1 else - /usr/local/bin/python2 setup.py \ - --salt-root-dir=/usr/local \ + ${__PYTHON_PATH} setup.py \ + --salt-root-dir=/ \ --salt-config-dir="${_SALT_ETC_DIR}" \ --salt-cache-dir="${_SALT_CACHE_DIR}" \ --salt-sock-dir=/var/run/salt \ - --salt-srv-root-dir=/srv \ + --salt-srv-root-dir="${_SALT_ETC_DIR}" \ --salt-base-file-roots-dir="${_SALT_ETC_DIR}/states" \ --salt-base-pillar-roots-dir="${_SALT_ETC_DIR}/pillar" \ --salt-base-master-roots-dir="${_SALT_ETC_DIR}/salt-master" \ --salt-logs-dir=/var/log/salt \ - --salt-pidfile-dir=/var/run ${SETUP_PY_INSTALL_ARGS} install \ + --salt-pidfile-dir=/var/run \ + ${SETUP_PY_INSTALL_ARGS} install \ || return 1 fi - # Restore the rc.d scripts - cp /tmp/rc-scripts/salt* /usr/local/etc/rc.d/ || return 1 - - # Delete our temporary scripts directory - rm -rf /tmp/rc-scripts || return 1 + for script in salt_api salt_master salt_minion salt_proxy salt_syndic; do + __fetch_url "/usr/local/etc/rc.d/${script}" "https://raw.githubusercontent.com/freebsd/freebsd-ports/master/sysutils/py-salt/files/${script}.in" || return 1 + sed -i '' 's/%%PREFIX%%/\/usr\/local/g' /usr/local/etc/rc.d/${script} + sed -i '' "s/%%PYTHON_CMD%%/${__ESCAPED_PYTHON_PATH}/g" /usr/local/etc/rc.d/${script} + chmod +x /usr/local/etc/rc.d/${script} || return 1 + done # And we're good to go return 0 @@ -4774,6 +5114,9 @@ install_freebsd_11_stable_post() { } install_freebsd_git_post() { + if [ -f $salt_conf_file ]; then + rm -f $salt_conf_file + fi install_freebsd_9_stable_post || return 1 return 0 } @@ -4807,7 +5150,7 @@ 
install_freebsd_restart_daemons() { __choose_openbsd_mirror() { OPENBSD_REPO='' MINTIME='' - MIRROR_LIST=$(awk -F= '/installpath = / {print $2}' /etc/examples/pkg.conf) + MIRROR_LIST=$(ftp -w 15 -Vao - 'http://ftp.openbsd.org/cgi-bin/ftplist.cgi?dbversion=1' | awk '/^http/ {print $1}') for MIRROR in $MIRROR_LIST; do MIRROR_HOST=$(echo "$MIRROR" | sed -e 's|.*//||' -e 's|+*/.*$||') @@ -4831,7 +5174,7 @@ install_openbsd_deps() { __choose_openbsd_mirror || return 1 echoinfo "setting package repository to $OPENBSD_REPO with ping time of $MINTIME" [ -n "$OPENBSD_REPO" ] || return 1 - echo "installpath += ${OPENBSD_REPO}" >>/etc/pkg.conf || return 1 + echo "${OPENBSD_REPO}" >>/etc/installurl || return 1 if [ "${_EXTRA_PACKAGES}" != "" ]; then echoinfo "Installing the following extra packages as requested: ${_EXTRA_PACKAGES}" @@ -5557,7 +5900,6 @@ install_suse_12_stable_post() { # shellcheck disable=SC2086 curl $_CURL_ARGS -L "https://github.com/saltstack/salt/raw/develop/pkg/salt-$fname.service" \ -o "/usr/lib/systemd/system/salt-$fname.service" || return 1 - continue fi # Skip salt-api since the service should be opt-in and not necessarily started on boot @@ -5951,6 +6293,84 @@ install_gentoo_check_services() { # ####################################################################################################################### +####################################################################################################################### +# +# VoidLinux Install Functions +# +install_voidlinux_stable_deps() { + if [ "$_UPGRADE_SYS" -eq $BS_TRUE ]; then + xbps-install -Suy || return 1 + fi + + if [ "${_EXTRA_PACKAGES}" != "" ]; then + echoinfo "Installing the following extra packages as requested: ${_EXTRA_PACKAGES}" + xbps-install -Suy "${_EXTRA_PACKAGES}" || return 1 + fi + + return 0 +} + +install_voidlinux_stable() { + xbps-install -Suy salt || return 1 + return 0 +} + +install_voidlinux_stable_post() { + for fname in master minion syndic; do + [ $fname = "master" ] && [ "$_INSTALL_MASTER" -eq $BS_FALSE ] && continue + [ $fname = "minion" ] && [ "$_INSTALL_MINION" -eq $BS_FALSE ] && continue + [ $fname = "syndic" ] && [ "$_INSTALL_SYNDIC" -eq $BS_FALSE ] && continue + + ln -s /etc/sv/salt-$fname /var/service/. 
+ done +} + +install_voidlinux_restart_daemons() { + [ $_START_DAEMONS -eq $BS_FALSE ] && return + + for fname in master minion syndic; do + [ $fname = "master" ] && [ "$_INSTALL_MASTER" -eq $BS_FALSE ] && continue + [ $fname = "minion" ] && [ "$_INSTALL_MINION" -eq $BS_FALSE ] && continue + [ $fname = "syndic" ] && [ "$_INSTALL_SYNDIC" -eq $BS_FALSE ] && continue + + sv restart salt-$fname + done +} + +install_voidlinux_check_services() { + for fname in master minion syndic; do + [ $fname = "master" ] && [ "$_INSTALL_MASTER" -eq $BS_FALSE ] && continue + [ $fname = "minion" ] && [ "$_INSTALL_MINION" -eq $BS_FALSE ] && continue + [ $fname = "syndic" ] && [ "$_INSTALL_SYNDIC" -eq $BS_FALSE ] && continue + + [ -e /var/service/salt-$fname ] || return 1 + done + + return 0 +} + +daemons_running_voidlinux() { + [ "$_START_DAEMONS" -eq $BS_FALSE ] && return 0 + + FAILED_DAEMONS=0 + for fname in master minion syndic; do + [ $fname = "minion" ] && [ "$_INSTALL_MINION" -eq $BS_FALSE ] && continue + [ $fname = "master" ] && [ "$_INSTALL_MASTER" -eq $BS_FALSE ] && continue + [ $fname = "syndic" ] && [ "$_INSTALL_SYNDIC" -eq $BS_FALSE ] && continue + + if [ "$(sv status salt-$fname | grep run)" = "" ]; then + echoerror "salt-$fname was not found running" + FAILED_DAEMONS=$((FAILED_DAEMONS + 1)) + fi + done + + return $FAILED_DAEMONS +} +# +# Ended VoidLinux Install Functions +# +####################################################################################################################### + ####################################################################################################################### # # Default minion configuration function. Matches ANY distribution as long as diff --git a/salt/config/__init__.py b/salt/config/__init__.py index a5371419669..22ff64c1753 100644 --- a/salt/config/__init__.py +++ b/salt/config/__init__.py @@ -2070,7 +2070,7 @@ def prepend_root_dir(opts, path_options): # Remove relative root dir so we can add the absolute root dir path = path[len(root_opt):] elif os.path.isabs(path_option): - # Absolute path (not default or overriden root_dir) + # Absolute path (not default or overridden root_dir) # No prepending required continue # Prepending the root dir @@ -2422,7 +2422,7 @@ def cloud_config(path, env_var='SALT_CLOUD_CONFIG', defaults=None, elif master_config_path is not None and master_config is None: master_config = salt.config.master_config(master_config_path) - # cloud config has a seperate cachedir + # cloud config has a separate cachedir del master_config['cachedir'] # 2nd - salt-cloud configuration which was loaded before so we could diff --git a/salt/daemons/flo/worker.py b/salt/daemons/flo/worker.py index 5a20a279b57..fba2e7e632d 100644 --- a/salt/daemons/flo/worker.py +++ b/salt/daemons/flo/worker.py @@ -65,7 +65,7 @@ def worker_fork(self): class Worker(multiprocessing.Process): ''' - Create an ioflo worker in a seperate process + Create an ioflo worker in a separate process ''' def __init__(self, opts, windex, worker_verify, access_keys, mkey, aes): super(Worker, self).__init__() diff --git a/salt/modules/acme.py b/salt/modules/acme.py index 742bd5f0ecb..b07fab81971 100644 --- a/salt/modules/acme.py +++ b/salt/modules/acme.py @@ -281,7 +281,7 @@ def renew_by(name, window=None): def needs_renewal(name, window=None): ''' - Check if a certicate needs renewal + Check if a certificate needs renewal :param name: CommonName of cert :param window: Window in days to renew earlier or True/force to just return True diff --git a/salt/modules/archive.py 
b/salt/modules/archive.py index e3a89a436d7..7614ac3c7a5 100644 --- a/salt/modules/archive.py +++ b/salt/modules/archive.py @@ -994,7 +994,7 @@ def unzip(zip_file, extract_perms : True The Python zipfile_ module does not extract file/directory attributes by default. When this argument is set to ``True``, Salt will attempt to - apply the file permision attributes to the extracted files/folders. + apply the file permission attributes to the extracted files/folders. On Windows, only the read-only flag will be extracted as set within the zip file, other attributes (i.e. user/group permissions) are ignored. diff --git a/salt/modules/boto_secgroup.py b/salt/modules/boto_secgroup.py index 3adec7dfdd0..8cfba94e626 100644 --- a/salt/modules/boto_secgroup.py +++ b/salt/modules/boto_secgroup.py @@ -597,7 +597,7 @@ def set_tags(tags, a dict of key:value pair of tags to set on the security group name - the name of the security gruop + the name of the security group group_id the group id of the security group (in lie of a name/vpc combo) diff --git a/salt/modules/ceph.py b/salt/modules/ceph.py index 68921034be4..797728041ae 100644 --- a/salt/modules/ceph.py +++ b/salt/modules/ceph.py @@ -143,7 +143,7 @@ def osd_prepare(**kwargs): Notes: cluster_uuid - Set the deivce to store the osd data on. + Set the device to store the osd data on. journal_dev Set the journal device. defaults to osd_dev. @@ -194,7 +194,7 @@ def keyring_create(**kwargs): Notes: keyring_type - Required paramter + Required parameter Can be set to: admin, mon, osd, rgw, mds @@ -223,7 +223,7 @@ def keyring_save(**kwargs): Notes: keyring_type - Required paramter + Required parameter Can be set to: admin, mon, osd, rgw, mds @@ -251,7 +251,7 @@ def keyring_purge(**kwargs): Notes: keyring_type - Required paramter + Required parameter Can be set to: admin, mon, osd, rgw, mds @@ -281,7 +281,7 @@ def keyring_present(**kwargs): Notes: keyring_type - Required paramter + Required parameter Can be set to: admin, mon, osd, rgw, mds @@ -309,7 +309,7 @@ def keyring_auth_add(**kwargs): Notes: keyring_type - Required paramter + Required parameter Can be set to: admin, mon, osd, rgw, mds @@ -337,7 +337,7 @@ def keyring_auth_del(**kwargs): Notes: keyring_type - Required paramter + Required parameter Can be set to: admin, mon, osd, rgw, mds @@ -374,7 +374,7 @@ def mon_is(**kwargs): def mon_status(**kwargs): ''' - Get status from mon deamon + Get status from mon daemon CLI Example: @@ -396,7 +396,7 @@ def mon_status(**kwargs): def mon_quorum(**kwargs): ''' - Is mon deamon in quorum + Is mon daemon in quorum CLI Example: @@ -418,7 +418,7 @@ def mon_quorum(**kwargs): def mon_active(**kwargs): ''' - Is mon deamon running + Is mon daemon running CLI Example: @@ -518,7 +518,7 @@ def rgw_create(**kwargs): Notes: name: - Required paramter + Required parameter Set the rgw client name. Must start with 'rgw.' cluster_uuid @@ -546,7 +546,7 @@ def rgw_destroy(**kwargs): Notes: name: - Required paramter + Required parameter Set the rgw client name. Must start with 'rgw.' cluster_uuid @@ -576,15 +576,15 @@ def mds_create(**kwargs): Notes: name: - Required paramter + Required parameter Set the rgw client name. Must start with 'mds.' port: - Required paramter + Required parameter Port for the mds to listen to. addr: - Required paramter + Required parameter Address or IP address for the mds to listen to. cluster_uuid @@ -612,7 +612,7 @@ def mds_destroy(**kwargs): Notes: name: - Required paramter + Required parameter Set the rgw client name. Must start with 'mds.' 
cluster_uuid diff --git a/salt/modules/cisconso.py b/salt/modules/cisconso.py index 77d85fe97cc..a6d1f2b0d9f 100644 --- a/salt/modules/cisconso.py +++ b/salt/modules/cisconso.py @@ -43,7 +43,7 @@ def get_data(datastore, path): :type datastore: :class:`DatastoreType` (``str`` enum). :param path: The device path to set the value at, - a list of element names in order, / seperated + a list of element names in order, / separated :type path: ``list``, ``str`` OR ``tuple`` :return: The network configuration at that tree @@ -67,7 +67,7 @@ def set_data_value(datastore, path, data): :type datastore: :class:`DatastoreType` (``str`` enum). :param path: The device path to set the value at, - a list of element names in order, / seperated + a list of element names in order, / separated :type path: ``list``, ``str`` OR ``tuple`` :param data: The new value at the given path diff --git a/salt/modules/debian_ip.py b/salt/modules/debian_ip.py index 8610d811bef..16870cc6ca1 100644 --- a/salt/modules/debian_ip.py +++ b/salt/modules/debian_ip.py @@ -1581,13 +1581,13 @@ def _write_file_ifaces(iface, data, **settings): if adapter == iface: saved_ifcfg = tmp - _SEPERATE_FILE = False + _SEPARATE_FILE = False if 'filename' in settings: if not settings['filename'].startswith('/'): filename = '{0}/{1}'.format(_DEB_NETWORK_DIR, settings['filename']) else: filename = settings['filename'] - _SEPERATE_FILE = True + _SEPARATE_FILE = True else: if 'filename' in adapters[adapter]['data']: filename = adapters[adapter]['data'] @@ -1600,7 +1600,7 @@ def _write_file_ifaces(iface, data, **settings): log.error(msg) raise AttributeError(msg) with salt.utils.flopen(filename, 'w') as fout: - if _SEPERATE_FILE: + if _SEPARATE_FILE: fout.write(saved_ifcfg) else: fout.write(ifcfg) diff --git a/salt/modules/file.py b/salt/modules/file.py index b0b902b7588..9c6e34b956a 100644 --- a/salt/modules/file.py +++ b/salt/modules/file.py @@ -594,7 +594,7 @@ def get_source_sum(file_name='', file, used to disambiguate ambiguous matches. saltenv : base - Salt fileserver environment from which to retrive the source_hash. This + Salt fileserver environment from which to retrieve the source_hash. This value will only be used when ``source_hash`` refers to a file on the Salt fileserver (i.e. one beginning with ``salt://``). @@ -4663,7 +4663,7 @@ def manage_file(name, .. note:: keep_mode does not work with salt-ssh. 
- As a consequence of how the files are transfered to the minion, and + As a consequence of how the files are transferred to the minion, and the inability to connect back to the master with salt-ssh, salt is unable to stat the file as it exists on the fileserver and thus cannot mirror the mode on the salt-ssh minion diff --git a/salt/modules/localemod.py b/salt/modules/localemod.py index 9de56973c06..3a9d0ec36a6 100644 --- a/salt/modules/localemod.py +++ b/salt/modules/localemod.py @@ -127,13 +127,14 @@ def get_locale(): salt '*' locale.get_locale ''' cmd = '' - if salt.utils.systemd.booted(__context__): + if 'Suse' in __grains__['os_family']: + # this block applies to all SUSE systems - also with systemd + cmd = 'grep "^RC_LANG" /etc/sysconfig/language' + elif salt.utils.systemd.booted(__context__): params = _parse_dbus_locale() if HAS_DBUS else _parse_localectl() return params.get('LANG', '') elif 'RedHat' in __grains__['os_family']: cmd = 'grep "^LANG=" /etc/sysconfig/i18n' - elif 'Suse' in __grains__['os_family']: - cmd = 'grep "^RC_LANG" /etc/sysconfig/language' elif 'Debian' in __grains__['os_family']: # this block only applies to Debian without systemd cmd = 'grep "^LANG=" /etc/default/locale' @@ -161,7 +162,17 @@ def set_locale(locale): salt '*' locale.set_locale 'en_US.UTF-8' ''' - if salt.utils.systemd.booted(__context__): + if 'Suse' in __grains__['os_family']: + # this block applies to all SUSE systems - also with systemd + if not __salt__['file.file_exists']('/etc/sysconfig/language'): + __salt__['file.touch']('/etc/sysconfig/language') + __salt__['file.replace']( + '/etc/sysconfig/language', + '^RC_LANG=.*', + 'RC_LANG="{0}"'.format(locale), + append_if_not_found=True + ) + elif salt.utils.systemd.booted(__context__): return _localectl_set(locale) elif 'RedHat' in __grains__['os_family']: if not __salt__['file.file_exists']('/etc/sysconfig/i18n'): @@ -172,15 +183,6 @@ def set_locale(locale): 'LANG="{0}"'.format(locale), append_if_not_found=True ) - elif 'Suse' in __grains__['os_family']: - if not __salt__['file.file_exists']('/etc/sysconfig/language'): - __salt__['file.touch']('/etc/sysconfig/language') - __salt__['file.replace']( - '/etc/sysconfig/language', - '^RC_LANG=.*', - 'RC_LANG="{0}"'.format(locale), - append_if_not_found=True - ) elif 'Debian' in __grains__['os_family']: # this block only applies to Debian without systemd update_locale = salt.utils.which('update-locale') diff --git a/salt/modules/lxc.py b/salt/modules/lxc.py index b59cdfee303..0e369e5cc61 100644 --- a/salt/modules/lxc.py +++ b/salt/modules/lxc.py @@ -503,7 +503,7 @@ def cloud_init_interface(name, vm_=None, **kwargs): # via the legacy salt cloud configuration style. # On other cases, we should rely on settings provided by the new # salt lxc network profile style configuration which can - # be also be overriden or a per interface basis via the nic_opts dict. + # be also be overridden or a per interface basis via the nic_opts dict. 
if bridge: eth0['link'] = bridge if gateway: diff --git a/salt/modules/napalm_network.py b/salt/modules/napalm_network.py index c3999185289..b9f4dff30d9 100644 --- a/salt/modules/napalm_network.py +++ b/salt/modules/napalm_network.py @@ -577,7 +577,7 @@ def ipaddrs(**kwargs): # pylint: disable=unused-argument Returns all configured IP addresses on all interfaces as a dictionary of dictionaries.\ Keys of the main dictionary represent the name of the interface.\ Values of the main dictionary represent are dictionaries that may consist of two keys\ - 'ipv4' and 'ipv6' (one, both or none) which are themselvs dictionaries witht the IP addresses as keys.\ + 'ipv4' and 'ipv6' (one, both or none) which are themselvs dictionaries with the IP addresses as keys.\ CLI Example: @@ -929,7 +929,7 @@ def load_config(filename=None, To avoid committing the configuration, set the argument ``test`` to ``True`` and will discard (dry run). - To keep the chnages but not commit, set ``commit`` to ``False``. + To keep the changes but not commit, set ``commit`` to ``False``. To replace the config, set ``replace`` to ``True``. @@ -947,7 +947,7 @@ def load_config(filename=None, Commit? Default: ``True``. debug: False - Debug mode. Will insert a new key under the output dictionary, as ``loaded_config`` contaning the raw + Debug mode. Will insert a new key under the output dictionary, as ``loaded_config`` containing the raw configuration loaded on the device. .. versionadded:: 2016.11.2 @@ -1050,7 +1050,7 @@ def load_template(template_name, To avoid committing the configuration, set the argument ``test`` to ``True`` and will discard (dry run). - To preserve the chnages, set ``commit`` to ``False``. + To preserve the changes, set ``commit`` to ``False``. However, this is recommended to be used only in exceptional cases when there are applied few consecutive states and/or configuration changes. @@ -1074,7 +1074,7 @@ def load_template(template_name, Placing the template under ``/etc/salt/states/templates/example.jinja``, it can be used as ``salt://templates/example.jinja``. - Alternatively, for local files, the user can specify the abolute path. + Alternatively, for local files, the user can specify the absolute path. If remotely, the source can be retrieved via ``http``, ``https`` or ``ftp``. Examples: @@ -1156,7 +1156,7 @@ def load_template(template_name, debug: False Debug mode. Will insert a new key under the output dictionary, - as ``loaded_config`` contaning the raw result after the template was rendered. + as ``loaded_config`` containing the raw result after the template was rendered. .. versionadded:: 2016.11.2 @@ -1175,7 +1175,7 @@ def load_template(template_name, .. note:: - Do not explicitely specify this argument. + Do not explicitly specify this argument. This represents any other variable that will be sent to the template rendering system. Please see the examples below! @@ -1314,7 +1314,7 @@ def load_template(template_name, if template_path and not file_exists: template_name = __salt__['file.join'](template_path, template_name) if not saltenv: - # no saltenv overriden + # no saltenv overridden # use the custom template path saltenv = template_path if not salt_render else 'base' elif salt_render and not saltenv: @@ -1539,8 +1539,8 @@ def config_control(inherit_napalm_device=None, **kwargs): # pylint: disable=unu If differences found, will try to commit. In case commit unsuccessful, will try to rollback. 
- :return: A tuple with a boolean that specifies if the config was changed/commited/rollbacked on the device.\ - And a string that provides more details of the reason why the configuration was not commited properly. + :return: A tuple with a boolean that specifies if the config was changed/committed/rollbacked on the device.\ + And a string that provides more details of the reason why the configuration was not committed properly. CLI Example: diff --git a/salt/modules/napalm_ntp.py b/salt/modules/napalm_ntp.py index e4722a8e81f..8c16e92ef72 100644 --- a/salt/modules/napalm_ntp.py +++ b/salt/modules/napalm_ntp.py @@ -213,7 +213,7 @@ def set_peers(*peers, **options): :commit commit (bool): commit loaded config. By default `commit` is True (will commit the changes). Useful when the user does not want to commit after each change, but after a couple. - By default this function will commit the config changes (if any). To load without commiting, use the `commit` + By default this function will commit the config changes (if any). To load without committing, use the `commit` option. For dry run use the `test` argument. CLI Example: @@ -246,7 +246,7 @@ def set_servers(*servers, **options): :commit commit (bool): commit loaded config. By default `commit` is True (will commit the changes). Useful when the user does not want to commit after each change, but after a couple. - By default this function will commit the config changes (if any). To load without commiting, use the `commit` + By default this function will commit the config changes (if any). To load without committing, use the `commit` option. For dry run use the `test` argument. CLI Example: @@ -279,7 +279,7 @@ def delete_peers(*peers, **options): :commit commit (bool): commit loaded config. By default `commit` is True (will commit the changes). Useful when the user does not want to commit after each change, but after a couple. - By default this function will commit the config changes (if any). To load without commiting, use the `commit` + By default this function will commit the config changes (if any). To load without committing, use the `commit` option. For dry run use the `test` argument. CLI Example: @@ -312,7 +312,7 @@ def delete_servers(*servers, **options): :commit commit (bool): commit loaded config. By default `commit` is True (will commit the changes). Useful when the user does not want to commit after each change, but after a couple. - By default this function will commit the config changes (if any). To load without commiting, use the `commit` + By default this function will commit the config changes (if any). To load without committing, use the `commit` option. For dry run use the `test` argument. CLI Example: diff --git a/salt/modules/parallels.py b/salt/modules/parallels.py index 6bca7ba5b5c..c72b61ceefe 100644 --- a/salt/modules/parallels.py +++ b/salt/modules/parallels.py @@ -7,7 +7,7 @@ see the `Parallels Desktop Reference Guide `_. What has not been implemented yet can be accessed through ``parallels.prlctl`` -and ``parallels.prlsrvctl`` (note the preceeding double dash ``--`` as +and ``parallels.prlsrvctl`` (note the preceding double dash ``--`` as necessary): .. code-block:: diff --git a/salt/modules/ps.py b/salt/modules/ps.py index 1a4a7afa530..48087b11fc8 100644 --- a/salt/modules/ps.py +++ b/salt/modules/ps.py @@ -640,7 +640,7 @@ def get_users(): def lsof(name): ''' - Retrieve the lsof informations of the given process name. + Retrieve the lsof information of the given process name. 
CLI Example: @@ -657,7 +657,7 @@ def lsof(name): def netstat(name): ''' - Retrieve the netstat informations of the given process name. + Retrieve the netstat information of the given process name. CLI Example: diff --git a/salt/modules/reg.py b/salt/modules/reg.py index dc336e7a5f7..ba827f8c58a 100644 --- a/salt/modules/reg.py +++ b/salt/modules/reg.py @@ -476,9 +476,9 @@ def set_value(hive, under the key. If not passed, the key (Default) value will be set. :param object vdata: The value data to be set. - What the type of this paramater + What the type of this parameter should be is determined by the value of the vtype - paramater. The correspondence + parameter. The correspondence is as follows: .. glossary:: @@ -495,15 +495,15 @@ def set_value(hive, str :param str vtype: The value type. - The possible values of the vtype paramater are indicated - above in the description of the vdata paramater. + The possible values of the vtype parameter are indicated + above in the description of the vdata parameter. :param bool use_32bit_registry: Sets the 32bit portion of the registry on 64bit installations. On 32bit machines this is ignored. - :param bool volatile: When this paramater has a value of True, the registry key will be + :param bool volatile: When this parameter has a value of True, the registry key will be made volatile (i.e. it will not persist beyond a system reset or shutdown). - This paramater only has an effect when a key is being created and at no + This parameter only has an effect when a key is being created and at no other time. :return: Returns True if successful, False if not diff --git a/salt/modules/runit.py b/salt/modules/runit.py index cbb39e47762..083ac86917a 100644 --- a/salt/modules/runit.py +++ b/salt/modules/runit.py @@ -647,7 +647,7 @@ def enable(name, start=False, **kwargs): def disable(name, stop=False, **kwargs): ''' Don't start service ``name`` at boot - Returns ``True`` if operation is successfull + Returns ``True`` if operation is successful name the service's name @@ -686,7 +686,7 @@ def disable(name, stop=False, **kwargs): def remove(name): ''' Remove the service from system. - Returns ``True`` if operation is successfull. + Returns ``True`` if operation is successful. The service will be also stopped. name diff --git a/salt/modules/slack_notify.py b/salt/modules/slack_notify.py index 59df51ed362..db819b22841 100644 --- a/salt/modules/slack_notify.py +++ b/salt/modules/slack_notify.py @@ -247,7 +247,7 @@ def call_hook(message, username=None, icon_emoji=None): ''' - Send message to Slack incomming webhook. + Send message to Slack incoming webhook. :param message: The topic of message. :param attachment: The message to send to the Slacke WebHook. @@ -258,7 +258,7 @@ def call_hook(message, :param channel: The channel to use instead of the WebHook default. :param username: Username to use instead of WebHook default. :param icon_emoji: Icon to use instead of WebHook default. - :return: Boolean if message was sent successfuly. + :return: Boolean if message was sent successfully. 
CLI Example: diff --git a/salt/modules/snapper.py b/salt/modules/snapper.py index 19982c3347c..06f8ed6eb28 100644 --- a/salt/modules/snapper.py +++ b/salt/modules/snapper.py @@ -491,7 +491,7 @@ def modify_snapshot(snapshot_id=None, snapshot = get_snapshot(config=config, number=snapshot_id) try: - # Updating only the explicitely provided attributes by the user + # Updating only the explicitly provided attributes by the user updated_opts = { 'description': description if description is not None else snapshot['description'], 'cleanup': cleanup if cleanup is not None else snapshot['cleanup'], @@ -669,7 +669,7 @@ def undo(config='root', files=None, num_pre=None, num_post=None): the files into the state of num_pre. .. warning:: - If one of the files has changes after num_post, they will be overwriten + If one of the files has changes after num_post, they will be overwritten The snapshots are used to determine the file list, but the current version of the files will be overwritten by the versions in num_pre. @@ -790,7 +790,7 @@ def diff(config='root', filename=None, num_pre=None, num_post=None): if filepath.startswith(SUBVOLUME): _filepath = filepath[len(SUBVOLUME):] - # Just in case, removing posible double '/' from the final file paths + # Just in case, removing possible double '/' from the final file paths pre_file = os.path.normpath(pre_mount + "/" + _filepath).replace("//", "/") post_file = os.path.normpath(post_mount + "/" + _filepath).replace("//", "/") diff --git a/salt/modules/uptime.py b/salt/modules/uptime.py index 50e415d3c8b..a652e5e13bd 100644 --- a/salt/modules/uptime.py +++ b/salt/modules/uptime.py @@ -56,7 +56,7 @@ def create(name, **params): raise CommandExecutionError( 'request to uptime failed : {0}'.format(req.reason) ) - log.debug('[uptime] PUT request successfull') + log.debug('[uptime] PUT request successful') return req.json()['_id'] @@ -83,7 +83,7 @@ def delete(name): raise CommandExecutionError( 'request to uptime failed : {0}'.format(req.reason) ) - log.debug('[uptime] DELETE request successfull') + log.debug('[uptime] DELETE request successful') return True diff --git a/salt/modules/win_pki.py b/salt/modules/win_pki.py index 2b2fc56da04..5b046b364a4 100644 --- a/salt/modules/win_pki.py +++ b/salt/modules/win_pki.py @@ -272,9 +272,9 @@ def import_cert(name, return False if password: - cert_props = get_cert_file(name=cached_source_path, password=password) + cert_props = get_cert_file(name=cached_source_path, cert_format=cert_format, password=password) else: - cert_props = get_cert_file(name=cached_source_path) + cert_props = get_cert_file(name=cached_source_path, cert_format=cert_format) current_certs = get_certs(context=context, store=store) diff --git a/salt/modules/x509.py b/salt/modules/x509.py index 94f937823cb..acd5d2b7bd3 100644 --- a/salt/modules/x509.py +++ b/salt/modules/x509.py @@ -881,7 +881,7 @@ def create_crl( # pylint: disable=too-many-arguments,too-many-locals represents one certificate. A dict must contain either the key ``serial_number`` with the value of the serial number to revoke, or ``certificate`` with either the PEM encoded text of the certificate, - or a path ot the certificate to revoke. + or a path to the certificate to revoke. The dict can optionally contain the ``revocation_date`` key. If this key is omitted the revocation date will be set to now. 
If should be a diff --git a/salt/modules/xbpspkg.py b/salt/modules/xbpspkg.py index 80c9affc6c5..6b4c9986ecd 100644 --- a/salt/modules/xbpspkg.py +++ b/salt/modules/xbpspkg.py @@ -431,7 +431,7 @@ def remove(name=None, pkgs=None, recursive=True, **kwargs): The name of the package to be deleted. recursive - Also remove dependant packages (not required elsewhere). + Also remove dependent packages (not required elsewhere). Default mode: enabled. Multiple Package Options: diff --git a/salt/modules/zabbix.py b/salt/modules/zabbix.py index 534ffed9ac9..ea99a018aac 100644 --- a/salt/modules/zabbix.py +++ b/salt/modules/zabbix.py @@ -441,7 +441,7 @@ def user_getmedia(userids=None, **connection_args): :param _connection_password: Optional - zabbix password (can also be set in opts or pillar, see module's docstring) :param _connection_url: Optional - url of zabbix frontend (can also be set in opts, pillar, see module's docstring) - :return: List of retreived media, False on failure. + :return: List of retrieved media, False on failure. CLI Example: .. code-block:: bash diff --git a/salt/netapi/rest_cherrypy/app.py b/salt/netapi/rest_cherrypy/app.py index 986f4a4887e..ec1798f2022 100644 --- a/salt/netapi/rest_cherrypy/app.py +++ b/salt/netapi/rest_cherrypy/app.py @@ -481,7 +481,6 @@ import json import os import signal import tarfile -import time from multiprocessing import Process, Pipe # Import third-party libs @@ -2411,7 +2410,6 @@ class WebsocketEndpoint(object): logger.error( "Error: Salt event has non UTF-8 data:\n{0}" .format(data)) - time.sleep(0.1) parent_pipe, child_pipe = Pipe() handler.pipe = parent_pipe diff --git a/salt/pillar/makostack.py b/salt/pillar/makostack.py index e1e6e8e99cf..f233dcfd215 100644 --- a/salt/pillar/makostack.py +++ b/salt/pillar/makostack.py @@ -58,7 +58,7 @@ You can also provide a list of config files: Select config files through grains|pillar|opts matching ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -You can also opt for a much more flexible configuration: MakoStack allows to +You can also opt for a much more flexible configuration: MakoStack allows one to select the config files for the current minion based on matching values from either grains, or pillar, or opts objects. diff --git a/salt/proxy/chronos.py b/salt/proxy/chronos.py index 57ac7def12f..3fbdc49ee89 100644 --- a/salt/proxy/chronos.py +++ b/salt/proxy/chronos.py @@ -62,7 +62,7 @@ def ping(): decode=True, ) log.debug( - 'chronos.info returned succesfully: %s', + 'chronos.info returned successfully: %s', response, ) if 'dict' in response: diff --git a/salt/proxy/cisconso.py b/salt/proxy/cisconso.py index e1e91d2951b..5b30452602b 100644 --- a/salt/proxy/cisconso.py +++ b/salt/proxy/cisconso.py @@ -270,7 +270,7 @@ def get_data(datastore, path): :type datastore: :class:`DatastoreType` (``str`` enum). :param path: The device path, a list of element names in order, - comma seperated + comma separated :type path: ``list`` of ``str`` OR ``tuple`` :return: The network configuration at that tree @@ -293,7 +293,7 @@ def set_data_value(datastore, path, data): :type datastore: :class:`DatastoreType` (``str`` enum). 
:param path: The device path to set the value at, - a list of element names in order, comma seperated + a list of element names in order, comma separated :type path: ``list`` of ``str`` OR ``tuple`` :param data: The new value at the given path diff --git a/salt/proxy/marathon.py b/salt/proxy/marathon.py index 1b43699c490..ae68f0106d1 100644 --- a/salt/proxy/marathon.py +++ b/salt/proxy/marathon.py @@ -62,7 +62,7 @@ def ping(): decode=True, ) log.debug( - 'marathon.info returned succesfully: %s', + 'marathon.info returned successfully: %s', response, ) if 'text' in response and response['text'].strip() == 'pong': diff --git a/salt/proxy/napalm.py b/salt/proxy/napalm.py index 4d51479ef36..8dfd7cdb3dc 100644 --- a/salt/proxy/napalm.py +++ b/salt/proxy/napalm.py @@ -245,7 +245,7 @@ def call(method, *args, **kwargs): * result (True/False): if the operation succeeded * out (object): returns the object as-is from the call * comment (string): provides more details in case the call failed - * traceback (string): complete traceback in case of exeception. Please submit an issue including this traceback + * traceback (string): complete traceback in case of exception. Please submit an issue including this traceback on the `correct driver repo`_ and make sure to read the FAQ_ .. _`correct driver repo`: https://github.com/napalm-automation/napalm/issues/new diff --git a/salt/sdb/confidant.py b/salt/sdb/confidant.py index f7275494b8c..f5d2124d29a 100644 --- a/salt/sdb/confidant.py +++ b/salt/sdb/confidant.py @@ -77,7 +77,7 @@ def get(key, profile=None): returns a dict of joined credential_pairs, credentials_metadata returns a dict of metadata relevant to the credentials mapped to the confidant service, and result returns a bool that can be used to determine if the sdb - call succeded or failed to fetch credentials from confidant (or from local + call succeeded or failed to fetch credentials from confidant (or from local cache). If result is false, the data in credentials or credentials_metadata can't be trusted. 
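The corrected confidant sdb docstring above says the call returns ``credentials``, ``credentials_metadata`` and a boolean ``result``, and that the first two cannot be trusted when ``result`` is false. A small hedged sketch of a caller honouring that contract (the helper name and example values are made up, not part of the module):

    def use_confidant_secret(ret):
        # ``ret`` has the shape the docstring describes: credentials,
        # credentials_metadata and a boolean result flag
        if not ret.get('result'):
            raise RuntimeError("confidant lookup failed; data can't be trusted")
        return ret['credentials']

    use_confidant_secret({'credentials': {'db_password': 'hunter2'},
                          'credentials_metadata': {}, 'result': True})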
''' diff --git a/salt/states/boto_apigateway.py b/salt/states/boto_apigateway.py index 465562e25f5..28adc200be4 100644 --- a/salt/states/boto_apigateway.py +++ b/salt/states/boto_apigateway.py @@ -116,7 +116,7 @@ def present(name, api_name, swagger_file, stage_name, api_key_required, The canconicalization of these input parameters is done in the following order: 1) lambda_funcname_format is formatted with the input parameters as passed, 2) resulting string is stripped for leading/trailing spaces, - 3) path paramter's curly braces are removed from the resource path, + 3) path parameter's curly braces are removed from the resource path, 4) consecutive spaces and forward slashes in the paths are replaced with '_' 5) consecutive '_' are replaced with '_' diff --git a/salt/states/boto_datapipeline.py b/salt/states/boto_datapipeline.py index 74133c41eba..85f97882cd7 100644 --- a/salt/states/boto_datapipeline.py +++ b/salt/states/boto_datapipeline.py @@ -322,11 +322,11 @@ def _pipeline_present_with_definition(name, expected_pipeline_objects, pipeline_objects = pipeline_definition.get('pipelineObjects') parameter_objects = pipeline_definition.get('parameterObjects') - paramater_values = pipeline_definition.get('parameterValues') + parameter_values = pipeline_definition.get('parameterValues') present = (_recursive_compare(_cleaned(pipeline_objects), _cleaned(expected_pipeline_objects)) and _recursive_compare(parameter_objects, expected_parameter_objects) and - _recursive_compare(paramater_values, expected_parameter_values)) + _recursive_compare(parameter_values, expected_parameter_values)) return present, pipeline_definition diff --git a/salt/states/boto_vpc.py b/salt/states/boto_vpc.py index f0c04bb2e71..de9f8bd6129 100644 --- a/salt/states/boto_vpc.py +++ b/salt/states/boto_vpc.py @@ -1735,10 +1735,10 @@ def delete_vpc_peering_connection(name, conn_id=None, conn_name=None, Name of the state conn_id - ID of the peering connection to delete. Exlusive with conn_name. + ID of the peering connection to delete. Exclusive with conn_name. conn_name - The name of the peering connection to delete. Exlusive with conn_id. + The name of the peering connection to delete. Exclusive with conn_id. region Region to connect to. diff --git a/salt/states/ceph.py b/salt/states/ceph.py index c79d4ac83a6..23194b19725 100644 --- a/salt/states/ceph.py +++ b/salt/states/ceph.py @@ -68,14 +68,14 @@ def quorum(name, **kwargs): - require: - sesceph: mon_running ''' - paramters = _ordereddict2dict(kwargs) - if paramters is None: - return _error(name, "Invalid paramters:%s") + parameters = _ordereddict2dict(kwargs) + if parameters is None: + return _error(name, "Invalid parameters:%s") if __opts__['test']: return _test(name, "cluster quorum") try: - cluster_quorum = __salt__['ceph.cluster_quorum'](**paramters) + cluster_quorum = __salt__['ceph.cluster_quorum'](**parameters) except (CommandExecutionError, CommandNotFoundError) as err: return _error(name, err.strerror) if cluster_quorum: diff --git a/salt/states/cisconso.py b/salt/states/cisconso.py index b23ec3b514b..eb26cfaf332 100644 --- a/salt/states/cisconso.py +++ b/salt/states/cisconso.py @@ -25,7 +25,7 @@ def value_present(name, datastore, path, config): :type datastore: :class:`DatastoreType` (``str`` enum). 
:param path: The device path to set the value at, - a list of element names in order, / seperated + a list of element names in order, / separated :type path: ``list``, ``str`` OR ``tuple`` :param config: The new value at the given path diff --git a/salt/states/file.py b/salt/states/file.py index 869a7ee70b3..c354579a9ef 100644 --- a/salt/states/file.py +++ b/salt/states/file.py @@ -1737,7 +1737,7 @@ def managed(name, .. note:: keep does not work with salt-ssh. - As a consequence of how the files are transfered to the minion, and + As a consequence of how the files are transferred to the minion, and the inability to connect back to the master with salt-ssh, salt is unable to stat the file as it exists on the fileserver and thus cannot mirror the mode on the salt-ssh minion @@ -1968,7 +1968,7 @@ def managed(name, tmp_ext Suffix for temp file created by ``check_cmd``. Useful for checkers - dependant on config file extension (e.g. the init-checkconf upstart + dependent on config file extension (e.g. the init-checkconf upstart config checker). .. code-block:: yaml diff --git a/salt/states/grafana_datasource.py b/salt/states/grafana_datasource.py index 124829d6228..a4d54ddb687 100644 --- a/salt/states/grafana_datasource.py +++ b/salt/states/grafana_datasource.py @@ -208,6 +208,8 @@ def _diff(old, new): for key in old_keys: if key == 'id' or key == 'orgId': del old[key] + elif key not in new.keys(): + del old[key] elif old[key] == new[key]: del old[key] del new[key] diff --git a/salt/states/nxos.py b/salt/states/nxos.py index 680dcda062c..dc813dfb3dd 100644 --- a/salt/states/nxos.py +++ b/salt/states/nxos.py @@ -279,7 +279,7 @@ def config_absent(name): .. note:: For certain cases extra lines could be removed based on dependencies. In this example, included after the example for config_present, the - ACLs would be removed because they depend on the existance of the + ACLs would be removed because they depend on the existence of the group. ''' diff --git a/salt/states/pkg.py b/salt/states/pkg.py index 688ed3c7a10..5a485407889 100644 --- a/salt/states/pkg.py +++ b/salt/states/pkg.py @@ -1233,7 +1233,7 @@ def installed( ``3010`` is the only recognized exit code, but this is subject to future refinement. The value of this param - defaults to ``True``. This paramater has no effect + defaults to ``True``. This parameter has no effect on non-Windows systems. .. versionadded:: 2016.11.0 @@ -1828,7 +1828,7 @@ def latest( for the remainder of the current boot session. For the time being, ``3010`` is the only recognized exit code, but this is subject to future refinement. The value of this param - defaults to ``True``. This paramater has no effect on + defaults to ``True``. This parameter has no effect on non-Windows systems. .. versionadded:: 2016.11.0 diff --git a/salt/states/probes.py b/salt/states/probes.py index 171cc4d55c8..18ba05b90da 100644 --- a/salt/states/probes.py +++ b/salt/states/probes.py @@ -75,7 +75,7 @@ def _default_ret(name): def _retrieve_rpm_probes(): ''' - Will retrive the probes from the network device using salt module "probes" throught NAPALM proxy. + Will retrieve the probes from the network device using salt module "probes" throught NAPALM proxy. ''' return __salt__['probes.config']() diff --git a/salt/states/rsync.py b/salt/states/rsync.py index a57f1c04f25..6bf03876d00 100644 --- a/salt/states/rsync.py +++ b/salt/states/rsync.py @@ -43,7 +43,7 @@ def __virtual__(): def _get_summary(rsync_out): ''' - Get summary from the rsync successfull output. 
+ Get summary from the rsync successful output. ''' return "- " + "\n- ".join([elm for elm in rsync_out.split("\n\n")[-1].replace(" ", "\n").split("\n") if elm]) @@ -51,7 +51,7 @@ def _get_summary(rsync_out): def _get_changes(rsync_out): ''' - Get changes from the rsync successfull output. + Get changes from the rsync successful output. ''' copied = list() deleted = list() diff --git a/salt/states/win_lgpo.py b/salt/states/win_lgpo.py index 684624edd6c..96e6ff729d1 100644 --- a/salt/states/win_lgpo.py +++ b/salt/states/win_lgpo.py @@ -44,7 +44,7 @@ Multiple policy configuration Minimum password age: 1 Minimum password length: 14 Password must meet complexity requirements: Enabled - Store passwords using reversible encrytion: Disabled + Store passwords using reversible encryption: Disabled Configure Automatic Updates: Configure automatic updating: 4 - Auto download and schedule the intsall Scheduled install day: 7 - Every Saturday diff --git a/salt/utils/__init__.py b/salt/utils/__init__.py index 65674fd5a52..b0f22628951 100644 --- a/salt/utils/__init__.py +++ b/salt/utils/__init__.py @@ -2271,7 +2271,7 @@ def namespaced_function(function, global_dict, defaults=None, preserve_context=F Redefine (clone) a function under a different globals() namespace scope preserve_context: - Allow to keep the context taken from orignal namespace, + Allow keeping the context taken from orignal namespace, and extend it with globals() taken from new targetted namespace. ''' diff --git a/salt/utils/extend.py b/salt/utils/extend.py index c66b4d9eae2..9a5885e50f1 100644 --- a/salt/utils/extend.py +++ b/salt/utils/extend.py @@ -227,7 +227,7 @@ def run(extension=None, name=None, description=None, salt_dir=None, merge=False, :param salt_dir: The targeted Salt source directory :type salt_dir: ``str`` - :param merge: Merge with salt directory, `False` to keep seperate, `True` to merge trees. + :param merge: Merge with salt directory, `False` to keep separate, `True` to merge trees. :type merge: ``bool`` :param temp_dir: The directory for generated code, if omitted, system temp will be used diff --git a/salt/utils/network.py b/salt/utils/network.py index 1c446b0da94..aee5be16abe 100644 --- a/salt/utils/network.py +++ b/salt/utils/network.py @@ -94,7 +94,7 @@ def _generate_minion_id(): class DistinctList(list): ''' - List, which allows to append only distinct objects. + List, which allows one to append only distinct objects. Needs to work on Python 2.6, because of collections.OrderedDict only since 2.7 version. Override 'filter()' for custom filtering. ''' diff --git a/salt/utils/process.py b/salt/utils/process.py index af07af357e3..aefe8731ee3 100644 --- a/salt/utils/process.py +++ b/salt/utils/process.py @@ -278,7 +278,7 @@ class ProcessManager(object): kwargs = {} if salt.utils.is_windows(): - # Need to ensure that 'log_queue' is correctly transfered to + # Need to ensure that 'log_queue' is correctly transferred to # processes that inherit from 'MultiprocessingProcess'. 
if type(MultiprocessingProcess) is type(tgt) and ( issubclass(tgt, MultiprocessingProcess)): diff --git a/salt/utils/templates.py b/salt/utils/templates.py index c974e6fff6c..825baeb5286 100644 --- a/salt/utils/templates.py +++ b/salt/utils/templates.py @@ -255,7 +255,7 @@ def _get_jinja_error(trace, context=None): ): add_log = True template_path = error[0] - # if we add a log, format explicitly the exeception here + # if we add a log, format explicitly the exception here # by telling to output the macro context after the macro # error log place at the beginning if add_log: diff --git a/setup.py b/setup.py index 10fa1aa4bb0..b67efa04ee2 100755 --- a/setup.py +++ b/setup.py @@ -800,8 +800,8 @@ class InstallLib(install_lib): chmod = [] for idx, inputfile in enumerate(inp): - for executeable in executables: - if inputfile.endswith(executeable): + for executable in executables: + if inputfile.endswith(executable): chmod.append(idx) for idx in chmod: filename = out[idx] diff --git a/tests/integration/cloud/providers/test_virtualbox.py b/tests/integration/cloud/providers/test_virtualbox.py index 2dd99d1dfcc..2d31b45bd0a 100644 --- a/tests/integration/cloud/providers/test_virtualbox.py +++ b/tests/integration/cloud/providers/test_virtualbox.py @@ -468,19 +468,19 @@ class XpcomConversionTests(TestCase): for key in expected_extras: self.assertIn(key, ret_keys) - def test_extra_nonexistant_attributes(self): + def test_extra_nonexistent_attributes(self): expected_extra_dict = { - "nonexistant": "" + "nonexistent": "" } xpcom = XpcomConversionTests._mock_xpcom_object() ret = vb_xpcom_to_attribute_dict(xpcom, extra_attributes=expected_extra_dict.keys()) self.assertDictEqual(ret, expected_extra_dict) - def test_extra_nonexistant_attribute_with_default(self): - expected_extras = [("nonexistant", list)] + def test_extra_nonexistent_attribute_with_default(self): + expected_extras = [("nonexistent", list)] expected_extra_dict = { - "nonexistant": [] + "nonexistent": [] } xpcom = XpcomConversionTests._mock_xpcom_object() diff --git a/tests/integration/netapi/rest_tornado/test_app.py b/tests/integration/netapi/rest_tornado/test_app.py index d7bf36e60ef..1712f80d454 100644 --- a/tests/integration/netapi/rest_tornado/test_app.py +++ b/tests/integration/netapi/rest_tornado/test_app.py @@ -90,7 +90,7 @@ class TestSaltAPIHandler(_SaltnadoIntegrationTestCase): self.assertEqual(response.headers['Location'], '/login') # Local client tests - @skipIf(True, 'to be reenabled when #23623 is merged') + @skipIf(True, 'to be re-enabled when #23623 is merged') def test_simple_local_post(self): ''' Test a basic API of / @@ -326,7 +326,7 @@ class TestMinionSaltAPIHandler(_SaltnadoIntegrationTestCase): for minion_id, grains in six.iteritems(response_obj['return'][0]): self.assertEqual(minion_id, grains['id']) - @skipIf(True, 'to be reenabled when #23623 is merged') + @skipIf(True, 'to be re-enabled when #23623 is merged') def test_get(self): response = self.fetch('/minions/minion', method='GET', @@ -410,7 +410,7 @@ class TestJobsSaltAPIHandler(_SaltnadoIntegrationTestCase): application.event_listener = saltnado.EventListener({}, self.opts) return application - @skipIf(True, 'to be reenabled when #23623 is merged') + @skipIf(True, 'to be re-enabled when #23623 is merged') def test_get(self): # test with no JID self.http_client.fetch(self.get_url('/jobs'), @@ -463,7 +463,7 @@ class TestRunSaltAPIHandler(_SaltnadoIntegrationTestCase): application.event_listener = saltnado.EventListener({}, self.opts) return application - 
@skipIf(True, 'to be reenabled when #23623 is merged') + @skipIf(True, 'to be re-enabled when #23623 is merged') def test_get(self): low = [{'client': 'local', 'tgt': '*', diff --git a/tests/integration/runners/test_jobs.py b/tests/integration/runners/test_jobs.py index 171a132d37d..d5482f16994 100644 --- a/tests/integration/runners/test_jobs.py +++ b/tests/integration/runners/test_jobs.py @@ -30,7 +30,7 @@ class ManageTest(ShellCase): self.assertEqual(ret['return'], {}) self.assertEqual(ret['out'], []) - @skipIf(True, 'to be reenabled when #23623 is merged') + @skipIf(True, 'to be re-enabled when #23623 is merged') def test_list_jobs(self): ''' jobs.list_jobs diff --git a/tests/integration/shell/test_call.py b/tests/integration/shell/test_call.py index 2fb854b2eab..90fd271e8c2 100644 --- a/tests/integration/shell/test_call.py +++ b/tests/integration/shell/test_call.py @@ -140,7 +140,7 @@ class CallTest(ShellCase, testprogram.TestProgramCase, ShellCaseCommonTestsMixin self.assertNotEqual(0, retcode) @skipIf(sys.platform.startswith('win'), 'This test does not apply on Win') - @skipIf(True, 'to be reenabled when #23623 is merged') + @skipIf(True, 'to be re-enabled when #23623 is merged') def test_return(self): self.run_call('cmd.run "echo returnTOmaster"') jobs = [a for a in self.run_run('jobs.list_jobs')] diff --git a/tests/integration/states/test_pkg.py b/tests/integration/states/test_pkg.py index 2b92a189b3d..73731332088 100644 --- a/tests/integration/states/test_pkg.py +++ b/tests/integration/states/test_pkg.py @@ -492,7 +492,7 @@ class PkgTest(ModuleCase, SaltReturnAssertsMixin): Need to check to ensure the package has been installed after the pkg_latest_epoch sls file has been run. This needs to be broken up into - a seperate method so I can add the requires_salt_modules + a separate method so I can add the requires_salt_modules decorator to only the pkg.info_installed command. 
''' # Skip test if package manager not available diff --git a/tests/integration/utils/testprogram.py b/tests/integration/utils/testprogram.py index 879d25b8724..b133705528f 100644 --- a/tests/integration/utils/testprogram.py +++ b/tests/integration/utils/testprogram.py @@ -183,7 +183,7 @@ class TestProgram(six.with_metaclass(TestProgramMeta, object)): @property def start_pid(self): - '''PID of the called script prior to deamonizing.''' + '''PID of the called script prior to daemonizing.''' return self.process.pid if self.process else None @property diff --git a/tests/unit/modules/test_localemod.py b/tests/unit/modules/test_localemod.py index 1c1b34c3a55..82b8343ea52 100644 --- a/tests/unit/modules/test_localemod.py +++ b/tests/unit/modules/test_localemod.py @@ -43,17 +43,18 @@ class LocalemodTestCase(TestCase, LoaderModuleMockMixin): Test for Get the current system locale ''' with patch.dict(localemod.__context__, {'salt.utils.systemd.booted': True}): - with patch.multiple(localemod, - _parse_dbus_locale=MagicMock(return_value={'LANG': 'A'}), - HAS_DBUS=True): - self.assertEqual('A', localemod.get_locale()) - localemod._parse_dbus_locale.assert_called_once_with() - - with patch.multiple(localemod, - _parse_localectl=MagicMock(return_value={'LANG': 'A'}), - HAS_DBUS=False): - self.assertEqual('A', localemod.get_locale()) - localemod._parse_localectl.assert_called_once_with() + with patch.dict(localemod.__grains__, {'os_family': ['Unknown']}): + with patch.multiple(localemod, + _parse_dbus_locale=MagicMock(return_value={'LANG': 'A'}), + HAS_DBUS=True): + self.assertEqual('A', localemod.get_locale()) + localemod._parse_dbus_locale.assert_called_once_with() + + with patch.multiple(localemod, + _parse_localectl=MagicMock(return_value={'LANG': 'A'}), + HAS_DBUS=False): + self.assertEqual('A', localemod.get_locale()) + localemod._parse_localectl.assert_called_once_with() with patch.dict(localemod.__context__, {'salt.utils.systemd.booted': False}): with patch.dict(localemod.__grains__, {'os_family': ['Gentoo']}): @@ -79,8 +80,9 @@ class LocalemodTestCase(TestCase, LoaderModuleMockMixin): Test for Sets the current system locale ''' with patch.dict(localemod.__context__, {'salt.utils.systemd.booted': True}): - with patch.object(localemod, '_localectl_set', return_value=True): - self.assertTrue(localemod.set_locale('l')) + with patch.dict(localemod.__grains__, {'os_family': ['Unknown']}): + with patch.object(localemod, '_localectl_set', return_value=True): + self.assertTrue(localemod.set_locale('l')) with patch.dict(localemod.__context__, {'salt.utils.systemd.booted': False}): with patch.dict(localemod.__grains__, {'os_family': ['Gentoo']}): diff --git a/tests/unit/modules/test_win_powercfg.py b/tests/unit/modules/test_win_powercfg.py index f4c42048661..b717c7dbec3 100644 --- a/tests/unit/modules/test_win_powercfg.py +++ b/tests/unit/modules/test_win_powercfg.py @@ -27,7 +27,7 @@ class PowerCfgTestCase(TestCase, LoaderModuleMockMixin): def setup_loader_modules(self): return {powercfg: {'__grains__': {'osrelease': 8}}} - query_ouput = '''Subgroup GUID: 238c9fa8-0aad-41ed-83f4-97be242c8f20 (Hibernate) + query_output = '''Subgroup GUID: 238c9fa8-0aad-41ed-83f4-97be242c8f20 (Hibernate) GUID Alias: SUB_SLEEP Power Setting GUID: 29f6c1db-86da-48c5-9fdb-f2b67b1f44da (Hibernate after) GUID Alias: HIBERNATEIDLE @@ -43,7 +43,7 @@ class PowerCfgTestCase(TestCase, LoaderModuleMockMixin): Test to make sure we can set the monitor timeout value ''' mock = MagicMock() - mock.side_effect = ["Power Scheme GUID: 
381b4222-f694-41f0-9685-ff5bb260df2e (Balanced)", self.query_ouput] + mock.side_effect = ["Power Scheme GUID: 381b4222-f694-41f0-9685-ff5bb260df2e (Balanced)", self.query_output] with patch.dict(powercfg.__salt__, {'cmd.run': mock}): powercfg.set_monitor_timeout(0, "dc") @@ -58,7 +58,7 @@ class PowerCfgTestCase(TestCase, LoaderModuleMockMixin): Test to make sure we can set the disk timeout value ''' mock = MagicMock() - mock.side_effect = ["Power Scheme GUID: 381b4222-f694-41f0-9685-ff5bb260df2e (Balanced)", self.query_ouput] + mock.side_effect = ["Power Scheme GUID: 381b4222-f694-41f0-9685-ff5bb260df2e (Balanced)", self.query_output] with patch.dict(powercfg.__salt__, {'cmd.run': mock}): powercfg.set_disk_timeout(0, "dc") @@ -73,7 +73,7 @@ class PowerCfgTestCase(TestCase, LoaderModuleMockMixin): Test to make sure we can set the standby timeout value ''' mock = MagicMock() - mock.side_effect = ["Power Scheme GUID: 381b4222-f694-41f0-9685-ff5bb260df2e (Balanced)", self.query_ouput] + mock.side_effect = ["Power Scheme GUID: 381b4222-f694-41f0-9685-ff5bb260df2e (Balanced)", self.query_output] with patch.dict(powercfg.__salt__, {'cmd.run': mock}): powercfg.set_standby_timeout(0, "dc") @@ -88,7 +88,7 @@ class PowerCfgTestCase(TestCase, LoaderModuleMockMixin): Test to make sure we can set the hibernate timeout value ''' mock = MagicMock() - mock.side_effect = ["Power Scheme GUID: 381b4222-f694-41f0-9685-ff5bb260df2e (Balanced)", self.query_ouput] + mock.side_effect = ["Power Scheme GUID: 381b4222-f694-41f0-9685-ff5bb260df2e (Balanced)", self.query_output] with patch.dict(powercfg.__salt__, {'cmd.run': mock}): powercfg.set_hibernate_timeout(0, "dc") @@ -103,7 +103,7 @@ class PowerCfgTestCase(TestCase, LoaderModuleMockMixin): Test to make sure we can get the monitor timeout value ''' mock = MagicMock() - mock.side_effect = ["Power Scheme GUID: 381b4222-f694-41f0-9685-ff5bb260df2e (Balanced)", self.query_ouput] + mock.side_effect = ["Power Scheme GUID: 381b4222-f694-41f0-9685-ff5bb260df2e (Balanced)", self.query_output] with patch.dict(powercfg.__salt__, {'cmd.run': mock}): ret = powercfg.get_monitor_timeout() @@ -120,7 +120,7 @@ class PowerCfgTestCase(TestCase, LoaderModuleMockMixin): Test to make sure we can get the disk timeout value ''' mock = MagicMock() - mock.side_effect = ["Power Scheme GUID: 381b4222-f694-41f0-9685-ff5bb260df2e (Balanced)", self.query_ouput] + mock.side_effect = ["Power Scheme GUID: 381b4222-f694-41f0-9685-ff5bb260df2e (Balanced)", self.query_output] with patch.dict(powercfg.__salt__, {'cmd.run': mock}): ret = powercfg.get_disk_timeout() @@ -137,7 +137,7 @@ class PowerCfgTestCase(TestCase, LoaderModuleMockMixin): Test to make sure we can get the standby timeout value ''' mock = MagicMock() - mock.side_effect = ["Power Scheme GUID: 381b4222-f694-41f0-9685-ff5bb260df2e (Balanced)", self.query_ouput] + mock.side_effect = ["Power Scheme GUID: 381b4222-f694-41f0-9685-ff5bb260df2e (Balanced)", self.query_output] with patch.dict(powercfg.__salt__, {'cmd.run': mock}): ret = powercfg.get_standby_timeout() @@ -154,7 +154,7 @@ class PowerCfgTestCase(TestCase, LoaderModuleMockMixin): Test to make sure we can get the hibernate timeout value ''' mock = MagicMock() - mock.side_effect = ["Power Scheme GUID: 381b4222-f694-41f0-9685-ff5bb260df2e (Balanced)", self.query_ouput] + mock.side_effect = ["Power Scheme GUID: 381b4222-f694-41f0-9685-ff5bb260df2e (Balanced)", self.query_output] with patch.dict(powercfg.__salt__, {'cmd.run': mock}): ret = powercfg.get_hibernate_timeout() @@ -171,7 
+171,7 @@ class PowerCfgTestCase(TestCase, LoaderModuleMockMixin): Test to make sure we can get the hibernate timeout value on windows 7 ''' mock = MagicMock() - mock.side_effect = ["Power Scheme GUID: 381b4222-f694-41f0-9685-ff5bb260df2e (Balanced)", self.query_ouput] + mock.side_effect = ["Power Scheme GUID: 381b4222-f694-41f0-9685-ff5bb260df2e (Balanced)", self.query_output] with patch.dict(powercfg.__salt__, {'cmd.run': mock}): with patch.dict(powercfg.__grains__, {'osrelease': '7'}): @@ -189,7 +189,7 @@ class PowerCfgTestCase(TestCase, LoaderModuleMockMixin): Test to make sure we can set the hibernate timeout value ''' mock = MagicMock() - mock.side_effect = [self.query_ouput] + mock.side_effect = [self.query_output] with patch.dict(powercfg.__salt__, {'cmd.run': mock}): powercfg.set_hibernate_timeout(0, "dc", scheme="SCHEME_MIN") @@ -203,7 +203,7 @@ class PowerCfgTestCase(TestCase, LoaderModuleMockMixin): Test to make sure we can get the hibernate timeout value with a specified scheme ''' mock = MagicMock() - mock.side_effect = [self.query_ouput] + mock.side_effect = [self.query_output] with patch.dict(powercfg.__salt__, {'cmd.run': mock}): ret = powercfg.get_hibernate_timeout(scheme="SCHEME_MIN")
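Across the ``test_win_powercfg`` hunks above the only change is the ``query_ouput`` to ``query_output`` rename, but the surrounding pattern is worth spelling out: in most of these tests ``cmd.run`` is replaced by a ``MagicMock`` whose ``side_effect`` hands back the active-scheme lookup on the first call and the query fixture on the second. A standalone hedged illustration of that ``side_effect`` behaviour (the command strings are placeholders and the fixture text is abbreviated; ``MagicMock`` ignores its arguments):

    from unittest.mock import MagicMock

    # abbreviated, invented stand-in for the renamed query_output fixture
    query_output = 'Power Setting GUID: 29f6c1db-... (Hibernate after)'

    mock_run = MagicMock(side_effect=[
        'Power Scheme GUID: 381b4222-f694-41f0-9685-ff5bb260df2e  (Balanced)',
        query_output,
    ])

    print(mock_run('powercfg /getactivescheme'))  # first canned value: the scheme line
    print(mock_run('powercfg /q'))                # second canned value: the query fixture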