Mirror of https://github.com/saltstack/salt.git
Merge branch '2017.7' into jinja_unicode
Commit 57dc6226a2
71 changed files with 947 additions and 575 deletions
.github/CODEOWNERS (new file, vendored, 60 lines)
@@ -0,0 +1,60 @@
# SALTSTACK CODE OWNERS

# See https://help.github.com/articles/about-codeowners/
# for more info about CODEOWNERS file

# Lines starting with '#' are comments.
# Each line is a file pattern followed by one or more owners.

# See https://help.github.com/articles/about-codeowners/
# for more info about the CODEOWNERS file

# Team Boto
salt/**/*boto* @saltstack/team-boto

# Team Core
salt/auth/ @saltstack/team-core
salt/cache/ @saltstack/team-core
salt/cli/ @saltstack/team-core
salt/client/* @saltstack/team-core
salt/config/* @saltstack/team-core
salt/daemons/ @saltstack/team-core
salt/pillar/ @saltstack/team-core
salt/loader.py @saltstack/team-core
salt/payload.py @saltstack/team-core
salt/**/master* @saltstack/team-core
salt/**/minion* @saltstack/team-core

# Team Cloud
salt/cloud/ @saltstack/team-cloud
salt/utils/openstack/ @saltstack/team-cloud
salt/utils/aws.py @saltstack/team-cloud
salt/**/*cloud* @saltstack/team-cloud

# Team NetAPI
salt/cli/api.py @saltstack/team-netapi
salt/client/netapi.py @saltstack/team-netapi
salt/netapi/ @saltstack/team-netapi

# Team Network
salt/proxy/ @saltstack/team-proxy

# Team SPM
salt/cli/spm.py @saltstack/team-spm
salt/spm/ @saltstack/team-spm

# Team SSH
salt/cli/ssh.py @saltstack/team-ssh
salt/client/ssh/ @saltstack/team-ssh
salt/runners/ssh.py @saltstack/team-ssh
salt/**/thin.py @saltstack/team-ssh

# Team State
salt/state.py @saltstack/team-state

# Team Transport
salt/transport/ @saltstack/team-transport
salt/utils/zeromq.py @saltstack/team-transport

# Team Windows
salt/**/*win* @saltstack/team-windows
.mention-bot (14 lines changed)
@@ -1,5 +1,17 @@
{
  "alwaysNotifyForPaths": [
    {
      "name": "ryan-lane",
      "files": ["salt/**/*boto*.py"],
      "skipTeamPrs": false
    },
    {
      "name": "tkwilliams",
      "files": ["salt/**/*boto*.py"],
      "skipTeamPrs": false
    }
  ],
  "skipTitle": "Merge forward",
- "userBlacklist": ["cvrebert", "markusgattol", "olliewalsh"]
+ "userBlacklist": ["cvrebert", "markusgattol", "olliewalsh", "basepi"]
}
@@ -373,7 +373,7 @@
# interface: eth0
# cidr: '10.0.0.0/8'

-# The number of seconds a mine update runs.
+# The number of minutes between mine updates.
#mine_interval: 60

# Windows platforms lack posix IPC and must rely on slower TCP based inter-
@@ -674,7 +674,7 @@ Note these can be defined in the pillar for a minion as well.

Default: ``60``

-The number of seconds a mine update runs.
+The number of minutes between mine updates.

.. code-block:: yaml
@@ -118,3 +118,53 @@ has to be closed after every command.

.. code-block:: yaml

    proxy_always_alive: False

``proxy_merge_pillar_in_opts``
------------------------------

.. versionadded:: 2017.7.3

Default: ``False``.

Whether the pillar data should be merged into the proxy configuration options.
As multiple proxies can run on the same server, we may need different
configuration options for each, while there's one single configuration file.
The solution is merging the pillar data of each proxy minion into the opts.

.. code-block:: yaml

    proxy_merge_pillar_in_opts: True

``proxy_deep_merge_pillar_in_opts``
-----------------------------------

.. versionadded:: 2017.7.3

Default: ``False``.

Deep merge of pillar data into configuration opts.
This option is evaluated only when :conf_proxy:`proxy_merge_pillar_in_opts` is
enabled.

``proxy_merge_pillar_in_opts_strategy``
---------------------------------------

.. versionadded:: 2017.7.3

Default: ``smart``.

The strategy used when merging pillar configuration into opts.
This option is evaluated only when :conf_proxy:`proxy_merge_pillar_in_opts` is
enabled.

``proxy_mines_pillar``
----------------------

.. versionadded:: 2017.7.3

Default: ``True``.

Allow enabling mine details using pillar data. This evaluates the mine
configuration under the pillar, for the following regular minion options that
are also equally available on the proxy minion: :conf_minion:`mine_interval`,
and :conf_minion:`mine_functions`.
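For illustration only, a minimal plain-Python sketch of the merge behaviour these options describe (this is not Salt's implementation; the option semantics are taken from the documentation above, everything else is assumed):

    # Sketch of merging per-proxy pillar data into opts: shallow overwrite
    # versus the deep merge behaviour described for
    # proxy_deep_merge_pillar_in_opts.
    def deep_merge(dest, upd):
        '''Recursively merge ``upd`` into ``dest`` and return ``dest``.'''
        for key, value in upd.items():
            if isinstance(value, dict) and isinstance(dest.get(key), dict):
                deep_merge(dest[key], value)
            else:
                dest[key] = value
        return dest

    opts = {'mine_interval': 60, 'details': {'driver': 'http', 'timeout': 30}}
    pillar = {'mine_interval': 5, 'details': {'timeout': 120}}

    shallow = dict(opts, **pillar)                                  # pillar keys overwrite opts keys
    deep = deep_merge(dict(opts, details=dict(opts['details'])), pillar)

    print(shallow['details'])   # {'timeout': 120}   ('driver' is lost)
    print(deep['details'])      # {'driver': 'http', 'timeout': 120}

With a deep merge the nested keys survive; a plain shallow overwrite drops them.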
@@ -263,9 +263,17 @@ against that branch.
Release Branches
----------------

-For each release a branch will be created when we are ready to tag. The branch will be the same name as the tag minus the v. For example, the v2017.7.1 release was created from the 2017.7.1 branch. This branching strategy will allow for more stability when there is a need for a re-tag during the testing phase of our releases.
+For each release, a branch will be created when the SaltStack release team is
+ready to tag. The release branch is created from the parent branch and will be
+the same name as the tag minus the ``v``. For example, the ``2017.7.1`` release
+branch was created from the ``2017.7`` parent branch and the ``v2017.7.1``
+release was tagged at the ``HEAD`` of the ``2017.7.1`` branch. This branching
+strategy will allow for more stability when there is a need for a re-tag during
+the testing phase of the release process.

-Once the branch is created, the fixes required for a given release, as determined by the SaltStack release team, will be added to this branch. All commits in this branch will be merged forward into the parent branch as well.
+Once the release branch is created, the fixes required for a given release, as
+determined by the SaltStack release team, will be added to this branch. All
+commits in this branch will be merged forward into the parent branch as well.

Keeping Salt Forks in Sync
==========================
@@ -481,11 +481,17 @@ Alternatively the ``uninstaller`` can also simply repeat the URL of the msi file
:param bool allusers: This parameter is specific to `.msi` installations. It
    tells `msiexec` to install the software for all users. The default is True.

-:param bool cache_dir: If true, the entire directory where the installer resides
-    will be recursively cached. This is useful for installers that depend on
-    other files in the same directory for installation.
+:param bool cache_dir: If true when installer URL begins with salt://, the
+    entire directory where the installer resides will be recursively cached.
+    This is useful for installers that depend on other files in the same
+    directory for installation.
+
+    .. note:: Only applies to salt: installer URLs.
+
+:param str cache_file:
+    When installer URL begins with salt://, this indicates single file to copy
+    down for use with the installer. Copied to the same location as the
+    installer. Use this over ``cache_dir`` if there are many files in the
+    directory and you only need a specific file and don't want to cache
+    additional files that may reside in the installer directory.

Here's an example for a software package that has dependent files:
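A hedged usage sketch from the master side (assumes a running master, a Windows minion with id ``winminion``, and a hypothetical winrepo package named ``mypkg``; none of these names come from the diff):

    # Install a winrepo package and cache the whole salt:// directory next to
    # the installer (cache_dir), as documented above.
    import salt.client

    local = salt.client.LocalClient()
    result = local.cmd(
        'winminion',                 # assumed minion id
        'pkg.install',
        ['mypkg'],                   # hypothetical package definition name
        kwarg={'cache_dir': True},   # pull down the installer's whole directory
    )
    print(result)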
@@ -89,7 +89,7 @@ if Defined x (
if %Python%==2 (
    Set "PyDir=C:\Python27"
) else (
-   Set "PyDir=C:\Program Files\Python35"
+   Set "PyDir=C:\Python35"
)
Set "PATH=%PATH%;%PyDir%;%PyDir%\Scripts"
@@ -175,7 +175,7 @@ If (Test-Path "$($ini['Settings']['Python2Dir'])\python.exe") {
    DownloadFileWithProgress $url $file

    Write-Output " - $script_name :: Installing $($ini[$bitPrograms]['Python2']) . . ."
-   $p = Start-Process msiexec -ArgumentList "/i $file /qb ADDLOCAL=DefaultFeature,SharedCRT,Extensions,pip_feature,PrependPath TARGETDIR=$($ini['Settings']['Python2Dir'])" -Wait -NoNewWindow -PassThru
+   $p = Start-Process msiexec -ArgumentList "/i $file /qb ADDLOCAL=DefaultFeature,SharedCRT,Extensions,pip_feature,PrependPath TARGETDIR=`"$($ini['Settings']['Python2Dir'])`"" -Wait -NoNewWindow -PassThru
}

#------------------------------------------------------------------------------
@@ -191,7 +191,7 @@ If (!($Path.ToLower().Contains("$($ini['Settings']['Scripts2Dir'])".ToLower())))

#==============================================================================
# Update PIP and SetupTools
-# caching depends on environmant variable SALT_PIP_LOCAL_CACHE
+# caching depends on environment variable SALT_PIP_LOCAL_CACHE
#==============================================================================
Write-Output " ----------------------------------------------------------------"
Write-Output " - $script_name :: Updating PIP and SetupTools . . ."
@@ -212,7 +212,7 @@ if ( ! [bool]$Env:SALT_PIP_LOCAL_CACHE) {

#==============================================================================
# Install pypi resources using pip
-# caching depends on environmant variable SALT_REQ_LOCAL_CACHE
+# caching depends on environment variable SALT_REQ_LOCAL_CACHE
#==============================================================================
Write-Output " ----------------------------------------------------------------"
Write-Output " - $script_name :: Installing pypi resources using pip . . ."
@@ -230,6 +230,24 @@ if ( ! [bool]$Env:SALT_REQ_LOCAL_CACHE) {
    Start_Process_and_test_exitcode "$($ini['Settings']['Python2Dir'])\python.exe" "-m pip install --no-index --find-links=$Env:SALT_REQ_LOCAL_CACHE -r $($script_path)\req_2.txt" "pip install"
}

#==============================================================================
# Move PyWin32 DLL's to site-packages\win32
#==============================================================================
Write-Output " - $script_name :: Moving PyWin32 DLLs . . ."
Move-Item "$($ini['Settings']['SitePkgs2Dir'])\pywin32_system32\*.dll" "$($ini['Settings']['SitePkgs2Dir'])\win32" -Force

# Remove pywin32_system32 directory
Write-Output " - $script_name :: Removing pywin32_system32 Directory . . ."
Remove-Item "$($ini['Settings']['SitePkgs2Dir'])\pywin32_system32"

# Remove pythonwin directory
Write-Output " - $script_name :: Removing pythonwin Directory . . ."
Remove-Item "$($ini['Settings']['SitePkgs2Dir'])\pythonwin" -Force -Recurse

# Remove PyWin32 PostInstall and testall Scripts
Write-Output " - $script_name :: Removing PyWin32 scripts . . ."
Remove-Item "$($ini['Settings']['Scripts2Dir'])\pywin32_*" -Force -Recurse

#==============================================================================
# Install PyYAML with CLoader
# This has to be a compiled binary to get the CLoader
@@ -175,7 +175,7 @@ If (Test-Path "$($ini['Settings']['Python3Dir'])\python.exe") {
    DownloadFileWithProgress $url $file

    Write-Output " - $script_name :: Installing $($ini[$bitPrograms]['Python3']) . . ."
-   $p = Start-Process $file -ArgumentList '/passive InstallAllUsers=1 TargetDir="C:\Program Files\Python35" Include_doc=0 Include_tcltk=0 Include_test=0 Include_launcher=0 PrependPath=1 Shortcuts=0' -Wait -NoNewWindow -PassThru
+   $p = Start-Process $file -ArgumentList "/passive InstallAllUsers=1 TargetDir=`"$($ini['Settings']['Python3Dir'])`" Include_doc=0 Include_tcltk=0 Include_test=0 Include_launcher=0 PrependPath=1 Shortcuts=0" -Wait -NoNewWindow -PassThru
}

#------------------------------------------------------------------------------
@@ -247,7 +247,7 @@ Start_Process_and_test_exitcode "$($ini['Settings']['Scripts3Dir'])\pip.exe" "i

# Move DLL's to Python Root
Write-Output " - $script_name :: Moving PyWin32 DLLs . . ."
-Move-Item "$($ini['Settings']['SitePkgs3Dir'])\pywin32_system32\*.dll" "$($ini['Settings']['Python3Dir'])" -Force
+Move-Item "$($ini['Settings']['SitePkgs3Dir'])\pywin32_system32\*.dll" "$($ini['Settings']['SitePkgs3Dir'])\win32" -Force

# Remove pywin32_system32 directory
Write-Output " - $script_name :: Removing pywin32_system32 Directory . . ."
@@ -257,6 +257,10 @@ Remove-Item "$($ini['Settings']['SitePkgs3Dir'])\pywin32_system32"
Write-Output " - $script_name :: Removing pythonwin Directory . . ."
Remove-Item "$($ini['Settings']['SitePkgs3Dir'])\pythonwin" -Force -Recurse

# Remove PyWin32 PostInstall and testall Scripts
Write-Output " - $script_name :: Removing PyWin32 scripts . . ."
Remove-Item "$($ini['Settings']['Scripts3Dir'])\pywin32_*" -Force -Recurse

#==============================================================================
# Fix PyCrypto
#==============================================================================
@@ -56,7 +56,7 @@ if %Python%==2 (
    Set "PyVerMajor=2"
    Set "PyVerMinor=7"
) else (
-   Set "PyDir=C:\Program Files\Python35"
+   Set "PyDir=C:\Python35"
    Set "PyVerMajor=3"
    Set "PyVerMinor=5"
)
@@ -16,9 +16,10 @@ if %errorLevel%==0 (
)
echo.

:CheckPython2
if exist "\Python27" goto RemovePython2
-if exist "\Program Files\Python35" goto RemovePython3
-goto eof
+goto CheckPython3

:RemovePython2
rem Uninstall Python 2.7
@@ -47,25 +48,30 @@ goto eof

goto eof

:CheckPython3
if exist "\Python35" goto RemovePython3

goto eof

:RemovePython3
echo %0 :: Uninstalling Python 3 ...
echo ---------------------------------------------------------------------
:: 64 bit
if exist "%LOCALAPPDATA%\Package Cache\{b94f45d6-8461-440c-aa4d-bf197b2c2499}" (
    echo %0 :: - 3.5.3 64bit
-   "%LOCALAPPDATA%\Package Cache\{b94f45d6-8461-440c-aa4d-bf197b2c2499}\python-3.5.3-amd64.exe" /uninstall
+   "%LOCALAPPDATA%\Package Cache\{b94f45d6-8461-440c-aa4d-bf197b2c2499}\python-3.5.3-amd64.exe" /uninstall /passive
)

:: 32 bit
if exist "%LOCALAPPDATA%\Package Cache\{a10037e1-4247-47c9-935b-c5ca049d0299}" (
    echo %0 :: - 3.5.3 32bit
-   "%LOCALAPPDATA%\Package Cache\{a10037e1-4247-47c9-935b-c5ca049d0299}\python-3.5.3" /uninstall
+   "%LOCALAPPDATA%\Package Cache\{a10037e1-4247-47c9-935b-c5ca049d0299}\python-3.5.3" /uninstall /passive
)

rem wipe the Python directory
-echo %0 :: Removing the C:\Program Files\Python35 Directory ...
+echo %0 :: Removing the C:\Python35 Directory ...
echo ---------------------------------------------------------------------
-rd /s /q "C:\Program Files\Python35"
+rd /s /q "C:\Python35"
if %errorLevel%==0 (
    echo Successful
) else (
@@ -44,7 +44,7 @@ ${StrStrAdv}
    !define CPUARCH "x86"
!endif

-; Part of the Trim function for Strings
+# Part of the Trim function for Strings
!define Trim "!insertmacro Trim"
!macro Trim ResultVar String
    Push "${String}"
@ -61,27 +61,27 @@ ${StrStrAdv}
|
|||
!define MUI_UNICON "salt.ico"
|
||||
!define MUI_WELCOMEFINISHPAGE_BITMAP "panel.bmp"
|
||||
|
||||
; Welcome page
|
||||
# Welcome page
|
||||
!insertmacro MUI_PAGE_WELCOME
|
||||
|
||||
; License page
|
||||
# License page
|
||||
!insertmacro MUI_PAGE_LICENSE "LICENSE.txt"
|
||||
|
||||
; Configure Minion page
|
||||
# Configure Minion page
|
||||
Page custom pageMinionConfig pageMinionConfig_Leave
|
||||
|
||||
; Instfiles page
|
||||
# Instfiles page
|
||||
!insertmacro MUI_PAGE_INSTFILES
|
||||
|
||||
; Finish page (Customized)
|
||||
# Finish page (Customized)
|
||||
!define MUI_PAGE_CUSTOMFUNCTION_SHOW pageFinish_Show
|
||||
!define MUI_PAGE_CUSTOMFUNCTION_LEAVE pageFinish_Leave
|
||||
!insertmacro MUI_PAGE_FINISH
|
||||
|
||||
; Uninstaller pages
|
||||
# Uninstaller pages
|
||||
!insertmacro MUI_UNPAGE_INSTFILES
|
||||
|
||||
; Language files
|
||||
# Language files
|
||||
!insertmacro MUI_LANGUAGE "English"
|
||||
|
||||
|
||||
|
@ -201,8 +201,8 @@ ShowInstDetails show
|
|||
ShowUnInstDetails show
|
||||
|
||||
|
||||
; Check and install Visual C++ redist packages
|
||||
; See http://blogs.msdn.com/b/astebner/archive/2009/01/29/9384143.aspx for more info
|
||||
# Check and install Visual C++ redist packages
|
||||
# See http://blogs.msdn.com/b/astebner/archive/2009/01/29/9384143.aspx for more info
|
||||
Section -Prerequisites
|
||||
|
||||
Var /GLOBAL VcRedistName
|
||||
|
@ -211,12 +211,12 @@ Section -Prerequisites
|
|||
Var /Global CheckVcRedist
|
||||
StrCpy $CheckVcRedist "False"
|
||||
|
||||
; Visual C++ 2015 redist packages
|
||||
# Visual C++ 2015 redist packages
|
||||
!define PY3_VC_REDIST_NAME "VC_Redist_2015"
|
||||
!define PY3_VC_REDIST_X64_GUID "{50A2BC33-C9CD-3BF1-A8FF-53C10A0B183C}"
|
||||
!define PY3_VC_REDIST_X86_GUID "{BBF2AC74-720C-3CB3-8291-5E34039232FA}"
|
||||
|
||||
; Visual C++ 2008 SP1 MFC Security Update redist packages
|
||||
# Visual C++ 2008 SP1 MFC Security Update redist packages
|
||||
!define PY2_VC_REDIST_NAME "VC_Redist_2008_SP1_MFC"
|
||||
!define PY2_VC_REDIST_X64_GUID "{5FCE6D76-F5DC-37AB-B2B8-22AB8CEDB1D4}"
|
||||
!define PY2_VC_REDIST_X86_GUID "{9BE518E6-ECC6-35A9-88E4-87755C07200F}"
|
||||
|
@ -239,7 +239,7 @@ Section -Prerequisites
|
|||
StrCpy $VcRedistGuid ${PY2_VC_REDIST_X86_GUID}
|
||||
${EndIf}
|
||||
|
||||
; VCRedist 2008 only needed on Windows Server 2008R2/Windows 7 and below
|
||||
# VCRedist 2008 only needed on Windows Server 2008R2/Windows 7 and below
|
||||
${If} ${AtMostWin2008R2}
|
||||
StrCpy $CheckVcRedist "True"
|
||||
${EndIf}
|
||||
|
@@ -255,20 +255,41 @@ Section -Prerequisites
            "$VcRedistName is currently not installed. Would you like to install?" \
            /SD IDYES IDNO endVcRedist

-       ClearErrors
-       ; The Correct version of VCRedist is copied over by "build_pkg.bat"
+       # The Correct version of VCRedist is copied over by "build_pkg.bat"
        SetOutPath "$INSTDIR\"
        File "..\prereqs\vcredist.exe"
-       ; /passive used by 2015 installer
-       ; /qb! used by 2008 installer
-       ; It just ignores the unrecognized switches...
-       ExecWait "$INSTDIR\vcredist.exe /qb! /passive"
-       IfErrors 0 endVcRedist
+       # If an output variable is specified ($0 in the case below),
+       # ExecWait sets the variable with the exit code (and only sets the
+       # error flag if an error occurs; if an error occurs, the contents
+       # of the user variable are undefined).
+       # http://nsis.sourceforge.net/Reference/ExecWait
+       # /passive used by 2015 installer
+       # /qb! used by 2008 installer
+       # It just ignores the unrecognized switches...
+       ClearErrors
+       ExecWait '"$INSTDIR\vcredist.exe" /qb! /passive /norestart' $0
+       IfErrors 0 CheckVcRedistErrorCode
            MessageBox MB_OK \
                "$VcRedistName failed to install. Try installing the package manually." \
                /SD IDOK
+           Goto endVcRedist
+
+       CheckVcRedistErrorCode:
+       # Check for Reboot Error Code (3010)
+       ${If} $0 == 3010
+           MessageBox MB_OK \
+               "$VcRedistName installed but requires a restart to complete." \
+               /SD IDOK
+
+       # Check for any other errors
+       ${ElseIfNot} $0 == 0
+           MessageBox MB_OK \
+               "$VcRedistName failed with ErrorCode: $0. Try installing the package manually." \
+               /SD IDOK
+       ${EndIf}

        endVcRedist:

    ${EndIf}

${EndIf}
@ -294,12 +315,12 @@ Function .onInit
|
|||
|
||||
Call parseCommandLineSwitches
|
||||
|
||||
; Check for existing installation
|
||||
# Check for existing installation
|
||||
ReadRegStr $R0 HKLM \
|
||||
"Software\Microsoft\Windows\CurrentVersion\Uninstall\${PRODUCT_NAME}" \
|
||||
"UninstallString"
|
||||
StrCmp $R0 "" checkOther
|
||||
; Found existing installation, prompt to uninstall
|
||||
# Found existing installation, prompt to uninstall
|
||||
MessageBox MB_OKCANCEL|MB_ICONEXCLAMATION \
|
||||
"${PRODUCT_NAME} is already installed.$\n$\n\
|
||||
Click `OK` to remove the existing installation." \
|
||||
|
@ -307,12 +328,12 @@ Function .onInit
|
|||
Abort
|
||||
|
||||
checkOther:
|
||||
; Check for existing installation of full salt
|
||||
# Check for existing installation of full salt
|
||||
ReadRegStr $R0 HKLM \
|
||||
"Software\Microsoft\Windows\CurrentVersion\Uninstall\${PRODUCT_NAME_OTHER}" \
|
||||
"UninstallString"
|
||||
StrCmp $R0 "" skipUninstall
|
||||
; Found existing installation, prompt to uninstall
|
||||
# Found existing installation, prompt to uninstall
|
||||
MessageBox MB_OKCANCEL|MB_ICONEXCLAMATION \
|
||||
"${PRODUCT_NAME_OTHER} is already installed.$\n$\n\
|
||||
Click `OK` to remove the existing installation." \
|
||||
|
@ -321,22 +342,22 @@ Function .onInit
|
|||
|
||||
uninst:
|
||||
|
||||
; Get current Silent status
|
||||
# Get current Silent status
|
||||
StrCpy $R0 0
|
||||
${If} ${Silent}
|
||||
StrCpy $R0 1
|
||||
${EndIf}
|
||||
|
||||
; Turn on Silent mode
|
||||
# Turn on Silent mode
|
||||
SetSilent silent
|
||||
|
||||
; Don't remove all directories
|
||||
# Don't remove all directories
|
||||
StrCpy $DeleteInstallDir 0
|
||||
|
||||
; Uninstall silently
|
||||
# Uninstall silently
|
||||
Call uninstallSalt
|
||||
|
||||
; Set it back to Normal mode, if that's what it was before
|
||||
# Set it back to Normal mode, if that's what it was before
|
||||
${If} $R0 == 0
|
||||
SetSilent normal
|
||||
${EndIf}
|
||||
|
@ -350,7 +371,7 @@ Section -Post
|
|||
|
||||
WriteUninstaller "$INSTDIR\uninst.exe"
|
||||
|
||||
; Uninstall Registry Entries
|
||||
# Uninstall Registry Entries
|
||||
WriteRegStr ${PRODUCT_UNINST_ROOT_KEY} "${PRODUCT_UNINST_KEY}" \
|
||||
"DisplayName" "$(^Name)"
|
||||
WriteRegStr ${PRODUCT_UNINST_ROOT_KEY} "${PRODUCT_UNINST_KEY}" \
|
||||
|
@ -366,19 +387,19 @@ Section -Post
|
|||
WriteRegStr HKLM "SYSTEM\CurrentControlSet\services\salt-minion" \
|
||||
"DependOnService" "nsi"
|
||||
|
||||
; Set the estimated size
|
||||
# Set the estimated size
|
||||
${GetSize} "$INSTDIR\bin" "/S=OK" $0 $1 $2
|
||||
IntFmt $0 "0x%08X" $0
|
||||
WriteRegDWORD ${PRODUCT_UNINST_ROOT_KEY} "${PRODUCT_UNINST_KEY}" \
|
||||
"EstimatedSize" "$0"
|
||||
|
||||
; Commandline Registry Entries
|
||||
# Commandline Registry Entries
|
||||
WriteRegStr HKLM "${PRODUCT_CALL_REGKEY}" "" "$INSTDIR\salt-call.bat"
|
||||
WriteRegStr HKLM "${PRODUCT_CALL_REGKEY}" "Path" "$INSTDIR\bin\"
|
||||
WriteRegStr HKLM "${PRODUCT_MINION_REGKEY}" "" "$INSTDIR\salt-minion.bat"
|
||||
WriteRegStr HKLM "${PRODUCT_MINION_REGKEY}" "Path" "$INSTDIR\bin\"
|
||||
|
||||
; Register the Salt-Minion Service
|
||||
# Register the Salt-Minion Service
|
||||
nsExec::Exec "nssm.exe install salt-minion $INSTDIR\bin\python.exe -E -s $INSTDIR\bin\Scripts\salt-minion -c $INSTDIR\conf -l quiet"
|
||||
nsExec::Exec "nssm.exe set salt-minion Description Salt Minion from saltstack.com"
|
||||
nsExec::Exec "nssm.exe set salt-minion Start SERVICE_AUTO_START"
|
||||
|
@ -398,12 +419,12 @@ SectionEnd
|
|||
|
||||
Function .onInstSuccess
|
||||
|
||||
; If StartMinionDelayed is 1, then set the service to start delayed
|
||||
# If StartMinionDelayed is 1, then set the service to start delayed
|
||||
${If} $StartMinionDelayed == 1
|
||||
nsExec::Exec "nssm.exe set salt-minion Start SERVICE_DELAYED_AUTO_START"
|
||||
${EndIf}
|
||||
|
||||
; If start-minion is 1, then start the service
|
||||
# If start-minion is 1, then start the service
|
||||
${If} $StartMinion == 1
|
||||
nsExec::Exec 'net start salt-minion'
|
||||
${EndIf}
|
||||
|
@ -413,10 +434,11 @@ FunctionEnd
|
|||
|
||||
Function un.onInit
|
||||
|
||||
; Load the parameters
|
||||
# Load the parameters
|
||||
${GetParameters} $R0
|
||||
|
||||
# Uninstaller: Remove Installation Directory
|
||||
ClearErrors
|
||||
${GetOptions} $R0 "/delete-install-dir" $R1
|
||||
IfErrors delete_install_dir_not_found
|
||||
StrCpy $DeleteInstallDir 1
|
||||
|
@ -434,7 +456,7 @@ Section Uninstall
|
|||
|
||||
Call un.uninstallSalt
|
||||
|
||||
; Remove C:\salt from the Path
|
||||
# Remove C:\salt from the Path
|
||||
Push "C:\salt"
|
||||
Call un.RemoveFromPath
|
||||
|
||||
|
@ -444,27 +466,27 @@ SectionEnd
|
|||
!macro uninstallSalt un
|
||||
Function ${un}uninstallSalt
|
||||
|
||||
; Make sure we're in the right directory
|
||||
# Make sure we're in the right directory
|
||||
${If} $INSTDIR == "c:\salt\bin\Scripts"
|
||||
StrCpy $INSTDIR "C:\salt"
|
||||
${EndIf}
|
||||
|
||||
; Stop and Remove salt-minion service
|
||||
# Stop and Remove salt-minion service
|
||||
nsExec::Exec 'net stop salt-minion'
|
||||
nsExec::Exec 'sc delete salt-minion'
|
||||
|
||||
; Stop and remove the salt-master service
|
||||
# Stop and remove the salt-master service
|
||||
nsExec::Exec 'net stop salt-master'
|
||||
nsExec::Exec 'sc delete salt-master'
|
||||
|
||||
; Remove files
|
||||
# Remove files
|
||||
Delete "$INSTDIR\uninst.exe"
|
||||
Delete "$INSTDIR\nssm.exe"
|
||||
Delete "$INSTDIR\salt*"
|
||||
Delete "$INSTDIR\vcredist.exe"
|
||||
RMDir /r "$INSTDIR\bin"
|
||||
|
||||
; Remove Registry entries
|
||||
# Remove Registry entries
|
||||
DeleteRegKey ${PRODUCT_UNINST_ROOT_KEY} "${PRODUCT_UNINST_KEY}"
|
||||
DeleteRegKey ${PRODUCT_UNINST_ROOT_KEY} "${PRODUCT_UNINST_KEY_OTHER}"
|
||||
DeleteRegKey ${PRODUCT_UNINST_ROOT_KEY} "${PRODUCT_CALL_REGKEY}"
|
||||
|
@ -474,17 +496,17 @@ Function ${un}uninstallSalt
|
|||
DeleteRegKey ${PRODUCT_UNINST_ROOT_KEY} "${PRODUCT_MINION_REGKEY}"
|
||||
DeleteRegKey ${PRODUCT_UNINST_ROOT_KEY} "${PRODUCT_RUN_REGKEY}"
|
||||
|
||||
; Automatically close when finished
|
||||
# Automatically close when finished
|
||||
SetAutoClose true
|
||||
|
||||
; Prompt to remove the Installation directory
|
||||
# Prompt to remove the Installation directory
|
||||
${IfNot} $DeleteInstallDir == 1
|
||||
MessageBox MB_ICONQUESTION|MB_YESNO|MB_DEFBUTTON2 \
|
||||
"Would you like to completely remove $INSTDIR and all of its contents?" \
|
||||
/SD IDNO IDNO finished
|
||||
${EndIf}
|
||||
|
||||
; Make sure you're not removing Program Files
|
||||
# Make sure you're not removing Program Files
|
||||
${If} $INSTDIR != 'Program Files'
|
||||
${AndIf} $INSTDIR != 'Program Files (x86)'
|
||||
RMDir /r "$INSTDIR"
|
||||
|
@ -526,7 +548,7 @@ FunctionEnd
|
|||
|
||||
Function Trim
|
||||
|
||||
Exch $R1 ; Original string
|
||||
Exch $R1 # Original string
|
||||
Push $R2
|
||||
|
||||
Loop:
|
||||
|
@ -558,36 +580,36 @@ Function Trim
|
|||
FunctionEnd
|
||||
|
||||
|
||||
;------------------------------------------------------------------------------
|
||||
; StrStr Function
|
||||
; - find substring in a string
|
||||
;
|
||||
; Usage:
|
||||
; Push "this is some string"
|
||||
; Push "some"
|
||||
; Call StrStr
|
||||
; Pop $0 ; "some string"
|
||||
;------------------------------------------------------------------------------
|
||||
#------------------------------------------------------------------------------
|
||||
# StrStr Function
|
||||
# - find substring in a string
|
||||
#
|
||||
# Usage:
|
||||
# Push "this is some string"
|
||||
# Push "some"
|
||||
# Call StrStr
|
||||
# Pop $0 ; "some string"
|
||||
#------------------------------------------------------------------------------
|
||||
!macro StrStr un
|
||||
Function ${un}StrStr
|
||||
|
||||
Exch $R1 ; $R1=substring, stack=[old$R1,string,...]
|
||||
Exch ; stack=[string,old$R1,...]
|
||||
Exch $R2 ; $R2=string, stack=[old$R2,old$R1,...]
|
||||
Push $R3 ; $R3=strlen(substring)
|
||||
Push $R4 ; $R4=count
|
||||
Push $R5 ; $R5=tmp
|
||||
StrLen $R3 $R1 ; Get the length of the Search String
|
||||
StrCpy $R4 0 ; Set the counter to 0
|
||||
Exch $R1 # $R1=substring, stack=[old$R1,string,...]
|
||||
Exch # stack=[string,old$R1,...]
|
||||
Exch $R2 # $R2=string, stack=[old$R2,old$R1,...]
|
||||
Push $R3 # $R3=strlen(substring)
|
||||
Push $R4 # $R4=count
|
||||
Push $R5 # $R5=tmp
|
||||
StrLen $R3 $R1 # Get the length of the Search String
|
||||
StrCpy $R4 0 # Set the counter to 0
|
||||
|
||||
loop:
|
||||
StrCpy $R5 $R2 $R3 $R4 ; Create a moving window of the string that is
|
||||
; the size of the length of the search string
|
||||
StrCmp $R5 $R1 done ; Is the contents of the window the same as
|
||||
; search string, then done
|
||||
StrCmp $R5 "" done ; Is the window empty, then done
|
||||
IntOp $R4 $R4 + 1 ; Shift the windows one character
|
||||
Goto loop ; Repeat
|
||||
StrCpy $R5 $R2 $R3 $R4 # Create a moving window of the string that is
|
||||
# the size of the length of the search string
|
||||
StrCmp $R5 $R1 done # Is the contents of the window the same as
|
||||
# search string, then done
|
||||
StrCmp $R5 "" done # Is the window empty, then done
|
||||
IntOp $R4 $R4 + 1 # Shift the windows one character
|
||||
Goto loop # Repeat
|
||||
|
||||
done:
|
||||
StrCpy $R1 $R2 "" $R4
|
||||
|
@ -595,7 +617,7 @@ Function ${un}StrStr
|
|||
Pop $R4
|
||||
Pop $R3
|
||||
Pop $R2
|
||||
Exch $R1 ; $R1=old$R1, stack=[result,...]
|
||||
Exch $R1 # $R1=old$R1, stack=[result,...]
|
||||
|
||||
FunctionEnd
|
||||
!macroend
|
||||
|
@ -603,74 +625,74 @@ FunctionEnd
|
|||
!insertmacro StrStr "un."
|
||||
|
||||
|
||||
;------------------------------------------------------------------------------
|
||||
; AddToPath Function
|
||||
; - Adds item to Path for All Users
|
||||
; - Overcomes NSIS ReadRegStr limitation of 1024 characters by using Native
|
||||
; Windows Commands
|
||||
;
|
||||
; Usage:
|
||||
; Push "C:\path\to\add"
|
||||
; Call AddToPath
|
||||
;------------------------------------------------------------------------------
|
||||
#------------------------------------------------------------------------------
|
||||
# AddToPath Function
|
||||
# - Adds item to Path for All Users
|
||||
# - Overcomes NSIS ReadRegStr limitation of 1024 characters by using Native
|
||||
# Windows Commands
|
||||
#
|
||||
# Usage:
|
||||
# Push "C:\path\to\add"
|
||||
# Call AddToPath
|
||||
#------------------------------------------------------------------------------
|
||||
!define Environ 'HKLM "SYSTEM\CurrentControlSet\Control\Session Manager\Environment"'
|
||||
Function AddToPath
|
||||
|
||||
Exch $0 ; Path to add
|
||||
Push $1 ; Current Path
|
||||
Push $2 ; Results of StrStr / Length of Path + Path to Add
|
||||
Push $3 ; Handle to Reg / Length of Path
|
||||
Push $4 ; Result of Registry Call
|
||||
Exch $0 # Path to add
|
||||
Push $1 # Current Path
|
||||
Push $2 # Results of StrStr / Length of Path + Path to Add
|
||||
Push $3 # Handle to Reg / Length of Path
|
||||
Push $4 # Result of Registry Call
|
||||
|
||||
; Open a handle to the key in the registry, handle in $3, Error in $4
|
||||
# Open a handle to the key in the registry, handle in $3, Error in $4
|
||||
System::Call "advapi32::RegOpenKey(i 0x80000002, t'SYSTEM\CurrentControlSet\Control\Session Manager\Environment', *i.r3) i.r4"
|
||||
; Make sure registry handle opened successfully (returned 0)
|
||||
# Make sure registry handle opened successfully (returned 0)
|
||||
IntCmp $4 0 0 done done
|
||||
|
||||
; Load the contents of path into $1, Error Code into $4, Path length into $2
|
||||
# Load the contents of path into $1, Error Code into $4, Path length into $2
|
||||
System::Call "advapi32::RegQueryValueEx(i $3, t'PATH', i 0, i 0, t.r1, *i ${NSIS_MAX_STRLEN} r2) i.r4"
|
||||
|
||||
; Close the handle to the registry ($3)
|
||||
# Close the handle to the registry ($3)
|
||||
System::Call "advapi32::RegCloseKey(i $3)"
|
||||
|
||||
; Check for Error Code 234, Path too long for the variable
|
||||
IntCmp $4 234 0 +4 +4 ; $4 == ERROR_MORE_DATA
|
||||
# Check for Error Code 234, Path too long for the variable
|
||||
IntCmp $4 234 0 +4 +4 # $4 == ERROR_MORE_DATA
|
||||
DetailPrint "AddToPath Failed: original length $2 > ${NSIS_MAX_STRLEN}"
|
||||
MessageBox MB_OK \
|
||||
"You may add C:\salt to the %PATH% for convenience when issuing local salt commands from the command line." \
|
||||
/SD IDOK
|
||||
Goto done
|
||||
|
||||
; If no error, continue
|
||||
IntCmp $4 0 +5 ; $4 != NO_ERROR
|
||||
; Error 2 means the Key was not found
|
||||
IntCmp $4 2 +3 ; $4 != ERROR_FILE_NOT_FOUND
|
||||
# If no error, continue
|
||||
IntCmp $4 0 +5 # $4 != NO_ERROR
|
||||
# Error 2 means the Key was not found
|
||||
IntCmp $4 2 +3 # $4 != ERROR_FILE_NOT_FOUND
|
||||
DetailPrint "AddToPath: unexpected error code $4"
|
||||
Goto done
|
||||
StrCpy $1 ""
|
||||
|
||||
; Check if already in PATH
|
||||
Push "$1;" ; The string to search
|
||||
Push "$0;" ; The string to find
|
||||
# Check if already in PATH
|
||||
Push "$1;" # The string to search
|
||||
Push "$0;" # The string to find
|
||||
Call StrStr
|
||||
Pop $2 ; The result of the search
|
||||
StrCmp $2 "" 0 done ; String not found, try again with ';' at the end
|
||||
; Otherwise, it's already in the path
|
||||
Push "$1;" ; The string to search
|
||||
Push "$0\;" ; The string to find
|
||||
Pop $2 # The result of the search
|
||||
StrCmp $2 "" 0 done # String not found, try again with ';' at the end
|
||||
# Otherwise, it's already in the path
|
||||
Push "$1;" # The string to search
|
||||
Push "$0\;" # The string to find
|
||||
Call StrStr
|
||||
Pop $2 ; The result
|
||||
StrCmp $2 "" 0 done ; String not found, continue (add)
|
||||
; Otherwise, it's already in the path
|
||||
Pop $2 # The result
|
||||
StrCmp $2 "" 0 done # String not found, continue (add)
|
||||
# Otherwise, it's already in the path
|
||||
|
||||
; Prevent NSIS string overflow
|
||||
StrLen $2 $0 ; Length of path to add ($2)
|
||||
StrLen $3 $1 ; Length of current path ($3)
|
||||
IntOp $2 $2 + $3 ; Length of current path + path to add ($2)
|
||||
IntOp $2 $2 + 2 ; Account for the additional ';'
|
||||
; $2 = strlen(dir) + strlen(PATH) + sizeof(";")
|
||||
# Prevent NSIS string overflow
|
||||
StrLen $2 $0 # Length of path to add ($2)
|
||||
StrLen $3 $1 # Length of current path ($3)
|
||||
IntOp $2 $2 + $3 # Length of current path + path to add ($2)
|
||||
IntOp $2 $2 + 2 # Account for the additional ';'
|
||||
# $2 = strlen(dir) + strlen(PATH) + sizeof(";")
|
||||
|
||||
; Make sure the new length isn't over the NSIS_MAX_STRLEN
|
||||
# Make sure the new length isn't over the NSIS_MAX_STRLEN
|
||||
IntCmp $2 ${NSIS_MAX_STRLEN} +4 +4 0
|
||||
DetailPrint "AddToPath: new length $2 > ${NSIS_MAX_STRLEN}"
|
||||
MessageBox MB_OK \
|
||||
|
@ -678,18 +700,18 @@ Function AddToPath
|
|||
/SD IDOK
|
||||
Goto done
|
||||
|
||||
; Append dir to PATH
|
||||
# Append dir to PATH
|
||||
DetailPrint "Add to PATH: $0"
|
||||
StrCpy $2 $1 1 -1 ; Copy the last character of the existing path
|
||||
StrCmp $2 ";" 0 +2 ; Check for trailing ';'
|
||||
StrCpy $1 $1 -1 ; remove trailing ';'
|
||||
StrCmp $1 "" +2 ; Make sure Path is not empty
|
||||
StrCpy $0 "$1;$0" ; Append new path at the end ($0)
|
||||
StrCpy $2 $1 1 -1 # Copy the last character of the existing path
|
||||
StrCmp $2 ";" 0 +2 # Check for trailing ';'
|
||||
StrCpy $1 $1 -1 # remove trailing ';'
|
||||
StrCmp $1 "" +2 # Make sure Path is not empty
|
||||
StrCpy $0 "$1;$0" # Append new path at the end ($0)
|
||||
|
||||
; We can use the NSIS command here. Only 'ReadRegStr' is affected
|
||||
# We can use the NSIS command here. Only 'ReadRegStr' is affected
|
||||
WriteRegExpandStr ${Environ} "PATH" $0
|
||||
|
||||
; Broadcast registry change to open programs
|
||||
# Broadcast registry change to open programs
|
||||
SendMessage ${HWND_BROADCAST} ${WM_WININICHANGE} 0 "STR:Environment" /TIMEOUT=5000
|
||||
|
||||
done:
|
||||
|
@ -702,16 +724,16 @@ Function AddToPath
|
|||
FunctionEnd
|
||||
|
||||
|
||||
;------------------------------------------------------------------------------
|
||||
; RemoveFromPath Function
|
||||
; - Removes item from Path for All Users
|
||||
; - Overcomes NSIS ReadRegStr limitation of 1024 characters by using Native
|
||||
; Windows Commands
|
||||
;
|
||||
; Usage:
|
||||
; Push "C:\path\to\add"
|
||||
; Call un.RemoveFromPath
|
||||
;------------------------------------------------------------------------------
|
||||
#------------------------------------------------------------------------------
|
||||
# RemoveFromPath Function
|
||||
# - Removes item from Path for All Users
|
||||
# - Overcomes NSIS ReadRegStr limitation of 1024 characters by using Native
|
||||
# Windows Commands
|
||||
#
|
||||
# Usage:
|
||||
# Push "C:\path\to\add"
|
||||
# Call un.RemoveFromPath
|
||||
#------------------------------------------------------------------------------
|
||||
Function un.RemoveFromPath
|
||||
|
||||
Exch $0
|
||||
|
@ -722,59 +744,59 @@ Function un.RemoveFromPath
|
|||
Push $5
|
||||
Push $6
|
||||
|
||||
; Open a handle to the key in the registry, handle in $3, Error in $4
|
||||
# Open a handle to the key in the registry, handle in $3, Error in $4
|
||||
System::Call "advapi32::RegOpenKey(i 0x80000002, t'SYSTEM\CurrentControlSet\Control\Session Manager\Environment', *i.r3) i.r4"
|
||||
; Make sure registry handle opened successfully (returned 0)
|
||||
# Make sure registry handle opened successfully (returned 0)
|
||||
IntCmp $4 0 0 done done
|
||||
|
||||
; Load the contents of path into $1, Error Code into $4, Path length into $2
|
||||
# Load the contents of path into $1, Error Code into $4, Path length into $2
|
||||
System::Call "advapi32::RegQueryValueEx(i $3, t'PATH', i 0, i 0, t.r1, *i ${NSIS_MAX_STRLEN} r2) i.r4"
|
||||
|
||||
; Close the handle to the registry ($3)
|
||||
# Close the handle to the registry ($3)
|
||||
System::Call "advapi32::RegCloseKey(i $3)"
|
||||
|
||||
; Check for Error Code 234, Path too long for the variable
|
||||
IntCmp $4 234 0 +4 +4 ; $4 == ERROR_MORE_DATA
|
||||
# Check for Error Code 234, Path too long for the variable
|
||||
IntCmp $4 234 0 +4 +4 # $4 == ERROR_MORE_DATA
|
||||
DetailPrint "AddToPath: original length $2 > ${NSIS_MAX_STRLEN}"
|
||||
Goto done
|
||||
|
||||
; If no error, continue
|
||||
IntCmp $4 0 +5 ; $4 != NO_ERROR
|
||||
; Error 2 means the Key was not found
|
||||
IntCmp $4 2 +3 ; $4 != ERROR_FILE_NOT_FOUND
|
||||
# If no error, continue
|
||||
IntCmp $4 0 +5 # $4 != NO_ERROR
|
||||
# Error 2 means the Key was not found
|
||||
IntCmp $4 2 +3 # $4 != ERROR_FILE_NOT_FOUND
|
||||
DetailPrint "AddToPath: unexpected error code $4"
|
||||
Goto done
|
||||
StrCpy $1 ""
|
||||
|
||||
; Ensure there's a trailing ';'
|
||||
StrCpy $5 $1 1 -1 ; Copy the last character of the path
|
||||
StrCmp $5 ";" +2 ; Check for trailing ';', if found continue
|
||||
StrCpy $1 "$1;" ; ensure trailing ';'
|
||||
# Ensure there's a trailing ';'
|
||||
StrCpy $5 $1 1 -1 # Copy the last character of the path
|
||||
StrCmp $5 ";" +2 # Check for trailing ';', if found continue
|
||||
StrCpy $1 "$1;" # ensure trailing ';'
|
||||
|
||||
; Check for our directory inside the path
|
||||
Push $1 ; String to Search
|
||||
Push "$0;" ; Dir to Find
|
||||
# Check for our directory inside the path
|
||||
Push $1 # String to Search
|
||||
Push "$0;" # Dir to Find
|
||||
Call un.StrStr
|
||||
Pop $2 ; The results of the search
|
||||
StrCmp $2 "" done ; If results are empty, we're done, otherwise continue
|
||||
Pop $2 # The results of the search
|
||||
StrCmp $2 "" done # If results are empty, we're done, otherwise continue
|
||||
|
||||
; Remove our Directory from the Path
|
||||
# Remove our Directory from the Path
|
||||
DetailPrint "Remove from PATH: $0"
|
||||
StrLen $3 "$0;" ; Get the length of our dir ($3)
|
||||
StrLen $4 $2 ; Get the length of the return from StrStr ($4)
|
||||
StrCpy $5 $1 -$4 ; $5 is now the part before the path to remove
|
||||
StrCpy $6 $2 "" $3 ; $6 is now the part after the path to remove
|
||||
StrCpy $3 "$5$6" ; Combine $5 and $6
|
||||
StrLen $3 "$0;" # Get the length of our dir ($3)
|
||||
StrLen $4 $2 # Get the length of the return from StrStr ($4)
|
||||
StrCpy $5 $1 -$4 # $5 is now the part before the path to remove
|
||||
StrCpy $6 $2 "" $3 # $6 is now the part after the path to remove
|
||||
StrCpy $3 "$5$6" # Combine $5 and $6
|
||||
|
||||
; Check for Trailing ';'
|
||||
StrCpy $5 $3 1 -1 ; Load the last character of the string
|
||||
StrCmp $5 ";" 0 +2 ; Check for ';'
|
||||
StrCpy $3 $3 -1 ; remove trailing ';'
|
||||
# Check for Trailing ';'
|
||||
StrCpy $5 $3 1 -1 # Load the last character of the string
|
||||
StrCmp $5 ";" 0 +2 # Check for ';'
|
||||
StrCpy $3 $3 -1 # remove trailing ';'
|
||||
|
||||
; Write the new path to the registry
|
||||
# Write the new path to the registry
|
||||
WriteRegExpandStr ${Environ} "PATH" $3
|
||||
|
||||
; Broadcast the change to all open applications
|
||||
# Broadcast the change to all open applications
|
||||
SendMessage ${HWND_BROADCAST} ${WM_WININICHANGE} 0 "STR:Environment" /TIMEOUT=5000
|
||||
|
||||
done:
|
||||
|
@ -808,6 +830,7 @@ Function getMinionConfig
|
|||
confFound:
|
||||
FileOpen $0 "$INSTDIR\conf\minion" r
|
||||
|
||||
ClearErrors
|
||||
confLoop:
|
||||
FileRead $0 $1
|
||||
IfErrors EndOfFile
|
||||
|
@ -838,68 +861,69 @@ FunctionEnd
|
|||
Function updateMinionConfig
|
||||
|
||||
ClearErrors
|
||||
FileOpen $0 "$INSTDIR\conf\minion" "r" ; open target file for reading
|
||||
GetTempFileName $R0 ; get new temp file name
|
||||
FileOpen $1 $R0 "w" ; open temp file for writing
|
||||
FileOpen $0 "$INSTDIR\conf\minion" "r" # open target file for reading
|
||||
GetTempFileName $R0 # get new temp file name
|
||||
FileOpen $1 $R0 "w" # open temp file for writing
|
||||
|
||||
loop: ; loop through each line
|
||||
FileRead $0 $2 ; read line from target file
|
||||
IfErrors done ; end if errors are encountered (end of line)
|
||||
loop: # loop through each line
|
||||
FileRead $0 $2 # read line from target file
|
||||
IfErrors done # end if errors are encountered (end of line)
|
||||
|
||||
${If} $MasterHost_State != "" ; if master is empty
|
||||
${AndIf} $MasterHost_State != "salt" ; and if master is not 'salt'
|
||||
${StrLoc} $3 $2 "master:" ">" ; where is 'master:' in this line
|
||||
${If} $3 == 0 ; is it in the first...
|
||||
${OrIf} $3 == 1 ; or second position (account for comments)
|
||||
StrCpy $2 "master: $MasterHost_State$\r$\n" ; write the master
|
||||
${EndIf} ; close if statement
|
||||
${EndIf} ; close if statement
|
||||
${If} $MasterHost_State != "" # if master is empty
|
||||
${AndIf} $MasterHost_State != "salt" # and if master is not 'salt'
|
||||
${StrLoc} $3 $2 "master:" ">" # where is 'master:' in this line
|
||||
${If} $3 == 0 # is it in the first...
|
||||
${OrIf} $3 == 1 # or second position (account for comments)
|
||||
StrCpy $2 "master: $MasterHost_State$\r$\n" # write the master
|
||||
${EndIf} # close if statement
|
||||
${EndIf} # close if statement
|
||||
|
||||
${If} $MinionName_State != "" ; if minion is empty
|
||||
${AndIf} $MinionName_State != "hostname" ; and if minion is not 'hostname'
|
||||
${StrLoc} $3 $2 "id:" ">" ; where is 'id:' in this line
|
||||
${If} $3 == 0 ; is it in the first...
|
||||
${OrIf} $3 == 1 ; or the second position (account for comments)
|
||||
StrCpy $2 "id: $MinionName_State$\r$\n" ; change line
|
||||
${EndIf} ; close if statement
|
||||
${EndIf} ; close if statement
|
||||
${If} $MinionName_State != "" # if minion is empty
|
||||
${AndIf} $MinionName_State != "hostname" # and if minion is not 'hostname'
|
||||
${StrLoc} $3 $2 "id:" ">" # where is 'id:' in this line
|
||||
${If} $3 == 0 # is it in the first...
|
||||
${OrIf} $3 == 1 # or the second position (account for comments)
|
||||
StrCpy $2 "id: $MinionName_State$\r$\n" # change line
|
||||
${EndIf} # close if statement
|
||||
${EndIf} # close if statement
|
||||
|
||||
FileWrite $1 $2 ; write changed or unchanged line to temp file
|
||||
FileWrite $1 $2 # write changed or unchanged line to temp file
|
||||
Goto loop
|
||||
|
||||
done:
|
||||
FileClose $0 ; close target file
|
||||
FileClose $1 ; close temp file
|
||||
Delete "$INSTDIR\conf\minion" ; delete target file
|
||||
CopyFiles /SILENT $R0 "$INSTDIR\conf\minion" ; copy temp file to target file
|
||||
Delete $R0 ; delete temp file
|
||||
FileClose $0 # close target file
|
||||
FileClose $1 # close temp file
|
||||
Delete "$INSTDIR\conf\minion" # delete target file
|
||||
CopyFiles /SILENT $R0 "$INSTDIR\conf\minion" # copy temp file to target file
|
||||
Delete $R0 # delete temp file
|
||||
|
||||
FunctionEnd
|
||||
|
||||
|
||||
Function parseCommandLineSwitches
|
||||
|
||||
; Load the parameters
|
||||
# Load the parameters
|
||||
${GetParameters} $R0
|
||||
|
||||
; Check for start-minion switches
|
||||
; /start-service is to be deprecated, so we must check for both
|
||||
# Check for start-minion switches
|
||||
# /start-service is to be deprecated, so we must check for both
|
||||
${GetOptions} $R0 "/start-service=" $R1
|
||||
${GetOptions} $R0 "/start-minion=" $R2
|
||||
|
||||
# Service: Start Salt Minion
|
||||
${IfNot} $R2 == ""
|
||||
; If start-minion was passed something, then set it
|
||||
# If start-minion was passed something, then set it
|
||||
StrCpy $StartMinion $R2
|
||||
${ElseIfNot} $R1 == ""
|
||||
; If start-service was passed something, then set StartMinion to that
|
||||
# If start-service was passed something, then set StartMinion to that
|
||||
StrCpy $StartMinion $R1
|
||||
${Else}
|
||||
; Otherwise default to 1
|
||||
# Otherwise default to 1
|
||||
StrCpy $StartMinion 1
|
||||
${EndIf}
|
||||
|
||||
# Service: Minion Startup Type Delayed
|
||||
ClearErrors
|
||||
${GetOptions} $R0 "/start-minion-delayed" $R1
|
||||
IfErrors start_minion_delayed_not_found
|
||||
StrCpy $StartMinionDelayed 1
|
||||
|
|
|
@@ -19,9 +19,9 @@ Function Get-Settings {
    "Python2Dir" = "C:\Python27"
    "Scripts2Dir" = "C:\Python27\Scripts"
    "SitePkgs2Dir" = "C:\Python27\Lib\site-packages"
-   "Python3Dir" = "C:\Program Files\Python35"
-   "Scripts3Dir" = "C:\Program Files\Python35\Scripts"
-   "SitePkgs3Dir" = "C:\Program Files\Python35\Lib\site-packages"
+   "Python3Dir" = "C:\Python35"
+   "Scripts3Dir" = "C:\Python35\Scripts"
+   "SitePkgs3Dir" = "C:\Python35\Lib\site-packages"
    "DownloadDir" = "$env:Temp\DevSalt"
}
# The script deletes the DownLoadDir (above) for each install.
@@ -200,7 +200,7 @@ class LoadAuth(object):
        '''
        if not self.authenticate_eauth(load):
            return {}
        fstr = '{0}.auth'.format(load['eauth'])

        hash_type = getattr(hashlib, self.opts.get('hash_type', 'md5'))
        tok = str(hash_type(os.urandom(512)).hexdigest())
        t_path = os.path.join(self.opts['token_dir'], tok)
@@ -224,8 +224,9 @@ class LoadAuth(object):
            acl_ret = self.__get_acl(load)
            tdata['auth_list'] = acl_ret

-       if 'groups' in load:
-           tdata['groups'] = load['groups']
+       groups = self.get_groups(load)
+       if groups:
+           tdata['groups'] = groups

        try:
            with salt.utils.files.set_umask(0o177):
@@ -345,7 +346,7 @@ class LoadAuth(object):
            return False
        return True

-   def get_auth_list(self, load):
+   def get_auth_list(self, load, token=None):
        '''
        Retrieve access list for the user specified in load.
        The list is built by eauth module or from master eauth configuration.
@@ -353,30 +354,37 @@ class LoadAuth(object):
        list if the user has no rights to execute anything on this master and returns non-empty list
        if user is allowed to execute particular functions.
        '''
+       # Get auth list from token
+       if token and self.opts['keep_acl_in_token'] and 'auth_list' in token:
+           return token['auth_list']
        # Get acl from eauth module.
        auth_list = self.__get_acl(load)
        if auth_list is not None:
            return auth_list

-       if load['eauth'] not in self.opts['external_auth']:
+       eauth = token['eauth'] if token else load['eauth']
+       if eauth not in self.opts['external_auth']:
            # No matching module is allowed in config
            log.warning('Authorization failure occurred.')
            return None

-       name = self.load_name(load)  # The username we are attempting to auth with
-       groups = self.get_groups(load)  # The groups this user belongs to
-       eauth_config = self.opts['external_auth'][load['eauth']]
-       if groups is None or groups is False:
+       if token:
+           name = token['name']
+           groups = token.get('groups')
+       else:
+           name = self.load_name(load)  # The username we are attempting to auth with
+           groups = self.get_groups(load)  # The groups this user belongs to
+       eauth_config = self.opts['external_auth'][eauth]
+       if not groups:
            groups = []
        group_perm_keys = [item for item in eauth_config if item.endswith('%')]  # The configured auth groups

        # First we need to know if the user is allowed to proceed via any of their group memberships.
        group_auth_match = False
        for group_config in group_perm_keys:
-           group_config = group_config.rstrip('%')
-           for group in groups:
-               if group == group_config:
-                   group_auth_match = True
+           if group_config.rstrip('%') in groups:
+               group_auth_match = True
                break
        # If a group_auth_match is set it means only that we have a
        # user which matches at least one or more of the groups defined
        # in the configuration file.
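A simplified, self-contained sketch of the token short-circuit this hunk introduces (plain Python, not the actual LoadAuth class; the dictionaries below are invented stand-ins for a real token and opts):

    # When an eauth token already carries its ACL, it is returned directly;
    # otherwise the eauth name comes from the token (or from the load).
    def get_auth_list(opts, load, token=None):
        if token and opts['keep_acl_in_token'] and 'auth_list' in token:
            return token['auth_list']
        eauth = token['eauth'] if token else load['eauth']
        if eauth not in opts['external_auth']:
            return None   # no matching eauth module configured
        # ... name/group resolution and ACL lookup would follow here ...
        return []

    opts = {'keep_acl_in_token': True, 'external_auth': {'pam': {}}}
    token = {'eauth': 'pam', 'name': 'fred', 'auth_list': ['@wheel', 'test.*']}
    print(get_auth_list(opts, {}, token))   # ['@wheel', 'test.*']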
@@ -306,7 +306,7 @@ def groups(username, **kwargs):
    '''
    group_list = []

-   bind = _bind(username, kwargs['password'],
+   bind = _bind(username, kwargs.get('password'),
                 anonymous=_config('anonymous', mandatory=False))
    if bind:
        log.debug('ldap bind to determine group membership succeeded!')
@@ -371,7 +371,7 @@ def groups(username, **kwargs):
        search_results = bind.search_s(search_base,
                                       ldap.SCOPE_SUBTREE,
                                       search_string,
-                                      [_config('accountattributename'), 'cn'])
+                                      [_config('accountattributename'), 'cn', _config('groupattribute')])
        for _, entry in search_results:
            if username in entry[_config('accountattributename')]:
                group_list.append(entry['cn'][0])
salt/cache/consul.py (vendored, 9 lines changed)
@@ -4,6 +4,8 @@ Minion data cache plugin for Consul key/value data store.

.. versionadded:: 2016.11.2

+:depends: python-consul >= 0.2.0
+
It is up to the system administrator to set up and configure the Consul
infrastructure. All is needed for this plugin is a working Consul agent
with a read-write access to the key-value store.
@@ -81,8 +83,11 @@ def __virtual__():
        'verify': __opts__.get('consul.verify', True),
    }

-   global api
-   api = consul.Consul(**consul_kwargs)
+   try:
+       global api
+       api = consul.Consul(**consul_kwargs)
+   except AttributeError:
+       return (False, "Failed to invoke consul.Consul, please make sure you have python-consul >= 0.2.0 installed")

    return __virtualname__
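A generic sketch of the same optional-dependency guard (assumed names, not taken from the diff): a module-level ``__virtual__`` that declines to load, with a reason, when the client library is missing or predates the required API:

    try:
        import consul
        HAS_CONSUL = True
    except ImportError:
        HAS_CONSUL = False

    __virtualname__ = 'consul'

    def __virtual__():
        if not HAS_CONSUL:
            return (False, 'python-consul is not installed')
        if not hasattr(consul, 'Consul'):
            # Very old python-consul releases (< 0.2.0) lack the Consul class.
            return (False, 'python-consul >= 0.2.0 is required')
        return __virtualname__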
@@ -14,7 +14,7 @@ from __future__ import absolute_import
# Import Salt libs
import salt.spm
import salt.utils.parsers as parsers
-from salt.utils.verify import verify_log
+from salt.utils.verify import verify_log, verify_env


class SPM(parsers.SPMParser):
@@ -29,6 +29,10 @@ class SPM(parsers.SPMParser):
        ui = salt.spm.SPMCmdlineInterface()
        self.parse_args()
        self.setup_logfile_logger()
+       v_dirs = [
+           self.config['cachedir'],
+       ]
+       verify_env(v_dirs, self.config['user'],)
        verify_log(self.config)
        client = salt.spm.SPMClient(ui, self.config)
        client.run(self.args)
@@ -55,7 +55,7 @@ _DFLT_LOG_DATEFMT = '%H:%M:%S'
_DFLT_LOG_DATEFMT_LOGFILE = '%Y-%m-%d %H:%M:%S'
_DFLT_LOG_FMT_CONSOLE = '[%(levelname)-8s] %(message)s'
_DFLT_LOG_FMT_LOGFILE = (
-   '%(asctime)s,%(msecs)03d [%(name)-17s][%(levelname)-8s][%(process)d] %(message)s'
+   '%(asctime)s,%(msecs)03d [%(name)-17s:%(lineno)-4d][%(levelname)-8s][%(process)d] %(message)s'
)
_DFLT_REFSPECS = ['+refs/heads/*:refs/remotes/origin/*', '+refs/tags/*:refs/tags/*']
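For reference, a minimal standalone snippet showing what the added ``%(lineno)-4d`` field contributes to log-file records (plain Python ``logging``, not Salt's own log setup):

    import logging

    # Same record fields as the new default log-file format above.
    fmt = '%(asctime)s,%(msecs)03d [%(name)-17s:%(lineno)-4d][%(levelname)-8s][%(process)d] %(message)s'
    logging.basicConfig(format=fmt, datefmt='%Y-%m-%d %H:%M:%S', level=logging.INFO)

    log = logging.getLogger('salt.loaded.ext.demo')
    log.info('the line number of this call now appears in the log record')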
@@ -334,7 +334,7 @@ VALID_OPTS = {
    # Whether or not scheduled mine updates should be accompanied by a job return for the job cache
    'mine_return_job': bool,

-   # Schedule a mine update every n number of seconds
+   # The number of minutes between mine updates.
    'mine_interval': int,

    # The ipc strategy. (i.e., sockets versus tcp, etc)
@@ -569,6 +569,23 @@
    # False in 2016.3.0
    'add_proxymodule_to_opts': bool,

+   # Merge pillar data into configuration opts.
+   # As multiple proxies can run on the same server, we may need different
+   # configuration options for each, while there's one single configuration file.
+   # The solution is merging the pillar data of each proxy minion into the opts.
+   'proxy_merge_pillar_in_opts': bool,
+
+   # Deep merge of pillar data into configuration opts.
+   # Evaluated only when `proxy_merge_pillar_in_opts` is True.
+   'proxy_deep_merge_pillar_in_opts': bool,
+
+   # The strategy used when merging pillar into opts.
+   # Considered only when `proxy_merge_pillar_in_opts` is True.
+   'proxy_merge_pillar_in_opts_strategy': str,
+
+   # Allow enabling mine details using pillar data.
+   'proxy_mines_pillar': bool,
+
    # In some particular cases, always alive proxies are not beneficial.
    # This option can be used in those less dynamic environments:
    # the user can request the connection
@@ -1637,6 +1654,12 @@ DEFAULT_PROXY_MINION_OPTS = {
    'append_minionid_config_dirs': ['cachedir', 'pidfile', 'default_include', 'extension_modules'],
    'default_include': 'proxy.d/*.conf',

+   'proxy_merge_pillar_in_opts': False,
+   'proxy_deep_merge_pillar_in_opts': False,
+   'proxy_merge_pillar_in_opts_strategy': 'smart',
+
+   'proxy_mines_pillar': True,
+
    # By default, proxies will preserve the connection.
    # If this option is set to False,
    # the connection with the remote dumb device
@@ -1055,12 +1055,7 @@ class LocalFuncs(object):
                return dict(error=dict(name=err_name,
                                       message='Authentication failure of type "token" occurred.'))
            username = token['name']
-           if self.opts['keep_acl_in_token'] and 'auth_list' in token:
-               auth_list = token['auth_list']
-           else:
-               load['eauth'] = token['eauth']
-               load['username'] = username
-               auth_list = self.loadauth.get_auth_list(load)
+           auth_list = self.loadauth.get_auth_list(load, token)
        else:
            auth_type = 'eauth'
            err_name = 'EauthAuthenticationError'
@@ -1102,12 +1097,7 @@ class LocalFuncs(object):
                return dict(error=dict(name=err_name,
                                       message='Authentication failure of type "token" occurred.'))
            username = token['name']
-           if self.opts['keep_acl_in_token'] and 'auth_list' in token:
-               auth_list = token['auth_list']
-           else:
-               load['eauth'] = token['eauth']
-               load['username'] = username
-               auth_list = self.loadauth.get_auth_list(load)
+           auth_list = self.loadauth.get_auth_list(load, token)
        elif 'eauth' in load:
            auth_type = 'eauth'
            err_name = 'EauthAuthenticationError'
@@ -1217,12 +1207,7 @@ class LocalFuncs(object):
                return ''

            # Get acl from eauth module.
-           if self.opts['keep_acl_in_token'] and 'auth_list' in token:
-               auth_list = token['auth_list']
-           else:
-               extra['eauth'] = token['eauth']
-               extra['username'] = token['name']
-               auth_list = self.loadauth.get_auth_list(extra)
+           auth_list = self.loadauth.get_auth_list(extra, token)

            # Authorize the request
            if not self.ckminions.auth_check(
@ -1270,10 +1270,10 @@ class RemoteClient(Client):
|
|||
hash_type = self.opts.get('hash_type', 'md5')
|
||||
ret['hsum'] = salt.utils.get_hash(path, form=hash_type)
|
||||
ret['hash_type'] = hash_type
|
||||
return ret, list(os.stat(path))
|
||||
return ret
|
||||
load = {'path': path,
|
||||
'saltenv': saltenv,
|
||||
'cmd': '_file_hash_and_stat'}
|
||||
'cmd': '_file_hash'}
|
||||
return self.channel.send(load)
|
||||
|
||||
def hash_file(self, path, saltenv='base'):
|
||||
|
@ -1282,14 +1282,33 @@ class RemoteClient(Client):
|
|||
master file server prepend the path with salt://<file on server>
|
||||
otherwise, prepend the file with / for a local file.
|
||||
'''
|
||||
return self.__hash_and_stat_file(path, saltenv)[0]
|
||||
return self.__hash_and_stat_file(path, saltenv)
|
||||
|
||||
def hash_and_stat_file(self, path, saltenv='base'):
|
||||
'''
|
||||
The same as hash_file, but also return the file's mode, or None if no
|
||||
mode data is present.
|
||||
'''
|
||||
return self.__hash_and_stat_file(path, saltenv)
|
||||
hash_result = self.hash_file(path, saltenv)
|
||||
try:
|
||||
path = self._check_proto(path)
|
||||
except MinionError as err:
|
||||
if not os.path.isfile(path):
|
||||
return hash_result, None
|
||||
else:
|
||||
try:
|
||||
return hash_result, list(os.stat(path))
|
||||
except Exception:
|
||||
return hash_result, None
|
||||
load = {'path': path,
|
||||
'saltenv': saltenv,
|
||||
'cmd': '_file_find'}
|
||||
fnd = self.channel.send(load)
|
||||
try:
|
||||
stat_result = fnd.get('stat')
|
||||
except AttributeError:
|
||||
stat_result = None
|
||||
return hash_result, stat_result
|
||||
|
||||
def list_env(self, saltenv='base'):
|
||||
'''
|
||||
|
|
|
@ -17,6 +17,7 @@ metadata server set `metadata_server_grains: True`.
from __future__ import absolute_import

# Import python libs
import json
import os
import socket

@ -47,14 +48,28 @@ def _search(prefix="latest/"):
Recursively look up all grains in the metadata server
'''
ret = {}
for line in http.query(os.path.join(HOST, prefix))['body'].split('\n'):
linedata = http.query(os.path.join(HOST, prefix))
if 'body' not in linedata:
return ret
for line in linedata['body'].split('\n'):
if line.endswith('/'):
ret[line[:-1]] = _search(prefix=os.path.join(prefix, line))
elif prefix == 'latest/':
# (gtmanfred) The first level should have a forward slash since
# they have stuff underneath. This will not be doubled up though,
# because lines ending with a slash are checked first.
ret[line] = _search(prefix=os.path.join(prefix, line + '/'))
elif '=' in line:
key, value = line.split('=')
ret[value] = _search(prefix=os.path.join(prefix, key))
else:
ret[line] = http.query(os.path.join(HOST, prefix, line))['body']
retdata = http.query(os.path.join(HOST, prefix, line)).get('body', None)
# (gtmanfred) This try except block is slightly faster than
# checking if the string starts with a curly brace
try:
ret[line] = json.loads(retdata)
except ValueError:
ret[line] = retdata
return ret
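The json.loads/ValueError fallback above is what keeps plain metadata strings as strings while structured documents become dicts; a small standalone sketch (the sample payloads are invented, not real EC2 responses):

    import json

    def _maybe_json(retdata):
        try:
            return json.loads(retdata)   # e.g. an IAM credentials document
        except ValueError:
            return retdata               # plain scalars stay untouched

    _maybe_json('{"Code": "Success"}')   # -> dict
    _maybe_json('i-0123456789abcdef0')   # -> same string back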
|
37
salt/key.py
|
@ -489,7 +489,7 @@ class Key(object):
|
|||
minions = []
|
||||
for key, val in six.iteritems(keys):
|
||||
minions.extend(val)
|
||||
if not self.opts.get('preserve_minion_cache', False) or not preserve_minions:
|
||||
if not self.opts.get('preserve_minion_cache', False):
|
||||
m_cache = os.path.join(self.opts['cachedir'], self.ACC)
|
||||
if os.path.isdir(m_cache):
|
||||
for minion in os.listdir(m_cache):
|
||||
|
@ -736,7 +736,7 @@ class Key(object):
|
|||
def delete_key(self,
|
||||
match=None,
|
||||
match_dict=None,
|
||||
preserve_minions=False,
|
||||
preserve_minions=None,
|
||||
revoke_auth=False):
|
||||
'''
|
||||
Delete public keys. If "match" is passed, it is evaluated as a glob.
|
||||
|
@ -774,11 +774,10 @@ class Key(object):
|
|||
salt.utils.event.tagify(prefix='key'))
|
||||
except (OSError, IOError):
|
||||
pass
|
||||
if preserve_minions:
|
||||
preserve_minions_list = matches.get('minions', [])
|
||||
if self.opts.get('preserve_minions') is True:
|
||||
self.check_minion_cache(preserve_minions=matches.get('minions', []))
|
||||
else:
|
||||
preserve_minions_list = []
|
||||
self.check_minion_cache(preserve_minions=preserve_minions_list)
|
||||
self.check_minion_cache()
|
||||
if self.opts.get('rotate_aes_key'):
|
||||
salt.crypt.dropfile(self.opts['cachedir'], self.opts['user'])
|
||||
return (
|
||||
|
@ -969,16 +968,17 @@ class RaetKey(Key):
|
|||
minions.extend(val)
|
||||
|
||||
m_cache = os.path.join(self.opts['cachedir'], 'minions')
|
||||
if os.path.isdir(m_cache):
|
||||
for minion in os.listdir(m_cache):
|
||||
if minion not in minions:
|
||||
shutil.rmtree(os.path.join(m_cache, minion))
|
||||
cache = salt.cache.factory(self.opts)
|
||||
clist = cache.list(self.ACC)
|
||||
if clist:
|
||||
for minion in clist:
|
||||
if not self.opts.get('preserve_minion_cache', False):
|
||||
if os.path.isdir(m_cache):
|
||||
for minion in os.listdir(m_cache):
|
||||
if minion not in minions and minion not in preserve_minions:
|
||||
cache.flush('{0}/{1}'.format(self.ACC, minion))
|
||||
shutil.rmtree(os.path.join(m_cache, minion))
|
||||
cache = salt.cache.factory(self.opts)
|
||||
clist = cache.list(self.ACC)
|
||||
if clist:
|
||||
for minion in clist:
|
||||
if minion not in minions and minion not in preserve_minions:
|
||||
cache.flush('{0}/{1}'.format(self.ACC, minion))
|
||||
|
||||
kind = self.opts.get('__role', '') # application kind
|
||||
if kind not in salt.utils.kinds.APPL_KINDS:
|
||||
|
@ -1220,7 +1220,7 @@ class RaetKey(Key):
|
|||
def delete_key(self,
|
||||
match=None,
|
||||
match_dict=None,
|
||||
preserve_minions=False,
|
||||
preserve_minions=None,
|
||||
revoke_auth=False):
|
||||
'''
|
||||
Delete public keys. If "match" is passed, it is evaluated as a glob.
|
||||
|
@ -1251,7 +1251,10 @@ class RaetKey(Key):
|
|||
os.remove(os.path.join(self.opts['pki_dir'], status, key))
|
||||
except (OSError, IOError):
|
||||
pass
|
||||
self.check_minion_cache(preserve_minions=matches.get('minions', []))
|
||||
if self.opts.get('preserve_minions') is True:
|
||||
self.check_minion_cache(preserve_minions=matches.get('minions', []))
|
||||
else:
|
||||
self.check_minion_cache()
|
||||
return (
|
||||
self.name_match(match) if match is not None
|
||||
else self.dict_match(matches)
|
||||
|
|
|
@ -270,7 +270,7 @@ def raw_mod(opts, name, functions, mod='modules'):
|
|||
testmod['test.ping']()
|
||||
'''
|
||||
loader = LazyLoader(
|
||||
_module_dirs(opts, mod, 'rawmodule'),
|
||||
_module_dirs(opts, mod, 'module'),
|
||||
opts,
|
||||
tag='rawmodule',
|
||||
virtual_enable=False,
|
||||
|
|
|
@ -1705,12 +1705,7 @@ class ClearFuncs(object):
|
|||
message='Authentication failure of type "token" occurred.'))
|
||||
|
||||
# Authorize
|
||||
if self.opts['keep_acl_in_token'] and 'auth_list' in token:
|
||||
auth_list = token['auth_list']
|
||||
else:
|
||||
clear_load['eauth'] = token['eauth']
|
||||
clear_load['username'] = token['name']
|
||||
auth_list = self.loadauth.get_auth_list(clear_load)
|
||||
auth_list = self.loadauth.get_auth_list(clear_load, token)
|
||||
|
||||
if not self.ckminions.runner_check(auth_list, clear_load['fun']):
|
||||
return dict(error=dict(name='TokenAuthenticationError',
|
||||
|
@ -1774,12 +1769,7 @@ class ClearFuncs(object):
|
|||
message='Authentication failure of type "token" occurred.'))
|
||||
|
||||
# Authorize
|
||||
if self.opts['keep_acl_in_token'] and 'auth_list' in token:
|
||||
auth_list = token['auth_list']
|
||||
else:
|
||||
clear_load['eauth'] = token['eauth']
|
||||
clear_load['username'] = token['name']
|
||||
auth_list = self.loadauth.get_auth_list(clear_load)
|
||||
auth_list = self.loadauth.get_auth_list(clear_load, token)
|
||||
if not self.ckminions.wheel_check(auth_list, clear_load['fun']):
|
||||
return dict(error=dict(name='TokenAuthenticationError',
|
||||
message=('Authentication failure of type "token" occurred for '
|
||||
|
@ -1900,12 +1890,7 @@ class ClearFuncs(object):
|
|||
return ''
|
||||
|
||||
# Get acl
|
||||
if self.opts['keep_acl_in_token'] and 'auth_list' in token:
|
||||
auth_list = token['auth_list']
|
||||
else:
|
||||
extra['eauth'] = token['eauth']
|
||||
extra['username'] = token['name']
|
||||
auth_list = self.loadauth.get_auth_list(extra)
|
||||
auth_list = self.loadauth.get_auth_list(extra, token)
|
||||
|
||||
# Authorize the request
|
||||
if not self.ckminions.auth_check(
|
||||
|
|
|
@ -100,6 +100,7 @@ import salt.defaults.exitcodes
|
|||
import salt.cli.daemons
|
||||
import salt.log.setup
|
||||
|
||||
import salt.utils.dictupdate
|
||||
from salt.config import DEFAULT_MINION_OPTS
|
||||
from salt.defaults import DEFAULT_TARGET_DELIM
|
||||
from salt.executors import FUNCTION_EXECUTORS
|
||||
|
@ -3118,6 +3119,26 @@ class ProxyMinion(Minion):
if 'proxy' not in self.opts:
self.opts['proxy'] = self.opts['pillar']['proxy']

if self.opts.get('proxy_merge_pillar_in_opts'):
# Override proxy opts with pillar data when the user required.
self.opts = salt.utils.dictupdate.merge(self.opts,
self.opts['pillar'],
strategy=self.opts.get('proxy_merge_pillar_in_opts_strategy'),
merge_lists=self.opts.get('proxy_deep_merge_pillar_in_opts', False))
elif self.opts.get('proxy_mines_pillar'):
# Even when not required, some details such as mine configuration
# should be merged anyway whenever possible.
if 'mine_interval' in self.opts['pillar']:
self.opts['mine_interval'] = self.opts['pillar']['mine_interval']
if 'mine_functions' in self.opts['pillar']:
general_proxy_mines = self.opts.get('mine_functions', [])
specific_proxy_mines = self.opts['pillar']['mine_functions']
try:
self.opts['mine_functions'] = general_proxy_mines + specific_proxy_mines
except TypeError as terr:
log.error('Unable to merge mine functions from the pillar in the opts, for proxy {}'.format(
self.opts['id']))

fq_proxyname = self.opts['proxy']['proxytype']

# Need to load the modules so they get all the dunder variables
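A hedged sketch of the mine_functions merge guarded by the TypeError handler above; the mine entries are invented, and the except branch here keeps the general mines where the proxy code only logs an error:

    general_proxy_mines = [{'network.traceroute': ['198.51.100.1']}]
    specific_proxy_mines = [{'net.lldp': []}]
    try:
        mine_functions = general_proxy_mines + specific_proxy_mines
    except TypeError:
        # one side was a dict-style mine_functions block, so list
        # concatenation fails; the proxy code logs an error at this point
        mine_functions = general_proxy_mines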
@ -12,6 +12,7 @@ import logging
|
|||
|
||||
# Import Salt libs
|
||||
import salt.utils
|
||||
import salt.utils.path
|
||||
|
||||
# Import 3rd-party libs
|
||||
import salt.ext.six as six
|
||||
|
@ -241,4 +242,4 @@ def _read_link(name):
|
|||
Throws an OSError if the link does not exist
|
||||
'''
|
||||
alt_link_path = '/etc/alternatives/{0}'.format(name)
|
||||
return os.readlink(alt_link_path)
|
||||
return salt.utils.path.readlink(alt_link_path)
|
||||
|
|
|
@ -446,11 +446,15 @@ def config(name, config, edit=True):
|
|||
salt '*' apache.config /etc/httpd/conf.d/ports.conf config="[{'Listen': '22'}]"
|
||||
'''
|
||||
|
||||
configs = []
|
||||
for entry in config:
|
||||
key = next(six.iterkeys(entry))
|
||||
configs = _parse_config(entry[key], key)
|
||||
if edit:
|
||||
with salt.utils.fopen(name, 'w') as configfile:
|
||||
configfile.write('# This file is managed by Salt.\n')
|
||||
configfile.write(configs)
|
||||
return configs
|
||||
configs.append(_parse_config(entry[key], key))
|
||||
|
||||
# Python auto-correct line endings
|
||||
configstext = "\n".join(configs)
|
||||
if edit:
|
||||
with salt.utils.fopen(name, 'w') as configfile:
|
||||
configfile.write('# This file is managed by Salt.\n')
|
||||
configfile.write(configstext)
|
||||
return configstext
|
||||
|
|
|
@ -17,10 +17,10 @@ except ImportError:
|
|||
from pipes import quote as _cmd_quote
|
||||
|
||||
# Import salt libs
|
||||
import salt.utils
|
||||
import salt.utils.yast
|
||||
import salt.utils.preseed
|
||||
import salt.utils.kickstart
|
||||
import salt.utils.validate.path
|
||||
import salt.syspaths
|
||||
from salt.exceptions import SaltInvocationError
|
||||
|
||||
|
@ -403,6 +403,11 @@ def _bootstrap_deb(
|
|||
log.error('Required tool debootstrap is not installed.')
|
||||
return False
|
||||
|
||||
if static_qemu and not salt.utils.validate.path.is_executable(static_qemu):
|
||||
log.error('Required tool qemu not '
|
||||
'present/readable at: {0}'.format(static_qemu))
|
||||
return False
|
||||
|
||||
if isinstance(pkgs, (list, tuple)):
|
||||
pkgs = ','.join(pkgs)
|
||||
if isinstance(exclude_pkgs, (list, tuple)):
|
||||
|
@ -427,11 +432,13 @@ def _bootstrap_deb(
|
|||
|
||||
__salt__['cmd.run'](deb_args, python_shell=False)
|
||||
|
||||
__salt__['cmd.run'](
|
||||
'cp {qemu} {root}/usr/bin/'.format(
|
||||
qemu=_cmd_quote(static_qemu), root=_cmd_quote(root)
|
||||
if static_qemu:
|
||||
__salt__['cmd.run'](
|
||||
'cp {qemu} {root}/usr/bin/'.format(
|
||||
qemu=_cmd_quote(static_qemu), root=_cmd_quote(root)
|
||||
)
|
||||
)
|
||||
)
|
||||
|
||||
env = {'DEBIAN_FRONTEND': 'noninteractive',
|
||||
'DEBCONF_NONINTERACTIVE_SEEN': 'true',
|
||||
'LC_ALL': 'C',
|
||||
|
|
|
@ -31,7 +31,7 @@ def __virtual__():
|
|||
if __grains__['kernel'] in ('Linux', 'OpenBSD', 'NetBSD'):
|
||||
return __virtualname__
|
||||
return (False, 'The groupadd execution module cannot be loaded: '
|
||||
' only available on Linux, OpenBSD and NetBSD')
|
||||
' only available on Linux, OpenBSD and NetBSD')
|
||||
|
||||
|
||||
def add(name, gid=None, system=False, root=None):
|
||||
|
@ -44,12 +44,12 @@ def add(name, gid=None, system=False, root=None):

salt '*' group.add foo 3456
'''
cmd = 'groupadd '
cmd = ['groupadd']
if gid:
cmd += '-g {0} '.format(gid)
cmd.append('-g {0}'.format(gid))
if system and __grains__['kernel'] != 'OpenBSD':
cmd += '-r '
cmd += name
cmd.append('-r')
cmd.append(name)

if root is not None:
cmd.extend(('-R', root))
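The pattern above moves from string concatenation to an argv list so nothing is re-parsed by a shell. A minimal sketch with example inputs (the flag and its value are split into separate elements here purely for clarity):

    cmd = ['groupadd']
    gid = 3456
    if gid:
        cmd.extend(['-g', str(gid)])
    cmd.append('foo')
    # Inside a Salt module this would be run as
    # __salt__['cmd.retcode'](cmd, python_shell=False), so every element is
    # passed to the OS verbatim, with no shell quoting concerns.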
@ -69,7 +69,7 @@ def delete(name, root=None):
|
|||
|
||||
salt '*' group.delete foo
|
||||
'''
|
||||
cmd = ('groupdel', name)
|
||||
cmd = ['groupdel', name]
|
||||
|
||||
if root is not None:
|
||||
cmd.extend(('-R', root))
|
||||
|
@ -140,7 +140,7 @@ def chgid(name, gid, root=None):
|
|||
pre_gid = __salt__['file.group_to_gid'](name)
|
||||
if gid == pre_gid:
|
||||
return True
|
||||
cmd = ('groupmod', '-g', gid, name)
|
||||
cmd = ['groupmod', '-g', gid, name]
|
||||
|
||||
if root is not None:
|
||||
cmd.extend(('-R', root))
|
||||
|
@ -170,15 +170,15 @@ def adduser(name, username, root=None):
|
|||
|
||||
if __grains__['kernel'] == 'Linux':
|
||||
if on_redhat_5:
|
||||
cmd = ('gpasswd', '-a', username, name)
|
||||
cmd = ['gpasswd', '-a', username, name]
|
||||
elif on_suse_11:
|
||||
cmd = ('usermod', '-A', name, username)
|
||||
cmd = ['usermod', '-A', name, username]
|
||||
else:
|
||||
cmd = ('gpasswd', '--add', username, name)
|
||||
cmd = ['gpasswd', '--add', username, name]
|
||||
if root is not None:
|
||||
cmd.extend(('-Q', root))
|
||||
else:
|
||||
cmd = ('usermod', '-G', name, username)
|
||||
cmd = ['usermod', '-G', name, username]
|
||||
if root is not None:
|
||||
cmd.extend(('-R', root))
|
||||
|
||||
|
@ -208,20 +208,20 @@ def deluser(name, username, root=None):
|
|||
if username in grp_info['members']:
|
||||
if __grains__['kernel'] == 'Linux':
|
||||
if on_redhat_5:
|
||||
cmd = ('gpasswd', '-d', username, name)
|
||||
cmd = ['gpasswd', '-d', username, name]
|
||||
elif on_suse_11:
|
||||
cmd = ('usermod', '-R', name, username)
|
||||
cmd = ['usermod', '-R', name, username]
|
||||
else:
|
||||
cmd = ('gpasswd', '--del', username, name)
|
||||
cmd = ['gpasswd', '--del', username, name]
|
||||
if root is not None:
|
||||
cmd.extend(('-R', root))
|
||||
retcode = __salt__['cmd.retcode'](cmd, python_shell=False)
|
||||
elif __grains__['kernel'] == 'OpenBSD':
|
||||
out = __salt__['cmd.run_stdout']('id -Gn {0}'.format(username),
|
||||
python_shell=False)
|
||||
cmd = 'usermod -S '
|
||||
cmd += ','.join([g for g in out.split() if g != str(name)])
|
||||
cmd += ' {0}'.format(username)
|
||||
cmd = ['usermod', '-S']
|
||||
cmd.append(','.join([g for g in out.split() if g != str(name)]))
|
||||
cmd.append('{0}'.format(username))
|
||||
retcode = __salt__['cmd.retcode'](cmd, python_shell=False)
|
||||
else:
|
||||
log.error('group.deluser is not yet supported on this platform')
|
||||
|
@ -249,13 +249,13 @@ def members(name, members_list, root=None):
|
|||
|
||||
if __grains__['kernel'] == 'Linux':
|
||||
if on_redhat_5:
|
||||
cmd = ('gpasswd', '-M', members_list, name)
|
||||
cmd = ['gpasswd', '-M', members_list, name]
|
||||
elif on_suse_11:
|
||||
for old_member in __salt__['group.info'](name).get('members'):
|
||||
__salt__['cmd.run']('groupmod -R {0} {1}'.format(old_member, name), python_shell=False)
|
||||
cmd = ('groupmod', '-A', members_list, name)
|
||||
cmd = ['groupmod', '-A', members_list, name]
|
||||
else:
|
||||
cmd = ('gpasswd', '--members', members_list, name)
|
||||
cmd = ['gpasswd', '--members', members_list, name]
|
||||
if root is not None:
|
||||
cmd.extend(('-R', root))
|
||||
retcode = __salt__['cmd.retcode'](cmd, python_shell=False)
|
||||
|
@ -270,7 +270,7 @@ def members(name, members_list, root=None):
|
|||
for user in members_list.split(","):
|
||||
if user:
|
||||
retcode = __salt__['cmd.retcode'](
|
||||
'usermod -G {0} {1}'.format(name, user),
|
||||
['usermod', '-G', name, user],
|
||||
python_shell=False)
|
||||
if not retcode == 0:
|
||||
break
|
||||
|
|
|
@ -318,17 +318,18 @@ class _Section(OrderedDict):
|
|||
yield '{0}[{1}]{0}'.format(os.linesep, self.name)
|
||||
sections_dict = OrderedDict()
|
||||
for name, value in six.iteritems(self):
|
||||
# Handle Comment Lines
|
||||
if com_regx.match(name):
|
||||
yield '{0}{1}'.format(value, os.linesep)
|
||||
# Handle Sections
|
||||
elif isinstance(value, _Section):
|
||||
sections_dict.update({name: value})
|
||||
# Key / Value pairs
|
||||
# Adds spaces between the separator
|
||||
else:
|
||||
yield '{0}{1}{2}{3}'.format(
|
||||
name,
|
||||
(
|
||||
' {0} '.format(self.sep) if self.sep != ' '
|
||||
else self.sep
|
||||
),
|
||||
' {0} '.format(self.sep) if self.sep != ' ' else self.sep,
|
||||
value,
|
||||
os.linesep
|
||||
)
|
||||
|
|
|
@ -1455,6 +1455,8 @@ def _parser():
|
|||
add_arg('--or-mark', dest='or-mark', action='append')
|
||||
add_arg('--xor-mark', dest='xor-mark', action='append')
|
||||
add_arg('--set-mark', dest='set-mark', action='append')
|
||||
add_arg('--nfmask', dest='nfmask', action='append')
|
||||
add_arg('--ctmask', dest='ctmask', action='append')
|
||||
## CONNSECMARK
|
||||
add_arg('--save', dest='save', action='append')
|
||||
add_arg('--restore', dest='restore', action='append')
|
||||
|
|
|
@ -11,7 +11,6 @@ import logging
|
|||
|
||||
# Import salt libs
|
||||
import salt.utils
|
||||
from salt.utils import which as _which
|
||||
from salt.exceptions import CommandNotFoundError, CommandExecutionError
|
||||
|
||||
# Import 3rd-party libs
|
||||
|
@ -1114,12 +1113,12 @@ def is_fuse_exec(cmd):
|
|||
|
||||
salt '*' mount.is_fuse_exec sshfs
|
||||
'''
|
||||
cmd_path = _which(cmd)
|
||||
cmd_path = salt.utils.which(cmd)
|
||||
|
||||
# No point in running ldd on a command that doesn't exist
|
||||
if not cmd_path:
|
||||
return False
|
||||
elif not _which('ldd'):
|
||||
elif not salt.utils.which('ldd'):
|
||||
raise CommandNotFoundError('ldd')
|
||||
|
||||
out = __salt__['cmd.run']('ldd {0}'.format(cmd_path), python_shell=False)
|
||||
|
|
|
@ -18,6 +18,8 @@ Module to provide redis functionality to Salt
# Import Python libs
from __future__ import absolute_import
from salt.ext.six.moves import zip
from salt.ext import six
from datetime import datetime

# Import third party libs
try:
@ -513,8 +515,14 @@ def lastsave(host=None, port=None, db=None, password=None):

salt '*' redis.lastsave
'''
# Use of %s to get the timestamp is not supported by Python. The reason it
# works is because it's passed to the system strftime which may not support
# it. See: https://stackoverflow.com/a/11743262
server = _connect(host, port, db, password)
return int(server.lastsave().strftime("%s"))
if six.PY2:
return int((server.lastsave() - datetime(1970, 1, 1)).total_seconds())
else:
return int(server.lastsave().timestamp())

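A quick, hedged check of the replacement logic above: strftime("%s") only worked where the platform strftime implemented %s, while the datetime arithmetic is pure Python (the sample value stands in for server.lastsave() and is treated as UTC):

    from datetime import datetime

    last = datetime(2017, 8, 1, 12, 30, 0)
    epoch_seconds = int((last - datetime(1970, 1, 1)).total_seconds())
    # epoch_seconds == 1501590600, with no platform strftime involved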
def llen(key, host=None, port=None, db=None, password=None):
|
||||
|
|
|
@ -168,7 +168,10 @@ def has_settable_hwclock():
|
|||
salt '*' system.has_settable_hwclock
|
||||
'''
|
||||
if salt.utils.which_bin(['hwclock']) is not None:
|
||||
res = __salt__['cmd.run_all'](['hwclock', '--test', '--systohc'], python_shell=False)
|
||||
res = __salt__['cmd.run_all'](
|
||||
['hwclock', '--test', '--systohc'], python_shell=False,
|
||||
output_loglevel='quiet', ignore_retcode=True
|
||||
)
|
||||
return res['retcode'] == 0
|
||||
return False
|
||||
|
||||
|
|
|
@ -199,12 +199,10 @@ def create(path,
for entry in extra_search_dir:
cmd.append('--extra-search-dir={0}'.format(entry))
if never_download is True:
if virtualenv_version_info >= (1, 10):
if virtualenv_version_info >= (1, 10) and virtualenv_version_info < (14, 0, 0):
log.info(
'The virtualenv \'--never-download\' option has been '
'deprecated in virtualenv(>=1.10), as such, the '
'\'never_download\' option to `virtualenv.create()` has '
'also been deprecated and it\'s not necessary anymore.'
'--never-download was deprecated in 1.10.0, but reimplemented in 14.0.0. '
'If this feature is needed, please install a supported virtualenv version.'
)
else:
cmd.append('--never-download')
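Restating the version gate above as plain tuple comparison (the version tuple is an example value, not read from a real virtualenv install):

    cmd = []
    virtualenv_version_info = (1, 11, 6)   # e.g. parsed from `virtualenv --version`
    if (1, 10) <= virtualenv_version_info < (14, 0, 0):
        # releases where --never-download was a deprecated no-op: warn only
        print('--never-download is ignored by this virtualenv release')
    else:
        cmd.append('--never-download')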
@ -983,18 +983,6 @@ def install(name=None, refresh=False, pkgs=None, **kwargs):
# Version 1.2.3 will apply to packages foo and bar
salt '*' pkg.install foo,bar version=1.2.3

cache_file (str):
A single file to copy down for use with the installer. Copied to the
same location as the installer. Use this over ``cache_dir`` if there
are many files in the directory and you only need a specific file
and don't want to cache additional files that may reside in the
installer directory. Only applies to files on ``salt://``

cache_dir (bool):
True will copy the contents of the installer directory. This is
useful for installations that are not a single file. Only applies to
directories on ``salt://``

extra_install_flags (str):
Additional install flags that will be appended to the
``install_flags`` defined in the software definition file. Only
@ -1286,7 +1274,7 @@ def install(name=None, refresh=False, pkgs=None, **kwargs):
if use_msiexec:
cmd = msiexec
arguments = ['/i', cached_pkg]
if pkginfo['version_num'].get('allusers', True):
if pkginfo[version_num].get('allusers', True):
arguments.append('ALLUSERS="1"')
arguments.extend(salt.utils.shlex_split(install_flags))
else:
@ -857,8 +857,8 @@ def list_repo_pkgs(*args, **kwargs):
|
|||
_parse_output(out['stdout'], strict=True)
|
||||
else:
|
||||
for repo in repos:
|
||||
cmd = [_yum(), '--quiet', 'repository-packages', repo,
|
||||
'list', '--showduplicates']
|
||||
cmd = [_yum(), '--quiet', '--showduplicates',
|
||||
'repository-packages', repo, 'list']
|
||||
if cacheonly:
|
||||
cmd.append('-C')
|
||||
# Can't concatenate because args is a tuple, using list.extend()
|
||||
|
@ -2723,7 +2723,7 @@ def _parse_repo_file(filename):
|
|||
|
||||
for section in parsed._sections:
|
||||
section_dict = dict(parsed._sections[section])
|
||||
section_dict.pop('__name__')
|
||||
section_dict.pop('__name__', None)
|
||||
config[section] = section_dict
|
||||
|
||||
# Try to extract leading comments
|
||||
|
|
|
@ -20,7 +20,7 @@ try:
|
|||
except ImportError as exc:
|
||||
cpy_error = exc
|
||||
|
||||
__virtualname__ = os.path.abspath(__file__).rsplit('/')[-2] or 'rest_cherrypy'
|
||||
__virtualname__ = os.path.abspath(__file__).rsplit(os.sep)[-2] or 'rest_cherrypy'
|
||||
|
||||
logger = logging.getLogger(__virtualname__)
|
||||
cpy_min = '3.2.2'
|
||||
|
|
|
@ -10,7 +10,7 @@ import os
|
|||
import salt.auth
|
||||
from salt.utils.versions import StrictVersion as _StrictVersion
|
||||
|
||||
__virtualname__ = os.path.abspath(__file__).rsplit('/')[-2] or 'rest_tornado'
|
||||
__virtualname__ = os.path.abspath(__file__).rsplit(os.sep)[-2] or 'rest_tornado'
|
||||
|
||||
logger = logging.getLogger(__virtualname__)
|
||||
|
||||
|
|
|
@ -405,20 +405,19 @@ class Pillar(object):
|
|||
self.opts['pillarenv'], ', '.join(self.opts['file_roots'])
|
||||
)
|
||||
else:
|
||||
tops[self.opts['pillarenv']] = [
|
||||
compile_template(
|
||||
self.client.cache_file(
|
||||
self.opts['state_top'],
|
||||
self.opts['pillarenv']
|
||||
),
|
||||
self.rend,
|
||||
self.opts['renderer'],
|
||||
self.opts['renderer_blacklist'],
|
||||
self.opts['renderer_whitelist'],
|
||||
self.opts['pillarenv'],
|
||||
_pillar_rend=True,
|
||||
)
|
||||
]
|
||||
top = self.client.cache_file(self.opts['state_top'], self.opts['pillarenv'])
|
||||
if top:
|
||||
tops[self.opts['pillarenv']] = [
|
||||
compile_template(
|
||||
top,
|
||||
self.rend,
|
||||
self.opts['renderer'],
|
||||
self.opts['renderer_blacklist'],
|
||||
self.opts['renderer_whitelist'],
|
||||
self.opts['pillarenv'],
|
||||
_pillar_rend=True,
|
||||
)
|
||||
]
|
||||
else:
|
||||
for saltenv in self._get_envs():
|
||||
if self.opts.get('pillar_source_merging_strategy', None) == "none":
|
||||
|
|
|
@ -391,7 +391,6 @@ def clean_old_jobs():
Clean out the old jobs from the job cache
'''
if __opts__['keep_jobs'] != 0:
cur = time.time()
jid_root = _job_dir()

if not os.path.exists(jid_root):
@ -421,7 +420,7 @@ def clean_old_jobs():
shutil.rmtree(t_path)
elif os.path.isfile(jid_file):
jid_ctime = os.stat(jid_file).st_ctime
hours_difference = (cur - jid_ctime) / 3600.0
hours_difference = (time.time()- jid_ctime) / 3600.0
if hours_difference > __opts__['keep_jobs'] and os.path.exists(t_path):
# Remove the entire t_path from the original JID dir
shutil.rmtree(t_path)
@ -435,7 +434,7 @@ def clean_old_jobs():
# Checking the time again prevents a possible race condition where
# t_path JID dirs were created, but not yet populated by a jid file.
t_path_ctime = os.stat(t_path).st_ctime
hours_difference = (cur - t_path_ctime) / 3600.0
hours_difference = (time.time() - t_path_ctime) / 3600.0
if hours_difference > __opts__['keep_jobs']:
shutil.rmtree(t_path)
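The age test above is plain arithmetic on st_ctime; a hedged standalone version (keep_jobs is expressed in hours, and the default shown is only an example):

    import os
    import time

    def job_dir_expired(path, keep_jobs=24):
        age_hours = (time.time() - os.stat(path).st_ctime) / 3600.0
        return age_hours > keep_jobs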
|
||||
|
|
|
@ -14,9 +14,12 @@ import shutil
|
|||
import msgpack
|
||||
import hashlib
|
||||
import logging
|
||||
import pwd
|
||||
import grp
|
||||
import sys
|
||||
try:
|
||||
import pwd
|
||||
import grp
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
# Import Salt libs
|
||||
import salt.client
|
||||
|
@ -491,10 +494,18 @@ class SPMClient(object):
|
|||
|
||||
# No defaults for this in config.py; default to the current running
|
||||
# user and group
|
||||
uid = self.opts.get('spm_uid', os.getuid())
|
||||
gid = self.opts.get('spm_gid', os.getgid())
|
||||
uname = pwd.getpwuid(uid)[0]
|
||||
gname = grp.getgrgid(gid)[0]
|
||||
import salt.utils
|
||||
if salt.utils.is_windows():
|
||||
import salt.utils.win_functions
|
||||
uname = gname = salt.utils.win_functions.get_current_user()
|
||||
uname_sid = salt.utils.win_functions.get_sid_from_name(uname)
|
||||
uid = self.opts.get('spm_uid', uname_sid)
|
||||
gid = self.opts.get('spm_gid', uname_sid)
|
||||
else:
|
||||
uid = self.opts.get('spm_uid', os.getuid())
|
||||
gid = self.opts.get('spm_gid', os.getgid())
|
||||
uname = pwd.getpwuid(uid)[0]
|
||||
gname = grp.getgrgid(gid)[0]
|
||||
|
||||
# Second pass: install the files
|
||||
for member in pkg_files:
|
||||
|
@ -710,7 +721,7 @@ class SPMClient(object):
|
|||
raise SPMInvocationError('A path to a directory must be specified')
|
||||
|
||||
if args[1] == '.':
|
||||
repo_path = os.environ['PWD']
|
||||
repo_path = os.getcwdu()
|
||||
else:
|
||||
repo_path = args[1]
|
||||
|
||||
|
|
|
@ -37,6 +37,7 @@ import salt.utils.dictupdate
|
|||
import salt.utils.event
|
||||
import salt.utils.url
|
||||
import salt.utils.process
|
||||
import salt.utils.files
|
||||
import salt.syspaths as syspaths
|
||||
from salt.utils import immutabletypes
|
||||
from salt.template import compile_template, compile_template_str
|
||||
|
@ -146,6 +147,13 @@ def _gen_tag(low):
return '{0[state]}_|-{0[__id__]}_|-{0[name]}_|-{0[fun]}'.format(low)


def _clean_tag(tag):
'''
Make tag name safe for filenames
'''
return salt.utils.files.safe_filename_leaf(tag)


def _l_tag(name, id_):
low = {'name': 'listen_{0}'.format(name),
'__id__': 'listen_{0}'.format(id_),
@ -1695,7 +1703,7 @@ class State(object):
|
|||
trb)
|
||||
}
|
||||
troot = os.path.join(self.opts['cachedir'], self.jid)
|
||||
tfile = os.path.join(troot, tag)
|
||||
tfile = os.path.join(troot, _clean_tag(tag))
|
||||
if not os.path.isdir(troot):
|
||||
try:
|
||||
os.makedirs(troot)
|
||||
|
@ -2047,7 +2055,7 @@ class State(object):
|
|||
proc = running[tag].get('proc')
|
||||
if proc:
|
||||
if not proc.is_alive():
|
||||
ret_cache = os.path.join(self.opts['cachedir'], self.jid, tag)
|
||||
ret_cache = os.path.join(self.opts['cachedir'], self.jid, _clean_tag(tag))
|
||||
if not os.path.isfile(ret_cache):
|
||||
ret = {'result': False,
|
||||
'comment': 'Parallel process failed to return',
|
||||
|
@ -3116,7 +3124,7 @@ class BaseHighState(object):
|
|||
Returns:
|
||||
{'saltenv': ['state1', 'state2', ...]}
|
||||
'''
|
||||
matches = {}
|
||||
matches = DefaultOrderedDict(OrderedDict)
|
||||
# pylint: disable=cell-var-from-loop
|
||||
for saltenv, body in six.iteritems(top):
|
||||
if self.opts['environment']:
|
||||
|
|
|
@ -116,7 +116,7 @@ entry on the minion already contains a numeric value, then using the ``random``
keyword will not modify it.

Added the opportunity to set a job with a special keyword like '@reboot' or
'@hourly'.
'@hourly'. Quotes must be used, otherwise PyYAML will strip the '@' sign.

.. code-block:: yaml

@ -303,7 +303,8 @@ def present(name,
edits. This defaults to the state id

special
A special keyword to specify periodicity (eg. @reboot, @hourly...)
A special keyword to specify periodicity (eg. @reboot, @hourly...).
Quotes must be used, otherwise PyYAML will strip the '@' sign.

.. versionadded:: 2016.3.0
'''
@ -389,7 +390,8 @@ def absent(name,
edits. This defaults to the state id

special
The special keyword used in the job (eg. @reboot, @hourly...)
The special keyword used in the job (eg. @reboot, @hourly...).
Quotes must be used, otherwise PyYAML will strip the '@' sign.
'''
### NOTE: The keyword arguments in **kwargs are ignored in this state, but
### cannot be removed from the function definition, otherwise the use
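A small, hedged demonstration of the quoting caveat repeated in the docstrings above, using PyYAML directly (the state snippet is invented):

    import yaml

    sls = ("runs_at_reboot:\n"
           "  cron.present:\n"
           "    - name: /usr/local/bin/backup.sh\n"
           "    - special: '@reboot'\n")
    data = yaml.safe_load(sls)
    assert data['runs_at_reboot']['cron.present'][1]['special'] == '@reboot'
    # Without the quotes PyYAML refuses to treat a leading '@' as plain text,
    # so the special keyword never reaches the state.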
@ -1553,7 +1553,7 @@ def managed(name,
the salt master and potentially run through a templating system.

name
The location of the file to manage
The location of the file to manage, as an absolute path.

source
The source file to download to the minion, this source file can be
@ -1723,13 +1723,15 @@ def managed(name,

group
The group ownership set for the file, this defaults to the group salt
is running as on the minion On Windows, this is ignored
is running as on the minion. On Windows, this is ignored

mode
The permissions to set on this file, e.g. ``644``, ``0775``, or ``4664``.
The permissions to set on this file, e.g. ``644``, ``0775``, or
``4664``.

The default mode for new files and directories corresponds umask of salt
process. For existing files and directories it's not enforced.
The default mode for new files and directories corresponds to the
umask of the salt process. The mode of existing files and directories
will only be changed if ``mode`` is specified.

.. note::
This option is **not** supported on Windows.
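The umask note above can be made concrete with a little arithmetic; this illustrates the usual POSIX defaults, not a guarantee about every platform:

    umask = 0o022
    default_file_mode = 0o666 & ~umask   # 0o644 for new files
    default_dir_mode = 0o777 & ~umask    # 0o755 for new directories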
@ -2558,7 +2560,7 @@ def directory(name,
Ensure that a named directory is present and has the right perms

name
The location to create or manage a directory
The location to create or manage a directory, as an absolute path

user
The user to own the directory; this defaults to the user salt is
|
|
|
@ -15,6 +15,8 @@ DEVICE="{{name}}"
|
|||
{%endif%}{% if onparent %}ONPARENT={{onparent}}
|
||||
{%endif%}{% if ipv4_failure_fatal %}IPV4_FAILURE_FATAL="{{ipv4_failure_fatal}}"
|
||||
{%endif%}{% if ipaddr %}IPADDR="{{ipaddr}}"
|
||||
{%endif%}{% if ipaddr_start %}IPADDR_START="{{ipaddr_start}}"
|
||||
{%endif%}{% if ipaddr_end %}IPADDR_END="{{ipaddr_end}}"
|
||||
{%endif%}{% if netmask %}NETMASK="{{netmask}}"
|
||||
{%endif%}{% if prefix %}PREFIX="{{prefix}}"
|
||||
{%endif%}{% if gateway %}GATEWAY="{{gateway}}"
|
||||
|
|
|
@ -2098,7 +2098,7 @@ def is_true(value=None):
pass

# Now check for truthiness
if isinstance(value, (int, float)):
if isinstance(value, (six.integer_types, float)):
return value > 0
elif isinstance(value, six.string_types):
return str(value).lower() == 'true'
@ -2874,7 +2874,7 @@ def repack_dictlist(data,
if val_cb is None:
val_cb = lambda x, y: y

valid_non_dict = (six.string_types, int, float)
valid_non_dict = (six.string_types, six.integer_types, float)
if isinstance(data, list):
for element in data:
if isinstance(element, valid_non_dict):
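The switch to six.integer_types above matters mainly on Python 2, where long is a separate type; a hedged check that works on both interpreter lines (isinstance accepts nested tuples, which is exactly how valid_non_dict is built):

    import salt.ext.six as six

    values = [2, 2 ** 70, 3.14]   # an int, a PY2 long, and a float
    assert all(isinstance(v, (six.integer_types, float)) for v in values)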
@ -392,7 +392,7 @@ def query(params=None, setname=None, requesturl=None, location=None,
|
|||
service_url = prov_dict.get('service_url', 'amazonaws.com')
|
||||
|
||||
if not location:
|
||||
location = get_location(opts, provider)
|
||||
location = get_location(opts, prov_dict)
|
||||
|
||||
if endpoint is None:
|
||||
if not requesturl:
|
||||
|
|
|
@ -293,12 +293,14 @@ def salt_config_to_yaml(configuration, line_break='\n'):
|
|||
Dumper=SafeOrderedDumper)
|
||||
|
||||
|
||||
def bootstrap(vm_, opts):
|
||||
def bootstrap(vm_, opts=None):
|
||||
'''
|
||||
This is the primary entry point for logging into any system (POSIX or
|
||||
Windows) to install Salt. It will make the decision on its own as to which
|
||||
deploy function to call.
|
||||
'''
|
||||
if opts is None:
|
||||
opts = __opts__
|
||||
deploy_config = salt.config.get_cloud_config_value(
|
||||
'deploy',
|
||||
vm_, opts, default=False)
|
||||
|
|
|
@ -38,7 +38,6 @@ from salt.utils.versions import LooseVersion as _LooseVersion
|
|||
# Import third party libs
|
||||
import salt.ext.six as six
|
||||
|
||||
VALID_PROVIDERS = ('pygit2', 'gitpython')
|
||||
# Optional per-remote params that can only be used on a per-remote basis, and
|
||||
# thus do not have defaults in salt/config.py.
|
||||
PER_REMOTE_ONLY = ('name',)
|
||||
|
@ -164,7 +163,7 @@ class GitProvider(object):
|
|||
directly.
|
||||
|
||||
self.provider should be set in the sub-class' __init__ function before
|
||||
invoking GitProvider.__init__().
|
||||
invoking the parent class' __init__.
|
||||
'''
|
||||
def __init__(self, opts, remote, per_remote_defaults, per_remote_only,
|
||||
override_params, cache_root, role='gitfs'):
|
||||
|
@ -857,8 +856,10 @@ class GitPython(GitProvider):
|
|||
def __init__(self, opts, remote, per_remote_defaults, per_remote_only,
|
||||
override_params, cache_root, role='gitfs'):
|
||||
self.provider = 'gitpython'
|
||||
GitProvider.__init__(self, opts, remote, per_remote_defaults,
|
||||
per_remote_only, override_params, cache_root, role)
|
||||
super(GitPython, self).__init__(
|
||||
opts, remote, per_remote_defaults, per_remote_only,
|
||||
override_params, cache_root, role
|
||||
)
|
||||
|
||||
def add_refspecs(self, *refspecs):
|
||||
'''
|
||||
|
@ -1192,8 +1193,10 @@ class Pygit2(GitProvider):
|
|||
def __init__(self, opts, remote, per_remote_defaults, per_remote_only,
|
||||
override_params, cache_root, role='gitfs'):
|
||||
self.provider = 'pygit2'
|
||||
GitProvider.__init__(self, opts, remote, per_remote_defaults,
|
||||
per_remote_only, override_params, cache_root, role)
|
||||
super(Pygit2, self).__init__(
|
||||
opts, remote, per_remote_defaults, per_remote_only,
|
||||
override_params, cache_root, role
|
||||
)
|
||||
|
||||
def add_refspecs(self, *refspecs):
|
||||
'''
|
||||
|
@ -1877,11 +1880,17 @@ class Pygit2(GitProvider):
|
|||
fp_.write(blob.data)
|
||||
|
||||
|
||||
GIT_PROVIDERS = {
|
||||
'pygit2': Pygit2,
|
||||
'gitpython': GitPython,
|
||||
}
|
||||
|
||||
|
||||
class GitBase(object):
|
||||
'''
|
||||
Base class for gitfs/git_pillar
|
||||
'''
|
||||
def __init__(self, opts, valid_providers=VALID_PROVIDERS, cache_root=None):
|
||||
def __init__(self, opts, git_providers=None, cache_root=None):
|
||||
'''
|
||||
IMPORTANT: If specifying a cache_root, understand that this is also
|
||||
where the remotes will be cloned. A non-default cache_root is only
|
||||
|
@ -1889,8 +1898,9 @@ class GitBase(object):
|
|||
out into the winrepo locations and not within the cachedir.
|
||||
'''
|
||||
self.opts = opts
|
||||
self.valid_providers = valid_providers
|
||||
self.get_provider()
|
||||
self.git_providers = git_providers if git_providers is not None \
|
||||
else GIT_PROVIDERS
|
||||
self.verify_provider()
|
||||
if cache_root is not None:
|
||||
self.cache_root = self.remote_root = cache_root
|
||||
else:
|
||||
|
@ -1948,7 +1958,7 @@ class GitBase(object):
|
|||
|
||||
self.remotes = []
|
||||
for remote in remotes:
|
||||
repo_obj = self.provider_class(
|
||||
repo_obj = self.git_providers[self.provider](
|
||||
self.opts,
|
||||
remote,
|
||||
per_remote_defaults,
|
||||
|
@ -2202,7 +2212,7 @@ class GitBase(object):
|
|||
# Hash file won't exist if no files have yet been served up
|
||||
pass
|
||||
|
||||
def get_provider(self):
|
||||
def verify_provider(self):
|
||||
'''
|
||||
Determine which provider to use
|
||||
'''
|
||||
|
@ -2223,12 +2233,12 @@ class GitBase(object):
|
|||
# Should only happen if someone does something silly like
|
||||
# set the provider to a numeric value.
|
||||
desired_provider = str(desired_provider).lower()
|
||||
if desired_provider not in self.valid_providers:
|
||||
if desired_provider not in self.git_providers:
|
||||
log.critical(
|
||||
'Invalid {0}_provider \'{1}\'. Valid choices are: {2}'
|
||||
.format(self.role,
|
||||
desired_provider,
|
||||
', '.join(self.valid_providers))
|
||||
', '.join(self.git_providers))
|
||||
)
|
||||
failhard(self.role)
|
||||
elif desired_provider == 'pygit2' and self.verify_pygit2():
|
||||
|
@ -2241,17 +2251,13 @@ class GitBase(object):
|
|||
.format(self.role)
|
||||
)
|
||||
failhard(self.role)
|
||||
if self.provider == 'pygit2':
|
||||
self.provider_class = Pygit2
|
||||
elif self.provider == 'gitpython':
|
||||
self.provider_class = GitPython
|
||||
|
||||
def verify_gitpython(self, quiet=False):
|
||||
'''
|
||||
Check if GitPython is available and at a compatible version (>= 0.3.0)
|
||||
'''
|
||||
def _recommend():
|
||||
if HAS_PYGIT2 and 'pygit2' in self.valid_providers:
|
||||
if HAS_PYGIT2 and 'pygit2' in self.git_providers:
|
||||
log.error(_RECOMMEND_PYGIT2.format(self.role))
|
||||
|
||||
if not HAS_GITPYTHON:
|
||||
|
@ -2262,7 +2268,7 @@ class GitBase(object):
|
|||
)
|
||||
_recommend()
|
||||
return False
|
||||
elif 'gitpython' not in self.valid_providers:
|
||||
elif 'gitpython' not in self.git_providers:
|
||||
return False
|
||||
|
||||
# pylint: disable=no-member
|
||||
|
@ -2302,7 +2308,7 @@ class GitBase(object):
|
|||
Pygit2 must be at least 0.20.3 and libgit2 must be at least 0.20.0.
|
||||
'''
|
||||
def _recommend():
|
||||
if HAS_GITPYTHON and 'gitpython' in self.valid_providers:
|
||||
if HAS_GITPYTHON and 'gitpython' in self.git_providers:
|
||||
log.error(_RECOMMEND_GITPYTHON.format(self.role))
|
||||
|
||||
if not HAS_PYGIT2:
|
||||
|
@ -2313,7 +2319,7 @@ class GitBase(object):
|
|||
)
|
||||
_recommend()
|
||||
return False
|
||||
elif 'pygit2' not in self.valid_providers:
|
||||
elif 'pygit2' not in self.git_providers:
|
||||
return False
|
||||
|
||||
# pylint: disable=no-member
|
||||
|
@ -2432,7 +2438,7 @@ class GitFS(GitBase):
|
|||
'''
|
||||
def __init__(self, opts):
|
||||
self.role = 'gitfs'
|
||||
GitBase.__init__(self, opts)
|
||||
super(GitFS, self).__init__(opts)
|
||||
|
||||
def dir_list(self, load):
|
||||
'''
|
||||
|
@ -2735,7 +2741,7 @@ class GitPillar(GitBase):
|
|||
'''
|
||||
def __init__(self, opts):
|
||||
self.role = 'git_pillar'
|
||||
GitBase.__init__(self, opts)
|
||||
super(GitPillar, self).__init__(opts)
|
||||
|
||||
def checkout(self):
|
||||
'''
|
||||
|
@ -2837,7 +2843,7 @@ class WinRepo(GitBase):
|
|||
'''
|
||||
def __init__(self, opts, winrepo_dir):
|
||||
self.role = 'winrepo'
|
||||
GitBase.__init__(self, opts, cache_root=winrepo_dir)
|
||||
super(WinRepo, self).__init__(opts, cache_root=winrepo_dir)
|
||||
|
||||
def checkout(self):
|
||||
'''
|
||||
|
|
|
@ -2359,6 +2359,16 @@ class SaltKeyOptionParser(six.with_metaclass(OptionParserMeta,
'Default: %default.')
)

self.add_option(
'--preserve-minions',
default=False,
help=('Setting this to True prevents the master from deleting '
'the minion cache when keys are deleted, this may have '
'security implications if compromised minions auth with '
'a previous deleted minion ID. '
'Default: %default.')
)

key_options_group = optparse.OptionGroup(
self, 'Key Generation Options'
)
@ -2458,6 +2468,13 @@ class SaltKeyOptionParser(six.with_metaclass(OptionParserMeta,
elif self.options.rotate_aes_key.lower() == 'false':
self.options.rotate_aes_key = False

def process_preserve_minions(self):
if hasattr(self.options, 'preserve_minions') and isinstance(self.options.preserve_minions, str):
if self.options.preserve_minions.lower() == 'true':
self.options.preserve_minions = True
elif self.options.preserve_minions.lower() == 'false':
self.options.preserve_minions = False

def process_list(self):
# Filter accepted list arguments as soon as possible
if not self.options.list:
|
|
@ -842,7 +842,8 @@ class Schedule(object):
|
|||
if argspec.keywords:
|
||||
# this function accepts **kwargs, pack in the publish data
|
||||
for key, val in six.iteritems(ret):
|
||||
kwargs['__pub_{0}'.format(key)] = copy.deepcopy(val)
|
||||
if key is not 'kwargs':
|
||||
kwargs['__pub_{0}'.format(key)] = copy.deepcopy(val)
|
||||
|
||||
ret['return'] = self.functions[func](*args, **kwargs)
|
||||
|
||||
|
|
|
@ -64,3 +64,14 @@ def is_readable(path):

# The path does not exist
return False


def is_executable(path):
'''
Check if a given path is executable by the current user.

:param path: The path to check
:returns: True or False
'''

return os.access(path, os.X_OK)
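A hedged usage sketch of the new helper; the qemu path is illustrative and mirrors the debootstrap check earlier in this changeset:

    import os

    def is_executable(path):
        # same os.access test as salt.utils.validate.path.is_executable
        return os.access(path, os.X_OK)

    static_qemu = '/usr/bin/qemu-x86_64-static'
    if not is_executable(static_qemu):
        print('Required tool qemu not present/readable at: {0}'.format(static_qemu))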
@ -2,42 +2,39 @@
|
|||
|
||||
# Import Python libs
|
||||
from __future__ import absolute_import
|
||||
import os
|
||||
import tempfile
|
||||
|
||||
# Import Salt Libs
|
||||
from salt.cloud.clouds import ec2
|
||||
from salt.exceptions import SaltCloudSystemExit
|
||||
|
||||
# Import Salt Testing Libs
|
||||
from tests.support.mixins import LoaderModuleMockMixin
|
||||
from tests.support.unit import TestCase, skipIf
|
||||
from tests.support.mock import NO_MOCK, NO_MOCK_REASON
|
||||
from tests.support.mock import NO_MOCK, NO_MOCK_REASON, patch, PropertyMock
|
||||
|
||||
|
||||
@skipIf(NO_MOCK, NO_MOCK_REASON)
|
||||
class EC2TestCase(TestCase, LoaderModuleMockMixin):
|
||||
class EC2TestCase(TestCase):
|
||||
'''
|
||||
Unit TestCase for salt.cloud.clouds.ec2 module.
|
||||
'''
|
||||
|
||||
def setup_loader_modules(self):
|
||||
return {ec2: {}}
|
||||
|
||||
def test__validate_key_path_and_mode(self):
|
||||
with tempfile.NamedTemporaryFile() as f:
|
||||
key_file = f.name
|
||||
|
||||
os.chmod(key_file, 0o644)
|
||||
self.assertRaises(SaltCloudSystemExit,
|
||||
ec2._validate_key_path_and_mode,
|
||||
key_file)
|
||||
os.chmod(key_file, 0o600)
|
||||
self.assertTrue(ec2._validate_key_path_and_mode(key_file))
|
||||
os.chmod(key_file, 0o400)
|
||||
self.assertTrue(ec2._validate_key_path_and_mode(key_file))
|
||||
# Key file exists
|
||||
with patch('os.path.exists', return_value=True):
|
||||
with patch('os.stat') as patched_stat:
|
||||
|
||||
# tmp file removed
|
||||
self.assertRaises(SaltCloudSystemExit,
|
||||
ec2._validate_key_path_and_mode,
|
||||
key_file)
|
||||
type(patched_stat.return_value).st_mode = PropertyMock(return_value=0o644)
|
||||
self.assertRaises(
|
||||
SaltCloudSystemExit, ec2._validate_key_path_and_mode, 'key_file')
|
||||
|
||||
type(patched_stat.return_value).st_mode = PropertyMock(return_value=0o600)
|
||||
self.assertTrue(ec2._validate_key_path_and_mode('key_file'))
|
||||
|
||||
type(patched_stat.return_value).st_mode = PropertyMock(return_value=0o400)
|
||||
self.assertTrue(ec2._validate_key_path_and_mode('key_file'))
|
||||
|
||||
# Key file does not exist
|
||||
with patch('os.path.exists', return_value=False):
|
||||
self.assertRaises(
|
||||
SaltCloudSystemExit, ec2._validate_key_path_and_mode, 'key_file')
|
||||
|
|
|
@ -66,30 +66,28 @@ class AlternativesTestCase(TestCase, LoaderModuleMockMixin):
|
|||
)
|
||||
|
||||
def test_show_current(self):
|
||||
with patch('os.readlink') as os_readlink_mock:
|
||||
os_readlink_mock.return_value = '/etc/alternatives/salt'
|
||||
mock = MagicMock(return_value='/etc/alternatives/salt')
|
||||
with patch('salt.utils.path.readlink', mock):
|
||||
ret = alternatives.show_current('better-world')
|
||||
self.assertEqual('/etc/alternatives/salt', ret)
|
||||
os_readlink_mock.assert_called_once_with(
|
||||
'/etc/alternatives/better-world'
|
||||
)
|
||||
mock.assert_called_once_with('/etc/alternatives/better-world')
|
||||
|
||||
with TestsLoggingHandler() as handler:
|
||||
os_readlink_mock.side_effect = OSError('Hell was not found!!!')
|
||||
mock.side_effect = OSError('Hell was not found!!!')
|
||||
self.assertFalse(alternatives.show_current('hell'))
|
||||
os_readlink_mock.assert_called_with('/etc/alternatives/hell')
|
||||
mock.assert_called_with('/etc/alternatives/hell')
|
||||
self.assertIn('ERROR:alternative: hell does not exist',
|
||||
handler.messages)
|
||||
|
||||
def test_check_installed(self):
|
||||
with patch('os.readlink') as os_readlink_mock:
|
||||
os_readlink_mock.return_value = '/etc/alternatives/salt'
|
||||
mock = MagicMock(return_value='/etc/alternatives/salt')
|
||||
with patch('salt.utils.path.readlink', mock):
|
||||
self.assertTrue(
|
||||
alternatives.check_installed(
|
||||
'better-world', '/etc/alternatives/salt'
|
||||
)
|
||||
)
|
||||
os_readlink_mock.return_value = False
|
||||
mock.return_value = False
|
||||
self.assertFalse(
|
||||
alternatives.check_installed(
|
||||
'help', '/etc/alternatives/salt'
|
||||
|
|
|
@ -36,7 +36,8 @@ class ChefTestCase(TestCase, LoaderModuleMockMixin):
|
|||
'''
|
||||
Test if it execute a chef client run and return a dict
|
||||
'''
|
||||
self.assertDictEqual(chef.client(), {})
|
||||
with patch.dict(chef.__opts__, {'cachedir': r'c:\salt\var\cache\salt\minion'}):
|
||||
self.assertDictEqual(chef.client(), {})
|
||||
|
||||
# 'solo' function tests: 1
|
||||
|
||||
|
@ -44,4 +45,5 @@ class ChefTestCase(TestCase, LoaderModuleMockMixin):
|
|||
'''
|
||||
Test if it execute a chef solo run and return a dict
|
||||
'''
|
||||
self.assertDictEqual(chef.solo('/dev/sda1'), {})
|
||||
with patch.dict(chef.__opts__, {'cachedir': r'c:\salt\var\cache\salt\minion'}):
|
||||
self.assertDictEqual(chef.solo('/dev/sda1'), {})
|
||||
|
|
|
@ -65,7 +65,8 @@ class TestGemModule(TestCase, LoaderModuleMockMixin):
|
|||
with patch.dict(gem.__salt__,
|
||||
{'rvm.is_installed': MagicMock(return_value=False),
|
||||
'rbenv.is_installed': MagicMock(return_value=True),
|
||||
'rbenv.do': mock}):
|
||||
'rbenv.do': mock}),\
|
||||
patch('salt.utils.is_windows', return_value=False):
|
||||
gem._gem(['install', 'rails'])
|
||||
mock.assert_called_once_with(
|
||||
['gem', 'install', 'rails'],
|
||||
|
|
|
@ -94,9 +94,11 @@ class GenesisTestCase(TestCase, LoaderModuleMockMixin):
|
|||
'cmd.run': MagicMock(),
|
||||
'disk.blkid': MagicMock(return_value={})}):
|
||||
with patch('salt.modules.genesis.salt.utils.which', return_value=True):
|
||||
param_set['params'].update(common_parms)
|
||||
self.assertEqual(genesis.bootstrap(**param_set['params']), None)
|
||||
genesis.__salt__['cmd.run'].assert_any_call(param_set['cmd'], python_shell=False)
|
||||
with patch('salt.modules.genesis.salt.utils.validate.path.is_executable',
|
||||
return_value=True):
|
||||
param_set['params'].update(common_parms)
|
||||
self.assertEqual(genesis.bootstrap(**param_set['params']), None)
|
||||
genesis.__salt__['cmd.run'].assert_any_call(param_set['cmd'], python_shell=False)
|
||||
|
||||
with patch.object(genesis, '_bootstrap_pacman', return_value='A') as pacman_patch:
|
||||
with patch.dict(genesis.__salt__, {'mount.umount': MagicMock(),
|
||||
|
|
|
@ -118,16 +118,16 @@ class GroupAddTestCase(TestCase, LoaderModuleMockMixin):
|
|||
'''
|
||||
os_version_list = [
|
||||
{'grains': {'kernel': 'Linux', 'os_family': 'RedHat', 'osmajorrelease': '5'},
|
||||
'cmd': ('gpasswd', '-a', 'root', 'test')},
|
||||
'cmd': ['gpasswd', '-a', 'root', 'test']},
|
||||
|
||||
{'grains': {'kernel': 'Linux', 'os_family': 'Suse', 'osmajorrelease': '11'},
|
||||
'cmd': ('usermod', '-A', 'test', 'root')},
|
||||
'cmd': ['usermod', '-A', 'test', 'root']},
|
||||
|
||||
{'grains': {'kernel': 'Linux'},
|
||||
'cmd': ('gpasswd', '--add', 'root', 'test')},
|
||||
'cmd': ['gpasswd', '--add', 'root', 'test']},
|
||||
|
||||
{'grains': {'kernel': 'OTHERKERNEL'},
|
||||
'cmd': ('usermod', '-G', 'test', 'root')},
|
||||
'cmd': ['usermod', '-G', 'test', 'root']},
|
||||
]
|
||||
|
||||
for os_version in os_version_list:
|
||||
|
@ -145,16 +145,16 @@ class GroupAddTestCase(TestCase, LoaderModuleMockMixin):
|
|||
'''
|
||||
os_version_list = [
|
||||
{'grains': {'kernel': 'Linux', 'os_family': 'RedHat', 'osmajorrelease': '5'},
|
||||
'cmd': ('gpasswd', '-d', 'root', 'test')},
|
||||
'cmd': ['gpasswd', '-d', 'root', 'test']},
|
||||
|
||||
{'grains': {'kernel': 'Linux', 'os_family': 'Suse', 'osmajorrelease': '11'},
|
||||
'cmd': ('usermod', '-R', 'test', 'root')},
|
||||
'cmd': ['usermod', '-R', 'test', 'root']},
|
||||
|
||||
{'grains': {'kernel': 'Linux'},
|
||||
'cmd': ('gpasswd', '--del', 'root', 'test')},
|
||||
'cmd': ['gpasswd', '--del', 'root', 'test']},
|
||||
|
||||
{'grains': {'kernel': 'OpenBSD'},
|
||||
'cmd': 'usermod -S foo root'},
|
||||
'cmd': ['usermod', '-S', 'foo', 'root']},
|
||||
]
|
||||
|
||||
for os_version in os_version_list:
|
||||
|
@ -180,16 +180,16 @@ class GroupAddTestCase(TestCase, LoaderModuleMockMixin):
|
|||
'''
|
||||
os_version_list = [
|
||||
{'grains': {'kernel': 'Linux', 'os_family': 'RedHat', 'osmajorrelease': '5'},
|
||||
'cmd': ('gpasswd', '-M', 'foo', 'test')},
|
||||
'cmd': ['gpasswd', '-M', 'foo', 'test']},
|
||||
|
||||
{'grains': {'kernel': 'Linux', 'os_family': 'Suse', 'osmajorrelease': '11'},
|
||||
'cmd': ('groupmod', '-A', 'foo', 'test')},
|
||||
'cmd': ['groupmod', '-A', 'foo', 'test']},
|
||||
|
||||
{'grains': {'kernel': 'Linux'},
|
||||
'cmd': ('gpasswd', '--members', 'foo', 'test')},
|
||||
'cmd': ['gpasswd', '--members', 'foo', 'test']},
|
||||
|
||||
{'grains': {'kernel': 'OpenBSD'},
|
||||
'cmd': 'usermod -G test foo'},
|
||||
'cmd': ['usermod', '-G', 'test', 'foo']},
|
||||
]
|
||||
|
||||
for os_version in os_version_list:
|
||||
|
|
|
@ -16,6 +16,7 @@ from tests.support.mock import (
|
|||
)
|
||||
# Import Salt Libs
|
||||
import salt.modules.hosts as hosts
|
||||
import salt.utils
|
||||
from salt.ext.six.moves import StringIO
|
||||
|
||||
|
||||
|
@ -92,8 +93,12 @@ class HostsTestCase(TestCase, LoaderModuleMockMixin):
|
|||
'''
|
||||
Tests true if the alias is set
|
||||
'''
|
||||
hosts_file = '/etc/hosts'
|
||||
if salt.utils.is_windows():
|
||||
hosts_file = r'C:\Windows\System32\Drivers\etc\hosts'
|
||||
|
||||
with patch('salt.modules.hosts.__get_hosts_filename',
|
||||
MagicMock(return_value='/etc/hosts')), \
|
||||
MagicMock(return_value=hosts_file)), \
|
||||
patch('os.path.isfile', MagicMock(return_value=False)), \
|
||||
patch.dict(hosts.__salt__,
|
||||
{'config.option': MagicMock(return_value=None)}):
|
||||
|
@ -139,7 +144,16 @@ class HostsTestCase(TestCase, LoaderModuleMockMixin):
|
|||
self.close()
|
||||
|
||||
def close(self):
|
||||
data[0] = self.getvalue()
|
||||
# Don't save unless there's something there. In Windows
|
||||
# the class gets initialized the first time with mode = w
|
||||
# which sets the initial value to ''. When the class closes
|
||||
# it clears out data and causes the test to fail.
|
||||
# I don't know why it get's initialized with a mode of 'w'
|
||||
# For the purposes of this test data shouldn't be empty
|
||||
# This is a problem with this class and not with the hosts
|
||||
# module
|
||||
if self.getvalue():
|
||||
data[0] = self.getvalue()
|
||||
StringIO.close(self)
|
||||
|
||||
expected = '\n'.join((
|
||||
|
@ -151,6 +165,7 @@ class HostsTestCase(TestCase, LoaderModuleMockMixin):
|
|||
mock_opt = MagicMock(return_value=None)
|
||||
with patch.dict(hosts.__salt__, {'config.option': mock_opt}):
|
||||
self.assertTrue(hosts.set_host('1.1.1.1', ' '))
|
||||
|
||||
self.assertEqual(data[0], expected)
|
||||
|
||||
# 'rm_host' function tests: 2
|
||||
|
@ -182,9 +197,13 @@ class HostsTestCase(TestCase, LoaderModuleMockMixin):
|
|||
'''
|
||||
Tests if specified host entry gets added from the hosts file
|
||||
'''
|
||||
hosts_file = '/etc/hosts'
|
||||
if salt.utils.is_windows():
|
||||
hosts_file = r'C:\Windows\System32\Drivers\etc\hosts'
|
||||
|
||||
with patch('salt.utils.fopen', mock_open()), \
|
||||
patch('salt.modules.hosts.__get_hosts_filename',
|
||||
MagicMock(return_value='/etc/hosts')):
|
||||
MagicMock(return_value=hosts_file)):
|
||||
mock_opt = MagicMock(return_value=None)
|
||||
with patch.dict(hosts.__salt__, {'config.option': mock_opt}):
|
||||
self.assertTrue(hosts.add_host('10.10.10.10', 'Salt1'))
|
||||
|
|
|
@@ -15,38 +15,38 @@ import salt.modules.ini_manage as ini

class IniManageTestCase(TestCase):

TEST_FILE_CONTENT = '''\
# Comment on the first line

# First main option
option1=main1

# Second main option
option2=main2


[main]
# Another comment
test1=value 1

test2=value 2

[SectionB]
test1=value 1B

# Blank line should be above
test3 = value 3B

[SectionC]
# The following option is empty
empty_option=
'''
TEST_FILE_CONTENT = os.linesep.join([
'# Comment on the first line',
'',
'# First main option',
'option1=main1',
'',
'# Second main option',
'option2=main2',
'',
'',
'[main]',
'# Another comment',
'test1=value 1',
'',
'test2=value 2',
'',
'[SectionB]',
'test1=value 1B',
'',
'# Blank line should be above',
'test3 = value 3B',
'',
'[SectionC]',
'# The following option is empty',
'empty_option='
])

maxDiff = None

def setUp(self):
self.tfile = tempfile.NamedTemporaryFile(delete=False, mode='w+')
self.tfile.write(self.TEST_FILE_CONTENT)
self.tfile = tempfile.NamedTemporaryFile(delete=False, mode='w+b')
self.tfile.write(salt.utils.to_bytes(self.TEST_FILE_CONTENT))
self.tfile.close()

def tearDown(self):

@@ -121,40 +121,42 @@ empty_option=
})
with salt.utils.fopen(self.tfile.name, 'r') as fp:
file_content = fp.read()
self.assertIn('\nempty_option = \n', file_content,
'empty_option was not preserved')
expected = '{0}{1}{0}'.format(os.linesep, 'empty_option = ')
self.assertIn(expected, file_content, 'empty_option was not preserved')

def test_empty_lines_preserved_after_edit(self):
ini.set_option(self.tfile.name, {
'SectionB': {'test3': 'new value 3B'},
})
expected = os.linesep.join([
'# Comment on the first line',
'',
'# First main option',
'option1 = main1',
'',
'# Second main option',
'option2 = main2',
'',
'[main]',
'# Another comment',
'test1 = value 1',
'',
'test2 = value 2',
'',
'[SectionB]',
'test1 = value 1B',
'',
'# Blank line should be above',
'test3 = new value 3B',
'',
'[SectionC]',
'# The following option is empty',
'empty_option = ',
''
])
with salt.utils.fopen(self.tfile.name, 'r') as fp:
file_content = fp.read()
self.assertEqual('''\
# Comment on the first line

# First main option
option1 = main1

# Second main option
option2 = main2

[main]
# Another comment
test1 = value 1

test2 = value 2

[SectionB]
test1 = value 1B

# Blank line should be above
test3 = new value 3B

[SectionC]
# The following option is empty
empty_option =
''', file_content)
self.assertEqual(expected, file_content)

def test_empty_lines_preserved_after_multiple_edits(self):
ini.set_option(self.tfile.name, {
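Editor's note: building the fixture with os.linesep and writing it in binary mode keeps the on-disk bytes identical on every platform. A small standalone sketch of that idea (file content invented, not from the commit):

import os
import tempfile

content = os.linesep.join(['[main]', 'test1=value 1', ''])
tf = tempfile.NamedTemporaryFile(delete=False, mode='w+b')
tf.write(content.encode('utf-8'))  # binary mode: no newline translation on Windows
tf.close()

with open(tf.name, 'rb') as fh:
    assert fh.read() == content.encode('utf-8')
os.unlink(tf.name)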
@@ -19,7 +19,7 @@ from tests.support.mock import (

# Import Salt Libs
import salt.utils
from salt.exceptions import CommandExecutionError
from salt.exceptions import CommandExecutionError, CommandNotFoundError
import salt.modules.mount as mount

MOCK_SHELL_FILE = 'A B C D F G\n'

@@ -242,15 +242,26 @@ class MountTestCase(TestCase, LoaderModuleMockMixin):
'''
Returns true if the command passed is a fuse mountable application
'''
with patch.object(salt.utils, 'which', return_value=None):
# Return False if fuse doesn't exist
with patch('salt.utils.which', return_value=None):
self.assertFalse(mount.is_fuse_exec('cmd'))

with patch.object(salt.utils, 'which', return_value=True):
self.assertFalse(mount.is_fuse_exec('cmd'))
# Return CommandNotFoundError if fuse exists, but ldd doesn't exist
with patch('salt.utils.which', side_effect=[True, False]):
self.assertRaises(CommandNotFoundError, mount.is_fuse_exec, 'cmd')

mock = MagicMock(side_effect=[1, 0])
with patch.object(salt.utils, 'which', mock):
self.assertFalse(mount.is_fuse_exec('cmd'))
# Return False if fuse exists, ldd exists, but libfuse is not in the
# return
with patch('salt.utils.which', side_effect=[True, True]):
mock = MagicMock(return_value='not correct')
with patch.dict(mount.__salt__, {'cmd.run': mock}):
self.assertFalse(mount.is_fuse_exec('cmd'))

# Return True if fuse exists, ldd exists, and libfuse is in the return
with patch('salt.utils.which', side_effect=[True, True]):
mock = MagicMock(return_value='contains libfuse')
with patch.dict(mount.__salt__, {'cmd.run': mock}):
self.assertTrue(mount.is_fuse_exec('cmd'))

def test_swaps(self):
'''
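Editor's note: the rewritten test leans on mock's side_effect list so each successive which() call gets a different answer. An illustrative snippet of that idiom using the standard-library mock (argument names invented):

from unittest.mock import MagicMock

which = MagicMock(side_effect=[True, False])
assert which('fusermount') is True   # first call: fuse binary found
assert which('ldd') is False         # second call: ldd missing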
@@ -34,7 +34,8 @@ class PamTestCase(TestCase):
'''
Test if the parsing function works
'''
with patch('salt.utils.fopen', mock_open(read_data=MOCK_FILE)):
with patch('os.path.exists', return_value=True), \
patch('salt.utils.fopen', mock_open(read_data=MOCK_FILE)):
self.assertListEqual(pam.read_file('/etc/pam.d/login'),
[{'arguments': [], 'control_flag': 'ok',
'interface': 'ok', 'module': 'ignore'}])
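Editor's note: the extra patch on os.path.exists lets the code under test pass its existence check before reading from the mocked handle. A hedged standard-library illustration of the same double patch (path and data made up):

import os
from unittest.mock import patch, mock_open

fake_data = 'auth ok ok ignore\n'
with patch('os.path.exists', return_value=True), \
        patch('builtins.open', mock_open(read_data=fake_data)):
    assert os.path.exists('/etc/pam.d/login')    # forced True by the first patch
    with open('/etc/pam.d/login') as fh:
        assert fh.read() == fake_data            # contents come from mock_open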
@@ -49,21 +49,24 @@ class PartedTestCase(TestCase, LoaderModuleMockMixin):

def test_virtual_bails_without_parted(self):
'''If parted not in PATH, __virtual__ shouldn't register module'''
with patch('salt.utils.which', lambda exe: not exe == "parted"):
with patch('salt.utils.which', lambda exe: not exe == "parted"),\
patch('salt.utils.is_windows', return_value=False):
ret = parted.__virtual__()
err = (False, 'The parted execution module failed to load parted binary is not in the path.')
self.assertEqual(err, ret)

def test_virtual_bails_without_lsblk(self):
'''If lsblk not in PATH, __virtual__ shouldn't register module'''
with patch('salt.utils.which', lambda exe: not exe == "lsblk"):
with patch('salt.utils.which', lambda exe: not exe == "lsblk"),\
patch('salt.utils.is_windows', return_value=False):
ret = parted.__virtual__()
err = (False, 'The parted execution module failed to load lsblk binary is not in the path.')
self.assertEqual(err, ret)

def test_virtual_bails_without_partprobe(self):
'''If partprobe not in PATH, __virtual__ shouldn't register module'''
with patch('salt.utils.which', lambda exe: not exe == "partprobe"):
with patch('salt.utils.which', lambda exe: not exe == "partprobe"),\
patch('salt.utils.is_windows', return_value=False):
ret = parted.__virtual__()
err = (False, 'The parted execution module failed to load partprobe binary is not in the path.')
self.assertEqual(err, ret)
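Editor's note: the lambda passed to the which() patch hides exactly one binary while reporting every other one as present. Illustration only (binary names taken from the tests above):

fake_which = lambda exe: not exe == 'parted'

assert fake_which('lsblk') is True     # any other binary looks installed
assert fake_which('parted') is False   # only the one under test is hidden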
@@ -18,6 +18,7 @@ from tests.support.mock import (

# Import Salt Libs
import salt.modules.pw_group as pw_group
import salt.utils


@skipIf(NO_MOCK, NO_MOCK_REASON)

@@ -44,6 +45,7 @@ class PwGroupTestCase(TestCase, LoaderModuleMockMixin):
with patch.dict(pw_group.__salt__, {'cmd.run_all': mock}):
self.assertTrue(pw_group.delete('a'))

@skipIf(salt.utils.is_windows(), 'grp not available on Windows')
def test_info(self):
'''
Tests to return information about a group

@@ -57,6 +59,7 @@ class PwGroupTestCase(TestCase, LoaderModuleMockMixin):
with patch.dict(pw_group.grinfo, mock):
self.assertDictEqual(pw_group.info('name'), {})

@skipIf(salt.utils.is_windows(), 'grp not available on Windows')
def test_getent(self):
'''
Tests for return info on all groups
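Editor's note: the new @skipIf guards keep POSIX-only tests from failing on Windows. A hypothetical minimal example of the same pattern, assuming the salt package is importable (class and test names invented):

import unittest

import salt.utils


class PosixOnlyExample(unittest.TestCase):

    # The condition is evaluated when the class is defined, so Windows runs
    # report the test as skipped instead of failing on the missing module.
    @unittest.skipIf(salt.utils.is_windows(), 'grp not available on Windows')
    def test_grp_import(self):
        import grp
        self.assertIsNotNone(grp.getgrall())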
@@ -80,15 +80,14 @@ class QemuNbdTestCase(TestCase, LoaderModuleMockMixin):
with patch.dict(qemu_nbd.__salt__, {'cmd.run': mock}):
self.assertEqual(qemu_nbd.init('/srv/image.qcow2'), '')

with patch.object(os.path, 'isfile', mock):
with patch.object(glob, 'glob',
MagicMock(return_value=['/dev/nbd0'])):
with patch.dict(qemu_nbd.__salt__,
{'cmd.run': mock,
'mount.mount': mock,
'cmd.retcode': MagicMock(side_effect=[1, 0])}):
self.assertDictEqual(qemu_nbd.init('/srv/image.qcow2'),
{'{0}/nbd/nbd0/nbd0'.format(tempfile.gettempdir()): '/dev/nbd0'})
with patch.object(os.path, 'isfile', mock),\
patch.object(glob, 'glob', MagicMock(return_value=['/dev/nbd0'])),\
patch.dict(qemu_nbd.__salt__,
{'cmd.run': mock,
'mount.mount': mock,
'cmd.retcode': MagicMock(side_effect=[1, 0])}):
expected = {os.sep.join([tempfile.gettempdir(), 'nbd', 'nbd0', 'nbd0']): '/dev/nbd0'}
self.assertDictEqual(qemu_nbd.init('/srv/image.qcow2'), expected)

# 'clear' function tests: 1
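Editor's note: the expected mount root is now assembled with the native separator, so the assertion matches qemu_nbd.init() output on any OS. A tiny sketch of that construction (printed value illustrative):

import os
import tempfile

expected_root = os.sep.join([tempfile.gettempdir(), 'nbd', 'nbd0', 'nbd0'])
# e.g. '/tmp/nbd/nbd0/nbd0' on Linux, '...\Temp\nbd\nbd0\nbd0' on Windows
print(expected_root)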
@@ -47,14 +47,19 @@ class SeedTestCase(TestCase, LoaderModuleMockMixin):
'''
Test to update and get the random script to a random place
'''
with patch.dict(seed.__salt__,
{'config.gather_bootstrap_script': MagicMock(return_value='BS_PATH/BS')}):
with patch.object(uuid, 'uuid4', return_value='UUID'):
with patch.object(os.path, 'exists', return_value=True):
with patch.object(os, 'chmod', return_value=None):
with patch.object(shutil, 'copy', return_value=None):
self.assertEqual(seed.prep_bootstrap('MPT'), ('MPT/tmp/UUID/BS', '/tmp/UUID'))
self.assertEqual(seed.prep_bootstrap('/MPT'), ('/MPT/tmp/UUID/BS', '/tmp/UUID'))
with patch.dict(seed.__salt__, {'config.gather_bootstrap_script': MagicMock(return_value=os.path.join('BS_PATH', 'BS'))}),\
patch.object(uuid, 'uuid4', return_value='UUID'),\
patch.object(os.path, 'exists', return_value=True),\
patch.object(os, 'chmod', return_value=None),\
patch.object(shutil, 'copy', return_value=None):

expect = (os.path.join('MPT', 'tmp', 'UUID', 'BS'),
os.sep + os.path.join('tmp', 'UUID'))
self.assertEqual(seed.prep_bootstrap('MPT'), expect)

expect = (os.sep + os.path.join('MPT', 'tmp', 'UUID', 'BS'),
os.sep + os.path.join('tmp', 'UUID'))
self.assertEqual(seed.prep_bootstrap(os.sep + 'MPT'), expect)

def test_apply_(self):
'''
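Editor's note: the expectations are rebuilt from os.path.join and os.sep so they stay valid under either path separator. A standalone sketch, where 'MPT', 'UUID' and 'BS' simply mirror the mocked values above:

import os

script = os.path.join('MPT', 'tmp', 'UUID', 'BS')   # MPT/tmp/UUID/BS or MPT\tmp\UUID\BS
tmpdir = os.sep + os.path.join('tmp', 'UUID')       # /tmp/UUID or \tmp\UUID
print(script, tmpdir)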
@@ -109,10 +109,9 @@ class VirtualenvTestCase(TestCase, LoaderModuleMockMixin):

# Are we logging the deprecation information?
self.assertIn(
'INFO:The virtualenv \'--never-download\' option has been '
'deprecated in virtualenv(>=1.10), as such, the '
'\'never_download\' option to `virtualenv.create()` has '
'also been deprecated and it\'s not necessary anymore.',
'INFO:--never-download was deprecated in 1.10.0, '
'but reimplemented in 14.0.0. If this feature is needed, '
'please install a supported virtualenv version.',
handler.messages
)
@@ -95,7 +95,10 @@ class LocalCacheCleanOldJobsTestCase(TestCase, LoaderModuleMockMixin):
local_cache.clean_old_jobs()

# Get the name of the JID directory that was created to test against
jid_dir_name = jid_dir.rpartition('/')[2]
if salt.utils.is_windows():
jid_dir_name = jid_dir.rpartition('\\')[2]
else:
jid_dir_name = jid_dir.rpartition('/')[2]

# Assert the JID directory is still present to be cleaned after keep_jobs interval
self.assertEqual([jid_dir_name], os.listdir(TMP_JID_DIR))
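Editor's note: the test now branches on the platform to split off the trailing path component. For what it is worth, os.path.basename reaches the same component without branching; this is a hedged alternative sketch, not what the commit does (the sample path is invented):

import os

jid_dir = os.path.join('var', 'cache', 'salt', 'master', 'jobs', '20170101010101')
jid_dir_name = os.path.basename(jid_dir)   # trailing component on either platform
print(jid_dir_name)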
@@ -66,7 +66,7 @@ class TestGitFSProvider(TestCase):
('git_pillar', salt.utils.gitfs.GitPillar),
('winrepo', salt.utils.gitfs.WinRepo)):
key = '{0}_provider'.format(role_name)
for provider in salt.utils.gitfs.VALID_PROVIDERS:
for provider in salt.utils.gitfs.GIT_PROVIDERS:
verify = 'verify_gitpython'
mock1 = _get_mock(verify, provider)
with patch.object(role_class, verify, mock1):