Merge branch '2018.3' into 'develop'

Conflicts:
  - doc/ref/configuration/minion.rst
  - salt/client/__init__.py
  - salt/grains/core.py
  - salt/modules/timezone.py
  - tests/support/case.py
  - tests/unit/modules/test_file.py
  - tests/unit/roster/test_sshconfig.py
This commit is contained in:
rallytime 2018-06-25 11:26:25 -04:00
commit 6177d6e3c1
No known key found for this signature in database
GPG key ID: E8F1A4B90D0DEA19
78 changed files with 3310 additions and 1312 deletions

View file

@ -43,23 +43,20 @@ provisioner:
repo: git
testingdir: /testing
salt_copy_filter:
- .bundle
- .kitchen
- .kitchen.yml
- artifacts
- Gemfile
- Gemfile.lock
- README.rst
- .travis.yml
- '*.pyc'
- __pycache__
- '*.pyc'
- .bundle
- .tox
- .kitchen
- artifacts
- Gemfile.lock
state_top:
base:
"os:Windows":
- match: grain
- prep_windows
"*":
- git.salt
- <%= ENV['KITCHEN_STATE'] || 'git.salt' %>
pillars:
top.sls:
base:

View file

@ -1422,6 +1422,38 @@ List of hosts to bypass HTTP proxy
no_proxy: [ '127.0.0.1', 'foo.tld' ]
Docker Configuration
====================
.. conf_minion:: docker.update_mine
``docker.update_mine``
----------------------
.. versionadded:: 2017.7.8,2018.3.3
.. versionchanged:: Fluorine
The default value is now ``False``
Default: ``True``
If enabled, when containers are added, removed, stopped, started, etc., the
:ref:`mine <salt-mine>` will be updated with the results of :py:func:`docker.ps
verbose=True all=True host=True <salt.modules.dockermod.ps>`. This mine data is
used by :py:func:`mine.get_docker <salt.modules.mine.get_docker>`. Set this
option to ``False`` to keep Salt from updating the mine with this information.
.. note::
This option can also be set in Grains or Pillar data, with Grains
overriding Pillar and the minion config file overriding Grains.
.. note::
Disabling this will of course keep :py:func:`mine.get_docker
<salt.modules.mine.get_docker>` from returning any information for a given
minion.
.. code-block:: yaml
docker.update_mine: False
.. conf_minion:: docker.compare_container_networks
@ -1457,6 +1489,7 @@ Specifies which keys are examined by
- GlobalIPv6Address
- IPv6Gateway
Minion Execution Module Management
==================================

View file

@ -373,6 +373,7 @@ execution modules
s3
s6
salt_proxy
saltcheck
saltcloudmod
saltutil
schedule

View file

@ -0,0 +1,6 @@
======================
salt.modules.saltcheck
======================
.. automodule:: salt.modules.saltcheck
:members:

View file

@ -180,6 +180,404 @@ available, since that's not actually part of what's being tested, we mocked that
import by patching ``sys.modules`` when tests are running.
Mocking Filehandles
-------------------
.. note::
This documentation applies to the 2018.3 release cycle and newer. The
extended functionality for ``mock_open`` described below does not exist in
the 2017.7 and older release branches.
Opening files in Salt is done using ``salt.utils.files.fopen()``. When testing
code that reads from files, the ``mock_open`` helper can be used to mock
filehandles. Note that this is not the same ``mock_open`` as
:py:func:`unittest.mock.mock_open` from the Python standard library, but rather
a separate implementation which has additional functionality.
.. code-block:: python
from tests.support.unit import TestCase, skipIf
from tests.support.mock import (
patch,
mock_open,
NO_MOCK,
NO_MOCK_REASON,
)
import salt.modules.mymod as mymod
@skipIf(NO_MOCK, NO_MOCK_REASON)
class MyAwesomeTestCase(TestCase):
def test_something(self):
fopen_mock = mock_open(read_data='foo\nbar\nbaz\n')
with patch('salt.utils.files.fopen', fopen_mock):
result = mymod.myfunc()
assert result is True
This will force any filehandle opened to mimic a filehandle which, when read,
produces the specified contents.
.. important::
**String Types**
When running tests on Python 2, ``mock_open`` will convert any ``unicode``
types to ``str`` types to more closely reproduce Python 2 behavior (file
reads are always ``str`` types in Python 2, irrespective of mode).
However, when configuring your read_data, make sure that you are using
bytestrings (e.g. ``b'foo\nbar\nbaz\n'``) when the code you are testing is
opening a file for binary reading, otherwise the tests will fail on Python
3. The mocked filehandles produced by ``mock_open`` will raise a
:py:obj:`TypeError` if you attempt to read a bytestring when opening for
non-binary reading, and similarly will not let you read a string when
opening a file for binary reading. They will also not permit bytestrings to
be "written" if the mocked filehandle was opened for non-binary writing,
and vice-versa when opened for binary writing. These enhancements force
test writers to write more accurate tests.
More Complex Scenarios
**********************
.. _unit-tests-multiple-file-paths:
Multiple File Paths
+++++++++++++++++++
What happens when the code being tested reads from more than one file? For
those cases, you can pass ``read_data`` as a dictionary:
.. code-block:: python
import textwrap
from tests.support.unit import TestCase, skipIf
from tests.support.mock import (
patch,
mock_open,
NO_MOCK,
NO_MOCK_REASON,
)
import salt.modules.mymod as mymod
@skipIf(NO_MOCK, NO_MOCK_REASON)
class MyAwesomeTestCase(TestCase):
def test_something(self):
contents = {
'/etc/foo.conf': textwrap.dedent('''\
foo
bar
baz
'''),
'/etc/b*.conf': textwrap.dedent('''\
one
two
three
'''),
}
fopen_mock = mock_open(read_data=contents)
with patch('salt.utils.files.fopen', fopen_mock):
result = mymod.myfunc()
assert result is True
This would make ``salt.utils.files.fopen()`` produce filehandles with different
contents depending on which file was being opened by the code being tested.
``/etc/foo.conf`` and any file matching the pattern ``/etc/b*.conf`` would
work, while opening any other path would result in a
:py:obj:`FileNotFoundError` being raised (in Python 2, an ``IOError``).
Since file patterns are supported, it is possible to use a pattern of ``'*'``
to define a fallback if no other patterns match the filename being opened. The
below two ``mock_open`` calls would produce identical results:
.. code-block:: python
mock_open(read_data='foo\n')
mock_open(read_data={'*': 'foo\n'})
.. note::
Take care when specifying the ``read_data`` as a dictionary, in cases where
the patterns overlap (e.g. when both ``/etc/b*.conf`` and ``/etc/bar.conf``
are in the ``read_data``). Dictionary iteration order will determine which
pattern is attempted first, second, etc., with the exception of ``*`` which
is used when no other pattern matches. If your test case calls for
specifying overlapping patterns, and you are not running Python 3.6 or
newer, then an ``OrderedDict`` can be used to ensure matching is handled in
the desired way:
.. code-block:: python
contents = OrderedDict()
contents['/etc/bar.conf'] = 'foo\nbar\nbaz\n'
contents['/etc/b*.conf'] = IOError(errno.EACCES, 'Permission denied')
contents['*'] = 'This is a fallback for files not beginning with "/etc/b"\n'
fopen_mock = mock_open(read_data=contents)
Raising Exceptions
++++++++++++++++++
Instead of a string, an exception can also be used as the ``read_data``:
.. code-block:: python
import errno
from tests.support.unit import TestCase, skipIf
from tests.support.mock import (
patch,
mock_open,
NO_MOCK,
NO_MOCK_REASON,
)
import salt.modules.mymod as mymod
@skipIf(NO_MOCK, NO_MOCK_REASON)
class MyAwesomeTestCase(TestCase):
def test_something(self):
exc = IOError(errno.EACCES, 'Permission denied')
fopen_mock = mock_open(read_data=exc)
with patch('salt.utils.files.fopen', fopen_mock):
mymod.myfunc()
The above example would raise the specified exception when any file is opened.
The expectation would be that ``mymod.myfunc()`` would gracefully handle the
IOError, so a failure to do that would result in it being raised and causing
the test to fail.
Multiple File Contents
++++++++++++++++++++++
For cases in which a file is being read more than once, and it is necessary to
test a function's behavior based on what the file looks like the second (or
third, etc.) time it is read, just specify the contents for that file as a
list. Each time the file is opened, ``mock_open`` will cycle through the list
and produce a mocked filehandle with the specified contents. For example:
.. code-block:: python
import errno
import textwrap
from tests.support.unit import TestCase, skipIf
from tests.support.mock import (
patch,
mock_open,
NO_MOCK,
NO_MOCK_REASON,
)
import salt.modules.mymod as mymod
@skipIf(NO_MOCK, NO_MOCK_REASON)
class MyAwesomeTestCase(TestCase):
def test_something(self):
contents = {
'/etc/foo.conf': [
textwrap.dedent('''\
foo
bar
'''),
textwrap.dedent('''\
foo
bar
baz
'''),
],
'/etc/b*.conf': [
IOError(errno.ENOENT, 'No such file or directory'),
textwrap.dedent('''\
one
two
three
'''),
],
}
fopen_mock = mock_open(read_data=contents)
with patch('salt.utils.files.fopen', fopen_mock):
result = mymod.myfunc()
assert result is True
Using this example, the first time ``/etc/foo.conf`` is opened, it will
simulate a file with the first string in the list as its contents, while the
second time it is opened, the simulated file's contents will be the second
string in the list.
If no more items remain in the list, then attempting to open the file will
raise a :py:obj:`RuntimeError`. In the example above, if ``/etc/foo.conf`` were
to be opened a third time, a :py:obj:`RuntimeError` would be raised.
Note that exceptions can also be mixed in with strings when using this
technique. In the above example, if ``/etc/bar.conf`` were to be opened twice,
the first time would simulate the file not existing, while the second time
would simulate a file with the string defined in the second element of the list.
.. note::
Notice that the second path in the ``contents`` dictionary above
(``/etc/b*.conf``) contains an asterisk. The items in the list are cycled
through for each match of a given pattern (*not* separately for each
individual file path), so this means that only two files matching that
pattern could be opened before the next one would raise a
:py:obj:`RuntimeError`.
Accessing the Mocked Filehandles in a Test
******************************************
.. note::
The code for the ``MockOpen``, ``MockCall``, and ``MockFH`` classes
(referenced below) can be found in ``tests/support/mock.py``. There are
extensive unit tests for them located in ``tests/unit/test_mock.py``.
The above examples simply show how to mock ``salt.utils.files.fopen()`` to
simulate files with the contents you desire, but you can also access the mocked
filehandles (and more), and use them to craft assertions in your tests. To do
so, just add an ``as`` clause to the end of the ``patch`` statement:
.. code-block:: python
fopen_mock = mock_open(read_data='foo\nbar\nbaz\n')
with patch('salt.utils.files.fopen', fopen_mock) as m_open:
# do testing here
...
...
When doing this, ``m_open`` will be a ``MockOpen`` instance. It will contain
several useful attributes:
- **read_data** - A dictionary containing the ``read_data`` passed when
``mock_open`` was invoked. In the event that :ref:`multiple file paths
<unit-tests-multiple-file-paths>` are not used, then this will be a
dictionary mapping ``*`` to the ``read_data`` passed to ``mock_open``.
- **call_count** - An integer representing how many times
``salt.utils.files.fopen()`` was called to open a file.
- **calls** - A list of ``MockCall`` objects. A ``MockCall`` object is a simple
class which stores the arguments passed to it, making the positional
arguments available via its ``args`` attribute, and the keyword arguments
available via its ``kwargs`` attribute.
.. code-block:: python
from tests.support.unit import TestCase, skipIf
from tests.support.mock import (
patch,
mock_open,
MockCall,
NO_MOCK,
NO_MOCK_REASON,
)
import salt.modules.mymod as mymod
@skipIf(NO_MOCK, NO_MOCK_REASON)
class MyAwesomeTestCase(TestCase):
def test_something(self):
with patch('salt.utils.files.fopen', mock_open(read_data=b'foo\n')) as m_open:
mymod.myfunc()
# Assert that only two opens were attempted
assert m_open.call_count == 2
# Assert that only /etc/foo.conf was opened
assert all(call.args[0] == '/etc/foo.conf' for call in m_open.calls)
# Assert that the first open was for binary read, and the
# second was for binary write.
assert m_open.calls == [
MockCall('/etc/foo.conf', 'rb'),
MockCall('/etc/foo.conf', 'wb'),
]
Note that ``MockCall`` is imported from ``tests.support.mock`` in the above
example. Also, the second assert above is redundant since it is covered in
the final assert, but both are included simply as an example.
- **filehandles** - A dictionary mapping the unique file paths opened, to lists
of ``MockFH`` objects. Each open creates a unique ``MockFH`` object. Each
``MockFH`` object itself has a number of useful attributes:
- **filename** - The path to the file which was opened using
``salt.utils.files.fopen()``
- **call** - A ``MockCall`` object representing the arguments passed to
``salt.utils.files.fopen()``. Note that this ``MockCall`` is also available
in the parent ``MockOpen`` instance's **calls** list.
- The following methods are mocked using :py:class:`unittest.mock.Mock`
objects, and Mock's built-in asserts (as well as the call data) can be used
as you would with any other Mock object:
- **.read()**
- **.readlines()**
- **.readline()**
- **.close()**
- **.write()**
- **.writelines()**
- **.seek()**
- The read functions (**.read()**, **.readlines()**, **.readline()**) all
work as expected, as does iterating through the file line by line (i.e.
``for line in fh:``).
- The **.tell()** method is also implemented in such a way that it updates
after each time the mocked filehandle is read, and will report the correct
position. The one caveat here is that **.seek()** doesn't actually work
(it's simply mocked), and will not change the position. Additionally,
neither **.write()** or **.writelines()** will modify the mocked
filehandle's contents.
- The attributes **.write_calls** and **.writelines_calls** (no parentheses)
are available as shorthands and correspond to lists containing the contents
passed for all calls to **.write()** and **.writelines()**, respectively.
Examples
++++++++
.. code-block:: python
with patch('salt.utils.files.fopen', mock_open(read_data=contents)) as m_open:
# Run the code you are unit testing
mymod.myfunc()
# Check that only the expected file was opened, and that it was opened
# only once.
assert m_open.call_count == 1
assert list(m_open.filehandles) == ['/etc/foo.conf']
# "opens" will be a list of all the mocked filehandles opened
opens = m_open.filehandles['/etc/foo.conf']
# Check that we wrote the expected lines ("expected" here is assumed to
# be a list of strings)
assert opens[0].write_calls == expected
.. code-block:: python
with patch('salt.utils.files.fopen', mock_open(read_data=contents)) as m_open:
# Run the code you are unit testing
mymod.myfunc()
# Check that .readlines() was called (remember, it's a Mock)
m_open.filehandles['/etc/foo.conf'][0].readlines.assert_called()
.. code-block:: python
with patch('salt.utils.files.fopen', mock_open(read_data=contents)) as m_open:
# Run the code you are unit testing
mymod.myfunc()
# Check that we read the file and also wrote to it
m_open.filehandles['/etc/foo.conf'][0].read.assert_called_once()
m_open.filehandles['/etc/foo.conf'][1].writelines.assert_called_once()
.. _`Mock()`: https://github.com/testing-cabal/mock
Naming Conventions
------------------
@ -198,7 +596,7 @@ prepended with the ``test_`` naming syntax, as described above.
If a function does not start with ``test_``, then the function acts as a "normal"
function and is not considered a testing function. It will not be included in the
test run or testing output. The same principle applies to unit test files that
do not have the ``test_*.py`` naming syntax. This test file naming convention
do not have the ``test_*.py`` naming syntax. This test file naming convention
is how the test runner recognizes that a test file contains unit tests.
@ -209,8 +607,7 @@ Most commonly, the following imports are necessary to create a unit test:
.. code-block:: python
# Import Salt Testing libs
from tests.support.unit import skipIf, TestCase
from tests.support.unit import TestCase, skipIf
If you need mock support to your tests, please also import:
@ -513,7 +910,7 @@ This function contains two raise statements and one return statement, so we
know that we will need (at least) three tests. It has two function arguments
and many references to non-builtin functions. In the tests below you will see
that MagicMock's ``patch()`` method may be used as a context manager or as a
decorator. When patching the salt dunders however, please use the context
decorator. When patching the salt dunders however, please use the context
manager approach.
There are three test functions, one for each raise and return statement in the

View file

@ -2,9 +2,7 @@
Salt 2017.7.6 Release Notes
===========================
Version 2017.7.6 is a bugfix release for :ref:`2017.7.0
<release-2017-7-0>`.
Version 2017.7.6 is a bugfix release for :ref:`2017.7.0 <release-2017-7-0>`.
Statistics
==========
@ -15,6 +13,15 @@ Statistics
- Contributors: **47** (`Ch3LL`_, `DmitryKuzmenko`_, `GwiYeong`_, `Quarky9`_, `RichardW42`_, `UtahDave`_, `amaclean199`_, `arif-ali`_, `baniobloom`_, `bdrung`_, `benediktwerner`_, `bmiguel-teixeira`_, `cachedout`_, `dafenko`_, `damon-atkins`_, `dwoz`_, `ezh`_, `folti`_, `fpicot`_, `frogunder`_, `garethgreenaway`_, `gtmanfred`_, `isbm`_, `jeroennijhof`_, `jfindlay`_, `jfoboss`_, `kstreee`_, `lomeroe`_, `mattp-`_, `meaksh`_, `mirceaulinic`_, `myinitialsarepm`_, `mzbroch`_, `nages13`_, `paclat`_, `pcjeff`_, `pruiz`_, `psyer`_, `rallytime`_, `s0undt3ch`_, `skizunov`_, `smitty42`_, `terminalmage`_, `twangboy`_, `vutny`_, `yagnik`_, `yannj-fr`_)
Tornado 5.0 Support for Python 2 Only
-------------------------------------
Tornado 5.0 moves to using asyncio for all python3 versions. Because of this
and changes in asyncio between python 3.4 and 3.5 to only be able to use one
ioloop, which requires some rearchitecting, support for tornado 5.0 and python3
versions of salt has been delayed to a later release.
For now, to use tornado 5.0, the python 2 version of salt must be used.
Tornado 5.0 Support for Python 2 Only
-------------------------------------

View file

@ -5,15 +5,38 @@ In Progress: Salt 2017.7.7 Release Notes
Version 2017.7.7 is an **unreleased** bugfix release for :ref:`2017.7.0 <release-2017-7-0>`.
This release is still in progress and has not been released yet.
New win_snmp behavior
=====================
The ``2017.7.7`` release contains only a single fix for Issue `#48038`_, which
is a critical bug that occurs in a multi-syndic setup where the same job is run
multiple times on a minion.
- :py:func:`win_snmp.get_community_names
<salt.modules.win_snmp.get_community_names>` now returns the SNMP settings
actually in effect on the box. If settings are managed via GroupPolicy, those
settings will be returned. Otherwise, normal settings are returned.
Statistics
==========
- :py:func:`win_snmp.set_community_names
<salt.modules.win_snmp.set_community_names>` now raises an error when SNMP
settings are being managed by GroupPolicy.
- Total Merges: **1**
- Total Issue References: **1**
- Total PR References: **2**
- Contributors: **2** (`garethgreenaway`_, `rallytime`_)
Changelog for v2017.7.6..v2017.7.7
==================================
*Generated at: 2018-06-14 15:43:34 UTC*
* **ISSUE** `#48038`_: (`austinpapp`_) jobs are not dedup'ing minion side (refs: `#48075`_)
* **PR** `#48098`_: (`rallytime`_) Back-port `#48075`_ to 2017.7.7
@ *2018-06-14 12:53:42 UTC*
* **PR** `#48075`_: (`garethgreenaway`_) [2017.7] Ensure that the shared list of jids is passed (refs: `#48098`_)
* 084de927fe Merge pull request `#48098`_ from rallytime/bp-48075-2017.7.7
* e4e62e8b3a Ensure that the shared list of jids is passed when creating the Minion. Fixes an issue when minions are pointed at multiple syndics.
.. _`#48038`: https://github.com/saltstack/salt/issues/48038
.. _`#48075`: https://github.com/saltstack/salt/pull/48075
.. _`#48098`: https://github.com/saltstack/salt/pull/48098
.. _`austinpapp`: https://github.com/austinpapp
.. _`garethgreenaway`: https://github.com/garethgreenaway
.. _`rallytime`: https://github.com/rallytime

View file

@ -0,0 +1,31 @@
========================================
In Progress: Salt 2017.7.8 Release Notes
========================================
Version 2017.7.8 is an **unreleased** bugfix release for :ref:`2017.7.0 <release-2017-7-0>`.
This release is still in progress and has not been released yet.
New win_snmp behavior
=====================
- :py:func:`win_snmp.get_community_names
<salt.modules.win_snmp.get_community_names>` now returns the SNMP settings
actually in effect on the box. If settings are managed via GroupPolicy, those
settings will be returned. Otherwise, normal settings are returned.
- :py:func:`win_snmp.set_community_names
<salt.modules.win_snmp.set_community_names>` now raises an error when SNMP
settings are being managed by GroupPolicy.
Option Added to Disable Docker Mine Updates
===========================================
When a docker container is added, removed, started, stopped, etc., the results
of a :py:func:`docker.ps verbose=True all=True host=True
<salt.modules.dockermod.ps>` are sent to the :ref:`mine <salt-mine>`, to be
used by :py:func:`mine.get_docker <salt.modules.mine.get_docker>`.
A new config option (:conf_minion:`docker.update_mine`) has been added. When
set to ``False``, Salt will not send this information to the mine. This is
useful in cases where sensitive information is stored in the container's
environment.

View file

@ -46,7 +46,66 @@ to the flat roster file. This behavior can also be enabled by setting
Changelog for v2018.3.0..v2018.3.1
==================================
*Generated at: 2018-05-30 14:09:03 UTC*
*Generated at: 2018-06-06 17:43:01 UTC*
* **ISSUE** `#47955`_: (`frogunder`_) 2018.3.1 Creating Windows machine in Amazon using salt-cloud fails. (refs: `#47989`_)
* **PR** `#47998`_: (`rallytime`_) Back-port `#47989`_ to 2018.3.1
@ *2018-06-06 17:08:04 UTC*
* **PR** `#47989`_: (`dwoz`_) Properly decode password from aws using m2crypto (refs: `#47998`_)
* 605463ca0d Merge pull request `#47998`_ from rallytime/bp-47989
* 1b7e9ac2d3 Lint fixes
* 0545152ddd Properly decode password from aws using m2crypto
* **PR** `#47965`_: (`Ch3LL`_) Add PR 47924 from 2018.3 branch
@ *2018-06-06 13:54:09 UTC*
* dbc798ac68 Merge pull request `#47965`_ from Ch3LL/gitpy_mac_3.1
* bf608abd44 Catch all exceptions in git import for salt.utils.gitfs
* **PR** `#47973`_: (`terminalmage`_) salt.modules.testinframod: fix TypeError invoking types.FunctionType
@ *2018-06-06 13:53:46 UTC*
* 864d640633 Merge pull request `#47973`_ from terminalmage/fix-testinfra
* 4518c89484 Lint: Remove unused six import
* c6816b2149 salt.modules.testinframod: fix TypeError invoking types.FunctionType
* **ISSUE** `#47236`_: (`MorphBonehunter`_) x509.private_key_managed broken after upgrade to 2018.3.0 (refs: `#47957`_)
* **PR** `#47967`_: (`rallytime`_) Back-port `#47957`_ to 2018.3.1
@ *2018-06-06 13:53:28 UTC*
* **PR** `#47957`_: (`garethgreenaway`_) [2018.8] Ensure x509 passphrase is a string (refs: `#47967`_)
* 5ddcfff420 Merge pull request `#47967`_ from rallytime/bp-47957
* 9a55579af1 removing unnecessary change
* 329b2e5956 Ensuring that when a passphrase is passed in, it is returned as a string from the passphrase callback.
* **PR** `#47902`_: (`Ch3LL`_) Remove In Progress for 2018.3.1 Release Notes
@ *2018-05-30 18:26:49 UTC*
* 9c964fdbce Merge pull request `#47902`_ from Ch3LL/rn_in_progress
* f560a151cd Remove In Progress for 2018.3.1 Release Notes
* **PR** `#47897`_: (`Ch3LL`_) Add changelog to 2018.3.1 release notes
@ *2018-05-30 15:04:42 UTC*
* ea7b4fdc08 Merge pull request `#47897`_ from Ch3LL/rn_2018
* e27ee273a7 Add == line to changelog line for release notes
* 61e56d275d Add changelog to 2018.3.1 release notes
* **ISSUE** `#47784`_: (`jpsv`_) win_lgpo.py line 5368; AttributeError: 'OrderedDict' object has no attribute 'lower' (refs: `#47848`_)
@ -519,7 +578,7 @@ Changelog for v2018.3.0..v2018.3.1
* fd9bc06aab bytes file that decodes the same utf-8 and cp1252
* **ISSUE** `#46660`_: (`mruepp`_) top file merging same does produce conflicting ids with gitfs (refs: `#46751`_, `#47354`_)
* **ISSUE** `#46660`_: (`mruepp`_) top file merging same does produce conflicting ids with gitfs (refs: `#47354`_, `#46751`_)
* **PR** `#47465`_: (`rallytime`_) Back-port `#47354`_ to 2018.3
@ *2018-05-04 13:06:04 UTC*
@ -768,9 +827,9 @@ Changelog for v2018.3.0..v2018.3.1
* **PR** `#47368`_: (`rallytime`_) [2018.3] Merge forward from 2017.7 to 2018.3
@ *2018-05-01 18:56:20 UTC*
* **PR** `#47106`_: (`DmitryKuzmenko`_) Tornado50 compatibility fixes (refs: `#47368`_, `#47374`_, `#47433`_)
* **PR** `#47106`_: (`DmitryKuzmenko`_) Tornado50 compatibility fixes (refs: `#47374`_, `#47368`_, `#47433`_)
* **PR** `#46002`_: (`isbm`_) Pyzmq 17.0.0 proper handling (refs: `#47368`_, `#47374`_)
* **PR** `#46002`_: (`isbm`_) Pyzmq 17.0.0 proper handling (refs: `#47374`_, `#47368`_)
* 0bdfaa5ffe Merge pull request `#47368`_ from rallytime/merge-2018.3
@ -1037,9 +1096,9 @@ Changelog for v2018.3.0..v2018.3.1
* **PR** `#47374`_: (`DmitryKuzmenko`_) tornado50 merge forward for 2018.3
@ *2018-04-29 16:29:12 UTC*
* **PR** `#47106`_: (`DmitryKuzmenko`_) Tornado50 compatibility fixes (refs: `#47368`_, `#47374`_, `#47433`_)
* **PR** `#47106`_: (`DmitryKuzmenko`_) Tornado50 compatibility fixes (refs: `#47374`_, `#47368`_, `#47433`_)
* **PR** `#46002`_: (`isbm`_) Pyzmq 17.0.0 proper handling (refs: `#47368`_, `#47374`_)
* **PR** `#46002`_: (`isbm`_) Pyzmq 17.0.0 proper handling (refs: `#47374`_, `#47368`_)
* 3400f829c4 Merge pull request `#47374`_ from DSRCorporation/bugs/tornado50-2018.3
@ -1139,7 +1198,7 @@ Changelog for v2018.3.0..v2018.3.1
* cc2538e08f The grp modules is not available on windows
* **ISSUE** `#46862`_: (`kivoli`_) Setting locale.system fails in 2018.3 (refs: `#46869`_, `#47280`_)
* **ISSUE** `#46862`_: (`kivoli`_) Setting locale.system fails in 2018.3 (refs: `#47280`_, `#46869`_)
* **PR** `#47280`_: (`gtmanfred`_) make sure not to send invalid information
@ *2018-04-25 17:46:45 UTC*
@ -1263,18 +1322,18 @@ Changelog for v2018.3.0..v2018.3.1
* b8630a70be Fix virtual package detection
* **ISSUE** `#47225`_: (`pruiz`_) zfs.filesystem_present takes forever on a dataset with lots (10k+) of snapshots (refs: `#47226`_, `#47227`_, `#47228`_)
* **ISSUE** `#47225`_: (`pruiz`_) zfs.filesystem_present takes forever on a dataset with lots (10k+) of snapshots (refs: `#47228`_, `#47227`_, `#47226`_)
* **PR** `#47228`_: (`pruiz`_) Fix issue `#47225`_: avoid zfs.filesystem_present slowdown when dataset has lots of snapshots (2018.3 branch)
@ *2018-04-24 13:35:21 UTC*
* **PR** `#47226`_: (`pruiz`_) Fix issue `#47225`_: avoid zfs.filesystem_present slowdown when dataset has lots of snapshots (refs: `#47227`_, `#47228`_)
* **PR** `#47226`_: (`pruiz`_) Fix issue `#47225`_: avoid zfs.filesystem_present slowdown when dataset has lots of snapshots (refs: `#47228`_, `#47227`_)
* 428e915d6a Merge pull request `#47228`_ from pruiz/pruiz/zfs-dataset-present-slow-2018.3
* cfbf136ab2 Fix issue `#47225`_: avoid zfs.filesystem_present slowdown when dataset has lots of snapshots
* **ISSUE** `#46943`_: (`Auha`_) Slack.Engine could not start (refs: `#47109`_, `#47262`_)
* **ISSUE** `#46943`_: (`Auha`_) Slack.Engine could not start (refs: `#47262`_, `#47109`_)
* **PR** `#47262`_: (`garethgreenaway`_) [2018.3] Fixes to targeting in Slack engine
@ *2018-04-24 13:18:36 UTC*
@ -1642,7 +1701,7 @@ Changelog for v2018.3.0..v2018.3.1
* 92eeaa51bd Put some error checking in the shell command
* **ISSUE** `#46943`_: (`Auha`_) Slack.Engine could not start (refs: `#47109`_, `#47262`_)
* **ISSUE** `#46943`_: (`Auha`_) Slack.Engine could not start (refs: `#47262`_, `#47109`_)
* **PR** `#47109`_: (`garethgreenaway`_) [2018.3] fixes to Slack engine
@ *2018-04-17 13:56:27 UTC*
@ -2010,7 +2069,7 @@ Changelog for v2018.3.0..v2018.3.1
* **ISSUE** `#46834`_: (`oeuftete`_) strftime filter not found in 2018.3.0 (refs: `#46848`_)
* **ISSUE** `#46668`_: (`anlutro`_) Jinja2 filter strftime stopped working in salt-ssh 2018.3 (refs: `#46744`_, `#46848`_)
* **ISSUE** `#46668`_: (`anlutro`_) Jinja2 filter strftime stopped working in salt-ssh 2018.3 (refs: `#46848`_, `#46744`_)
* **PR** `#46848`_: (`garethgreenaway`_) [2018.8] salt-ssh jinja filters tests
@ *2018-04-10 16:19:51 UTC*
@ -2248,7 +2307,7 @@ Changelog for v2018.3.0..v2018.3.1
* d9511d04d4 `#43499`_, zmq setsockopt need to adapt python3
* **ISSUE** `#46862`_: (`kivoli`_) Setting locale.system fails in 2018.3 (refs: `#46869`_, `#47280`_)
* **ISSUE** `#46862`_: (`kivoli`_) Setting locale.system fails in 2018.3 (refs: `#47280`_, `#46869`_)
* **PR** `#46869`_: (`gtmanfred`_) Always return dictionary for _localectl_status
@ *2018-04-05 13:25:14 UTC*
@ -2782,7 +2841,7 @@ Changelog for v2018.3.0..v2018.3.1
* 19bd1d9db5 handle user-data for metadata grains
* **ISSUE** `#46668`_: (`anlutro`_) Jinja2 filter strftime stopped working in salt-ssh 2018.3 (refs: `#46744`_, `#46848`_)
* **ISSUE** `#46668`_: (`anlutro`_) Jinja2 filter strftime stopped working in salt-ssh 2018.3 (refs: `#46848`_, `#46744`_)
* **PR** `#46744`_: (`garethgreenaway`_) [2018.3] Ensure salt.utils.dateutils is available for templates via salt-ssh
@ *2018-03-28 21:09:46 UTC*
@ -3441,14 +3500,14 @@ Changelog for v2018.3.0..v2018.3.1
* e0940a9fc4 Properly detect use of the state.orch alias and add orch jid to kwargs
* **ISSUE** `#42932`_: (`bobrik`_) cmd.run with bg: true doesn't fail properly (refs: `#45932`_, `#46172`_)
* **ISSUE** `#42932`_: (`bobrik`_) cmd.run with bg: true doesn't fail properly (refs: `#46172`_, `#45932`_)
* **PR** `#46172`_: (`The-Loeki`_) cmdmod: reimplementation of `#45932`_ for Oxygen
@ *2018-02-28 19:14:26 UTC*
* **PR** `#45932`_: (`The-Loeki`_) Fix cmd run_all bg error (refs: `#46172`_)
* **PR** `#39980`_: (`vutny`_) [2016.3] Allow to use `bg` kwarg for `cmd.run` state function (refs: `#45932`_, `#46172`_)
* **PR** `#39980`_: (`vutny`_) [2016.3] Allow to use `bg` kwarg for `cmd.run` state function (refs: `#46172`_, `#45932`_)
* 20d869c228 Merge pull request `#46172`_ from The-Loeki/fix_cmd_run_all_bg_oxygen
@ -4503,6 +4562,7 @@ Changelog for v2018.3.0..v2018.3.1
.. _`#47226`: https://github.com/saltstack/salt/pull/47226
.. _`#47227`: https://github.com/saltstack/salt/pull/47227
.. _`#47228`: https://github.com/saltstack/salt/pull/47228
.. _`#47236`: https://github.com/saltstack/salt/issues/47236
.. _`#47239`: https://github.com/saltstack/salt/issues/47239
.. _`#47241`: https://github.com/saltstack/salt/pull/47241
.. _`#47242`: https://github.com/saltstack/salt/pull/47242
@ -4664,6 +4724,15 @@ Changelog for v2018.3.0..v2018.3.1
.. _`#47848`: https://github.com/saltstack/salt/pull/47848
.. _`#47874`: https://github.com/saltstack/salt/pull/47874
.. _`#47881`: https://github.com/saltstack/salt/pull/47881
.. _`#47897`: https://github.com/saltstack/salt/pull/47897
.. _`#47902`: https://github.com/saltstack/salt/pull/47902
.. _`#47955`: https://github.com/saltstack/salt/issues/47955
.. _`#47957`: https://github.com/saltstack/salt/pull/47957
.. _`#47965`: https://github.com/saltstack/salt/pull/47965
.. _`#47967`: https://github.com/saltstack/salt/pull/47967
.. _`#47973`: https://github.com/saltstack/salt/pull/47973
.. _`#47989`: https://github.com/saltstack/salt/pull/47989
.. _`#47998`: https://github.com/saltstack/salt/pull/47998
.. _`AmbicaY`: https://github.com/AmbicaY
.. _`Auha`: https://github.com/Auha
.. _`Ch3LL`: https://github.com/Ch3LL
@ -4673,6 +4742,7 @@ Changelog for v2018.3.0..v2018.3.1
.. _`Kimol`: https://github.com/Kimol
.. _`L4rS6`: https://github.com/L4rS6
.. _`LukeCarrier`: https://github.com/LukeCarrier
.. _`MorphBonehunter`: https://github.com/MorphBonehunter
.. _`OrlandoArcapix`: https://github.com/OrlandoArcapix
.. _`PhilippeAB`: https://github.com/PhilippeAB
.. _`SynPrime`: https://github.com/SynPrime
@ -4707,6 +4777,7 @@ Changelog for v2018.3.0..v2018.3.1
.. _`ezh`: https://github.com/ezh
.. _`femnad`: https://github.com/femnad
.. _`folti`: https://github.com/folti
.. _`frogunder`: https://github.com/frogunder
.. _`garethgreenaway`: https://github.com/garethgreenaway
.. _`gtmanfred`: https://github.com/gtmanfred
.. _`isbm`: https://github.com/isbm

View file

@ -5,25 +5,63 @@ In Progress: Salt 2018.3.2 Release Notes
Version 2018.3.2 is an **unreleased** bugfix release for :ref:`2018.3.0 <release-2018-3-0>`.
This release is still in progress and has not been released yet.
The ``2018.3.2`` release contains only a small number of fixes, detailed below.
The ``2018.3.2`` release contains only a small number of fixes, which are detailed
below.
Mainly, this release fixes Issue `#48038`_, which is a critical bug that occurs
in a multi-syndic setup where the same job is run multiple times on a minion.
This release fixes two critical issues.
The first is Issue `#48038`_, which is a critical bug that occurs in a multi-syndic
setup where the same job is run multiple times on a minion.
The second issue is `#48130`_. This bug appears in certain setups where the Master
reports a Minion time-out, even though the job is still running on the Minion.
Both of these issues have been fixed with this release.
Statistics
==========
- Total Merges: **3**
- Total Issue References: **1**
- Total PR References: **6**
- Total Merges: **7**
- Total Issue References: **2**
- Total PR References: **10**
- Contributors: **3** (`cro`_, `garethgreenaway`_, `rallytime`_)
- Contributors: **4** (`cro`_, `garethgreenaway`_, `gtmanfred`_, `rallytime`_)
Changelog for v2018.3.1..v2018.3.2
==================================
*Generated at: 2018-06-14 13:24:42 UTC*
*Generated at: 2018-06-17 19:17:16 UTC*
* **ISSUE** `#48130`_: (`rmarchei`_) Minion timeouts with 2018.3.1 (refs: `#48158`_)
* **PR** `#48158`_: (`gtmanfred`_) always listen when gathering job info
@ *2018-06-17 19:04:03 UTC*
* 521e926458 Merge pull request `#48158`_ from gtmanfred/2018.3.2
* cecf564433 always listen when gathering job info
* **PR** `#48138`_: (`rallytime`_) Update man pages for 2018.3.2
@ *2018-06-14 21:22:34 UTC*
* f154545aff Merge pull request `#48138`_ from rallytime/man-pages-2018.3.2
* 8c340134f5 Update man pages for 2018.3.2
* **PR** `#48137`_: (`gtmanfred`_) [2018.3.2] bootstrap kitchen branch tests with 2017.7.6
@ *2018-06-14 21:20:28 UTC*
* b49271b76d Merge pull request `#48137`_ from gtmanfred/2018.3.2
* 6128519e8b bootstrap kitchen branch tests with 2017.7.6
* **PR** `#48129`_: (`rallytime`_) Add release notes for 2018.3.2
@ *2018-06-14 15:48:36 UTC*
* 21aaf1cbc4 Merge pull request `#48129`_ from rallytime/release-notes-2018.3.2
* 0b13be0111 Add release notes for 2018.3.2
* **PR** `#48100`_: (`rallytime`_) Back-port `#48014`_ to 2018.3.2
@ *2018-06-14 12:54:52 UTC*
@ -67,7 +105,14 @@ Changelog for v2018.3.1..v2018.3.2
.. _`#48097`: https://github.com/saltstack/salt/pull/48097
.. _`#48099`: https://github.com/saltstack/salt/pull/48099
.. _`#48100`: https://github.com/saltstack/salt/pull/48100
.. _`#48129`: https://github.com/saltstack/salt/pull/48129
.. _`#48130`: https://github.com/saltstack/salt/issues/48130
.. _`#48137`: https://github.com/saltstack/salt/pull/48137
.. _`#48138`: https://github.com/saltstack/salt/pull/48138
.. _`#48158`: https://github.com/saltstack/salt/pull/48158
.. _`austinpapp`: https://github.com/austinpapp
.. _`cro`: https://github.com/cro
.. _`garethgreenaway`: https://github.com/garethgreenaway
.. _`gtmanfred`: https://github.com/gtmanfred
.. _`rallytime`: https://github.com/rallytime
.. _`rmarchei`: https://github.com/rmarchei

View file

@ -597,9 +597,8 @@ overrides all levels below it):
.. code-block:: yaml
gitfs_saltenv:
- saltenv:
- dev:
- mountpoint: salt://bar
- dev:
- mountpoint: salt://bar
3. Per-remote configuration parameter

View file

@ -1,6 +1,7 @@
[Unit]
Description=The Salt Syndic daemon
After=network.target
PartOf=salt-master.service
[Service]
Type=notify

View file

@ -2,6 +2,7 @@
Description=The Salt Master Server
Documentation=man:salt-syndic(1) file:///usr/share/doc/salt/html/contents.html https://docs.saltstack.com/en/latest/contents.html
After=network.target
PartOf=salt-master.service
[Service]
Type=notify

View file

@ -197,17 +197,17 @@ Write-Output " ----------------------------------------------------------------"
Write-Output " - $script_name :: Updating PIP and SetupTools . . ."
Write-Output " ----------------------------------------------------------------"
if ( ! [bool]$Env:SALT_PIP_LOCAL_CACHE) {
Start_Process_and_test_exitcode "cmd" "/c $($ini['Settings']['Python2Dir'])\python.exe -m pip --no-cache-dir install -r $($script_path)\req_pip.txt" "python pip"
Start_Process_and_test_exitcode "cmd" "/c $($ini['Settings']['Python2Dir'])\python.exe -m pip --disable-pip-version-check --no-cache-dir install -r $($script_path)\req_pip.txt" "python pip"
} else {
$p = New-Item $Env:SALT_PIP_LOCAL_CACHE -ItemType Directory -Force # Ensure directory exists
if ( (Get-ChildItem $Env:SALT_PIP_LOCAL_CACHE | Measure-Object).Count -eq 0 ) {
# folder empty
Write-Output " pip download from req_pip.txt into empty local cache SALT_REQ_PIP $Env:SALT_PIP_LOCAL_CACHE"
Start_Process_and_test_exitcode "cmd" "/c $($ini['Settings']['Python2Dir'])\python.exe -m pip download --dest $Env:SALT_PIP_LOCAL_CACHE -r $($script_path)\req_pip.txt" "pip download"
Start_Process_and_test_exitcode "cmd" "/c $($ini['Settings']['Python2Dir'])\python.exe -m pip --disable-pip-version-check download --dest $Env:SALT_PIP_LOCAL_CACHE -r $($script_path)\req_pip.txt" "pip download"
}
Write-Output " reading from local pip cache $Env:SALT_PIP_LOCAL_CACHE"
Write-Output " If a (new) resource is missing, please delete all files in this cache, go online and repeat"
Start_Process_and_test_exitcode "cmd" "/c $($ini['Settings']['Python2Dir'])\python.exe -m pip install --no-index --find-links=$Env:SALT_PIP_LOCAL_CACHE -r $($script_path)\req_pip.txt" "pip install"
Start_Process_and_test_exitcode "cmd" "/c $($ini['Settings']['Python2Dir'])\python.exe -m pip --disable-pip-version-check install --no-index --find-links=$Env:SALT_PIP_LOCAL_CACHE -r $($script_path)\req_pip.txt" "pip install"
}
#==============================================================================
@ -218,16 +218,16 @@ Write-Output " ----------------------------------------------------------------"
Write-Output " - $script_name :: Installing pypi resources using pip . . ."
Write-Output " ----------------------------------------------------------------"
if ( ! [bool]$Env:SALT_REQ_LOCAL_CACHE) {
Start_Process_and_test_exitcode "cmd" "/c $($ini['Settings']['Python2Dir'])\python.exe -m pip --no-cache-dir install -r $($script_path)\req.txt" "pip install"
Start_Process_and_test_exitcode "cmd" "/c $($ini['Settings']['Python2Dir'])\python.exe -m pip --disable-pip-version-check --no-cache-dir install -r $($script_path)\req.txt" "pip install"
} else {
if ( (Get-ChildItem $Env:SALT_REQ_LOCAL_CACHE | Measure-Object).Count -eq 0 ) {
# folder empty
Write-Output " pip download from req.txt into empty local cache SALT_REQ $Env:SALT_REQ_LOCAL_CACHE"
Start_Process_and_test_exitcode "cmd" "/c $($ini['Settings']['Python2Dir'])\python.exe -m pip download --dest $Env:SALT_REQ_LOCAL_CACHE -r $($script_path)\req.txt" "pip download"
Start_Process_and_test_exitcode "cmd" "/c $($ini['Settings']['Python2Dir'])\python.exe -m pip --disable-pip-version-check download --dest $Env:SALT_REQ_LOCAL_CACHE -r $($script_path)\req.txt" "pip download"
}
Write-Output " reading from local pip cache $Env:SALT_REQ_LOCAL_CACHE"
Write-Output " If a (new) resource is missing, please delete all files in this cache, go online and repeat"
Start_Process_and_test_exitcode "cmd" "/c $($ini['Settings']['Python2Dir'])\python.exe -m pip install --no-index --find-links=$Env:SALT_REQ_LOCAL_CACHE -r $($script_path)\req.txt" "pip install"
Start_Process_and_test_exitcode "cmd" "/c $($ini['Settings']['Python2Dir'])\python.exe -m pip --disable-pip-version-check install --no-index --find-links=$Env:SALT_REQ_LOCAL_CACHE -r $($script_path)\req.txt" "pip install"
}
#==============================================================================

View file

@ -197,17 +197,17 @@ Write-Output " ----------------------------------------------------------------"
Write-Output " - $script_name :: Updating PIP and SetupTools . . ."
Write-Output " ----------------------------------------------------------------"
if ( ! [bool]$Env:SALT_PIP_LOCAL_CACHE) {
Start_Process_and_test_exitcode "cmd" "/c $($ini['Settings']['Python3Dir'])\python.exe -m pip --no-cache-dir install -r $($script_path)\req_pip.txt" "python pip"
Start_Process_and_test_exitcode "cmd" "/c $($ini['Settings']['Python3Dir'])\python.exe -m pip --disable-pip-version-check --no-cache-dir install -r $($script_path)\req_pip.txt" "python pip"
} else {
$p = New-Item $Env:SALT_PIP_LOCAL_CACHE -ItemType Directory -Force # Ensure directory exists
if ( (Get-ChildItem $Env:SALT_PIP_LOCAL_CACHE | Measure-Object).Count -eq 0 ) {
# folder empty
Write-Output " pip download from req_pip.txt into empty local cache SALT_REQ_PIP $Env:SALT_PIP_LOCAL_CACHE"
Start_Process_and_test_exitcode "cmd" "/c $($ini['Settings']['Python3Dir'])\python.exe -m pip download --dest $Env:SALT_PIP_LOCAL_CACHE -r $($script_path)\req_pip.txt" "pip download"
Start_Process_and_test_exitcode "cmd" "/c $($ini['Settings']['Python3Dir'])\python.exe -m pip --disable-pip-version-check download --dest $Env:SALT_PIP_LOCAL_CACHE -r $($script_path)\req_pip.txt" "pip download"
}
Write-Output " reading from local pip cache $Env:SALT_PIP_LOCAL_CACHE"
Write-Output " If a (new) resource is missing, please delete all files in this cache, go online and repeat"
Start_Process_and_test_exitcode "cmd" "/c $($ini['Settings']['Python3Dir'])\python.exe -m pip install --no-index --find-links=$Env:SALT_PIP_LOCAL_CACHE -r $($script_path)\req_pip.txt" "pip install"
Start_Process_and_test_exitcode "cmd" "/c $($ini['Settings']['Python3Dir'])\python.exe -m pip --disable-pip-version-check install --no-index --find-links=$Env:SALT_PIP_LOCAL_CACHE -r $($script_path)\req_pip.txt" "pip install"
}
#==============================================================================
@ -218,16 +218,16 @@ Write-Output " ----------------------------------------------------------------"
Write-Output " - $script_name :: Installing pypi resources using pip . . ."
Write-Output " ----------------------------------------------------------------"
if ( ! [bool]$Env:SALT_REQ_LOCAL_CACHE) {
Start_Process_and_test_exitcode "cmd" "/c $($ini['Settings']['Python3Dir'])\python.exe -m pip --no-cache-dir install -r $($script_path)\req.txt" "pip install"
Start_Process_and_test_exitcode "cmd" "/c $($ini['Settings']['Python3Dir'])\python.exe -m pip --disable-pip-version-check --no-cache-dir install -r $($script_path)\req.txt" "pip install"
} else {
if ( (Get-ChildItem $Env:SALT_REQ_LOCAL_CACHE | Measure-Object).Count -eq 0 ) {
# folder empty
Write-Output " pip download from req.txt into empty local cache SALT_REQ $Env:SALT_REQ_LOCAL_CACHE"
Start_Process_and_test_exitcode "cmd" "/c $($ini['Settings']['Python3Dir'])\python.exe -m pip download --dest $Env:SALT_REQ_LOCAL_CACHE -r $($script_path)\req.txt" "pip download"
Start_Process_and_test_exitcode "cmd" "/c $($ini['Settings']['Python3Dir'])\python.exe -m pip --disable-pip-version-check download --dest $Env:SALT_REQ_LOCAL_CACHE -r $($script_path)\req.txt" "pip download"
}
Write-Output " reading from local pip cache $Env:SALT_REQ_LOCAL_CACHE"
Write-Output " If a (new) resource is missing, please delete all files in this cache, go online and repeat"
Start_Process_and_test_exitcode "cmd" "/c $($ini['Settings']['Python3Dir'])\python.exe -m pip install --no-index --find-links=$Env:SALT_REQ_LOCAL_CACHE -r $($script_path)\req.txt" "pip install"
Start_Process_and_test_exitcode "cmd" "/c $($ini['Settings']['Python3Dir'])\python.exe -m pip --disable-pip-version-check install --no-index --find-links=$Env:SALT_REQ_LOCAL_CACHE -r $($script_path)\req.txt" "pip install"
}
#==============================================================================

View file

@ -1,4 +0,0 @@
[pytest]
addopts = --ssh-tests -ra -sv
testpaths = tests
norecursedirs = tests/kitchen

View file

@ -1,3 +1,4 @@
pytest>=3.5.0
pytest-helpers-namespace
pytest-tempdir
pytest-cov

View file

@ -284,7 +284,7 @@ class LoadAuth(object):
return False
if load['eauth'] not in self.opts['external_auth']:
# The eauth system is not enabled, fail
log.debug('The eauth system "%s" is not enabled', load['eauth'])
log.warning('Authentication failure of type "eauth" occurred.')
return False
@ -362,6 +362,7 @@ class LoadAuth(object):
eauth = token['eauth'] if token else load['eauth']
if eauth not in self.opts['external_auth']:
# No matching module is allowed in config
log.debug('The eauth system "%s" is not enabled', eauth)
log.warning('Authorization failure occurred.')
return None
@ -372,6 +373,9 @@ class LoadAuth(object):
name = self.load_name(load) # The username we are attempting to auth with
groups = self.get_groups(load) # The groups this user belongs to
eauth_config = self.opts['external_auth'][eauth]
if not eauth_config:
log.debug('eauth "%s" configuration is empty', eauth)
if not groups:
groups = []
@ -691,6 +695,7 @@ class Resolver(object):
if fstr not in self.auth:
print(('The specified external authentication system "{0}" is '
'not available').format(eauth))
print("Available eauth types: {0}".format(", ".join(self.auth.file_mapping.keys())))
return ret
args = salt.utils.args.arg_lookup(self.auth[fstr])

View file

@ -120,7 +120,7 @@ class BaseCaller(object):
'''
Print out the grains
'''
grains = salt.loader.grains(self.opts)
grains = self.minion.opts.get('grains') or salt.loader.grains(self.opts)
salt.output.display_output({'local': grains}, 'grains', self.opts)
def run(self):

View file

@ -83,9 +83,10 @@ def get_configured_provider():
Return the first configured instance.
'''
return config.is_provider_configured(
__opts__,
__active_provider_name__ or __virtualname__,
('personal_access_token',)
opts=__opts__,
provider=__active_provider_name__ or __virtualname__,
aliases=__virtual_aliases__,
required_keys=('personal_access_token',)
)

View file

@ -2439,7 +2439,7 @@ def wait_for_instance(
)
pprint.pprint(console)
time.sleep(5)
output = console['output_decoded']
output = salt.utils.stringutils.to_unicode(console['output_decoded'])
comps = output.split('-----BEGIN SSH HOST KEY KEYS-----')
if len(comps) < 2:
# Fail; there are no host keys

View file

@ -3350,7 +3350,7 @@ def get_cloud_config_value(name, vm_, opts, default=None, search_global=True):
return value
def is_provider_configured(opts, provider, required_keys=(), log_message=True):
def is_provider_configured(opts, provider, required_keys=(), log_message=True, aliases=()):
'''
Check and return the first matching and fully configured cloud provider
configuration.
@ -3378,7 +3378,7 @@ def is_provider_configured(opts, provider, required_keys=(), log_message=True):
for alias, drivers in six.iteritems(opts['providers']):
for driver, provider_details in six.iteritems(drivers):
if driver != provider:
if driver != provider and driver not in aliases:
continue
# If we reached this far, we have a matching provider, let's see if

View file

@ -947,7 +947,7 @@ def _virtual(osdata):
if os.path.isfile('/sys/devices/virtual/dmi/id/product_name'):
try:
with salt.utils.files.fopen('/sys/devices/virtual/dmi/id/product_name', 'r') as fhr:
output = fhr.read()
output = salt.utils.stringutils.to_unicode(fhr.read())
if 'VirtualBox' in output:
grains['virtual'] = 'VirtualBox'
elif 'RHEV Hypervisor' in output:
@ -1428,6 +1428,18 @@ _OS_FAMILY_MAP = {
'AIX': 'AIX',
}
# Matches any possible format:
# DISTRIB_ID="Ubuntu"
# DISTRIB_ID='Mageia'
# DISTRIB_ID=Fedora
# DISTRIB_RELEASE='10.10'
# DISTRIB_CODENAME='squeeze'
# DISTRIB_DESCRIPTION='Ubuntu 10.10'
_LSB_REGEX = re.compile((
'^(DISTRIB_(?:ID|RELEASE|CODENAME|DESCRIPTION))=(?:\'|")?'
'([\\w\\s\\.\\-_]+)(?:\'|")?'
))
def _linux_bin_exists(binary):
'''
@ -1460,32 +1472,49 @@ def _get_interfaces():
return _INTERFACES
def _parse_os_release(os_release_files):
def _parse_lsb_release():
    '''
    Parse /etc/lsb-release into a dict of grains.

    Each ``DISTRIB_*`` entry becomes a lowercased, ``lsb_``-prefixed key
    (e.g. ``lsb_distrib_id``). Returns an empty dict when the file is
    missing or unreadable.
    '''
    grains = {}
    try:
        log.trace('Attempting to parse /etc/lsb-release')
        with salt.utils.files.fopen('/etc/lsb-release') as lsb_file:
            for line in lsb_file:
                match = _LSB_REGEX.match(line.rstrip('\n'))
                if match is None:
                    # Line is not a DISTRIB_*=value entry; ignore it
                    continue
                key, value = match.groups()[:2]
                # Adds lsb_distrib_{id,release,codename,description}
                grains['lsb_{0}'.format(key.lower())] = value.rstrip()
    except (IOError, OSError) as exc:
        log.trace('Failed to parse /etc/lsb-release: %s', exc)
    return grains
def _parse_os_release(*os_release_files):
'''
Parse os-release and return a parameter dictionary
See http://www.freedesktop.org/software/systemd/man/os-release.html
for specification of the file format.
'''
data = dict()
ret = {}
for filename in os_release_files:
if os.path.isfile(filename):
try:
with salt.utils.files.fopen(filename) as ifile:
regex = re.compile('^([\\w]+)=(?:\'|")?(.*?)(?:\'|")?$')
for line in ifile:
match = regex.match(line.strip())
if match:
# Shell special characters ("$", quotes, backslash,
# backtick) are escaped with backslashes
ret[match.group(1)] = re.sub(
r'\\([$"\'\\`])', r'\1', match.group(2)
)
break
else:
# None of the specified os-release files exist
return data
except (IOError, OSError):
pass
with salt.utils.files.fopen(filename) as ifile:
regex = re.compile('^([\\w]+)=(?:\'|")?(.*?)(?:\'|")?$')
for line in ifile:
match = regex.match(line.strip())
if match:
# Shell special characters ("$", quotes, backslash, backtick)
# are escaped with backslashes
data[match.group(1)] = re.sub(r'\\([$"\'\\`])', r'\1', match.group(2))
return data
return ret
def os_data():
@ -1548,6 +1577,7 @@ def os_data():
elif salt.utils.platform.is_linux():
# Add SELinux grain, if you have it
if _linux_bin_exists('selinuxenabled'):
log.trace('Adding selinux grains')
grains['selinux'] = {}
grains['selinux']['enabled'] = __salt__['cmd.retcode'](
'selinuxenabled'
@ -1559,6 +1589,7 @@ def os_data():
# Add systemd grain, if you have it
if _linux_bin_exists('systemctl') and _linux_bin_exists('localectl'):
log.trace('Adding systemd grains')
grains['systemd'] = {}
systemd_info = __salt__['cmd.run'](
'systemctl --version'
@ -1568,74 +1599,77 @@ def os_data():
# Add init grain
grains['init'] = 'unknown'
log.trace('Adding init grain')
try:
os.stat('/run/systemd/system')
grains['init'] = 'systemd'
except (OSError, IOError):
if os.path.exists('/proc/1/cmdline'):
try:
with salt.utils.files.fopen('/proc/1/cmdline') as fhr:
init_cmdline = fhr.read().replace('\x00', ' ').split()
except (IOError, OSError):
pass
else:
try:
init_bin = salt.utils.path.which(init_cmdline[0])
except IndexError:
# Empty init_cmdline
init_bin = None
log.warning('Unable to fetch data from /proc/1/cmdline')
if init_bin is not None and init_bin.endswith('bin/init'):
supported_inits = (b'upstart', b'sysvinit', b'systemd')
edge_len = max(len(x) for x in supported_inits) - 1
try:
init_bin = salt.utils.path.which(init_cmdline[0])
except IndexError:
# Empty init_cmdline
init_bin = None
log.warning(
"Unable to fetch data from /proc/1/cmdline"
)
if init_bin is not None and init_bin.endswith('bin/init'):
supported_inits = (b'upstart', b'sysvinit', b'systemd')
edge_len = max(len(x) for x in supported_inits) - 1
try:
buf_size = __opts__['file_buffer_size']
except KeyError:
# Default to the value of file_buffer_size for the minion
buf_size = 262144
try:
with salt.utils.files.fopen(init_bin, 'rb') as fp_:
buf = True
edge = b''
buf_size = __opts__['file_buffer_size']
except KeyError:
# Default to the value of file_buffer_size for the minion
buf_size = 262144
try:
with salt.utils.files.fopen(init_bin, 'rb') as fp_:
edge = b''
buf = fp_.read(buf_size).lower()
while buf:
buf = edge + buf
for item in supported_inits:
if item in buf:
if six.PY3:
item = item.decode('utf-8')
grains['init'] = item
buf = b''
break
edge = buf[-edge_len:]
buf = fp_.read(buf_size).lower()
while buf:
buf = edge + buf
for item in supported_inits:
if item in buf:
if six.PY3:
item = item.decode('utf-8')
grains['init'] = item
buf = b''
break
edge = buf[-edge_len:]
buf = fp_.read(buf_size).lower()
except (IOError, OSError) as exc:
log.error(
'Unable to read from init_bin (%s): %s',
init_bin, exc
)
elif salt.utils.path.which('supervisord') in init_cmdline:
grains['init'] = 'supervisord'
elif salt.utils.path.which('dumb-init') in init_cmdline:
# https://github.com/Yelp/dumb-init
grains['init'] = 'dumb-init'
elif salt.utils.path.which('tini') in init_cmdline:
# https://github.com/krallin/tini
grains['init'] = 'tini'
elif init_cmdline == ['runit']:
grains['init'] = 'runit'
elif '/sbin/my_init' in init_cmdline:
#Phusion Base docker container use runit for srv mgmt, but my_init as pid1
grains['init'] = 'runit'
else:
log.info(
'Could not determine init system from command line: (%s)',
' '.join(init_cmdline)
except (IOError, OSError) as exc:
log.error(
'Unable to read from init_bin (%s): %s',
init_bin, exc
)
elif salt.utils.path.which('supervisord') in init_cmdline:
grains['init'] = 'supervisord'
elif salt.utils.path.which('dumb-init') in init_cmdline:
# https://github.com/Yelp/dumb-init
grains['init'] = 'dumb-init'
elif salt.utils.path.which('tini') in init_cmdline:
# https://github.com/krallin/tini
grains['init'] = 'tini'
elif init_cmdline == ['runit']:
grains['init'] = 'runit'
elif '/sbin/my_init' in init_cmdline:
# Phusion Base docker container use runit for srv mgmt, but
# my_init as pid1
grains['init'] = 'runit'
else:
log.info(
'Could not determine init system from command line: (%s)',
' '.join(init_cmdline)
)
# Add lsb grains on any distro with lsb-release. Note that this import
# can fail on systems with lsb-release installed if the system package
# does not install the python package for the python interpreter used by
# Salt (i.e. python2 or python3)
try:
log.trace('Getting lsb_release distro information')
import lsb_release # pylint: disable=import-error
release = lsb_release.get_distro_information()
for key, value in six.iteritems(release):
@ -1648,35 +1682,21 @@ def os_data():
# Catch a NameError to workaround possible breakage in lsb_release
# See https://github.com/saltstack/salt/issues/37867
except (ImportError, NameError):
# if the python library isn't available, default to regex
if os.path.isfile('/etc/lsb-release'):
# Matches any possible format:
# DISTRIB_ID="Ubuntu"
# DISTRIB_ID='Mageia'
# DISTRIB_ID=Fedora
# DISTRIB_RELEASE='10.10'
# DISTRIB_CODENAME='squeeze'
# DISTRIB_DESCRIPTION='Ubuntu 10.10'
regex = re.compile((
'^(DISTRIB_(?:ID|RELEASE|CODENAME|DESCRIPTION))=(?:\'|")?'
'([\\w\\s\\.\\-_]+)(?:\'|")?'
))
with salt.utils.files.fopen('/etc/lsb-release') as ifile:
for line in ifile:
match = regex.match(line.rstrip('\n'))
if match:
# Adds:
# lsb_distrib_{id,release,codename,description}
grains[
'lsb_{0}'.format(match.groups()[0].lower())
] = match.groups()[1].rstrip()
# if the python library isn't available, try to parse
# /etc/lsb-release using regex
log.trace('lsb_release python bindings not available')
grains.update(_parse_lsb_release())
if grains.get('lsb_distrib_description', '').lower().startswith('antergos'):
# Antergos incorrectly configures their /etc/lsb-release,
# setting the DISTRIB_ID to "Arch". This causes the "os" grain
# to be incorrectly set to "Arch".
grains['osfullname'] = 'Antergos Linux'
elif 'lsb_distrib_id' not in grains:
os_release = _parse_os_release(['/etc/os-release', '/usr/lib/os-release'])
log.trace(
'Failed to get lsb_distrib_id, trying to parse os-release'
)
os_release = _parse_os_release('/etc/os-release', '/usr/lib/os-release')
if os_release:
if 'NAME' in os_release:
grains['lsb_distrib_id'] = os_release['NAME'].strip()
@ -1701,6 +1721,7 @@ def os_data():
elif os_release.get("VERSION") == "Tumbleweed":
grains['osfullname'] = os_release["VERSION"]
elif os.path.isfile('/etc/SuSE-release'):
log.trace('Parsing distrib info from /etc/SuSE-release')
grains['lsb_distrib_id'] = 'SUSE'
version = ''
patch = ''
@ -1722,6 +1743,7 @@ def os_data():
if not grains.get('lsb_distrib_codename'):
grains['lsb_distrib_codename'] = 'n.a'
elif os.path.isfile('/etc/altlinux-release'):
log.trace('Parsing distrib info from /etc/altlinux-release')
# ALT Linux
grains['lsb_distrib_id'] = 'altlinux'
with salt.utils.files.fopen('/etc/altlinux-release') as ifile:
@ -1737,6 +1759,7 @@ def os_data():
grains['lsb_distrib_codename'] = \
comps[3].replace('(', '').replace(')', '')
elif os.path.isfile('/etc/centos-release'):
log.trace('Parsing distrib info from /etc/centos-release')
# CentOS Linux
grains['lsb_distrib_id'] = 'CentOS'
with salt.utils.files.fopen('/etc/centos-release') as ifile:
@ -1754,6 +1777,9 @@ def os_data():
elif os.path.isfile('/etc.defaults/VERSION') \
and os.path.isfile('/etc.defaults/synoinfo.conf'):
grains['osfullname'] = 'Synology'
log.trace(
'Parsing Synology distrib info from /etc/.defaults/VERSION'
)
with salt.utils.files.fopen('/etc.defaults/VERSION', 'r') as fp_:
synoinfo = {}
for line in fp_:
@ -1777,6 +1803,10 @@ def os_data():
# Use the already intelligent platform module to get distro info
# (though apparently it's not intelligent enough to strip quotes)
log.trace(
'Getting OS name, release, and codename from '
'platform.linux_distribution()'
)
(osname, osrelease, oscodename) = \
[x.strip('"').strip("'") for x in
linux_distribution(supported_dists=_supported_dists)]

View file

@ -2079,6 +2079,8 @@ class ClearFuncs(object):
if not authorized:
# Authorization error occurred. Do not continue.
if auth_type == 'eauth' and not auth_list and 'username' in extra and 'eauth' in extra:
log.debug('Auth configuration for eauth "%s" and user "%s" is empty', extra['eauth'], extra['username'])
log.warning(err_msg)
return {'error': {'name': 'AuthorizationError',
'message': 'Authorization error occurred.'}}

View file

@ -1073,7 +1073,7 @@ class Minion(MinionBase):
# Flag meaning minion has finished initialization including first connect to the master.
# True means the Minion is fully functional and ready to handle events.
self.ready = False
self.jid_queue = jid_queue or []
self.jid_queue = [] if jid_queue is None else jid_queue
self.periodic_callbacks = {}
if io_loop is None:
@ -1172,10 +1172,11 @@ class Minion(MinionBase):
# I made the following 3 line oddity to preserve traceback.
# Please read PR #23978 before changing, hopefully avoiding regressions.
# Good luck, we're all counting on you. Thanks.
future_exception = self._connect_master_future.exception()
if future_exception:
# This needs to be re-raised to preserve restart_on_error behavior.
raise six.reraise(*future_exception)
if self._connect_master_future.done():
future_exception = self._connect_master_future.exception()
if future_exception:
# This needs to be re-raised to preserve restart_on_error behavior.
raise six.reraise(*future_exception)
if timeout and self._sync_connect_master_success is False:
raise SaltDaemonNotRunning('Failed to connect to the salt-master')
@ -1667,7 +1668,9 @@ class Minion(MinionBase):
)
ret['out'] = 'nested'
except TypeError as exc:
msg = 'Passed invalid arguments to {0}: {1}\n{2}'.format(function_name, exc, func.__doc__)
msg = 'Passed invalid arguments to {0}: {1}\n{2}'.format(
function_name, exc, func.__doc__ or ''
)
log.warning(msg, exc_info_on_loglevel=logging.DEBUG)
ret['return'] = msg
ret['out'] = 'nested'

View file

@ -304,7 +304,7 @@ def install(name=None,
# We don't support installing specific version for now
# so transform the dict in list ignoring version provided
pkgs = [
p.keys()[0] for p in pkgs
next(iter(p)) for p in pkgs
if isinstance(p, dict)
]
pkg_to_install.extend(pkgs)

View file

@ -445,11 +445,20 @@ def _refresh_mine_cache(wrapped):
refresh salt mine on exit.
'''
returned = wrapped(*args, **__utils__['args.clean_kwargs'](**kwargs))
__salt__['mine.send']('docker.ps', verbose=True, all=True, host=True)
if _check_update_mine():
__salt__['mine.send']('docker.ps', verbose=True, all=True, host=True)
return returned
return wrapper
def _check_update_mine():
    '''
    Return the effective ``docker.update_mine`` setting.

    The config lookup is performed once and cached in ``__context__`` so
    subsequent calls within the same loader context reuse the cached value.
    '''
    if 'docker.update_mine' not in __context__:
        __context__['docker.update_mine'] = __salt__['config.get'](
            'docker.update_mine', default=True)
    return __context__['docker.update_mine']
# Helper functions
def _change_state(name, action, expected, *args, **kwargs):
'''

View file

@ -2045,7 +2045,12 @@ def line(path, content=None, match=None, mode=None, location=None,
fh_ = None
try:
# Make sure we match the file mode from salt.utils.files.fopen
mode = 'wb' if six.PY2 and salt.utils.platform.is_windows() else 'w'
if six.PY2 and salt.utils.platform.is_windows():
mode = 'wb'
body = salt.utils.stringutils.to_bytes(body)
else:
mode = 'w'
body = salt.utils.stringutils.to_str(body)
fh_ = salt.utils.atomicfile.atomic_open(path, mode)
fh_.write(''.join(body))
finally:

View file

@ -3504,7 +3504,9 @@ def bootstrap(name,
configdir = '/var/tmp/.c_{0}'.format(rstr)
cmd = 'install -m 0700 -d {0}'.format(configdir)
if run(name, cmd, python_shell=False):
if run_all(
name, cmd, path=path, python_shell=False
)['retcode'] != 0:
log.error('tmpdir %s creation failed %s', configdir, cmd)
return False
@ -3514,6 +3516,7 @@ def bootstrap(name,
copy_to(name, bs_, script, path=path)
result = run_all(name,
'sh -c "chmod +x {0}"'.format(script),
path=path,
python_shell=True)
copy_to(name, cfg_files['config'],
@ -3539,6 +3542,7 @@ def bootstrap(name,
run_all(name,
'sh -c \'if [ -f "{0}" ];then rm -f "{0}";fi\''
''.format(script),
path=path,
ignore_retcode=True,
python_shell=True)
else:

View file

@ -361,10 +361,18 @@ def flush():
def get_docker(interfaces=None, cidrs=None, with_container_id=False):
'''
Get all mine data for 'docker.get_containers' and run an aggregation
routine. The "interfaces" parameter allows for specifying which network
interfaces to select ip addresses from. The "cidrs" parameter allows for
specifying a list of cidrs which the ip address must match.
.. versionchanged:: 2017.7.8,2018.3.3
When :conf_minion:`docker.update_mine` is set to ``False`` for a given
minion, no mine data will be populated for that minion, and thus none
will be returned for it.
.. versionchanged:: Fluorine
:conf_minion:`docker.update_mine` now defaults to ``False``
Get all mine data for :py:func:`docker.ps <salt.modules.dockermod.ps_>` and
run an aggregation routine. The ``interfaces`` parameter allows for
specifying the network interfaces from which to select IP addresses. The
``cidrs`` parameter allows for specifying a list of subnets which the IP
address must match.
with_container_id
Boolean, to expose container_id in the list of results

View file

@ -35,6 +35,7 @@ Module to provide MySQL compatibility to salt.
# Import python libs
from __future__ import absolute_import, print_function, unicode_literals
import hashlib
import time
import logging
import re
@ -200,6 +201,12 @@ def __virtual__():
return bool(MySQLdb), 'No python mysql client installed.' if MySQLdb is None else ''
def __mysql_hash_password(password):
    '''
    Hash a cleartext password the way the MySQL ``PASSWORD()`` function
    (mysql_native_password plugin) does: ``'*' + uppercase hex SHA1 of the
    SHA1 digest`` of the password.

    password
        The cleartext password. Text input is encoded as UTF-8 before
        hashing, because ``hashlib.sha1`` only accepts bytes on Python 3
        (callers pass unicode passwords — see user_exists).
    '''
    if not isinstance(password, bytes):
        # hashlib rejects unicode input on Python 3; encode first
        password = password.encode('utf-8')
    _password = hashlib.sha1(password).digest()
    _password = '*{0}'.format(hashlib.sha1(_password).hexdigest().upper())
    return _password
def __check_table(name, table, **connection_args):
dbc = _connect(**connection_args)
if dbc is None:
@ -1201,6 +1208,7 @@ def user_exists(user,
salt '*' mysql.user_exists 'username' passwordless=True
salt '*' mysql.user_exists 'username' password_column='authentication_string'
'''
server_version = version(**connection_args)
dbc = _connect(**connection_args)
# Did we fail to connect with the user we are checking
# Its password might have previously change with the same command/state
@ -1232,8 +1240,14 @@ def user_exists(user,
else:
qry += ' AND ' + password_column + ' = \'\''
elif password:
qry += ' AND ' + password_column + ' = PASSWORD(%(password)s)'
args['password'] = six.text_type(password)
if salt.utils.versions.version_cmp(server_version, '8.0.11') <= 0:
# Hash the password before comparing
_password = __mysql_hash_password(password)
qry += ' AND ' + password_column + ' = %(password)s'
else:
_password = password
qry += ' AND ' + password_column + ' = PASSWORD(%(password)s)'
args['password'] = six.text_type(_password)
elif password_hash:
qry += ' AND ' + password_column + ' = %(password)s'
args['password'] = password_hash
@ -1331,6 +1345,7 @@ def user_create(user,
salt '*' mysql.user_create 'username' 'hostname' password_hash='hash'
salt '*' mysql.user_create 'username' 'hostname' allow_passwordless=True
'''
server_version = version(**connection_args)
if user_exists(user, host, **connection_args):
log.info('User \'%s\'@\'%s\' already exists', user, host)
return False
@ -1351,7 +1366,10 @@ def user_create(user,
qry += ' IDENTIFIED BY %(password)s'
args['password'] = six.text_type(password)
elif password_hash is not None:
qry += ' IDENTIFIED BY PASSWORD %(password)s'
if salt.utils.versions.version_cmp(server_version, '8.0.11') <= 0:
qry += ' IDENTIFIED BY %(password)s'
else:
qry += ' IDENTIFIED BY PASSWORD %(password)s'
args['password'] = password_hash
elif salt.utils.data.is_true(allow_passwordless):
if salt.utils.data.is_true(unix_socket):
@ -1431,9 +1449,13 @@ def user_chpass(user,
salt '*' mysql.user_chpass frank localhost password_hash='hash'
salt '*' mysql.user_chpass frank localhost allow_passwordless=True
'''
server_version = version(**connection_args)
args = {}
if password is not None:
password_sql = 'PASSWORD(%(password)s)'
if salt.utils.versions.version_cmp(server_version, '8.0.11') <= 0:
password_sql = '%(password)s'
else:
password_sql = 'PASSWORD(%(password)s)'
args['password'] = password
elif password_hash is not None:
password_sql = '%(password)s'

View file

@ -172,20 +172,7 @@ def _get_pip_bin(bin_env):
# If the python binary was passed, return it
if 'python' in os.path.basename(bin_env):
return [os.path.normpath(bin_env), '-m', 'pip']
# Try to find the python binary based on the location of pip in a
# virtual environment, should be relative
if 'pip' in os.path.basename(bin_env):
# Look in the same directory as the pip binary, and also its
# parent directories.
pip_dirname = os.path.dirname(bin_env)
pip_parent_dir = os.path.dirname(pip_dirname)
for bin_path in _search_paths(pip_dirname, pip_parent_dir):
if os.path.isfile(bin_path):
logger.debug('pip: Found python binary: %s', bin_path)
return [os.path.normpath(bin_path), '-m', 'pip']
# Couldn't find python, use the passed pip binary
# This has the limitation of being unable to update pip itself
# We have been passed a pip binary, use the pip binary.
return [os.path.normpath(bin_env)]
raise CommandExecutionError(
@ -465,6 +452,13 @@ def install(pkgs=None, # pylint: disable=R0912,R0913,R0914
``/usr/bin/pip-2.7`` or ``/usr/bin/pip-2.6``. If a directory path is
specified, it is assumed to be a virtualenv.
.. note::
For Windows, if the pip module is being used to upgrade the pip
package, bin_env should be the path to the virtualenv or to the
python binary that should be used. The pip command is unable to
upgrade itself in Windows.
use_wheel
Prefer wheel archives (requires pip>=1.4)

View file

@ -2,65 +2,62 @@
'''
A module for testing the logic of states and highstates
Saltcheck provides unittest like functionality requiring only the knowledge of salt module execution and yaml.
In order to run state and highstate saltcheck tests a sub-folder of a state must be created and named "saltcheck-tests".
Tests for a state should be created in files ending in *.tst and placed in the saltcheck-tests folder.
Multiple tests can be created in a file.
Multiple *.tst files can be created in the saltcheck-tests folder.
Salt rendering is supported in test files e.g. yaml + jinja.
The "id" of a test works in the same manner as in salt state files.
They should be unique and descriptive.
Example file system layout:
/srv/salt/apache/
init.sls
config.sls
saltcheck-tests/
pkg_and_mods.tst
config.tst
Saltcheck Test Syntax:
Unique-ID:
module_and_function:
args:
kwargs:
assertion:
expected-return:
Example test 1:
echo-test-hello:
module_and_function: test.echo
args:
- "hello"
kwargs:
assertion: assertEqual
expected-return: 'hello'
:codeauthor: William Cannon <william.cannon@gmail.com>
:maturity: new
Saltcheck provides unittest like functionality requiring only the knowledge of
salt module execution and yaml.
In order to run state and highstate saltcheck tests a sub-folder of a state must
be created and named ``saltcheck-tests``.
Tests for a state should be created in files ending in ``*.tst`` and placed in
the ``saltcheck-tests`` folder.
Multiple tests can be created in a file. Multiple ``*.tst`` files can be
created in the ``saltcheck-tests`` folder. Salt rendering is supported in test
files (e.g. ``yaml + jinja``). The ``id`` of a test works in the same manner as
in salt state files. They should be unique and descriptive.
Example file system layout:
.. code-block: txt
/srv/salt/apache/
init.sls
config.sls
saltcheck-tests/
pkg_and_mods.tst
config.tst
Example:
.. code-block:: yaml
echo-test-hello:
module_and_function: test.echo
args:
- "hello"
kwargs:
assertion: assertEqual
expected-return: 'hello'
'''
# Import Python libs
from __future__ import absolute_import, unicode_literals, print_function
import logging
import os
import time
from json import loads, dumps
try:
import salt.utils.files
import salt.utils.path
import salt.utils.yaml
import salt.client
import salt.exceptions
from salt.ext import six
except ImportError:
pass
# Import Salt libs
import salt.utils.files
import salt.utils.path
import salt.utils.yaml
import salt.client
import salt.exceptions
from salt.ext import six
log = logging.getLogger(__name__)
@ -81,6 +78,9 @@ def update_master_cache():
Can be automated by setting "auto_update_master_cache: True" in minion config
CLI Example:
.. code-block:: bash
salt '*' saltcheck.update_master_cache
'''
__salt__['cp.cache_master']()
@ -92,7 +92,11 @@ def run_test(**kwargs):
Execute one saltcheck test and return result
:param keyword arg test:
CLI Example::
CLI Example:
.. code-block:: bash
salt '*' saltcheck.run_test
test='{"module_and_function": "test.echo",
"assertion": "assertEqual",
@ -115,8 +119,11 @@ def run_state_tests(state):
:param str state: the name of a user defined state
CLI Example::
salt '*' saltcheck.run_state_tests postfix
CLI Example:
.. code-block:: bash
salt '*' saltcheck.run_state_tests postfix
'''
scheck = SaltCheck()
paths = scheck.get_state_search_path_list()
@ -157,8 +164,11 @@ def run_highstate_tests():
'''
Execute all tests for a salt highstate and return results
CLI Example::
salt '*' saltcheck.run_highstate_tests
CLI Example:
.. code-block:: bash
salt '*' saltcheck.run_highstate_tests
'''
scheck = SaltCheck()
paths = scheck.get_state_search_path_list()
@ -203,7 +213,9 @@ def run_highstate_tests():
def _render_file(file_path):
'''call the salt utility to render a file'''
'''
call the salt utility to render a file
'''
# salt-call slsutil.renderer /srv/salt/jinjatest/saltcheck-tests/test1.tst
rendered = __salt__['slsutil.renderer'](file_path)
log.info("rendered: %s", rendered)
@ -211,19 +223,25 @@ def _render_file(file_path):
def _is_valid_module(module):
'''return a list of all modules available on minion'''
'''
Return a list of all modules available on minion
'''
modules = __salt__['sys.list_modules']()
return bool(module in modules)
def _get_auto_update_cache_value():
'''return the config value of auto_update_master_cache'''
'''
Return the config value of auto_update_master_cache
'''
__salt__['config.get']('auto_update_master_cache')
return True
def _is_valid_function(module_name, function):
'''Determine if a function is valid for a module'''
'''
Determine if a function is valid for a module
'''
try:
functions = __salt__['sys.list_functions'](module_name)
except salt.exceptions.SaltException:
@ -232,7 +250,9 @@ def _is_valid_function(module_name, function):
def _get_top_states():
''' equivalent to a salt cli: salt web state.show_top'''
'''
Equivalent to a salt cli: salt web state.show_top
'''
alt_states = []
try:
returned = __salt__['state.show_top']()
@ -245,7 +265,9 @@ def _get_top_states():
def _get_state_sls(state):
''' equivalent to a salt cli: salt web state.show_low_sls STATE'''
'''
Equivalent to a salt cli: salt web state.show_low_sls STATE
'''
sls_list_state = []
try:
returned = __salt__['state.show_low_sls'](state)
@ -281,11 +303,14 @@ class SaltCheck(object):
update_master_cache()
def __is_valid_test(self, test_dict):
'''Determine if a test contains:
a test name,
a valid module and function,
a valid assertion,
an expected return value'''
'''
Determine if a test contains:
- a test name
- a valid module and function
- a valid assertion
- an expected return value
'''
tots = 0 # need total of >= 6 to be a valid test
m_and_f = test_dict.get('module_and_function', None)
assertion = test_dict.get('assertion', None)
@ -314,7 +339,9 @@ class SaltCheck(object):
fun,
args,
kwargs):
'''Generic call of salt Caller command'''
'''
Generic call of salt Caller command
'''
value = False
try:
if args and kwargs:
@ -332,7 +359,9 @@ class SaltCheck(object):
return value
def run_test(self, test_dict):
'''Run a single saltcheck test'''
'''
Run a single saltcheck test
'''
if self.__is_valid_test(test_dict):
mod_and_func = test_dict['module_and_function']
args = test_dict.get('args', None)
@ -516,8 +545,9 @@ class SaltCheck(object):
@staticmethod
def get_state_search_path_list():
'''For the state file system, return a
list of paths to search for states'''
'''
For the state file system, return a list of paths to search for states
'''
# state cache should be updated before running this method
search_list = []
cachedir = __opts__.get('cachedir', None)
@ -533,7 +563,7 @@ class SaltCheck(object):
class StateTestLoader(object):
'''
Class loads in test files for a state
e.g. state_dir/saltcheck-tests/[1.tst, 2.tst, 3.tst]
e.g. state_dir/saltcheck-tests/[1.tst, 2.tst, 3.tst]
'''
def __init__(self, search_paths):
@ -543,7 +573,9 @@ class StateTestLoader(object):
self.test_dict = {}
def load_test_suite(self):
'''load tests either from one file, or a set of files'''
'''
Load tests either from one file, or a set of files
'''
self.test_dict = {}
for myfile in self.test_files:
# self.load_file(myfile)
@ -578,7 +610,9 @@ class StateTestLoader(object):
return
def gather_files(self, filepath):
'''gather files for a test suite'''
'''
Gather files for a test suite
'''
self.test_files = []
log.info("gather_files: %s", time.time())
filepath = filepath + os.sep + 'saltcheck-tests'
@ -594,7 +628,9 @@ class StateTestLoader(object):
@staticmethod
def convert_sls_to_paths(sls_list):
'''Converting sls to paths'''
'''
Converting sls to paths
'''
new_sls_list = []
for sls in sls_list:
sls = sls.replace(".", os.sep)
@ -603,12 +639,16 @@ class StateTestLoader(object):
@staticmethod
def convert_sls_to_path(sls):
'''Converting sls to paths'''
'''
Converting sls to paths
'''
sls = sls.replace(".", os.sep)
return sls
def add_test_files_for_sls(self, sls_path):
'''Adding test files'''
'''
Adding test files
'''
for path in self.search_paths:
full_path = path + os.sep + sls_path
rootdir = full_path

View file

@ -16,7 +16,6 @@ import types
log = logging.getLogger(__name__)
from salt.ext import six
try:
import testinfra
from testinfra import modules
@ -218,7 +217,7 @@ def _copy_function(module_name, name=None):
comparison: eq
```
"""
log.debug('Generating function for %s module', module_name)
log.debug('Generating function for testinfra.%s', module_name)
def _run_tests(name, **methods):
success = True
@ -278,9 +277,15 @@ def _copy_function(module_name, name=None):
))
return success, pass_msgs, fail_msgs
func = _run_tests
if name is not None:
# types.FunctionType requires a str for __name__ attribute, using a
# unicode type will result in a TypeError.
name = str(name) # future lint: disable=blacklisted-function
else:
name = func.__name__
return types.FunctionType(func.__code__,
func.__globals__,
name or func.__name__,
name,
func.__defaults__,
func.__closure__)
@ -297,7 +302,7 @@ def _register_functions():
modules_ = [module_ for module_ in modules.modules]
for mod_name in modules_:
mod_func = _copy_function(mod_name, six.text_type(mod_name))
mod_func = _copy_function(mod_name, mod_name)
mod_func.__doc__ = _build_doc(mod_name)
__all__.append(mod_name)
globals()[mod_name] = mod_func

View file

@ -532,9 +532,8 @@ def set_hwclock(clock):
'Zone \'{0}\' does not exist'.format(zonepath)
)
tzfile = _get_localtime_path()
os.unlink(tzfile)
os.symlink(zonepath, tzfile)
os.unlink('/etc/localtime')
os.symlink(zonepath, '/etc/localtime')
if 'Arch' in __grains__['os_family']:
cmd = ['timezonectl', 'set-local-rtc',

View file

@ -3021,7 +3021,6 @@ class BaseHighState(object):
'top_file_merging_strategy set to \'same\', but no '
'default_top configuration option was set'
)
self.opts['saltenv'] = self.opts['default_top']
if self.opts['saltenv']:
contents = self.client.cache_file(

View file

@ -1256,7 +1256,7 @@ def _makedirs(name,
Helper function for creating directories when the ``makedirs`` option is set
to ``True``. Handles Unix and Windows based systems
.. versionadded:: 2017.7.7
.. versionadded:: 2017.7.8
Args:
name (str): The directory path to create

View file

@ -364,9 +364,17 @@ def run(name,
)
)
else:
output_file.write(
salt.utils.stringutils.to_str(query_result)
)
if isinstance(query_result, six.text_type):
output_file.write(
salt.utils.stringutils.to_str(query_result)
)
else:
for col, val in six.iteritems(query_result):
output_file.write(
salt.utils.stringutils.to_str(
'{0}:{1}\n'.format(col, val)
)
)
else:
ret['changes']['query'] = "Executed"

View file

@ -751,9 +751,9 @@ class IPCMessageSubscriber(IPCClient):
# This will prevent this message from showing up:
# '[ERROR ] Future exception was never retrieved:
# StreamClosedError'
if self._read_sync_future is not None:
if self._read_sync_future is not None and self._read_sync_future.done():
self._read_sync_future.exception()
if self._read_stream_future is not None:
if self._read_stream_future is not None and self._read_stream_future.done():
self._read_stream_future.exception()
def __del__(self):

View file

@ -913,10 +913,9 @@ class SaltMessageClient(object):
# This happens because the logic is always waiting to read
# the next message and the associated read future is marked
# 'StreamClosedError' when the stream is closed.
self._read_until_future.exception()
if (not self._stream_return_future.done() and
self.io_loop != tornado.ioloop.IOLoop.current(
instance=False)):
if self._read_until_future.done():
self._read_until_future.exception()
elif self.io_loop != tornado.ioloop.IOLoop.current(instance=False):
self.io_loop.add_future(
self._stream_return_future,
lambda future: self.io_loop.stop()
@ -1162,7 +1161,7 @@ class Subscriber(object):
self._closing = True
if not self.stream.closed():
self.stream.close()
if self._read_until_future is not None:
if self._read_until_future is not None and self._read_until_future.done():
# This will prevent this message from showing up:
# '[ERROR ] Future exception was never retrieved:
# StreamClosedError'

View file

@ -56,6 +56,6 @@ def alias_function(fun, name, doc=None):
orig_name = fun.__name__
alias_msg = ('\nThis function is an alias of '
'``{0}``.\n'.format(orig_name))
alias_fun.__doc__ = alias_msg + fun.__doc__
alias_fun.__doc__ = alias_msg + (fun.__doc__ or '')
return alias_fun

View file

@ -10,6 +10,7 @@ id: minion
open_mode: True
log_file: minion.log
log_level_logfile: debug
log_fmt_console: '%(asctime)s,%(msecs)03d [%(levelname)-8s] %(message)s'
pidfile: minion.pid
# Give the minion extra attempts to find the master

View file

@ -431,6 +431,16 @@ class PipModuleTest(ModuleCase):
pprint.pprint(ret)
raise
@skipIf(not os.path.isfile('/bin/pip3'), 'test where pip3 is installed')
@skipIf(salt.utils.platform.is_windows(), 'test specific for linux usage of /bin/python')
def test_system_pip3(self):
    '''
    Install and then uninstall a pinned package with the system pip3 and
    verify via ``pip3 freeze`` that it appears and then disappears.
    '''
    # Bug fix: the skip guard previously checked os.path.isfile('pip3'),
    # a path relative to the CWD, which is essentially never present -- so
    # the test was always skipped. Check the same absolute path the test
    # body actually uses.
    self.run_function('pip.install', pkgs=['lazyimport==0.0.1'], bin_env='/bin/pip3')
    ret1 = self.run_function('cmd.run', '/bin/pip3 freeze | grep lazyimport')
    self.run_function('pip.uninstall', pkgs=['lazyimport'], bin_env='/bin/pip3')
    ret2 = self.run_function('cmd.run', '/bin/pip3 freeze | grep lazyimport')
    assert 'lazyimport==0.0.1' in ret1
    assert ret2 == ''
def tearDown(self):
super(PipModuleTest, self).tearDown()
if os.path.isdir(self.venv_test_dir):

View file

@ -37,6 +37,18 @@ class NetapiClientTest(TestCase):
ret = self.netapi.run(low)
self.assertEqual(ret, {'minion': True, 'sub_minion': True, 'localhost': True})
def test_local_batch(self):
    '''
    A local_batch run of test.ping over all minions should yield a ping
    result for every expected minion.
    '''
    low = {'client': 'local_batch', 'tgt': '*', 'fun': 'test.ping'}
    low.update(self.eauth_creds)

    # local_batch yields results incrementally; collect them all first
    batch_returns = [item for item in self.netapi.run(low)]
    self.assertIn({'localhost': True}, batch_returns)
    self.assertIn({'sub_minion': True}, batch_returns)
    self.assertIn({'minion': True}, batch_returns)
def test_local_async(self):
low = {'client': 'local_async', 'tgt': '*', 'fun': 'test.ping'}
low.update(self.eauth_creds)

View file

@ -232,8 +232,8 @@ class KeyTest(ShellCase, ShellCaseCommonTestsMixin):
test salt-key -l with wrong eauth
'''
data = self.run_key('-l acc --eauth wrongeauth --username {0} --password {1}'.format(USERA, USERA_PWD))
expect = ['The specified external authentication system "wrongeauth" is not available']
self.assertEqual(data, expect)
expect = r"^The specified external authentication system \"wrongeauth\" is not available\tAvailable eauth types: auto, .*"
self.assertRegex("\t".join(data), expect)
def test_list_un(self):
'''

View file

@ -208,5 +208,5 @@ class RunTest(ShellCase, testprogram.TestProgramCase, ShellCaseCommonTestsMixin)
'''
run_cmd = self.run_run('-a wrongeauth --username {0} --password {1}\
test.arg arg kwarg=kwarg1'.format(USERA, USERA_PWD))
expect = ['The specified external authentication system "wrongeauth" is not available']
self.assertEqual(expect, run_cmd)
expect = r"^The specified external authentication system \"wrongeauth\" is not available\tAvailable eauth types: auto, .*"
self.assertRegex("\t".join(run_cmd), expect)

View file

@ -489,7 +489,7 @@ class ShellCase(ShellTestCase, AdaptedConfigurationTestCaseMixin, ScriptPathMixi
'''
Execute salt
'''
arg_str = '-c {0} {1}'.format(self.config_dir, arg_str)
arg_str = '-c {0} -t {1} {2}'.format(self.config_dir, timeout, arg_str)
ret = self.run_script('salt',
arg_str,
with_retcode=with_retcode,

View file

@ -15,6 +15,8 @@
# pylint: disable=unused-import,function-redefined,blacklisted-module,blacklisted-external-module
from __future__ import absolute_import
import collections
import copy
import errno
import fnmatch
import sys
@ -94,119 +96,420 @@ if NO_MOCK is False:
NO_MOCK_REASON = 'you need to upgrade your mock version to >= 0.8.0'
# backport mock_open from the python 3 unittest.mock library so that we can
# mock read, readline, readlines, and file iteration properly
class MockFH(object):
def __init__(self, filename, read_data, *args, **kwargs):
    '''
    Mock filehandle whose read/write entry points are Mock objects wrapping
    real implementations, so calls can both be asserted on and behave like
    a file opened with the given mode.

    filename  -- the path this handle was "opened" for
    read_data -- data served by read()/readline()/readlines()
    args/kwargs -- remaining open() arguments; args[0] or the ``mode``
                   keyword selects the mode (default 'r')
    '''
    self.filename = filename
    self.read_data = read_data
    try:
        self.mode = args[0]
    except IndexError:
        self.mode = kwargs.get('mode', 'r')
    # Mode flags drive the type checks and permission errors in the
    # _read*/_write* implementations
    self.binary_mode = 'b' in self.mode
    self.read_mode = any(x in self.mode for x in ('r', '+'))
    self.write_mode = any(x in self.mode for x in ('w', 'a', '+'))
    self.empty_string = b'' if self.binary_mode else ''
    # Record of how this handle was opened, for later assertions
    self.call = MockCall(filename, *args, **kwargs)
    self.read_data_iter = self._iterate_read_data(read_data)
    # Mocks wrap the real implementations so every call is recorded
    self.read = Mock(side_effect=self._read)
    self.readlines = Mock(side_effect=self._readlines)
    self.readline = Mock(side_effect=self._readline)
    self.write = Mock(side_effect=self._write)
    self.writelines = Mock(side_effect=self._writelines)
    self.close = Mock()
    self.seek = Mock()
    self.__loc = 0  # simulated file position, advanced by the read helpers
    self.__read_data_ok = False  # set True once read_data's type is validated
file_spec = None
def _iterate_read_data(self, read_data):
'''
Helper for mock_open:
Retrieve lines from read_data via a generator so that separate calls to
readline, read, and readlines are properly interleaved
'''
# Newline will always be a bytestring on PY2 because mock_open will have
# normalized it to one.
newline = b'\n' if isinstance(read_data, six.binary_type) else '\n'
read_data = [line + newline for line in read_data.split(newline)]
def _iterate_read_data(read_data):
'''
Helper for mock_open:
Retrieve lines from read_data via a generator so that separate calls to
readline, read, and readlines are properly interleaved
'''
# Newline will always be a bytestring on PY2 because mock_open will have
# normalized it to one.
newline = b'\n' if isinstance(read_data, six.binary_type) else '\n'
if read_data[-1] == newline:
# If the last line ended in a newline, the list comprehension will have an
# extra entry that's just a newline. Remove this.
read_data = read_data[:-1]
else:
# If there wasn't an extra newline by itself, then the file being
# emulated doesn't have a newline to end the last line, so remove the
# newline that we added in the list comprehension.
read_data[-1] = read_data[-1][:-1]
read_data = [line + newline for line in read_data.split(newline)]
if read_data[-1] == newline:
# If the last line ended in a newline, the list comprehension will have an
# extra entry that's just a newline. Remove this.
read_data = read_data[:-1]
else:
# If there wasn't an extra newline by itself, then the file being
# emulated doesn't have a newline to end the last line, so remove the
# newline that we added in the list comprehension.
read_data[-1] = read_data[-1][:-1]
for line in read_data:
yield line
def mock_open(mock=None, read_data='', match=None):
'''
A helper function to create a mock to replace the use of `open`. It works
for `open` called directly or used as a context manager.
The `mock` argument is the mock object to configure. If `None` (the
default) then a `MagicMock` will be created for you, with the API limited
to methods or attributes available on standard file handles.
`read_data` is a string for the `read`, `readline`, and `readlines` methods of the
file handle to return. This is an empty string by default.
If passed, `match` can be either a string or an iterable containing
patterns to attempt to match using fnmatch.fnmatch(). A side_effect will be
added to the mock object returned, which will cause an IOError(2, 'No such
file or directory') to be raised when the file path is not a match. This
allows you to make your mocked filehandle only work for certain file paths.
'''
# Normalize read_data, Python 2 filehandles should never produce unicode
# types on read.
if six.PY2:
read_data = salt.utils.stringutils.to_str(read_data)
def _readlines_side_effect(*args, **kwargs):
if handle.readlines.return_value is not None:
return handle.readlines.return_value
return list(_data)
def _read_side_effect(*args, **kwargs):
if handle.read.return_value is not None:
return handle.read.return_value
joiner = b'' if isinstance(read_data, six.binary_type) else ''
return joiner.join(_data)
def _readline_side_effect():
if handle.readline.return_value is not None:
while True:
yield handle.readline.return_value
for line in _data:
for line in read_data:
yield line
global file_spec
if file_spec is None:
if six.PY3:
import _io
file_spec = list(set(dir(_io.TextIOWrapper)).union(set(dir(_io.BytesIO))))
@property
def write_calls(self):
    '''
    Contents handed to every recorded ``.write()`` call, oldest first.
    '''
    written = []
    for recorded in self.write.mock_calls:
        # each mock_calls entry is (name, args, kwargs); args[0] is the payload
        written.append(recorded[1][0])
    return written
@property
def writelines_calls(self):
    '''
    The line-sequences handed to every recorded ``.writelines()`` call.
    '''
    payloads = []
    for recorded in self.writelines.mock_calls:
        # each mock_calls entry is (name, args, kwargs); args[0] is the payload
        payloads.append(recorded[1][0])
    return payloads
def tell(self):
    '''
    Return the simulated file position (total length consumed so far).
    '''
    return self.__loc
def __check_read_data(self):
    '''
    Validate (only once per handle) that read_data's type matches the open
    mode: bytes for binary mode, str otherwise. Raises TypeError on a
    mismatch, mirroring what a real filehandle comparison would surface.
    '''
    if not self.__read_data_ok:
        if self.binary_mode:
            if not isinstance(self.read_data, six.binary_type):
                raise TypeError(
                    '{0} opened in binary mode, expected read_data to be '
                    'bytes, not {1}'.format(
                        self.filename,
                        type(self.read_data).__name__
                    )
                )
        else:
            if not isinstance(self.read_data, str):
                raise TypeError(
                    '{0} opened in non-binary mode, expected read_data to '
                    'be str, not {1}'.format(
                        self.filename,
                        type(self.read_data).__name__
                    )
                )
        # No need to repeat this the next time we check
        self.__read_data_ok = True
def _read(self, size=0):
self.__check_read_data()
if not self.read_mode:
raise IOError('File not open for reading')
if not isinstance(size, six.integer_types) or size < 0:
raise TypeError('a positive integer is required')
joined = self.empty_string.join(self.read_data_iter)
if not size:
# read() called with no args, return everything
self.__loc += len(joined)
return joined
else:
file_spec = file # pylint: disable=undefined-variable
# read() called with an explicit size. Return a slice matching the
# requested size, but before doing so, reset read_data to reflect
# what we read.
self.read_data_iter = self._iterate_read_data(joined[size:])
ret = joined[:size]
self.__loc += len(ret)
return ret
if mock is None:
mock = MagicMock(name='open', spec=open)
def _readlines(self, size=None):  # pylint: disable=unused-argument
    '''
    Backing implementation for the .readlines() mock: drain every remaining
    line and advance the simulated file position accordingly.
    '''
    # TODO: Implement "size" argument
    self.__check_read_data()
    if not self.read_mode:
        raise IOError('File not open for reading')
    remaining = [line for line in self.read_data_iter]
    for line in remaining:
        self.__loc += len(line)
    return remaining
handle = MagicMock(spec=file_spec)
handle.__enter__.return_value = handle
def _readline(self, size=None):  # pylint: disable=unused-argument
    '''
    Backing implementation for the .readline() mock: yield the next line,
    or the mode-appropriate empty string once the data is exhausted.
    '''
    # TODO: Implement "size" argument
    self.__check_read_data()
    if not self.read_mode:
        raise IOError('File not open for reading')
    try:
        line = next(self.read_data_iter)
    except StopIteration:
        return self.empty_string
    self.__loc += len(line)
    return line
_data = _iterate_read_data(read_data)
def __iter__(self):
    '''
    Support ``for line in filehandle`` iteration, advancing the simulated
    file position as each line is consumed.
    '''
    self.__check_read_data()
    if not self.read_mode:
        raise IOError('File not open for reading')
    for line in self.read_data_iter:
        self.__loc += len(line)
        yield line
handle.write.return_value = None
handle.read.return_value = None
handle.readline.return_value = None
handle.readlines.return_value = None
def _write(self, content):
    '''
    Backing implementation for the .write() mock.

    Mirrors a real filehandle's type checks: binary-mode handles accept
    only bytes, text-mode handles only str. The written content itself is
    recorded by the Mock wrapper, not stored here.
    '''
    if not self.write_mode:
        raise IOError('File not open for writing')
    if six.PY2:
        if isinstance(content, six.text_type):
            # encoding intentionally not specified to force a
            # UnicodeEncodeError when non-ascii unicode type is passed
            content.encode()
    else:
        content_type = type(content)
        if self.binary_mode and content_type is not bytes:
            raise TypeError(
                'a bytes-like object is required, not \'{0}\''.format(
                    content_type.__name__
                )
            )
        elif not self.binary_mode and content_type is not str:
            raise TypeError(
                'write() argument must be str, not {0}'.format(
                    content_type.__name__
                )
            )
# Support iteration via for loop
handle.__iter__ = lambda x: _readline_side_effect()
def _writelines(self, lines):
    '''
    Backing implementation for the .writelines() mock: write each element
    of *lines* as-is (no newlines appended), enforcing write mode.
    '''
    if not self.write_mode:
        raise IOError('File not open for writing')
    for line in lines:
        self._write(line)
# This is salt specific and not in the upstream mock
handle.read.side_effect = _read_side_effect
handle.readline.side_effect = _readline_side_effect()
handle.readlines.side_effect = _readlines_side_effect
def __enter__(self):
    # Context-manager support: the mock filehandle is its own manager,
    # matching ``with open(...) as f`` semantics.
    return self
if match is not None:
if isinstance(match, six.string_types):
match = [match]
def __exit__(self, exc_type, exc_val, exc_tb):  # pylint: disable=unused-argument
    # Nothing to release; exceptions are deliberately not suppressed.
    pass
def fopen_side_effect(name, *args, **kwargs):
for pat in match:
if fnmatch.fnmatch(name, pat):
return DEFAULT
class MockCall(object):
    '''
    Lightweight record of a single call's positional and keyword arguments,
    with an eval()-style repr such as ``MockCall('foo', mode='rb')``.
    '''
    def __init__(self, *args, **kwargs):
        self.args = args
        self.kwargs = kwargs

    def __repr__(self):
        # Bug fix: the previous implementation joined keyword arguments with
        # no separator between them (and left a stray trailing ', ' when
        # both args and kwargs were present), producing reprs like
        # "MockCall(1, a=2b=3)". Build all pieces and join them uniformly.
        # future lint: disable=blacklisted-function
        parts = [repr(arg) for arg in self.args]
        for key, val in six.iteritems(self.kwargs):
            parts.append(str('{0}={1}').format(
                salt.utils.stringutils.to_str(key),
                repr(val)
            ))
        ret = str('MockCall({0})').format(str(', ').join(parts))
        # future lint: enable=blacklisted-function
        return ret

    def __str__(self):
        return self.__repr__()

    def __eq__(self, other):
        # Two calls are equal when both their args and kwargs match
        return self.args == other.args and self.kwargs == other.kwargs
class MockOpen(object):
r'''
This class can be used to mock the use of ``open()``.
``read_data`` is a string representing the contents of the file to be read.
By default, this is an empty string.
Optionally, ``read_data`` can be a dictionary mapping ``fnmatch.fnmatch()``
patterns to strings (or optionally, exceptions). This allows the mocked
filehandle to serve content for more than one file path.
.. code-block:: python
data = {
'/etc/foo.conf': textwrap.dedent("""\
Foo
Bar
Baz
"""),
'/etc/bar.conf': textwrap.dedent("""\
A
B
C
"""),
}
with patch('salt.utils.files.fopen', mock_open(read_data=data)):
do stuff
If the file path being opened does not match any of the glob expressions,
an IOError will be raised to simulate the file not existing.
Passing ``read_data`` as a string is equivalent to passing it with a glob
expression of "*". That is to say, the below two invocations are
equivalent:
.. code-block:: python
mock_open(read_data='foo\n')
mock_open(read_data={'*': 'foo\n'})
Instead of a string representing file contents, ``read_data`` can map to an
exception, and that exception will be raised if a file matching that
pattern is opened:
.. code-block:: python
data = {
'/etc/*': IOError(errno.EACCES, 'Permission denied'),
'*': 'Hello world!\n',
}
with patch('salt.utils.files.fopen', mock_open(read_data=data)):
do stuff
The above would raise an exception if any files within /etc are opened, but
would produce a mocked filehandle if any other file is opened.
To simulate file contents changing upon subsequent opens, the file contents
can be a list of strings/exceptions. For example:
.. code-block:: python
data = {
'/etc/foo.conf': [
'before\n',
'after\n',
],
'/etc/bar.conf': [
IOError(errno.ENOENT, 'No such file or directory', '/etc/bar.conf'),
'Hey, the file exists now!',
],
}
with patch('salt.utils.files.fopen', mock_open(read_data=data)):
do stuff
The first open of ``/etc/foo.conf`` would return "before\n" when read,
while the second would return "after\n" when read. For ``/etc/bar.conf``,
the first read would raise an exception, while the second would open
successfully and read the specified string.
Expressions will be attempted in dictionary iteration order (the exception
being ``*`` which is tried last), so if a file path matches more than one
fnmatch expression then the first match "wins". If your use case calls for
overlapping expressions, then an OrderedDict can be used to ensure that the
desired matching behavior occurs:
.. code-block:: python
data = OrderedDict()
data['/etc/foo.conf'] = 'Permission granted!'
data['/etc/*'] = IOError(errno.EACCES, 'Permission denied')
data['*'] = 'Hello world!\n'
with patch('salt.utils.files.fopen', mock_open(read_data=data)):
do stuff
The following attributes are tracked for the life of a mock object:
* call_count - Tracks how many fopen calls were attempted
* filehandles - This is a dictionary mapping filenames to lists of MockFH
objects, representing the individual times that a given file was opened.
'''
def __init__(self, read_data=''):
    '''
    read_data -- a string, or a dict mapping fnmatch patterns to file
    contents (strings, exceptions, or lists thereof). See the class
    docstring for the full matching semantics.
    '''
    # If the read_data contains lists, we will be popping it. So, don't
    # modify the original value passed.
    read_data = copy.copy(read_data)

    # Normalize read_data, Python 2 filehandles should never produce unicode
    # types on read.
    if not isinstance(read_data, dict):
        # A bare string is shorthand for "match every path"
        read_data = {'*': read_data}

    if six.PY2:
        # .__class__() used here to preserve the dict class in the event that
        # an OrderedDict was used.
        new_read_data = read_data.__class__()
        for key, val in six.iteritems(read_data):
            try:
                val = salt.utils.data.decode(val, to_str=True)
            except TypeError:
                # Exception instances used as "contents" can't be decoded;
                # keep them as-is. Anything else failing to decode is a bug.
                if not isinstance(val, BaseException):
                    raise
            new_read_data[key] = val
        read_data = new_read_data
        del new_read_data

    self.read_data = read_data
    self.filehandles = {}  # filename -> list of MockFH, one per open of that file
    self.calls = []        # one MockCall per fopen invocation
    self.call_count = 0
def __call__(self, name, *args, **kwargs):
    '''
    Match the file being opened to the patterns in the read_data and spawn
    a mocked filehandle with the corresponding file contents.

    name -- the path being opened; remaining args/kwargs mirror open()'s.
    Returns a MockFH. Raises IOError(ENOENT) when no pattern matches, a
    RuntimeError when a list of contents is exhausted, or re-raises any
    exception instance stored as the matched contents.
    '''
    call = MockCall(name, *args, **kwargs)
    self.calls.append(call)
    self.call_count += 1

    # Non-glob patterns are tried in dict iteration order; '*' is reserved
    # as the last-resort fallback.
    for pat in self.read_data:
        if pat == '*':
            continue
        if fnmatch.fnmatch(name, pat):
            matched_pattern = pat
            break
    else:
        # No non-glob match in read_data, fall back to '*'
        matched_pattern = '*'
    try:
        matched_contents = self.read_data[matched_pattern]
        try:
            # Assuming that the value for the matching expression is a
            # list, pop the first element off of it.
            file_contents = matched_contents.pop(0)
        except AttributeError:
            # The value for the matching expression is a string (or exception)
            file_contents = matched_contents
        except IndexError:
            # We've run out of file contents, abort!
            raise RuntimeError(
                'File matching expression \'{0}\' opened more times than '
                'expected'.format(matched_pattern)
            )
        try:
            # Raise the exception if the matched file contents are an
            # instance of an exception class.
            raise file_contents
        except TypeError:
            # Contents were not an exception, so proceed with creating the
            # mocked filehandle.
            pass
        ret = MockFH(name, file_contents, *args, **kwargs)
        self.filehandles.setdefault(name, []).append(ret)
        return ret
    except KeyError:
        # No matching glob in read_data, treat this as a file that does
        # not exist and raise the appropriate exception.
        raise IOError(errno.ENOENT, 'No such file or directory', name)
mock.side_effect = fopen_side_effect
def write_calls(self, path=None):
    '''
    Return the data passed to every ``.write()`` call made on the mocked
    filehandles, in open order. A glob pattern in ``path`` restricts the
    result to files whose names match it.
    '''
    collected = []
    for fname, fh_list in six.iteritems(self.filehandles):
        if path is not None and not fnmatch.fnmatch(fname, path):
            continue
        for handle in fh_list:
            collected.extend(handle.write_calls)
    return collected
mock.return_value = handle
return mock
def writelines_calls(self, path=None):
    '''
    Return the data passed to every ``.writelines()`` call made on the
    mocked filehandles, in open order. A glob pattern in ``path``
    restricts the result to files whose names match it.
    '''
    collected = []
    for fname, fh_list in six.iteritems(self.filehandles):
        if path is not None and not fnmatch.fnmatch(fname, path):
            continue
        for handle in fh_list:
            collected.extend(handle.writelines_calls)
    return collected
# reimplement mock_open to support multiple filehandles
mock_open = MockOpen

View file

@ -29,7 +29,7 @@ def iter_installers(content):
x = m.groups()[0]
if not x.startswith(PREFIX):
continue
if x.endswith('zip'):
if x.endswith(('zip', 'sha256')):
continue
if installer:
if x != installer + '.md5':

View file

@ -12,6 +12,7 @@ from tests.support.mixins import LoaderModuleMockMixin
# Salt libs
import salt.beacons.btmp as btmp
from salt.ext import six
# pylint: disable=import-error
try:
@ -63,10 +64,11 @@ class BTMPBeaconTestCase(TestCase, LoaderModuleMockMixin):
self.assertEqual(ret, (True, 'Valid beacon configuration'))
with patch('salt.utils.files.fopen', mock_open()) as m_open:
with patch('salt.utils.files.fopen', mock_open(b'')) as m_open:
ret = btmp.beacon(config)
m_open.assert_called_with(btmp.BTMP, 'rb')
self.assertEqual(ret, [])
call_args = next(six.itervalues(m_open.filehandles))[0].call.args
assert call_args == (btmp.BTMP, 'rb'), call_args
assert ret == [], ret
def test_invalid_users(self):
config = [{'users': ['gareth']}]

View file

@ -12,6 +12,7 @@ from tests.support.mixins import LoaderModuleMockMixin
# Salt libs
import salt.beacons.wtmp as wtmp
from salt.ext import six
# pylint: disable=import-error
try:
@ -63,10 +64,11 @@ class WTMPBeaconTestCase(TestCase, LoaderModuleMockMixin):
self.assertEqual(ret, (True, 'Valid beacon configuration'))
with patch('salt.utils.files.fopen', mock_open()) as m_open:
with patch('salt.utils.files.fopen', mock_open(b'')) as m_open:
ret = wtmp.beacon(config)
m_open.assert_called_with(wtmp.WTMP, 'rb')
self.assertEqual(ret, [])
call_args = next(six.itervalues(m_open.filehandles))[0].call.args
assert call_args == (wtmp.WTMP, 'rb'), call_args
assert ret == [], ret
def test_invalid_users(self):
config = [{'users': ['gareth']}]

View file

@ -17,7 +17,6 @@ from tests.support.mock import NO_MOCK, NO_MOCK_REASON, patch, PropertyMock
from tests.support.paths import TMP
from tests.unit.test_crypt import PRIVKEY_DATA
PASS_DATA = (
b'qOjCKDlBdcNEbJ/J8eRl7sH+bYIIm4cvHHY86gh2NEUnufFlFo0gGVTZR05Fj0cw3n/w7gR'
b'urNXz5JoeSIHVuNI3YTwzL9yEAaC0kuy8EbOlO2yx8yPGdfml9BRwOV7A6b8UFo9co4H7fz'

View file

@ -185,10 +185,15 @@ class RootsTest(TestCase, AdaptedConfigurationTestCaseMixin, LoaderModuleMockMix
self.assertIn(UNICODE_DIRNAME, ret)
def test_symlink_list(self):
if self.test_symlink_list_file_roots:
self.opts['file_roots'] = self.test_symlink_list_file_roots
ret = roots.symlink_list({'saltenv': 'base'})
self.assertDictEqual(ret, {'dest_sym': 'source_sym'})
orig_file_roots = self.opts['file_roots']
try:
if self.test_symlink_list_file_roots:
self.opts['file_roots'] = self.test_symlink_list_file_roots
ret = roots.symlink_list({'saltenv': 'base'})
self.assertDictEqual(ret, {'dest_sym': 'source_sym'})
finally:
if self.test_symlink_list_file_roots:
self.opts['file_roots'] = orig_file_roots
class RootsLimitTraversalTest(TestCase, AdaptedConfigurationTestCaseMixin):

View file

@ -72,7 +72,9 @@ class CoreGrainsTestCase(TestCase, LoaderModuleMockMixin):
with salt.utils.files.fopen(os.path.join(OS_RELEASE_DIR, "ubuntu-17.10")) as os_release_file:
os_release_content = os_release_file.read()
with patch("salt.utils.files.fopen", mock_open(read_data=os_release_content)):
os_release = core._parse_os_release(["/etc/os-release", "/usr/lib/os-release"])
os_release = core._parse_os_release(
'/etc/os-release',
'/usr/lib/os-release')
self.assertEqual(os_release, {
"NAME": "Ubuntu",
"VERSION": "17.10 (Artful Aardvark)",
@ -88,10 +90,9 @@ class CoreGrainsTestCase(TestCase, LoaderModuleMockMixin):
"UBUNTU_CODENAME": "artful",
})
@patch("os.path.isfile")
def test_missing_os_release(self, path_isfile_mock):
path_isfile_mock.return_value = False
os_release = core._parse_os_release(["/etc/os-release", "/usr/lib/os-release"])
def test_missing_os_release(self):
with patch('salt.utils.files.fopen', mock_open(read_data={})):
os_release = core._parse_os_release('/etc/os-release', '/usr/lib/os-release')
self.assertEqual(os_release, {})
@skipIf(not salt.utils.platform.is_linux(), 'System is not Linux')
@ -104,7 +105,7 @@ class CoreGrainsTestCase(TestCase, LoaderModuleMockMixin):
}
_path_isfile_map = {}
_cmd_run_map = {
'dpkg --print-architecture': 'amd64'
'dpkg --print-architecture': 'amd64',
}
path_exists_mock = MagicMock(side_effect=lambda x: _path_exists_map[x])
@ -127,60 +128,35 @@ class CoreGrainsTestCase(TestCase, LoaderModuleMockMixin):
raise ImportError('No module named lsb_release')
return orig_import(name, *args)
# Skip the first if statement
# - Skip the first if statement
# - Skip the selinux/systemd stuff (not pertinent)
# - Skip the init grain compilation (not pertinent)
# - Ensure that lsb_release fails to import
# - Skip all the /etc/*-release stuff (not pertinent)
# - Mock linux_distribution to give us the OS name that we want
# - Make a bunch of functions return empty dicts, we don't care about
# these grains for the purposes of this test.
# - Mock the osarch
distro_mock = MagicMock(return_value=('Debian GNU/Linux', '8.3', ''))
with patch.object(salt.utils.platform, 'is_proxy',
MagicMock(return_value=False)):
# Skip the selinux/systemd stuff (not pertinent)
with patch.object(core, '_linux_bin_exists',
MagicMock(return_value=False)):
# Skip the init grain compilation (not pertinent)
with patch.object(os.path, 'exists', path_exists_mock):
# Ensure that lsb_release fails to import
with patch('{0}.__import__'.format(built_in),
side_effect=_import_mock):
# Skip all the /etc/*-release stuff (not pertinent)
with patch.object(os.path, 'isfile', path_isfile_mock):
# Mock linux_distribution to give us the OS name
# that we want.
distro_mock = MagicMock(
return_value=('Debian GNU/Linux', '8.3', '')
)
with patch.object(
core,
'linux_distribution',
distro_mock):
# Make a bunch of functions return empty dicts,
# we don't care about these grains for the
# purposes of this test.
with patch.object(
core,
'_linux_cpudata',
empty_mock):
with patch.object(
core,
'_linux_gpu_data',
empty_mock):
with patch.object(
core,
'_memdata',
empty_mock):
with patch.object(
core,
'_hw_data',
empty_mock):
with patch.object(
core,
'_virtual',
empty_mock):
with patch.object(
core,
'_ps',
empty_mock):
# Mock the osarch
with patch.dict(
core.__salt__,
{'cmd.run': cmd_run_mock}):
os_grains = core.os_data()
MagicMock(return_value=False)), \
patch.object(core, '_linux_bin_exists',
MagicMock(return_value=False)), \
patch.object(os.path, 'exists', path_exists_mock), \
patch('{0}.__import__'.format(built_in), side_effect=_import_mock), \
patch.object(os.path, 'isfile', path_isfile_mock), \
patch.object(core, '_parse_lsb_release', empty_mock), \
patch.object(core, '_parse_os_release', empty_mock), \
patch.object(core, '_parse_lsb_release', empty_mock), \
patch.object(core, 'linux_distribution', distro_mock), \
patch.object(core, '_linux_cpudata', empty_mock), \
patch.object(core, '_linux_gpu_data', empty_mock), \
patch.object(core, '_memdata', empty_mock), \
patch.object(core, '_hw_data', empty_mock), \
patch.object(core, '_virtual', empty_mock), \
patch.object(core, '_ps', empty_mock), \
patch.dict(core.__salt__, {'cmd.run': cmd_run_mock}):
os_grains = core.os_data()
self.assertEqual(os_grains.get('os_family'), 'Debian')
@ -218,33 +194,34 @@ class CoreGrainsTestCase(TestCase, LoaderModuleMockMixin):
raise ImportError('No module named lsb_release')
return orig_import(name, *args)
# Skip the first if statement
distro_mock = MagicMock(
return_value=('SUSE Linux Enterprise Server ', '12', 'x86_64')
)
# - Skip the first if statement
# - Skip the selinux/systemd stuff (not pertinent)
# - Skip the init grain compilation (not pertinent)
# - Ensure that lsb_release fails to import
# - Skip all the /etc/*-release stuff (not pertinent)
# - Mock linux_distribution to give us the OS name that we want
# - Mock the osarch
with patch.object(salt.utils.platform, 'is_proxy',
MagicMock(return_value=False)):
# Skip the selinux/systemd stuff (not pertinent)
with patch.object(core, '_linux_bin_exists',
MagicMock(return_value=False)):
# Skip the init grain compilation (not pertinent)
with patch.object(os.path, 'exists', path_exists_mock):
# Ensure that lsb_release fails to import
with patch('{0}.__import__'.format(built_in),
side_effect=_import_mock):
# Skip all the /etc/*-release stuff (not pertinent)
with patch.object(os.path, 'isfile', MagicMock(return_value=False)):
with patch.object(core, '_parse_os_release', os_release_mock):
# Mock linux_distribution to give us the OS
# name that we want.
distro_mock = MagicMock(
return_value=('SUSE Linux Enterprise Server ', '12', 'x86_64')
)
with patch.object(core, 'linux_distribution', distro_mock):
with patch.object(core, '_linux_gpu_data', empty_mock):
with patch.object(core, '_hw_data', empty_mock):
with patch.object(core, '_linux_cpudata', empty_mock):
with patch.object(core, '_virtual', empty_mock):
# Mock the osarch
with patch.dict(core.__salt__, {'cmd.run': osarch_mock}):
os_grains = core.os_data()
MagicMock(return_value=False)), \
patch.object(core, '_linux_bin_exists',
MagicMock(return_value=False)), \
patch.object(os.path, 'exists', path_exists_mock), \
patch('{0}.__import__'.format(built_in),
side_effect=_import_mock), \
patch.object(os.path, 'isfile', MagicMock(return_value=False)), \
patch.object(core, '_parse_os_release', os_release_mock), \
patch.object(core, '_parse_lsb_release', empty_mock), \
patch.object(core, 'linux_distribution', distro_mock), \
patch.object(core, '_linux_gpu_data', empty_mock), \
patch.object(core, '_hw_data', empty_mock), \
patch.object(core, '_linux_cpudata', empty_mock), \
patch.object(core, '_virtual', empty_mock), \
patch.dict(core.__salt__, {'cmd.run': osarch_mock}):
os_grains = core.os_data()
self.assertEqual(os_grains.get('os_family'), 'Suse')
self.assertEqual(os_grains.get('os'), 'SUSE')
@ -255,7 +232,8 @@ class CoreGrainsTestCase(TestCase, LoaderModuleMockMixin):
osarch_mock = MagicMock(return_value="amd64")
if os_release_filename:
os_release_data = core._parse_os_release(
[os.path.join(OS_RELEASE_DIR, os_release_filename)])
os.path.join(OS_RELEASE_DIR, os_release_filename)
)
else:
os_release_data = os_release_map.get('os_release_file', {})
os_release_mock = MagicMock(return_value=os_release_data)
@ -271,13 +249,19 @@ class CoreGrainsTestCase(TestCase, LoaderModuleMockMixin):
raise ImportError('No module named lsb_release')
return orig_import(name, *args)
# Skip the first if statement
# Skip the selinux/systemd stuff (not pertinent)
# Skip the init grain compilation (not pertinent)
# Ensure that lsb_release fails to import
# Skip all the /etc/*-release stuff (not pertinent)
# - Mock linux_distribution to give us the OS name that we want.
# Mock the osarch
suse_release_file = os_release_map.get('suse_release_file')
file_contents = {'/proc/1/cmdline': ''}
if suse_release_file:
file_contents['/etc/SuSE-release'] = suse_release_file
# - Skip the first if statement
# - Skip the selinux/systemd stuff (not pertinent)
# - Skip the init grain compilation (not pertinent)
# - Ensure that lsb_release fails to import
# - Skip all the /etc/*-release stuff (not pertinent)
# - Mock linux_distribution to give us the OS name that we want
# - Mock the osarch
distro_mock = MagicMock(return_value=os_release_map['linux_distribution'])
with patch.object(salt.utils.platform, 'is_proxy', MagicMock(return_value=False)), \
patch.object(core, '_linux_bin_exists', MagicMock(return_value=False)), \
@ -285,7 +269,8 @@ class CoreGrainsTestCase(TestCase, LoaderModuleMockMixin):
patch('{0}.__import__'.format(built_in), side_effect=_import_mock), \
patch.object(os.path, 'isfile', path_isfile_mock), \
patch.object(core, '_parse_os_release', os_release_mock), \
patch('salt.utils.files.fopen', mock_open(read_data=os_release_map.get('suse_release_file', ''))), \
patch.object(core, '_parse_lsb_release', empty_mock), \
patch('salt.utils.files.fopen', mock_open(read_data=file_contents)), \
patch.object(core, 'linux_distribution', distro_mock), \
patch.object(core, '_linux_gpu_data', empty_mock), \
patch.object(core, '_linux_cpudata', empty_mock), \
@ -562,67 +547,13 @@ class CoreGrainsTestCase(TestCase, LoaderModuleMockMixin):
'''
Test memdata on Linux systems
'''
_path_exists_map = {
'/proc/1/cmdline': False,
'/proc/meminfo': True
}
_path_isfile_map = {
'/proc/meminfo': True
}
_cmd_run_map = {
'dpkg --print-architecture': 'amd64',
'rpm --eval %{_host_cpu}': 'x86_64'
}
path_exists_mock = MagicMock(side_effect=lambda x: _path_exists_map[x])
path_isfile_mock = MagicMock(
side_effect=lambda x: _path_isfile_map.get(x, False)
)
cmd_run_mock = MagicMock(
side_effect=lambda x: _cmd_run_map[x]
)
empty_mock = MagicMock(return_value={})
_proc_meminfo = textwrap.dedent('''\
MemTotal: 16277028 kB
SwapTotal: 4789244 kB''')
orig_import = __import__
if six.PY2:
built_in = '__builtin__'
else:
built_in = 'builtins'
def _import_mock(name, *args):
if name == 'lsb_release':
raise ImportError('No module named lsb_release')
return orig_import(name, *args)
# Mock a bunch of stuff so we can isolate the mem stuff:
# - Skip the first if statement
# - Skip the init grain compilation (not pertinent)
# - Ensure that lsb_release fails to import
# - Skip all the /etc/*-release stuff (not pertinent)
# - Make a bunch of functions return empty dicts, we don't care
# about these grains for the purposes of this test.
# - Mock the osarch
# - And most importantly, mock the contents of /proc/meminfo
with patch.object(salt.utils.platform, 'is_proxy', MagicMock(return_value=False)), \
patch.object(core, '_linux_bin_exists', MagicMock(return_value=False)), \
patch.object(os.path, 'exists', path_exists_mock), \
patch('{0}.__import__'.format(built_in), side_effect=_import_mock), \
patch.object(os.path, 'isfile', path_isfile_mock), \
patch.object(core, '_linux_cpudata', empty_mock), \
patch.object(core, '_linux_gpu_data', empty_mock), \
patch.object(core, '_hw_data', empty_mock), \
patch.object(core, '_virtual', empty_mock), \
patch.object(core, '_ps', empty_mock), \
patch.dict(core.__salt__, {'cmd.run': cmd_run_mock}), \
patch('salt.utils.files.fopen', mock_open(read_data=_proc_meminfo)):
os_grains = core.os_data()
self.assertEqual(os_grains.get('mem_total'), 15895)
self.assertEqual(os_grains.get('swap_total'), 4676)
with patch('salt.utils.files.fopen', mock_open(read_data=_proc_meminfo)):
memdata = core._linux_memdata()
self.assertEqual(memdata.get('mem_total'), 15895)
self.assertEqual(memdata.get('swap_total'), 4676)
@skipIf(salt.utils.platform.is_windows(), 'System is Windows')
def test_bsd_memdata(self):
@ -935,39 +866,70 @@ class CoreGrainsTestCase(TestCase, LoaderModuleMockMixin):
'''
import platform
path_isfile_mock = MagicMock(side_effect=lambda x: x in ['/etc/release'])
with patch.object(platform, 'uname',
MagicMock(return_value=('SunOS', 'testsystem', '5.11', '11.3', 'sunv4', 'sparc'))):
with patch.object(salt.utils.platform, 'is_proxy',
MagicMock(return_value=False)):
with patch.object(salt.utils.platform, 'is_linux',
MagicMock(return_value=False)):
with patch.object(salt.utils.platform, 'is_windows',
MagicMock(return_value=False)):
with patch.object(salt.utils.platform, 'is_smartos',
MagicMock(return_value=False)):
with patch.object(salt.utils.path, 'which_bin',
MagicMock(return_value=None)):
with patch.object(os.path, 'isfile', path_isfile_mock):
with salt.utils.files.fopen(os.path.join(OS_RELEASE_DIR, "solaris-11.3")) as os_release_file:
os_release_content = os_release_file.readlines()
with patch("salt.utils.files.fopen", mock_open()) as os_release_file:
os_release_file.return_value.__iter__.return_value = os_release_content
with patch.object(core, '_sunos_cpudata',
MagicMock(return_value={'cpuarch': 'sparcv9',
'num_cpus': '1',
'cpu_model': 'MOCK_CPU_MODEL',
'cpu_flags': []})):
with patch.object(core, '_memdata',
MagicMock(return_value={'mem_total': 16384})):
with patch.object(core, '_virtual',
MagicMock(return_value={})):
with patch.object(core, '_ps',
MagicMock(return_value={})):
with patch.object(salt.utils.path, 'which',
MagicMock(return_value=True)):
sparc_return_mock = MagicMock(return_value=prtdata)
with patch.dict(core.__salt__, {'cmd.run': sparc_return_mock}):
os_grains = core.os_data()
with salt.utils.files.fopen(os.path.join(OS_RELEASE_DIR, "solaris-11.3")) as os_release_file:
os_release_content = os_release_file.readlines()
uname_mock = MagicMock(return_value=(
'SunOS', 'testsystem', '5.11', '11.3', 'sunv4', 'sparc'
))
with patch.object(platform, 'uname', uname_mock), \
patch.object(salt.utils.platform, 'is_proxy',
MagicMock(return_value=False)), \
patch.object(salt.utils.platform, 'is_linux',
MagicMock(return_value=False)), \
patch.object(salt.utils.platform, 'is_windows',
MagicMock(return_value=False)), \
patch.object(salt.utils.platform, 'is_smartos',
MagicMock(return_value=False)), \
patch.object(salt.utils.path, 'which_bin',
MagicMock(return_value=None)), \
patch.object(os.path, 'isfile', path_isfile_mock), \
patch('salt.utils.files.fopen',
mock_open(read_data=os_release_content)) as os_release_file, \
patch.object(core, '_sunos_cpudata',
MagicMock(return_value={
'cpuarch': 'sparcv9',
'num_cpus': '1',
'cpu_model': 'MOCK_CPU_MODEL',
'cpu_flags': []})), \
patch.object(core, '_memdata',
MagicMock(return_value={'mem_total': 16384})), \
patch.object(core, '_virtual',
MagicMock(return_value={})), \
patch.object(core, '_ps', MagicMock(return_value={})), \
patch.object(salt.utils.path, 'which',
MagicMock(return_value=True)), \
patch.dict(core.__salt__,
{'cmd.run': MagicMock(return_value=prtdata)}):
os_grains = core.os_data()
grains = {k: v for k, v in os_grains.items()
if k in set(['product', 'productname'])}
self.assertEqual(grains, expectation)
@patch('os.path.isfile')
@patch('os.path.isdir')
def test_core_virtual_unicode(self, mock_isdir, mock_isfile):
    '''
    Test the virtual grain when the product_name file contains a unicode
    character (reading /sys/devices/virtual/dmi/id/product_name).

    NOTE: stacked ``@patch`` decorators inject their mocks bottom-up, so
    the first argument after ``self`` is the ``os.path.isdir`` mock and
    the second is the ``os.path.isfile`` mock. The previous parameter
    names (``mock_file``, ``mock_dir``) were swapped relative to the
    mocks actually injected; both receive the same side effect, so
    behavior is unchanged, but the names now reflect reality.
    '''
    def path_side_effect(path):
        # Only the DMI product_name path is reported as existing
        if path == '/sys/devices/virtual/dmi/id/product_name':
            return True
        return False

    virt = 'kvm'
    mock_isdir.side_effect = path_side_effect
    mock_isfile.side_effect = path_side_effect
    with patch.object(salt.utils.platform, 'is_windows',
                      MagicMock(return_value=False)), \
            patch.object(salt.utils.path, 'which',
                         MagicMock(return_value=True)), \
            patch.dict(core.__salt__,
                       {'cmd.run_all': MagicMock(return_value={
                           'pid': 78,
                           'retcode': 0,
                           'stderr': '',
                           'stdout': virt})}), \
            patch('salt.utils.files.fopen', mock_open(read_data='')):
        osdata = {'kernel': 'Linux', }
        ret = core._virtual(osdata)
        self.assertEqual(ret['virtual'], virt)

View file

@ -32,20 +32,14 @@ class FibreChannelGrainsTestCase(TestCase):
cmd_run_mock = MagicMock(return_value=wwns)
with patch('salt.modules.cmdmod.powershell', cmd_run_mock):
ret = fibre_channel._windows_wwns()
self.assertEqual(ret, wwns)
assert ret == wwns, ret
def test_linux_fibre_channel_wwns_grains(self):
def multi_mock_open(*file_contents):
mock_files = [mock_open(read_data=content).return_value for content in file_contents]
mock_opener = mock_open()
mock_opener.side_effect = mock_files
return mock_opener
contents = ['0x500143802426baf4', '0x500143802426baf5']
files = ['file1', 'file2']
with patch('glob.glob', MagicMock(return_value=files)):
with patch('salt.utils.files.fopen', multi_mock_open('0x500143802426baf4', '0x500143802426baf5')):
ret = fibre_channel._linux_wwns()
with patch('glob.glob', MagicMock(return_value=files)), \
patch('salt.utils.files.fopen', mock_open(read_data=contents)):
ret = fibre_channel._linux_wwns()
self.assertEqual(ret, ['500143802426baf4', '500143802426baf5'])
assert ret == ['500143802426baf4', '500143802426baf5'], ret

View file

@ -131,19 +131,23 @@ class CpTestCase(TestCase, LoaderModuleMockMixin):
'''
Test if push works with good posix path.
'''
filename = '/saltines/test.file'
with patch('salt.modules.cp.os.path',
MagicMock(isfile=Mock(return_value=True), wraps=cp.os.path)), \
patch.multiple('salt.modules.cp',
_auth=MagicMock(**{'return_value.gen_token.return_value': 'token'}),
__opts__={'id': 'abc', 'file_buffer_size': 10}), \
patch('salt.utils.files.fopen', mock_open(read_data=b'content')), \
patch('salt.utils.files.fopen', mock_open(read_data=b'content')) as m_open, \
patch('salt.transport.Channel.factory', MagicMock()):
response = cp.push('/saltines/test.file')
self.assertEqual(response, True)
self.assertEqual(salt.utils.files.fopen().read.call_count, 2) # pylint: disable=resource-leakage
response = cp.push(filename)
assert response, response
num_opens = len(m_open.filehandles[filename])
assert num_opens == 1, num_opens
fh_ = m_open.filehandles[filename][0]
assert fh_.read.call_count == 2, fh_.read.call_count
salt.transport.Channel.factory({}).send.assert_called_once_with(
dict(
loc=salt.utils.files.fopen().tell(), # pylint: disable=resource-leakage
loc=fh_.tell(), # pylint: disable=resource-leakage
cmd='_file_recv',
tok='token',
path=['saltines', 'test.file'],

View file

@ -13,7 +13,6 @@ from tests.support.mock import (
MagicMock,
patch,
mock_open,
call,
NO_MOCK,
NO_MOCK_REASON
)
@ -51,24 +50,23 @@ mock_soa_zone = salt.utils.stringutils.to_str(
'1 PTR localhost.')
if NO_MOCK is False:
mock_calls_list = [
call.read(),
call.write(salt.utils.stringutils.to_str('##\n')),
call.write(salt.utils.stringutils.to_str('# Host Database\n')),
call.write(salt.utils.stringutils.to_str('#\n')),
call.write(salt.utils.stringutils.to_str('# localhost is used to configure the '
'loopback interface\n')),
call.write(salt.utils.stringutils.to_str('# when the system is booting. Do not '
'change this entry.\n')),
call.write(salt.utils.stringutils.to_str('##\n')),
call.write(salt.utils.stringutils.to_str('127.0.0.1 localhost')),
call.write(salt.utils.stringutils.to_str('\n')),
call.write(salt.utils.stringutils.to_str('255.255.255.255 broadcasthost')),
call.write(salt.utils.stringutils.to_str('\n')),
call.write(salt.utils.stringutils.to_str('::1 localhost')),
call.write(salt.utils.stringutils.to_str('\n')),
call.write(salt.utils.stringutils.to_str('fe80::1%lo0 localhost')),
call.write(salt.utils.stringutils.to_str('\n'))]
mock_writes_list = salt.utils.data.decode([
'##\n',
'# Host Database\n',
'#\n',
'# localhost is used to configure the loopback interface\n',
'# when the system is booting. Do not change this entry.\n',
'##\n',
'127.0.0.1 localhost',
'\n',
'255.255.255.255 broadcasthost',
'\n',
'::1 localhost',
'\n',
'fe80::1%lo0 localhost',
'\n'
], to_str=True
)
@skipIf(NO_MOCK, NO_MOCK_REASON)
@ -84,18 +82,21 @@ class DNSUtilTestCase(TestCase):
with patch('salt.utils.files.fopen', mock_open(read_data=mock_hosts_file)) as m_open, \
patch('salt.modules.dnsutil.parse_hosts', MagicMock(return_value=mock_hosts_file_rtn)):
dnsutil.hosts_append('/etc/hosts', '127.0.0.1', 'ad1.yuk.co,ad2.yuk.co')
helper_open = m_open()
helper_open.write.assert_called_once_with(
salt.utils.stringutils.to_str('\n127.0.0.1 ad1.yuk.co ad2.yuk.co'))
writes = m_open.write_calls()
# We should have called .write() only once, with the expected
# content
num_writes = len(writes)
assert num_writes == 1, num_writes
expected = salt.utils.stringutils.to_str('\n127.0.0.1 ad1.yuk.co ad2.yuk.co')
assert writes[0] == expected, writes[0]
def test_hosts_remove(self):
to_remove = 'ad1.yuk.co'
new_mock_file = mock_hosts_file + '\n127.0.0.1 ' + to_remove + '\n'
with patch('salt.utils.files.fopen', mock_open(read_data=new_mock_file)) as m_open:
dnsutil.hosts_remove('/etc/hosts', to_remove)
helper_open = m_open()
calls_list = helper_open.method_calls
self.assertEqual(calls_list, mock_calls_list)
writes = m_open.write_calls()
assert writes == mock_writes_list, writes
@skipIf(True, 'Waiting on bug report fixes')
def test_parse_zone(self):

View file

@ -166,6 +166,7 @@ class DockerTestCase(TestCase, LoaderModuleMockMixin):
with patch.dict(docker_mod.__salt__,
{'mine.send': mine_send,
'container_resource.run': MagicMock(),
'config.get': MagicMock(return_value=True),
'cp.cache_file': MagicMock(return_value=False)}):
with patch.dict(docker_mod.__utils__,
{'docker.get_client_args': client_args_mock}):
@ -174,6 +175,44 @@ class DockerTestCase(TestCase, LoaderModuleMockMixin):
mine_send.assert_called_with('docker.ps', verbose=True, all=True,
host=True)
def test_update_mine(self):
    '''
    Test the docker.update_mine config option: when disabled, container
    state changes must not send mine updates; when enabled, exactly one
    mine update must be sent.
    '''
    def config_get(update_mine):
        # Build a config.get side-effect function with docker.update_mine
        # set to the desired value. The two previous copy-pasted dicts
        # differed only in this one key; a single factory keeps them in
        # sync. The ``default`` parameter is accepted (config.get passes
        # it) but unused, matching the original behavior.
        values = {'base_url': docker_mod.NOTSET,
                  'version': docker_mod.NOTSET,
                  'docker.url': docker_mod.NOTSET,
                  'docker.version': docker_mod.NOTSET,
                  'docker.machine': docker_mod.NOTSET,
                  'docker.update_mine': update_mine}
        return lambda val, default: values[val]

    mine_mock = Mock()
    dunder_salt = {
        'config.get': MagicMock(side_effect=config_get(False)),
        'mine.send': mine_mock,
    }
    with patch.dict(docker_mod.__salt__, dunder_salt), \
            patch.dict(docker_mod.__context__, {'docker.client': Mock()}), \
            patch.object(docker_mod, 'state', MagicMock(return_value='stopped')):
        # update_mine disabled: no mine update should be sent
        docker_mod.stop('foo', timeout=1)
        mine_mock.assert_not_called()

    with patch.dict(docker_mod.__salt__, dunder_salt), \
            patch.dict(docker_mod.__context__, {'docker.client': Mock()}), \
            patch.object(docker_mod, 'state', MagicMock(return_value='stopped')):
        dunder_salt['config.get'].side_effect = config_get(True)
        # update_mine enabled: exactly one mine update should be sent.
        # NOTE(review): self.assert_called_once is presumably a helper on
        # Salt's TestCase (compat for mock versions lacking
        # Mock.assert_called_once) -- confirm before changing to
        # mine_mock.assert_called_once().
        docker_mod.stop('foo', timeout=1)
        self.assert_called_once(mine_mock)
@skipIf(_docker_py_version() < (1, 5, 0),
'docker module must be installed to run this test or is too old. >=1.5.0')
def test_list_networks(self, *args):

View file

@ -8,10 +8,11 @@ import tempfile
import textwrap
# Import Salt Testing libs
from tests.support.helpers import with_tempfile
from tests.support.mixins import LoaderModuleMockMixin
from tests.support.paths import TMP
from tests.support.unit import TestCase, skipIf
from tests.support.mock import MagicMock, Mock, patch, mock_open
from tests.support.mock import MagicMock, Mock, patch, mock_open, DEFAULT
try:
import pytest
@ -39,6 +40,10 @@ here
'''
class DummyStat(object):
st_size = 123
class FileReplaceTestCase(TestCase, LoaderModuleMockMixin):
def setup_loader_modules(self):
@ -1176,10 +1181,8 @@ class FilemodLineTests(TestCase, LoaderModuleMockMixin):
self.assertEqual(
filemod._starts_till(src=src, probe='and here is something'), -1)
@patch('os.path.realpath', MagicMock())
@patch('os.path.isfile', MagicMock(return_value=True))
@patch('os.stat', MagicMock())
def test_line_insert_after_no_pattern(self):
@with_tempfile()
def test_line_insert_after_no_pattern(self, name):
'''
Test for file.line for insertion after specific line, using no pattern.
@ -1198,19 +1201,26 @@ class FilemodLineTests(TestCase, LoaderModuleMockMixin):
' - /srv/custom'
])
cfg_content = '- /srv/custom'
files_fopen = mock_open(read_data=file_content)
with patch('salt.utils.files.fopen', files_fopen):
atomic_opener = mock_open()
with patch('salt.utils.atomicfile.atomic_open', atomic_opener):
filemod.line('foo', content=cfg_content, after='- /srv/salt', mode='insert')
self.assertEqual(len(atomic_opener().write.call_args_list), 1)
self.assertEqual(atomic_opener().write.call_args_list[0][0][0],
file_modified)
@patch('os.path.realpath', MagicMock())
@patch('os.path.isfile', MagicMock(return_value=True))
@patch('os.stat', MagicMock())
def test_line_insert_after_pattern(self):
isfile_mock = MagicMock(side_effect=lambda x: True if x == name else DEFAULT)
with patch('os.path.isfile', isfile_mock), \
patch('os.stat', MagicMock(return_value=DummyStat())), \
patch('salt.utils.files.fopen', mock_open(read_data=file_content)), \
patch('salt.utils.atomicfile.atomic_open', mock_open()) as atomic_open_mock:
filemod.line(name, content=cfg_content, after='- /srv/salt', mode='insert')
handles = atomic_open_mock.filehandles[name]
# We should only have opened the file once
open_count = len(handles)
assert open_count == 1, open_count
# We should only have invoked .write() once...
write_count = len(handles[0].write.call_args_list)
assert write_count == 1, write_count
# ... with the updated content
write_content = handles[0].write.call_args_list[0][0][0]
assert write_content == file_modified, write_content
@with_tempfile()
def test_line_insert_after_pattern(self, name):
'''
Test for file.line for insertion after specific line, using pattern.
@ -1239,20 +1249,59 @@ class FilemodLineTests(TestCase, LoaderModuleMockMixin):
' custom:',
' - /srv/custom'
])
isfile_mock = MagicMock(side_effect=lambda x: True if x == name else DEFAULT)
for after_line in ['file_r.*', '.*roots']:
files_fopen = mock_open(read_data=file_content)
with patch('salt.utils.files.fopen', files_fopen):
atomic_opener = mock_open()
with patch('salt.utils.atomicfile.atomic_open', atomic_opener):
filemod.line('foo', content=cfg_content, after=after_line, mode='insert', indent=False)
self.assertEqual(len(atomic_opener().write.call_args_list), 1)
self.assertEqual(atomic_opener().write.call_args_list[0][0][0],
file_modified)
with patch('os.path.isfile', isfile_mock), \
patch('os.stat', MagicMock(return_value=DummyStat())), \
patch('salt.utils.files.fopen',
mock_open(read_data=file_content)), \
patch('salt.utils.atomicfile.atomic_open',
mock_open()) as atomic_open_mock:
filemod.line(name, content=cfg_content, after=after_line, mode='insert', indent=False)
handles = atomic_open_mock.filehandles[name]
# We should only have opened the file once
open_count = len(handles)
assert open_count == 1, open_count
# We should only have invoked .write() once...
write_count = len(handles[0].write.call_args_list)
assert write_count == 1, write_count
# ... with the updated content
write_content = handles[0].write.call_args_list[0][0][0]
assert write_content == file_modified, write_content
@patch('os.path.realpath', MagicMock())
@patch('os.path.isfile', MagicMock(return_value=True))
@patch('os.stat', MagicMock())
def test_line_insert_before(self):
@with_tempfile()
def test_line_insert_multi_line_content_after_unicode(self, name):
'''
Test for file.line for insertion after specific line with Unicode
See issue #48113
:return:
'''
file_content = ("This is a line\nThis is another line")
file_modified = salt.utils.stringutils.to_str("This is a line\nThis is another line\nThis is a line with unicode Ŷ")
cfg_content = "This is a line with unicode Ŷ"
isfile_mock = MagicMock(side_effect=lambda x: True if x == name else DEFAULT)
for after_line in ['This is another line']:
with patch('os.path.isfile', isfile_mock), \
patch('os.stat', MagicMock(return_value=DummyStat())), \
patch('salt.utils.files.fopen',
mock_open(read_data=file_content)), \
patch('salt.utils.atomicfile.atomic_open',
mock_open()) as atomic_open_mock:
filemod.line(name, content=cfg_content, after=after_line, mode='insert', indent=False)
handles = atomic_open_mock.filehandles[name]
# We should only have opened the file once
open_count = len(handles)
assert open_count == 1, open_count
# We should only have invoked .write() once...
write_count = len(handles[0].write.call_args_list)
assert write_count == 1, write_count
# ... with the updated content
write_content = handles[0].write.call_args_list[0][0][0]
assert write_content == file_modified, write_content
@with_tempfile()
def test_line_insert_before(self, name):
'''
Test for file.line for insertion before specific line, using pattern and no patterns.
@ -1273,15 +1322,26 @@ class FilemodLineTests(TestCase, LoaderModuleMockMixin):
' - /srv/sugar'
])
cfg_content = '- /srv/custom'
isfile_mock = MagicMock(side_effect=lambda x: True if x == name else DEFAULT)
for before_line in ['/srv/salt', '/srv/sa.*t']:
files_fopen = mock_open(read_data=file_content)
with patch('salt.utils.files.fopen', files_fopen):
atomic_opener = mock_open()
with patch('salt.utils.atomicfile.atomic_open', atomic_opener):
filemod.line('foo', content=cfg_content, before=before_line, mode='insert')
self.assertEqual(len(atomic_opener().write.call_args_list), 1)
self.assertEqual(atomic_opener().write.call_args_list[0][0][0],
file_modified)
with patch('os.path.isfile', isfile_mock), \
patch('os.stat', MagicMock(return_value=DummyStat())), \
patch('salt.utils.files.fopen',
mock_open(read_data=file_content)), \
patch('salt.utils.atomicfile.atomic_open',
mock_open()) as atomic_open_mock:
filemod.line(name, content=cfg_content, before=before_line, mode='insert')
handles = atomic_open_mock.filehandles[name]
# We should only have opened the file once
open_count = len(handles)
assert open_count == 1, open_count
# We should only have invoked .write() once...
write_count = len(handles[0].write.call_args_list)
assert write_count == 1, write_count
# ... with the updated content
write_content = handles[0].write.call_args_list[0][0][0]
assert write_content == file_modified, write_content
@patch('os.path.realpath', MagicMock())
@patch('os.path.isfile', MagicMock(return_value=True))
@ -1309,10 +1369,8 @@ class FilemodLineTests(TestCase, LoaderModuleMockMixin):
self.assertEqual(cm.exception.strerror,
'Found more than expected occurrences in "before" expression')
@patch('os.path.realpath', MagicMock())
@patch('os.path.isfile', MagicMock(return_value=True))
@patch('os.stat', MagicMock())
def test_line_insert_before_after(self):
@with_tempfile()
def test_line_insert_before_after(self, name):
'''
Test for file.line for insertion before specific line, using pattern and no patterns.
@ -1335,20 +1393,29 @@ class FilemodLineTests(TestCase, LoaderModuleMockMixin):
' - /srv/sugar'
])
cfg_content = '- /srv/coriander'
for b_line, a_line in [('/srv/sugar', '/srv/salt')]:
files_fopen = mock_open(read_data=file_content)
with patch('salt.utils.files.fopen', files_fopen):
atomic_opener = mock_open()
with patch('salt.utils.atomicfile.atomic_open', atomic_opener):
filemod.line('foo', content=cfg_content, before=b_line, after=a_line, mode='insert')
self.assertEqual(len(atomic_opener().write.call_args_list), 1)
self.assertEqual(atomic_opener().write.call_args_list[0][0][0],
file_modified)
@patch('os.path.realpath', MagicMock())
@patch('os.path.isfile', MagicMock(return_value=True))
@patch('os.stat', MagicMock())
def test_line_insert_start(self):
isfile_mock = MagicMock(side_effect=lambda x: True if x == name else DEFAULT)
for b_line, a_line in [('/srv/sugar', '/srv/salt')]:
with patch('os.path.isfile', isfile_mock), \
patch('os.stat', MagicMock(return_value=DummyStat())), \
patch('salt.utils.files.fopen',
mock_open(read_data=file_content)), \
patch('salt.utils.atomicfile.atomic_open',
mock_open()) as atomic_open_mock:
filemod.line(name, content=cfg_content, before=b_line, after=a_line, mode='insert')
handles = atomic_open_mock.filehandles[name]
# We should only have opened the file once
open_count = len(handles)
assert open_count == 1, open_count
# We should only have invoked .write() once...
write_count = len(handles[0].write.call_args_list)
assert write_count == 1, write_count
# ... with the updated content
write_content = handles[0].write.call_args_list[0][0][0]
assert write_content == file_modified, write_content
@with_tempfile()
def test_line_insert_start(self, name):
'''
Test for file.line for insertion at the beginning of the file
:return:
@ -1367,19 +1434,28 @@ class FilemodLineTests(TestCase, LoaderModuleMockMixin):
' - /srv/salt',
' - /srv/sugar'
])
files_fopen = mock_open(read_data=file_content)
with patch('salt.utils.files.fopen', files_fopen):
atomic_opener = mock_open()
with patch('salt.utils.atomicfile.atomic_open', atomic_opener):
filemod.line('foo', content=cfg_content, location='start', mode='insert')
self.assertEqual(len(atomic_opener().write.call_args_list), 1)
self.assertEqual(atomic_opener().write.call_args_list[0][0][0],
file_modified)
@patch('os.path.realpath', MagicMock())
@patch('os.path.isfile', MagicMock(return_value=True))
@patch('os.stat', MagicMock())
def test_line_insert_end(self):
isfile_mock = MagicMock(side_effect=lambda x: True if x == name else DEFAULT)
with patch('os.path.isfile', isfile_mock), \
patch('os.stat', MagicMock(return_value=DummyStat())), \
patch('salt.utils.files.fopen',
mock_open(read_data=file_content)), \
patch('salt.utils.atomicfile.atomic_open',
mock_open()) as atomic_open_mock:
filemod.line(name, content=cfg_content, location='start', mode='insert')
handles = atomic_open_mock.filehandles[name]
# We should only have opened the file once
open_count = len(handles)
assert open_count == 1, open_count
# We should only have invoked .write() once...
write_count = len(handles[0].write.call_args_list)
assert write_count == 1, write_count
# ... with the updated content
write_content = handles[0].write.call_args_list[0][0][0]
assert write_content == file_modified, write_content
@with_tempfile()
def test_line_insert_end(self, name):
'''
Test for file.line for insertion at the end of the file (append)
:return:
@ -1398,19 +1474,28 @@ class FilemodLineTests(TestCase, LoaderModuleMockMixin):
' - /srv/sugar',
' ' + cfg_content
])
files_fopen = mock_open(read_data=file_content)
with patch('salt.utils.files.fopen', files_fopen):
atomic_opener = mock_open()
with patch('salt.utils.atomicfile.atomic_open', atomic_opener):
filemod.line('foo', content=cfg_content, location='end', mode='insert')
self.assertEqual(len(atomic_opener().write.call_args_list), 1)
self.assertEqual(atomic_opener().write.call_args_list[0][0][0],
file_modified)
@patch('os.path.realpath', MagicMock())
@patch('os.path.isfile', MagicMock(return_value=True))
@patch('os.stat', MagicMock())
def test_line_insert_ensure_before(self):
isfile_mock = MagicMock(side_effect=lambda x: True if x == name else DEFAULT)
with patch('os.path.isfile', isfile_mock), \
patch('os.stat', MagicMock(return_value=DummyStat())), \
patch('salt.utils.files.fopen',
mock_open(read_data=file_content)), \
patch('salt.utils.atomicfile.atomic_open',
mock_open()) as atomic_open_mock:
filemod.line(name, content=cfg_content, location='end', mode='insert')
handles = atomic_open_mock.filehandles[name]
# We should only have opened the file once
open_count = len(handles)
assert open_count == 1, open_count
# We should only have invoked .write() once...
write_count = len(handles[0].write.call_args_list)
assert write_count == 1, write_count
# ... with the updated content
write_content = handles[0].write.call_args_list[0][0][0]
assert write_content == file_modified, write_content
@with_tempfile()
def test_line_insert_ensure_before(self, name):
'''
Test for file.line for insertion ensuring the line is before
:return:
@ -1427,14 +1512,25 @@ class FilemodLineTests(TestCase, LoaderModuleMockMixin):
cfg_content,
'exit 0'
])
files_fopen = mock_open(read_data=file_content)
with patch('salt.utils.files.fopen', files_fopen):
atomic_opener = mock_open()
with patch('salt.utils.atomicfile.atomic_open', atomic_opener):
filemod.line('foo', content=cfg_content, before='exit 0', mode='ensure')
self.assertEqual(len(atomic_opener().write.call_args_list), 1)
self.assertEqual(atomic_opener().write.call_args_list[0][0][0],
file_modified)
isfile_mock = MagicMock(side_effect=lambda x: True if x == name else DEFAULT)
with patch('os.path.isfile', isfile_mock), \
patch('os.stat', MagicMock(return_value=DummyStat())), \
patch('salt.utils.files.fopen',
mock_open(read_data=file_content)), \
patch('salt.utils.atomicfile.atomic_open',
mock_open()) as atomic_open_mock:
filemod.line(name, content=cfg_content, before='exit 0', mode='ensure')
handles = atomic_open_mock.filehandles[name]
# We should only have opened the file once
open_count = len(handles)
assert open_count == 1, open_count
# We should only have invoked .write() once...
write_count = len(handles[0].write.call_args_list)
assert write_count == 1, write_count
# ... with the updated content
write_content = handles[0].write.call_args_list[0][0][0]
assert write_content == file_modified, write_content
@patch('os.path.realpath', MagicMock())
@patch('os.path.isfile', MagicMock(return_value=True))
@ -1463,10 +1559,8 @@ class FilemodLineTests(TestCase, LoaderModuleMockMixin):
self.assertEqual(atomic_opener().write.call_args_list[0][0][0],
file_modified)
@patch('os.path.realpath', MagicMock())
@patch('os.path.isfile', MagicMock(return_value=True))
@patch('os.stat', MagicMock())
def test_line_insert_ensure_after(self):
@with_tempfile()
def test_line_insert_ensure_after(self, name):
'''
Test for file.line for insertion ensuring the line is after
:return:
@ -1481,19 +1575,28 @@ class FilemodLineTests(TestCase, LoaderModuleMockMixin):
'/etc/init.d/someservice restart',
cfg_content
])
files_fopen = mock_open(read_data=file_content)
with patch('salt.utils.files.fopen', files_fopen):
atomic_opener = mock_open()
with patch('salt.utils.atomicfile.atomic_open', atomic_opener):
filemod.line('foo', content=cfg_content, after='/etc/init.d/someservice restart', mode='ensure')
self.assertEqual(len(atomic_opener().write.call_args_list), 1)
self.assertEqual(atomic_opener().write.call_args_list[0][0][0],
file_modified)
@patch('os.path.realpath', MagicMock())
@patch('os.path.isfile', MagicMock(return_value=True))
@patch('os.stat', MagicMock())
def test_line_insert_ensure_beforeafter_twolines(self):
isfile_mock = MagicMock(side_effect=lambda x: True if x == name else DEFAULT)
with patch('os.path.isfile', isfile_mock), \
patch('os.stat', MagicMock(return_value=DummyStat())), \
patch('salt.utils.files.fopen',
mock_open(read_data=file_content)), \
patch('salt.utils.atomicfile.atomic_open',
mock_open()) as atomic_open_mock:
filemod.line(name, content=cfg_content, after='/etc/init.d/someservice restart', mode='ensure')
handles = atomic_open_mock.filehandles[name]
# We should only have opened the file once
open_count = len(handles)
assert open_count == 1, open_count
# We should only have invoked .write() once...
write_count = len(handles[0].write.call_args_list)
assert write_count == 1, write_count
# ... with the updated content
write_content = handles[0].write.call_args_list[0][0][0]
assert write_content == file_modified, write_content
@with_tempfile()
def test_line_insert_ensure_beforeafter_twolines(self, name):
'''
Test for file.line for insertion ensuring the line is between two lines
:return:
@ -1507,23 +1610,32 @@ class FilemodLineTests(TestCase, LoaderModuleMockMixin):
# pylint: enable=W1401
after, before = file_content.split(os.linesep)
file_modified = os.linesep.join([after, cfg_content, before])
for (_after, _before) in [(after, before), ('NAME_.*', 'SKEL_.*')]:
files_fopen = mock_open(read_data=file_content)
with patch('salt.utils.files.fopen', files_fopen):
atomic_opener = mock_open()
with patch('salt.utils.atomicfile.atomic_open', atomic_opener):
filemod.line('foo', content=cfg_content, after=_after, before=_before, mode='ensure')
self.assertEqual(len(atomic_opener().write.call_args_list), 1)
self.assertEqual(atomic_opener().write.call_args_list[0][0][0],
file_modified)
@patch('os.path.realpath', MagicMock())
@patch('os.path.isfile', MagicMock(return_value=True))
@patch('os.stat', MagicMock())
def test_line_insert_ensure_beforeafter_twolines_exists(self):
isfile_mock = MagicMock(side_effect=lambda x: True if x == name else DEFAULT)
for (_after, _before) in [(after, before), ('NAME_.*', 'SKEL_.*')]:
with patch('os.path.isfile', isfile_mock), \
patch('os.stat', MagicMock(return_value=DummyStat())), \
patch('salt.utils.files.fopen',
mock_open(read_data=file_content)), \
patch('salt.utils.atomicfile.atomic_open',
mock_open()) as atomic_open_mock:
filemod.line(name, content=cfg_content, after=_after, before=_before, mode='ensure')
handles = atomic_open_mock.filehandles[name]
# We should only have opened the file once
open_count = len(handles)
assert open_count == 1, open_count
# We should only have invoked .write() once...
write_count = len(handles[0].write.call_args_list)
assert write_count == 1, write_count
# ... with the updated content
write_content = handles[0].write.call_args_list[0][0][0]
assert write_content == file_modified, write_content
@with_tempfile()
def test_line_insert_ensure_beforeafter_twolines_exists(self, name):
'''
Test for file.line for insertion ensuring the line is between two lines where content already exists
:return:
Test for file.line for insertion ensuring the line is between two lines
where content already exists
'''
cfg_content = 'EXTRA_GROUPS="dialout"'
# pylint: disable=W1401
@ -1534,24 +1646,28 @@ class FilemodLineTests(TestCase, LoaderModuleMockMixin):
])
# pylint: enable=W1401
after, before = file_content.split(os.linesep)[0], file_content.split(os.linesep)[2]
isfile_mock = MagicMock(side_effect=lambda x: True if x == name else DEFAULT)
for (_after, _before) in [(after, before), ('NAME_.*', 'SKEL_.*')]:
files_fopen = mock_open(read_data=file_content)
with patch('salt.utils.files.fopen', files_fopen):
atomic_opener = mock_open()
with patch('salt.utils.atomicfile.atomic_open', atomic_opener):
result = filemod.line('foo', content=cfg_content, after=_after, before=_before, mode='ensure')
self.assertEqual(len(atomic_opener().write.call_args_list), 0)
self.assertEqual(result, False)
with patch('os.path.isfile', isfile_mock), \
patch('os.stat', MagicMock(return_value=DummyStat())), \
patch('salt.utils.files.fopen',
mock_open(read_data=file_content)), \
patch('salt.utils.atomicfile.atomic_open',
mock_open()) as atomic_open_mock:
result = filemod.line('foo', content=cfg_content, after=_after, before=_before, mode='ensure')
# We should not have opened the file
assert not atomic_open_mock.filehandles
# No changes should have been made
assert result is False
@patch('os.path.realpath', MagicMock())
@patch('os.path.isfile', MagicMock(return_value=True))
@patch('os.stat', MagicMock())
def test_line_insert_ensure_beforeafter_rangelines(self):
'''
Test for file.line for insertion ensuring the line is between two lines within the range.
This expected to bring no changes.
:return:
Test for file.line for insertion ensuring the line is between two lines
within the range. This expected to bring no changes.
'''
cfg_content = 'EXTRA_GROUPS="dialout cdrom floppy audio video plugdev users"'
# pylint: disable=W1401
@ -1570,10 +1686,8 @@ class FilemodLineTests(TestCase, LoaderModuleMockMixin):
'Found more than one line between boundaries "before" and "after"',
six.text_type(cmd_err))
@patch('os.path.realpath', MagicMock())
@patch('os.path.isfile', MagicMock(return_value=True))
@patch('os.stat', MagicMock())
def test_line_delete(self):
@with_tempfile()
def test_line_delete(self, name):
'''
Test for file.line for deletion of specific line
:return:
@ -1591,20 +1705,28 @@ class FilemodLineTests(TestCase, LoaderModuleMockMixin):
' - /srv/salt',
' - /srv/sugar'
])
isfile_mock = MagicMock(side_effect=lambda x: True if x == name else DEFAULT)
for content in ['/srv/pepper', '/srv/pepp*', '/srv/p.*', '/sr.*pe.*']:
files_fopen = mock_open(read_data=file_content)
with patch('salt.utils.files.fopen', files_fopen):
atomic_opener = mock_open()
with patch('salt.utils.atomicfile.atomic_open', atomic_opener):
filemod.line('foo', content=content, mode='delete')
self.assertEqual(len(atomic_opener().write.call_args_list), 1)
self.assertEqual(atomic_opener().write.call_args_list[0][0][0],
file_modified)
with patch('os.path.isfile', isfile_mock), \
patch('os.stat', MagicMock(return_value=DummyStat())), \
patch('salt.utils.files.fopen', files_fopen), \
patch('salt.utils.atomicfile.atomic_open', mock_open()) as atomic_open_mock:
filemod.line(name, content=content, mode='delete')
handles = atomic_open_mock.filehandles[name]
# We should only have opened the file once
open_count = len(handles)
assert open_count == 1, open_count
# We should only have invoked .write() once...
write_count = len(handles[0].write.call_args_list)
assert write_count == 1, write_count
# ... with the updated content
write_content = handles[0].write.call_args_list[0][0][0]
assert write_content == file_modified, write_content
@patch('os.path.realpath', MagicMock())
@patch('os.path.isfile', MagicMock(return_value=True))
@patch('os.stat', MagicMock())
def test_line_replace(self):
@with_tempfile()
def test_line_replace(self, name):
'''
Test for file.line for replacement of specific line
:return:
@ -1623,15 +1745,25 @@ class FilemodLineTests(TestCase, LoaderModuleMockMixin):
' - /srv/natrium-chloride',
' - /srv/sugar'
])
isfile_mock = MagicMock(side_effect=lambda x: True if x == name else DEFAULT)
for match in ['/srv/pepper', '/srv/pepp*', '/srv/p.*', '/sr.*pe.*']:
files_fopen = mock_open(read_data=file_content)
with patch('salt.utils.files.fopen', files_fopen):
atomic_opener = mock_open()
with patch('salt.utils.atomicfile.atomic_open', atomic_opener):
filemod.line('foo', content='- /srv/natrium-chloride', match=match, mode='replace')
self.assertEqual(len(atomic_opener().write.call_args_list), 1)
self.assertEqual(atomic_opener().write.call_args_list[0][0][0],
file_modified)
with patch('os.path.isfile', isfile_mock), \
patch('os.stat', MagicMock(return_value=DummyStat())), \
patch('salt.utils.files.fopen', files_fopen), \
patch('salt.utils.atomicfile.atomic_open', mock_open()) as atomic_open_mock:
filemod.line(name, content='- /srv/natrium-chloride', match=match, mode='replace')
handles = atomic_open_mock.filehandles[name]
# We should only have opened the file once
open_count = len(handles)
assert open_count == 1, open_count
# We should only have invoked .write() once...
write_count = len(handles[0].write.call_args_list)
assert write_count == 1, write_count
# ... with the updated content
write_content = handles[0].write.call_args_list[0][0][0]
assert write_content == file_modified, write_content
class FileBasicsTestCase(TestCase, LoaderModuleMockMixin):

View file

@ -5,6 +5,7 @@
# Import Python libs
from __future__ import absolute_import, print_function, unicode_literals
import errno
# Import Salt Testing Libs
from tests.support.mixins import LoaderModuleMockMixin
@ -43,15 +44,13 @@ class GrublegacyTestCase(TestCase, LoaderModuleMockMixin):
'''
Test for Parse GRUB conf file
'''
mock = MagicMock(side_effect=IOError('foo'))
with patch('salt.utils.files.fopen', mock):
with patch.object(grub_legacy, '_detect_conf', return_value='A'):
self.assertRaises(CommandExecutionError, grub_legacy.conf)
file_data = IOError(errno.EACCES, 'Permission denied')
with patch('salt.utils.files.fopen', mock_open(read_data=file_data)), \
patch.object(grub_legacy, '_detect_conf', return_value='A'):
self.assertRaises(CommandExecutionError, grub_legacy.conf)
file_data = salt.utils.stringutils.to_str('\n'.join(['#', 'A B C D,E,F G H']))
with patch('salt.utils.files.fopen',
mock_open(read_data=file_data), create=True) as f_mock:
f_mock.return_value.__iter__.return_value = file_data.splitlines()
with patch.object(grub_legacy, '_detect_conf', return_value='A'):
self.assertEqual(grub_legacy.conf(),
{'A': 'B C D,E,F G H', 'stanzas': []})
with patch('salt.utils.files.fopen', mock_open(read_data=file_data)), \
patch.object(grub_legacy, '_detect_conf', return_value='A'):
conf = grub_legacy.conf()
assert conf == {'A': 'B C D,E,F G H', 'stanzas': []}, conf

View file

@ -114,7 +114,7 @@ class HostsTestCase(TestCase, LoaderModuleMockMixin):
with patch('salt.modules.hosts.__get_hosts_filename',
MagicMock(return_value='/etc/hosts')), \
patch('os.path.isfile', MagicMock(return_value=True)), \
patch('salt.utils.files.fopen', mock_open()):
patch('salt.utils.files.fopen', mock_open(b'')):
mock_opt = MagicMock(return_value=None)
with patch.dict(hosts.__salt__, {'config.option': mock_opt}):
self.assertTrue(hosts.set_host('10.10.10.10', 'Salt1'))
@ -212,7 +212,7 @@ class HostsTestCase(TestCase, LoaderModuleMockMixin):
'''
Tests if specified host entry gets removed from the hosts file
'''
with patch('salt.utils.files.fopen', mock_open()), \
with patch('salt.utils.files.fopen', mock_open(b'')), \
patch('salt.modules.hosts.__get_hosts_filename',
MagicMock(return_value='/etc/hosts')), \
patch('salt.modules.hosts.has_pair',

View file

@ -1473,29 +1473,26 @@ class Test_Junos_Module(TestCase, LoaderModuleMockMixin, XMLEqualityMixin):
with patch('jnpr.junos.device.Device.execute') as mock_execute:
mock_execute.return_value = etree.XML(
'<rpc-reply>text rpc reply</rpc-reply>')
m = mock_open()
with patch('salt.utils.files.fopen', m, create=True):
with patch('salt.utils.files.fopen', mock_open(), create=True) as m_open:
junos.rpc('get-chassis-inventory', '/path/to/file', format='text')
handle = m()
handle.write.assert_called_with('text rpc reply')
writes = m_open.write_calls()
assert writes == ['text rpc reply'], writes
def test_rpc_write_file_format_json(self):
with patch('jnpr.junos.device.Device.execute') as mock_execute, \
patch('salt.utils.json.dumps') as mock_dumps:
mock_dumps.return_value = 'json rpc reply'
m = mock_open()
with patch('salt.utils.files.fopen', m, create=True):
with patch('salt.utils.files.fopen', mock_open(), create=True) as m_open:
junos.rpc('get-chassis-inventory', '/path/to/file', format='json')
handle = m()
handle.write.assert_called_with('json rpc reply')
writes = m_open.write_calls()
assert writes == ['json rpc reply'], writes
def test_rpc_write_file(self):
with patch('salt.modules.junos.jxmlease.parse') as mock_parse, \
patch('salt.modules.junos.etree.tostring') as mock_tostring, \
patch('jnpr.junos.device.Device.execute') as mock_execute:
mock_tostring.return_value = 'xml rpc reply'
m = mock_open()
with patch('salt.utils.files.fopen', m, create=True):
with patch('salt.utils.files.fopen', mock_open(), create=True) as m_open:
junos.rpc('get-chassis-inventory', '/path/to/file')
handle = m()
handle.write.assert_called_with('xml rpc reply')
writes = m_open.write_calls()
assert writes == ['xml rpc reply'], writes

View file

@ -101,6 +101,7 @@ class LinuxSysctlTestCase(TestCase, LoaderModuleMockMixin):
'''
Tests successful add of config file when previously not one
'''
config = '/etc/sysctl.conf'
with patch('os.path.isfile', MagicMock(return_value=False)), \
patch('os.path.exists', MagicMock(return_value=True)):
asn_cmd = {'pid': 1337, 'retcode': 0, 'stderr': '',
@ -110,18 +111,20 @@ class LinuxSysctlTestCase(TestCase, LoaderModuleMockMixin):
sys_cmd = 'systemd 208\n+PAM +LIBWRAP'
mock_sys_cmd = MagicMock(return_value=sys_cmd)
with patch('salt.utils.files.fopen', mock_open()) as m_open:
with patch.dict(linux_sysctl.__context__, {'salt.utils.systemd.version': 232}):
with patch.dict(linux_sysctl.__salt__,
{'cmd.run_stdout': mock_sys_cmd,
'cmd.run_all': mock_asn_cmd}):
with patch.dict(systemd.__context__,
{'salt.utils.systemd.booted': True,
'salt.utils.systemd.version': 232}):
linux_sysctl.persist('net.ipv4.ip_forward', 1)
helper_open = m_open()
helper_open.write.assert_called_once_with(
'#\n# Kernel sysctl configuration\n#\n')
with patch('salt.utils.files.fopen', mock_open()) as m_open, \
patch.dict(linux_sysctl.__context__,
{'salt.utils.systemd.version': 232}), \
patch.dict(linux_sysctl.__salt__,
{'cmd.run_stdout': mock_sys_cmd,
'cmd.run_all': mock_asn_cmd}), \
patch.dict(systemd.__context__,
{'salt.utils.systemd.booted': True,
'salt.utils.systemd.version': 232}):
linux_sysctl.persist('net.ipv4.ip_forward', 1, config=config)
writes = m_open.write_calls()
assert writes == [
'#\n# Kernel sysctl configuration\n#\n'
], writes
def test_persist_read_conf_success(self):
'''

View file

@ -17,9 +17,9 @@ from tests.support.mock import (
MagicMock,
mock_open,
patch,
call,
NO_MOCK,
NO_MOCK_REASON
NO_MOCK_REASON,
DEFAULT
)
@ -67,9 +67,9 @@ class DarwinSysctlTestCase(TestCase, LoaderModuleMockMixin):
'''
Tests adding of config file failure
'''
with patch('salt.utils.files.fopen', mock_open()) as m_open, \
read_data = IOError(13, 'Permission denied', '/file')
with patch('salt.utils.files.fopen', mock_open(read_data=read_data)), \
patch('os.path.isfile', MagicMock(return_value=False)):
m_open.side_effect = IOError(13, 'Permission denied', '/file')
self.assertRaises(CommandExecutionError,
mac_sysctl.persist,
'net.inet.icmp.icmplim',
@ -77,29 +77,45 @@ class DarwinSysctlTestCase(TestCase, LoaderModuleMockMixin):
def test_persist_no_conf_success(self):
'''
Tests successful add of config file when previously not one
Tests successful add of config file when it did not already exist
'''
config = '/etc/sysctl.conf'
isfile_mock = MagicMock(
side_effect=lambda x: False if x == config else DEFAULT
)
with patch('salt.utils.files.fopen', mock_open()) as m_open, \
patch('os.path.isfile', MagicMock(return_value=False)):
mac_sysctl.persist('net.inet.icmp.icmplim', 50)
helper_open = m_open()
helper_open.write.assert_called_once_with(
'#\n# Kernel sysctl configuration\n#\n')
patch('os.path.isfile', isfile_mock):
mac_sysctl.persist('net.inet.icmp.icmplim', 50, config=config)
# We only should have opened the one file
num_handles = len(m_open.filehandles)
assert num_handles == 1, num_handles
writes = m_open.write_calls()
# We should have called .write() only once, with the expected
# content
num_writes = len(writes)
assert num_writes == 1, num_writes
assert writes[0] == '#\n# Kernel sysctl configuration\n#\n', writes[0]
def test_persist_success(self):
'''
Tests successful write to existing sysctl file
'''
config = '/etc/sysctl.conf'
to_write = '#\n# Kernel sysctl configuration\n#\n'
m_calls_list = [call.writelines([
writelines_calls = [[
'#\n',
'# Kernel sysctl configuration\n',
'#\n',
'net.inet.icmp.icmplim=50\n',
])]
]]
isfile_mock = MagicMock(
side_effect=lambda x: True if x == config else DEFAULT
)
with patch('salt.utils.files.fopen', mock_open(read_data=to_write)) as m_open, \
patch('os.path.isfile', MagicMock(return_value=True)):
mac_sysctl.persist('net.inet.icmp.icmplim', 50, config=to_write)
helper_open = m_open()
calls_list = helper_open.method_calls
self.assertEqual(calls_list, m_calls_list)
patch('os.path.isfile', isfile_mock):
mac_sysctl.persist('net.inet.icmp.icmplim', 50, config=config)
# We only should have opened the one file
num_handles = len(m_open.filehandles)
assert num_handles == 1, num_handles
writes = m_open.writelines_calls()
assert writes == writelines_calls, writes

View file

@ -87,20 +87,19 @@ class MountTestCase(TestCase, LoaderModuleMockMixin):
with patch.object(os.path, 'isfile', mock):
self.assertEqual(mount.fstab(), {})
file_data = '\n'.join(['#', 'A B C D,E,F G H'])
mock = MagicMock(return_value=True)
with patch.dict(mount.__grains__, {'kernel': ''}):
with patch.object(os.path, 'isfile', mock):
file_data = '\n'.join(['#',
'A B C D,E,F G H'])
with patch('salt.utils.files.fopen',
mock_open(read_data=file_data),
create=True) as m:
m.return_value.__iter__.return_value = file_data.splitlines()
self.assertEqual(mount.fstab(), {'B': {'device': 'A',
'dump': 'G',
'fstype': 'C',
'opts': ['D', 'E', 'F'],
'pass': 'H'}})
with patch.dict(mount.__grains__, {'kernel': ''}), \
patch.object(os.path, 'isfile', mock), \
patch('salt.utils.files.fopen', mock_open(read_data=file_data)):
fstab = mount.fstab()
assert fstab == {
'B': {'device': 'A',
'dump': 'G',
'fstype': 'C',
'opts': ['D', 'E', 'F'],
'pass': 'H'}
}, fstab
def test_vfstab(self):
'''
@ -110,21 +109,23 @@ class MountTestCase(TestCase, LoaderModuleMockMixin):
with patch.object(os.path, 'isfile', mock):
self.assertEqual(mount.vfstab(), {})
file_data = textwrap.dedent('''\
#
swap - /tmp tmpfs - yes size=2048m
''')
mock = MagicMock(return_value=True)
with patch.dict(mount.__grains__, {'kernel': 'SunOS'}):
with patch.object(os.path, 'isfile', mock):
file_data = '\n'.join(['#',
'swap - /tmp tmpfs - yes size=2048m'])
with patch('salt.utils.files.fopen',
mock_open(read_data=file_data),
create=True) as m:
m.return_value.__iter__.return_value = file_data.splitlines()
self.assertEqual(mount.fstab(), {'/tmp': {'device': 'swap',
'device_fsck': '-',
'fstype': 'tmpfs',
'mount_at_boot': 'yes',
'opts': ['size=2048m'],
'pass_fsck': '-'}})
with patch.dict(mount.__grains__, {'kernel': 'SunOS'}), \
patch.object(os.path, 'isfile', mock), \
patch('salt.utils.files.fopen', mock_open(read_data=file_data)):
vfstab = mount.vfstab()
assert vfstab == {
'/tmp': {'device': 'swap',
'device_fsck': '-',
'fstype': 'tmpfs',
'mount_at_boot': 'yes',
'opts': ['size=2048m'],
'pass_fsck': '-'}
}, vfstab
def test_rm_fstab(self):
'''
@ -274,30 +275,36 @@ class MountTestCase(TestCase, LoaderModuleMockMixin):
Return a dict containing information on active swap
'''
file_data = '\n'.join(['Filename Type Size Used Priority',
'/dev/sda1 partition 31249404 4100 -1'])
file_data = textwrap.dedent('''\
Filename Type Size Used Priority
/dev/sda1 partition 31249404 4100 -1
''')
with patch.dict(mount.__grains__, {'os': '', 'kernel': ''}):
with patch('salt.utils.files.fopen',
mock_open(read_data=file_data),
create=True) as m:
m.return_value.__iter__.return_value = file_data.splitlines()
with patch('salt.utils.files.fopen', mock_open(read_data=file_data)):
swaps = mount.swaps()
assert swaps == {
'/dev/sda1': {
'priority': '-1',
'size': '31249404',
'type': 'partition',
'used': '4100'}
}, swaps
self.assertDictEqual(mount.swaps(), {'/dev/sda1':
{'priority': '-1',
'size': '31249404',
'type': 'partition',
'used': '4100'}})
file_data = '\n'.join(['Device Size Used Unknown Unknown Priority',
'/dev/sda1 31249404 4100 unknown unknown -1'])
file_data = textwrap.dedent('''\
Device Size Used Unknown Unknown Priority
/dev/sda1 31249404 4100 unknown unknown -1
''')
mock = MagicMock(return_value=file_data)
with patch.dict(mount.__grains__, {'os': 'OpenBSD', 'kernel': 'OpenBSD'}):
with patch.dict(mount.__salt__, {'cmd.run_stdout': mock}):
self.assertDictEqual(mount.swaps(), {'/dev/sda1':
{'priority': '-1',
'size': '31249404',
'type': 'partition',
'used': '4100'}})
with patch.dict(mount.__grains__, {'os': 'OpenBSD', 'kernel': 'OpenBSD'}), \
patch.dict(mount.__salt__, {'cmd.run_stdout': mock}):
swaps = mount.swaps()
assert swaps == {
'/dev/sda1': {
'priority': '-1',
'size': '31249404',
'type': 'partition',
'used': '4100'}
}, swaps
def test_swapon(self):
'''

View file

@ -54,10 +54,12 @@ class MySQLTestCase(TestCase, LoaderModuleMockMixin):
# test_user_create_when_user_exists(self):
# ensure we don't try to create a user when one already exists
with patch.object(mysql, 'user_exists', MagicMock(return_value=True)):
with patch.dict(mysql.__salt__, {'config.option': MagicMock()}):
ret = mysql.user_create('testuser')
self.assertEqual(False, ret)
# mock the version of MySQL
with patch.object(mysql, 'version', MagicMock(return_value='8.0.10')):
with patch.object(mysql, 'user_exists', MagicMock(return_value=True)):
with patch.dict(mysql.__salt__, {'config.option': MagicMock()}):
ret = mysql.user_create('testuser')
self.assertEqual(False, ret)
def test_user_create(self):
'''

View file

@ -230,16 +230,14 @@ class NetworkTestCase(TestCase, LoaderModuleMockMixin):
Test for Modify hostname
'''
self.assertFalse(network.mod_hostname(None))
file_d = '\n'.join(['#', 'A B C D,E,F G H'])
with patch.object(salt.utils.path, 'which', return_value='hostname'):
with patch.dict(network.__salt__,
{'cmd.run': MagicMock(return_value=None)}):
file_d = '\n'.join(['#', 'A B C D,E,F G H'])
with patch('salt.utils.files.fopen', mock_open(read_data=file_d),
create=True) as mfi:
mfi.return_value.__iter__.return_value = file_d.splitlines()
with patch.dict(network.__grains__, {'os_family': 'A'}):
self.assertTrue(network.mod_hostname('hostname'))
with patch.object(salt.utils.path, 'which', return_value='hostname'), \
patch.dict(network.__salt__,
{'cmd.run': MagicMock(return_value=None)}), \
patch.dict(network.__grains__, {'os_family': 'A'}), \
patch('salt.utils.files.fopen', mock_open(read_data=file_d)):
self.assertTrue(network.mod_hostname('hostname'))
def test_connect(self):
'''

View file

@ -10,6 +10,7 @@ from __future__ import absolute_import, print_function, unicode_literals
from tests.support.mixins import LoaderModuleMockMixin
from tests.support.unit import TestCase, skipIf
from tests.support.mock import (
MagicMock,
mock_open,
patch,
NO_MOCK,
@ -32,21 +33,21 @@ class NfsTestCase(TestCase, LoaderModuleMockMixin):
'''
Test for List configured exports
'''
file_d = '\n'.join(['A B1(23'])
with patch('salt.utils.files.fopen',
mock_open(read_data=file_d), create=True) as mfi:
mfi.return_value.__iter__.return_value = file_d.splitlines()
self.assertDictEqual(nfs3.list_exports(),
{'A': [{'hosts': 'B1', 'options': ['23']}]})
with patch('salt.utils.files.fopen', mock_open(read_data='A B1(23')):
exports = nfs3.list_exports()
assert exports == {'A': [{'hosts': 'B1', 'options': ['23']}]}, exports
def test_del_export(self):
'''
Test for Remove an export
'''
with patch.object(nfs3,
'list_exports',
return_value={'A':
[{'hosts':
['B1'], 'options': ['23']}]}):
with patch.object(nfs3, '_write_exports', return_value=None):
self.assertDictEqual(nfs3.del_export(path='A'), {})
list_exports_mock = MagicMock(return_value={
'A': [
{'hosts': ['B1'],
'options': ['23']},
],
})
with patch.object(nfs3, 'list_exports', list_exports_mock), \
patch.object(nfs3, '_write_exports', MagicMock(return_value=None)):
result = nfs3.del_export(path='A')
assert result == {}, result

View file

@ -140,8 +140,9 @@ class PuppetTestCase(TestCase, LoaderModuleMockMixin):
mock_open(read_data="resources: 1")):
self.assertDictEqual(puppet.summary(), {'resources': 1})
with patch('salt.utils.files.fopen', mock_open()) as m_open:
m_open.side_effect = IOError(13, 'Permission denied:', '/file')
permission_error = IOError(os.errno.EACCES, 'Permission denied:', '/file')
with patch('salt.utils.files.fopen',
mock_open(read_data=permission_error)) as m_open:
self.assertRaises(CommandExecutionError, puppet.summary)
def test_plugin_sync(self):

View file

@ -358,13 +358,14 @@ class SnapperTestCase(TestCase, LoaderModuleMockMixin):
patch('os.path.isfile', MagicMock(side_effect=[True, True, False, True])), \
patch('os.path.isdir', MagicMock(return_value=False)), \
patch('salt.modules.snapper.snapper.ListConfigs', MagicMock(return_value=DBUS_RET['ListConfigs'])):
fopen_effect = [
mock_open(read_data=FILE_CONTENT["/tmp/foo"]['pre']).return_value,
mock_open(read_data=FILE_CONTENT["/tmp/foo"]['post']).return_value,
mock_open(read_data=FILE_CONTENT["/tmp/foo2"]['post']).return_value,
]
with patch('salt.utils.files.fopen') as fopen_mock:
fopen_mock.side_effect = fopen_effect
contents = {
'*/tmp/foo': [
FILE_CONTENT['/tmp/foo']['pre'],
FILE_CONTENT['/tmp/foo']['post'],
],
'*/tmp/foo2': FILE_CONTENT['/tmp/foo2']['post'],
}
with patch('salt.utils.files.fopen', mock_open(read_data=contents)):
module_ret = {
"/tmp/foo": MODULE_RET['DIFF']["/tmp/foo"],
"/tmp/foo2": MODULE_RET['DIFF']["/tmp/foo2"],
@ -387,12 +388,7 @@ class SnapperTestCase(TestCase, LoaderModuleMockMixin):
"f18f971f1517449208a66589085ddd3723f7f6cefb56c141e3d97ae49e1d87fa",
])
}):
fopen_effect = [
mock_open(read_data="dummy binary").return_value,
mock_open(read_data="dummy binary").return_value,
]
with patch('salt.utils.files.fopen') as fopen_mock:
fopen_mock.side_effect = fopen_effect
with patch('salt.utils.files.fopen', mock_open(read_data='dummy binary')):
module_ret = {
"/tmp/foo3": MODULE_RET['DIFF']["/tmp/foo3"],
}

View file

@ -5,10 +5,15 @@
# Import Python libs
from __future__ import absolute_import, print_function, unicode_literals
import copy
import os
import shutil
import tempfile
import textwrap
# Import Salt Testing Libs
from tests.support.mixins import LoaderModuleMockMixin
from tests.support.paths import TMP, TMP_CONF_DIR
from tests.support.unit import TestCase, skipIf
from tests.support.mock import (
MagicMock,
@ -21,6 +26,9 @@ from tests.support.mock import (
# Import Salt Libs
import salt.config
import salt.loader
import salt.state
import salt.utils.files
import salt.utils.json
import salt.utils.hashutils
import salt.utils.odict
import salt.utils.platform
@ -945,7 +953,7 @@ class StateTestCase(TestCase, LoaderModuleMockMixin):
mock):
with patch(
'salt.utils.files.fopen',
mock_open()):
mock_open(b'')):
self.assertTrue(
state.sls(arg,
None,
@ -1232,3 +1240,300 @@ class StateTestCase(TestCase, LoaderModuleMockMixin):
({'force': False}, errors),
({}, errors)]:
assert res == state._get_pillar_errors(kwargs=opts, pillar=ext_pillar)
class TopFileMergingCase(TestCase, LoaderModuleMockMixin):
def setup_loader_modules(self):
return {
state: {
'__opts__': salt.config.minion_config(
os.path.join(TMP_CONF_DIR, 'minion')
),
'__salt__': {
'saltutil.is_running': MagicMock(return_value=[]),
},
},
}
def setUp(self):
self.cachedir = tempfile.mkdtemp(dir=TMP)
self.fileserver_root = tempfile.mkdtemp(dir=TMP)
self.addCleanup(shutil.rmtree, self.cachedir, ignore_errors=True)
self.addCleanup(shutil.rmtree, self.fileserver_root, ignore_errors=True)
self.saltenvs = ['base', 'foo', 'bar', 'baz']
self.saltenv_roots = {
x: os.path.join(self.fileserver_root, x)
for x in ('base', 'foo', 'bar', 'baz')
}
self.base_top_file = os.path.join(self.saltenv_roots['base'], 'top.sls')
self.dunder_opts = salt.utils.yaml.safe_load(
textwrap.dedent('''\
file_client: local
default_top: base
file_roots:
base:
- {base}
foo:
- {foo}
bar:
- {bar}
baz:
- {baz}
'''.format(**self.saltenv_roots)
)
)
self.dunder_opts['env_order'] = self.saltenvs
# Write top files for all but the "baz" environment
for saltenv in self.saltenv_roots:
os.makedirs(self.saltenv_roots[saltenv])
if saltenv == 'baz':
continue
top_file = os.path.join(self.saltenv_roots[saltenv], 'top.sls')
with salt.utils.files.fopen(top_file, 'w') as fp_:
# Add a section for every environment to each top file, with
# the SLS target prefixed with the current saltenv.
for env_name in self.saltenvs:
fp_.write(textwrap.dedent('''\
{env_name}:
'*':
- {saltenv}_{env_name}
'''.format(env_name=env_name, saltenv=saltenv)))
def show_top(self, **kwargs):
local_opts = copy.deepcopy(self.dunder_opts)
local_opts.update(kwargs)
with patch.dict(state.__opts__, local_opts), \
patch.object(salt.state.State, '_gather_pillar',
MagicMock(return_value={})):
ret = state.show_top()
# Lazy way of converting ordered dicts to regular dicts. We don't
# care about dict ordering for these tests.
return salt.utils.json.loads(salt.utils.json.dumps(ret))
def use_limited_base_top_file(self):
'''
Overwrites the base top file so that it only contains sections for its
own saltenv.
'''
with salt.utils.files.fopen(self.base_top_file, 'w') as fp_:
fp_.write(textwrap.dedent('''\
base:
'*':
- base_base
'''))
def test_merge_strategy_merge(self):
'''
Base overrides everything
'''
ret = self.show_top(top_file_merging_strategy='merge')
assert ret == {
'base': ['base_base'],
'foo': ['base_foo'],
'bar': ['base_bar'],
'baz': ['base_baz'],
}, ret
def test_merge_strategy_merge_limited_base(self):
'''
Test with a "base" top file containing only a "base" section. The "baz"
saltenv should not be in the return data because that env doesn't have
its own top file and there will be no "baz" section in the "base" env's
top file.
Next, append a "baz" section to the rewritten top file and we should
get results for that saltenv in the return data.
'''
self.use_limited_base_top_file()
ret = self.show_top(top_file_merging_strategy='merge')
assert ret == {
'base': ['base_base'],
'foo': ['foo_foo'],
'bar': ['bar_bar'],
}, ret
# Add a "baz" section
with salt.utils.files.fopen(self.base_top_file, 'a') as fp_:
fp_.write(textwrap.dedent('''\
baz:
'*':
- base_baz
'''))
ret = self.show_top(top_file_merging_strategy='merge')
assert ret == {
'base': ['base_base'],
'foo': ['foo_foo'],
'bar': ['bar_bar'],
'baz': ['base_baz'],
}, ret
def test_merge_strategy_merge_state_top_saltenv_base(self):
'''
This tests with state_top_saltenv=base, which should pull states *only*
from the base saltenv.
'''
ret = self.show_top(
top_file_merging_strategy='merge',
state_top_saltenv='base')
assert ret == {
'base': ['base_base'],
'foo': ['base_foo'],
'bar': ['base_bar'],
'baz': ['base_baz'],
}, ret
def test_merge_strategy_merge_state_top_saltenv_foo(self):
'''
This tests with state_top_saltenv=foo, which should pull states *only*
from the foo saltenv. Since that top file is only authoritative for
its own saltenv, *only* the foo saltenv's matches from the foo top file
should be in the return data.
'''
ret = self.show_top(
top_file_merging_strategy='merge',
state_top_saltenv='foo')
assert ret == {'foo': ['foo_foo']}, ret
def test_merge_strategy_merge_all(self):
'''
Include everything in every top file
'''
ret = self.show_top(top_file_merging_strategy='merge_all')
assert ret == {
'base': ['base_base', 'foo_base', 'bar_base'],
'foo': ['base_foo', 'foo_foo', 'bar_foo'],
'bar': ['base_bar', 'foo_bar', 'bar_bar'],
'baz': ['base_baz', 'foo_baz', 'bar_baz'],
}, ret
def test_merge_strategy_merge_all_alternate_env_order(self):
'''
Use an alternate env_order. This should change the order in which the
SLS targets appear in the result.
'''
ret = self.show_top(
top_file_merging_strategy='merge_all',
env_order=['bar', 'foo', 'base'])
assert ret == {
'base': ['bar_base', 'foo_base', 'base_base'],
'foo': ['bar_foo', 'foo_foo', 'base_foo'],
'bar': ['bar_bar', 'foo_bar', 'base_bar'],
'baz': ['bar_baz', 'foo_baz', 'base_baz'],
}, ret
def test_merge_strategy_merge_all_state_top_saltenv_base(self):
'''
This tests with state_top_saltenv=base, which should pull states *only*
from the base saltenv. Since we are using the "merge_all" strategy, all
the states from that top file should be in the return data.
'''
ret = self.show_top(
top_file_merging_strategy='merge_all',
state_top_saltenv='base')
assert ret == {
'base': ['base_base'],
'foo': ['base_foo'],
'bar': ['base_bar'],
'baz': ['base_baz'],
}, ret
def test_merge_strategy_merge_all_state_top_saltenv_foo(self):
'''
This tests with state_top_saltenv=foo, which should pull states *only*
from the foo saltenv. Since we are using the "merge_all" strategy, all
the states from that top file should be in the return data.
'''
ret = self.show_top(
top_file_merging_strategy='merge_all',
state_top_saltenv='foo')
assert ret == {
'base': ['foo_base'],
'foo': ['foo_foo'],
'bar': ['foo_bar'],
'baz': ['foo_baz'],
}, ret
def test_merge_strategy_same(self):
'''
Each env should get its SLS targets from its own top file, with the
"baz" env pulling from "base" since default_top=base and there is no
top file in the "baz" saltenv.
'''
ret = self.show_top(top_file_merging_strategy='same')
assert ret == {
'base': ['base_base'],
'foo': ['foo_foo'],
'bar': ['bar_bar'],
'baz': ['base_baz'],
}, ret
def test_merge_strategy_same_limited_base(self):
'''
Each env should get its SLS targets from its own top file, with the
"baz" env pulling from "base" since default_top=base and there is no
top file in the "baz" saltenv.
'''
self.use_limited_base_top_file()
ret = self.show_top(top_file_merging_strategy='same')
assert ret == {
'base': ['base_base'],
'foo': ['foo_foo'],
'bar': ['bar_bar'],
}, ret
def test_merge_strategy_same_default_top_foo(self):
'''
Each env should get its SLS targets from its own top file, with the
"baz" env pulling from "foo" since default_top=foo and there is no top
file in the "baz" saltenv.
'''
ret = self.show_top(
top_file_merging_strategy='same',
default_top='foo')
assert ret == {
'base': ['base_base'],
'foo': ['foo_foo'],
'bar': ['bar_bar'],
'baz': ['foo_baz'],
}, ret
def test_merge_strategy_same_state_top_saltenv_base(self):
'''
Test the state_top_saltenv parameter to load states exclusively from
the base saltenv, with the "same" merging strategy. This should
result in just the base environment's states from the base top file
being in the merged result.
'''
ret = self.show_top(
top_file_merging_strategy='same',
state_top_saltenv='base')
assert ret == {'base': ['base_base']}, ret
def test_merge_strategy_same_state_top_saltenv_foo(self):
'''
Test the state_top_saltenv parameter to load states exclusively from
the foo saltenv, with the "same" merging strategy. This should
result in just the foo environment's states from the foo top file
being in the merged result.
'''
ret = self.show_top(
top_file_merging_strategy='same',
state_top_saltenv='foo')
assert ret == {'foo': ['foo_foo']}, ret
def test_merge_strategy_same_state_top_saltenv_baz(self):
'''
Test the state_top_saltenv parameter to load states exclusively from
the baz saltenv, with the "same" merging strategy. This should
result in an empty dictionary since there is no top file in that
environment.
'''
ret = self.show_top(
top_file_merging_strategy='same',
state_top_saltenv='baz')
assert ret == {}, ret

View file

@ -203,13 +203,11 @@ class TimezoneModuleTestCase(TestCase, LoaderModuleMockMixin):
:return:
'''
with patch.dict(timezone.__grains__, {'os_family': ['Gentoo']}):
_fopen = mock_open()
with patch('salt.utils.files.fopen', _fopen):
with patch('salt.utils.files.fopen', mock_open()) as m_open:
assert timezone.set_zone(self.TEST_TZ)
name, args, kwargs = _fopen.mock_calls[0]
assert args == ('/etc/timezone', 'w')
name, args, kwargs = _fopen.return_value.__enter__.return_value.write.mock_calls[0]
assert args == ('UTC',)
fh_ = m_open.filehandles['/etc/timezone'][0]
assert fh_.call.args == ('/etc/timezone', 'w'), fh_.call.args
assert fh_.write_calls == ['UTC', '\n'], fh_.write_calls
@skipIf(salt.utils.platform.is_windows(), 'os.symlink not available in Windows')
@patch('salt.utils.path.which', MagicMock(return_value=False))
@ -222,13 +220,11 @@ class TimezoneModuleTestCase(TestCase, LoaderModuleMockMixin):
:return:
'''
with patch.dict(timezone.__grains__, {'os_family': ['Debian']}):
_fopen = mock_open()
with patch('salt.utils.files.fopen', _fopen):
with patch('salt.utils.files.fopen', mock_open()) as m_open:
assert timezone.set_zone(self.TEST_TZ)
name, args, kwargs = _fopen.mock_calls[0]
assert args == ('/etc/timezone', 'w')
name, args, kwargs = _fopen.return_value.__enter__.return_value.write.mock_calls[0]
assert args == ('UTC',)
fh_ = m_open.filehandles['/etc/timezone'][0]
assert fh_.call.args == ('/etc/timezone', 'w'), fh_.call.args
assert fh_.write_calls == ['UTC', '\n'], fh_.write_calls
@skipIf(salt.utils.platform.is_windows(), 'os.symlink not available in Windows')
@patch('salt.utils.path.which', MagicMock(return_value=True))

View file

@ -79,13 +79,11 @@ class SSHConfigRosterTestCase(TestCase, mixins.LoaderModuleMockMixin):
def test_all(self):
with patch('salt.utils.files.fopen', self.mock_fp):
with patch('salt.roster.sshconfig._get_ssh_config_file'):
self.mock_fp.return_value.__iter__.return_value = _SAMPLE_SSH_CONFIG.splitlines()
targets = sshconfig.targets('*')
self.assertEqual(targets, _ALL)
def test_abc_glob(self):
with patch('salt.utils.files.fopen', self.mock_fp):
with patch('salt.roster.sshconfig._get_ssh_config_file'):
self.mock_fp.return_value.__iter__.return_value = _SAMPLE_SSH_CONFIG.splitlines()
targets = sshconfig.targets('abc*')
self.assertEqual(targets, _ABC_GLOB)

View file

@ -8,7 +8,14 @@ import shutil
# salt testing libs
from tests.support.unit import TestCase, skipIf
from tests.support.mock import patch, call, mock_open, NO_MOCK, NO_MOCK_REASON, MagicMock
from tests.support.mock import(
patch,
mock_open,
NO_MOCK,
NO_MOCK_REASON,
MagicMock,
MockCall,
)
# salt libs
from salt.ext import six
@ -96,19 +103,23 @@ SIG = (
@skipIf(HAS_M2, 'm2crypto is used by salt.crypt if installed')
class CryptTestCase(TestCase):
def test_gen_keys(self):
open_priv_wb = MockCall('/keydir{0}keyname.pem'.format(os.sep), 'wb+')
open_pub_wb = MockCall('/keydir{0}keyname.pub'.format(os.sep), 'wb+')
with patch.multiple(os, umask=MagicMock(), chmod=MagicMock(),
access=MagicMock(return_value=True)):
with patch('salt.utils.files.fopen', mock_open()):
open_priv_wb = call('/keydir{0}keyname.pem'.format(os.sep), 'wb+')
open_pub_wb = call('/keydir{0}keyname.pub'.format(os.sep), 'wb+')
with patch('os.path.isfile', return_value=True):
self.assertEqual(crypt.gen_keys('/keydir', 'keyname', 2048), '/keydir{0}keyname.pem'.format(os.sep))
self.assertNotIn(open_priv_wb, salt.utils.files.fopen.mock_calls)
self.assertNotIn(open_pub_wb, salt.utils.files.fopen.mock_calls)
with patch('os.path.isfile', return_value=False):
with patch('salt.utils.files.fopen', mock_open()):
crypt.gen_keys('/keydir', 'keyname', 2048)
salt.utils.files.fopen.assert_has_calls([open_priv_wb, open_pub_wb], any_order=True)
with patch('salt.utils.files.fopen', mock_open()) as m_open, \
patch('os.path.isfile', return_value=True):
result = crypt.gen_keys('/keydir', 'keyname', 2048)
assert result == '/keydir{0}keyname.pem'.format(os.sep), result
assert open_priv_wb not in m_open.calls
assert open_pub_wb not in m_open.calls
with patch('salt.utils.files.fopen', mock_open()) as m_open, \
patch('os.path.isfile', return_value=False):
crypt.gen_keys('/keydir', 'keyname', 2048)
assert open_priv_wb in m_open.calls
assert open_pub_wb in m_open.calls
@patch('os.umask', MagicMock())
@patch('os.chmod', MagicMock())
@ -116,17 +127,23 @@ class CryptTestCase(TestCase):
@patch('os.access', MagicMock(return_value=True))
def test_gen_keys_with_passphrase(self):
key_path = os.path.join(os.sep, 'keydir')
with patch('salt.utils.files.fopen', mock_open()):
open_priv_wb = call(os.path.join(key_path, 'keyname.pem'), 'wb+')
open_pub_wb = call(os.path.join(key_path, 'keyname.pub'), 'wb+')
with patch('os.path.isfile', return_value=True):
self.assertEqual(crypt.gen_keys(key_path, 'keyname', 2048, passphrase='password'), os.path.join(key_path, 'keyname.pem'))
self.assertNotIn(open_priv_wb, salt.utils.files.fopen.mock_calls)
self.assertNotIn(open_pub_wb, salt.utils.files.fopen.mock_calls)
with patch('os.path.isfile', return_value=False):
with patch('salt.utils.files.fopen', mock_open()):
crypt.gen_keys(key_path, 'keyname', 2048)
salt.utils.files.fopen.assert_has_calls([open_priv_wb, open_pub_wb], any_order=True)
open_priv_wb = MockCall(os.path.join(key_path, 'keyname.pem'), 'wb+')
open_pub_wb = MockCall(os.path.join(key_path, 'keyname.pub'), 'wb+')
with patch('salt.utils.files.fopen', mock_open()) as m_open, \
patch('os.path.isfile', return_value=True):
self.assertEqual(crypt.gen_keys(key_path, 'keyname', 2048, passphrase='password'), os.path.join(key_path, 'keyname.pem'))
result = crypt.gen_keys(key_path, 'keyname', 2048,
passphrase='password')
assert result == os.path.join(key_path, 'keyname.pem'), result
assert open_priv_wb not in m_open.calls
assert open_pub_wb not in m_open.calls
with patch('salt.utils.files.fopen', mock_open()) as m_open, \
patch('os.path.isfile', return_value=False):
crypt.gen_keys(key_path, 'keyname', 2048)
assert open_priv_wb in m_open.calls
assert open_pub_wb in m_open.calls
def test_sign_message(self):
key = RSA.importKey(PRIVKEY_DATA)

785
tests/unit/test_mock.py Normal file
View file

@ -0,0 +1,785 @@
# -*- coding: utf-8 -*-
'''
Tests for our mock_open helper
'''
# Import Python Libs
from __future__ import absolute_import, unicode_literals, print_function
import errno
import logging
import textwrap
# Import Salt libs
import salt.utils.data
import salt.utils.files
import salt.utils.stringutils
from salt.ext import six
# Import Salt Testing Libs
from tests.support.mock import patch, mock_open, NO_MOCK, NO_MOCK_REASON
from tests.support.unit import TestCase, skipIf
log = logging.getLogger(__name__)
class MockOpenMixin(object):
def _get_values(self, binary=False, multifile=False, split=False):
if split:
questions = (self.questions_bytes_lines if binary
else self.questions_str_lines)
answers = (self.answers_bytes_lines if binary
else self.answers_str_lines)
else:
questions = self.questions_bytes if binary else self.questions_str
answers = self.answers_bytes if binary else self.answers_str
mode = 'rb' if binary else 'r'
if multifile:
read_data = self.contents_bytes if binary else self.contents
else:
read_data = self.questions_bytes if binary else self.questions
return questions, answers, mode, read_data
def _test_read(self, binary=False, multifile=False):
questions, answers, mode, read_data = \
self._get_values(binary=binary, multifile=multifile)
with patch('salt.utils.files.fopen', mock_open(read_data=read_data)):
with salt.utils.files.fopen('foo.txt', mode) as self.fh:
result = self.fh.read()
assert result == questions, result
if multifile:
with salt.utils.files.fopen('bar.txt', mode) as self.fh2:
result = self.fh2.read()
assert result == answers, result
with salt.utils.files.fopen('baz.txt', mode) as self.fh3:
result = self.fh3.read()
assert result == answers, result
try:
with salt.utils.files.fopen('helloworld.txt'):
raise Exception('No patterns should have matched')
except IOError:
# An IOError is expected here
pass
def _test_read_explicit_size(self, binary=False, multifile=False):
questions, answers, mode, read_data = \
self._get_values(binary=binary, multifile=multifile)
with patch('salt.utils.files.fopen', mock_open(read_data=read_data)):
with salt.utils.files.fopen('foo.txt', mode) as self.fh:
# Read 10 bytes
result = self.fh.read(10)
assert result == questions[:10], result
# Read another 10 bytes
result = self.fh.read(10)
assert result == questions[10:20], result
# Read the rest
result = self.fh.read()
assert result == questions[20:], result
if multifile:
with salt.utils.files.fopen('bar.txt', mode) as self.fh2:
# Read 10 bytes
result = self.fh2.read(10)
assert result == answers[:10], result
# Read another 10 bytes
result = self.fh2.read(10)
assert result == answers[10:20], result
# Read the rest
result = self.fh2.read()
assert result == answers[20:], result
with salt.utils.files.fopen('baz.txt', mode) as self.fh3:
# Read 10 bytes
result = self.fh3.read(10)
assert result == answers[:10], result
# Read another 10 bytes
result = self.fh3.read(10)
assert result == answers[10:20], result
# Read the rest
result = self.fh3.read()
assert result == answers[20:], result
try:
with salt.utils.files.fopen('helloworld.txt'):
raise Exception('No globs should have matched')
except IOError:
# An IOError is expected here
pass
def _test_read_explicit_size_larger_than_file_size(self,
binary=False,
multifile=False):
questions, answers, mode, read_data = \
self._get_values(binary=binary, multifile=multifile)
with patch('salt.utils.files.fopen', mock_open(read_data=read_data)):
with salt.utils.files.fopen('foo.txt', mode) as self.fh:
result = self.fh.read(999999)
assert result == questions, result
if multifile:
with salt.utils.files.fopen('bar.txt', mode) as self.fh2:
result = self.fh2.read(999999)
assert result == answers, result
with salt.utils.files.fopen('baz.txt', mode) as self.fh3:
result = self.fh3.read(999999)
assert result == answers, result
try:
with salt.utils.files.fopen('helloworld.txt'):
raise Exception('No globs should have matched')
except IOError:
# An IOError is expected here
pass
def _test_read_for_loop(self, binary=False, multifile=False):
questions, answers, mode, read_data = \
self._get_values(binary=binary, multifile=multifile, split=True)
with patch('salt.utils.files.fopen', mock_open(read_data=read_data)):
with salt.utils.files.fopen('foo.txt', mode) as self.fh:
index = 0
for line in self.fh:
assert line == questions[index], \
'Line {0}: {1}'.format(index, line)
index += 1
if multifile:
with salt.utils.files.fopen('bar.txt', mode) as self.fh2:
index = 0
for line in self.fh2:
assert line == answers[index], \
'Line {0}: {1}'.format(index, line)
index += 1
with salt.utils.files.fopen('baz.txt', mode) as self.fh3:
index = 0
for line in self.fh3:
assert line == answers[index], \
'Line {0}: {1}'.format(index, line)
index += 1
try:
with salt.utils.files.fopen('helloworld.txt'):
raise Exception('No globs should have matched')
except IOError:
# An IOError is expected here
pass
def _test_read_readline(self, binary=False, multifile=False):
questions, answers, mode, read_data = \
self._get_values(binary=binary, multifile=multifile, split=True)
with patch('salt.utils.files.fopen', mock_open(read_data=read_data)):
with salt.utils.files.fopen('foo.txt', mode) as self.fh:
size = 8
result = self.fh.read(size)
assert result == questions[0][:size], result
# Use .readline() to read the remainder of the line
result = self.fh.readline()
assert result == questions[0][size:], result
# Read and check the other two lines
result = self.fh.readline()
assert result == questions[1], result
result = self.fh.readline()
assert result == questions[2], result
if multifile:
with salt.utils.files.fopen('bar.txt', mode) as self.fh2:
size = 20
result = self.fh2.read(size)
assert result == answers[0][:size], result
# Use .readline() to read the remainder of the line
result = self.fh2.readline()
assert result == answers[0][size:], result
# Read and check the other two lines
result = self.fh2.readline()
assert result == answers[1], result
result = self.fh2.readline()
assert result == answers[2], result
with salt.utils.files.fopen('baz.txt', mode) as self.fh3:
size = 20
result = self.fh3.read(size)
assert result == answers[0][:size], result
# Use .readline() to read the remainder of the line
result = self.fh3.readline()
assert result == answers[0][size:], result
# Read and check the other two lines
result = self.fh3.readline()
assert result == answers[1], result
result = self.fh3.readline()
assert result == answers[2], result
try:
with salt.utils.files.fopen('helloworld.txt'):
raise Exception('No globs should have matched')
except IOError:
# An IOError is expected here
pass
def _test_readline_readlines(self, binary=False, multifile=False):
questions, answers, mode, read_data = \
self._get_values(binary=binary, multifile=multifile, split=True)
with patch('salt.utils.files.fopen', mock_open(read_data=read_data)):
with salt.utils.files.fopen('foo.txt', mode) as self.fh:
# Read the first line
result = self.fh.readline()
assert result == questions[0], result
# Use .readlines() to read the remainder of the file
result = self.fh.readlines()
assert result == questions[1:], result
if multifile:
with salt.utils.files.fopen('bar.txt', mode) as self.fh2:
# Read the first line
result = self.fh2.readline()
assert result == answers[0], result
# Use .readlines() to read the remainder of the file
result = self.fh2.readlines()
assert result == answers[1:], result
with salt.utils.files.fopen('baz.txt', mode) as self.fh3:
# Read the first line
result = self.fh3.readline()
assert result == answers[0], result
# Use .readlines() to read the remainder of the file
result = self.fh3.readlines()
assert result == answers[1:], result
try:
with salt.utils.files.fopen('helloworld.txt'):
raise Exception('No globs should have matched')
except IOError:
# An IOError is expected here
pass
def _test_readlines_multifile(self, binary=False, multifile=False):
questions, answers, mode, read_data = \
self._get_values(binary=binary, multifile=multifile, split=True)
with patch('salt.utils.files.fopen', mock_open(read_data=read_data)):
with salt.utils.files.fopen('foo.txt', mode) as self.fh:
result = self.fh.readlines()
assert result == questions, result
if multifile:
with salt.utils.files.fopen('bar.txt', mode) as self.fh2:
result = self.fh2.readlines()
assert result == answers, result
with salt.utils.files.fopen('baz.txt', mode) as self.fh3:
result = self.fh3.readlines()
assert result == answers, result
try:
with salt.utils.files.fopen('helloworld.txt'):
raise Exception('No globs should have matched')
except IOError:
# An IOError is expected here
pass
@skipIf(NO_MOCK, NO_MOCK_REASON)
class MockOpenTestCase(TestCase, MockOpenMixin):
'''
Tests for our mock_open helper to ensure that it behaves as closely as
possible to a real filehandle.
'''
# Cyrllic characters used to test unicode handling
questions = textwrap.dedent('''\
Шнат is your name?
Шнат is your quest?
Шнат is the airspeed velocity of an unladen swallow?
''')
answers = textwrap.dedent('''\
It is Аятнця, King of the Britons.
To seek тне Holy Grail.
Шнат do you mean? An African or European swallow?
''')
@classmethod
def setUpClass(cls):
cls.questions_lines = cls.questions.splitlines(True)
cls.answers_lines = cls.answers.splitlines(True)
cls.questions_str = salt.utils.stringutils.to_str(cls.questions)
cls.answers_str = salt.utils.stringutils.to_str(cls.answers)
cls.questions_str_lines = cls.questions_str.splitlines(True)
cls.answers_str_lines = cls.answers_str.splitlines(True)
cls.questions_bytes = salt.utils.stringutils.to_bytes(cls.questions)
cls.answers_bytes = salt.utils.stringutils.to_bytes(cls.answers)
cls.questions_bytes_lines = cls.questions_bytes.splitlines(True)
cls.answers_bytes_lines = cls.answers_bytes.splitlines(True)
# When this is used as the read_data, Python 2 should normalize
# cls.questions and cls.answers to str types.
cls.contents = {'foo.txt': cls.questions,
'b*.txt': cls.answers}
cls.contents_bytes = {'foo.txt': cls.questions_bytes,
'b*.txt': cls.answers_bytes}
cls.read_data_as_list = [
'foo', 'bar', 'спам',
IOError(errno.EACCES, 'Permission denied')
]
cls.normalized_read_data_as_list = salt.utils.data.decode(
cls.read_data_as_list,
to_str=True
)
cls.read_data_as_list_bytes = salt.utils.data.encode(cls.read_data_as_list)
def tearDown(self):
'''
Each test should read the entire contents of the mocked filehandle(s).
This confirms that the other read functions return empty strings/lists,
to simulate being at EOF.
'''
for handle_name in ('fh', 'fh2', 'fh3'):
try:
fh = getattr(self, handle_name)
except AttributeError:
continue
log.debug('Running tearDown tests for self.%s', handle_name)
try:
result = fh.read(5)
assert not result, result
result = fh.read()
assert not result, result
result = fh.readline()
assert not result, result
result = fh.readlines()
assert not result, result
# Last but not least, try to read using a for loop. This should not
# read anything as we should hit EOF immediately, before the generator
# in the mocked filehandle has a chance to yield anything. So the
# exception will only be raised if we aren't at EOF already.
for line in fh:
raise Exception(
'Instead of EOF, read the following from {0}: {1}'.format(
handle_name,
line
)
)
except IOError as exc:
if six.text_type(exc) != 'File not open for reading':
raise
del fh
def test_read(self):
'''
Test reading the entire file
'''
self._test_read(binary=False, multifile=False)
self._test_read(binary=True, multifile=False)
self._test_read(binary=False, multifile=True)
self._test_read(binary=True, multifile=True)
def test_read_explicit_size(self):
'''
Test reading with explicit sizes
'''
self._test_read_explicit_size(binary=False, multifile=False)
self._test_read_explicit_size(binary=True, multifile=False)
self._test_read_explicit_size(binary=False, multifile=True)
self._test_read_explicit_size(binary=True, multifile=True)
def test_read_explicit_size_larger_than_file_size(self):
'''
Test reading with an explicit size larger than the size of read_data.
This ensures that we just return the contents up until EOF and that we
don't raise any errors due to the desired size being larger than the
mocked file's size.
'''
self._test_read_explicit_size_larger_than_file_size(
binary=False, multifile=False)
self._test_read_explicit_size_larger_than_file_size(
binary=True, multifile=False)
self._test_read_explicit_size_larger_than_file_size(
binary=False, multifile=True)
self._test_read_explicit_size_larger_than_file_size(
binary=True, multifile=True)
def test_read_for_loop(self):
'''
Test reading the contents of the file line by line in a for loop
'''
self._test_read_for_loop(binary=False, multifile=False)
self._test_read_for_loop(binary=True, multifile=False)
self._test_read_for_loop(binary=False, multifile=True)
self._test_read_for_loop(binary=True, multifile=True)
def test_read_readline(self):
'''
Test reading part of a line using .read(), then reading the rest of the
line (and subsequent lines) using .readline().
'''
self._test_read_readline(binary=False, multifile=False)
self._test_read_readline(binary=True, multifile=False)
self._test_read_readline(binary=False, multifile=True)
self._test_read_readline(binary=True, multifile=True)
def test_readline_readlines(self):
    '''
    Test reading the first line using .readline(), then reading the rest of
    the file using .readlines().
    '''
    for multifile in (False, True):
        for binary in (False, True):
            self._test_readline_readlines(binary=binary, multifile=multifile)
def test_readlines(self):
    '''
    Test reading the entire file using .readlines
    '''
    for multifile in (False, True):
        for binary in (False, True):
            self._test_readlines_multifile(binary=binary, multifile=multifile)
def test_read_data_converted_to_dict(self):
    '''
    Test that a non-dict value for read_data is converted to a dict mapping
    '*' to that value.
    '''
    # A plain (non-ascii) string value should be normalized to the native
    # str type and stored under the catch-all '*' filename glob.
    contents = 'спам'
    normalized = salt.utils.stringutils.to_str(contents)
    with patch('salt.utils.files.fopen',
               mock_open(read_data=contents)) as m_open:
        assert m_open.read_data == {'*': normalized}, m_open.read_data
    # A list value should likewise be keyed under '*', with each element
    # normalized (self.normalized_read_data_as_list is the expected form).
    with patch('salt.utils.files.fopen',
               mock_open(read_data=self.read_data_as_list)) as m_open:
        assert m_open.read_data == {
            '*': self.normalized_read_data_as_list,
        }, m_open.read_data
def test_read_data_list(self):
    '''
    Test read_data when it is a list

    Each successive fopen() is expected to yield the next item from the
    read_data list; items that are exceptions are expected to be raised.
    '''
    with patch('salt.utils.files.fopen',
               mock_open(read_data=self.read_data_as_list)):
        for value in self.normalized_read_data_as_list:
            try:
                with salt.utils.files.fopen('foo.txt') as self.fh:
                    result = self.fh.read()
                    assert result == value, result
            except IOError:
                # Only raise the caught exception if it wasn't expected
                # (i.e. if value is not an exception)
                if not isinstance(value, IOError):
                    raise
def test_read_data_list_bytes(self):
    '''
    Test read_data when it is a list and the value is a bytestring

    Same as test_read_data_list, but the file is opened in binary mode and
    the raw (un-normalized) list is used for the expected values.
    '''
    with patch('salt.utils.files.fopen',
               mock_open(read_data=self.read_data_as_list_bytes)):
        for value in self.read_data_as_list_bytes:
            try:
                with salt.utils.files.fopen('foo.txt', 'rb') as self.fh:
                    result = self.fh.read()
                    assert result == value, result
            except IOError:
                # Only raise the caught exception if it wasn't expected
                # (i.e. if value is not an exception)
                if not isinstance(value, IOError):
                    raise
def test_tell(self):
    '''
    Test the implementation of tell

    Verifies that .tell() tracks the read position across .read() with and
    without explicit sizes, .readline(), .readlines(), and iteration.
    '''
    with patch('salt.utils.files.fopen',
               mock_open(read_data=self.contents)):
        # Try with reading explicit sizes and then reading the rest of the
        # file.
        with salt.utils.files.fopen('foo.txt') as self.fh:
            self.fh.read(5)
            loc = self.fh.tell()
            assert loc == 5, loc
            self.fh.read(12)
            loc = self.fh.tell()
            assert loc == 17, loc
            self.fh.read()
            loc = self.fh.tell()
            assert loc == len(self.questions_str), loc
        # Try reading way more content then actually exists in the file,
        # tell() should return a value equal to the length of the content
        with salt.utils.files.fopen('foo.txt') as self.fh:
            self.fh.read(999999)
            loc = self.fh.tell()
            assert loc == len(self.questions_str), loc
        # Try reading a few bytes using .read(), then the rest of the line
        # using .readline(), then the rest of the file using .readlines(),
        # and check the location after each read.
        with salt.utils.files.fopen('foo.txt') as self.fh:
            # Read a few bytes
            self.fh.read(5)
            loc = self.fh.tell()
            assert loc == 5, loc
            # Read the rest of the line. Location should then be at the end
            # of the first line.
            self.fh.readline()
            loc = self.fh.tell()
            assert loc == len(self.questions_str_lines[0]), loc
            # Read the rest of the file using .readlines()
            self.fh.readlines()
            loc = self.fh.tell()
            assert loc == len(self.questions_str), loc
        # Check location while iterating through the filehandle; after
        # each line the position equals the combined length of all lines
        # consumed so far.
        with salt.utils.files.fopen('foo.txt') as self.fh:
            index = 0
            for _ in self.fh:
                index += 1
                loc = self.fh.tell()
                assert loc == sum(
                    len(x) for x in self.questions_str_lines[:index]
                ), loc
def test_write(self):
    '''
    Test writing to a filehandle using .write()

    Covers text mode ('w'), the binary modes ('wb', 'ab', 'r+b'), and the
    type-mismatch cases: writing str to a binary handle and bytes to a
    text handle (TypeError on PY3, encode-based behavior on PY2).
    '''
    # Test opening for non-binary writing
    with patch('salt.utils.files.fopen', mock_open()):
        with salt.utils.files.fopen('foo.txt', 'w') as self.fh:
            for line in self.questions_str_lines:
                self.fh.write(line)
            assert self.fh.write_calls == self.questions_str_lines, self.fh.write_calls
    # Test opening for binary writing using "wb"
    with patch('salt.utils.files.fopen', mock_open(read_data=b'')):
        with salt.utils.files.fopen('foo.txt', 'wb') as self.fh:
            for line in self.questions_bytes_lines:
                self.fh.write(line)
            assert self.fh.write_calls == self.questions_bytes_lines, self.fh.write_calls
    # Test opening for binary writing using "ab"
    with patch('salt.utils.files.fopen', mock_open(read_data=b'')):
        with salt.utils.files.fopen('foo.txt', 'ab') as self.fh:
            for line in self.questions_bytes_lines:
                self.fh.write(line)
            assert self.fh.write_calls == self.questions_bytes_lines, self.fh.write_calls
    # Test opening for read-and-write using "r+b"
    with patch('salt.utils.files.fopen', mock_open(read_data=b'')):
        with salt.utils.files.fopen('foo.txt', 'r+b') as self.fh:
            for line in self.questions_bytes_lines:
                self.fh.write(line)
            assert self.fh.write_calls == self.questions_bytes_lines, self.fh.write_calls
    # Test trying to write str types to a binary filehandle
    with patch('salt.utils.files.fopen', mock_open(read_data=b'')):
        with salt.utils.files.fopen('foo.txt', 'wb') as self.fh:
            try:
                self.fh.write('foo\n')
            except TypeError:
                # This exception is expected on Python 3
                if not six.PY3:
                    raise
            else:
                # This write should work fine on Python 2
                if six.PY3:
                    raise Exception(
                        'Should not have been able to write a str to a '
                        'binary filehandle'
                    )
            if six.PY2:
                # Try with non-ascii unicode. Note that the write above
                # should work because the mocked filehandle should attempt
                # a .encode() to convert it to a str type. But when writing
                # a string with non-ascii unicode, it should raise a
                # UnicodeEncodeError, which is what we are testing here.
                try:
                    self.fh.write(self.questions)
                except UnicodeEncodeError:
                    pass
                else:
                    raise Exception(
                        'Should not have been able to write non-ascii '
                        'unicode to a binary filehandle'
                    )
    # Test trying to write bytestrings to a non-binary filehandle
    with patch('salt.utils.files.fopen', mock_open()):
        with salt.utils.files.fopen('foo.txt', 'w') as self.fh:
            try:
                self.fh.write(b'foo\n')
            except TypeError:
                # This exception is expected on Python 3
                if not six.PY3:
                    raise
            else:
                # This write should work fine on Python 2
                if six.PY3:
                    raise Exception(
                        'Should not have been able to write a bytestring '
                        'to a non-binary filehandle'
                    )
            if six.PY2:
                # Try with non-ascii unicode. Note that the write above
                # should work because the mocked filehandle should attempt
                # a .encode() to convert it to a str type. But when writing
                # a string with non-ascii unicode, it should raise a
                # UnicodeEncodeError, which is what we are testing here.
                try:
                    self.fh.write(self.questions)
                except UnicodeEncodeError:
                    pass
                else:
                    # Fixed: this is the non-binary branch, but the failure
                    # message previously said 'binary filehandle'.
                    raise Exception(
                        'Should not have been able to write non-ascii '
                        'unicode to a non-binary filehandle'
                    )
def test_writelines(self):
    '''
    Test writing to a filehandle using .writelines()

    Covers text mode ('w'), the binary modes ('wb', 'ab', 'r+b'), and the
    type-mismatch cases: writing str lines to a binary handle and bytes
    lines to a text handle (TypeError on PY3, encode behavior on PY2).
    '''
    # Test opening for non-binary writing
    with patch('salt.utils.files.fopen', mock_open()):
        with salt.utils.files.fopen('foo.txt', 'w') as self.fh:
            self.fh.writelines(self.questions_str_lines)
            assert self.fh.writelines_calls == [self.questions_str_lines], self.fh.writelines_calls
    # Test opening for binary writing using "wb"
    with patch('salt.utils.files.fopen', mock_open(read_data=b'')):
        with salt.utils.files.fopen('foo.txt', 'wb') as self.fh:
            self.fh.writelines(self.questions_bytes_lines)
            assert self.fh.writelines_calls == [self.questions_bytes_lines], self.fh.writelines_calls
    # Test opening for binary writing using "ab"
    with patch('salt.utils.files.fopen', mock_open(read_data=b'')):
        with salt.utils.files.fopen('foo.txt', 'ab') as self.fh:
            self.fh.writelines(self.questions_bytes_lines)
            assert self.fh.writelines_calls == [self.questions_bytes_lines], self.fh.writelines_calls
    # Test opening for read-and-write using "r+b"
    with patch('salt.utils.files.fopen', mock_open(read_data=b'')):
        with salt.utils.files.fopen('foo.txt', 'r+b') as self.fh:
            self.fh.writelines(self.questions_bytes_lines)
            assert self.fh.writelines_calls == [self.questions_bytes_lines], self.fh.writelines_calls
    # Test trying to write str types to a binary filehandle
    with patch('salt.utils.files.fopen', mock_open(read_data=b'')):
        with salt.utils.files.fopen('foo.txt', 'wb') as self.fh:
            try:
                self.fh.writelines(['foo\n'])
            except TypeError:
                # This exception is expected on Python 3
                if not six.PY3:
                    raise
            else:
                # This write should work fine on Python 2
                if six.PY3:
                    raise Exception(
                        'Should not have been able to write a str to a '
                        'binary filehandle'
                    )
            if six.PY2:
                # Try with non-ascii unicode. Note that the write above
                # should work because the mocked filehandle should attempt
                # a .encode() to convert it to a str type. But when writing
                # a string with non-ascii unicode, it should raise a
                # UnicodeEncodeError, which is what we are testing here.
                try:
                    self.fh.writelines(self.questions_lines)
                except UnicodeEncodeError:
                    pass
                else:
                    raise Exception(
                        'Should not have been able to write non-ascii '
                        'unicode to a binary filehandle'
                    )
    # Test trying to write bytestrings to a non-binary filehandle
    with patch('salt.utils.files.fopen', mock_open()):
        with salt.utils.files.fopen('foo.txt', 'w') as self.fh:
            try:
                # Fixed: this test exercises .writelines(); the original
                # mistakenly called .write() with a list here.
                self.fh.writelines([b'foo\n'])
            except TypeError:
                # This exception is expected on Python 3
                if not six.PY3:
                    raise
            else:
                # This write should work fine on Python 2
                if six.PY3:
                    raise Exception(
                        'Should not have been able to write a bytestring '
                        'to a non-binary filehandle'
                    )
            if six.PY2:
                # Try with non-ascii unicode. Note that the write above
                # should work because the mocked filehandle should attempt
                # a .encode() to convert it to a str type. But when writing
                # a string with non-ascii unicode, it should raise a
                # UnicodeEncodeError, which is what we are testing here.
                try:
                    self.fh.writelines(self.questions_lines)
                except UnicodeEncodeError:
                    pass
                else:
                    # Fixed: this is the non-binary branch, but the failure
                    # message previously said 'binary filehandle'.
                    raise Exception(
                        'Should not have been able to write non-ascii '
                        'unicode to a non-binary filehandle'
                    )
def test_open(self):
    '''
    Test that opening a file for binary reading with string read_data
    fails, and that the same thing happens for non-binary filehandles and
    bytestring read_data.

    NOTE: This test should always pass on PY2 since MockOpen will normalize
    unicode types to str types.
    '''
    try:
        # Binary read with (default, string) read_data: PY3 must raise
        # TypeError; PY2 tolerates it because of str normalization.
        with patch('salt.utils.files.fopen', mock_open()):
            try:
                with salt.utils.files.fopen('foo.txt', 'rb') as self.fh:
                    self.fh.read()
            except TypeError:
                pass
            else:
                if six.PY3:
                    raise Exception(
                        'Should not have been able open for binary read with '
                        'non-bytestring read_data'
                    )
        # Non-binary read with bytestring read_data: mirror of the above.
        with patch('salt.utils.files.fopen', mock_open(read_data=b'')):
            try:
                with salt.utils.files.fopen('foo.txt', 'r') as self.fh2:
                    self.fh2.read()
            except TypeError:
                pass
            else:
                if six.PY3:
                    raise Exception(
                        'Should not have been able open for non-binary read '
                        'with bytestring read_data'
                    )
    finally:
        # Make sure we destroy the filehandles before the teardown, as they
        # will also try to read and this will generate another exception
        delattr(self, 'fh')
        delattr(self, 'fh2')

View file

@ -5,7 +5,6 @@
# Import Python libs
from __future__ import absolute_import, print_function, unicode_literals
import copy
import os
import shutil
import tempfile
@ -24,7 +23,7 @@ from tests.support.paths import BASE_FILES
# Import Salt libs
import salt.exceptions
import salt.state
from salt.utils.odict import OrderedDict, DefaultOrderedDict
from salt.utils.odict import OrderedDict
from salt.utils.decorators import state as statedecorators
try:
@ -173,341 +172,6 @@ class HighStateTestCase(TestCase, AdaptedConfigurationTestCaseMixin):
self.assertEqual(ret, [('somestuff', 'cmd')])
class TopFileMergeTestCase(TestCase, AdaptedConfigurationTestCaseMixin):
    '''
    Test various merge strategies for multiple tops files collected from
    multiple environments. Various options correspond to merge strategies
    which can be set by the user with the top_file_merging_strategy config
    option.

    NOTE(review): the expected results are built with DefaultOrderedDict;
    confirm this name is still imported at module level (the visible import
    hunk reduces the odict import to OrderedDict only).
    '''
    def setUp(self):
        '''
        Create multiple top files for use in each test. Envs within self.tops
        should be defined in the same order as this ordering will affect
        ordering in merge_tops. The envs in each top file are defined in the
        same order as self.env_order. This is no accident; it was done this way
        in order to produce the proper deterministic results to match the
        tests. Changing anything created in this func will affect the tests, as
        they would affect ordering in states in real life. So, don't change any
        of this unless you know what you're doing. If a test is failing, it is
        likely due to incorrect logic in merge_tops.
        '''
        self.env_order = ['base', 'foo', 'bar', 'baz']
        self.addCleanup(delattr, self, 'env_order')
        # Each entry maps a saltenv to the parsed top file for that saltenv;
        # the top file's structure is {env: {target: [state, ...]}}.
        self.tops = {
            'base': OrderedDict([
                ('base', OrderedDict([('*', ['base_base'])])),
                ('foo', OrderedDict([('*', ['base_foo'])])),
                ('bar', OrderedDict([('*', ['base_bar'])])),
                ('baz', OrderedDict([('*', ['base_baz'])])),
            ]),
            'foo': OrderedDict([
                ('base', OrderedDict([('*', ['foo_base'])])),
                ('foo', OrderedDict([('*', ['foo_foo'])])),
                ('bar', OrderedDict([('*', ['foo_bar'])])),
                ('baz', OrderedDict([('*', ['foo_baz'])])),
            ]),
            'bar': OrderedDict([
                ('base', OrderedDict([('*', ['bar_base'])])),
                ('foo', OrderedDict([('*', ['bar_foo'])])),
                ('bar', OrderedDict([('*', ['bar_bar'])])),
                ('baz', OrderedDict([('*', ['bar_baz'])])),
            ]),
            # Empty environment
            'baz': OrderedDict()
        }
        self.addCleanup(delattr, self, 'tops')
        # Version without the other envs defined in the base top file
        self.tops_limited_base = copy.deepcopy(self.tops)
        self.tops_limited_base['base'] = OrderedDict([
            ('base', OrderedDict([('*', ['base_base'])])),
        ])
        self.addCleanup(delattr, self, 'tops_limited_base')

    def highstate(self, **opts):
        # Build a HighState instance backed by a throwaway root_dir under
        # integration.TMP; keyword arguments override the generated minion
        # config (e.g. top_file_merging_strategy, default_top, env_order).
        root_dir = tempfile.mkdtemp(dir=integration.TMP)
        state_tree_dir = os.path.join(root_dir, 'state_tree')
        cache_dir = os.path.join(root_dir, 'cachedir')
        overrides = {}
        overrides['root_dir'] = root_dir
        overrides['state_events'] = False
        overrides['id'] = 'match'
        overrides['file_client'] = 'local'
        overrides['file_roots'] = dict(base=[state_tree_dir])
        overrides['cachedir'] = cache_dir
        overrides['test'] = False
        overrides['default_top'] = 'base'
        overrides.update(opts)
        return salt.state.HighState(self.get_temp_config('minion', **overrides))

    def get_tops(self, tops=None, env_order=None, state_top_saltenv=None):
        '''
        A test helper to emulate salt.state.HighState.get_tops() but just to
        construct an appropriate data structure for top files from multiple
        environments
        '''
        if tops is None:
            tops = self.tops
        if state_top_saltenv:
            append_order = [state_top_saltenv]
        elif env_order:
            append_order = env_order
        else:
            append_order = self.env_order
        ret = DefaultOrderedDict(list)
        for env in append_order:
            item = tops[env]
            if env_order:
                # NOTE(review): this mutates the passed-in tops mapping
                # (item is a reference into it); setUp recreates self.tops
                # for each test, so cross-test leakage is avoided.
                for remove in [x for x in self.env_order if x not in env_order]:
                    # Remove this env from the tops since this env is not
                    # part of env_order.
                    item.pop(remove)
            ret[env].append(tops[env])
        return ret

    def test_merge_tops_merge(self):
        '''
        Test the default merge strategy for top files, in an instance where the
        base top file contains sections for all envs and the other envs' top
        files are therefore ignored.
        '''
        merged_tops = self.highstate().merge_tops(self.get_tops())
        expected_merge = DefaultOrderedDict(OrderedDict)
        for env in self.env_order:
            expected_merge[env]['*'] = ['base_{0}'.format(env)]
        self.assertEqual(merged_tops, expected_merge)

    def test_merge_tops_merge_limited_base(self):
        '''
        Test the default merge strategy for top files when the base environment
        only defines states for itself.
        '''
        tops = self.get_tops(tops=self.tops_limited_base)
        merged_tops = self.highstate().merge_tops(tops)
        # No baz in the expected results because baz has no top file
        expected_merge = DefaultOrderedDict(OrderedDict)
        for env in self.env_order[:-1]:
            expected_merge[env]['*'] = ['_'.join((env, env))]
        self.assertEqual(merged_tops, expected_merge)

    def test_merge_tops_merge_state_top_saltenv_base(self):
        '''
        Test the 'state_top_saltenv' parameter to load states exclusively from
        the 'base' saltenv, with the default merging strategy. This should
        result in all states from the 'base' top file being in the merged
        result.
        '''
        env = 'base'
        tops = self.get_tops(state_top_saltenv=env)
        merged_tops = self.highstate().merge_tops(tops)
        expected_merge = DefaultOrderedDict(OrderedDict)
        for env2 in self.env_order:
            expected_merge[env2]['*'] = ['_'.join((env, env2))]
        self.assertEqual(merged_tops, expected_merge)

    def test_merge_tops_merge_state_top_saltenv_foo(self):
        '''
        Test the 'state_top_saltenv' parameter to load states exclusively from
        the 'foo' saltenv, with the default merging strategy. This should
        result in just the 'foo' environment's states from the 'foo' top file
        being in the merged result.
        '''
        env = 'foo'
        tops = self.get_tops(state_top_saltenv=env)
        merged_tops = self.highstate().merge_tops(tops)
        expected_merge = DefaultOrderedDict(OrderedDict)
        expected_merge[env]['*'] = ['_'.join((env, env))]
        self.assertEqual(merged_tops, expected_merge)

    def test_merge_tops_merge_all(self):
        '''
        Test the merge_all strategy
        '''
        tops = self.get_tops()
        merged_tops = self.highstate(
            top_file_merging_strategy='merge_all').merge_tops(tops)
        expected_merge = DefaultOrderedDict(OrderedDict)
        for env in self.env_order:
            states = []
            for top_env in self.env_order:
                # NOTE(review): this guard skips the empty 'baz' top file.
                # Checking 'env in tops[top_env][0]' would arguably express
                # the intent more directly; with this fixture data both
                # conditions are equivalent — confirm before changing.
                if top_env in tops[top_env][0]:
                    states.extend(tops[top_env][0][env]['*'])
            expected_merge[env]['*'] = states
        self.assertEqual(merged_tops, expected_merge)

    def test_merge_tops_merge_all_with_env_order(self):
        '''
        Test an altered env_order with the 'merge_all' strategy.
        '''
        env_order = ['bar', 'foo', 'base']
        tops = self.get_tops(env_order=env_order)
        merged_tops = self.highstate(
            top_file_merging_strategy='merge_all',
            env_order=env_order).merge_tops(tops)
        expected_merge = DefaultOrderedDict(OrderedDict)
        for env in [x for x in self.env_order if x in env_order]:
            states = []
            for top_env in env_order:
                states.extend(tops[top_env][0][env]['*'])
            expected_merge[env]['*'] = states
        self.assertEqual(merged_tops, expected_merge)

    def test_merge_tops_merge_all_state_top_saltenv_base(self):
        '''
        Test the 'state_top_saltenv' parameter to load states exclusively from
        the 'base' saltenv, with the 'merge_all' merging strategy. This should
        result in all states from the 'base' top file being in the merged
        result.
        '''
        env = 'base'
        tops = self.get_tops(state_top_saltenv=env)
        merged_tops = self.highstate(
            top_file_merging_strategy='merge_all').merge_tops(tops)
        expected_merge = DefaultOrderedDict(OrderedDict)
        for env2 in self.env_order:
            expected_merge[env2]['*'] = ['_'.join((env, env2))]
        self.assertEqual(merged_tops, expected_merge)

    def test_merge_tops_merge_all_state_top_saltenv_foo(self):
        '''
        Test the 'state_top_saltenv' parameter to load states exclusively from
        the 'foo' saltenv, with the 'merge_all' merging strategy. This should
        result in all the states from the 'foo' top file being in the merged
        result.
        '''
        env = 'foo'
        tops = self.get_tops(state_top_saltenv=env)
        merged_tops = self.highstate(
            top_file_merging_strategy='merge_all').merge_tops(tops)
        expected_merge = DefaultOrderedDict(OrderedDict)
        for env2 in self.env_order:
            expected_merge[env2]['*'] = ['_'.join((env, env2))]
        self.assertEqual(merged_tops, expected_merge)

    def test_merge_tops_same_with_default_top(self):
        '''
        Test to see if the top file that corresponds to the requested env is
        the one that is used by the state system. Also test the 'default_top'
        option for env 'baz', which has no top file and should pull its states
        from the 'foo' top file.
        '''
        merged_tops = self.highstate(
            top_file_merging_strategy='same',
            default_top='foo').merge_tops(self.get_tops())
        expected_merge = DefaultOrderedDict(OrderedDict)
        for env in self.env_order[:-1]:
            expected_merge[env]['*'] = ['_'.join((env, env))]
        # The 'baz' env should be using the foo top file because baz lacks a
        # top file, and default_top has been set to 'foo'
        expected_merge['baz']['*'] = ['foo_baz']
        self.assertEqual(merged_tops, expected_merge)

    def test_merge_tops_same_without_default_top(self):
        '''
        Test to see if the top file that corresponds to the requested env is
        the one that is used by the state system. default_top will not be set
        (falling back to 'base'), so the 'baz' environment should pull its
        states from the 'base' top file.
        '''
        merged_tops = self.highstate(
            top_file_merging_strategy='same').merge_tops(self.get_tops())
        expected_merge = DefaultOrderedDict(OrderedDict)
        for env in self.env_order[:-1]:
            expected_merge[env]['*'] = ['_'.join((env, env))]
        # The 'baz' env should be using the base top file because baz lacks a
        # top file, and default_top == 'base'
        expected_merge['baz']['*'] = ['base_baz']
        self.assertEqual(merged_tops, expected_merge)

    def test_merge_tops_same_limited_base_without_default_top(self):
        '''
        Test to see if the top file that corresponds to the requested env is
        the one that is used by the state system. default_top will not be set
        (falling back to 'base'), and since we are using a limited base top
        file, the 'baz' environment should not appear in the merged tops.
        '''
        tops = self.get_tops(tops=self.tops_limited_base)
        merged_tops = \
            self.highstate(top_file_merging_strategy='same').merge_tops(tops)
        expected_merge = DefaultOrderedDict(OrderedDict)
        for env in self.env_order[:-1]:
            expected_merge[env]['*'] = ['_'.join((env, env))]
        self.assertEqual(merged_tops, expected_merge)

    def test_merge_tops_same_state_top_saltenv_base(self):
        '''
        Test the 'state_top_saltenv' parameter to load states exclusively from
        the 'base' saltenv, with the 'same' merging strategy. This should
        result in just the 'base' environment's states from the 'base' top file
        being in the merged result.
        '''
        env = 'base'
        tops = self.get_tops(state_top_saltenv=env)
        merged_tops = self.highstate(
            top_file_merging_strategy='same').merge_tops(tops)
        expected_merge = DefaultOrderedDict(OrderedDict)
        expected_merge[env]['*'] = ['_'.join((env, env))]
        self.assertEqual(merged_tops, expected_merge)

    def test_merge_tops_same_state_top_saltenv_foo(self):
        '''
        Test the 'state_top_saltenv' parameter to load states exclusively from
        the 'foo' saltenv, with the 'same' merging strategy. This should
        result in just the 'foo' environment's states from the 'foo' top file
        being in the merged result.
        '''
        env = 'foo'
        tops = self.get_tops(state_top_saltenv=env)
        merged_tops = self.highstate(
            top_file_merging_strategy='same').merge_tops(tops)
        expected_merge = DefaultOrderedDict(OrderedDict)
        expected_merge[env]['*'] = ['_'.join((env, env))]
        self.assertEqual(merged_tops, expected_merge)

    def test_merge_tops_same_state_top_saltenv_baz(self):
        '''
        Test the 'state_top_saltenv' parameter to load states exclusively from
        the 'baz' saltenv, with the 'same' merging strategy. This should
        result in an empty dictionary since this environment has no top file.
        '''
        tops = self.get_tops(state_top_saltenv='baz')
        merged_tops = self.highstate(
            top_file_merging_strategy='same').merge_tops(tops)
        expected_merge = DefaultOrderedDict(OrderedDict)
        self.assertEqual(merged_tops, expected_merge)
@skipIf(NO_MOCK, NO_MOCK_REASON)
@skipIf(pytest is None, 'PyTest is missing')
class StateReturnsTestCase(TestCase):

View file

@ -164,7 +164,7 @@ class NetworkTestCase(TestCase):
## ccc
127.0.0.1 localhost thisismyhostname # 本机
''')
fopen_mock = mock_open(read_data=content, match='/etc/hosts')
fopen_mock = mock_open(read_data={'/etc/hosts': content})
with patch('salt.utils.files.fopen', fopen_mock):
assert 'thisismyhostname' in network._generate_minion_id()

View file

@ -1,7 +1,12 @@
[tox]
envlist = py27,py34,py35,py36
envlist = py27,py3
[testenv]
deps = -r{toxinidir}/requirements/tests.txt
commands = pytest --rootdir {toxinidir} {posargs:--no-print-logs --run-destructive}
commands = pytest --rootdir {toxinidir} {posargs}
passenv = LANG HOME
[pytest]
addopts = --log-file /tmp/salt-runtests.log --no-print-logs --ssh-tests -ra -sv
testpaths = tests
norecursedirs = tests/kitchen