Mirror of https://github.com/saltstack/salt.git (synced 2025-04-17 10:10:20 +00:00)

Merge branch '2017.7' into 'develop'

Conflicts:
- .gitignore
- salt/modules/win_lgpo.py
- salt/utils/__init__.py
- tests/integration/spm/test_build.py
- tests/unit/test_pydsl.py

Commit 2c052e34b3
21 changed files with 543 additions and 84 deletions
.kitchen.yml (new file, 187 lines)
@@ -0,0 +1,187 @@
---
<% vagrant = system('which vagrant 2>/dev/null >/dev/null') %>
<% version = '2017.7.2' %>
<% platformsfile = ENV['SALT_KITCHEN_PLATFORMS'] || '.kitchen/platforms.yml' %>
<% driverfile = ENV['SALT_KITCHEN_DRIVER'] || '.kitchen/driver.yml' %>

<% if File.exists?(driverfile) %>
<%= ERB.new(File.read(driverfile)).result %>
<% else %>
driver:
  name: docker
  use_sudo: false
  privileged: true
  username: root
  volume:
    - /var/run/docker.sock:/docker.sock
  cap_add:
    - sys_admin
  disable_upstart: false
  provision_command:
    - echo 'L /run/docker.sock - - - - /docker.sock' > /etc/tmpfiles.d/docker.conf
<% end %>

sudo: false
provisioner:
  name: salt_solo
  salt_install: bootstrap
  salt_version: latest
  salt_bootstrap_url: https://bootstrap.saltstack.com
  salt_bootstrap_options: -X stable <%= version %>
  log_level: info
  require_chef: false
  remote_states:
    name: git://github.com/gtmanfred/salt-jenkins.git
    branch: 2017.7
    repo: git
    testingdir: /testing
  salt_copy_filter:
    - .bundle
    - .git
    - .gitignore
    - .kitchen
    - .kitchen.yml
    - Gemfile
    - Gemfile.lock
    - README.rst
    - .travis.yml
  state_top:
    base:
      "*":
        - git.salt
        - kitchen
<% if File.exists?(platformsfile) %>
<%= ERB.new(File.read(platformsfile)).result %>
<% else %>
platforms:
  - name: fedora
    driver_config:
      image: fedora:latest
      run_command: /usr/lib/systemd/systemd
    provisioner:
      salt_bootstrap_options: -X git v<%= version %> >/dev/null
  - name: centos-7
    driver_config:
      run_command: /usr/lib/systemd/systemd
  - name: centos-6
    driver_config:
      run_command: /sbin/init
      provision_command:
        - yum install -y upstart
    provisioner:
      salt_bootstrap_options: -P -y -x python2.7 -X git v<%= version %> >/dev/null
  - name: ubuntu-rolling
    driver_config:
      image: ubuntu:rolling
      run_command: /lib/systemd/systemd
    provisioner:
      salt_bootstrap_url: https://raw.githubusercontent.com/saltstack/salt-bootstrap/develop/bootstrap-salt.sh
  - name: ubuntu-16.04
    driver_config:
      run_command: /lib/systemd/systemd
  - name: ubuntu-14.04
    driver_config:
      run_command: /sbin/init
      provision_command:
        - rm -f /sbin/initctl
        - dpkg-divert --local --rename --remove /sbin/initctl
  - name: debian-8
    driver_config:
      run_command: /lib/systemd/systemd
      provision_command:
        - apt-get install -y dbus
        - echo 'L /run/docker.sock - - - - /docker.sock' > /etc/tmpfiles.d/docker.conf
  - name: debian-9
    driver_config:
      run_command: /lib/systemd/systemd
  - name: arch
    driver_config:
      image: base/archlinux
      run_command: /usr/lib/systemd/systemd
      provision_command:
        - pacman -Syu --noconfirm systemd
        - systemctl enable sshd
        - echo 'L /run/docker.sock - - - - /docker.sock' > /etc/tmpfiles.d/docker.conf
    provisioner:
      salt_bootstrap_options: -X git v<%= version %> >/dev/null
  - name: opensuse
    driver_config:
      run_command: /usr/lib/systemd/systemd
      provision_command:
        - systemctl enable sshd.service
        - echo 'L /run/docker.sock - - - - /docker.sock' > /etc/tmpfiles.d/docker.conf
    provisioner:
      salt_bootstrap_options: -X git v<%= version %> >/dev/null
<% if vagrant != false %>
  - name: windows-2012r2
    driver:
      box: mwrock/Windows2012R2
      communicator: winrm
      name: vagrant
      gui: true
      username: administrator
      password: Pass@word1
    provisioner:
      init_environment: |
        Clear-Host
        $AddedLocation ="c:\salt"
        $Reg = "Registry::HKLM\System\CurrentControlSet\Control\Session Manager\Environment"
        $OldPath = (Get-ItemProperty -Path "$Reg" -Name PATH).Path
        $NewPath= $OldPath + ';' + $AddedLocation
        Set-ItemProperty -Path "$Reg" -Name PATH -Value $NewPath
      salt_bootstrap_url: https://raw.githubusercontent.com/saltstack/salt-bootstrap/develop/bootstrap-salt.ps1
      salt_bootstrap_options: ''
  - name: windows-2016
    driver:
      box: mwrock/Windows2016
      communicator: winrm
      name: vagrant
      username: Vagrant
      password: vagrant
      gui: true
    provisioner:
      init_environment: |
        Clear-Host
        $AddedLocation ="c:\salt;c:\salt\bin\Scripts"
        $Reg = "Registry::HKLM\System\CurrentControlSet\Control\Session Manager\Environment"
        $OldPath = (Get-ItemProperty -Path "$Reg" -Name PATH).Path
        $NewPath= $OldPath + ';' + $AddedLocation
        Set-ItemProperty -Path "$Reg" -Name PATH -Value $NewPath
      salt_bootstrap_url: https://raw.githubusercontent.com/saltstack/salt-bootstrap/develop/bootstrap-salt.ps1
      salt_bootstrap_options: ''
<% end %>
<% end %>
suites:
  - name: py2
    provisioner:
      pillars:
        top.sls:
          base:
            "*":
              - jenkins
        jenkins.sls:
          testing_dir: /tmp/kitchen/testing
          clone_repo: false
          salttesting_namespec: salttesting==2017.6.1
  - name: py3
    provisioner:
      pillars:
        top.sls:
          base:
            "*":
              - jenkins
        jenkins.sls:
          testing_dir: /tmp/kitchen/testing
          clone_repo: false
          py3: true
          salttesting_namespec: salttesting==2017.6.1
verifier:
  name: shell
  remote_exec: true
  sudo: false
  live_stream: {}
<% if ENV['TESTOPTS'].nil? %>
  command: '$(kitchen) /tmp/kitchen/testing/tests/runtests.py --run-destructive --sysinfo --transport=zeromq --output-columns=80 --ssh --coverage-xml=/tmp/coverage.xml --xml=/tmp/xml-unittests-output'
<% else %>
  command: '$(kitchen) /tmp/kitchen/testing/tests/runtests.py --run-destructive --output-columns 80 <%= ENV["TESTOPTS"] %>'
<% end %>
Gemfile (new file, 23 lines)
@@ -0,0 +1,23 @@
# This file is only used for running the test suite with kitchen-salt.

source "https://rubygems.org"

gem "test-kitchen"
gem "kitchen-salt", :git => 'https://github.com/saltstack/kitchen-salt.git'
gem 'git'

group :docker do
  gem 'kitchen-docker', :git => 'https://github.com/test-kitchen/kitchen-docker.git'
end

group :opennebula do
  gem 'kitchen-opennebula', :git => 'https://github.com/gtmanfred/kitchen-opennebula.git'
  gem 'xmlrpc'
end

group :windows do
  gem 'vagrant-wrapper'
  gem 'kitchen-vagrant'
  gem 'winrm', '~>2.0'
  gem 'winrm-fs', '~>1.0'
end
@@ -95,19 +95,19 @@ globally available or passed in through function arguments, file data, etc.
 Mocking Loader Modules
 ----------------------

-Salt loader modules use a series of globally available dunder variables,
-``__salt__``, ``__opts__``, ``__pillar__``, etc. To facilitate testing these
-modules a mixin class was created, ``LoaderModuleMockMixin`` which can be found
-in ``tests/support/mixins.py``. The reason for the exitance of this class is
-because, historycally, and because it was easier, one would add these dunder
-variables directly on the imported module. This however, introduces unexpected
-behavior when running the full test suite since those attributes would not be
-removed once we were done testing the module and would therefor leak to other
-modules being tested with unpredictable results. This is the kind of work that
-should be defered to mock, and that's exactly what this mixin class does.
+Salt loader modules use a series of globally available dunder variables,
+``__salt__``, ``__opts__``, ``__pillar__``, etc. To facilitate testing these
+modules a mixin class was created, ``LoaderModuleMockMixin`` which can be found
+in ``tests/support/mixins.py``. The reason for the existance of this class is
+because historiclly and because it was easier, one would add these dunder
+variables directly on the imported module. This however, introduces unexpected
+behavior when running the full test suite since those attributes would not be
+removed once we were done testing the module and would therefore leak to other
+modules being tested with unpredictable results. This is the kind of work that
+should be deferred to mock, and that's exactly what this mixin class does.

-As an example, if one needs to specify some options which should be available
-to the module being tests one should do:
+As an example, if one needs to specify some options which should be available
+to the module being tested one should do:

 .. code-block:: python

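A minimal sketch of a test that uses the mixin this documentation describes, assuming a hypothetical execution module ``my_module`` (the module name and the options shown are illustrative and not part of this commit):

.. code-block:: python

    # Illustrative sketch only: ``my_module`` is a hypothetical execution module.
    import salt.modules.my_module as my_module

    from tests.support.mixins import LoaderModuleMockMixin
    from tests.support.unit import TestCase


    class MyModuleTestCase(TestCase, LoaderModuleMockMixin):
        def setup_loader_modules(self):
            # Map the module under test to the dunder globals that should be
            # mocked for the duration of each test.
            return {my_module: {'__opts__': {'test': False}}}

        def test_reads_opts(self):
            # ``__opts__`` is injected by the mixin while the test runs and
            # removed afterwards, so nothing leaks into other test modules.
            self.assertFalse(my_module.__opts__['test'])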
@@ -122,7 +122,7 @@ to the module being tests one should do:
         }
     }

-Consider this more extensive example from
+Consider this more extensive example from
 ``tests/unit/modules/test_libcloud_dns.py``:

 .. code-block:: python

@@ -173,10 +173,10 @@ Consider this more extensive example from
         return {libcloud_dns: module_globals}


-What happens on the above example is that, we mock a call to
-`__salt__['config.option']` to return the configuration needed for the
-execution of the tests. Additionally, if the ``libcloud`` library is not
-available, since that's not actually part of whats being tested, we mocked that
+What happens in the above example is we mock a call to
+`__salt__['config.option']` to return the configuration needed for the
+execution of the tests. Additionally, if the ``libcloud`` library is not
+available, since that's not actually part of what's being tested, we mocked that
 import by patching ``sys.modules`` when tests are running.


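A rough sketch of the pattern that paragraph describes, mocking ``config.option`` and faking the ``libcloud`` import; this is a simplified illustration with made-up profile data, not the verbatim contents of ``tests/unit/modules/test_libcloud_dns.py``:

.. code-block:: python

    # Simplified illustration of the mocking pattern described above.
    import salt.modules.libcloud_dns as libcloud_dns

    from tests.support.mixins import LoaderModuleMockMixin
    from tests.support.mock import MagicMock
    from tests.support.unit import TestCase


    class LibcloudDnsTestCase(TestCase, LoaderModuleMockMixin):
        def setup_loader_modules(self):
            module_globals = {
                # __salt__['config.option'] returns the DNS profile the module
                # asks for, so no real master or minion config is required.
                '__salt__': {
                    'config.option': MagicMock(return_value={
                        'test': {'driver': 'cloudflare', 'key': 'ABCDEFG'},
                    }),
                },
                # If libcloud is not installed, fake the import by patching
                # sys.modules while the tests run.
                'sys.modules': {
                    'libcloud.dns.types': MagicMock(),
                    'libcloud.dns.providers': MagicMock(),
                },
            }
            return {libcloud_dns: module_globals}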
@@ -245,7 +245,7 @@ To understand how one might integrate Mock into writing a unit test for Salt,
 let's imagine a scenario in which we're testing an execution module that's
 designed to operate on a database. Furthermore, let's imagine two separate
 methods, here presented in pseduo-code in an imaginary execution module called
-'db.py.
+'db.py'.

 .. code-block:: python

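Purely as an illustration of that scenario, a Mock-based test for such a toy module could look roughly like the following; the module path, the ``_get_db_connection`` helper, and the ``create_user`` function are hypothetical stand-ins, not the documentation's own example:

.. code-block:: python

    # Hypothetical stand-ins for the 'db.py' scenario described above.
    from tests.support.mock import MagicMock, patch
    from tests.support.unit import TestCase

    import salt.modules.db as db  # hypothetical execution module


    class DbTestCase(TestCase):
        def test_create_user(self):
            # Replace the module's connection factory with a Mock so the
            # test never touches a real database.
            conn = MagicMock()
            with patch.object(db, '_get_db_connection', return_value=conn):
                db.create_user('fred')
            conn.insert.assert_called_once_with('users', {'name': 'fred'})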
@@ -513,16 +513,16 @@ def tar(options, tarfile, sources=None, dest=None,

     .. code-block:: bash

-        salt '*' archive.tar -cjvf /tmp/salt.tar.bz2 {{grains.saltpath}} template=jinja
+        salt '*' archive.tar cjvf /tmp/salt.tar.bz2 {{grains.saltpath}} template=jinja

     CLI Examples:

     .. code-block:: bash

         # Create a tarfile
-        salt '*' archive.tar -cjvf /tmp/tarfile.tar.bz2 /tmp/file_1,/tmp/file_2
+        salt '*' archive.tar cjvf /tmp/tarfile.tar.bz2 /tmp/file_1,/tmp/file_2
         # Create a tarfile using globbing (2017.7.0 and later)
-        salt '*' archive.tar -cjvf /tmp/tarfile.tar.bz2 '/tmp/file_*'
+        salt '*' archive.tar cjvf /tmp/tarfile.tar.bz2 '/tmp/file_*'
         # Unpack a tarfile
         salt '*' archive.tar xf foo.tar dest=/target/directory
     '''
@@ -39,6 +39,7 @@ Current known limitations
 '''
 # Import Python libs
 from __future__ import absolute_import
+from __future__ import unicode_literals
 import io
 import os
 import logging

@@ -4081,7 +4082,7 @@ def _write_regpol_data(data_to_write,
         gpt_ini_data = ''
         if os.path.exists(gpt_ini_path):
             with salt.utils.files.fopen(gpt_ini_path, 'rb') as gpt_file:
-                gpt_ini_data = gpt_file.read()
+                gpt_ini_data = salt.utils.to_str(gpt_file.read())
         if not _regexSearchRegPolData(r'\[General\]\r\n', gpt_ini_data):
             gpt_ini_data = '[General]\r\n' + gpt_ini_data
         if _regexSearchRegPolData(r'{0}='.format(re.escape(gpt_extension)), gpt_ini_data):

@@ -4136,7 +4137,7 @@ def _write_regpol_data(data_to_write,
                                        gpt_ini_data[general_location.end():])
         if gpt_ini_data:
             with salt.utils.files.fopen(gpt_ini_path, 'wb') as gpt_file:
-                gpt_file.write(gpt_ini_data)
+                gpt_file.write(salt.utils.to_bytes(gpt_ini_data))
     except Exception as e:
         msg = 'An error occurred attempting to write to {0}, the exception was {1}'.format(
             gpt_ini_path, e)

@@ -5374,7 +5375,7 @@ def set_(computer_policy=None, user_policy=None,
                                 _regedits[regedit]['policy']['Registry']['Type'])
                         else:
                             _ret = __salt__['reg.delete_value'](
-                                    _regedits[regedit]['polic']['Registry']['Hive'],
+                                    _regedits[regedit]['policy']['Registry']['Hive'],
                                     _regedits[regedit]['policy']['Registry']['Path'],
                                     _regedits[regedit]['policy']['Registry']['Value'])
                         if not _ret:
@@ -448,8 +448,9 @@ def stop(name):
     try:
         win32serviceutil.StopService(name)
     except pywintypes.error as exc:
-        raise CommandExecutionError(
-            'Failed To Stop {0}: {1}'.format(name, exc[2]))
+        if exc[0] != 1062:
+            raise CommandExecutionError(
+                'Failed To Stop {0}: {1}'.format(name, exc[2]))

     attempts = 0
     while info(name)['Status'] in ['Running', 'Stop Pending'] \
@@ -23,7 +23,14 @@ def output(ret, bar, **kwargs):  # pylint: disable=unused-argument
     Update the progress bar
     '''
     if 'return_count' in ret:
-        bar.update(ret['return_count'])
+        val = ret['return_count']
+        # Avoid to fail if targets are behind a syndic. In this case actual return count will be
+        # higher than targeted by MoM itself.
+        # TODO: implement a way to get the proper target minions count and remove this workaround.
+        # Details are in #44239.
+        if val > bar.maxval:
+            bar.maxval = val
+        bar.update(val)
     return ''


@@ -2886,6 +2886,7 @@ def directory(name,
     if __opts__['test']:
         ret['result'] = presult
         ret['comment'] = pcomment
+        ret['changes'] = ret['pchanges']
         return ret

     if not os.path.isdir(name):
@@ -320,10 +320,14 @@ def fopen(*args, **kwargs):
     if len(args) > 1:
         args = list(args)
-        if 'b' not in args[1]:
-            args[1] += 'b'
-    elif kwargs.get('mode', None):
+        args[1] = args[1].replace('t', 'b')
+        if 'b' not in args[1]:
+            args[1] += 'b'
+    elif kwargs.get('mode'):
-        if 'b' not in kwargs['mode']:
-            kwargs['mode'] += 'b'
+        kwargs['mode'] = kwargs['mode'].replace('t', 'b')
+        if 'b' not in kwargs['mode']:
+            kwargs['mode'] += 'b'
     else:
         # the default is to read
         kwargs['mode'] = 'rb'
@@ -537,7 +537,7 @@ class GitProvider(object):
             return root_dir
         log.error(
             'Root path \'%s\' not present in %s remote \'%s\', '
-            'skipping.', self.root, self.role, self.id
+            'skipping.', self.root(), self.role, self.id
         )
         return None

@@ -803,7 +803,10 @@ class TestDaemon(object):

         # Set up config options that require internal data
         master_opts['pillar_roots'] = syndic_master_opts['pillar_roots'] = {
-            'base': [os.path.join(FILES, 'pillar', 'base')]
+            'base': [
+                RUNTIME_VARS.TMP_PILLAR_TREE,
+                os.path.join(FILES, 'pillar', 'base'),
+            ]
         }
         master_opts['file_roots'] = syndic_master_opts['file_roots'] = {
             'base': [

@@ -979,6 +982,7 @@ class TestDaemon(object):
                 sub_minion_opts['sock_dir'],
                 minion_opts['sock_dir'],
                 RUNTIME_VARS.TMP_STATE_TREE,
+                RUNTIME_VARS.TMP_PILLAR_TREE,
                 RUNTIME_VARS.TMP_PRODENV_STATE_TREE,
                 TMP,
             ],

@@ -1090,7 +1094,8 @@ class TestDaemon(object):
             os.chmod(path, stat.S_IRWXU)
             func(path)

-        for dirname in (TMP, RUNTIME_VARS.TMP_STATE_TREE, RUNTIME_VARS.TMP_PRODENV_STATE_TREE):
+        for dirname in (TMP, RUNTIME_VARS.TMP_STATE_TREE,
+                        RUNTIME_VARS.TMP_PILLAR_TREE, RUNTIME_VARS.TMP_PRODENV_STATE_TREE):
             if os.path.isdir(dirname):
                 shutil.rmtree(dirname, onerror=remove_readonly)

@@ -5,10 +5,16 @@ Integration tests for the saltutil module.

 # Import Python libs
 from __future__ import absolute_import
+import os
 import time
+import textwrap

 # Import Salt Testing libs
 from tests.support.case import ModuleCase
+from tests.support.paths import TMP_PILLAR_TREE
+
+# Import Salt Libs
+import salt.utils


 class SaltUtilModuleTest(ModuleCase):

@@ -153,3 +159,38 @@ class SaltUtilSyncModuleTest(ModuleCase):
         ret = self.run_function('saltutil.sync_all', extmod_whitelist={'modules': ['runtests_decorators']},
                                 extmod_blacklist={'modules': ['runtests_decorators']})
         self.assertEqual(ret, expected_return)
+
+
+class SaltUtilSyncPillarTest(ModuleCase):
+    '''
+    Testcase for the saltutil sync pillar module
+    '''
+
+    def test_pillar_refresh(self):
+        '''
+        test pillar refresh module
+        '''
+        pillar_key = 'itworked'
+
+        pre_pillar = self.run_function('pillar.raw')
+        self.assertNotIn(pillar_key, pre_pillar.get(pillar_key, 'didnotwork'))
+
+        with salt.utils.fopen(os.path.join(TMP_PILLAR_TREE, 'add_pillar.sls'), 'w') as fp:
+            fp.write('{0}: itworked'.format(pillar_key))
+
+        with salt.utils.fopen(os.path.join(TMP_PILLAR_TREE, 'top.sls'), 'w') as fp:
+            fp.write(textwrap.dedent('''\
+                base:
+                  '*':
+                    - add_pillar
+                '''))
+
+        pillar_refresh = self.run_function('saltutil.refresh_pillar')
+        wait = self.run_function('test.sleep', [1])
+
+        post_pillar = self.run_function('pillar.raw')
+        self.assertIn(pillar_key, post_pillar.get(pillar_key, 'didnotwork'))
+
+    def tearDown(self):
+        for filename in os.listdir(TMP_PILLAR_TREE):
+            os.remove(os.path.join(TMP_PILLAR_TREE, filename))
tests/integration/modules/test_service.py (new file, 50 lines)
@@ -0,0 +1,50 @@
# -*- coding: utf-8 -*-

# Import python libs
from __future__ import absolute_import

# Import Salt Testing libs
from tests.support.case import ModuleCase
from tests.support.helpers import destructiveTest

# Import Salt libs
import salt.utils


@destructiveTest
class ServiceModuleTest(ModuleCase):
    '''
    Module testing the service module
    '''
    def setUp(self):
        self.service_name = 'cron'
        cmd_name = 'crontab'
        os_family = self.run_function('grains.get', ['os_family'])
        if os_family == 'RedHat':
            self.service_name = 'crond'
        elif os_family == 'Arch':
            self.service_name = 'systemd-journald'
            cmd_name = 'systemctl'

        if salt.utils.which(cmd_name) is None:
            self.skipTest('{0} is not installed'.format(cmd_name))

    def test_service_status_running(self):
        '''
        test service.status execution module
        when service is running
        '''
        start_service = self.run_function('service.start', [self.service_name])

        check_service = self.run_function('service.status', [self.service_name])
        self.assertTrue(check_service)

    def test_service_status_dead(self):
        '''
        test service.status execution module
        when service is dead
        '''
        stop_service = self.run_function('service.stop', [self.service_name])

        check_service = self.run_function('service.status', [self.service_name])
        self.assertFalse(check_service)
@@ -6,49 +6,24 @@ Tests for the spm build utility
 from __future__ import absolute_import
 import os
 import shutil
-import textwrap

-# Import Salt libs
-import salt.utils
-
 # Import Salt Testing libs
-from tests.support.case import SPMCase
+from tests.support.case import SPMCase, ModuleCase
 from tests.support.helpers import destructiveTest

+# Import Salt Libraries
+import salt.utils.files
+from tests.support.unit import skipIf
+

 @destructiveTest
-class SPMBuildTest(SPMCase):
+class SPMBuildTest(SPMCase, ModuleCase):
     '''
     Validate the spm build command
     '''
     def setUp(self):
         self.config = self._spm_config()
-        self.formula_dir = os.path.join(' '.join(self.config['file_roots']['base']), 'formulas')
-        self.formula_sls_dir = os.path.join(self.formula_dir, 'apache')
-        self.formula_sls = os.path.join(self.formula_sls_dir, 'apache.sls')
-        self.formula_file = os.path.join(self.formula_dir, 'FORMULA')
-
-        dirs = [self.formula_dir, self.formula_sls_dir]
-        for formula_dir in dirs:
-            os.makedirs(formula_dir)
-
-        with salt.utils.files.fopen(self.formula_sls, 'w') as fp:
-            fp.write(textwrap.dedent('''\
-                install-apache:
-                  pkg.installed:
-                    - name: apache2
-                '''))
-
-        with salt.utils.files.fopen(self.formula_file, 'w') as fp:
-            fp.write(textwrap.dedent('''\
-                name: apache
-                os: RedHat, Debian, Ubuntu, Suse, FreeBSD
-                os_family: RedHat, Debian, Suse, FreeBSD
-                version: 201506
-                release: 2
-                summary: Formula for installing Apache
-                description: Formula for installing Apache
-                '''))
+        self._spm_build_files(self.config)

     def test_spm_build(self):
         '''

@@ -61,5 +36,50 @@ class SPMBuildTest(SPMCase):
         # Make sure formula path dir is created
         self.assertTrue(os.path.isdir(self.config['formula_path']))

+    @skipIf(salt.utils.which('fallocate') is None, 'fallocate not installed')
+    def test_spm_build_big_file(self):
+        '''
+        test spm build with a big file
+        '''
+        # check to make sure there is enough space to run this test
+        check_space = self.run_function('status.diskusage', ['/'])
+        space = check_space['/']['available']
+        if space < 2000000:
+            self.skipTest('Not enough space on host to run this test')
+
+        big_file = self.run_function('cmd.run',
+                                     ['fallocate -l 1G {0}'.format(os.path.join(self.formula_sls_dir,
+                                                                                'bigfile.txt'))])
+        build_spm = self.run_spm('build', self.config, self.formula_dir)
+        spm_file = os.path.join(self.config['spm_build_dir'], 'apache-201506-2.spm')
+        install = self.run_spm('install', self.config, spm_file)
+
+        get_files = self.run_spm('files', self.config, 'apache')
+
+        files = ['apache.sls', 'bigfile.txt']
+        for sls in files:
+            self.assertIn(sls, ' '.join(get_files))
+
+    def test_spm_build_exclude(self):
+        '''
+        test spm build while excluding directory
+        '''
+        git_dir = os.path.join(self.formula_sls_dir, '.git')
+        os.makedirs(git_dir)
+        files = ['donotbuild1', 'donotbuild2', 'donotbuild3']
+
+        for git_file in files:
+            with salt.utils.fopen(os.path.join(git_dir, git_file), 'w') as fp:
+                fp.write('Please do not include me in build')
+
+        build_spm = self.run_spm('build', self.config, self.formula_dir)
+        spm_file = os.path.join(self.config['spm_build_dir'], 'apache-201506-2.spm')
+        install = self.run_spm('install', self.config, spm_file)
+
+        get_files = self.run_spm('files', self.config, 'apache')
+
+        for git_file in files:
+            self.assertNotIn(git_file, ' '.join(get_files))
+
     def tearDown(self):
         shutil.rmtree(self._tmp_spm)
tests/integration/spm/test_remove.py (new file, 45 lines)
@@ -0,0 +1,45 @@
# -*- coding: utf-8 -*-
'''
Tests for the spm remove utility
'''
# Import python libs
from __future__ import absolute_import
import os
import shutil

# Import Salt Testing libs
from tests.support.case import SPMCase
from tests.support.helpers import destructiveTest


@destructiveTest
class SPMRemoveTest(SPMCase):
    '''
    Validate the spm remove command
    '''
    def setUp(self):
        self.config = self._spm_config()
        self._spm_build_files(self.config)

    def test_spm_remove(self):
        '''
        test spm remove from an inital repo install
        '''
        # first install apache package
        self._spm_create_update_repo(self.config)
        install = self.run_spm('install', self.config, 'apache')

        sls = os.path.join(self.config['formula_path'], 'apache', 'apache.sls')

        self.assertTrue(os.path.exists(sls))

        #now remove an make sure file is removed
        remove = self.run_spm('remove', self.config, 'apache')
        sls = os.path.join(self.config['formula_path'], 'apache', 'apache.sls')

        self.assertFalse(os.path.exists(sls))

        self.assertIn('... removing apache', remove)

    def tearDown(self):
        shutil.rmtree(self._tmp_spm)
@@ -627,7 +627,7 @@ class SPMCase(TestCase, AdaptedConfigurationTestCaseMixin):
             'spm_repos_config': os.path.join(self._tmp_spm, 'etc', 'spm.repos'),
             'spm_cache_dir': os.path.join(self._tmp_spm, 'cache'),
             'spm_build_dir': os.path.join(self._tmp_spm, 'build'),
-            'spm_build_exclude': ['.git'],
+            'spm_build_exclude': ['apache/.git'],
             'spm_db_provider': 'sqlite3',
             'spm_files_provider': 'local',
             'spm_db': os.path.join(self._tmp_spm, 'packages.db'),
tests/support/copyartifacts.py (new file, 68 lines)
@@ -0,0 +1,68 @@
# -*- coding: utf-8 -*-
'''
Script for copying back xml junit files from tests
'''
from __future__ import absolute_import, print_function
import argparse  # pylint: disable=minimum-python-version
import os
import paramiko
import subprocess
import yaml


class DownloadArtifacts(object):
    def __init__(self, instance, artifacts):
        self.instance = instance
        self.artifacts = artifacts
        self.client = self.setup_transport()

    def setup_transport(self):
        # pylint: disable=minimum-python-version
        config = yaml.load(subprocess.check_output(['bundle', 'exec', 'kitchen', 'diagnose', self.instance]))
        # pylint: enable=minimum-python-version
        state = config['instances'][self.instance]['state_file']
        tport = config['instances'][self.instance]['transport']
        transport = paramiko.Transport((
            state['hostname'],
            state.get('port', tport.get('port', 22))
        ))
        pkey = paramiko.rsakey.RSAKey(
            filename=state.get('ssh_key', tport.get('ssh_key', '~/.ssh/id_rsa'))
        )
        transport.connect(
            username=state.get('username', tport.get('username', 'root')),
            pkey=pkey
        )
        return paramiko.SFTPClient.from_transport(transport)

    def download(self):
        for remote, local in self.artifacts:
            if remote.endswith('/'):
                for fxml in self.client.listdir(remote):
                    self._do_download(os.path.join(remote, fxml), os.path.join(local, os.path.basename(fxml)))
            else:
                self._do_download(remote, os.path.join(local, os.path.basename(remote)))

    def _do_download(self, remote, local):
        print('Copying from {0} to {1}'.format(remote, local))
        self.client.get(remote, local)

if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Jenkins Artifact Download Helper')
    parser.add_argument(
        '--instance',
        required=True,
        action='store',
        help='Instance on Test Kitchen to pull from',
    )
    parser.add_argument(
        '--download-artifacts',
        dest='artifacts',
        nargs=2,
        action='append',
        metavar=('REMOTE_PATH', 'LOCAL_PATH'),
        help='Download remote artifacts',
    )
    args = parser.parse_args()
    downloader = DownloadArtifacts(args.instance, args.artifacts)
    downloader.download()
@@ -52,6 +52,7 @@ PYEXEC = 'python{0}.{1}'.format(*sys.version_info)
 MOCKBIN = os.path.join(INTEGRATION_TEST_DIR, 'mockbin')
 SCRIPT_DIR = os.path.join(CODE_DIR, 'scripts')
 TMP_STATE_TREE = os.path.join(SYS_TMP_DIR, 'salt-temp-state-tree')
+TMP_PILLAR_TREE = os.path.join(SYS_TMP_DIR, 'salt-temp-pillar-tree')
 TMP_PRODENV_STATE_TREE = os.path.join(SYS_TMP_DIR, 'salt-temp-prodenv-state-tree')
 TMP_CONF_DIR = os.path.join(TMP, 'config')
 TMP_SUB_MINION_CONF_DIR = os.path.join(TMP_CONF_DIR, 'sub-minion')
@@ -215,6 +215,7 @@ RUNTIME_VARS = RuntimeVars(
     TMP_SYNDIC_MINION_CONF_DIR=paths.TMP_SYNDIC_MINION_CONF_DIR,
     TMP_SCRIPT_DIR=paths.TMP_SCRIPT_DIR,
     TMP_STATE_TREE=paths.TMP_STATE_TREE,
+    TMP_PILLAR_TREE=paths.TMP_PILLAR_TREE,
     TMP_PRODENV_STATE_TREE=paths.TMP_PRODENV_STATE_TREE,
     RUNNING_TESTS_USER=RUNNING_TESTS_USER,
     RUNTIME_CONFIGS={}
@@ -814,7 +814,8 @@ class TestFileState(TestCase, LoaderModuleMockMixin):
                 ret.update({
                     'comment': comt,
                     'result': None,
-                    'pchanges': p_chg
+                    'pchanges': p_chg,
+                    'changes': {'/etc/grub.conf': {'directory': 'new'}}
                 })
                 self.assertDictEqual(filestate.directory(name,
                                                          user=user,

@@ -825,7 +826,7 @@ class TestFileState(TestCase, LoaderModuleMockMixin):
                 with patch.object(os.path, 'isdir', mock_f):
                     comt = ('No directory to create {0} in'
                             .format(name))
-                    ret.update({'comment': comt, 'result': False})
+                    ret.update({'comment': comt, 'result': False, 'changes': {}})
                     self.assertDictEqual(filestate.directory
                                          (name, user=user, group=group),
                                          ret)
@@ -309,21 +309,21 @@ class PyDSLRendererTestCase(CommonTestCaseBoilerplate):
                 - cwd: /
             .Y:
               cmd.run:
-                - name: echo Y >> {1}
+                - name: echo Y >> {0}
                 - cwd: /
             .Z:
               cmd.run:
-                - name: echo Z >> {2}
+                - name: echo Z >> {0}
                 - cwd: /
-            '''.format(output, output, output)))
+            '''.format(output.replace('\\', '/'))))
         write_to(os.path.join(dirpath, 'yyy.sls'), textwrap.dedent('''\
             #!pydsl|stateconf -ps

             __pydsl__.set(ordered=True)
             state('.D').cmd.run('echo D >> {0}', cwd='/')
-            state('.E').cmd.run('echo E >> {1}', cwd='/')
-            state('.F').cmd.run('echo F >> {2}', cwd='/')
-            '''.format(output, output, output)))
+            state('.E').cmd.run('echo E >> {0}', cwd='/')
+            state('.F').cmd.run('echo F >> {0}', cwd='/')
+            '''.format(output.replace('\\', '/'))))

         write_to(os.path.join(dirpath, 'aaa.sls'), textwrap.dedent('''\
             #!pydsl|stateconf -ps

@@ -339,9 +339,9 @@ class PyDSLRendererTestCase(CommonTestCaseBoilerplate):
             __pydsl__.set(ordered=True)

             state('.A').cmd.run('echo A >> {0}', cwd='/')
-            state('.B').cmd.run('echo B >> {1}', cwd='/')
-            state('.C').cmd.run('echo C >> {2}', cwd='/')
-            '''.format(output, output, output)))
+            state('.B').cmd.run('echo B >> {0}', cwd='/')
+            state('.C').cmd.run('echo C >> {0}', cwd='/')
+            '''.format(output.replace('\\', '/'))))

         self.state_highstate({'base': ['aaa']}, dirpath)
         with salt.utils.files.fopen(output, 'r') as f:

@@ -361,26 +361,29 @@ class PyDSLRendererTestCase(CommonTestCaseBoilerplate):
                 )
             )
         try:
+            # The Windows shell will include any spaces before the redirect
+            # in the text that is redirected.
+            # For example: echo hello > test.txt will contain "hello "
             write_to(os.path.join(dirpath, 'aaa.sls'), textwrap.dedent('''\
                 #!pydsl

                 __pydsl__.set(ordered=True)
                 A = state('A')
-                A.cmd.run('echo hehe > {0}/zzz.txt', cwd='/')
-                A.file.managed('{1}/yyy.txt', source='salt://zzz.txt')
+                A.cmd.run('echo hehe>{0}/zzz.txt', cwd='/')
+                A.file.managed('{0}/yyy.txt', source='salt://zzz.txt')
                 A()
                 A()

-                state().cmd.run('echo hoho >> {2}/yyy.txt', cwd='/')
+                state().cmd.run('echo hoho>>{0}/yyy.txt', cwd='/')

-                A.file.managed('{3}/xxx.txt', source='salt://zzz.txt')
+                A.file.managed('{0}/xxx.txt', source='salt://zzz.txt')
                 A()
-                '''.format(dirpath, dirpath, dirpath, dirpath)))
+                '''.format(dirpath.replace('\\', '/'))))
             self.state_highstate({'base': ['aaa']}, dirpath)
             with salt.utils.files.fopen(os.path.join(dirpath, 'yyy.txt'), 'rt') as f:
-                self.assertEqual(f.read(), 'hehe\nhoho\n')
+                self.assertEqual(f.read(), 'hehe' + os.linesep + 'hoho' + os.linesep)
             with salt.utils.files.fopen(os.path.join(dirpath, 'xxx.txt'), 'rt') as f:
-                self.assertEqual(f.read(), 'hehe\n')
+                self.assertEqual(f.read(), 'hehe' + os.linesep)
         finally:
             shutil.rmtree(dirpath, ignore_errors=True)
