Merge branch 'master' into less_flaky

commit 7fde744e5d
Pedro Algarvio, 2019-11-30 10:45:53 +00:00 (committed by GitHub)
113 changed files with 555 additions and 1232 deletions


@@ -10,7 +10,6 @@ runTestSuite(
     nox_env_name: 'runtests-zeromq',
     nox_passthrough_opts: '--ssh-tests',
     python_version: 'py2',
-    run_full: params.runFull,
     testrun_timeout: 6,
    use_spot_instances: true)


@@ -10,7 +10,6 @@ runTestSuite(
     nox_env_name: 'runtests-zeromq',
     nox_passthrough_opts: '--ssh-tests',
     python_version: 'py2',
-    run_full: params.runFull,
     testrun_timeout: 6,
     use_spot_instances: true)


@@ -10,7 +10,6 @@ runTestSuite(
     nox_env_name: 'runtests-zeromq',
     nox_passthrough_opts: '--ssh-tests',
     python_version: 'py3',
-    run_full: params.runFull,
     testrun_timeout: 6,
     use_spot_instances: true)


@@ -10,7 +10,6 @@ runTestSuite(
     nox_env_name: 'runtests-zeromq',
     nox_passthrough_opts: '-n integration.modules.test_pkg',
     python_version: 'py2',
-    run_full: params.runFull,
     testrun_timeout: 6,
     use_spot_instances: true)


@@ -10,7 +10,6 @@ runTestSuite(
     nox_env_name: 'runtests-zeromq',
     nox_passthrough_opts: '-n integration.modules.test_pkg',
     python_version: 'py3',
-    run_full: params.runFull,
     testrun_timeout: 6,
     use_spot_instances: true)


@@ -10,7 +10,6 @@ runTestSuite(
     nox_env_name: 'runtests-zeromq',
     nox_passthrough_opts: '--ssh-tests',
     python_version: 'py2',
-    run_full: params.runFull,
     testrun_timeout: 6,
     use_spot_instances: true)


@@ -10,7 +10,6 @@ runTestSuite(
     nox_env_name: 'runtests-zeromq',
     nox_passthrough_opts: '--ssh-tests',
     python_version: 'py2',
-    run_full: params.runFull,
     testrun_timeout: 6,
     use_spot_instances: true)


@@ -8,10 +8,9 @@ runTestSuite(
     golden_images_branch: 'master',
     jenkins_slave_label: 'kitchen-slave',
     kitchen_platforms_file: '/var/jenkins/workspace/nox-cloud-platforms.yml',
-    nox_env_name: 'runtests-zeromq',
+    nox_env_name: 'runtests-cloud',
     nox_passthrough_opts: '',
     python_version: 'py2',
-    run_full: params.runFull,
     testrun_timeout: 6,
     use_spot_instances: true)


@@ -10,7 +10,6 @@ runTestSuite(
     nox_env_name: 'runtests-zeromq-m2crypto',
     nox_passthrough_opts: '--ssh-tests',
     python_version: 'py2',
-    run_full: params.runFull,
     testrun_timeout: 6,
     use_spot_instances: true)


@@ -11,7 +11,6 @@ runTestSuite(
     nox_env_name: 'runtests-zeromq',
     nox_passthrough_opts: '--proxy',
     python_version: 'py2',
-    run_full: params.runFull,
     testrun_timeout: 6,
     use_spot_instances: true)


@@ -10,7 +10,6 @@ runTestSuite(
     nox_env_name: 'runtests-zeromq-pycryptodomex',
     nox_passthrough_opts: '--ssh-tests',
     python_version: 'py2',
-    run_full: params.runFull,
     testrun_timeout: 6,
     use_spot_instances: true)


@@ -10,7 +10,6 @@ runTestSuite(
     nox_env_name: 'runtests-tcp',
     nox_passthrough_opts: '--ssh-tests',
     python_version: 'py2',
-    run_full: params.runFull,
     testrun_timeout: 6,
     use_spot_instances: true)


@@ -10,7 +10,6 @@ runTestSuite(
     nox_env_name: 'runtests-tornado',
     nox_passthrough_opts: '--ssh-tests',
     python_version: 'py2',
-    run_full: params.runFull,
     testrun_timeout: 6,
     use_spot_instances: true)


@@ -10,7 +10,6 @@ runTestSuite(
     nox_env_name: 'runtests-zeromq',
     nox_passthrough_opts: '--ssh-tests',
     python_version: 'py3',
-    run_full: params.runFull,
     testrun_timeout: 6,
     use_spot_instances: true)


@@ -8,10 +8,9 @@ runTestSuite(
     golden_images_branch: 'master',
     jenkins_slave_label: 'kitchen-slave',
     kitchen_platforms_file: '/var/jenkins/workspace/nox-cloud-platforms.yml',
-    nox_env_name: 'runtests-zeromq',
+    nox_env_name: 'runtests-cloud',
     nox_passthrough_opts: '',
     python_version: 'py3',
-    run_full: params.runFull,
     testrun_timeout: 6,
     use_spot_instances: true)


@@ -10,7 +10,6 @@ runTestSuite(
     nox_env_name: 'runtests-zeromq-m2crypto',
     nox_passthrough_opts: '--ssh-tests',
     python_version: 'py3',
-    run_full: params.runFull,
     testrun_timeout: 6,
     use_spot_instances: true)


@@ -11,7 +11,6 @@ runTestSuite(
     nox_env_name: 'runtests-zeromq',
     nox_passthrough_opts: '--proxy',
     python_version: 'py3',
-    run_full: params.runFull,
     testrun_timeout: 6,
     use_spot_instances: true)


@@ -10,7 +10,6 @@ runTestSuite(
     nox_env_name: 'runtests-zeromq-pycryptodomex',
     nox_passthrough_opts: '--ssh-tests',
     python_version: 'py3',
-    run_full: params.runFull,
     testrun_timeout: 6,
     use_spot_instances: true)


@@ -10,7 +10,6 @@ runTestSuite(
     nox_env_name: 'runtests-tcp',
     nox_passthrough_opts: '--ssh-tests',
     python_version: 'py3',
-    run_full: params.runFull,
     testrun_timeout: 6,
     use_spot_instances: true)


@@ -10,7 +10,6 @@ runTestSuite(
     nox_env_name: 'runtests-zeromq',
     nox_passthrough_opts: '--ssh-tests',
     python_version: 'py3',
-    run_full: params.runFull,
     testrun_timeout: 6,
     use_spot_instances: true)


@@ -10,7 +10,6 @@ runTestSuite(
     nox_env_name: 'runtests-zeromq',
     nox_passthrough_opts: '--ssh-tests',
     python_version: 'py3',
-    run_full: params.runFull,
     testrun_timeout: 6,
     use_spot_instances: true)


@@ -10,7 +10,6 @@ runTestSuite(
     nox_env_name: 'runtests-zeromq',
     nox_passthrough_opts: '--ssh-tests',
     python_version: 'py2',
-    run_full: params.runFull,
     testrun_timeout: 6,
     use_spot_instances: true)


@@ -10,7 +10,6 @@ runTestSuite(
     nox_env_name: 'runtests-zeromq',
     nox_passthrough_opts: '--ssh-tests',
     python_version: 'py2',
-    run_full: params.runFull,
     testrun_timeout: 6,
     use_spot_instances: true)


@@ -10,7 +10,6 @@ runTestSuite(
     nox_env_name: 'runtests-zeromq',
     nox_passthrough_opts: '--ssh-tests',
     python_version: 'py3',
-    run_full: params.runFull,
     testrun_timeout: 6,
     use_spot_instances: true)


@@ -10,7 +10,6 @@ runTestSuite(
     nox_env_name: 'runtests-zeromq',
     nox_passthrough_opts: '--ssh-tests',
     python_version: 'py2',
-    run_full: params.runFull,
     testrun_timeout: 6,
     use_spot_instances: true)


@@ -10,7 +10,6 @@ runTestSuite(
     nox_env_name: 'runtests-zeromq',
     nox_passthrough_opts: '--ssh-tests',
     python_version: 'py3',
-    run_full: params.runFull,
     testrun_timeout: 6,
     use_spot_instances: true)


@@ -10,7 +10,6 @@ runTestSuite(
     nox_env_name: 'runtests-zeromq',
     nox_passthrough_opts: '--ssh-tests',
     python_version: 'py3',
-    run_full: params.runFull,
     testrun_timeout: 6,
     use_spot_instances: true)


@@ -1,165 +1,20 @@
@Library('salt@master-1.5') _
// Pre-nox pipeline
//runTestSuite(
// concurrent_builds: 1,
// distro_name: 'macosx',
// distro_version: 'highsierra',
// env: env,
// golden_images_branch: 'master',
// jenkins_slave_label: 'kitchen-slave-mac',
// kitchen_platforms_file: '/var/jenkins/workspace/pre-golden-nox-platforms.yml',
// nox_env_name: 'runtests-zeromq',
// nox_passthrough_opts: '',
// python_version: 'py2',
// run_full: params.runFull,
// testrun_timeout: 6,
// use_spot_instances: false)
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'macosx'
def distro_version = 'highsierra'
def python_version = 'py2'
def nox_env_name = 'runtests-zeromq'
def golden_images_branch = 'master'
def nox_passthrough_opts = ''
def concurrent_builds = 1
def jenkins_slave_label = 'kitchen-slave-mac'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
parameters([
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
])
])
// Only set milestones on PR builds
if (env.CHANGE_ID) {
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
}
wrappedNode(jenkins_slave_label, global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=${nox_env_name}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"PATH=/Users/parallels/.rbenv/shims:/Users/parallels/.rbenv/bin:/usr/local/bin:/usr/bin:/bin:/usr/sbin:/sbin:/opt/salt/bin:/usr/local/sbin",
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"FORCE_FULL=${params.runFull}",
]) {
stage('VM Cleanup') {
sh '''
for i in `prlctl list -aij|jq -r '.[]|select((.Uptime|tonumber > 86400) and (.State == "running"))|.ID'`
do
prlctl stop $i --kill
done
# don't delete vm's that haven't started yet ((.State == "stopped") and (.Uptime == "0"))
for i in `prlctl list -aij|jq -r '.[]|select((.Uptime|tonumber > 0) and (.State != "running"))|.ID'`
do
prlctl delete $i
done
'''
}
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with vagrant macos --without ec2 windows opennebula docker'
}
stage('Create VM') {
sh '''
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
try {
timeout(time: testrun_timeout, unit: 'HOURS') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.vagrant.d/insecure_private_key; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
def distro_strings = [
distro_name,
distro_version
]
def report_strings = (
[python_version] + nox_env_name.split('-')
).flatten()
uploadCodeCoverage(
report_path: 'artifacts/coverage/coverage.xml',
report_name: "${distro_strings.join('-')}-${report_strings.join('-')}",
report_flags: ([distro_strings.join('')] + report_strings).flatten()
)
}
}
}
}
}
runTestSuite(
concurrent_builds: 1,
distro_name: 'macosx',
distro_version: 'highsierra',
env: env,
golden_images_branch: 'master',
jenkins_slave_label: 'kitchen-slave-mac',
kitchen_platforms_file: '/var/jenkins/workspace/pre-golden-nox-platforms.yml',
kitchen_verifier_file: '/var/jenkins/workspace/nox-verifier.yml',
nox_env_name: 'runtests-zeromq',
nox_passthrough_opts: '',
python_version: 'py2',
run_full: params.runFull,
testrun_timeout: 6,
use_spot_instances: false)
// vim: ft=groovy
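
For reference, the 'Upload Coverage' stage in the old inline pipeline above derives its coverage report name and flags by joining the distro and nox env strings. A minimal Python sketch of the same string logic (illustrative only, not part of the commit; values taken from this file):

    # Mirrors the Groovy 'Upload Coverage' naming logic shown above.
    distro_strings = ['macosx', 'highsierra']  # distro_name, distro_version
    report_strings = ['py2'] + 'runtests-zeromq'.split('-')

    # The report name joins every component with dashes; the flags keep the
    # concatenated distro string plus the individual report components.
    report_name = '-'.join(distro_strings + report_strings)
    report_flags = [''.join(distro_strings)] + report_strings

    print(report_name)   # macosx-highsierra-py2-runtests-zeromq
    print(report_flags)  # ['macosxhighsierra', 'py2', 'runtests', 'zeromq']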


@@ -1,165 +1,20 @@
@Library('salt@master-1.5') _
// Pre-nox pipeline
//runTestSuite(
// concurrent_builds: 1,
// distro_name: 'macosx',
// distro_version: 'highsierra',
// env: env,
// golden_images_branch: 'master',
// jenkins_slave_label: 'kitchen-slave-mac',
// kitchen_platforms_file: '/var/jenkins/workspace/pre-golden-nox-platforms.yml',
// nox_env_name: 'runtests-zeromq',
// nox_passthrough_opts: '',
// python_version: 'py3',
// run_full: params.runFull,
// testrun_timeout: 6,
// use_spot_instances: false)
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'macosx'
def distro_version = 'highsierra'
def python_version = 'py3'
def nox_env_name = 'runtests-zeromq'
def golden_images_branch = 'master'
def nox_passthrough_opts = ''
def concurrent_builds = 1
def jenkins_slave_label = 'kitchen-slave-mac'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
parameters([
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
])
])
// Only set milestones on PR builds
if (env.CHANGE_ID) {
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
}
wrappedNode(jenkins_slave_label, global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=${nox_env_name}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"PATH=/Users/parallels/.rbenv/shims:/Users/parallels/.rbenv/bin:/usr/local/bin:/usr/bin:/bin:/usr/sbin:/sbin:/opt/salt/bin:/usr/local/sbin",
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"FORCE_FULL=${params.runFull}",
]) {
stage('VM Cleanup') {
sh '''
for i in `prlctl list -aij|jq -r '.[]|select((.Uptime|tonumber > 86400) and (.State == "running"))|.ID'`
do
prlctl stop $i --kill
done
# don't delete vm's that haven't started yet ((.State == "stopped") and (.Uptime == "0"))
for i in `prlctl list -aij|jq -r '.[]|select((.Uptime|tonumber > 0) and (.State != "running"))|.ID'`
do
prlctl delete $i
done
'''
}
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with vagrant macos --without ec2 windows opennebula docker'
}
stage('Create VM') {
sh '''
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
try {
timeout(time: testrun_timeout, unit: 'HOURS') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.vagrant.d/insecure_private_key; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
def distro_strings = [
distro_name,
distro_version
]
def report_strings = (
[python_version] + nox_env_name.split('-')
).flatten()
uploadCodeCoverage(
report_path: 'artifacts/coverage/coverage.xml',
report_name: "${distro_strings.join('-')}-${report_strings.join('-')}",
report_flags: ([distro_strings.join('')] + report_strings).flatten()
)
}
}
}
}
}
runTestSuite(
concurrent_builds: 1,
distro_name: 'macosx',
distro_version: 'highsierra',
env: env,
golden_images_branch: 'master',
jenkins_slave_label: 'kitchen-slave-mac',
kitchen_platforms_file: '/var/jenkins/workspace/pre-golden-nox-platforms.yml',
kitchen_verifier_file: '/var/jenkins/workspace/nox-verifier.yml',
nox_env_name: 'runtests-zeromq',
nox_passthrough_opts: '',
python_version: 'py3',
run_full: params.runFull,
testrun_timeout: 6,
use_spot_instances: false)
// vim: ft=groovy


@@ -1,165 +1,20 @@
@Library('salt@master-1.5') _
// Pre-nox pipeline
//runTestSuite(
// concurrent_builds: 1,
// distro_name: 'macosx',
// distro_version: 'mojave',
// env: env,
// golden_images_branch: 'master',
// jenkins_slave_label: 'kitchen-slave-mac',
// kitchen_platforms_file: '/var/jenkins/workspace/pre-golden-nox-platforms.yml',
// nox_env_name: 'runtests-zeromq',
// nox_passthrough_opts: '',
// python_version: 'py2',
// run_full: params.runFull,
// testrun_timeout: 6,
// use_spot_instances: false)
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'macosx'
def distro_version = 'mojave'
def python_version = 'py2'
def nox_env_name = 'runtests-zeromq'
def golden_images_branch = 'master'
def nox_passthrough_opts = ''
def concurrent_builds = 1
def jenkins_slave_label = 'kitchen-slave-mac'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
parameters([
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
])
])
// Only set milestones on PR builds
if (env.CHANGE_ID) {
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
}
wrappedNode(jenkins_slave_label, global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=${nox_env_name}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"PATH=/Users/parallels/.rbenv/shims:/Users/parallels/.rbenv/bin:/usr/local/bin:/usr/bin:/bin:/usr/sbin:/sbin:/opt/salt/bin:/usr/local/sbin",
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"FORCE_FULL=${params.runFull}",
]) {
stage('VM Cleanup') {
sh '''
for i in `prlctl list -aij|jq -r '.[]|select((.Uptime|tonumber > 86400) and (.State == "running"))|.ID'`
do
prlctl stop $i --kill
done
# don't delete vm's that haven't started yet ((.State == "stopped") and (.Uptime == "0"))
for i in `prlctl list -aij|jq -r '.[]|select((.Uptime|tonumber > 0) and (.State != "running"))|.ID'`
do
prlctl delete $i
done
'''
}
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with vagrant macos --without ec2 windows opennebula docker'
}
stage('Create VM') {
sh '''
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
try {
timeout(time: testrun_timeout, unit: 'HOURS') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.vagrant.d/insecure_private_key; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
def distro_strings = [
distro_name,
distro_version
]
def report_strings = (
[python_version] + nox_env_name.split('-')
).flatten()
uploadCodeCoverage(
report_path: 'artifacts/coverage/coverage.xml',
report_name: "${distro_strings.join('-')}-${report_strings.join('-')}",
report_flags: ([distro_strings.join('')] + report_strings).flatten()
)
}
}
}
}
}
runTestSuite(
concurrent_builds: 1,
distro_name: 'macosx',
distro_version: 'mojave',
env: env,
golden_images_branch: 'master',
jenkins_slave_label: 'kitchen-slave-mac',
kitchen_platforms_file: '/var/jenkins/workspace/pre-golden-nox-platforms.yml',
kitchen_verifier_file: '/var/jenkins/workspace/nox-verifier.yml',
nox_env_name: 'runtests-zeromq',
nox_passthrough_opts: '',
python_version: 'py2',
run_full: params.runFull,
testrun_timeout: 6,
use_spot_instances: false)
// vim: ft=groovy


@@ -1,165 +1,20 @@
@Library('salt@master-1.5') _
// Pre-nox pipeline
//runTestSuite(
// concurrent_builds: 1,
// distro_name: 'macosx',
// distro_version: 'mojave',
// env: env,
// golden_images_branch: 'master',
// jenkins_slave_label: 'kitchen-slave-mac',
// kitchen_platforms_file: '/var/jenkins/workspace/pre-golden-nox-platforms.yml',
// nox_env_name: 'runtests-zeromq',
// nox_passthrough_opts: '',
// python_version: 'py3',
// run_full: params.runFull,
// testrun_timeout: 6,
// use_spot_instances: false)
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'macosx'
def distro_version = 'mojave'
def python_version = 'py3'
def nox_env_name = 'runtests-zeromq'
def golden_images_branch = 'master'
def nox_passthrough_opts = ''
def concurrent_builds = 1
def jenkins_slave_label = 'kitchen-slave-mac'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
parameters([
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
])
])
// Only set milestones on PR builds
if (env.CHANGE_ID) {
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
}
wrappedNode(jenkins_slave_label, global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=${nox_env_name}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"PATH=/Users/parallels/.rbenv/shims:/Users/parallels/.rbenv/bin:/usr/local/bin:/usr/bin:/bin:/usr/sbin:/sbin:/opt/salt/bin:/usr/local/sbin",
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"FORCE_FULL=${params.runFull}",
]) {
stage('VM Cleanup') {
sh '''
for i in `prlctl list -aij|jq -r '.[]|select((.Uptime|tonumber > 86400) and (.State == "running"))|.ID'`
do
prlctl stop $i --kill
done
# don't delete vm's that haven't started yet ((.State == "stopped") and (.Uptime == "0"))
for i in `prlctl list -aij|jq -r '.[]|select((.Uptime|tonumber > 0) and (.State != "running"))|.ID'`
do
prlctl delete $i
done
'''
}
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with vagrant macos --without ec2 windows opennebula docker'
}
stage('Create VM') {
sh '''
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
try {
timeout(time: testrun_timeout, unit: 'HOURS') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.vagrant.d/insecure_private_key; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
def distro_strings = [
distro_name,
distro_version
]
def report_strings = (
[python_version] + nox_env_name.split('-')
).flatten()
uploadCodeCoverage(
report_path: 'artifacts/coverage/coverage.xml',
report_name: "${distro_strings.join('-')}-${report_strings.join('-')}",
report_flags: ([distro_strings.join('')] + report_strings).flatten()
)
}
}
}
}
}
runTestSuite(
concurrent_builds: 1,
distro_name: 'macosx',
distro_version: 'mojave',
env: env,
golden_images_branch: 'master',
jenkins_slave_label: 'kitchen-slave-mac',
kitchen_platforms_file: '/var/jenkins/workspace/pre-golden-nox-platforms.yml',
kitchen_verifier_file: '/var/jenkins/workspace/nox-verifier.yml',
nox_env_name: 'runtests-zeromq',
nox_passthrough_opts: '',
python_version: 'py3',
run_full: params.runFull,
testrun_timeout: 6,
use_spot_instances: false)
// vim: ft=groovy


@@ -1,165 +1,20 @@
@Library('salt@master-1.5') _
// Pre-nox pipeline
//runTestSuite(
// concurrent_builds: 1,
// distro_name: 'macosx',
// distro_version: 'sierra',
// env: env,
// golden_images_branch: 'master',
// jenkins_slave_label: 'kitchen-slave-mac',
// kitchen_platforms_file: '/var/jenkins/workspace/pre-golden-nox-platforms.yml',
// nox_env_name: 'runtests-zeromq',
// nox_passthrough_opts: '',
// python_version: 'py2',
// run_full: params.runFull,
// testrun_timeout: 6,
// use_spot_instances: false)
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'macosx'
def distro_version = 'sierra'
def python_version = 'py2'
def nox_env_name = 'runtests-zeromq'
def golden_images_branch = 'master'
def nox_passthrough_opts = ''
def concurrent_builds = 1
def jenkins_slave_label = 'kitchen-slave-mac'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
parameters([
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
])
])
// Only set milestones on PR builds
if (env.CHANGE_ID) {
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
}
wrappedNode(jenkins_slave_label, global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=${nox_env_name}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"PATH=/Users/parallels/.rbenv/shims:/Users/parallels/.rbenv/bin:/usr/local/bin:/usr/bin:/bin:/usr/sbin:/sbin:/opt/salt/bin:/usr/local/sbin",
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"FORCE_FULL=${params.runFull}",
]) {
stage('VM Cleanup') {
sh '''
for i in `prlctl list -aij|jq -r '.[]|select((.Uptime|tonumber > 86400) and (.State == "running"))|.ID'`
do
prlctl stop $i --kill
done
# don't delete vm's that haven't started yet ((.State == "stopped") and (.Uptime == "0"))
for i in `prlctl list -aij|jq -r '.[]|select((.Uptime|tonumber > 0) and (.State != "running"))|.ID'`
do
prlctl delete $i
done
'''
}
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with vagrant macos --without ec2 windows opennebula docker'
}
stage('Create VM') {
sh '''
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
try {
timeout(time: testrun_timeout, unit: 'HOURS') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.vagrant.d/insecure_private_key; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
def distro_strings = [
distro_name,
distro_version
]
def report_strings = (
[python_version] + nox_env_name.split('-')
).flatten()
uploadCodeCoverage(
report_path: 'artifacts/coverage/coverage.xml',
report_name: "${distro_strings.join('-')}-${report_strings.join('-')}",
report_flags: ([distro_strings.join('')] + report_strings).flatten()
)
}
}
}
}
}
runTestSuite(
concurrent_builds: 1,
distro_name: 'macosx',
distro_version: 'sierra',
env: env,
golden_images_branch: 'master',
jenkins_slave_label: 'kitchen-slave-mac',
kitchen_platforms_file: '/var/jenkins/workspace/pre-golden-nox-platforms.yml',
kitchen_verifier_file: '/var/jenkins/workspace/nox-verifier.yml',
nox_env_name: 'runtests-zeromq',
nox_passthrough_opts: '',
python_version: 'py2',
run_full: params.runFull,
testrun_timeout: 6,
use_spot_instances: false)
// vim: ft=groovy


@@ -1,165 +1,20 @@
@Library('salt@master-1.5') _
// Pre-nox pipeline
//runTestSuite(
// concurrent_builds: 1,
// distro_name: 'macosx',
// distro_version: 'sierra',
// env: env,
// golden_images_branch: 'master',
// jenkins_slave_label: 'kitchen-slave-mac',
// kitchen_platforms_file: '/var/jenkins/workspace/pre-golden-nox-platforms.yml',
// nox_env_name: 'runtests-zeromq',
// nox_passthrough_opts: '',
// python_version: 'py3',
// run_full: params.runFull,
// testrun_timeout: 6,
// use_spot_instances: false)
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'macosx'
def distro_version = 'sierra'
def python_version = 'py3'
def nox_env_name = 'runtests-zeromq'
def golden_images_branch = 'master'
def nox_passthrough_opts = ''
def concurrent_builds = 1
def jenkins_slave_label = 'kitchen-slave-mac'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
parameters([
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
])
])
// Only set milestones on PR builds
if (env.CHANGE_ID) {
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
}
wrappedNode(jenkins_slave_label, global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=${nox_env_name}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"PATH=/Users/parallels/.rbenv/shims:/Users/parallels/.rbenv/bin:/usr/local/bin:/usr/bin:/bin:/usr/sbin:/sbin:/opt/salt/bin:/usr/local/sbin",
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"FORCE_FULL=${params.runFull}",
]) {
stage('VM Cleanup') {
sh '''
for i in `prlctl list -aij|jq -r '.[]|select((.Uptime|tonumber > 86400) and (.State == "running"))|.ID'`
do
prlctl stop $i --kill
done
# don't delete vm's that haven't started yet ((.State == "stopped") and (.Uptime == "0"))
for i in `prlctl list -aij|jq -r '.[]|select((.Uptime|tonumber > 0) and (.State != "running"))|.ID'`
do
prlctl delete $i
done
'''
}
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with vagrant macos --without ec2 windows opennebula docker'
}
stage('Create VM') {
sh '''
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
try {
timeout(time: testrun_timeout, unit: 'HOURS') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.vagrant.d/insecure_private_key; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
def distro_strings = [
distro_name,
distro_version
]
def report_strings = (
[python_version] + nox_env_name.split('-')
).flatten()
uploadCodeCoverage(
report_path: 'artifacts/coverage/coverage.xml',
report_name: "${distro_strings.join('-')}-${report_strings.join('-')}",
report_flags: ([distro_strings.join('')] + report_strings).flatten()
)
}
}
}
}
}
runTestSuite(
concurrent_builds: 1,
distro_name: 'macosx',
distro_version: 'sierra',
env: env,
golden_images_branch: 'master',
jenkins_slave_label: 'kitchen-slave-mac',
kitchen_platforms_file: '/var/jenkins/workspace/pre-golden-nox-platforms.yml',
kitchen_verifier_file: '/var/jenkins/workspace/nox-verifier.yml',
nox_env_name: 'runtests-zeromq',
nox_passthrough_opts: '',
python_version: 'py3',
run_full: params.runFull,
testrun_timeout: 6,
use_spot_instances: false)
// vim: ft=groovy


@@ -10,7 +10,6 @@ runTestSuite(
     nox_env_name: 'runtests-zeromq',
     nox_passthrough_opts: '--ssh-tests',
     python_version: 'py2',
-    run_full: params.runFull,
     testrun_timeout: 6,
     use_spot_instances: true)


@@ -10,7 +10,6 @@ runTestSuite(
     nox_env_name: 'runtests-zeromq',
     nox_passthrough_opts: '--ssh-tests',
     python_version: 'py3',
-    run_full: params.runFull,
     testrun_timeout: 6,
     use_spot_instances: true)


@@ -10,7 +10,6 @@ runTestSuite(
     nox_env_name: 'runtests-zeromq',
     nox_passthrough_opts: '--ssh-tests',
     python_version: 'py2',
-    run_full: params.runFull,
     testrun_timeout: 6,
     use_spot_instances: true)


@@ -10,7 +10,6 @@ runTestSuite(
     nox_env_name: 'runtests-zeromq-m2crypto',
     nox_passthrough_opts: '--ssh-tests',
     python_version: 'py2',
-    run_full: params.runFull,
     testrun_timeout: 6,
     use_spot_instances: true)


@@ -11,7 +11,6 @@ runTestSuite(
     nox_env_name: 'runtests-zeromq',
     nox_passthrough_opts: '--proxy',
     python_version: 'py2',
-    run_full: params.runFull,
     testrun_timeout: 6,
     use_spot_instances: true)


@@ -10,7 +10,6 @@ runTestSuite(
     nox_env_name: 'runtests-zeromq-pycryptodomex',
     nox_passthrough_opts: '--ssh-tests',
     python_version: 'py2',
-    run_full: params.runFull,
     testrun_timeout: 6,
     use_spot_instances: true)


@@ -10,7 +10,6 @@ runTestSuite(
     nox_env_name: 'runtests-tcp',
     nox_passthrough_opts: '--ssh-tests',
     python_version: 'py2',
-    run_full: params.runFull,
     testrun_timeout: 6,
     use_spot_instances: true)


@@ -10,7 +10,6 @@ runTestSuite(
     nox_env_name: 'runtests-tornado',
     nox_passthrough_opts: '--ssh-tests',
     python_version: 'py2',
-    run_full: params.runFull,
     testrun_timeout: 6,
     use_spot_instances: true)


@@ -10,7 +10,6 @@ runTestSuite(
     nox_env_name: 'runtests-zeromq',
     nox_passthrough_opts: '--ssh-tests',
     python_version: 'py3',
-    run_full: params.runFull,
     testrun_timeout: 6,
     use_spot_instances: true)


@@ -10,7 +10,6 @@ runTestSuite(
     nox_env_name: 'runtests-zeromq-m2crypto',
     nox_passthrough_opts: '--ssh-tests',
     python_version: 'py3',
-    run_full: params.runFull,
     testrun_timeout: 6,
     use_spot_instances: true)


@@ -11,7 +11,6 @@ runTestSuite(
     nox_env_name: 'runtests-zeromq',
     nox_passthrough_opts: '--proxy',
     python_version: 'py3',
-    run_full: params.runFull,
     testrun_timeout: 6,
     use_spot_instances: true)


@@ -10,7 +10,6 @@ runTestSuite(
     nox_env_name: 'runtests-zeromq-pycryptodomex',
     nox_passthrough_opts: '--ssh-tests',
     python_version: 'py3',
-    run_full: params.runFull,
     testrun_timeout: 6,
     use_spot_instances: true)


@@ -10,7 +10,6 @@ runTestSuite(
     nox_env_name: 'runtests-tcp',
     nox_passthrough_opts: '--ssh-tests',
     python_version: 'py3',
-    run_full: params.runFull,
     testrun_timeout: 6,
     use_spot_instances: true)


@@ -10,7 +10,6 @@ runTestSuite(
     nox_env_name: 'runtests-zeromq',
     nox_passthrough_opts: '--ssh-tests',
     python_version: 'py2',
-    run_full: params.runFull,
     testrun_timeout: 6,
     use_spot_instances: true)


@@ -10,7 +10,6 @@ runTestSuite(
     nox_env_name: 'runtests-zeromq',
     nox_passthrough_opts: '--ssh-tests',
     python_version: 'py3',
-    run_full: params.runFull,
     testrun_timeout: 6,
     use_spot_instances: true)


@@ -10,7 +10,6 @@ runTestSuite(
     nox_env_name: 'runtests-zeromq',
     nox_passthrough_opts: '--unit',
     python_version: 'py2',
-    run_full: params.runFull,
     testrun_timeout: 8,
     use_spot_instances: false)


@@ -10,7 +10,6 @@ runTestSuite(
     nox_env_name: 'runtests-zeromq',
     nox_passthrough_opts: '--unit',
     python_version: 'py3',
-    run_full: params.runFull,
     testrun_timeout: 8,
     use_spot_instances: false)


@@ -10,7 +10,6 @@ runTestSuite(
     nox_env_name: 'runtests-zeromq',
     nox_passthrough_opts: '--unit',
     python_version: 'py2',
-    run_full: params.runFull,
     testrun_timeout: 8,
     use_spot_instances: false)


@@ -10,7 +10,6 @@ runTestSuite(
     nox_env_name: 'runtests-zeromq',
     nox_passthrough_opts: '--unit',
     python_version: 'py3',
-    run_full: params.runFull,
     testrun_timeout: 8,
     use_spot_instances: false)


@@ -12,16 +12,22 @@ Salt is available in the FreeBSD ports tree at `sysutils/py-salt
 FreeBSD binary repo
 ===================
 
+Install Salt via the official package repository. Salt is packaged as both a Python 2.7 and a Python 3.6 version.
+
+.. code-block:: bash
+
+   pkg install py27-salt
+
+.. code-block:: bash
+
+   pkg install py36-salt
+
 FreeBSD ports
 =============
 
-By default salt is packaged using python 2.7, but if you build your own packages from FreeBSD ports either by hand or with poudriere you can instead package it with your choice of python. Add a line to /etc/make.conf to choose your python flavour:
+You can also build your own packages from FreeBSD ports, either by hand or with poudriere, and package Salt with your choice of Python. Add a line to /etc/make.conf to choose your Python flavour:
 
-.. code-block:: text
+.. code-block:: bash
 
-echo "DEFAULT_VERSIONS+= python=3.6" >> /etc/make.conf
+   echo "DEFAULT_VERSIONS+= python=3.6" >> /etc/make.conf


@@ -54,6 +54,9 @@ RUNTESTS_LOGFILE = os.path.join(
     'runtests-{}.log'.format(datetime.datetime.now().strftime('%Y%m%d%H%M%S.%f'))
 )
 
+# Prevent Python from writing bytecode
+os.environ[str('PYTHONDONTWRITEBYTECODE')] = str('1')
+
 
 def _create_ci_directories():
     for dirname in ('logs', 'coverage', 'xml-unittests-output'):
@@ -350,19 +353,23 @@ def _run_with_coverage(session, *test_cmd):
         python_path_entries.remove(SITECUSTOMIZE_DIR)
     python_path_entries.insert(0, SITECUSTOMIZE_DIR)
     python_path_env_var = os.pathsep.join(python_path_entries)
+
+    env = {
+        # The updated python path so that sitecustomize is importable
+        'PYTHONPATH': python_path_env_var,
+        # The full path to the .coverage data file. Makes sure we always write
+        # them to the same directory
+        'COVERAGE_FILE': os.path.abspath(os.path.join(REPO_ROOT, '.coverage')),
+        # Instruct sub processes to also run under coverage
+        'COVERAGE_PROCESS_START': os.path.join(REPO_ROOT, '.coveragerc')
+    }
+    if IS_DARWIN:
+        # Don't nuke our multiprocessing efforts objc!
+        # https://stackoverflow.com/questions/50168647/multiprocessing-causes-python-to-crash-and-gives-an-error-may-have-been-in-progr
+        env['OBJC_DISABLE_INITIALIZE_FORK_SAFETY'] = 'YES'
+
     try:
-        session.run(
-            *test_cmd,
-            env={
-                # The updated python path so that sitecustomize is importable
-                'PYTHONPATH': python_path_env_var,
-                # The full path to the .coverage data file. Makes sure we always write
-                # them to the same directory
-                'COVERAGE_FILE': os.path.abspath(os.path.join(REPO_ROOT, '.coverage')),
-                # Instruct sub processes to also run under coverage
-                'COVERAGE_PROCESS_START': os.path.join(REPO_ROOT, '.coveragerc')
-            }
-        )
+        session.run(*test_cmd, env=env)
     finally:
         # Always combine and generate the XML coverage report
         try:
@@ -394,7 +401,13 @@ def _runtests(session, coverage, cmd_args):
         if coverage is True:
             _run_with_coverage(session, 'coverage', 'run', os.path.join('tests', 'runtests.py'), *cmd_args)
         else:
-            session.run('python', os.path.join('tests', 'runtests.py'), *cmd_args)
+            cmd_args = ['python', os.path.join('tests', 'runtests.py')] + list(cmd_args)
+            env = None
+            if IS_DARWIN:
+                # Don't nuke our multiprocessing efforts objc!
+                # https://stackoverflow.com/questions/50168647/multiprocessing-causes-python-to-crash-and-gives-an-error-may-have-been-in-progr
+                env = {'OBJC_DISABLE_INITIALIZE_FORK_SAFETY': 'YES'}
+            session.run(*cmd_args, env=env)
     except CommandFailed:
         # Disabling re-running failed tests for the time being
         raise
@@ -845,11 +858,17 @@ def _pytest(session, coverage, cmd_args):
     # Create required artifacts directories
     _create_ci_directories()
 
+    env = None
+    if IS_DARWIN:
+        # Don't nuke our multiprocessing efforts objc!
+        # https://stackoverflow.com/questions/50168647/multiprocessing-causes-python-to-crash-and-gives-an-error-may-have-been-in-progr
+        env = {'OBJC_DISABLE_INITIALIZE_FORK_SAFETY': 'YES'}
+
     try:
         if coverage is True:
             _run_with_coverage(session, 'coverage', 'run', '-m', 'py.test', *cmd_args)
         else:
-            session.run('py.test', *cmd_args)
+            session.run('py.test', *cmd_args, env=env)
     except CommandFailed:
         # Re-run failed tests
         session.log('Re-running failed tests')
@@ -857,7 +876,7 @@ def _pytest(session, coverage, cmd_args):
         if coverage is True:
             _run_with_coverage(session, 'coverage', 'run', '-m', 'py.test', *cmd_args)
         else:
-            session.run('py.test', *cmd_args)
+            session.run('py.test', *cmd_args, env=env)
 
 
 def _lint(session, rcfile, flags, paths):
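
The Darwin-only OBJC_DISABLE_INITIALIZE_FORK_SAFETY guard added above now appears in three places in noxfile.py. A minimal standalone sketch of the pattern (the run_tests helper is hypothetical, not from the commit):

    import os
    import subprocess
    import sys

    # Same check the noxfile's IS_DARWIN constant performs.
    IS_DARWIN = sys.platform.lower().startswith('darwin')

    def run_tests(*cmd):
        # Hypothetical helper: on macOS, disable Objective-C's fork-safety
        # check so that fork()-based multiprocessing in the test suite is
        # not killed mid-run (see the stackoverflow link in the diff above).
        env = os.environ.copy()
        if IS_DARWIN:
            env['OBJC_DISABLE_INITIALIZE_FORK_SAFETY'] = 'YES'
        subprocess.run(cmd, env=env, check=True)

    run_tests('python', os.path.join('tests', 'runtests.py'), '--unit')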


@@ -14,7 +14,9 @@ kubernetes<4.0
 mock>=3.0.5; python_version < '3.6'
 more-itertools==5.0.0
 moto
-pylxd>=2.2.5
+# XXX: Temporarily do not install pylxd.
+# pylxd(or likely ws4py) will cause the test suite to hang at the finish line under runtests.py
+# pylxd>=2.2.5
 pyopenssl
 python-etcd>0.4.2
 pyvmomi


@@ -73,7 +73,6 @@ netaddr==0.7.19 # via junos-eznc
 paramiko==2.4.2 # via junos-eznc, ncclient, scp
 pathlib2==2.3.3 # via pytest
 pathtools==0.1.2 # via watchdog
-pbr==5.1.3 # via pylxd
 pluggy==0.9.0 # via pytest
 portend==2.4 # via cherrypy
 psutil==5.6.1
@@ -85,7 +84,6 @@ pycparser==2.19
 # Next line explicitly commented out by pip-tools-compile because of the following regex: '^pycrypto==(.*)$'
 # pycrypto==2.6.1 ; sys_platform != "win32"
 pycryptodome==3.8.1
-pylxd==2.2.9
 pynacl==1.3.0 # via paramiko
 pyopenssl==19.0.0
 pyserial==3.4 # via junos-eznc
@@ -104,8 +102,6 @@ pytz==2019.1 # via moto, tempora
 pyvmomi==6.7.1.2018.12
 pyyaml==5.1.2
 pyzmq==18.0.1 ; python_version != "3.4"
-requests-toolbelt==0.9.1 # via pylxd
-requests-unixsocket==0.1.5 # via pylxd
 requests==2.21.0
 responses==0.10.6 # via moto
 rfc3987==1.3.8
@@ -117,7 +113,7 @@ scp==0.13.2 # via junos-eznc
 selectors2==2.0.1 # via ncclient
 setproctitle==1.1.10
 singledispatch==3.4.0.3
-six==1.12.0 # via bcrypt, cheroot, cherrypy, cryptography, docker, docker-pycreds, google-auth, junos-eznc, kubernetes, mock, more-itertools, moto, ncclient, pathlib2, pylxd, pynacl, pyopenssl, pytest, python-dateutil, python-jose, pyvmomi, responses, salttesting, singledispatch, tempora, websocket-client
+six==1.12.0 # via bcrypt, cheroot, cherrypy, cryptography, docker, docker-pycreds, google-auth, junos-eznc, kubernetes, mock, more-itertools, moto, ncclient, pathlib2, pynacl, pyopenssl, pytest, python-dateutil, python-jose, pyvmomi, responses, salttesting, singledispatch, tempora, websocket-client
 smmap2==2.0.5 # via gitdb2
 smmap==0.9.0
 strict-rfc3339==0.7
@@ -125,14 +121,13 @@ supervisor==3.3.5 ; python_version < "3"
 tempora==1.14.1 # via portend
 timelib==0.2.4
 tornado==4.5.3 ; python_version < "3"
-urllib3==1.24.2 # via botocore, kubernetes, python-etcd, requests, requests-unixsocket
+urllib3==1.24.2 # via botocore, kubernetes, python-etcd, requests
 virtualenv==16.4.3
 vultr==1.0.1
 watchdog==0.9.0
 websocket-client==0.40.0 # via docker, kubernetes
 werkzeug==0.15.6 # via moto
 wrapt==1.11.1 # via aws-xray-sdk
-ws4py==0.5.1 # via pylxd
 xmltodict==0.12.0 # via moto
 yamlordereddictloader==0.4.0
 zc.lockfile==1.4 # via cherrypy


@@ -66,7 +66,6 @@ netaddr==0.7.19 # via junos-eznc
 paramiko==2.4.2 # via junos-eznc, ncclient, scp
 pathlib2==2.3.3 # via pytest
 pathtools==0.1.2 # via watchdog
-pbr==5.1.3 # via pylxd
 pluggy==0.9.0 # via pytest
 portend==2.4 # via cherrypy
 psutil==5.6.1
@@ -78,7 +77,6 @@ pycparser==2.19
 # Next line explicitly commented out by pip-tools-compile because of the following regex: '^pycrypto==(.*)$'
 # pycrypto==2.6.1 ; sys_platform != "win32"
 pycryptodome==3.8.1
-pylxd==2.2.9
 pynacl==1.3.0 # via paramiko
 pyopenssl==19.0.0
 pyserial==3.4 # via junos-eznc
@@ -97,8 +95,6 @@ pytz==2019.1 # via moto, tempora
 pyvmomi==6.7.1.2018.12
 pyyaml==5.1.2
 pyzmq==18.0.1 ; python_version != "3.4"
-requests-toolbelt==0.9.1 # via pylxd
-requests-unixsocket==0.1.5 # via pylxd
 requests==2.21.0
 responses==0.10.6 # via moto
 rfc3987==1.3.8
@@ -108,21 +104,20 @@ salttesting==2017.6.1
 scp==0.13.2 # via junos-eznc
 setproctitle==1.1.10
 singledispatch==3.4.0.3
-six==1.12.0 # via bcrypt, cheroot, cherrypy, cryptography, docker, docker-pycreds, google-auth, junos-eznc, kubernetes, mock, more-itertools, moto, ncclient, pathlib2, pylxd, pynacl, pyopenssl, pytest, python-dateutil, python-jose, pyvmomi, responses, salttesting, singledispatch, tempora, websocket-client
+six==1.12.0 # via bcrypt, cheroot, cherrypy, cryptography, docker, docker-pycreds, google-auth, junos-eznc, kubernetes, mock, more-itertools, moto, ncclient, pathlib2, pynacl, pyopenssl, pytest, python-dateutil, python-jose, pyvmomi, responses, salttesting, singledispatch, tempora, websocket-client
 smmap2==2.0.5 # via gitdb2
 smmap==0.9.0
 strict-rfc3339==0.7
 tempora==1.14.1 # via portend
 timelib==0.2.4
 tornado==4.5.3 ; python_version >= "3.4"
-urllib3==1.24.2 # via botocore, kubernetes, python-etcd, requests, requests-unixsocket
+urllib3==1.24.2 # via botocore, kubernetes, python-etcd, requests
 virtualenv==16.4.3
 vultr==1.0.1
 watchdog==0.9.0
 websocket-client==0.40.0 # via docker, kubernetes
 werkzeug==0.15.6 # via moto
 wrapt==1.11.1 # via aws-xray-sdk
-ws4py==0.5.1 # via pylxd
 xmltodict==0.12.0 # via moto
 yamlordereddictloader==0.4.0
 zc.lockfile==1.4 # via cherrypy


@@ -65,7 +65,7 @@ ncclient==0.6.4 # via junos-eznc
 netaddr==0.7.19 # via junos-eznc
 paramiko==2.4.2 # via junos-eznc, ncclient, scp
 pathtools==0.1.2 # via watchdog
-pbr==5.1.3 # via mock, pylxd
+pbr==5.1.3 # via mock
 pluggy==0.9.0 # via pytest
 portend==2.4 # via cherrypy
 psutil==5.6.1
@@ -77,7 +77,6 @@ pycparser==2.19
 # Next line explicitly commented out by pip-tools-compile because of the following regex: '^pycrypto==(.*)$'
 # pycrypto==2.6.1 ; sys_platform != "win32"
 pycryptodome==3.8.1
-pylxd==2.2.9
 pynacl==1.3.0 # via paramiko
 pyopenssl==19.0.0
 pyserial==3.4 # via junos-eznc
@@ -96,8 +95,6 @@ pytz==2019.1 # via moto, tempora
 pyvmomi==6.7.1.2018.12
 pyyaml==5.1.2
 pyzmq==18.0.1 ; python_version != "3.4"
-requests-toolbelt==0.9.1 # via pylxd
-requests-unixsocket==0.1.5 # via pylxd
 requests==2.21.0
 responses==0.10.6 # via moto
 rfc3987==1.3.8
@@ -107,21 +104,20 @@ salttesting==2017.6.1
 scp==0.13.2 # via junos-eznc
 setproctitle==1.1.10
 singledispatch==3.4.0.3
-six==1.12.0 # via bcrypt, cheroot, cherrypy, cryptography, docker, docker-pycreds, google-auth, junos-eznc, kubernetes, mock, more-itertools, moto, ncclient, pylxd, pynacl, pyopenssl, pytest, python-dateutil, python-jose, pyvmomi, responses, salttesting, singledispatch, tempora, websocket-client
+six==1.12.0 # via bcrypt, cheroot, cherrypy, cryptography, docker, docker-pycreds, google-auth, junos-eznc, kubernetes, mock, more-itertools, moto, ncclient, pynacl, pyopenssl, pytest, python-dateutil, python-jose, pyvmomi, responses, salttesting, singledispatch, tempora, websocket-client
 smmap2==2.0.5 # via gitdb2
 smmap==0.9.0
 strict-rfc3339==0.7
 tempora==1.14.1 # via portend
 timelib==0.2.4
 tornado==4.5.3 ; python_version >= "3.4"
-urllib3==1.24.2 # via botocore, kubernetes, python-etcd, requests, requests-unixsocket
+urllib3==1.24.2 # via botocore, kubernetes, python-etcd, requests
 virtualenv==16.4.3
 vultr==1.0.1
 watchdog==0.9.0
 websocket-client==0.40.0 # via docker, kubernetes
 werkzeug==0.15.6 # via moto
 wrapt==1.11.1 # via aws-xray-sdk
-ws4py==0.5.1 # via pylxd
 xmltodict==0.12.0 # via moto
 yamlordereddictloader==0.4.0
 zc.lockfile==1.4 # via cherrypy


@ -65,7 +65,7 @@ ncclient==0.6.4 # via junos-eznc
netaddr==0.7.19 # via junos-eznc
paramiko==2.4.2 # via junos-eznc, ncclient, scp
pathtools==0.1.2 # via watchdog
pbr==5.1.3 # via mock, pylxd
pbr==5.1.3 # via mock
pluggy==0.9.0 # via pytest
portend==2.4 # via cherrypy
psutil==5.6.1
@@ -77,7 +77,6 @@ pycparser==2.19
# Next line explicitly commented out by pip-tools-compile because of the following regex: '^pycrypto==(.*)$'
# pycrypto==2.6.1 ; sys_platform != "win32"
pycryptodome==3.8.1
pylxd==2.2.9
pynacl==1.3.0 # via paramiko
pyopenssl==19.0.0
pyserial==3.4 # via junos-eznc
@@ -96,8 +95,6 @@ pytz==2019.1 # via moto, tempora
pyvmomi==6.7.1.2018.12
pyyaml==5.1.2
pyzmq==18.0.1 ; python_version != "3.4"
requests-toolbelt==0.9.1 # via pylxd
requests-unixsocket==0.1.5 # via pylxd
requests==2.21.0
responses==0.10.6 # via moto
rfc3987==1.3.8
@@ -107,21 +104,20 @@ salttesting==2017.6.1
scp==0.13.2 # via junos-eznc
setproctitle==1.1.10
singledispatch==3.4.0.3
six==1.12.0 # via bcrypt, cheroot, cherrypy, cryptography, docker, docker-pycreds, google-auth, junos-eznc, kubernetes, mock, more-itertools, moto, ncclient, pylxd, pynacl, pyopenssl, pytest, python-dateutil, python-jose, pyvmomi, responses, salttesting, singledispatch, tempora, websocket-client
six==1.12.0 # via bcrypt, cheroot, cherrypy, cryptography, docker, docker-pycreds, google-auth, junos-eznc, kubernetes, mock, more-itertools, moto, ncclient, pynacl, pyopenssl, pytest, python-dateutil, python-jose, pyvmomi, responses, salttesting, singledispatch, tempora, websocket-client
smmap2==2.0.5 # via gitdb2
smmap==0.9.0
strict-rfc3339==0.7
tempora==1.14.1 # via portend
timelib==0.2.4
tornado==4.5.3 ; python_version >= "3.4"
urllib3==1.24.2 # via botocore, kubernetes, python-etcd, requests, requests-unixsocket
urllib3==1.24.2 # via botocore, kubernetes, python-etcd, requests
virtualenv==16.4.3
vultr==1.0.1
watchdog==0.9.0
websocket-client==0.40.0 # via docker, kubernetes
werkzeug==0.15.6 # via moto
wrapt==1.11.1 # via aws-xray-sdk
ws4py==0.5.1 # via pylxd
xmltodict==0.12.0 # via moto
yamlordereddictloader==0.4.0
zc.lockfile==1.4 # via cherrypy

View file

@@ -244,7 +244,7 @@ if sys.version_info < (3, 2):
elif sys.version_info < (3, 7):
# On Python versions lower than 3.7, we still subclass and override prepare() to include the fix for:
# https://bugs.python.org/issue35726
class QueueHandler(ExcInfoOnLogLevelFormatMixin, logging.handlers.QueueHandler): # pylint: disable=no-member
class QueueHandler(ExcInfoOnLogLevelFormatMixin, logging.handlers.QueueHandler): # pylint: disable=no-member,inconsistent-mro
def __init__(self, queue):
super(QueueHandler, self).__init__(queue)
@@ -297,7 +297,7 @@ elif sys.version_info < (3, 7):
record.exc_text = None
return record
else:
class QueueHandler(ExcInfoOnLogLevelFormatMixin, logging.handlers.QueueHandler): # pylint: disable=no-member
class QueueHandler(ExcInfoOnLogLevelFormatMixin, logging.handlers.QueueHandler): # pylint: disable=no-member,inconsistent-mro
def __init__(self, queue):
super(QueueHandler, self).__init__(queue)
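The bpo-35726 fix being backported above changes QueueHandler.prepare() to work on a copy of the record, so formatting a record for the queue no longer mutates the record that other handlers still need to process. A minimal sketch of that prepare() override, following the shape of the upstream fix (the class name here is illustrative, not Salt's):

import copy
import logging.handlers


class BackportedQueueHandler(logging.handlers.QueueHandler):
    def prepare(self, record):
        # Format first, then copy, so the copy carries the merged message
        # while the original record stays untouched for other handlers.
        msg = self.format(record)
        record = copy.copy(record)
        record.message = msg
        record.msg = msg
        record.args = None
        record.exc_info = None
        record.exc_text = None  # the attribute the hunk above resets
        return record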

View file

@@ -121,9 +121,9 @@ LOGGING_STORE_HANDLER = __StoreLoggingHandler()
class SaltLogRecord(logging.LogRecord):
def __init__(self, *args, **kwargs):
logging.LogRecord.__init__(self, *args, **kwargs)
self.bracketname = '[{:<17}]'.format(self.name)
self.bracketlevel = '[{:<8}]'.format(self.levelname)
self.bracketprocess = '[{:>5}]'.format(self.process)
self.bracketname = '[{:<17}]'.format(str(self.name))
self.bracketlevel = '[{:<8}]'.format(str(self.levelname))
self.bracketprocess = '[{:>5}]'.format(str(self.process))
class SaltColorLogRecord(SaltLogRecord):
@@ -241,7 +241,7 @@ class SaltLoggingClass(six.with_metaclass(LoggingMixinMeta, LOGGING_LOGGER_CLASS
def _log(self, level, msg, args, exc_info=None,
extra=None, # pylint: disable=arguments-differ
stack_info=False,
stack_level=1,
stacklevel=1,
exc_info_on_loglevel=None):
if extra is None:
extra = {}
@@ -290,7 +290,7 @@ class SaltLoggingClass(six.with_metaclass(LoggingMixinMeta, LOGGING_LOGGER_CLASS
else:
LOGGING_LOGGER_CLASS._log(
self, level, msg, args, exc_info=exc_info, extra=extra,
stack_info=stack_info, stack_level=stack_level
stack_info=stack_info, stacklevel=stacklevel
)
def makeRecord(self, name, level, fn, lno, msg, args, exc_info,
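Why the str() wrapping in the first hunk matters: alignment format specs such as '{:<17}' call the value's __format__ with a non-empty spec, which raises TypeError for objects that reject it, e.g. None (record.process is None when logging.logProcesses is disabled). A small standalone illustration, not Salt code:

print('[{:>5}]'.format(str(None)))   # OK: '[ None]'
try:
    print('[{:>5}]'.format(None))    # Python 3: raises
except TypeError as exc:
    print(exc)  # unsupported format string passed to NoneType.__format__

The second hunk renames stack_level to stacklevel, matching the keyword name the standard library Logger._log uses.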

View file

@@ -36,6 +36,7 @@ import salt.utils.zeromq
import salt.syspaths
import salt.exceptions
import salt.defaults.exitcodes
import salt.utils.immutabletypes as immutabletypes
try:
import psutil
@@ -100,11 +101,7 @@ _DFLT_IPC_WBUFFER = _gather_buffer_space() * .5
# TODO: Reserved for future use
_DFLT_IPC_RBUFFER = _gather_buffer_space() * .5
FLO_DIR = os.path.join(
os.path.dirname(os.path.dirname(__file__)),
'daemons', 'flo')
VALID_OPTS = {
VALID_OPTS = immutabletypes.freeze({
# The address of the salt master. May be specified as IP address or hostname
'master': (six.string_types, list),
@@ -1194,10 +1191,10 @@ VALID_OPTS = {
# Thorium top file location
'thorium_top': six.string_types,
}
})
# default configurations
DEFAULT_MINION_OPTS = {
DEFAULT_MINION_OPTS = immutabletypes.freeze({
'interface': '0.0.0.0',
'master': 'salt',
'master_type': 'str',
@@ -1489,9 +1486,9 @@ DEFAULT_MINION_OPTS = {
'discovery': False,
'schedule': {},
'ssh_merge_pillar': True
}
})
DEFAULT_MASTER_OPTS = {
DEFAULT_MASTER_OPTS = immutabletypes.freeze({
'interface': '0.0.0.0',
'publish_port': 4505,
'zmq_backlog': 1000,
@@ -1816,12 +1813,12 @@ DEFAULT_MASTER_OPTS = {
'auth_events': True,
'minion_data_cache_events': True,
'enable_ssh_minions': False,
}
})
# ----- Salt Proxy Minion Configuration Defaults ----------------------------------->
# These are merged with DEFAULT_MINION_OPTS since many of them also apply here.
DEFAULT_PROXY_MINION_OPTS = {
DEFAULT_PROXY_MINION_OPTS = immutabletypes.freeze({
'conf_file': os.path.join(salt.syspaths.CONFIG_DIR, 'proxy'),
'log_file': os.path.join(salt.syspaths.LOGS_DIR, 'proxy'),
'add_proxymodule_to_opts': False,
@@ -1847,9 +1844,10 @@ DEFAULT_PROXY_MINION_OPTS = {
'pki_dir': os.path.join(salt.syspaths.CONFIG_DIR, 'pki', 'proxy'),
'cachedir': os.path.join(salt.syspaths.CACHE_DIR, 'proxy'),
'sock_dir': os.path.join(salt.syspaths.SOCK_DIR, 'proxy'),
}
})
# ----- Salt Cloud Configuration Defaults ----------------------------------->
DEFAULT_CLOUD_OPTS = {
DEFAULT_CLOUD_OPTS = immutabletypes.freeze({
'verify_env': True,
'default_include': 'cloud.conf.d/*.conf',
# Global defaults
@@ -1877,17 +1875,17 @@ DEFAULT_CLOUD_OPTS = {
'log_rotate_backup_count': 0,
'bootstrap_delay': None,
'cache': 'localfs',
}
})
DEFAULT_API_OPTS = {
DEFAULT_API_OPTS = immutabletypes.freeze({
# ----- Salt master settings overridden by Salt-API --------------------->
'api_pidfile': os.path.join(salt.syspaths.PIDFILE_DIR, 'salt-api.pid'),
'api_logfile': os.path.join(salt.syspaths.LOGS_DIR, 'api'),
'rest_timeout': 300,
# <---- Salt master settings overridden by Salt-API ----------------------
}
})
DEFAULT_SPM_OPTS = {
DEFAULT_SPM_OPTS = immutabletypes.freeze({
# ----- Salt master settings overridden by SPM --------------------->
'spm_conf_file': os.path.join(salt.syspaths.CONFIG_DIR, 'spm'),
'formula_path': salt.syspaths.SPM_FORMULA_PATH,
@@ -1908,15 +1906,15 @@ DEFAULT_SPM_OPTS = {
'spm_node_type': '',
'spm_share_dir': os.path.join(salt.syspaths.SHARE_DIR, 'spm'),
# <---- Salt master settings overridden by SPM ----------------------
}
})
VM_CONFIG_DEFAULTS = {
VM_CONFIG_DEFAULTS = immutabletypes.freeze({
'default_include': 'cloud.profiles.d/*.conf',
}
})
PROVIDER_CONFIG_DEFAULTS = {
PROVIDER_CONFIG_DEFAULTS = immutabletypes.freeze({
'default_include': 'cloud.providers.d/*.conf',
}
})
# <---- Salt Cloud Configuration Defaults ------------------------------------
@@ -2460,10 +2458,10 @@ def syndic_config(master_config_path,
master_defaults=None):
if minion_defaults is None:
minion_defaults = DEFAULT_MINION_OPTS
minion_defaults = DEFAULT_MINION_OPTS.copy()
if master_defaults is None:
master_defaults = DEFAULT_MASTER_OPTS
master_defaults = DEFAULT_MASTER_OPTS.copy()
opts = {}
master_opts = master_config(
@@ -2782,7 +2780,7 @@ def apply_cloud_config(overrides, defaults=None):
Return a cloud config
'''
if defaults is None:
defaults = DEFAULT_CLOUD_OPTS
defaults = DEFAULT_CLOUD_OPTS.copy()
config = defaults.copy()
if overrides:
@@ -3682,7 +3680,7 @@ def apply_minion_config(overrides=None,
Returns minion configurations dict.
'''
if defaults is None:
defaults = DEFAULT_MINION_OPTS
defaults = DEFAULT_MINION_OPTS.copy()
if overrides is None:
overrides = {}
@@ -3837,7 +3835,7 @@ def master_config(path, env_var='SALT_MASTER_CONFIG', defaults=None, exit_on_con
:py:func:`salt.client.client_config`.
'''
if defaults is None:
defaults = DEFAULT_MASTER_OPTS
defaults = DEFAULT_MASTER_OPTS.copy()
if not os.environ.get(env_var, None):
# No valid setting was given using the configuration variable.
@@ -3879,7 +3877,7 @@ def apply_master_config(overrides=None, defaults=None):
Returns master configurations dict.
'''
if defaults is None:
defaults = DEFAULT_MASTER_OPTS
defaults = DEFAULT_MASTER_OPTS.copy()
if overrides is None:
overrides = {}
@@ -4054,7 +4052,7 @@ def client_config(path, env_var='SALT_CLIENT_CONFIG', defaults=None):
:py:class:`~salt.client.LocalClient`.
'''
if defaults is None:
defaults = DEFAULT_MASTER_OPTS
defaults = DEFAULT_MASTER_OPTS.copy()
xdg_dir = salt.utils.xdg.xdg_config_dir()
if os.path.isdir(xdg_dir):
@@ -4122,10 +4120,10 @@ def api_config(path):
need to be stubbed out for salt-api
'''
# Let's grab a copy of salt-api's required defaults
opts = DEFAULT_API_OPTS
opts = DEFAULT_API_OPTS.copy()
# Let's override them with salt's master opts
opts.update(client_config(path, defaults=DEFAULT_MASTER_OPTS))
opts.update(client_config(path, defaults=DEFAULT_MASTER_OPTS.copy()))
# Let's set the pidfile and log_file values in opts to api settings
opts.update({
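The freeze() and .copy() changes above close an aliasing bug: the module-level default dicts were handed out by reference, so any mutation of a 'defaults' argument leaked back into the shared defaults seen by every later caller. A minimal illustration of the failure mode, using plain dicts rather than Salt code:

DEFAULT_OPTS = {'log_file': '/var/log/salt/minion'}

def apply_config_buggy(overrides, defaults=None):
    if defaults is None:
        defaults = DEFAULT_OPTS        # alias, not a copy
    defaults.update(overrides)         # mutates DEFAULT_OPTS in place
    return defaults

apply_config_buggy({'log_file': '/tmp/minion.log'})
assert DEFAULT_OPTS['log_file'] == '/tmp/minion.log'   # defaults poisoned

Freezing the module-level defaults turns such writes into an immediate TypeError, and the .copy() calls give each caller a private, mutable dict.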

View file

@@ -50,7 +50,6 @@ from __future__ import absolute_import, unicode_literals, print_function
import glob
import os
import re
import itertools
import fnmatch
# Import salt libs
@@ -58,6 +57,7 @@ import salt.modules.cmdmod
import salt.utils.files
import salt.utils.path
import salt.utils.systemd
from salt.ext.six.moves import filter # pylint: disable=import-error,redefined-builtin
__func_alias__ = {
'reload_': 'reload'
@@ -190,7 +190,7 @@ def _upstart_is_disabled(name):
in /etc/init/[name].conf.
'''
files = ['/etc/init/{0}.conf'.format(name), '/etc/init/{0}.override'.format(name)]
for file_name in itertools.ifilter(os.path.isfile, files):
for file_name in filter(os.path.isfile, files):
with salt.utils.files.fopen(file_name) as fp_:
if re.search(r'^\s*manual',
salt.utils.stringutils.to_unicode(fp_.read()),
@@ -516,7 +516,7 @@ def _upstart_enable(name):
return _upstart_is_enabled(name)
override = '/etc/init/{0}.override'.format(name)
files = ['/etc/init/{0}.conf'.format(name), override]
for file_name in itertools.ifilter(os.path.isfile, files):
for file_name in filter(os.path.isfile, files):
with salt.utils.files.fopen(file_name, 'r+') as fp_:
new_text = re.sub(r'^\s*manual\n?',
'',
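The itertools.ifilter to six.moves.filter change above is a Python 3 compatibility fix: ifilter was removed in Python 3, while six.moves.filter resolves to itertools.ifilter on Python 2 and to the built-in lazy filter on Python 3, so the same code runs on both. In isolation (the paths are just examples):

import os
from salt.ext.six.moves import filter  # pylint: disable=redefined-builtin

files = ['/etc/init/ssh.conf', '/etc/init/ssh.override']
for file_name in filter(os.path.isfile, files):
    print(file_name)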

View file

@@ -9,6 +9,7 @@
Immutable types
'''
from __future__ import absolute_import, unicode_literals
import copy
# Import python libs
try:
@@ -37,6 +38,15 @@ class ImmutableDict(Mapping):
def __repr__(self):
return '<{0} {1}>'.format(self.__class__.__name__, repr(self.__obj))
def __deepcopy__(self, memo):
return copy.deepcopy(self.__obj)
def copy(self):
'''
Return an un-frozen copy of self
'''
return copy.deepcopy(self.__obj)
class ImmutableList(Sequence):
'''
@@ -64,6 +74,9 @@ class ImmutableList(Sequence):
def __repr__(self):
return '<{0} {1}>'.format(self.__class__.__name__, repr(self.__obj))
def __deepcopy__(self, memo):
return copy.deepcopy(self.__obj)
class ImmutableSet(Set):
'''
@@ -85,6 +98,9 @@ class ImmutableSet(Set):
def __repr__(self):
return '<{0} {1}>'.format(self.__class__.__name__, repr(self.__obj))
def __deepcopy__(self, memo):
return copy.deepcopy(self.__obj)
def freeze(obj):
'''
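A short sketch of the behaviour the new copy() and __deepcopy__ methods provide, assuming salt is importable: frozen containers reject writes, while copy() and copy.deepcopy() hand back plain, mutable Python objects again.

import copy
import salt.utils.immutabletypes as immutabletypes

frozen = immutabletypes.freeze({'master': 'salt', 'hosts': ['a', 'b']})
try:
    frozen['master'] = 'localhost'     # Mapping defines no __setitem__
except TypeError:
    print('frozen mapping rejects item assignment')

thawed = frozen.copy()                 # un-frozen deep copy via copy()
thawed['master'] = 'localhost'         # fine
assert isinstance(copy.deepcopy(frozen), dict)   # __deepcopy__ un-freezes too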

View file

@@ -59,8 +59,13 @@ def represent_ordereddict(dumper, data):
return dumper.represent_dict(list(data.items()))
def represent_undefined(dumper, data):
return dumper.represent_scalar(u'tag:yaml.org,2002:null', u'NULL')
OrderedDumper.add_representer(OrderedDict, represent_ordereddict)
SafeOrderedDumper.add_representer(OrderedDict, represent_ordereddict)
SafeOrderedDumper.add_representer(None, represent_undefined)
OrderedDumper.add_representer(
collections.defaultdict,
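Registering represent_undefined under the None key, as done above for SafeOrderedDumper, installs it as the fallback representer: types PyYAML has no representer for are dumped as NULL instead of raising yaml.representer.RepresenterError. A standalone sketch using a plain yaml.SafeDumper subclass (class names here are illustrative):

import yaml

class Opaque(object):
    pass

def represent_undefined(dumper, data):
    return dumper.represent_scalar(u'tag:yaml.org,2002:null', u'NULL')

class FallbackSafeDumper(yaml.SafeDumper):
    pass

FallbackSafeDumper.add_representer(None, represent_undefined)
print(yaml.dump({'obj': Opaque()}, Dumper=FallbackSafeDumper))  # obj: NULL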

View file

@@ -705,6 +705,9 @@ class TestDaemon(object):
def transplant_configs(cls, transport='zeromq'):
if os.path.isdir(RUNTIME_VARS.TMP_CONF_DIR):
shutil.rmtree(RUNTIME_VARS.TMP_CONF_DIR)
if os.path.isdir(RUNTIME_VARS.TMP_ROOT_DIR):
shutil.rmtree(RUNTIME_VARS.TMP_ROOT_DIR)
os.makedirs(RUNTIME_VARS.TMP_ROOT_DIR)
os.makedirs(RUNTIME_VARS.TMP_CONF_DIR)
os.makedirs(RUNTIME_VARS.TMP_SUB_MINION_CONF_DIR)
os.makedirs(RUNTIME_VARS.TMP_SYNDIC_MASTER_CONF_DIR)
@@ -719,11 +722,11 @@ class TestDaemon(object):
# This master connects to syndic_master via a syndic
master_opts = salt.config._read_conf_file(os.path.join(RUNTIME_VARS.CONF_DIR, 'master'))
master_opts['known_hosts_file'] = tests_known_hosts_file
master_opts['cachedir'] = os.path.join(TMP, 'rootdir', 'cache')
master_opts['cachedir'] = os.path.join(TMP_ROOT_DIR, 'cache')
master_opts['user'] = RUNTIME_VARS.RUNNING_TESTS_USER
master_opts['config_dir'] = RUNTIME_VARS.TMP_CONF_DIR
master_opts['root_dir'] = os.path.join(TMP, 'rootdir')
master_opts['pki_dir'] = os.path.join(TMP, 'rootdir', 'pki', 'master')
master_opts['root_dir'] = os.path.join(TMP_ROOT_DIR)
master_opts['pki_dir'] = os.path.join(TMP_ROOT_DIR, 'pki', 'master')
master_opts['syndic_master'] = 'localhost'
file_tree = {
'root_dir': os.path.join(FILES, 'pillar', 'base', 'file_tree'),
@@ -771,13 +774,13 @@ class TestDaemon(object):
# This minion connects to master
minion_opts = salt.config._read_conf_file(os.path.join(RUNTIME_VARS.CONF_DIR, 'minion'))
minion_opts['cachedir'] = os.path.join(TMP, 'rootdir', 'cache')
minion_opts['cachedir'] = os.path.join(TMP_ROOT_DIR, 'cache')
minion_opts['user'] = RUNTIME_VARS.RUNNING_TESTS_USER
minion_opts['config_dir'] = RUNTIME_VARS.TMP_CONF_DIR
minion_opts['root_dir'] = os.path.join(TMP, 'rootdir')
minion_opts['pki_dir'] = os.path.join(TMP, 'rootdir', 'pki')
minion_opts['hosts.file'] = os.path.join(TMP, 'rootdir', 'hosts')
minion_opts['aliases.file'] = os.path.join(TMP, 'rootdir', 'aliases')
minion_opts['root_dir'] = os.path.join(TMP_ROOT_DIR)
minion_opts['pki_dir'] = os.path.join(TMP_ROOT_DIR, 'pki')
minion_opts['hosts.file'] = os.path.join(TMP_ROOT_DIR, 'hosts')
minion_opts['aliases.file'] = os.path.join(TMP_ROOT_DIR, 'aliases')
if virtualenv_binary:
minion_opts['venv_bin'] = virtualenv_binary
@@ -788,8 +791,8 @@ class TestDaemon(object):
sub_minion_opts['config_dir'] = RUNTIME_VARS.TMP_SUB_MINION_CONF_DIR
sub_minion_opts['root_dir'] = os.path.join(TMP, 'rootdir-sub-minion')
sub_minion_opts['pki_dir'] = os.path.join(TMP, 'rootdir-sub-minion', 'pki', 'minion')
sub_minion_opts['hosts.file'] = os.path.join(TMP, 'rootdir', 'hosts')
sub_minion_opts['aliases.file'] = os.path.join(TMP, 'rootdir', 'aliases')
sub_minion_opts['hosts.file'] = os.path.join(TMP_ROOT_DIR, 'hosts')
sub_minion_opts['aliases.file'] = os.path.join(TMP_ROOT_DIR, 'aliases')
if virtualenv_binary:
sub_minion_opts['venv_bin'] = virtualenv_binary
@@ -801,6 +804,15 @@ class TestDaemon(object):
syndic_master_opts['root_dir'] = os.path.join(TMP, 'rootdir-syndic-master')
syndic_master_opts['pki_dir'] = os.path.join(TMP, 'rootdir-syndic-master', 'pki', 'master')
# This is the syndic for master
# Let's start with a copy of the syndic master configuration
syndic_opts = copy.deepcopy(syndic_master_opts)
# Let's update with the syndic configuration
syndic_opts.update(salt.config._read_conf_file(os.path.join(RUNTIME_VARS.CONF_DIR, 'syndic')))
syndic_opts['config_dir'] = RUNTIME_VARS.TMP_SYNDIC_MINION_CONF_DIR
syndic_opts['cachedir'] = os.path.join(TMP_ROOT_DIR, 'cache')
syndic_opts['root_dir'] = os.path.join(TMP_ROOT_DIR)
# This proxy connects to master
proxy_opts = salt.config._read_conf_file(os.path.join(CONF_DIR, 'proxy'))
proxy_opts['cachedir'] = os.path.join(TMP, 'rootdir-proxy', 'cache')
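The deepcopy-then-update pattern used for syndic_opts above, in isolation: start from a full copy of the parent configuration so nested dicts are not shared, then layer the child's own settings on top. A tiny illustration with made-up values:

import copy

syndic_master_opts = {'publish_port': 4505, 'log_granular_levels': {}}
syndic_opts = copy.deepcopy(syndic_master_opts)   # no shared nested dicts
syndic_opts.update({'publish_port': 4506})
assert syndic_master_opts['publish_port'] == 4505  # parent untouched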

View file

@@ -59,7 +59,7 @@ class TestGrainsReg(ModuleCase, LoaderModuleMockMixin):
'''
def setup_loader_modules(self):
self.opts = opts = salt.config.DEFAULT_MINION_OPTS
self.opts = opts = salt.config.DEFAULT_MINION_OPTS.copy()
utils = salt.loader.utils(opts, whitelist=['reg'])
return {
salt.modules.reg: {

View file

@@ -1,3 +1,5 @@
# -*- coding: utf-8 -*-
from __future__ import absolute_import
import functools
import os
import pytest
@@ -10,7 +12,7 @@ if os.environ.get('KITCHEN_USERNAME') == 'vagrant':
else:
test_host = testinfra.get_host('paramiko://{KITCHEN_USERNAME}@{KITCHEN_HOSTNAME}:{KITCHEN_PORT}'.format(**os.environ),
ssh_identity_file=os.environ.get('KITCHEN_SSH_KEY'))
else:
elif 'KITCHEN_USERNAME' in os.environ:
test_host = testinfra.get_host('docker://{KITCHEN_USERNAME}@{KITCHEN_CONTAINER_ID}'.format(**os.environ))

View file

@@ -15,6 +15,7 @@
from __future__ import absolute_import, print_function, unicode_literals
import base64
import errno
import fnmatch
import functools
import inspect
import logging
@@ -44,7 +45,7 @@ except ImportError:
from tests.integration import get_unused_localhost_port
# Import Salt Tests Support libs
from tests.support.unit import skip, _id
from tests.support.unit import skip, _id, SkipTest
from tests.support.mock import patch
from tests.support.paths import FILES, TMP
@@ -1058,21 +1059,10 @@ def requires_system_grains(func):
@functools.wraps(func)
def decorator(*args, **kwargs):
if not hasattr(requires_system_grains, '__grains__'):
import salt.config
root_dir = tempfile.mkdtemp(dir=TMP)
defaults = salt.config.DEFAULT_MINION_OPTS.copy()
defaults.pop('conf_file')
defaults.update({
'root_dir': root_dir,
'cachedir': 'cachedir',
'sock_dir': 'sock',
'pki_dir': 'pki',
'log_file': 'logs/minion',
'pidfile': 'pids/minion.pid'
})
opts = salt.config.minion_config(None, defaults=defaults)
# Late import
from tests.support.sminion import build_minion_opts
opts = build_minion_opts(minion_id='runtests-internal-sminion')
requires_system_grains.__grains__ = salt.loader.grains(opts)
shutil.rmtree(root_dir, ignore_errors=True)
kwargs['grains'] = requires_system_grains.__grains__
return func(*args, **kwargs)
return decorator
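The rewritten requires_system_grains above keeps the same memoisation trick as before; only the opts construction moved into tests.support.sminion. Reduced to the caching pattern, with a stand-in for the real grains collection:

import functools

def requires_system_grains(func):
    @functools.wraps(func)
    def decorator(*args, **kwargs):
        if not hasattr(requires_system_grains, '__grains__'):
            # stand-in for salt.loader.grains(build_minion_opts(...))
            requires_system_grains.__grains__ = {'os': 'Linux'}
        kwargs['grains'] = requires_system_grains.__grains__
        return func(*args, **kwargs)
    return decorator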
@@ -1084,6 +1074,38 @@ def requires_salt_modules(*names):
.. versionadded:: 0.5.2
'''
def _check_required_salt_modules(*required_salt_modules):
# Late import
from tests.support.sminion import create_sminion
required_salt_modules = set(required_salt_modules)
sminion = create_sminion(minion_id='runtests-internal-sminion')
available_modules = list(sminion.functions)
not_available_modules = set()
try:
cached_not_available_modules = sminion.__not_available_modules__
except AttributeError:
cached_not_available_modules = sminion.__not_available_modules__ = set()
if cached_not_available_modules:
for not_available_module in cached_not_available_modules:
if not_available_module in required_salt_modules:
not_available_modules.add(not_available_module)
required_salt_modules.remove(not_available_module)
for required_module_name in required_salt_modules:
search_name = required_module_name
if '.' not in search_name:
search_name += '.*'
if not fnmatch.filter(available_modules, search_name):
not_available_modules.add(required_module_name)
cached_not_available_modules.add(required_module_name)
if not_available_modules:
if len(not_available_modules) == 1:
raise SkipTest('Salt module \'{}\' is not available'.format(*not_available_modules))
raise SkipTest('Salt modules not available: {}'.format(', '.join(not_available_modules)))
def decorator(caller):
if inspect.isclass(caller):
@@ -1091,67 +1113,19 @@ def requires_salt_modules(*names):
old_setup = getattr(caller, 'setUp', None)
def setUp(self, *args, **kwargs):
_check_required_salt_modules(*names)
if old_setup is not None:
old_setup(self, *args, **kwargs)
if not hasattr(self, 'run_function'):
raise RuntimeError(
'{0} does not have the \'run_function\' method which '
'is necessary to collect the loaded modules'.format(
self.__class__.__name__
)
)
if not hasattr(requires_salt_modules, '__available_modules__'):
requires_salt_modules.__available_modules__ = set()
_names = []
for name in names:
if name not in requires_salt_modules.__available_modules__:
_names.append(name)
if _names:
not_found_modules = self.run_function('runtests_helpers.modules_available', _names)
for name in _names:
if name not in not_found_modules:
requires_salt_modules.__available_modules__.add(name)
if not_found_modules:
if len(not_found_modules) == 1:
self.skipTest('Salt module {0!r} is not available'.format(not_found_modules[0]))
self.skipTest('Salt modules not available: {0!r}'.format(not_found_modules))
caller.setUp = setUp
return caller
# We're simply decorating functions
@functools.wraps(caller)
def wrapper(cls):
if not hasattr(cls, 'run_function'):
raise RuntimeError(
'{0} does not have the \'run_function\' method which is '
'necessary to collect the loaded modules'.format(
cls.__class__.__name__
)
)
if not hasattr(requires_salt_modules, '__available_modules__'):
requires_salt_modules.__available_modules__ = set()
_names = []
for name in names:
if name not in requires_salt_modules.__available_modules__:
_names.append(name)
if _names:
not_found_modules = cls.run_function('runtests_helpers.modules_available', _names)
for name in _names:
if name not in not_found_modules:
requires_salt_modules.__available_modules__.add(name)
if not_found_modules:
if len(not_found_modules) == 1:
cls.skipTest('Salt module {0!r} is not available'.format(not_found_modules[0]))
cls.skipTest('Salt modules not available: {0!r}'.format(not_found_modules))
_check_required_salt_modules(*names)
return caller(cls)
return wrapper
return decorator
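The new _check_required_salt_modules logic above boils down to an fnmatch lookup against the loaded function names: a bare module name matches any of its functions ('pkg' becomes 'pkg.*'), while a dotted name must match a concrete function. In isolation, with made-up module names:

import fnmatch

available_modules = ['pkg.install', 'pkg.remove', 'test.ping']
for required in ('pkg', 'test.ping', 'lxd'):
    search_name = required if '.' in required else required + '.*'
    if fnmatch.filter(available_modules, search_name):
        print('{0} is available'.format(required))
    else:
        print('{0} would raise SkipTest'.format(required))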

View file

@@ -72,7 +72,7 @@ def __global_logging_exception_handler(exc_type, exc_value, exc_traceback,
# Log the exception
try:
msg = (
'An un-handled exception was caught by salt-testing\'s global exception handler:\n{}: {}\n{}'.format(
'An un-handled exception was caught by salt\'s testing global exception handler:\n{}: {}\n{}'.format(
exc_type.__name__,
exc_value,
''.join(_format_exception(exc_type, exc_value, exc_traceback)).strip()
@@ -402,7 +402,7 @@ class SaltTestingParser(optparse.OptionParser):
try:
return self.__test_mods
except AttributeError:
self.__test_mods = set(tests.support.paths.test_mods())
self.__test_mods = set(tests.support.paths.list_test_mods())
return self.__test_mods
def _map_files(self, files):

View file

@@ -55,6 +55,7 @@ SYS_TMP_DIR = os.path.abspath(os.path.realpath(
os.environ.get('TMPDIR', tempfile.gettempdir()) if not sys.platform.startswith('darwin') else '/tmp'
))
TMP = os.path.join(SYS_TMP_DIR, 'salt-tests-tmpdir')
TMP_ROOT_DIR = os.path.join(TMP, 'rootdir')
FILES = os.path.join(INTEGRATION_TEST_DIR, 'files')
BASE_FILES = os.path.join(INTEGRATION_TEST_DIR, 'files', 'file', 'base')
PROD_FILES = os.path.join(INTEGRATION_TEST_DIR, 'files', 'file', 'prod')
@@ -125,7 +126,7 @@ SCRIPT_TEMPLATES = {
}
def test_mods():
def list_test_mods():
'''
A generator which returns all of the test files
'''

View file

@@ -204,6 +204,7 @@ RUNTIME_VARS = RuntimeVars(
PILLAR_DIR=paths.PILLAR_DIR,
ENGINES_DIR=paths.ENGINES_DIR,
LOG_HANDLERS_DIR=paths.LOG_HANDLERS_DIR,
TMP_ROOT_DIR=paths.TMP_ROOT_DIR,
TMP_CONF_DIR=paths.TMP_CONF_DIR,
TMP_CONF_MASTER_INCLUDES=os.path.join(paths.TMP_CONF_DIR, 'master.d'),
TMP_CONF_MINION_INCLUDES=os.path.join(paths.TMP_CONF_DIR, 'minion.d'),

tests/support/sminion.py (new file, 218 lines)
View file

@@ -0,0 +1,218 @@
# -*- coding: utf-8 -*-
'''
tests.support.sminion
~~~~~~~~~~~~~~~~~~~~~
SMinion's support functions
'''
# Import python libs
from __future__ import absolute_import, print_function, unicode_literals
import os
import sys
import shutil
import hashlib
import logging
# Import salt libs
import salt.config
import salt.minion
import salt.utils.files
import salt.utils.path
import salt.utils.platform
import salt.utils.stringutils
import salt.utils.verify
import salt.utils.yaml
# Import testing libs
from tests.support.runtests import RUNTIME_VARS
log = logging.getLogger(__name__)
def build_minion_opts(minion_id=None,
root_dir=None,
initial_conf_file=None,
minion_opts_overrides=None,
skip_cached_opts=False,
cache_opts=True,
minion_role=None):
if minion_id is None:
minion_id = 'pytest-internal-sminion'
if skip_cached_opts is False:
try:
opts_cache = build_minion_opts.__cached_opts__
except AttributeError:
opts_cache = build_minion_opts.__cached_opts__ = {}
cached_opts = opts_cache.get(minion_id)
if cached_opts:
return cached_opts
log.info('Generating testing minion %r configuration...', minion_id)
if root_dir is None:
hashed_minion_id = hashlib.sha1()
hashed_minion_id.update(salt.utils.stringutils.to_bytes(minion_id))
root_dir = os.path.join(RUNTIME_VARS.TMP_ROOT_DIR, hashed_minion_id.hexdigest()[:6])
if initial_conf_file is not None:
minion_opts = salt.config._read_conf_file(initial_conf_file) # pylint: disable=protected-access
else:
minion_opts = {}
conf_dir = os.path.join(root_dir, 'conf')
conf_file = os.path.join(conf_dir, 'minion')
minion_opts['id'] = minion_id
minion_opts['conf_file'] = conf_file
minion_opts['root_dir'] = root_dir
minion_opts['cachedir'] = 'cache'
minion_opts['user'] = RUNTIME_VARS.RUNNING_TESTS_USER
minion_opts['pki_dir'] = 'pki'
minion_opts['hosts.file'] = os.path.join(RUNTIME_VARS.TMP_ROOT_DIR, 'hosts')
minion_opts['aliases.file'] = os.path.join(RUNTIME_VARS.TMP_ROOT_DIR, 'aliases')
minion_opts['file_client'] = 'local'
minion_opts['server_id_use_crc'] = 'adler32'
minion_opts['pillar_roots'] = {
'base': [
RUNTIME_VARS.TMP_PILLAR_TREE,
]
}
minion_opts['file_roots'] = {
'base': [
# Let's support runtime created files that can be used like:
# salt://my-temp-file.txt
RUNTIME_VARS.TMP_STATE_TREE
],
# Alternate root to test __env__ choices
'prod': [
os.path.join(RUNTIME_VARS.FILES, 'file', 'prod'),
RUNTIME_VARS.TMP_PRODENV_STATE_TREE
]
}
if initial_conf_file and initial_conf_file.startswith(RUNTIME_VARS.FILES):
# We assume we were passed a minion configuration file defined for testing and, as such,
# we define the file and pillar roots to include the testing states/pillar trees
minion_opts['pillar_roots']['base'].append(
os.path.join(RUNTIME_VARS.FILES, 'pillar', 'base'),
)
minion_opts['file_roots']['base'].append(
os.path.join(RUNTIME_VARS.FILES, 'file', 'base'),
)
minion_opts['file_roots']['prod'].append(
os.path.join(RUNTIME_VARS.FILES, 'file', 'prod'),
)
# We need to copy the extension modules into the new minion root_dir or
# the configured path will be prefixed by it
extension_modules_path = os.path.join(root_dir, 'extension_modules')
if not os.path.exists(extension_modules_path):
shutil.copytree(
os.path.join(
RUNTIME_VARS.FILES, 'extension_modules'
),
extension_modules_path
)
minion_opts['extension_modules'] = extension_modules_path
# Custom grains
if 'grains' not in minion_opts:
minion_opts['grains'] = {}
if minion_role is not None:
minion_opts['grains']['role'] = minion_role
# Under Windows we can't seem to properly create a virtualenv off of another
# virtualenv. We can on Linux, but we will still point to the virtualenv binary
# outside of the virtualenv running the test suite, if that's the case.
try:
real_prefix = sys.real_prefix
# The above attribute exists, this is a virtualenv
if salt.utils.platform.is_windows():
virtualenv_binary = os.path.join(real_prefix, 'Scripts', 'virtualenv.exe')
else:
# We need to remove the virtualenv from PATH or we'll get the virtualenv binary
# from within the virtualenv, we don't want that
path = os.environ.get('PATH')
if path is not None:
path_items = path.split(os.pathsep)
for item in path_items[:]:
if item.startswith(sys.base_prefix):
path_items.remove(item)
os.environ['PATH'] = os.pathsep.join(path_items)
virtualenv_binary = salt.utils.path.which('virtualenv')
if path is not None:
# Restore previous environ PATH
os.environ['PATH'] = path
if virtualenv_binary and not virtualenv_binary.startswith(real_prefix):
virtualenv_binary = None
if virtualenv_binary and not os.path.exists(virtualenv_binary):
# It doesn't exist?!
virtualenv_binary = None
except AttributeError:
# We're not running inside a virtualenv
virtualenv_binary = None
if virtualenv_binary:
minion_opts['venv_bin'] = virtualenv_binary
# Override minion_opts with minion_opts_overrides
if minion_opts_overrides:
minion_opts.update(minion_opts_overrides)
if not os.path.exists(conf_dir):
os.makedirs(conf_dir)
with salt.utils.files.fopen(conf_file, 'w') as fp_:
salt.utils.yaml.safe_dump(minion_opts, fp_, default_flow_style=False)
log.info('Generating testing minion %r configuration completed.', minion_id)
minion_opts = salt.config.minion_config(conf_file, minion_id=minion_id, cache_minion_id=True)
salt.utils.verify.verify_env(
[
os.path.join(minion_opts['pki_dir'], 'accepted'),
os.path.join(minion_opts['pki_dir'], 'rejected'),
os.path.join(minion_opts['pki_dir'], 'pending'),
os.path.dirname(minion_opts['log_file']),
minion_opts['extension_modules'],
minion_opts['cachedir'],
minion_opts['sock_dir'],
RUNTIME_VARS.TMP_STATE_TREE,
RUNTIME_VARS.TMP_PILLAR_TREE,
RUNTIME_VARS.TMP_PRODENV_STATE_TREE,
RUNTIME_VARS.TMP,
],
RUNTIME_VARS.RUNNING_TESTS_USER,
root_dir=root_dir
)
if cache_opts:
try:
opts_cache = build_minion_opts.__cached_opts__
except AttributeError:
opts_cache = build_minion_opts.__cached_opts__ = {}
opts_cache[minion_id] = minion_opts
return minion_opts
def create_sminion(minion_id=None,
root_dir=None,
initial_conf_file=None,
sminion_cls=salt.minion.SMinion,
minion_opts_overrides=None,
skip_cached_minion=False,
cache_sminion=True):
if minion_id is None:
minion_id = 'pytest-internal-sminion'
if skip_cached_minion is False:
try:
minions_cache = create_sminion.__cached_minions__
except AttributeError:
minions_cache = create_sminion.__cached_minions__ = {}
cached_minion = minions_cache.get(minion_id)
if cached_minion:
return cached_minion
minion_opts = build_minion_opts(minion_id=minion_id,
root_dir=root_dir,
initial_conf_file=initial_conf_file,
minion_opts_overrides=minion_opts_overrides,
skip_cached_opts=skip_cached_minion,
cache_opts=cache_sminion)
log.info('Instantiating a testing %s(%s)', sminion_cls.__name__, minion_id)
sminion = sminion_cls(minion_opts)
if cache_sminion:
try:
minions_cache = create_sminion.__cached_minions__
except AttributeError:
minions_cache = create_sminion.__cached_minions__ = {}
minions_cache[minion_id] = sminion
return sminion
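A hedged usage sketch for the new helpers (the minion_id mirrors the one used in helpers.py above; 'test.ping' is just an example function): opts are built and cached per minion_id, and create_sminion returns a cached SMinion whose loader functions can be called without any running daemons.

from tests.support.sminion import build_minion_opts, create_sminion

opts = build_minion_opts(minion_id='runtests-internal-sminion')
sminion = create_sminion(minion_id='runtests-internal-sminion')
print(sminion.functions['test.ping']())   # True, if the test module loads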

View file

@@ -30,7 +30,7 @@ class StatusBeaconTestCase(TestCase, LoaderModuleMockMixin):
'''
def setup_loader_modules(self):
opts = salt.config.DEFAULT_MINION_OPTS
opts = salt.config.DEFAULT_MINION_OPTS.copy()
opts['grains'] = salt.loader.grains(opts)
module_globals = {
'__opts__': opts,

View file

@@ -9,6 +9,7 @@ import time
# Salt libs
import salt.utils.files
import salt.utils.platform
from salt.beacons import watchdog
from salt.ext.six.moves import range
@@ -40,6 +41,7 @@ def create(path, content=None):
@skipIf(not watchdog.HAS_WATCHDOG, 'watchdog is not available')
@skipIf(salt.utils.platform.is_darwin(), 'Tests were already being skipped on macOS under nox. Keep it like that for now.')
class IWatchdogBeaconTestCase(TestCase, LoaderModuleMockMixin):
'''
Test case for salt.beacons.watchdog

View file

@@ -30,7 +30,7 @@ class EngineSlackTestCase(TestCase, LoaderModuleMockMixin):
return {slack: {}}
def setUp(self):
mock_opts = salt.config.DEFAULT_MINION_OPTS
mock_opts = salt.config.DEFAULT_MINION_OPTS.copy()
token = 'xoxb-xxxxxxxxxx-xxxxxxxxxxxxxxxxxxxxxxxx'
with patch.dict(slack.__opts__, mock_opts):

View file

@@ -173,7 +173,7 @@ class BotoApiGatewayTestCaseBase(TestCase, LoaderModuleMockMixin):
conn = None
def setup_loader_modules(self):
self.opts = opts = salt.config.DEFAULT_MINION_OPTS
self.opts = opts = salt.config.DEFAULT_MINION_OPTS.copy()
utils = salt.loader.utils(
opts,
whitelist=['boto3', 'args', 'systemd', 'path', 'platform'])

View file

@@ -102,7 +102,7 @@ class BotoCloudTrailTestCaseBase(TestCase, LoaderModuleMockMixin):
conn = None
def setup_loader_modules(self):
self.opts = opts = salt.config.DEFAULT_MINION_OPTS
self.opts = opts = salt.config.DEFAULT_MINION_OPTS.copy()
utils = salt.loader.utils(
opts,
whitelist=['boto3', 'args', 'systemd', 'path', 'platform'],

View file

@@ -91,7 +91,7 @@ class BotoCloudWatchEventTestCaseBase(TestCase, LoaderModuleMockMixin):
conn = None
def setup_loader_modules(self):
self.opts = opts = salt.config.DEFAULT_MINION_OPTS
self.opts = opts = salt.config.DEFAULT_MINION_OPTS.copy()
utils = salt.loader.utils(
opts,
whitelist=['boto3', 'args', 'systemd', 'path', 'platform'],

View file

@@ -116,7 +116,7 @@ class BotoCognitoIdentityTestCaseBase(TestCase, LoaderModuleMockMixin):
conn = None
def setup_loader_modules(self):
self.opts = opts = salt.config.DEFAULT_MINION_OPTS
self.opts = opts = salt.config.DEFAULT_MINION_OPTS.copy()
utils = salt.loader.utils(
opts,
whitelist=['boto3', 'args', 'systemd', 'path', 'platform'],

View file

@@ -88,7 +88,7 @@ class BotoElasticsearchDomainTestCaseBase(TestCase, LoaderModuleMockMixin):
conn = None
def setup_loader_modules(self):
self.opts = salt.config.DEFAULT_MINION_OPTS
self.opts = salt.config.DEFAULT_MINION_OPTS.copy()
utils = salt.loader.utils(
self.opts,
whitelist=['boto3', 'args', 'systemd', 'path', 'platform'],

View file

@@ -104,7 +104,7 @@ class BotoElbTestCase(TestCase, LoaderModuleMockMixin):
'''
def setup_loader_modules(self):
opts = salt.config.DEFAULT_MASTER_OPTS
opts = salt.config.DEFAULT_MASTER_OPTS.copy()
utils = salt.loader.utils(
opts,
whitelist=['boto', 'args', 'systemd', 'path', 'platform'])

View file

@@ -127,7 +127,7 @@ class BotoIoTTestCaseBase(TestCase, LoaderModuleMockMixin):
conn = None
def setup_loader_modules(self):
self.opts = opts = salt.config.DEFAULT_MINION_OPTS
self.opts = opts = salt.config.DEFAULT_MINION_OPTS.copy()
utils = salt.loader.utils(
opts,
whitelist=['boto3', 'args', 'systemd', 'path', 'platform'],

View file

@@ -122,7 +122,7 @@ class BotoLambdaTestCaseBase(TestCase, LoaderModuleMockMixin):
conn = None
def setup_loader_modules(self):
self.opts = opts = salt.config.DEFAULT_MINION_OPTS
self.opts = opts = salt.config.DEFAULT_MINION_OPTS.copy()
utils = salt.loader.utils(
opts,
whitelist=['boto3', 'args', 'systemd', 'path', 'platform'],

View file

@@ -76,7 +76,7 @@ class BotoRoute53TestCase(TestCase, LoaderModuleMockMixin):
TestCase for salt.modules.boto_route53 module
'''
def setup_loader_modules(self):
self.opts = salt.config.DEFAULT_MINION_OPTS
self.opts = salt.config.DEFAULT_MINION_OPTS.copy()
self.opts['route53.keyid'] = 'GKTADJGHEIQSXMKKRBJ08H'
self.opts['route53.key'] = 'askdjghsdfjkghWupUjasdflkdfklgjsdfjajkghs'
utils = salt.loader.utils(self.opts)

View file

@@ -205,7 +205,7 @@ class BotoS3BucketTestCaseBase(TestCase, LoaderModuleMockMixin):
conn = None
def setup_loader_modules(self):
self.opts = opts = salt.config.DEFAULT_MINION_OPTS
self.opts = opts = salt.config.DEFAULT_MINION_OPTS.copy()
utils = salt.loader.utils(
opts,
whitelist=['boto3', 'args', 'systemd', 'path', 'platform'],

View file

@@ -95,7 +95,7 @@ class BotoSecgroupTestCase(TestCase, LoaderModuleMockMixin):
'''
def setup_loader_modules(self):
opts = salt.config.DEFAULT_MASTER_OPTS
opts = salt.config.DEFAULT_MASTER_OPTS.copy()
utils = salt.loader.utils(
opts,
whitelist=['boto', 'args', 'systemd', 'path', 'platform'])

View file

@@ -140,7 +140,7 @@ class BotoVpcTestCaseBase(TestCase, LoaderModuleMockMixin):
conn3 = None
def setup_loader_modules(self):
self.opts = opts = salt.config.DEFAULT_MINION_OPTS
self.opts = opts = salt.config.DEFAULT_MINION_OPTS.copy()
utils = salt.loader.utils(
opts,
whitelist=['boto', 'boto3', 'args', 'systemd', 'path', 'platform'])

View file

@@ -45,7 +45,7 @@ class DockerTestCase(TestCase, LoaderModuleMockMixin):
'''
def setup_loader_modules(self):
utils = salt.loader.utils(
salt.config.DEFAULT_MINION_OPTS,
salt.config.DEFAULT_MINION_OPTS.copy(),
whitelist=['args', 'docker', 'json', 'state', 'thin',
'systemd', 'path', 'platform']
)

View file

@@ -30,7 +30,7 @@ class NaclTest(TestCase, LoaderModuleMockMixin):
'''
def setup_loader_modules(self):
self.unencrypted_data = salt.utils.stringutils.to_bytes('hello')
self.opts = salt.config.DEFAULT_MINION_OPTS
self.opts = salt.config.DEFAULT_MINION_OPTS.copy()
utils = salt.loader.utils(self.opts)
funcs = salt.loader.minion_mods(self.opts, utils=utils, whitelist=['nacl'])

View file

@@ -27,7 +27,7 @@ class IpsTestCase(TestCase, LoaderModuleMockMixin):
Test cases for salt.modules.solarisips
'''
def setup_loader_modules(self):
self.opts = opts = salt.config.DEFAULT_MINION_OPTS
self.opts = opts = salt.config.DEFAULT_MINION_OPTS.copy()
utils = salt.loader.utils(
opts,
whitelist=['pkg', 'path', 'platform'])

View file

@@ -344,7 +344,7 @@ class StateTestCase(TestCase, LoaderModuleMockMixin):
def setup_loader_modules(self):
utils = salt.loader.utils(
salt.config.DEFAULT_MINION_OPTS,
salt.config.DEFAULT_MINION_OPTS.copy(),
whitelist=['state', 'args', 'systemd', 'path', 'platform']
)
utils.keys()

View file

@@ -42,7 +42,7 @@ class ZfsTestCase(TestCase, LoaderModuleMockMixin):
This class contains a set of functions that test salt.modules.zfs module
'''
def setup_loader_modules(self):
self.opts = opts = salt.config.DEFAULT_MINION_OPTS
self.opts = opts = salt.config.DEFAULT_MINION_OPTS.copy()
utils = salt.loader.utils(
opts,
whitelist=['zfs', 'args', 'systemd', 'path', 'platform'])

View file

@@ -43,7 +43,7 @@ class ZpoolTestCase(TestCase, LoaderModuleMockMixin):
This class contains a set of functions that test salt.modules.zpool module
'''
def setup_loader_modules(self):
self.opts = opts = salt.config.DEFAULT_MINION_OPTS
self.opts = opts = salt.config.DEFAULT_MINION_OPTS.copy()
utils = salt.loader.utils(
opts,
whitelist=['zfs', 'args', 'systemd', 'path', 'platform'])

View file

@@ -397,7 +397,7 @@ class BotoApiGatewayStateTestCaseBase(TestCase, LoaderModuleMockMixin):
@classmethod
def setUpClass(cls):
cls.opts = salt.config.DEFAULT_MINION_OPTS
cls.opts = salt.config.DEFAULT_MINION_OPTS.copy()
cls.opts['grains'] = salt.loader.grains(cls.opts)
@classmethod

View file

@@ -36,7 +36,7 @@ class BotoCloudfrontTestCase(TestCase, LoaderModuleMockMixin):
@classmethod
def setUpClass(cls):
cls.opts = salt.config.DEFAULT_MINION_OPTS
cls.opts = salt.config.DEFAULT_MINION_OPTS.copy()
cls.name = 'my_distribution'
cls.base_ret = {'name': cls.name, 'changes': {}}

View file

@@ -128,7 +128,7 @@ class BotoCloudTrailStateTestCaseBase(TestCase, LoaderModuleMockMixin):
@classmethod
def setUpClass(cls):
cls.opts = salt.config.DEFAULT_MINION_OPTS
cls.opts = salt.config.DEFAULT_MINION_OPTS.copy()
cls.opts['grains'] = salt.loader.grains(cls.opts)
@classmethod

View file

@@ -105,7 +105,7 @@ class BotoCloudWatchEventStateTestCaseBase(TestCase, LoaderModuleMockMixin):
@classmethod
def setUpClass(cls):
cls.opts = salt.config.DEFAULT_MINION_OPTS
cls.opts = salt.config.DEFAULT_MINION_OPTS.copy()
cls.opts['grains'] = salt.loader.grains(cls.opts)
@classmethod

View file

@@ -150,7 +150,7 @@ class BotoCognitoIdentityStateTestCaseBase(TestCase, LoaderModuleMockMixin):
@classmethod
def setUpClass(cls):
cls.opts = salt.config.DEFAULT_MINION_OPTS
cls.opts = salt.config.DEFAULT_MINION_OPTS.copy()
cls.opts['grains'] = salt.loader.grains(cls.opts)
@classmethod

View file

@@ -108,7 +108,7 @@ class BotoElasticsearchDomainStateTestCaseBase(TestCase, LoaderModuleMockMixin):
@classmethod
def setUpClass(cls):
cls.opts = salt.config.DEFAULT_MINION_OPTS
cls.opts = salt.config.DEFAULT_MINION_OPTS.copy()
cls.opts['grains'] = salt.loader.grains(cls.opts)
@classmethod

Some files were not shown because too many files have changed in this diff.