Merge branch 'master' into 2019_2_1_port_52365

Commit 06bcd4af96 by Daniel Wozniak, 2019-10-24 09:30:43 -07:00 (committed by GitHub)
378 changed files with 14746 additions and 29889 deletions


@@ -2,7 +2,7 @@
// Define the maximum time, in hours, that a test run should run for
def global_timeout = 2
def salt_target_branch = '2019.2.1'
def salt_target_branch = 'master'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '10')),

.ci/kitchen-amazon1-py2 (new file, 159 lines)

@@ -0,0 +1,159 @@
@Library('salt@1.1') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'amazon'
def distro_version = '1'
def python_version = 'py2'
def test_transport = 'ZeroMQ'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--ssh-tests'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '10')),
parameters([
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
])
])
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
// vim: ft=groovy
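For reference, a rough sketch of how the timeout budget in the pipeline above works out, assuming testrun_timeout = 6 as defined in this file (an illustration only, not part of the committed Jenkinsfile):

// Illustration: timeout budget with testrun_timeout = 6
def testrun_timeout = 6                          // hours allotted to converge + verify
def global_timeout  = testrun_timeout + 1        // = 7 hours for the whole wrappedNode block
def inner_timeout   = testrun_timeout * 60 - 15  // = 345 minutes wrapping converge + verify,
                                                 // keeping 15 minutes (plus the extra hour)
                                                 // free for artifact download and cleanup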


@@ -10,7 +10,7 @@ def distro_name = 'amazon'
def distro_version = '2'
def python_version = 'py2'
def test_transport = 'ZeroMQ'
def salt_target_branch = '2019.2.1'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--ssh-tests'
@@ -42,8 +42,8 @@ wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/root/bin',
'RBENV_VERSION=2.4.2',
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
@@ -65,7 +65,8 @@ wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
@@ -76,74 +77,77 @@ wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
sshagent(credentials: ['jenkins-testing-ssh-key']) {
sh 'ssh-add ~/.ssh/kitchen.pem'
try {
timeout(time: testrun_timeout, unit: 'HOURS') {
stage('Converge VM') {
sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}" || true
fi
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}


@@ -10,7 +10,7 @@ def distro_name = 'amazon'
def distro_version = '2'
def python_version = 'py3'
def test_transport = 'ZeroMQ'
def salt_target_branch = '2019.2.1'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--ssh-tests'
@@ -42,8 +42,8 @@ wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/root/bin',
'RBENV_VERSION=2.4.2',
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
@@ -65,7 +65,8 @@ wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
@@ -76,74 +77,77 @@ wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
sshagent(credentials: ['jenkins-testing-ssh-key']) {
sh 'ssh-add ~/.ssh/kitchen.pem'
try {
timeout(time: testrun_timeout, unit: 'HOURS') {
stage('Converge VM') {
sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}" || true
fi
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}

.ci/kitchen-archlts-py2 (new file, 159 lines)

@@ -0,0 +1,159 @@
@Library('salt@1.1') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'arch'
def distro_version = 'lts'
def python_version = 'py2'
def test_transport = 'ZeroMQ'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '-n integration.modules.test_pkg'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '10')),
parameters([
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
])
])
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
// vim: ft=groovy
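The 'Create VM' stage above retries with jitter and falls back from a spot request to a plain create; a commented sketch of that same shell logic, assuming ~/workspace/spot.yml switches the kitchen EC2 driver to spot instances (implied by the file name, not shown in this diff):

retry(3) {
    sh '''
    # random 30-120 second jitter so parallel jobs do not hit EC2 at the same instant
    t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
    # prefer spot instances via a local kitchen override (assumption: spot.yml requests spot capacity)
    cp -f ~/workspace/spot.yml .kitchen.local.yml
    # if the spot create fails: destroy the half-created instance, drop the override,
    # and create again without it
    bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (
        bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM
        rm .kitchen.local.yml
        bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM
    )
    echo "ExitCode: $?"
    '''
}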

.ci/kitchen-archlts-py3 (new file, 159 lines)

@@ -0,0 +1,159 @@
@Library('salt@1.1') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'arch'
def distro_version = 'lts'
def python_version = 'py3'
def test_transport = 'ZeroMQ'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '-n integration.modules.test_pkg'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '10')),
parameters([
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
])
])
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
// vim: ft=groovy
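The 'Upload Coverage' stage above only uploads when a full run actually produced a coverage report, and a failed upload never fails the build; an equivalent guard written with chained tests instead of the obsolescent -a test operator (a readability sketch, behavior intended to match the original):

sh '''
# upload only for full runs that actually produced a report
if [ "${FORCE_FULL}" = "true" ] && [ -f artifacts/coverage/coverage.xml ]; then
    # || true: a codecov outage should not mark the build as failed
    (curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''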


@@ -10,7 +10,7 @@ def distro_name = 'centos'
def distro_version = '6'
def python_version = 'py2'
def test_transport = 'ZeroMQ'
def salt_target_branch = '2019.2.1'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--ssh-tests'
@@ -42,8 +42,8 @@ wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/root/bin',
'RBENV_VERSION=2.4.2',
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
@@ -65,7 +65,8 @@ wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
@@ -76,74 +77,77 @@ wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
sshagent(credentials: ['jenkins-testing-ssh-key']) {
sh 'ssh-add ~/.ssh/kitchen.pem'
try {
timeout(time: testrun_timeout, unit: 'HOURS') {
stage('Converge VM') {
sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}" || true
fi
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}


@@ -10,7 +10,7 @@ def distro_name = 'centos'
def distro_version = '7'
def python_version = 'py2'
def test_transport = 'ZeroMQ'
def salt_target_branch = '2019.2.1'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--ssh-tests'
@@ -42,8 +42,8 @@ wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/root/bin',
'RBENV_VERSION=2.4.2',
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
@@ -65,7 +65,8 @@ wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
@@ -76,74 +77,77 @@ wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
sshagent(credentials: ['jenkins-testing-ssh-key']) {
sh 'ssh-add ~/.ssh/kitchen.pem'
try {
timeout(time: testrun_timeout, unit: 'HOURS') {
stage('Converge VM') {
sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}" || true
fi
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}


@@ -10,7 +10,7 @@ def distro_name = 'centos'
def distro_version = '7'
def python_version = 'py2'
def test_transport = 'TCP'
def salt_target_branch = '2019.2.1'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--ssh-tests'
@@ -42,8 +42,8 @@ wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/root/bin',
'RBENV_VERSION=2.4.2',
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
@@ -65,7 +65,8 @@ wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
@@ -76,74 +77,77 @@ wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
sshagent(credentials: ['jenkins-testing-ssh-key']) {
sh 'ssh-add ~/.ssh/kitchen.pem'
try {
timeout(time: testrun_timeout, unit: 'HOURS') {
stage('Converge VM') {
sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}" || true
fi
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}


@@ -10,7 +10,7 @@ def distro_name = 'centos'
def distro_version = '7'
def python_version = 'py3'
def test_transport = 'ZeroMQ'
def salt_target_branch = '2019.2.1'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--ssh-tests'
@@ -42,8 +42,8 @@ wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/root/bin',
'RBENV_VERSION=2.4.2',
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
@@ -65,7 +65,8 @@ wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
@@ -76,74 +77,77 @@ wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
sshagent(credentials: ['jenkins-testing-ssh-key']) {
sh 'ssh-add ~/.ssh/kitchen.pem'
try {
timeout(time: testrun_timeout, unit: 'HOURS') {
stage('Converge VM') {
sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}" || true
fi
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}


@@ -10,7 +10,7 @@ def distro_name = 'centos'
def distro_version = '7'
def python_version = 'py3'
def test_transport = 'TCP'
def salt_target_branch = '2019.2.1'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--ssh-tests'
@@ -42,8 +42,8 @@ wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/root/bin',
'RBENV_VERSION=2.4.2',
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
@@ -65,7 +65,8 @@ wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
@@ -76,74 +77,77 @@ wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
sshagent(credentials: ['jenkins-testing-ssh-key']) {
sh 'ssh-add ~/.ssh/kitchen.pem'
try {
timeout(time: testrun_timeout, unit: 'HOURS') {
stage('Converge VM') {
sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}" || true
fi
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
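
The 'Create VM' hunk above replaces the plain kitchen create with a spot-instance attempt plus an on-demand fallback; ~/workspace/spot.yml presumably carries a spot-pricing driver override (an inference from the file name, not shown in this diff). The same shell, pulled out and annotated:

    retry(3) {
        sh '''
        # Stagger concurrent builds so they do not hit the EC2 API at the same instant.
        t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
        # Try first with the local override in place; if the create fails, tear down the
        # half-built instance, drop the override and retry with the default driver settings.
        cp -f ~/workspace/spot.yml .kitchen.local.yml
        bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
        '''
    }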

159
.ci/kitchen-debian10-py3 Normal file

@@ -0,0 +1,159 @@
@Library('salt@1.1') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'debian'
def distro_version = '10'
def python_version = 'py3'
def test_transport = 'ZeroMQ'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--ssh-tests'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '10')),
parameters([
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
])
])
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
// vim: ft=groovy
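
Each of the new pipeline files above and below opens with the same concurrency guard. A short annotated version, assuming the standard Jenkins milestone step semantics (a newer build passing a milestone aborts older builds that have not yet reached it):

    def buildNumber = env.BUILD_NUMBER as int
    if (buildNumber > 1) {
        // Pass the previous build's milestone so any older run of this job that is
        // still short of this point gets aborted in favour of the newer build.
        milestone(buildNumber - 1)
    }
    // Claim our own milestone so the next push can abort this run in turn.
    milestone(buildNumber)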


@@ -10,7 +10,7 @@ def distro_name = 'debian'
def distro_version = '8'
def python_version = 'py2'
def test_transport = 'ZeroMQ'
def salt_target_branch = '2019.2.1'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--ssh-tests'
@@ -42,8 +42,8 @@ wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/root/bin',
'RBENV_VERSION=2.4.2',
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
@@ -65,7 +65,8 @@ wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
@@ -76,74 +77,77 @@ wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
sshagent(credentials: ['jenkins-testing-ssh-key']) {
sh 'ssh-add ~/.ssh/kitchen.pem'
try {
timeout(time: testrun_timeout, unit: 'HOURS') {
stage('Converge VM') {
sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}" || true
fi
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}


@@ -10,7 +10,7 @@ def distro_name = 'debian'
def distro_version = '9'
def python_version = 'py2'
def test_transport = 'ZeroMQ'
def salt_target_branch = '2019.2.1'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--ssh-tests'
@@ -42,8 +42,8 @@ wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/root/bin',
'RBENV_VERSION=2.4.2',
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
@@ -65,7 +65,8 @@ wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
@@ -76,74 +77,77 @@ wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
sshagent(credentials: ['jenkins-testing-ssh-key']) {
sh 'ssh-add ~/.ssh/kitchen.pem'
try {
timeout(time: testrun_timeout, unit: 'HOURS') {
stage('Converge VM') {
sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}" || true
fi
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}


@@ -10,7 +10,7 @@ def distro_name = 'debian'
def distro_version = '9'
def python_version = 'py3'
def test_transport = 'ZeroMQ'
def salt_target_branch = '2019.2.1'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--ssh-tests'
@@ -42,8 +42,8 @@ wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/root/bin',
'RBENV_VERSION=2.4.2',
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
@@ -65,7 +65,8 @@ wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
@@ -76,74 +77,77 @@ wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
sshagent(credentials: ['jenkins-testing-ssh-key']) {
sh 'ssh-add ~/.ssh/kitchen.pem'
try {
timeout(time: testrun_timeout, unit: 'HOURS') {
stage('Converge VM') {
sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}" || true
fi
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}


@@ -10,7 +10,7 @@ def distro_name = 'fedora'
def distro_version = '29'
def python_version = 'py2'
def test_transport = 'ZeroMQ'
def salt_target_branch = '2019.2.1'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--ssh-tests'
@@ -42,8 +42,8 @@ wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/root/bin',
'RBENV_VERSION=2.4.2',
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
@@ -65,7 +65,8 @@ wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
@@ -76,74 +77,77 @@ wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
sshagent(credentials: ['jenkins-testing-ssh-key']) {
sh 'ssh-add ~/.ssh/kitchen.pem'
try {
timeout(time: testrun_timeout, unit: 'HOURS') {
stage('Converge VM') {
sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}" || true
fi
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}


@@ -10,7 +10,7 @@ def distro_name = 'fedora'
def distro_version = '29'
def python_version = 'py3'
def test_transport = 'ZeroMQ'
def salt_target_branch = '2019.2.1'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--ssh-tests'
@@ -42,8 +42,8 @@ wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/root/bin',
'RBENV_VERSION=2.4.2',
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
@@ -65,7 +65,8 @@ wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
@@ -76,74 +77,77 @@ wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
sshagent(credentials: ['jenkins-testing-ssh-key']) {
sh 'ssh-add ~/.ssh/kitchen.pem'
try {
timeout(time: testrun_timeout, unit: 'HOURS') {
stage('Converge VM') {
sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}" || true
fi
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}

159
.ci/kitchen-fedora30-py2 Normal file

@@ -0,0 +1,159 @@
@Library('salt@1.1') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'fedora'
def distro_version = '30'
def python_version = 'py2'
def test_transport = 'ZeroMQ'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--ssh-tests'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '10')),
parameters([
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
])
])
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
// vim: ft=groovy
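
Every stage in these files repeats the same two-branch mv of the kitchen logs, with only the suffix changing (-create, -converge, -verify, -download). Purely as an illustration of how that repetition could be factored (this commit does not do so), a hypothetical helper might look like:

    // Hypothetical helper, not part of this commit: rename the per-instance and global
    // kitchen logs with a stage suffix so each phase's log survives for archiveArtifacts.
    def stashKitchenLogs(String suffix, String pythonVersion, String distroName, String distroVersion) {
        sh """
        if [ -s ".kitchen/logs/${pythonVersion}-${distroName}-${distroVersion}.log" ]; then
            mv ".kitchen/logs/${pythonVersion}-${distroName}-${distroVersion}.log" ".kitchen/logs/${pythonVersion}-${distroName}-${distroVersion}-${suffix}.log"
        fi
        if [ -s ".kitchen/logs/kitchen.log" ]; then
            mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-${suffix}.log"
        fi
        """
    }
    // e.g. stashKitchenLogs('converge', python_version, distro_name, distro_version)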

159
.ci/kitchen-fedora30-py3 Normal file

@@ -0,0 +1,159 @@
@Library('salt@1.1') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'fedora'
def distro_version = '30'
def python_version = 'py3'
def test_transport = 'ZeroMQ'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--ssh-tests'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '10')),
parameters([
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
])
])
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
// vim: ft=groovy
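
The 'Upload Coverage' stage shared by all of these pipelines guards the codecov call twice: the report is only sent when the full suite was requested and a coverage file actually exists, and the upload itself can never fail the build. The same commands as above, annotated:

    withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
        sh '''
        # Only upload when the full test suite ran and coverage was actually produced.
        if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
            # Subshell plus "|| true" keeps a flaky codecov upload from failing the job.
            (curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
        fi
        '''
    }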


@@ -0,0 +1,146 @@
@Library('salt@1.1') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'macosx'
def distro_version = 'mojave'
def python_version = 'py2'
def test_transport = 'ZeroMQ'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = ''
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '10')),
parameters([
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
])
])
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
wrappedNode('kitchen-slave-mac', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
"PATH=/Users/parallels/.rbenv/shims:/Users/parallels/.rbenv/bin:/usr/local/bin:/usr/bin:/bin:/usr/sbin:/sbin:/opt/salt/bin:/usr/local/sbin",
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
stage('VM Cleanup') {
sh '''
for i in `prlctl list -aij|jq -r '.[]|select((.Uptime|tonumber > 86400) and (.State == "running"))|.ID'`
do
prlctl stop $i --kill
done
# don't delete vm's that haven't started yet ((.State == "stopped") and (.Uptime == "0"))
for i in `prlctl list -aij|jq -r '.[]|select((.Uptime|tonumber > 0) and (.State != "running"))|.ID'`
do
prlctl delete $i
done
'''
}
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with vagrant macos --without ec2 windows opennebula docker'
}
stage('Create VM') {
sh '''
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.vagrant.d/insecure_private_key; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
// vim: ft=groovy
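The macOS jobs run on Parallels hosts, and their 'VM Cleanup' stage relies on two jq filters over prlctl's JSON listing: the first force-stops guests that have been running for more than a day (86400 seconds), the second deletes guests that have run at some point but are no longer running, while leaving untouched any VM with an uptime of 0 that another job may not have booted yet. The same two loops again, annotated:

    // Annotated restatement of the 'VM Cleanup' stage above; the filters are unchanged.
    sh '''
    # Kill guests that have been running for more than 24h (86400 seconds).
    for i in $(prlctl list -aij | jq -r '.[]|select((.Uptime|tonumber > 86400) and (.State == "running"))|.ID')
    do
        prlctl stop $i --kill
    done
    # Delete guests that have run before (Uptime > 0) but are not running now.
    # Guests with Uptime == 0 are left alone: another job may have created them
    # but not booted them yet.
    for i in $(prlctl list -aij | jq -r '.[]|select((.Uptime|tonumber > 0) and (.State != "running"))|.ID')
    do
        prlctl delete $i
    done
    '''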


@ -0,0 +1,146 @@
@Library('salt@1.1') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'macosx'
def distro_version = 'mojave'
def python_version = 'py3'
def test_transport = 'ZeroMQ'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = ''
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '10')),
parameters([
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
])
])
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
wrappedNode('kitchen-slave-mac', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
"PATH=/Users/parallels/.rbenv/shims:/Users/parallels/.rbenv/bin:/usr/local/bin:/usr/bin:/bin:/usr/sbin:/sbin:/opt/salt/bin:/usr/local/sbin",
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
stage('VM Cleanup') {
sh '''
for i in `prlctl list -aij|jq -r '.[]|select((.Uptime|tonumber > 86400) and (.State == "running"))|.ID'`
do
prlctl stop $i --kill
done
# don't delete vm's that haven't started yet ((.State == "stopped") and (.Uptime == "0"))
for i in `prlctl list -aij|jq -r '.[]|select((.Uptime|tonumber > 0) and (.State != "running"))|.ID'`
do
prlctl delete $i
done
'''
}
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with vagrant macos --without ec2 windows opennebula docker'
}
stage('Create VM') {
sh '''
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.vagrant.d/insecure_private_key; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
// vim: ft=groovy
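Every pipeline in this change opens with the same milestone pair, which is what makes "cancel any previously running builds" work: build N claims milestone N, and when build N+1 starts it passes milestone N first, which aborts build N if it is still running, before claiming milestone N+1 for itself. The pattern in isolation:

    // Sketch of the milestone-based superseding used by every job in this change.
    def buildNumber = env.BUILD_NUMBER as int
    if (buildNumber > 1) {
        // Passing the previous build's milestone aborts that build if it is still running.
        milestone(buildNumber - 1)
    }
    // Claim this build's own milestone so the next build can supersede it in turn.
    milestone(buildNumber)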

159
.ci/kitchen-opensuse15-py2 Normal file

@ -0,0 +1,159 @@
@Library('salt@1.1') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'opensuse'
def distro_version = '15'
def python_version = 'py2'
def test_transport = 'ZeroMQ'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--ssh-tests'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '10')),
parameters([
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
])
])
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
// vim: ft=groovy
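The 'Create VM' stage in the EC2-backed jobs layers two recovery mechanisms: retry(3) with a random 30-120 second sleep around the whole stage, and an inner fallback from the .kitchen.local.yml override (copied from ~/workspace/spot.yml, presumably spot-instance settings) to the default driver configuration when the first create fails. The inner fallback spelled out with comments; the spot-pricing reading of spot.yml is an assumption, the commands themselves are unchanged from the stage above:

    retry(3) {
        sh '''
        # Spread out simultaneous EC2 API calls from parallel jobs.
        t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
        # Try first with the local override (presumably spot-instance settings).
        cp -f ~/workspace/spot.yml .kitchen.local.yml
        # If that create fails, tear down whatever was half-created, drop the
        # override, and fall back to the default (on-demand) driver settings.
        bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (
            bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM
            rm .kitchen.local.yml
            bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM
        )
        echo "ExitCode: $?"
        '''
    }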

159
.ci/kitchen-opensuse15-py3 Normal file

@ -0,0 +1,159 @@
@Library('salt@1.1') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'opensuse'
def distro_version = '15'
def python_version = 'py3'
def test_transport = 'ZeroMQ'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--ssh-tests'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '10')),
parameters([
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
])
])
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
// vim: ft=groovy
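The closing 'Upload Coverage' stage is deliberately defensive: the Codecov bash uploader only runs when the runFull parameter (exported as FORCE_FULL) is "true" and a coverage.xml was actually produced, and the curl | sh pipeline sits in a subshell followed by || true so an uploader or network failure can never turn an otherwise green run red. The same guard, annotated:

    withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
        sh '''
        # Upload only for full runs that actually produced a coverage report.
        if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
            # Subshell plus "|| true": a Codecov hiccup must not fail the build.
            (curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
        fi
        '''
    }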


@ -10,7 +10,7 @@ def distro_name = 'ubuntu'
def distro_version = '1604'
def python_version = 'py2'
def test_transport = 'ZeroMQ'
def salt_target_branch = '2019.2.1'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--ssh-tests'
@ -42,8 +42,8 @@ wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/root/bin',
'RBENV_VERSION=2.4.2',
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
@ -65,7 +65,8 @@ wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
@ -76,74 +77,77 @@ wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
sshagent(credentials: ['jenkins-testing-ssh-key']) {
sh 'ssh-add ~/.ssh/kitchen.pem'
try {
timeout(time: testrun_timeout, unit: 'HOURS') {
stage('Converge VM') {
sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}" || true
fi
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
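One recurring change in the modified Ubuntu jobs is how the kitchen SSH key is handled: the previous sshagent(credentials: ['jenkins-testing-ssh-key']) wrapper around the whole try block, followed by a bare ssh-add ~/.ssh/kitchen.pem, is replaced by a one-shot ssh-agent invocation around each converge, so the key is loaded into a short-lived agent that exists only for that single command. The new pattern in isolation:

    stage('Converge VM') {
        // A fresh ssh-agent is started just for this command; it exits, and the
        // loaded kitchen.pem key disappears with it, as soon as the converge ends.
        sh '''
        ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
        '''
    }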


@ -10,7 +10,7 @@ def distro_name = 'ubuntu'
def distro_version = '1604'
def python_version = 'py2'
def test_transport = 'TCP'
def salt_target_branch = '2019.2.1'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--ssh-tests'
@ -42,8 +42,8 @@ wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/root/bin',
'RBENV_VERSION=2.4.2',
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
@ -65,7 +65,8 @@ wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
@ -76,74 +77,77 @@ wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
sshagent(credentials: ['jenkins-testing-ssh-key']) {
sh 'ssh-add ~/.ssh/kitchen.pem'
try {
timeout(time: testrun_timeout, unit: 'HOURS') {
stage('Converge VM') {
sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}" || true
fi
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}


@ -10,7 +10,7 @@ def distro_name = 'ubuntu'
def distro_version = '1604'
def python_version = 'py3'
def test_transport = 'ZeroMQ'
def salt_target_branch = '2019.2.1'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--ssh-tests'
@ -42,8 +42,8 @@ wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/root/bin',
'RBENV_VERSION=2.4.2',
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
@ -65,7 +65,8 @@ wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
@ -76,74 +77,77 @@ wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
sshagent(credentials: ['jenkins-testing-ssh-key']) {
sh 'ssh-add ~/.ssh/kitchen.pem'
try {
timeout(time: testrun_timeout, unit: 'HOURS') {
stage('Converge VM') {
sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}" || true
fi
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
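The other structural change in these hunks is a split verify: 'Run Tests' calls kitchen verify with DONT_DOWNLOAD_ARTEFACTS=1 inside the testrun timeout, and a separate 'Download Artefacts' stage in the finally block calls verify again with ONLY_DOWNLOAD_ARTEFACTS=1 followed by || exit 0, so junit XML and coverage files are still pulled off the VM even when the tests failed or the timeout fired; the extra hour in global_timeout is reserved for that second pass. How the two variables are honoured presumably lives in the kitchen/nox verifier configuration referenced by SALT_KITCHEN_VERIFIER, which is not part of this diff; on the pipeline side the shape is simply:

    stage('Run Tests') {
        withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
            // Run the suite without pulling artefacts back yet.
            sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
        }
    }
    // ...later, in the finally block, regardless of the test result:
    stage('Download Artefacts') {
        withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]) {
            // Re-run verify purely to copy artefacts off the VM; never fail the build here.
            sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0'
        }
    }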


@ -10,7 +10,7 @@ def distro_name = 'ubuntu'
def distro_version = '1604'
def python_version = 'py3'
def test_transport = 'TCP'
def salt_target_branch = '2019.2.1'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--ssh-tests'
@ -42,8 +42,8 @@ wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/root/bin',
'RBENV_VERSION=2.4.2',
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
@ -65,7 +65,8 @@ wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
@ -76,74 +77,77 @@ wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
sshagent(credentials: ['jenkins-testing-ssh-key']) {
sh 'ssh-add ~/.ssh/kitchen.pem'
try {
timeout(time: testrun_timeout, unit: 'HOURS') {
stage('Converge VM') {
sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}" || true
fi
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}


@ -10,7 +10,7 @@ def distro_name = 'ubuntu'
def distro_version = '1804'
def python_version = 'py2'
def test_transport = 'ZeroMQ'
def salt_target_branch = '2019.2.1'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--ssh-tests'
@ -42,8 +42,8 @@ wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/root/bin',
'RBENV_VERSION=2.4.2',
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
@ -65,7 +65,8 @@ wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
@ -76,74 +77,77 @@ wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
sshagent(credentials: ['jenkins-testing-ssh-key']) {
sh 'ssh-add ~/.ssh/kitchen.pem'
try {
timeout(time: testrun_timeout, unit: 'HOURS') {
stage('Converge VM') {
sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}" || true
fi
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}


@ -10,7 +10,7 @@ def distro_name = 'ubuntu'
def distro_version = '1804'
def python_version = 'py3'
def test_transport = 'ZeroMQ'
def salt_target_branch = '2019.2.1'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--ssh-tests'
@ -42,8 +42,8 @@ wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/root/bin',
'RBENV_VERSION=2.4.2',
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
@ -65,7 +65,8 @@ wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
@ -76,74 +77,77 @@ wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
sshagent(credentials: ['jenkins-testing-ssh-key']) {
sh 'ssh-add ~/.ssh/kitchen.pem'
try {
timeout(time: testrun_timeout, unit: 'HOURS') {
stage('Converge VM') {
sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}" || true
fi
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}

View file

@ -10,7 +10,7 @@ def distro_name = 'windows'
def distro_version = '2016'
def python_version = 'py2'
def test_transport = 'ZeroMQ'
def salt_target_branch = '2019.2.1'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--unit'
@ -42,8 +42,8 @@ wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/root/bin',
'RBENV_VERSION=2.4.2',
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
@ -76,74 +76,77 @@ wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
sshagent(credentials: ['jenkins-testing-ssh-key']) {
sh 'ssh-add ~/.ssh/kitchen.pem'
try {
timeout(time: testrun_timeout, unit: 'HOURS') {
stage('Converge VM') {
sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
try {
timeout(time: testrun_timeout, unit: 'HOURS') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}" || true
fi
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}

View file

@ -10,7 +10,7 @@ def distro_name = 'windows'
def distro_version = '2016'
def python_version = 'py3'
def test_transport = 'ZeroMQ'
def salt_target_branch = '2019.2.1'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--unit'
@ -42,8 +42,8 @@ wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/root/bin',
'RBENV_VERSION=2.4.2',
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
@ -76,74 +76,77 @@ wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
sshagent(credentials: ['jenkins-testing-ssh-key']) {
sh 'ssh-add ~/.ssh/kitchen.pem'
try {
timeout(time: testrun_timeout, unit: 'HOURS') {
stage('Converge VM') {
sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
try {
timeout(time: testrun_timeout, unit: 'HOURS') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}" || true
fi
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}

158
.ci/kitchen-windows2019-py2 Normal file
View file

@ -0,0 +1,158 @@
@Library('salt@1.1') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 8
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'windows'
def distro_version = '2019'
def python_version = 'py2'
def test_transport = 'ZeroMQ'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--unit'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '10')),
parameters([
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
])
])
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout, unit: 'HOURS') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
// vim: ft=groovy
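
For orientation, the create/converge/verify/destroy cycle that this new Windows 2019 pipeline drives reduces to roughly the following local sequence. The py2-windows-2019 instance name is inferred from the TEST_SUITE and TEST_PLATFORM variables defined above, and the block is a sketch rather than an official runbook:

    # install the kitchen bundle the pipeline uses
    bundle install --with ec2 windows --without docker macos opennebula vagrant
    # provision the VM, deploy salt, run the test suite, then tear down
    bundle exec kitchen create py2-windows-2019
    bundle exec kitchen converge py2-windows-2019
    bundle exec kitchen verify py2-windows-2019
    bundle exec kitchen destroy py2-windows-2019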

View file

@ -1,18 +1,18 @@
@Library('salt@1.1') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
def testrun_timeout = 8
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'debian'
def distro_version = '8'
def distro_name = 'windows'
def distro_version = '2019'
def python_version = 'py3'
def test_transport = 'ZeroMQ'
def salt_target_branch = '2019.2.1'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--ssh-tests'
def nox_passthrough_opts = '--unit'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '10')),
@ -42,8 +42,8 @@ wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/root/bin',
'RBENV_VERSION=2.4.2',
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
@ -76,74 +76,77 @@ wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
sshagent(credentials: ['jenkins-testing-ssh-key']) {
sh 'ssh-add ~/.ssh/kitchen.pem'
try {
timeout(time: testrun_timeout, unit: 'HOURS') {
stage('Converge VM') {
sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
try {
timeout(time: testrun_timeout, unit: 'HOURS') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}" || true
fi
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}

View file

@ -2,7 +2,7 @@
// Define the maximum time, in hours, that a test run should run for
def global_timeout = 3
def salt_target_branch = '2019.2.1'
def salt_target_branch = 'master'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '10')),
@ -46,9 +46,10 @@ wrappedNode('lint', global_timeout, '#jenkins-prod-pr') {
eval "$(pyenv init -)"
pyenv --version
pyenv install --skip-existing 2.7.15
pyenv shell 2.7.15
pyenv install --skip-existing 3.6.8
pyenv shell 3.6.8 2.7.15
python --version
pip install -U nox-py2
pip3 install -U nox-py2
nox --version
# Create the required virtualenvs in serial
nox --install-only -e lint-salt
@ -69,7 +70,7 @@ wrappedNode('lint', global_timeout, '#jenkins-prod-pr') {
if (readFile('file-list-changed.log') =~ /(?i)(^|\n)(salt\/.*\.py|setup\.py)\n/) {
sh shell_header + '''
eval "$(pyenv init - --no-rehash)"
pyenv shell 2.7.15
pyenv shell 3.6.8 2.7.15
EC=254
export PYLINT_REPORT=pylint-report-salt-chg.log
grep -Ei '^salt/.*\\.py$|^setup\\.py$' file-list-changed.log | xargs -r '--delimiter=\\n' nox -e lint-salt --
@ -80,7 +81,7 @@ wrappedNode('lint', global_timeout, '#jenkins-prod-pr') {
// Always lint something so reporting doesn't fail
sh shell_header + '''
eval "$(pyenv init - --no-rehash)"
pyenv shell 2.7.15
pyenv shell 3.6.8 2.7.15
EC=254
export PYLINT_REPORT=pylint-report-salt-chg.log
nox -e lint-salt -- salt/ext/__init__.py
@ -95,7 +96,7 @@ wrappedNode('lint', global_timeout, '#jenkins-prod-pr') {
if (readFile('file-list-changed.log') =~ /(?i)(^|\n)tests\/.*\.py\n/) {
sh shell_header + '''
eval "$(pyenv init - --no-rehash)"
pyenv shell 2.7.15
pyenv shell 3.6.8 2.7.15
EC=254
export PYLINT_REPORT=pylint-report-tests-chg.log
grep -Ei '^tests/.*\\.py$' file-list-changed.log | xargs -r '--delimiter=\\n' nox -e lint-tests --
@ -128,7 +129,7 @@ wrappedNode('lint', global_timeout, '#jenkins-prod-pr') {
stage('Lint Salt Full') {
sh shell_header + '''
eval "$(pyenv init - --no-rehash)"
pyenv shell 2.7.15
pyenv shell 3.6.8 2.7.15
EC=254
export PYLINT_REPORT=pylint-report-salt-full.log
nox -e lint-salt
@ -141,7 +142,7 @@ wrappedNode('lint', global_timeout, '#jenkins-prod-pr') {
stage('Lint Tests Full') {
sh shell_header + '''
eval "$(pyenv init - --no-rehash)"
pyenv shell 2.7.15
pyenv shell 3.6.8 2.7.15
EC=254
export PYLINT_REPORT=pylint-report-tests-full.log
nox -e lint-salt

View file

@ -3,7 +3,7 @@ codecov:
- drone.saltstack.com
- jenkinsci.saltstack.com
branch: 2019.2
branch: master
notify:
require_ci_to_pass: no

80
.github/CODEOWNERS vendored
View file

@ -8,72 +8,58 @@
# This file uses an fnmatch-style matching pattern.
# Team Boto
salt/*/*boto* @saltstack/team-boto
# Team Core
requirements/* @saltstack/team-core
rfcs/* @saltstack/team-core
salt/auth/* @saltstack/team-core
salt/cache/* @saltstack/team-core
salt/cli/* @saltstack/team-core
salt/client/* @saltstack/team-core
salt/config/* @saltstack/team-core
salt/daemons/* @saltstack/team-core
salt/pillar/* @saltstack/team-core
salt/loader.py @saltstack/team-core
salt/payload.py @saltstack/team-core
salt/master.py @saltstack/team-core
salt/*/master* @saltstack/team-core
salt/minion.py @saltstack/team-core
salt/*/minion* @saltstack/team-core
* @saltstack/team-core
# Team Boto
salt/*/*boto* @saltstack/team-boto @saltstack/team-core
# Team Cloud
salt/cloud/* @saltstack/team-cloud
salt/utils/openstack/* @saltstack/team-cloud
salt/utils/aws.py @saltstack/team-cloud
salt/*/*cloud* @saltstack/team-cloud
salt/cloud/* @saltstack/team-cloud @saltstack/team-core
salt/utils/openstack/* @saltstack/team-cloud @saltstack/team-core
salt/utils/aws.py @saltstack/team-cloud @saltstack/team-core
salt/*/*cloud* @saltstack/team-cloud @saltstack/team-core
# Team NetAPI
salt/cli/api.py @saltstack/team-netapi
salt/client/netapi.py @saltstack/team-netapi
salt/netapi/* @saltstack/team-netapi
salt/cli/api.py @saltstack/team-netapi @saltstack/team-core
salt/client/netapi.py @saltstack/team-netapi @saltstack/team-core
salt/netapi/* @saltstack/team-netapi @saltstack/team-core
# Team Network
salt/proxy/* @saltstack/team-proxy
salt/proxy/* @saltstack/team-proxy @saltstack/team-core
# Team SPM
salt/cli/spm.py @saltstack/team-spm
salt/spm/* @saltstack/team-spm
salt/cli/spm.py @saltstack/team-spm @saltstack/team-core
salt/spm/* @saltstack/team-spm @saltstack/team-core
# Team SSH
salt/cli/ssh.py @saltstack/team-ssh
salt/client/ssh/* @saltstack/team-ssh
salt/roster/* @saltstack/team-ssh
salt/runners/ssh.py @saltstack/team-ssh
salt/*/thin.py @saltstack/team-ssh
salt/cli/ssh.py @saltstack/team-ssh @saltstack/team-core
salt/client/ssh/* @saltstack/team-ssh @saltstack/team-core
salt/roster/* @saltstack/team-ssh @saltstack/team-core
salt/runners/ssh.py @saltstack/team-ssh @saltstack/team-core
salt/*/thin.py @saltstack/team-ssh @saltstack/team-core
# Team State
salt/state.py @saltstack/team-state
salt/state.py @saltstack/team-state @saltstack/team-core
# Team SUSE
salt/*/*btrfs* @saltstack/team-suse
salt/*/*kubernetes* @saltstack/team-suse
salt/*/*pkg* @saltstack/team-suse
salt/*/*snapper* @saltstack/team-suse
salt/*/*xfs* @saltstack/team-suse
salt/*/*zypper* @saltstack/team-suse
salt/*/*btrfs* @saltstack/team-suse @saltstack/team-core
salt/*/*kubernetes* @saltstack/team-suse @saltstack/team-core
salt/*/*pkg* @saltstack/team-suse @saltstack/team-core
salt/*/*snapper* @saltstack/team-suse @saltstack/team-core
salt/*/*xfs* @saltstack/team-suse @saltstack/team-core
salt/*/*zypper* @saltstack/team-suse @saltstack/team-core
# Team Transport
salt/transport/* @saltstack/team-transport
salt/utils/zeromq.py @saltstack/team-transport
salt/transport/* @saltstack/team-transport @saltstack/team-core
salt/utils/zeromq.py @saltstack/team-transport @saltstack/team-core
# Team Windows
salt/*/*win* @saltstack/team-windows
salt/modules/reg.py @saltstack/team-windows
salt/states/reg.py @saltstack/team-windows
tests/*/*win* @saltstack/team-windows
tests/*/test_reg.py @saltstack/team-windows
salt/*/*win* @saltstack/team-windows @saltstack/team-core
salt/modules/reg.py @saltstack/team-windows @saltstack/team-core
salt/states/reg.py @saltstack/team-windows @saltstack/team-core
tests/*/*win* @saltstack/team-windows @saltstack/team-core
tests/*/test_reg.py @saltstack/team-windows @saltstack/team-core
# Jenkins Integration
.ci/* @saltstack/saltstack-sre-team @saltstack/team-core

View file

@ -9,6 +9,8 @@ Remove this section if not relevant
Remove this section if not relevant
### Tests written?
**[NOTICE] Bug fixes or features added to Salt require tests.**
Please review the [test documentation](https://docs.saltstack.com/en/latest/topics/tutorials/writing_tests.html) for details on how to implement tests into Salt's test suite.
Yes/No

View file

@ -39,7 +39,7 @@ provisioner:
max_retries: 2
remote_states:
name: git://github.com/saltstack/salt-jenkins.git
branch: 2019.2
branch: master
repo: git
testingdir: /testing
salt_copy_filter:

View file

@ -6,13 +6,12 @@ repos:
- id: pip-tools-compile
alias: compile-linux-py2.7-zmq-requirements
name: Linux Py2.7 ZeroMQ Requirements
files: ^requirements/((base|zeromq|pytest)\.txt|static/(.*)\.in)$
files: ^requirements/((base|zeromq|pytest)\.txt|static/linux\.in)$
exclude: ^requirements/static/(lint|cloud|docs|osx|windows)\.in$
args:
- -v
- --py-version=2.7
- --platform=linux
- --out-prefix=zeromq
- --include=requirements/base.txt
- --include=requirements/zeromq.txt
- --include=requirements/pytest.txt
@ -25,7 +24,6 @@ repos:
- -v
- --py-version=2.7
- --platform=darwin
- --out-prefix=zeromq
- --include=pkg/osx/req.txt
- --include=pkg/osx/req_ext.txt
- --include=requirements/base.txt
@ -41,58 +39,12 @@ repos:
- -v
- --py-version=2.7
- --platform=windows
- --out-prefix=zeromq
- --include=pkg/windows/req.txt
- --include=pkg/windows/req_win.txt
- --include=requirements/base.txt
- --include=requirements/zeromq.txt
- --include=requirements/pytest.txt
- --remove-line=^pycrypto==(.*)$
- id: pip-tools-compile
alias: compile-linux-py2.7-raet-requirements
name: Linux Py2.7 RAET Requirements
files: ^requirements/((base|raet|pytest)\.txt|static/(.*)\.in)$
exclude: ^requirements/static/(lint|cloud|docs|osx|windows)\.in$
args:
- -v
- --py-version=2.7
- --platform=linux
- --out-prefix=raet
- --include=requirements/base.txt
- --include=requirements/raet.txt
- --include=requirements/pytest.txt
- --remove-line=^pycrypto==(.*)$
- id: pip-tools-compile
alias: compile-osx-py2.7-raet-requirements
name: OSX Py2.7 RAET Requirements
files: ^(pkg/osx/(req|req_ext)\.txt|requirements/((base|raet|pytest)\.txt|static/osx\.in))$
args:
- -v
- --py-version=2.7
- --out-prefix=raet
- --platform=darwin
- --include=pkg/osx/req.txt
- --include=pkg/osx/req_ext.txt
- --include=requirements/base.txt
- --include=requirements/raet.txt
- --include=requirements/pytest.txt
- --remove-line=^pycrypto==(.*)$
- --passthrough-line-from-input=^pyobjc(.*)$
- id: pip-tools-compile
alias: compile-windows-py2.7-raet-requirements
name: Windows Py2.7 RAET Requirements
files: ^(pkg/windows/(req|req_win)\.txt|requirements/((base|raet|pytest)\.txt|static/windows\.in))$
args:
- -v
- --py-version=2.7
- --out-prefix=raet
- --platform=windows
- --include=pkg/windows/req.txt
- --include=pkg/windows/req_win.txt
- --include=requirements/base.txt
- --include=requirements/raet.txt
- --include=requirements/pytest.txt
- --remove-line=^pycrypto==(.*)$
- id: pip-tools-compile
alias: compile-lint-py2.7-requirements
@ -115,32 +67,16 @@ repos:
- id: pip-tools-compile
alias: compile-linux-py3.4-zmq-requirements
name: Linux Py3.4 ZeroMQ Requirements
files: ^requirements/((base|zeromq|pytest)\.txt|static/(.*)\.in)$
files: ^requirements/((base|zeromq|pytest)\.txt|static/linux\.in)$
exclude: ^requirements/static/(centos-6|amzn-2018\.03|lint|cloud|docs|osx|windows)\.in$
args:
- -v
- --py-version=3.4
- --platform=linux
- --out-prefix=zeromq
- --include=requirements/base.txt
- --include=requirements/zeromq.txt
- --include=requirements/pytest.txt
- --remove-line=^pycrypto==(.*)$
- id: pip-tools-compile
alias: compile-linux-py3.4-raet-requirements
name: Linux Py3.4 RAET Requirements
files: ^requirements/((base|raet|pytest)\.txt|static/(.*)\.in)$
exclude: ^requirements/static/(centos-6|amzn-2018\.03|lint|cloud|docs|osx|windows)\.in$
args:
- -v
- --py-version=3.4
- --platform=linux
- --out-prefix=raet
- --include=requirements/base.txt
- --include=requirements/raet.txt
- --include=requirements/pytest.txt
- --remove-line=^pycrypto==(.*)$
- --remove-line=^enum34==(.*)$
- id: pip-tools-compile
alias: compile-cloud-py3.4-requirements
@ -153,13 +89,12 @@ repos:
- id: pip-tools-compile
alias: compile-linux-py3.5-zmq-requirements
name: Linux Py3.5 ZeroMQ Requirements
files: ^requirements/((base|zeromq|pytest)\.txt|static/(.*)\.in)$
files: ^requirements/((base|zeromq|pytest)\.txt|static/linux\.in)$
exclude: ^requirements/static/(centos-6|amzn-2018\.03|lint|cloud|docs|osx|windows)\.in$
args:
- -v
- --py-version=3.5
- --platform=linux
- --out-prefix=zeromq
- --include=requirements/base.txt
- --include=requirements/zeromq.txt
- --include=requirements/pytest.txt
@ -172,7 +107,6 @@ repos:
- -v
- --py-version=3.5
- --platform=darwin
- --out-prefix=zeromq
- --include=pkg/osx/req.txt
- --include=pkg/osx/req_ext.txt
- --include=requirements/base.txt
@ -188,61 +122,12 @@ repos:
- -v
- --py-version=3.5
- --platform=windows
- --out-prefix=zeromq
- --include=pkg/windows/req.txt
- --include=pkg/windows/req_win.txt
- --include=requirements/base.txt
- --include=requirements/zeromq.txt
- --include=requirements/pytest.txt
- --remove-line=^pycrypto==(.*)$
- id: pip-tools-compile
alias: compile-linux-py3.5-raet-requirements
name: Linux Py3.5 RAET Requirements
files: ^requirements/((base|raet|pytest)\.txt|static/(.*)\.in)$
exclude: ^requirements/static/(centos-6|amzn-2018\.03|lint|cloud|docs|osx|windows)\.in$
args:
- -v
- --py-version=3.5
- --platform=linux
- --out-prefix=raet
- --include=requirements/base.txt
- --include=requirements/raet.txt
- --include=requirements/pytest.txt
- --remove-line=^pycrypto==(.*)$
- --remove-line=^enum34==(.*)$
- id: pip-tools-compile
alias: compile-osx-py3.5-raet-requirements
name: OSX Py3.5 RAET Requirements
files: ^(pkg/osx/(req|req_ext)\.txt|requirements/((base|raet|pytest)\.txt|static/osx\.in))$
args:
- -v
- --py-version=3.5
- --platform=darwin
- --out-prefix=raet
- --include=pkg/osx/req.txt
- --include=pkg/osx/req_ext.txt
- --include=requirements/base.txt
- --include=requirements/raet.txt
- --include=requirements/pytest.txt
- --remove-line=^pycrypto==(.*)$
- --remove-line=^enum34==(.*)$
- --passthrough-line-from-input=^pyobjc(.*)$
- id: pip-tools-compile
alias: compile-windows-py3.5-raet-requirements
name: Windows Py3.5 RAET Requirements
files: ^(pkg/windows/(req|req_win)\.txt|requirements/((base|raet|pytest)\.txt|static/windows\.in))$
args:
- -v
- --py-version=3.5
- --platform=windows
- --out-prefix=raet
- --include=pkg/windows/req.txt
- --include=pkg/windows/req_win.txt
- --include=requirements/base.txt
- --include=requirements/raet.txt
- --include=requirements/pytest.txt
- --remove-line=^pycrypto==(.*)$
- --remove-line=^enum34==(.*)$
- id: pip-tools-compile
alias: compile-cloud-py3.5-requirements
@ -264,13 +149,12 @@ repos:
- id: pip-tools-compile
alias: compile-linux-py3.6-zmq-requirements
name: Linux Py3.6 ZeroMQ Requirements
files: ^requirements/((base|zeromq|pytest)\.txt|static/(.*)\.in)$
files: ^requirements/((base|zeromq|pytest)\.txt|static/linux\.in)$
exclude: ^requirements/static/(centos-6|amzn-2018\.03|lint|cloud|docs|osx|windows)\.in$
args:
- -v
- --py-version=3.6
- --platform=linux
- --out-prefix=zeromq
- --include=requirements/base.txt
- --include=requirements/zeromq.txt
- --include=requirements/pytest.txt
@ -283,7 +167,6 @@ repos:
- -v
- --py-version=3.6
- --platform=darwin
- --out-prefix=zeromq
- --include=pkg/osx/req.txt
- --include=pkg/osx/req_ext.txt
- --include=requirements/base.txt
@ -299,61 +182,12 @@ repos:
- -v
- --py-version=3.6
- --platform=windows
- --out-prefix=zeromq
- --include=pkg/windows/req.txt
- --include=pkg/windows/req_win.txt
- --include=requirements/base.txt
- --include=requirements/zeromq.txt
- --include=requirements/pytest.txt
- --remove-line=^pycrypto==(.*)$
- id: pip-tools-compile
alias: compile-linux-py3.6-raet-requirements
name: Linux Py3.6 RAET Requirements
files: ^requirements/((base|raet|pytest)\.txt|static/(.*)\.in)$
exclude: ^requirements/static/(centos-6|amzn-2018\.03|lint|cloud|docs|osx|windows)\.in$
args:
- -v
- --py-version=3.6
- --platform=linux
- --out-prefix=raet
- --include=requirements/base.txt
- --include=requirements/raet.txt
- --include=requirements/pytest.txt
- --remove-line=^pycrypto==(.*)$
- --remove-line=^enum34==(.*)$
- id: pip-tools-compile
alias: compile-osx-py3.6-raet-requirements
name: OSX Py3.6 RAET Requirements
files: ^(pkg/osx/(req|req_ext)\.txt|requirements/((base|raet|pytest)\.txt|static/osx\.in))$
args:
- -v
- --py-version=3.6
- --platform=darwin
- --out-prefix=raet
- --include=pkg/osx/req.txt
- --include=pkg/osx/req_ext.txt
- --include=requirements/base.txt
- --include=requirements/raet.txt
- --include=requirements/pytest.txt
- --remove-line=^pycrypto==(.*)$
- --remove-line=^enum34==(.*)$
- --passthrough-line-from-input=^pyobjc(.*)$
- id: pip-tools-compile
alias: compile-windows-py3.6-raet-requirements
name: Windows Py3.6 RAET Requirements
files: ^(pkg/windows/(req|req_win)\.txt|requirements/((base|raet|pytest)\.txt|static/windows\.in))$
args:
- -v
- --py-version=3.6
- --platform=windows
- --out-prefix=raet
- --include=pkg/windows/req.txt
- --include=pkg/windows/req_win.txt
- --include=requirements/base.txt
- --include=requirements/raet.txt
- --include=requirements/pytest.txt
- --remove-line=^pycrypto==(.*)$
- --remove-line=^enum34==(.*)$
- id: pip-tools-compile
alias: compile-cloud-py3.6-requirements
@ -375,13 +209,12 @@ repos:
- id: pip-tools-compile
alias: compile-linux-py3.7-zmq-requirements
name: Linux Py3.7 ZeroMQ Requirements
files: ^requirements/((base|zeromq|pytest)\.txt|static/(.*)\.in)$
files: ^requirements/((base|zeromq|pytest)\.txt|static/linux\.in)$
exclude: ^requirements/static/(centos-6|amzn-2018\.03|lint|cloud|docs|osx|windows)\.in$
args:
- -v
- --py-version=3.7
- --platform=linux
- --out-prefix=zeromq
- --include=requirements/base.txt
- --include=requirements/zeromq.txt
- --include=requirements/pytest.txt
@ -394,7 +227,6 @@ repos:
- -v
- --py-version=3.7
- --platform=darwin
- --out-prefix=zeromq
- --include=pkg/osx/req.txt
- --include=pkg/osx/req_ext.txt
- --include=requirements/base.txt
@ -410,61 +242,12 @@ repos:
- -v
- --py-version=3.7
- --platform=windows
- --out-prefix=zeromq
- --include=pkg/windows/req.txt
- --include=pkg/windows/req_win.txt
- --include=requirements/base.txt
- --include=requirements/zeromq.txt
- --include=requirements/pytest.txt
- --remove-line=^pycrypto==(.*)$
- id: pip-tools-compile
alias: compile-linux-py3.7-raet-requirements
name: Linux Py3.7 RAET Requirements
files: ^requirements/((base|raet|pytest)\.txt|static/(.*)\.in)$
exclude: ^requirements/static/(centos-6|amzn-2018\.03|lint|cloud|docs|osx|windows)\.in$
args:
- -v
- --py-version=3.7
- --platform=linux
- --out-prefix=raet
- --include=requirements/base.txt
- --include=requirements/raet.txt
- --include=requirements/pytest.txt
- --remove-line=^pycrypto==(.*)$
- --remove-line=^enum34==(.*)$
- id: pip-tools-compile
alias: compile-osx-py3.7-raet-requirements
name: OSX Py3.7 RAET Requirements
files: ^(pkg/osx/(req|req_ext)\.txt|requirements/((base|raet|pytest)\.txt|static/osx\.in))$
args:
- -v
- --py-version=3.7
- --platform=darwin
- --out-prefix=raet
- --include=pkg/osx/req.txt
- --include=pkg/osx/req_ext.txt
- --include=requirements/base.txt
- --include=requirements/raet.txt
- --include=requirements/pytest.txt
- --remove-line=^pycrypto==(.*)$
- --remove-line=^enum34==(.*)$
- --passthrough-line-from-input=^pyobjc(.*)$
- id: pip-tools-compile
alias: compile-windows-py3.7-raet-requirements
name: Windows Py3.7 RAET Requirements
files: ^(pkg/windows/(req|req_win)\.txt|requirements/((base|raet|pytest)\.txt|static/windows\.in))$
args:
- -v
- --py-version=3.7
- --platform=windows
- --out-prefix=raet
- --include=pkg/windows/req.txt
- --include=pkg/windows/req_win.txt
- --include=requirements/base.txt
- --include=requirements/raet.txt
- --include=requirements/pytest.txt
- --remove-line=^pycrypto==(.*)$
- --remove-line=^enum34==(.*)$
- id: pip-tools-compile
alias: compile-cloud-py3.7-requirements

54
CHANGELOG.md Normal file
View file

@ -0,0 +1,54 @@
# Changelog
All notable changes to Salt will be documented in this file.
This changelog follows [keepachangelog](https://keepachangelog.com/en/1.0.0/) format, and is intended for human consumption.
This project versioning is *similar* to [Semantic Versioning](https://semver.org), and is documented in [SEP 14](https://github.com/saltstack/salt-enhancement-proposals/pull/20/files).
Versions are `MAJOR.PATCH`.
## Unreleased (Neon)
### Removed
- [#54943](https://github.com/saltstack/salt/pull/54943) - RAET transport method has been removed per the deprecation schedule - [@s0undt3ch](https://github.com/s0undt3ch)
### Deprecated
### Changed
- [SEP 14](https://github.com/saltstack/salt-enhancement-proposals/pull/20) - Changed to numeric versions.
- [SEP 1](https://github.com/saltstack/salt-enhancement-proposals/blob/master/accepted/0001-changelog-format.md), [SEP 14](https://github.com/saltstack/salt-enhancement-proposals/pull/20) - Adopted keepachangelog format.
### Fixed
### Added
---
## [2019.2.2]
### Changed
- [#54758](https://github.com/saltstack/salt/issues/54758) - Missing sls file during `state.show_states` displays message instead of failing - [@Ch3LL](https://github.com/Ch3LL)
### Fixed
- [#54521](https://github.com/saltstack/salt/issues/54521) - `failhard` during orchestration now fails as expected - [@mattp-](https://github.com/mattp-) / [@Oloremo](https://github.com/Oloremo)
- [#54741](https://github.com/saltstack/salt/issues/54741) - `schedule.run_job` without time element now works as expected - [@garethgreenaway](https://github.com/garethgreenaway)
- [#54755](https://github.com/saltstack/salt/issues/54755) - Pip state ensures pip was imported before trying to remove - [@dwoz](https://github.com/dwoz)
- [#54760](https://github.com/saltstack/salt/issues/54760) - Fix `salt-cloud -Q` for OpenStack driver - [@vdloo](https://github.com/vdloo) / [@Akm0d](https://github.com/Akm0d)
- [#54762](https://github.com/saltstack/salt/issues/54762) - IPv6 addresses with brackets no longer break master/minion communication - [@dhiltonp](https://github.com/dhiltonp)
- [#54765](https://github.com/saltstack/salt/issues/54765) - Masterless jinja imports - [@dwoz](https://github.com/dwoz)
- [#54776](https://github.com/saltstack/salt/issues/54776) - `ping_interval` in minion config no longer prevents startup - [@dwoz](https://github.com/dwoz)
- [#54820](https://github.com/saltstack/salt/issues/54820) - `scheduler.present` no longer always reports changes when scheduler is disabled - [@garethgreenaway](https://github.com/garethgreenaway)
- [#54941](https://github.com/saltstack/salt/issues/54941) - Pillar data is no longer refreshed on every call - [@dwoz](https://github.com/dwoz)
### Added
- [#54919](https://github.com/saltstack/salt/pull/54919) - Added missing `win_wusa` state and module docs - [@twangboy](https://github.com/twangboy)
## [2019.2.1] - 2019-09-25 [YANKED]
- See [old release notes](https://docs.saltstack.com/en/latest/topics/releases/2019.2.1.html)

View file

@ -4,7 +4,7 @@ source 'https://rubygems.org'
# Point this back at the test-kitchen package after >1.2.5 is released
gem 'test-kitchen', :git => 'https://github.com/dwoz/test-kitchen.git', :branch => 'keepalive_maxcount'
gem 'kitchen-salt', :git => 'https://github.com/s0undt3ch/kitchen-salt.git', :branch => 'features/nox'
gem 'kitchen-salt', :git => 'https://github.com/saltstack/kitchen-salt.git'
gem 'kitchen-sync'
gem 'git'

View file

@ -155,15 +155,6 @@ ZeroMQ Transport:
pip install -e .
RAET Transport:
.. code-block:: bash
pip install -r requirements/raet.txt
pip install psutil
pip install -e .
Running a self-contained development version
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

View file

@ -60,7 +60,6 @@ services`_ offerings.
* Facebook - `<https://www.facebook.com/SaltStack/>`_
* LinkedIn - `<https://www.linkedin.com/company/salt-stack-inc>`_
* LinkedIn Group - `<https://www.linkedin.com/groups/4877160>`_
* Google+ - `<https://plus.google.com/b/112856352920437801867/+SaltStackInc/posts>`_
.. _global community: http://www.meetup.com/pro/saltstack/
.. _SaltConf: http://saltconf.com/

View file

@ -4112,12 +4112,6 @@ source_file = _build/locale/topics/development/hacking.pot
source_lang = en
source_name = topics/development/hacking.rst
[salt.topics--development--raet--index]
file_filter = locale/<lang>/LC_MESSAGES/topics/development/raet/index.po
source_file = _build/locale/topics/development/raet/index.pot
source_lang = en
source_name = topics/development/raet/index.rst
[salt.topics--development--salt_projects]
file_filter = locale/<lang>/LC_MESSAGES/topics/development/salt_projects.po
source_file = _build/locale/topics/development/salt_projects.pot
@ -4184,18 +4178,6 @@ source_file = _build/locale/topics/topology/syndic.pot
source_lang = en
source_name = topics/topology/syndic.rst
[salt.topics--transports--raet--index]
file_filter = locale/<lang>/LC_MESSAGES/topics/transports/raet/index.po
source_file = _build/locale/topics/transports/raet/index.pot
source_lang = en
source_name = topics/transports/raet/index.rst
[salt.topics--transports--raet--programming_intro]
file_filter = locale/<lang>/LC_MESSAGES/topics/transports/raet/programming_intro.po
source_file = _build/locale/topics/transports/raet/programming_intro.pot
source_lang = en
source_name = topics/transports/raet/programming_intro.rst
[salt.topics--tutorials--states_pt5]
file_filter = locale/<lang>/LC_MESSAGES/topics/tutorials/states_pt5.po
source_file = _build/locale/topics/tutorials/states_pt5.pot
@ -4652,12 +4634,6 @@ source_file = _build/locale/ref/modules/all/salt.modules.pyenv.pot
source_lang = en
source_name = ref/modules/all/salt.modules.pyenv.rst
[salt.ref--modules--all--salt_modules_raet_publish]
file_filter = locale/<lang>/LC_MESSAGES/ref/modules/all/salt.modules.raet_publish.po
source_file = _build/locale/ref/modules/all/salt.modules.raet_publish.pot
source_lang = en
source_name = ref/modules/all/salt.modules.raet_publish.rst
[salt.ref--modules--all--salt_modules_schedule]
file_filter = locale/<lang>/LC_MESSAGES/ref/modules/all/salt.modules.schedule.po
source_file = _build/locale/ref/modules/all/salt.modules.schedule.pot

View file

@ -147,8 +147,7 @@ pdf: check_sphinx-build translations
fi
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
@echo "Running LaTeX files through xelatex..."
perl -pi -e 's!pdflatex!xelatex!' $(BUILDDIR)/latex/Makefile
$(MAKE) -C $(BUILDDIR)/latex -i LATEXOPTS="-interaction=nonstopmode"
$(MAKE) -C $(BUILDDIR)/latex -i "PDFLATEX=latexmk" "LATEXMKOPTS=-xelatex -interaction=nonstopmode -f -quiet"
@echo "xelatex finished; the PDF files are in $(BUILDDIR)/latex."
cheatsheet: translations

View file

@ -10,7 +10,7 @@ from sphinx import addnodes
from sphinx.directives import ObjectDescription
from sphinx.domains import Domain, ObjType
from sphinx.domains.python import PyObject
from sphinx.locale import l_, _
from sphinx.locale import _
from sphinx.roles import XRefRole
from sphinx.util.nodes import make_refnode
from sphinx.util.nodes import nested_parse_with_titles
@ -240,8 +240,8 @@ class SLSXRefRole(XRefRole):
class SaltModuleIndex(python_domain.PythonModuleIndex):
name = 'modindex'
localname = l_('Salt Module Index')
shortname = l_('all salt modules')
localname = _('Salt Module Index')
shortname = _('all salt modules')
class SaltDomain(python_domain.PythonDomain):
@ -251,7 +251,7 @@ class SaltDomain(python_domain.PythonDomain):
object_types = python_domain.PythonDomain.object_types
object_types.update({
'state': ObjType(l_('state'), 'state'),
'state': ObjType(_('state'), 'state'),
})
directives = python_domain.PythonDomain.directives
@ -290,7 +290,7 @@ class SaltDomain(python_domain.PythonDomain):
type, target, node, contnode)
# Monkey-patch the Python domain remove the python module index
python_domain.PythonDomain.indices = []
python_domain.PythonDomain.indices = [SaltModuleIndex]
def setup(app):

View file

@ -269,7 +269,7 @@
<!--
<a href="https://saltstack.com/saltstack-enterprise/" target="_blank"><img class="nolightbox footer-banner center" src="{{ pathto('_static/images/enterprise_ad.jpg', 1) }}"/></a>
-->
<a href="http://saltconf.com/saltconf18-speakers/" target="_blank"><img class="nolightbox footer-banner center" src="{{ pathto('_static/images/DOCBANNER.png', 1) }}"/></a>
<a href="https://saltconf.com/ad" target="_blank"><img class="nolightbox footer-banner center" src="https://get.saltstack.com/rs/304-PHQ-615/images/Salt-docs-ad-330x330.png"/></a>
</div>
{% endif %}
</div>
@ -291,6 +291,7 @@
{% else %}
<a href="http://saltstack.com/support" target="_blank"><img class="nolightbox sidebar-banner center" src="{{ pathto('_static/images/banner-support.png', 1) }}"/></a>
{% endif %}-->
<a href="https://saltconf.com/menu-ad" target="_blank"><img class="nolightbox sidebar-banner center" src="https://get.saltstack.com/rs/304-PHQ-615/images/Salt-docs-menu-ad-250x63.jpg"/></a>
{% if build_type=="next" %}

Binary file not shown (image replaced; 767 KiB before, 12 KiB after).

View file

@ -255,7 +255,7 @@ on_saltstack = 'SALT_ON_SALTSTACK' in os.environ
project = 'Salt'
version = salt.version.__version__
latest_release = '2019.2.0' # latest release
latest_release = '2019.2.2' # latest release
previous_release = '2018.3.4' # latest release from previous branch
previous_release_dir = '2018.3' # path on web server for previous branch
next_release = '' # next release
@ -387,7 +387,7 @@ html_short_title = 'Salt'
html_static_path = ['_static']
html_logo = None # specified in the theme layout.html
html_favicon = 'favicon.ico'
html_use_smartypants = False
smartquotes = False
# Use Google customized search or use Sphinx built-in JavaScript search
if on_saltstack:

View file

@ -1,6 +1,6 @@
.\" Man page generated from reStructuredText.
.
.TH "SALT-API" "1" "Sep 05, 2019" "2019.2.1" "Salt"
.TH "SALT-API" "1" "Oct 02, 2019" "2019.2.2" "Salt"
.SH NAME
salt-api \- salt-api Command
.

View file

@ -1,6 +1,6 @@
.\" Man page generated from reStructuredText.
.
.TH "SALT-CALL" "1" "Sep 05, 2019" "2019.2.1" "Salt"
.TH "SALT-CALL" "1" "Oct 02, 2019" "2019.2.2" "Salt"
.SH NAME
salt-call \- salt-call Documentation
.

View file

@ -1,6 +1,6 @@
.\" Man page generated from reStructuredText.
.
.TH "SALT-CLOUD" "1" "Sep 05, 2019" "2019.2.1" "Salt"
.TH "SALT-CLOUD" "1" "Oct 02, 2019" "2019.2.2" "Salt"
.SH NAME
salt-cloud \- Salt Cloud Command
.
@ -205,21 +205,21 @@ the provider\(aqs associated profiles, such as \fBdigitalocean\fP, or pass in
.TP
.B \-\-list\-locations=LIST_LOCATIONS
Display a list of locations available in configured cloud providers. Pass
the cloud provider that available locations are desired on, aka "linode",
the cloud provider that available locations are desired on, such as "linode",
or pass "all" to list locations for all configured cloud providers
.UNINDENT
.INDENT 0.0
.TP
.B \-\-list\-images=LIST_IMAGES
Display a list of images available in configured cloud providers. Pass the
cloud provider that available images are desired on, aka "linode", or pass
cloud provider that available images are desired on, such as "linode", or pass
"all" to list images for all configured cloud providers
.UNINDENT
.INDENT 0.0
.TP
.B \-\-list\-sizes=LIST_SIZES
Display a list of sizes available in configured cloud providers. Pass the
cloud provider that available sizes are desired on, aka "AWS", or pass
cloud provider that available sizes are desired on, such as "AWS", or pass
"all" to list sizes for all configured cloud providers
.UNINDENT
.SS Cloud Credentials

View file

@ -1,6 +1,6 @@
.\" Man page generated from reStructuredText.
.
.TH "SALT-CP" "1" "Sep 05, 2019" "2019.2.1" "Salt"
.TH "SALT-CP" "1" "Oct 02, 2019" "2019.2.2" "Salt"
.SH NAME
salt-cp \- salt-cp Documentation
.

View file

@ -1,6 +1,6 @@
.\" Man page generated from reStructuredText.
.
.TH "SALT-KEY" "1" "Sep 05, 2019" "2019.2.1" "Salt"
.TH "SALT-KEY" "1" "Oct 02, 2019" "2019.2.2" "Salt"
.SH NAME
salt-key \- salt-key Documentation
.

View file

@ -1,6 +1,6 @@
.\" Man page generated from reStructuredText.
.
.TH "SALT-MASTER" "1" "Sep 05, 2019" "2019.2.1" "Salt"
.TH "SALT-MASTER" "1" "Oct 02, 2019" "2019.2.2" "Salt"
.SH NAME
salt-master \- salt-master Documentation
.

View file

@ -1,6 +1,6 @@
.\" Man page generated from reStructuredText.
.
.TH "SALT-MINION" "1" "Sep 05, 2019" "2019.2.1" "Salt"
.TH "SALT-MINION" "1" "Oct 02, 2019" "2019.2.2" "Salt"
.SH NAME
salt-minion \- salt-minion Documentation
.

View file

@ -1,6 +1,6 @@
.\" Man page generated from reStructuredText.
.
.TH "SALT-PROXY" "1" "Sep 05, 2019" "2019.2.1" "Salt"
.TH "SALT-PROXY" "1" "Oct 02, 2019" "2019.2.2" "Salt"
.SH NAME
salt-proxy \- salt-proxy Documentation
.

View file

@ -1,6 +1,6 @@
.\" Man page generated from reStructuredText.
.
.TH "SALT-RUN" "1" "Sep 05, 2019" "2019.2.1" "Salt"
.TH "SALT-RUN" "1" "Oct 02, 2019" "2019.2.2" "Salt"
.SH NAME
salt-run \- salt-run Documentation
.

View file

@ -1,6 +1,6 @@
.\" Man page generated from reStructuredText.
.
.TH "SALT-SSH" "1" "Sep 05, 2019" "2019.2.1" "Salt"
.TH "SALT-SSH" "1" "Oct 02, 2019" "2019.2.2" "Salt"
.SH NAME
salt-ssh \- salt-ssh Documentation
.

View file

@ -1,6 +1,6 @@
.\" Man page generated from reStructuredText.
.
.TH "SALT-SYNDIC" "1" "Sep 05, 2019" "2019.2.1" "Salt"
.TH "SALT-SYNDIC" "1" "Oct 02, 2019" "2019.2.2" "Salt"
.SH NAME
salt-syndic \- salt-syndic Documentation
.

View file

@ -1,6 +1,6 @@
.\" Man page generated from reStructuredText.
.
.TH "SALT-UNITY" "1" "Sep 05, 2019" "2019.2.1" "Salt"
.TH "SALT-UNITY" "1" "Oct 02, 2019" "2019.2.2" "Salt"
.SH NAME
salt-unity \- salt-unity Command
.

View file

@ -1,6 +1,6 @@
.\" Man page generated from reStructuredText.
.
.TH "SALT" "1" "Sep 05, 2019" "2019.2.1" "Salt"
.TH "SALT" "1" "Oct 02, 2019" "2019.2.2" "Salt"
.SH NAME
salt \- salt
.

File diff suppressed because it is too large.

View file

@ -1,6 +1,6 @@
.\" Man page generated from reStructuredText.
.
.TH "SPM" "1" "Sep 05, 2019" "2019.2.1" "Salt"
.TH "SPM" "1" "Oct 02, 2019" "2019.2.2" "Salt"
.SH NAME
spm \- Salt Package Manager Command
.

View file

@ -1,7 +1,7 @@
.. _all-salt.cache:
=============
Cache Modules
cache modules
=============
.. currentmodule:: salt.cache

View file

@ -146,19 +146,19 @@ Cloud Providers Listings
.. option:: --list-locations=LIST_LOCATIONS
Display a list of locations available in configured cloud providers. Pass
the cloud provider that available locations are desired on, aka "linode",
the cloud provider that available locations are desired on, such as "linode",
or pass "all" to list locations for all configured cloud providers
.. option:: --list-images=LIST_IMAGES
Display a list of images available in configured cloud providers. Pass the
cloud provider that available images are desired on, aka "linode", or pass
cloud provider that available images are desired on, such as "linode", or pass
"all" to list images for all configured cloud providers
.. option:: --list-sizes=LIST_SIZES
Display a list of sizes available in configured cloud providers. Pass the
cloud provider that available sizes are desired on, aka "AWS", or pass
cloud provider that available sizes are desired on, such as "AWS", or pass
"all" to list sizes for all configured cloud providers
Cloud Credentials

View file

@ -866,9 +866,8 @@ Default: ``zeromq``
Changes the underlying transport layer. ZeroMQ is the recommended transport
while additional transport layers are under development. Supported values are
``zeromq``, ``raet`` (experimental), and ``tcp`` (experimental). This setting has
a significant impact on performance and should not be changed unless you know
what you are doing!
``zeromq`` and ``tcp`` (experimental). This setting has a significant impact on
performance and should not be changed unless you know what you are doing!
.. code-block:: yaml
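    # Illustrative value only: zeromq is the documented default and
    # tcp is the experimental alternative described above.
    transport: zeromq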

View file

@ -1367,9 +1367,8 @@ Default: ``zeromq``
Changes the underlying transport layer. ZeroMQ is the recommended transport
while additional transport layers are under development. Supported values are
``zeromq``, ``raet`` (experimental), and ``tcp`` (experimental). This setting has
a significant impact on performance and should not be changed unless you know
what you are doing!
``zeromq`` and ``tcp`` (experimental). This setting has a significant impact
on performance and should not be changed unless you know what you are doing!
.. code-block:: yaml

View file

@ -362,7 +362,6 @@ execution modules
qemu_nbd
quota
rabbitmq
raet_publish
rallydev
random_org
rbac_solaris
@ -505,6 +504,7 @@ execution modules
win_timezone
win_useradd
win_wua
win_wusa
x509
xapi_virt
xbpspkg

View file

@ -1,6 +0,0 @@
=========================
salt.modules.raet_publish
=========================
.. automodule:: salt.modules.raet_publish
:members:

View file

@ -0,0 +1,6 @@
=====================
salt.modules.win_wusa
=====================
.. automodule:: salt.modules.win_wusa
:members:

View file

@ -14,5 +14,6 @@ serializer modules
json
msgpack
python
toml
yaml
yamlex

View file

@ -0,0 +1,6 @@
=====================
salt.serializers.toml
=====================
.. automodule:: salt.serializers.toml
:members:
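A minimal usage sketch, assuming this module follows the same ``serialize``/``deserialize`` interface as the other serializer modules in the index:
.. code-block:: python

    from salt.serializers import toml

    data = {'name': 'salt', 'enabled': True}
    text = toml.serialize(data)             # render the dict as a TOML string
    assert toml.deserialize(text) == data   # round-trip back to the original dict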

View file

@ -311,6 +311,7 @@ state modules
win_snmp
win_system
win_wua
win_wusa
winrepo
x509
xmpp

View file

@ -0,0 +1,5 @@
salt.states.win_wusa module
===========================
.. automodule:: salt.states.win_wusa
:members:
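A hypothetical usage sketch, assuming the state is virtualized as ``wusa`` and exposes an ``installed`` function taking a ``source`` argument that points at the ``.msu`` file; the KB number and path are placeholders:
.. code-block:: yaml

    KB123456:
      wusa.installed:
        - source: salt://updates/KB123456.msu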

View file

@ -5,10 +5,10 @@ Ordering States
===============
The way in which configuration management systems are executed is a hotly
debated topic in the configuration management world. Two
major philosophies exist on the subject, to either execute in an imperative
fashion where things are executed in the order in which they are defined, or
in a declarative fashion where dependencies need to be mapped between objects.
debated topic in the configuration management world. Two major philosophies
exist on the subject, to either execute in an imperative fashion where things
are executed in the order in which they are defined, or in a declarative
fashion where dependencies need to be mapped between objects.
Imperative ordering is finite and generally considered easier to write, but
declarative ordering is much more powerful and flexible but generally considered
@ -27,20 +27,17 @@ State Auto Ordering
.. versionadded: 0.17.0
Salt always executes states in a finite manner, meaning that they will always
execute in the same order regardless of the system that is executing them.
But in Salt 0.17.0, the ``state_auto_order`` option was added. This option
makes states get evaluated in the order in which they are defined in sls
files, including the top.sls file.
execute in the same order regardless of the system that is executing them. This
evaluation order makes it easy to know what order the states will be executed in,
but it is important to note that the requisite system will override the ordering
defined in the files, and the ``order`` option, described below, will also
override the order in which states are executed.
The evaluation order makes it easy to know what order the states will be
executed in, but it is important to note that the requisite system will
override the ordering defined in the files, and the ``order`` option described
below will also override the order in which states are defined in sls files.
If the classic ordering is preferred (lexicographic), then set
``state_auto_order`` to ``False`` in the master configuration file. Otherwise,
``state_auto_order`` defaults to ``True``.
This ordering system can be disabled in preference of lexicographic (classic)
ordering by setting the ``state_auto_order`` option to ``False`` in the master
configuration file. Otherwise, ``state_auto_order`` defaults to ``True``.
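For example, a minimal master configuration entry to switch back to lexicographic ordering:
.. code-block:: yaml

    # /etc/salt/master
    state_auto_order: False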
How compiler ordering is managed is described further in :ref:`compiler-ordering`.
.. _ordering_requisites:

View file

@ -10,7 +10,6 @@ Developing Salt
modules/index
extend/index
tests/*
raet/index
git/index
conventions/index
../../ref/internals/index

View file

@ -182,7 +182,7 @@ From above example, 'external_pillar' is the top dictionary name. Therefore:
.. code-block:: bash
salt-call '*' pillar.get external_pillar
salt '*' pillar.get external_pillar
You shouldn't just add items to ``pillar`` and return that, since that will
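A minimal sketch of an external pillar module following this guidance; ``ext_pillar`` is the standard entry point for external pillar modules, and the key and value names here are placeholders:
.. code-block:: python

    def ext_pillar(minion_id, pillar, *args, **kwargs):
        # Build and return only the new data; do not mutate and
        # return the ``pillar`` dict that is passed in.
        return {'external_pillar': {'example_key': 'example_value'}}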

View file

@ -1,272 +0,0 @@
raet
====
# RAET
# Reliable Asynchronous Event Transport Protocol
.. seealso:: :ref:`RAET Overview <raet>`
Protocol
--------
Layering:
OSI Layers
7: Application: Format: Data (Stack to Application interface buffering etc)
6: Presentation: Format: Data (Encrypt-Decrypt convert to machine independent format)
5: Session: Format: Data (Interhost communications. Authentication. Groups)
4: Transport: Format: Segments (Reliable delivery of Message, Transactions, Segmentation, Error checking)
3: Network: Format: Packets/Datagrams (Addressing Routing)
2: Link: Format: Frames ( Reliable per frame communications connection, Media access controller )
1: Physical: Bits (Transceiver communication connection not reliable)
Link is hidden from Raet
Network is IP host address and Udp Port
Transport is Raet transactions, service kind, tail error checking,
Could include header signing as part of transport reliable delivery serialization of header
Session is session id key exchange for signing. Grouping is Road (like 852 channel)
Presentation is Encrypt Decrypt body Serialize Deserialize Body
Application is body data dictionary
Header signing spans both the Transport and Session layers.
Header
------
JSON Header (Tradeoff some processing speed for extensibility, ease of use, readability)
Body initially JSON but support for "packed" binary body
Packet
------
Header ASCII Safe JSON
Header termination:
Empty line given by double pair of carriage return linefeed
/r/n/r/n
10 13 10 13
ADAD
1010 1101 1010 1101
In json carriage return and newline characters cannot appear in a json encoded
string unless they are escaped with backslash, so the 4 byte combination is illegal in valid
json that does not have multi-byte unicode characters.
This means the header must be ascii safe, so no multibyte utf-8 strings are
allowed in header.
Following Header Terminator is variable length signature block. This is binary
and the length is provided in the header.
Following the signature block is the packet body or data.
This may either be JSON or packed binary.
The format is given in the json header
Finally is an optional tail block for error checking or encryption details
Header Fields
-------------
In UDP header
sh = source host
sp = source port
dh = destination host
dp = destination port
In RAET Header
hk = header kind
hl = header length
vn = version number
sd = Source Device ID
dd = Destination Device ID
cf = Corresponder Flag
mf = Multicast Flag
si = Session ID
ti = Transaction ID
sk = Service Kind
pk = Packet Kind
bf = Burst Flag (Send all Segments or Ordered packets without interleaved acks)
oi = Order Index
dt = DateTime Stamp
sn = Segment Number
sc = Segment Count
pf = Pending Segment Flag
af = All Flag (Resent all Segments not just one)
nk = Auth header kind
nl = Auth header length
bk = body kind
bl = body length
tk = tail kind
tl = tail length
fg = flags packed (Flags) Default '00' hex string
2 byte Hex string with bits (0, 0, af, pf, 0, bf, mf, cf)
Zeros are TBD flags
Session Bootstrap
-----------------
Minion sends packet with SID of Zero with public key of minions Public Private Key pair
Master acks packet with SID of Zero to let minion know it received the request
Some time later Master sends packet with SID of zero that accepts the Minion
Minion
Session
-------
Session is important for security. Want one session opened and then multiple
transactions within session.
Session ID
SID
sid
GUID hash to guarantee uniqueness since no guarantee of nonvolatile storage
or require file storage to keep last session ID used.
Service Types or Modular Services
---------------------------------
Four Service Types
A) One or more maybe (unacknowledged repeat) maybe means no guarantee
B) Exactly one at most (ack with retries) (duplicate detection idempotent)
at most means fixed number of retries has finite probability of failing
B1) finite retries
B2) infinite retries with exponential back-off up to a maximum delay
C) Exactly one of sequence at most (sequence numbered)
Receiver requests retry of missing packet with same B1 or B2 retry type
D) End to End (Application layer Request Response)
This is two B sub transactions
Initially unicast messaging
Eventually support for Multicast
The use case for C) is to fragment large packets as once a UDP packet
exceeds the frame size its reliability goes way down
So its more reliable to fragment large packets.
Better approach might be to have more modularity.
Services Levels
1) Maybe one or more
A) Fire and forget
no transaction either side
B) Repeat, no ack, no dupdet
repeat counter send side,
no transaction on receive side
C) Repeat, no Ack, dupdet
repeat counter send side,
dup detection transaction receive side
2) More or Less Once
A) retry finite, ack no dupdet
retry timer send side, finite number of retires
ack receive side no dupdet
3) At most Once
A) retry finite, ack, dupdet
retry timer send side, finite number of retires
ack receive side dupdet
4) Exactly once
A) ack retry
retry timer send side,
ack and duplicate detection receive side
Infinite retries with exponential backoff
5) Sequential sequence number
A) reorder escrow
B) Segmented packets
6) request response to application layer
Service Features
1) repeats
2) ack retry transaction id
3) sequence number duplicate detection out of order detection sequencing
4) rep-req
Always include transaction id since multiple transactions on same port
So get duplicate detection for free if keep transaction alive but if use
A) Maybe one or more
B1) At Least One
B2) Exactly One
C) One of sequence
D) End to End
A) Sender creates transaction id for number of repeats but receiver does not
keep transaction alive
B1) Sender creates transaction id keeps it for retries.
Receiver keeps it to send ack then kills so retry could be duplicate not detected
B2) Sender creates transaction id keeps for retries
Receiver keeps tid for acks on any retires so no duplicates.
C) Sender creates TID and Sequence Number.
Receiver checks for out of order sequence and can request retry.
D) Application layer sends response. So question is do we keep transaction open
or have response be new transaction. No because then we need a rep-req ID so
might as well use the same transaction id. Just keep alive until get response.
Little advantage to B1 vs B2 not having duplicates.
So 4 service types
A) Maybe one or more (unacknowledged repeat)
B) Exactly One (At most one) (ack with retry) (duplicate detection idempotent)
C) One of Sequence (sequence numbered)
D) End to End
Also multicast or unicast
Modular Transaction Table
Sender Side:
Transaction ID plus transaction source sender or receiver generated transaction id
Repeat Counter
Retry Timer Retry Counter (finite retries)
Redo Timer (infinite redos with exponential backoff)
Sequence number without acks (look for resend requests)
Sequence with ack (wait for ack before sending next in sequence)
Segmentation
Receiver Side:
Nothing just accept packet
Acknowledge (can delete transaction after acknowledge)
No duplicate detection
Transaction timeout (keep transaction until timeout)
Duplicate detection save transaction id duplicate detection timeout
Request resend of missing packet in sequence
Sequence reordering with escrow timeout wait escrow before requesting resend
Unsegmentation (request resends of missing segment)

View file

@ -97,27 +97,20 @@ Salt should run on any Unix-like platform so long as the dependencies are met.
* `Tornado`_ - Web framework and asynchronous networking library
* `futures`_ - Python2 only dependency. Backport of the concurrent.futures package from Python 3.2
Depending on the chosen Salt transport, `ZeroMQ`_ or `RAET`_, dependencies
vary:
* ZeroMQ:
* `ZeroMQ`_ >= 3.2.0
* `pyzmq`_ >= 2.2.0 - ZeroMQ Python bindings
* `PyCrypto`_ - The Python cryptography toolkit
* RAET:
* `libnacl`_ - Python bindings to `libsodium`_
* `ioflo`_ - The flo programming interface raet and salt-raet is built on
* `RAET`_ - The worlds most awesome UDP protocol
Salt defaults to the `ZeroMQ`_ transport, and the choice can be made at install
time, for example:
Salt defaults to the `ZeroMQ`_ transport. The ``--salt-transport`` installation
option is available, but currently only supports the ``zeromq`` option. This
may be expanded in the future.
.. code-block:: bash
python setup.py --salt-transport=raet install
python setup.py --salt-transport=zeromq install
This way, only the required dependencies are pulled by the setup script if need
be.
@ -127,7 +120,7 @@ provided like:
.. code-block:: bash
pip install --install-option="--salt-transport=raet" salt
pip install --install-option="--salt-transport=zeromq" salt
.. note::
Salt does not bundle dependencies that are typically distributed as part of
@ -156,10 +149,6 @@ Optional Dependencies
.. _`apache-libcloud`: http://libcloud.apache.org
.. _`Requests`: http://docs.python-requests.org/en/latest
.. _`Tornado`: http://www.tornadoweb.org/en/stable/
.. _`libnacl`: https://github.com/saltstack/libnacl
.. _`ioflo`: https://github.com/ioflo/ioflo
.. _`RAET`: https://github.com/saltstack/raet
.. _`libsodium`: https://github.com/jedisct1/libsodium
.. _`futures`: https://github.com/agronholm/pythonfutures

View file

@ -54,8 +54,6 @@ release.
Simply stated, users running Salt with RAET should expect some hiccups as we
hammer out the update. This is a BETA release of Salt RAET.
For information about how to use Salt with RAET please see the
:ref:`tutorial <raet>`.
Salt SSH Enhancements
=====================

View file

@ -4,6 +4,25 @@ Salt 2019.2.1 Release Notes
Version 2019.2.1 is a bugfix release for :ref:`2019.2.0 <release-2019-2-0>`.
Known Issues
============
* **ISSUE** `#54751`_: Proxy minion fails to start. Fix planned in 2019.2.2 release.
* **ISSUE** `#54762`_: URIs with IPv6 addresses are broken, preventing master-minion communication in IPv6-only environments. Fix planned in 2019.2.2 release.
* **ISSUE** `#54776`_: Minion fails to start when `ping_interval` is set in the minion configuration file. Fix planned in 2019.2.2 release.
* **ISSUE** `#54731`_: Returners (except for default salt master returner) not loading properly in Py3. Fix planned in 2019.2.2 release.
* **ISSUE** `#54758`_: salt-call state.show_states gives "Passed invalid arguments" error when an sls defined in the top.sls file is missing. Fix planned in 2019.2.2 release.
* **ISSUE** `#54765`_: Jinja from import is broken. Fix planned in 2019.2.2 release.
* **ISSUE** `#54771`_: Affects only Debian 10. pkgrepo.managed does not work when a proxy is configured for the salt-minion. No fix date available yet.
* **ISSUE** `#54759`_: Deprecation warnings for pyzmq. No fix date available yet.
Change to YAML Renderer
=======================
@ -6779,6 +6798,14 @@ Changelog for v2019.2.0..v2019.2.1
.. _`#54429`: https://github.com/saltstack/salt/issues/54429
.. _`#54433`: https://github.com/saltstack/salt/pull/54433
.. _`#54434`: https://github.com/saltstack/salt/pull/54434
.. _`#54751`: https://github.com/saltstack/salt/issues/54751
.. _`#54762`: https://github.com/saltstack/salt/issues/54762
.. _`#54776`: https://github.com/saltstack/salt/issues/54776
.. _`#54731`: https://github.com/saltstack/salt/pull/54731
.. _`#54758`: https://github.com/saltstack/salt/issues/54758
.. _`#54765`: https://github.com/saltstack/salt/issues/54765
.. _`#54771`: https://github.com/saltstack/salt/issues/54771
.. _`#54759`: https://github.com/saltstack/salt/issues/54759
.. _`#5`: https://github.com/saltstack/salt/issues/5
.. _`#6`: https://github.com/saltstack/salt/issues/6
.. _`#7745`: https://github.com/saltstack/salt/issues/7745

View file

@ -0,0 +1,359 @@
===========================
Salt 2019.2.2 Release Notes
===========================
Version 2019.2.2 is a bugfix release for :ref:`2019.2.0 <release-2019-2-0>`.
Statistics
==========
- Total Merges: **26**
- Total Issue References: **12**
- Total PR References: **26**
- Contributors: **13** (`Akm0d`_, `Ch3LL`_, `Oloremo`_, `OrlandoArcapix`_, `bryceml`_, `dhiltonp`_, `dwoz`_, `frogunder`_, `garethgreenaway`_, `javierbertoli`_, `pizzapanther`_, `s0undt3ch`_, `twangboy`_)
Changelog for v2019.2.1..v2019.2.2
==================================
*Generated at: 2019-10-11 20:54:15 UTC*
* **PR** `#54919`_: (`twangboy`_) Add missing docs for win_wusa state and module (2019.2.1)
@ *2019-10-11 18:28:11 UTC*
* 7d253bc Merge pull request `#54919`_ from twangboy/update_docs
* 57ff199 Add docs for win_wusa
* **ISSUE** `#54941`_: (`UtahDave`_) Pillar data is refreshed for EVERY salt command in 2019.2.1 and 2019.2.2 (refs: `#54942`_)
* **PR** `#54942`_: (`dwoz`_) Fix for 54941 pillar_refresh regression
@ *2019-10-11 18:27:31 UTC*
* 2f817bc Merge pull request `#54942`_ from dwoz/fix-54941
* cb5d326 Add a test for 54941 using test.ping
* 348d1c4 Add regression tests for issue 54941
* 766f3ca Initial commit of a potential fix for 54941
* **PR** `#54897`_: (`bryceml`_) update version numbers to be correct
@ *2019-10-05 01:59:14 UTC*
* f783108 Merge pull request `#54897`_ from bryceml/2019.2.1_fix_docs
* e9a2a70 update version numbers to be correct
* **PR** `#54894`_: (`bryceml`_) 2019.2.1 fix docs
@ *2019-10-04 22:31:26 UTC*
* 3233663 Merge pull request `#54894`_ from bryceml/2019.2.1_fix_docs
* 2456aaa Porting PR `#52948`_ to 2019.2.1
* 94a1e3b Porting PR `#52752`_ to 2019.2.1
* c7b7474 modifying saltconf ads
* d48057b add new saltconf ads
* **PR** `#54858`_: (`frogunder`_) remove in progress from releasenotes 2019.2.2
@ *2019-10-02 20:42:59 UTC*
* 4b06eca Merge pull request `#54858`_ from frogunder/releasenotes_remove2019.2.2
* a697abd remove in progress from releasenotes 2019.2.2
* **PR** `#54854`_: (`frogunder`_) releasenotes 2019.2.2
@ *2019-10-02 18:58:21 UTC*
* aaf2d1c Merge pull request `#54854`_ from frogunder/release_notes_2019.2.2
* a41dc59 Update 2019.2.2.rst
* 9bea043 releasenotes 2019.2.2
* **PR** `#54852`_: (`frogunder`_) Update man pages for 2019.2.2
@ *2019-10-02 18:27:07 UTC*
* 10d433f Merge pull request `#54852`_ from frogunder/man_pages_2019.2.2
* 92bc4b2 Update man pages for 2019.2.2
* **PR** `#54845`_: (`s0undt3ch`_) Remove debug print
@ *2019-10-02 17:38:21 UTC*
* 8ca6b20 Merge pull request `#54845`_ from s0undt3ch/hotfix/event-return-fix-2019.2.1
* 3937890 Remove debug print
* **ISSUE** `#54755`_: (`Reiner030`_) 2019.2.1/2019.2.0 pip failures even when not using pip (refs: `#54826`_)
* **PR** `#54826`_: (`dwoz`_) Fix issue 54755 and add regression tests
@ *2019-10-01 20:07:46 UTC*
* 9e3914a Merge pull request `#54826`_ from dwoz/issue_54755
* 0bad9cb Handle locals and globals separatly
* bcbe9a2 Only purge pip when needed
* d2f98ca Fix issue 54755 and add regression tests
* **PR** `#54830`_: (`frogunder`_) Add known issues to 2019.2.1 release notes
@ *2019-10-01 16:23:30 UTC*
* ba569d0 Merge pull request `#54830`_ from frogunder/update_relasenotes_2019.2.1
* 8cdb27b Update 2019.2.1.rst
* 14f955c Add known issues to 2019.2.1 release notes
* **ISSUE** `#54521`_: (`Oloremo`_) [Regression] Failhard, batch and retcodes (refs: `#54806`_)
* **PR** `#54806`_: (`Oloremo`_) [Regression] Batch with failhard fix
@ *2019-10-01 14:51:47 UTC*
* 433b6fa Merge pull request `#54806`_ from Oloremo/failhard-batch-fix-2019.2.1
* 6684793 Merge branch '2019.2.1' into failhard-batch-fix-2019.2.1
* 3e0e928 Added tests for cli and runner
* 2416516 Made batch work properly with failhard in cli and runner
* **ISSUE** `#54820`_: (`OrangeDog`_) schedule.present not idempotent when scheduler disabled (refs: `#54828`_)
* **PR** `#54828`_: (`garethgreenaway`_) [2019.2.1] Fix global disabling code in scheduler
@ *2019-10-01 09:27:49 UTC*
* ed94aa5 Merge pull request `#54828`_ from garethgreenaway/54820_fix_schedule_disabled_job_enabled_bug
* be15a28 Rework code that handles individual jobs being disabled and scheduler being globally being disabled. Previously disabling the schedule would result in individual jobs being disabled when they were run through eval. This change does not change schedule items.
* **PR** `#54778`_: (`Akm0d`_) fix broken salt-cloud openstack query
@ *2019-10-01 09:23:27 UTC*
* 435b40c Merge pull request `#54778`_ from Akm0d/master_openstack_query_fix
* ba4ba2a fixed pylint errors in openstack test
* d9a8517 Added openstack tests for openstack --query fix
* 59214ad Fallback to image id if we don't have an image name
* 3a42a4d fixed pylint error
* 0074d18 created unit tests for openstack
* 4255e3e Merge branch '2019.2.1' of https://github.com/saltstack/salt into HEAD
* 1c2821b Return a configured provider, not a bool
* c585550 fix broken salt-cloud openstack query
* **ISSUE** `#54762`_: (`margau`_) 2019.2.1: Breaks Minion-Master Communication (refs: `#54823`_, `#54784`_, `#54807`_)
* **PR** `#54823`_: (`dhiltonp`_) ip_bracket can now accept ipv6 addresses with brackets
@ *2019-10-01 01:13:34 UTC*
* 93b1c4d Merge pull request `#54823`_ from dhiltonp/maybe-bracket
* faa1d98 ip_bracket can now accept ipv6 addresses with brackets
* **ISSUE** `#54762`_: (`margau`_) 2019.2.1: Breaks Minion-Master Communication (refs: `#54823`_, `#54784`_, `#54807`_)
* **PR** `#54807`_: (`dwoz`_) Fix pip state pip >=10.0 and <=18.0
@ *2019-09-30 09:20:14 UTC*
* **PR** `#54772`_: (`OrlandoArcapix`_) Fix import of pip modules (refs: `#54807`_)
* b61b30d Merge pull request `#54807`_ from dwoz/patch-2
* 664806b Add unit test for pip state fix
* e637658 Revert change to pip version query
* 42810a2 Fix import of pip modules
* **ISSUE** `#54741`_: (`kjkeane`_) Schedulers Fail to Run (refs: `#54799`_)
* **PR** `#54799`_: (`garethgreenaway`_) Fix to scheduler when job without a time element is run with schedule.run_job
@ *2019-09-30 00:19:43 UTC*
* 4ee1ff6 Merge pull request `#54799`_ from garethgreenaway/54741_run_job_fails_without_time_element
* 44caa81 Merge branch '54741_run_job_fails_without_time_element' of github.com:garethgreenaway/salt into 54741_run_job_fails_without_time_element
* 3ae4f75 Merge branch '2019.2.1' into 54741_run_job_fails_without_time_element
* 8afd2d8 Removing extra, unnecessary code.
* 549cfb8 Fixing test_run_job test to ensure the right data is being asserted. Updating unit/test_module_names.py to include integration.scheduler.test_run_job.
* 7d716d6 Fixing lint.
* ec68591 If a scheduled job does not contains a time element parameter then running that job with schedule.run_job fails with a traceback because data['run'] does not exist.
* **PR** `#54785`_: (`Ch3LL`_) Fix state.show_states when sls file missing in top file
@ *2019-09-30 00:00:34 UTC*
* b90c3f2 Merge pull request `#54785`_ from Ch3LL/fix_show_states
* 96540be Clean up files after state.show_states test
* ad265ae Fix state.show_states when sls file missing
* **ISSUE** `#54768`_: (`paul-palmer`_) 2019.2.1 Some Jinja imports not found (refs: `#54780`_)
* **ISSUE** `#54765`_: (`awerner`_) 2019.2.1: Jinja from import broken (refs: `#54780`_)
* **PR** `#54780`_: (`dwoz`_) Fix masterless jinja imports
@ *2019-09-29 22:12:48 UTC*
* b9459e6 Merge pull request `#54780`_ from dwoz/fix-masterless-jinja-imports
* 5d873cc Merge branch '2019.2.1' into fix-masterless-jinja-imports
* e901a83 Add regression tests for jinja import bug
* 3925bb7 Fix broken jinja imports in masterless salt-call
* **ISSUE** `#54776`_: (`javierbertoli`_) Setting `ping_interval` in salt-minion's config (version 2019.2.1) prevents it from starting (refs: `#54777`_)
* **PR** `#54777`_: (`javierbertoli`_) Fix minion's remove_periodic_callback()
@ *2019-09-29 21:33:53 UTC*
* 4c240e5 Merge pull request `#54777`_ from netmanagers/2019.2.1
* 459c790 Merge branch '2019.2.1' into 2019.2.1
* **PR** `#54805`_: (`bryceml`_) improve lint job
@ *2019-09-29 21:24:05 UTC*
* 83f8f5c Merge pull request `#54805`_ from bryceml/2019.2.1_update_lint_salt
* ffa4ed6 improve lint job
* fa1a767 Merge branch '2019.2.1' into 2019.2.1
* **ISSUE** `#54751`_: (`jnmatlock`_) NXOS_API Proxy Minions Error KeyError: 'proxy.post_master_init' after upgrading to 2019.2.1 (refs: `#54783`_)
* **PR** `#54783`_: (`garethgreenaway`_) Ensure metaproxy directory is included in sdist
@ *2019-09-29 02:17:23 UTC*
* 6b43fbe Merge pull request `#54783`_ from garethgreenaway/54751_fixing_missing_metaproxy_directory
* 67d9938 Merge branch '2019.2.1' into 54751_fixing_missing_metaproxy_directory
* a35e609 Adding __init__.py to metaproxy directory so that metaproxy is included when running setup.py.
* **ISSUE** `#54762`_: (`margau`_) 2019.2.1: Breaks Minion-Master Communication (refs: `#54823`_, `#54784`_, `#54807`_)
* **PR** `#54784`_: (`dhiltonp`_) fix dns_check to return uri-compatible ipv6 addresses, add tests
@ *2019-09-28 08:36:51 UTC*
* 7912b67 Merge pull request `#54784`_ from dhiltonp/ipv46
* 042a101 Merge branch '2019.2.1' into ipv46
* **PR** `#54779`_: (`frogunder`_) Add 2019.2.2 release notes
@ *2019-09-27 17:45:46 UTC*
* 2f94b44 Merge pull request `#54779`_ from frogunder/releasenotes_2019.2.2
* 67f564b Add 2019.2.2 release notes
* ac6b54f Merge branch '2019.2.1' into ipv46
* 93ebd09 update mock (py2) from 2.0.0 to 3.0.5
* 37bcc4c fix dns_check to return uri-compatible ipv6 addresses, add tests
* dd86c46 Merge pull request `#1`_ from waynew/pull/54777-callback-typo
* a57f7d0 Add tests
* c19d0b0 Fix minion's remove_periodic_callback()
* **PR** `#54731`_: (`pizzapanther`_) Fix returners not loading properly
@ *2019-09-26 17:24:27 UTC*
* 46bec3c Merge pull request `#54731`_ from pizzapanther/not-so-__new__-and-shiny
* bdf24f4 Make sure we tests salt-master's `event_return` setting
* 5499518 remove unnecessary import
* 3f8a382 fix module import
* 0746aa7 remove __new__ method since it was removed from parent class
* **PR** `#54706`_: (`bryceml`_) 2019.2.1 ruby
@ *2019-09-23 16:00:27 UTC*
* e2b86bf Merge pull request `#54706`_ from bryceml/2019.2.1_ruby
* 168a6c1 switch to ruby 2.6.3
.. _`#1`: https://github.com/saltstack/salt/issues/1
.. _`#52752`: https://github.com/saltstack/salt/pull/52752
.. _`#52948`: https://github.com/saltstack/salt/pull/52948
.. _`#54521`: https://github.com/saltstack/salt/issues/54521
.. _`#54706`: https://github.com/saltstack/salt/pull/54706
.. _`#54731`: https://github.com/saltstack/salt/pull/54731
.. _`#54741`: https://github.com/saltstack/salt/issues/54741
.. _`#54751`: https://github.com/saltstack/salt/issues/54751
.. _`#54755`: https://github.com/saltstack/salt/issues/54755
.. _`#54762`: https://github.com/saltstack/salt/issues/54762
.. _`#54765`: https://github.com/saltstack/salt/issues/54765
.. _`#54768`: https://github.com/saltstack/salt/issues/54768
.. _`#54772`: https://github.com/saltstack/salt/pull/54772
.. _`#54776`: https://github.com/saltstack/salt/issues/54776
.. _`#54777`: https://github.com/saltstack/salt/pull/54777
.. _`#54778`: https://github.com/saltstack/salt/pull/54778
.. _`#54779`: https://github.com/saltstack/salt/pull/54779
.. _`#54780`: https://github.com/saltstack/salt/pull/54780
.. _`#54783`: https://github.com/saltstack/salt/pull/54783
.. _`#54784`: https://github.com/saltstack/salt/pull/54784
.. _`#54785`: https://github.com/saltstack/salt/pull/54785
.. _`#54799`: https://github.com/saltstack/salt/pull/54799
.. _`#54805`: https://github.com/saltstack/salt/pull/54805
.. _`#54806`: https://github.com/saltstack/salt/pull/54806
.. _`#54807`: https://github.com/saltstack/salt/pull/54807
.. _`#54820`: https://github.com/saltstack/salt/issues/54820
.. _`#54823`: https://github.com/saltstack/salt/pull/54823
.. _`#54826`: https://github.com/saltstack/salt/pull/54826
.. _`#54828`: https://github.com/saltstack/salt/pull/54828
.. _`#54830`: https://github.com/saltstack/salt/pull/54830
.. _`#54845`: https://github.com/saltstack/salt/pull/54845
.. _`#54852`: https://github.com/saltstack/salt/pull/54852
.. _`#54854`: https://github.com/saltstack/salt/pull/54854
.. _`#54858`: https://github.com/saltstack/salt/pull/54858
.. _`#54894`: https://github.com/saltstack/salt/pull/54894
.. _`#54897`: https://github.com/saltstack/salt/pull/54897
.. _`#54919`: https://github.com/saltstack/salt/pull/54919
.. _`#54941`: https://github.com/saltstack/salt/issues/54941
.. _`#54942`: https://github.com/saltstack/salt/pull/54942
.. _`Akm0d`: https://github.com/Akm0d
.. _`Ch3LL`: https://github.com/Ch3LL
.. _`Oloremo`: https://github.com/Oloremo
.. _`OrangeDog`: https://github.com/OrangeDog
.. _`OrlandoArcapix`: https://github.com/OrlandoArcapix
.. _`Reiner030`: https://github.com/Reiner030
.. _`UtahDave`: https://github.com/UtahDave
.. _`awerner`: https://github.com/awerner
.. _`bryceml`: https://github.com/bryceml
.. _`dhiltonp`: https://github.com/dhiltonp
.. _`dwoz`: https://github.com/dwoz
.. _`frogunder`: https://github.com/frogunder
.. _`garethgreenaway`: https://github.com/garethgreenaway
.. _`javierbertoli`: https://github.com/javierbertoli
.. _`jnmatlock`: https://github.com/jnmatlock
.. _`kjkeane`: https://github.com/kjkeane
.. _`margau`: https://github.com/margau
.. _`paul-palmer`: https://github.com/paul-palmer
.. _`pizzapanther`: https://github.com/pizzapanther
.. _`s0undt3ch`: https://github.com/s0undt3ch
.. _`twangboy`: https://github.com/twangboy

View file

@ -0,0 +1,5 @@
:orphan:
==================================
Salt Release Notes - Codename Neon
==================================

View file

@ -8,7 +8,7 @@ Installing/Testing a Salt Release Candidate
It's time for a new feature release of Salt! Follow the instructions below to
install the latest release candidate of Salt, and try :ref:`all the shiny new
features <release-2018-3-0>`! Be sure to report any bugs you find on `Github
features <release-2019-2-0>`! Be sure to report any bugs you find on `Github
<https://github.com/saltstack/salt/issues/new/>`_.
Installing Using Packages
@ -18,22 +18,52 @@ Builds for a few platforms are available as part of the RC at https://repo.salts
.. note::
For RHEL and Ubuntu, Follow the instructions on
https://repo.saltstack.com/, but insert ``salt_rc/`` into the URL between
the hostname and the remainder of the path. For example:
Follow the instructions on https://repo.saltstack.com/,
but insert ``salt_rc/`` into the URL between the hostname
and the remainder of the path.
For Redhat Python 2
.. code-block:: bash
baseurl=https://repo.saltstack.com/salt_rc/yum/redhat/$releasever/$basearch/
For Redhat Python 3
.. code-block:: bash
baseurl=https://repo.saltstack.com/salt_rc/py3/redhat/$releasever/$basearch/
For Ubuntu Python 2
.. code-block:: none
deb http://repo.saltstack.com/salt_rc/apt/ubuntu/14.04/amd64 jessie main
deb http://repo.saltstack.com/salt_rc/apt/ubuntu/18.04/amd64 bionic main
For Ubuntu Python 3
.. code-block:: none
deb http://repo.saltstack.com/salt_rc/py3/ubuntu/18.04/amd64 bionic main
For Debian Python 2
.. code-block:: none
deb http://repo.saltstack.com/salt_rc/apt/debian/9/amd64 stretch main
For Debian Python 3
.. code-block:: none
deb http://repo.saltstack.com/salt_rc/py3/debian/9/amd64 stretch main
Available builds:
- Ubuntu16
- Redhat7
- Ubuntu 18
- Debian 9
- Redhat 7
- Windows
.. FreeBSD
@ -47,14 +77,14 @@ You can install a release candidate of Salt using `Salt Bootstrap
.. code-block:: bash
curl -o install_salt.sh -L https://bootstrap.saltstack.com
sudo sh install_salt.sh -P git v2018.3.0rc1
sudo sh install_salt.sh -P git v2019.2.0rc1
If you want to also install a master using Salt Bootstrap, use the ``-M`` flag:
.. code-block:: bash
curl -o install_salt.sh -L https://bootstrap.saltstack.com
sudo sh install_salt.sh -P -M git v2018.3.0rc1
sudo sh install_salt.sh -P -M git v2019.2.0rc1
If you want to install only a master and not a minion using Salt Bootstrap, use
the ``-M`` and ``-N`` flags:
@ -62,13 +92,13 @@ the ``-M`` and ``-N`` flags:
.. code-block:: bash
curl -o install_salt.sh -L https://bootstrap.saltstack.com
sudo sh install_salt.sh -P -M -N git v2018.3.0rc1
sudo sh install_salt.sh -P -M -N git v2019.2.0rc1
Installing Using PyPI
=====================
Installing from the `source archive
<https://pypi.python.org/pypi?:action=display&name=salt&version=2018.3.0rc1>`_ on
<https://pypi.python.org/pypi?:action=display&name=salt&version=2019.2.0rc1>`_ on
`PyPI <https://pypi.python.org/pypi>`_ is fairly straightforward.
.. note::
@ -106,4 +136,4 @@ Then install salt using the following command:
.. code-block:: bash
sudo pip install salt==2018.3.0rc1
sudo pip install salt==2019.2.0rc1

View file

@ -4,18 +4,6 @@
Thorium Complex Reactor
=======================
.. note::
Thorium is a provisional feature of Salt and is subject to change
and removal if the feature proves to not be a viable solution.
.. note::
Thorium was added to Salt as an experimental feature in the 2016.3.0
release, as of 2016.3.0 this feature is considered experimental, no
guarantees are made for support of any kind yet.
The original Salt Reactor is based on the idea of listening for a specific
event and then reacting to it. This model comes with many logical limitations,
for instance it is very difficult (and hacky) to fire a reaction based on

View file

@ -34,4 +34,3 @@ guarantee minion-master confidentiality.
zeromq
tcp
raet/index

View file

@ -1,145 +0,0 @@
.. _raet:
==================
The RAET Transport
==================
.. note::
The RAET transport is in very early development, it is functional but no
promises are yet made as to its reliability or security.
As for reliability and security, the encryption used has been audited and
our tests show that raet is reliable. With this said we are still conducting
more security audits and pushing the reliability.
This document outlines the encryption used in RAET
.. versionadded:: 2014.7.0
The Reliable Asynchronous Event Transport, or RAET, is an alternative transport
medium developed specifically with Salt in mind. It has been developed to
allow queuing to happen up on the application layer and comes with socket
layer encryption. It also abstracts a great deal of control over the socket
layer and makes it easy to bubble up errors and exceptions.
RAET also offers very powerful message routing capabilities, allowing for
messages to be routed between processes on a single machine all the way up to
processes on multiple machines. Messages can also be restricted, allowing
processes to be sent messages of specific types from specific sources
allowing for trust to be established.
Using RAET in Salt
==================
Using RAET in Salt is easy, the main difference is that the core dependencies
change, instead of needing pycrypto, M2Crypto, ZeroMQ, and PYZMQ, the packages
`libsodium`_, libnacl, ioflo, and raet are required. Encryption is handled very cleanly
by libnacl, while the queueing and flow control is handled by
ioflo. Distribution packages are forthcoming, but `libsodium`_ can be easily
installed from source, or many distributions do ship packages for it.
The libnacl and ioflo packages can be easily installed from pypi, distribution
packages are in the works.
Once the new deps are installed the 2014.7 release or higher of Salt needs to
be installed.
Once installed, modify the configuration files for the minion and master to
set the transport to raet:
``/etc/salt/master``:
.. code-block:: yaml
transport: raet
``/etc/salt/minion``:
.. code-block:: yaml
transport: raet
Now start salt as it would normally be started, the minion will connect to the
master and share long term keys, which can then in turn be managed via
salt-key. Remote execution and salt states will function in the same way as
with Salt over ZeroMQ.
Limitations
===========
The 2014.7 release of RAET is not complete! The Syndic and Multi Master have
not been completed yet and these are slated for completion in the 2015.5.0
release.
Also, Salt-Raet allows for more control over the client but these hooks have
not been implemented yet, therefore the client still uses the same system
as the ZeroMQ client. This means that the extra reliability that RAET exposes
has not yet been implemented in the CLI client.
Why?
====
Customer and User Request
-------------------------
Why make an alternative transport for Salt? There are many reasons, but the
primary motivation came from customer requests, many large companies came with
requests to run Salt over an alternative transport, the reasoning was varied,
from performance and scaling improvements to licensing concerns. These
customers have partnered with SaltStack to make RAET a reality.
More Capabilities
-----------------
RAET has been designed to allow salt to have greater communication
capabilities. It has been designed to allow for development into features
which our ZeroMQ topologies can't match.
Many of the proposed features are still under development and will be
announced as they enter proof of concept phases, but these features include
`salt-fuse` - a filesystem over salt, `salt-vt` - a parallel api driven shell
over the salt transport and many others.
RAET Reliability
================
RAET is reliable, hence the name (Reliable Asynchronous Event Transport).
The concern posed by some over RAET reliability is based on the fact that
RAET uses UDP instead of TCP and UDP does not have built in reliability.
RAET itself implements the needed reliability layers that are not natively
present in UDP, this allows RAET to dynamically optimize packet delivery
in a way that keeps it both reliable and asynchronous.
RAET and ZeroMQ
===============
When using RAET, ZeroMQ is not required. RAET is a complete networking
replacement. It is noteworthy that RAET is not a ZeroMQ replacement in a
general sense, the ZeroMQ constructs are not reproduced in RAET, but they are
instead implemented in such a way that is specific to Salt's needs.
RAET is primarily an async communication layer over truly async connections,
defaulting to UDP. ZeroMQ is over TCP and abstracts async constructs within the
socket layer.
Salt is not dropping ZeroMQ support and has no immediate plans to do so.
Encryption
==========
RAET uses Dan Bernstein's NACL encryption libraries and `CurveCP`_ handshake.
The libnacl python binding binds to both `libsodium`_ and tweetnacl to execute
the underlying cryptography. This allows us to completely rely on an
externally developed cryptography system.
Programming Intro
=================
.. toctree::
programming_intro
.. _libsodium: http://doc.libsodium.org/
.. _CurveCP: http://curvecp.org/

View file

@ -1,41 +0,0 @@
.. _raet-programming:
=========================
Intro to RAET Programming
=========================
.. note::
This page is still under construction
The first thing to cover is that RAET does not present a socket api, it
presents a queueing api; all messages in RAET are made available via
queues. This is the single most differentiating factor with RAET vs other
networking libraries, instead of making a socket, a stack is created.
Instead of calling send() or recv(), messages are placed on the stack to be
sent and messages that are received appear on the stack.
Different kinds of stacks are also available, currently two stacks exist,
the UDP stack, and the UXD stack. The UDP stack is used to communicate over
udp sockets, and the UXD stack is used to communicate over Unix Domain
Sockets.
The UDP stack runs a context for communicating over networks, while the
UXD stack has contexts for communicating between processes.
UDP Stack Messages
==================
To create a UDP stack in RAET, simply create the stack, manage the queues,
and process messages:
.. code-block:: python
from salt.transport.road.raet import stacking
from salt.transport.road.raet import estating
udp_stack = stacking.StackUdp(ha=('127.0.0.1', 7870))
r_estate = estating.Estate(stack=udp_stack, name='foo', ha=('192.168.42.42', 7870))
msg = {'hello': 'world'}
udp_stack.transmit(msg, udp_stack.estates[r_estate.name])
udp_stack.serviceAll()

View file

@ -95,7 +95,7 @@ redundant master. Both masters are first-class and have rights to the minions.
.. note::
Minions can automatically detect failed masters and attempt to reconnect
to reconnect to them quickly. To enable this functionality, set
to them quickly. To enable this functionality, set
`master_alive_interval` in the minion config and specify a number of
seconds to poll the masters for connection status.

View file

@ -175,9 +175,9 @@ def _install_system_packages(session):
shutil.copyfile(src, dst)
def _install_requirements(session, transport, *extra_requirements):
def _get_distro_pip_constraints(session, transport):
# Install requirements
distro_requirements = None
distro_constraints = []
if transport == 'tcp':
# The TCP requirements are the exact same requirements as the ZeroMQ ones
@ -186,56 +186,71 @@ def _install_requirements(session, transport, *extra_requirements):
pydir = _get_pydir(session)
if IS_WINDOWS:
_distro_requirements = os.path.join(REPO_ROOT,
'requirements',
'static',
pydir,
'{}-windows.txt'.format(transport))
if os.path.exists(_distro_requirements):
if transport == 'raet':
# Because we still install ioflo, which requires setuptools-git, which fails with a
# weird SSL certificate issue(weird because the requirements file requirements install
# fine), let's previously have setuptools-git installed
session.install('--progress-bar=off', 'setuptools-git', silent=PIP_INSTALL_SILENT)
distro_requirements = _distro_requirements
_distro_constraints = os.path.join(REPO_ROOT,
'requirements',
'static',
pydir,
'{}-windows.txt'.format(transport))
if os.path.exists(_distro_constraints):
distro_constraints.append(_distro_constraints)
_distro_constraints = os.path.join(REPO_ROOT,
'requirements',
'static',
pydir,
'windows.txt')
if os.path.exists(_distro_constraints):
distro_constraints.append(_distro_constraints)
else:
_install_system_packages(session)
distro = _get_distro_info(session)
distro_keys = [
'linux',
'{id}'.format(**distro),
'{id}-{version}'.format(**distro),
'{id}-{version_parts[major]}'.format(**distro)
]
for distro_key in distro_keys:
_distro_requirements = os.path.join(REPO_ROOT,
'requirements',
'static',
pydir,
'{}-{}.txt'.format(transport, distro_key))
if os.path.exists(_distro_requirements):
distro_requirements = _distro_requirements
break
_distro_constraints = os.path.join(REPO_ROOT,
'requirements',
'static',
pydir,
'{}.txt'.format(distro_key))
if os.path.exists(_distro_constraints):
distro_constraints.append(_distro_constraints)
_distro_constraints = os.path.join(REPO_ROOT,
'requirements',
'static',
pydir,
'{}-{}.txt'.format(transport, distro_key))
if os.path.exists(_distro_constraints):
distro_constraints.append(_distro_constraints)
return distro_constraints
if distro_requirements is not None:
_requirements_files = [distro_requirements]
requirements_files = []
else:
_requirements_files = [
os.path.join(REPO_ROOT, 'requirements', 'pytest.txt')
def _install_requirements(session, transport, *extra_requirements):
# Install requirements
distro_constraints = _get_distro_pip_constraints(session, transport)
_requirements_files = [
os.path.join(REPO_ROOT, 'requirements', 'base.txt'),
os.path.join(REPO_ROOT, 'requirements', 'zeromq.txt'),
os.path.join(REPO_ROOT, 'requirements', 'pytest.txt')
]
if sys.platform.startswith('linux'):
requirements_files = [
os.path.join(REPO_ROOT, 'requirements', 'static', 'linux.in')
]
elif sys.platform.startswith('win'):
requirements_files = [
os.path.join(REPO_ROOT, 'pkg', 'windows', 'req.txt'),
os.path.join(REPO_ROOT, 'requirements', 'static', 'windows.in')
]
elif sys.platform.startswith('darwin'):
requirements_files = [
os.path.join(REPO_ROOT, 'pkg', 'osx', 'req.txt'),
os.path.join(REPO_ROOT, 'pkg', 'osx', 'req_ext.txt'),
os.path.join(REPO_ROOT, 'requirements', 'static', 'osx.in')
]
if sys.platform.startswith('linux'):
requirements_files = [
os.path.join(REPO_ROOT, 'requirements', 'tests.txt')
]
elif sys.platform.startswith('win'):
requirements_files = [
os.path.join(REPO_ROOT, 'pkg', 'windows', 'req.txt'),
]
elif sys.platform.startswith('darwin'):
requirements_files = [
os.path.join(REPO_ROOT, 'pkg', 'osx', 'req.txt'),
os.path.join(REPO_ROOT, 'pkg', 'osx', 'req_ext.txt'),
]
while True:
if not requirements_files:
@ -259,10 +274,25 @@ def _install_requirements(session, transport, *extra_requirements):
continue
for requirements_file in _requirements_files:
session.install('--progress-bar=off', '-r', requirements_file, silent=PIP_INSTALL_SILENT)
install_command = [
'--progress-bar=off', '-r', requirements_file
]
for distro_constraint in distro_constraints:
install_command.extend([
'--constraint', distro_constraint
])
session.install(*install_command, silent=PIP_INSTALL_SILENT)
if extra_requirements:
session.install('--progress-bar=off', *extra_requirements, silent=PIP_INSTALL_SILENT)
install_command = [
'--progress-bar=off',
]
for distro_constraint in distro_constraints:
install_command.extend([
'--constraint', distro_constraint
])
install_command += list(extra_requirements)
session.install(*install_command, silent=PIP_INSTALL_SILENT)
def _run_with_coverage(session, *test_cmd):
@ -365,7 +395,7 @@ def _runtests(session, coverage, cmd_args):
@nox.session(python=_PYTHON_VERSIONS, name='runtests-parametrized')
@nox.parametrize('coverage', [False, True])
@nox.parametrize('transport', ['zeromq', 'raet', 'tcp'])
@nox.parametrize('transport', ['zeromq', 'tcp'])
@nox.parametrize('crypto', [None, 'm2crypto', 'pycryptodomex'])
def runtests_parametrized(session, coverage, transport, crypto):
# Install requirements
@ -376,7 +406,16 @@ def runtests_parametrized(session, coverage, transport, crypto):
session.run('pip', 'uninstall', '-y', 'pycrypto', 'pycryptodome', 'pycryptodomex', silent=True)
else:
session.run('pip', 'uninstall', '-y', 'm2crypto', silent=True)
session.install('--progress-bar=off', crypto, silent=PIP_INSTALL_SILENT)
distro_constraints = _get_distro_pip_constraints(session, transport)
install_command = [
'--progress-bar=off',
]
for distro_constraint in distro_constraints:
install_command.extend([
'--constraint', distro_constraint
])
install_command.append(crypto)
session.install(*install_command, silent=PIP_INSTALL_SILENT)
cmd_args = [
'--tests-logfile={}'.format(
@ -429,20 +468,6 @@ def runtests_zeromq(session, coverage):
)
@nox.session(python=_PYTHON_VERSIONS, name='runtests-raet')
@nox.parametrize('coverage', [False, True])
def runtests_raet(session, coverage):
'''
runtests.py session with raet transport and default crypto
'''
session.notify(
'runtests-parametrized-{}(coverage={}, crypto=None, transport=\'raet\')'.format(
session.python,
coverage
)
)
@nox.session(python=_PYTHON_VERSIONS, name='runtests-m2crypto')
@nox.parametrize('coverage', [False, True])
def runtests_m2crypto(session, coverage):
@ -485,20 +510,6 @@ def runtests_zeromq_m2crypto(session, coverage):
)
@nox.session(python=_PYTHON_VERSIONS, name='runtests-raet-m2crypto')
@nox.parametrize('coverage', [False, True])
def runtests_raet_m2crypto(session, coverage):
'''
runtests.py session with raet transport and m2crypto
'''
session.notify(
'runtests-parametrized-{}(coverage={}, crypto=\'m2crypto\', transport=\'raet\')'.format(
session.python,
coverage
)
)
@nox.session(python=_PYTHON_VERSIONS, name='runtests-pycryptodomex')
@nox.parametrize('coverage', [False, True])
def runtests_pycryptodomex(session, coverage):
@ -541,20 +552,6 @@ def runtests_zeromq_pycryptodomex(session, coverage):
)
@nox.session(python=_PYTHON_VERSIONS, name='runtests-raet-pycryptodomex')
@nox.parametrize('coverage', [False, True])
def runtests_raet_pycryptodomex(session, coverage):
'''
runtests.py session with raet transport and pycryptodomex
'''
session.notify(
'runtests-parametrized-{}(coverage={}, crypto=\'pycryptodomex\', transport=\'raet\')'.format(
session.python,
coverage
)
)
@nox.session(python=_PYTHON_VERSIONS, name='runtests-cloud')
@nox.parametrize('coverage', [False, True])
def runtests_cloud(session, coverage):
@ -593,7 +590,7 @@ def runtests_tornado(session, coverage):
@nox.session(python=_PYTHON_VERSIONS, name='pytest-parametrized')
@nox.parametrize('coverage', [False, True])
@nox.parametrize('transport', ['zeromq', 'raet', 'tcp'])
@nox.parametrize('transport', ['zeromq', 'tcp'])
@nox.parametrize('crypto', [None, 'm2crypto', 'pycryptodomex'])
def pytest_parametrized(session, coverage, transport, crypto):
# Install requirements
@ -604,7 +601,16 @@ def pytest_parametrized(session, coverage, transport, crypto):
session.run('pip', 'uninstall', '-y', 'pycrypto', 'pycryptodome', 'pycryptodomex', silent=True)
else:
session.run('pip', 'uninstall', '-y', 'm2crypto', silent=True)
session.install('--progress-bar=off', crypto, silent=PIP_INSTALL_SILENT)
distro_constraints = _get_distro_pip_constraints(session, transport)
install_command = [
'--progress-bar=off',
]
for distro_constraint in distro_constraints:
install_command.extend([
'--constraint', distro_constraint
])
install_command.append(crypto)
session.install(*install_command, silent=PIP_INSTALL_SILENT)
cmd_args = [
'--rootdir', REPO_ROOT,
@ -661,20 +667,6 @@ def pytest_zeromq(session, coverage):
)
@nox.session(python=_PYTHON_VERSIONS, name='pytest-raet')
@nox.parametrize('coverage', [False, True])
def pytest_raet(session, coverage):
'''
pytest session with raet transport and default crypto
'''
session.notify(
'pytest-parametrized-{}(coverage={}, crypto=None, transport=\'raet\')'.format(
session.python,
coverage
)
)
@nox.session(python=_PYTHON_VERSIONS, name='pytest-m2crypto')
@nox.parametrize('coverage', [False, True])
def pytest_m2crypto(session, coverage):
@ -717,20 +709,6 @@ def pytest_zeromq_m2crypto(session, coverage):
)
@nox.session(python=_PYTHON_VERSIONS, name='pytest-raet-m2crypto')
@nox.parametrize('coverage', [False, True])
def pytest_raet_m2crypto(session, coverage):
'''
pytest session with raet transport and m2crypto
'''
session.notify(
'pytest-parametrized-{}(coverage={}, crypto=\'m2crypto\', transport=\'raet\')'.format(
session.python,
coverage
)
)
@nox.session(python=_PYTHON_VERSIONS, name='pytest-pycryptodomex')
@nox.parametrize('coverage', [False, True])
def pytest_pycryptodomex(session, coverage):
@ -773,20 +751,6 @@ def pytest_zeromq_pycryptodomex(session, coverage):
)
@nox.session(python=_PYTHON_VERSIONS, name='pytest-raet-pycryptodomex')
@nox.parametrize('coverage', [False, True])
def pytest_raet_pycryptodomex(session, coverage):
'''
pytest session with raet transport and pycryptodomex
'''
session.notify(
'pytest-parametrized-{}(coverage={}, crypto=\'pycryptodomex\', transport=\'raet\')'.format(
session.python,
coverage
)
)
@nox.session(python=_PYTHON_VERSIONS, name='pytest-cloud')
@nox.parametrize('coverage', [False, True])
def pytest_cloud(session, coverage):
@ -851,7 +815,6 @@ def _pytest(session, coverage, cmd_args):
def _lint(session, rcfile, flags, paths):
_install_requirements(session, 'zeromq')
_install_requirements(session, 'raet')
session.install('--progress-bar=off', '-r', 'requirements/static/{}/lint.txt'.format(_get_pydir(session)), silent=PIP_INSTALL_SILENT)
session.run('pylint', '--version')
pylint_report_path = os.environ.get('PYLINT_REPORT')

View file

@ -24,7 +24,7 @@ pyobjc==5.1.2
pyopenssl
python-dateutil==2.8.0
python-gnupg==0.4.4
pyyaml==3.13
pyyaml==5.1.2
pyzmq==18.0.1
requests==2.21.0
setproctitle

View file

@ -28,7 +28,7 @@ pyopenssl==19.0.0
python-dateutil==2.8.0
python-gnupg==0.4.4
pythonnet==2.3.0
pyyaml==3.13
pyyaml==5.1.2
pyzmq==18.0.1
requests==2.21.0
setproctitle

View file

@ -2,7 +2,7 @@ Jinja2
# This should be changed to msgpack-python for Packages
# msgpack-python>0.3,!=0.5.5
msgpack>=0.5,!=0.5.5
PyYAML<5.1
PyYAML
MarkupSafe
requests>=1.0.0
tornado>=4.2.1,<6.0; python_version < '3'

View file

@ -1,12 +0,0 @@
-r base.txt
mock>=2.0.0
SaltPyLint>=v2017.3.6
testinfra>=1.7.0,!=1.17.0
# httpretty Needs to be here for now even though it's a dependency of boto.
# A pip install on a fresh system will decide to target httpretty 0.8.10 to
# satisfy other requirements, and httpretty 0.8.10 has bugs in setup.py that
# prevent it from being successfully installed (at least on Python 3.4).
httpretty; python_version >= '3.4'
pylint==1.6.5

View file

@ -1,2 +0,0 @@
# This is a legacy file, use dev.txt
-r dev.txt

View file

@ -1,2 +0,0 @@
# This is a legacy file, use dev.txt
-r dev.txt

View file

@ -1,9 +0,0 @@
mysql-python
timelib
yappi>=0.8.2
--allow-unverified python-novaclient>2.17.0
--allow-unverified python-neutronclient>2.3.6
python-gnupg
cherrypy>=3.2.2,<18.0.0; python_version < '3.5'
cherrypy>=3.2.2; python_version >= '3.5'
libnacl

View file

@ -1,5 +0,0 @@
-r base.txt
libnacl>=1.0.0
ioflo>=1.1.7
raet>=0.6.0

View file

@ -1,42 +0,0 @@
# This is a compilation of requirements installed on salt-jenkins git.salt state run
apache-libcloud==2.0.0
boto3
boto>=2.46.0
cffi
cherrypy==17.3.0
croniter>=0.3.0,!=0.3.22
dnspython
docker
futures>=2.0; python_version < '3.0'
GitPython
jsonschema<=2.6.0
junos-eznc
jxmlease
kazoo
keyring==5.7.1
kubernetes<4.0
mock>=2.0.0; python_version < '3.6'
more-itertools==5.0.0
moto
msgpack-python >= 0.4.2, != 0.5.5
paramiko==2.1.2; python_version < '3.7'
paramiko>=2.2.3; python_version >= '3.7'
psutil
# Let's install cryptodome instead of pycrypto because of pycrypto's outstanding security issues
# PyCrypto, if pulled, will be removed from the generated static requirements
pycryptodome
pyinotify
pyopenssl
python-etcd>0.4.2
python-gnupg
pyvmomi
requests
rfc3987
salttesting==2017.6.1
setproctitle
strict_rfc3339
supervisor==3.3.5; python_version < '3'
timelib
tornado<5.0
virtualenv
watchdog

Some files were not shown because too many files have changed in this diff.