Merge branch 'master' into master

Daniel Wozniak 2019-11-20 01:05:39 -07:00 committed by GitHub
commit 82794696fb
455 changed files with 8338 additions and 39961 deletions


@@ -2,20 +2,22 @@
 // Define the maximum time, in hours, that a test run should run for
 def global_timeout = 2
-def salt_target_branch = 'master'
 properties([
     buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '10')),
 ])
-// Be sure to cancel any previously running builds
-def buildNumber = env.BUILD_NUMBER as int
-if (buildNumber > 1) {
-    // This will cancel the previous build which also defined a matching milestone
-    milestone(buildNumber - 1)
-}
-// Define a milestone for this build so that, if another build starts, this one will be aborted
-milestone(buildNumber)
+// Only set milestones on PR builds
+if (env.CHANGE_ID) {
+    // Be sure to cancel any previously running builds
+    def buildNumber = env.BUILD_NUMBER as int
+    if (buildNumber > 1) {
+        // This will cancel the previous build which also defined a matching milestone
+        milestone(buildNumber - 1)
+    }
+    // Define a milestone for this build so that, if another build starts, this one will be aborted
+    milestone(buildNumber)
+}
 def shell_header

@@ -41,13 +43,22 @@ wrappedNode('docs', global_timeout, '#jenkins-prod-pr') {
         '''
     }
-    stage('Build') {
+    stage('Build HTML Docs') {
         sh shell_header + '''
         eval "$(pyenv init -)"
         pyenv shell 3.6.8
-        nox -e docs
+        nox -e 'docs-html(compress=True)'
         '''
-        archiveArtifacts artifacts: 'doc/doc-archive.tar.gz'
+        archiveArtifacts artifacts: 'doc/html-archive.tar.gz'
+    }
+    stage('Build Man Pages') {
+        sh shell_header + '''
+        eval "$(pyenv init -)"
+        pyenv shell 3.6.8
+        nox -e 'docs-man(compress=True, update=False)'
+        '''
+        archiveArtifacts artifacts: 'doc/man-archive.tar.gz'
     }
 }
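Every pipeline touched by this commit gains the same guard: the milestone bookkeeping now runs only for pull-request builds (where Jenkins sets env.CHANGE_ID), so branch builds no longer abort each other. As a standalone illustration, here is the pattern on its own, a minimal sketch assuming a multibranch job where env.CHANGE_ID is set only for PRs; the kitchen pipelines below additionally parameterize the offset with concurrent_builds:

// Illustrative sketch of the PR-only milestone guard used throughout this commit.
def concurrent_builds = 1                      // how many newer builds may start before an older one is aborted
if (env.CHANGE_ID) {                           // only pull-request builds define milestones
    def buildNumber = env.BUILD_NUMBER as int
    if (buildNumber > concurrent_builds) {
        // Passing milestone N aborts any older build that defined milestone N and is still running.
        milestone(buildNumber - concurrent_builds)
    }
    // Record this build's own milestone so the next build can abort it in turn.
    milestone(buildNumber)
}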


@@ -1,159 +1,48 @@
-@Library('salt@1.1') _
+@Library('salt@master-1.3') _
 // Define the maximum time, in hours, that a test run should run for
 def testrun_timeout = 6
-// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
-// hour to allow for artifacts to be downloaded, if possible.
-def global_timeout = testrun_timeout + 1;
 def distro_name = 'amazon'
 def distro_version = '1'
 def python_version = 'py2'
-def test_transport = 'ZeroMQ'
-def salt_target_branch = 'master'
-def golden_images_branch = '2019.2'
+def nox_env_name = 'runtests-zeromq'
+def golden_images_branch = 'master'
 def nox_passthrough_opts = '--ssh-tests'
+def concurrent_builds = 1
+def use_spot_instances = true
+def jenkins_slave_label = 'kitchen-slave'
 properties([
-    buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '10')),
+    buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
     parameters([
         booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
     ])
 ])
-// Be sure to cancel any previously running builds
-def buildNumber = env.BUILD_NUMBER as int
-if (buildNumber > 1) {
-    // This will cancel the previous build which also defined a matching milestone
-    milestone(buildNumber - 1)
-}
-// Define a milestone for this build so that, if another build starts, this one will be aborted
-milestone(buildNumber)
+// Only set milestones on PR builds
+if (env.CHANGE_ID) {
+    // Be sure to cancel any previously running builds
+    def buildNumber = env.BUILD_NUMBER as int
+    if (buildNumber > concurrent_builds) {
+        // This will cancel the previous build which also defined a matching milestone
+        milestone(buildNumber - concurrent_builds)
+    }
+    // Define a milestone for this build so that, if another build starts, this one will be aborted
+    milestone(buildNumber)
+}
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
-    }
-}
+runTests(
+    env: env,
+    distro_name: distro_name,
+    distro_version: distro_version,
+    python_version: python_version,
+    golden_images_branch: golden_images_branch,
+    nox_env_name: nox_env_name,
+    nox_passthrough_opts: nox_passthrough_opts,
+    testrun_timeout: testrun_timeout,
+    run_full: params.runFull,
+    use_spot_instances: use_spot_instances,
+    jenkins_slave_label: jenkins_slave_label)
 // vim: ft=groovy
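Each of the per-platform kitchen pipelines in this commit is reduced to the variable block above plus a single call into the salt@master-1.3 shared Jenkins library. The library itself is not part of this diff, so the following is only a hypothetical sketch of what the runTests entry point's shape could look like, inferred from the named arguments used at the call sites; it is not the actual library code:

// vars/runTests.groovy -- HYPOTHETICAL sketch only; the real step ships with the
// shared library referenced by @Library('salt@master-1.3') and is not shown in this diff.
def call(Map options = [:]) {
    // Argument names taken verbatim from the call sites in this commit.
    def buildEnv             = options.env
    def distroName           = options.distro_name
    def distroVersion        = options.distro_version
    def pythonVersion        = options.python_version
    def goldenImagesBranch   = options.golden_images_branch
    def noxEnvName           = options.nox_env_name
    def noxPassthroughOpts   = options.nox_passthrough_opts
    def testrunTimeout       = options.testrun_timeout
    def runFull              = options.run_full
    def useSpotInstances     = options.use_spot_instances
    def jenkinsSlaveLabel    = options.jenkins_slave_label
    def kitchenPlatformsFile = options.kitchen_platforms_file   // passed only by some pipelines later in this commit
    def extraCodecovFlags    = options.extra_codecov_flags      // passed only by some pipelines later in this commit

    // Presumably wraps what the deleted inline pipelines did by hand: create a kitchen VM,
    // converge it, run the selected nox env, download artifacts, upload coverage, destroy the VM.
    echo "runTests: ${noxEnvName} (${pythonVersion}) on ${distroName}-${distroVersion}"
}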


@@ -1,159 +1,48 @@
-@Library('salt@1.1') _
+@Library('salt@master-1.3') _
 // Define the maximum time, in hours, that a test run should run for
 def testrun_timeout = 6
-// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
-// hour to allow for artifacts to be downloaded, if possible.
-def global_timeout = testrun_timeout + 1;
 def distro_name = 'amazon'
 def distro_version = '2'
 def python_version = 'py2'
-def test_transport = 'ZeroMQ'
-def salt_target_branch = 'master'
-def golden_images_branch = '2019.2'
+def nox_env_name = 'runtests-zeromq'
+def golden_images_branch = 'master'
 def nox_passthrough_opts = '--ssh-tests'
+def concurrent_builds = 1
+def use_spot_instances = true
+def jenkins_slave_label = 'kitchen-slave'
 properties([
-    buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '10')),
+    buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
     parameters([
         booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
     ])
 ])
-// Be sure to cancel any previously running builds
-def buildNumber = env.BUILD_NUMBER as int
-if (buildNumber > 1) {
-    // This will cancel the previous build which also defined a matching milestone
-    milestone(buildNumber - 1)
-}
-// Define a milestone for this build so that, if another build starts, this one will be aborted
-milestone(buildNumber)
+// Only set milestones on PR builds
+if (env.CHANGE_ID) {
+    // Be sure to cancel any previously running builds
+    def buildNumber = env.BUILD_NUMBER as int
+    if (buildNumber > concurrent_builds) {
+        // This will cancel the previous build which also defined a matching milestone
+        milestone(buildNumber - concurrent_builds)
+    }
+    // Define a milestone for this build so that, if another build starts, this one will be aborted
+    milestone(buildNumber)
+}
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
-    }
-}
+runTests(
+    env: env,
+    distro_name: distro_name,
+    distro_version: distro_version,
+    python_version: python_version,
+    golden_images_branch: golden_images_branch,
+    nox_env_name: nox_env_name,
+    nox_passthrough_opts: nox_passthrough_opts,
+    testrun_timeout: testrun_timeout,
+    run_full: params.runFull,
+    use_spot_instances: use_spot_instances,
+    jenkins_slave_label: jenkins_slave_label)
 // vim: ft=groovy


@@ -1,159 +1,48 @@
-@Library('salt@1.1') _
+@Library('salt@master-1.3') _
 // Define the maximum time, in hours, that a test run should run for
 def testrun_timeout = 6
-// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
-// hour to allow for artifacts to be downloaded, if possible.
-def global_timeout = testrun_timeout + 1;
 def distro_name = 'amazon'
 def distro_version = '2'
 def python_version = 'py3'
-def test_transport = 'ZeroMQ'
-def salt_target_branch = 'master'
-def golden_images_branch = '2019.2'
+def nox_env_name = 'runtests-zeromq'
+def golden_images_branch = 'master'
 def nox_passthrough_opts = '--ssh-tests'
+def concurrent_builds = 1
+def use_spot_instances = true
+def jenkins_slave_label = 'kitchen-slave'
 properties([
-    buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '10')),
+    buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
     parameters([
         booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
     ])
 ])
-// Be sure to cancel any previously running builds
-def buildNumber = env.BUILD_NUMBER as int
-if (buildNumber > 1) {
-    // This will cancel the previous build which also defined a matching milestone
-    milestone(buildNumber - 1)
-}
-// Define a milestone for this build so that, if another build starts, this one will be aborted
-milestone(buildNumber)
+// Only set milestones on PR builds
+if (env.CHANGE_ID) {
+    // Be sure to cancel any previously running builds
+    def buildNumber = env.BUILD_NUMBER as int
+    if (buildNumber > concurrent_builds) {
+        // This will cancel the previous build which also defined a matching milestone
+        milestone(buildNumber - concurrent_builds)
+    }
+    // Define a milestone for this build so that, if another build starts, this one will be aborted
+    milestone(buildNumber)
+}
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
-    }
-}
+runTests(
+    env: env,
+    distro_name: distro_name,
+    distro_version: distro_version,
+    python_version: python_version,
+    golden_images_branch: golden_images_branch,
+    nox_env_name: nox_env_name,
+    nox_passthrough_opts: nox_passthrough_opts,
+    testrun_timeout: testrun_timeout,
+    run_full: params.runFull,
+    use_spot_instances: use_spot_instances,
+    jenkins_slave_label: jenkins_slave_label)
 // vim: ft=groovy


@@ -1,159 +1,48 @@
-@Library('salt@1.1') _
+@Library('salt@master-1.3') _
 // Define the maximum time, in hours, that a test run should run for
 def testrun_timeout = 6
-// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
-// hour to allow for artifacts to be downloaded, if possible.
-def global_timeout = testrun_timeout + 1;
 def distro_name = 'arch'
 def distro_version = 'lts'
 def python_version = 'py2'
-def test_transport = 'ZeroMQ'
-def salt_target_branch = 'master'
-def golden_images_branch = '2019.2'
+def nox_env_name = 'runtests-zeromq'
+def golden_images_branch = 'master'
 def nox_passthrough_opts = '-n integration.modules.test_pkg'
+def concurrent_builds = 1
+def use_spot_instances = true
+def jenkins_slave_label = 'kitchen-slave'
 properties([
-    buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '10')),
+    buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
     parameters([
         booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
     ])
 ])
-// Be sure to cancel any previously running builds
-def buildNumber = env.BUILD_NUMBER as int
-if (buildNumber > 1) {
-    // This will cancel the previous build which also defined a matching milestone
-    milestone(buildNumber - 1)
-}
-// Define a milestone for this build so that, if another build starts, this one will be aborted
-milestone(buildNumber)
+// Only set milestones on PR builds
+if (env.CHANGE_ID) {
+    // Be sure to cancel any previously running builds
+    def buildNumber = env.BUILD_NUMBER as int
+    if (buildNumber > concurrent_builds) {
+        // This will cancel the previous build which also defined a matching milestone
+        milestone(buildNumber - concurrent_builds)
+    }
+    // Define a milestone for this build so that, if another build starts, this one will be aborted
+    milestone(buildNumber)
+}
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
-    }
-}
+runTests(
+    env: env,
+    distro_name: distro_name,
+    distro_version: distro_version,
+    python_version: python_version,
+    golden_images_branch: golden_images_branch,
+    nox_env_name: nox_env_name,
+    nox_passthrough_opts: nox_passthrough_opts,
+    testrun_timeout: testrun_timeout,
+    run_full: params.runFull,
+    use_spot_instances: use_spot_instances,
+    jenkins_slave_label: jenkins_slave_label)
 // vim: ft=groovy


@@ -1,159 +1,48 @@
-@Library('salt@1.1') _
+@Library('salt@master-1.3') _
 // Define the maximum time, in hours, that a test run should run for
 def testrun_timeout = 6
-// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
-// hour to allow for artifacts to be downloaded, if possible.
-def global_timeout = testrun_timeout + 1;
 def distro_name = 'arch'
 def distro_version = 'lts'
 def python_version = 'py3'
-def test_transport = 'ZeroMQ'
-def salt_target_branch = 'master'
-def golden_images_branch = '2019.2'
+def nox_env_name = 'runtests-zeromq'
+def golden_images_branch = 'master'
 def nox_passthrough_opts = '-n integration.modules.test_pkg'
+def concurrent_builds = 1
+def use_spot_instances = true
+def jenkins_slave_label = 'kitchen-slave'
 properties([
-    buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '10')),
+    buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
     parameters([
         booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
     ])
 ])
-// Be sure to cancel any previously running builds
-def buildNumber = env.BUILD_NUMBER as int
-if (buildNumber > 1) {
-    // This will cancel the previous build which also defined a matching milestone
-    milestone(buildNumber - 1)
-}
-// Define a milestone for this build so that, if another build starts, this one will be aborted
-milestone(buildNumber)
+// Only set milestones on PR builds
+if (env.CHANGE_ID) {
+    // Be sure to cancel any previously running builds
+    def buildNumber = env.BUILD_NUMBER as int
+    if (buildNumber > concurrent_builds) {
+        // This will cancel the previous build which also defined a matching milestone
+        milestone(buildNumber - concurrent_builds)
+    }
+    // Define a milestone for this build so that, if another build starts, this one will be aborted
+    milestone(buildNumber)
+}
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
-    }
-}
+runTests(
+    env: env,
+    distro_name: distro_name,
+    distro_version: distro_version,
+    python_version: python_version,
+    golden_images_branch: golden_images_branch,
+    nox_env_name: nox_env_name,
+    nox_passthrough_opts: nox_passthrough_opts,
+    testrun_timeout: testrun_timeout,
+    run_full: params.runFull,
+    use_spot_instances: use_spot_instances,
+    jenkins_slave_label: jenkins_slave_label)
 // vim: ft=groovy


@@ -1,159 +1,48 @@
-@Library('salt@1.1') _
+@Library('salt@master-1.3') _
 // Define the maximum time, in hours, that a test run should run for
 def testrun_timeout = 6
-// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
-// hour to allow for artifacts to be downloaded, if possible.
-def global_timeout = testrun_timeout + 1;
 def distro_name = 'centos'
 def distro_version = '6'
 def python_version = 'py2'
-def test_transport = 'ZeroMQ'
-def salt_target_branch = 'master'
-def golden_images_branch = '2019.2'
+def nox_env_name = 'runtests-zeromq'
+def golden_images_branch = 'master'
 def nox_passthrough_opts = '--ssh-tests'
+def concurrent_builds = 1
+def use_spot_instances = true
+def jenkins_slave_label = 'kitchen-slave'
 properties([
-    buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '10')),
+    buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
     parameters([
         booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
     ])
 ])
-// Be sure to cancel any previously running builds
-def buildNumber = env.BUILD_NUMBER as int
-if (buildNumber > 1) {
-    // This will cancel the previous build which also defined a matching milestone
-    milestone(buildNumber - 1)
-}
-// Define a milestone for this build so that, if another build starts, this one will be aborted
-milestone(buildNumber)
+// Only set milestones on PR builds
+if (env.CHANGE_ID) {
+    // Be sure to cancel any previously running builds
+    def buildNumber = env.BUILD_NUMBER as int
+    if (buildNumber > concurrent_builds) {
+        // This will cancel the previous build which also defined a matching milestone
+        milestone(buildNumber - concurrent_builds)
+    }
+    // Define a milestone for this build so that, if another build starts, this one will be aborted
+    milestone(buildNumber)
+}
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
-    }
-}
+runTests(
+    env: env,
+    distro_name: distro_name,
+    distro_version: distro_version,
+    python_version: python_version,
+    golden_images_branch: golden_images_branch,
+    nox_env_name: nox_env_name,
+    nox_passthrough_opts: nox_passthrough_opts,
+    testrun_timeout: testrun_timeout,
+    run_full: params.runFull,
+    use_spot_instances: use_spot_instances,
+    jenkins_slave_label: jenkins_slave_label)
 // vim: ft=groovy


@@ -1,159 +1,48 @@
-@Library('salt@1.1') _
+@Library('salt@master-1.3') _
 // Define the maximum time, in hours, that a test run should run for
 def testrun_timeout = 6
-// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
-// hour to allow for artifacts to be downloaded, if possible.
-def global_timeout = testrun_timeout + 1;
 def distro_name = 'centos'
 def distro_version = '7'
 def python_version = 'py2'
-def test_transport = 'ZeroMQ'
-def salt_target_branch = 'master'
-def golden_images_branch = '2019.2'
+def nox_env_name = 'runtests-zeromq'
+def golden_images_branch = 'master'
 def nox_passthrough_opts = '--ssh-tests'
+def concurrent_builds = 1
+def use_spot_instances = true
+def jenkins_slave_label = 'kitchen-slave'
 properties([
-    buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '10')),
+    buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
     parameters([
         booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
    ])
 ])
-// Be sure to cancel any previously running builds
-def buildNumber = env.BUILD_NUMBER as int
-if (buildNumber > 1) {
-    // This will cancel the previous build which also defined a matching milestone
-    milestone(buildNumber - 1)
-}
-// Define a milestone for this build so that, if another build starts, this one will be aborted
-milestone(buildNumber)
+// Only set milestones on PR builds
+if (env.CHANGE_ID) {
+    // Be sure to cancel any previously running builds
+    def buildNumber = env.BUILD_NUMBER as int
+    if (buildNumber > concurrent_builds) {
+        // This will cancel the previous build which also defined a matching milestone
+        milestone(buildNumber - concurrent_builds)
+    }
+    // Define a milestone for this build so that, if another build starts, this one will be aborted
+    milestone(buildNumber)
+}
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
-    }
-}
+runTests(
+    env: env,
+    distro_name: distro_name,
+    distro_version: distro_version,
+    python_version: python_version,
+    golden_images_branch: golden_images_branch,
+    nox_env_name: nox_env_name,
+    nox_passthrough_opts: nox_passthrough_opts,
+    testrun_timeout: testrun_timeout,
+    run_full: params.runFull,
+    use_spot_instances: use_spot_instances,
+    jenkins_slave_label: jenkins_slave_label)
 // vim: ft=groovy


@@ -0,0 +1,37 @@
@Library('salt@master-1.3') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
def distro_name = 'centos'
def distro_version = '7'
def python_version = 'py2'
def nox_env_name = 'runtests-cloud'
def golden_images_branch = 'master'
def nox_passthrough_opts = ''
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'
def kitchen_platforms_file = '/var/jenkins/workspace/nox-cloud-platforms.yml'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
parameters([
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
])
])
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label,
kitchen_platforms_file: kitchen_platforms_file)
// vim: ft=groovy


@@ -0,0 +1,48 @@
@Library('salt@master-1.3') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
def distro_name = 'centos'
def distro_version = '7'
def python_version = 'py2'
def nox_env_name = 'runtests-zeromq-m2crypto'
def golden_images_branch = 'master'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
parameters([
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
])
])
// Only set milestones on PR builds
if (env.CHANGE_ID) {
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)
// vim: ft=groovy


@@ -0,0 +1,49 @@
@Library('salt@master-1.3') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
def distro_name = 'centos'
def distro_version = '7'
def python_version = 'py2'
def nox_env_name = 'runtests-zeromq'
def golden_images_branch = 'master'
def nox_passthrough_opts = '--proxy'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
parameters([
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
])
])
// Only set milestones on PR builds
if (env.CHANGE_ID) {
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label,
extra_codecov_flags: ["proxy"])
// vim: ft=groovy


@@ -0,0 +1,48 @@
@Library('salt@master-1.3') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
def distro_name = 'centos'
def distro_version = '7'
def python_version = 'py2'
def nox_env_name = 'runtests-zeromq-pycryptodomex'
def golden_images_branch = 'master'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
parameters([
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
])
])
// Only set milestones on PR builds
if (env.CHANGE_ID) {
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)
// vim: ft=groovy


@ -1,159 +1,48 @@
@Library('salt@1.1') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'centos'
def distro_version = '7'
def python_version = 'py2'
def test_transport = 'TCP'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--ssh-tests'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '10')),
parameters([
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
])
])
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
// vim: ft=groovy

@Library('salt@master-1.3') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
def distro_name = 'centos'
def distro_version = '7'
def python_version = 'py2'
def nox_env_name = 'runtests-tcp'
def golden_images_branch = 'master'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
parameters([
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
])
])
// Only set milestones on PR builds
if (env.CHANGE_ID) {
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)
// vim: ft=groovy
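The removed pipeline derived the nox session from the transport at runtime (NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}), whereas the replacement states nox_env_name directly. For the two transports used in these files the results coincide, as this plain-Groovy check illustrates:

// The hard-coded nox_env_name values match what the old expression produced.
['TCP': 'runtests-tcp', 'ZeroMQ': 'runtests-zeromq'].each { transport, nox_env_name ->
    assert "runtests-${transport.toLowerCase()}".toString() == nox_env_name
}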


@ -0,0 +1,48 @@
@Library('salt@master-1.3') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
def distro_name = 'centos'
def distro_version = '7'
def python_version = 'py2'
def nox_env_name = 'runtests-tornado'
def golden_images_branch = 'master'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
parameters([
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
])
])
// Only set milestones on PR builds
if (env.CHANGE_ID) {
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)
// vim: ft=groovy


@ -1,159 +1,48 @@
@Library('salt@1.1') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'centos'
def distro_version = '7'
def python_version = 'py3'
def test_transport = 'ZeroMQ'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--ssh-tests'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '10')),
parameters([
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
])
])
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
// vim: ft=groovy

@Library('salt@master-1.3') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
def distro_name = 'centos'
def distro_version = '7'
def python_version = 'py3'
def nox_env_name = 'runtests-zeromq'
def golden_images_branch = 'master'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
parameters([
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
])
])
// Only set milestones on PR builds
if (env.CHANGE_ID) {
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)
// vim: ft=groovy


@ -0,0 +1,37 @@
@Library('salt@master-1.3') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
def distro_name = 'centos'
def distro_version = '7'
def python_version = 'py3'
def nox_env_name = 'runtests-cloud'
def golden_images_branch = 'master'
def nox_passthrough_opts = ''
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'
def kitchen_platforms_file = '/var/jenkins/workspace/nox-cloud-platforms.yml'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
parameters([
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
])
])
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label,
kitchen_platforms_file: kitchen_platforms_file)
// vim: ft=groovy


@ -0,0 +1,48 @@
@Library('salt@master-1.3') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
def distro_name = 'centos'
def distro_version = '7'
def python_version = 'py3'
def nox_env_name = 'runtests-zeromq-m2crypto'
def golden_images_branch = 'master'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
parameters([
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
])
])
// Only set milestones on PR builds
if (env.CHANGE_ID) {
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)
// vim: ft=groovy


@ -0,0 +1,49 @@
@Library('salt@master-1.3') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
def distro_name = 'centos'
def distro_version = '7'
def python_version = 'py3'
def nox_env_name = 'runtests-zeromq'
def golden_images_branch = 'master'
def nox_passthrough_opts = '--proxy'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
parameters([
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
])
])
// Only set milestones on PR builds
if (env.CHANGE_ID) {
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label,
extra_codecov_flags: ["proxy"])
// vim: ft=groovy


@ -0,0 +1,48 @@
@Library('salt@master-1.3') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
def distro_name = 'centos'
def distro_version = '7'
def python_version = 'py3'
def nox_env_name = 'runtests-zeromq-pycryptodomex'
def golden_images_branch = 'master'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
parameters([
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
])
])
// Only set milestones on PR builds
if (env.CHANGE_ID) {
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)
// vim: ft=groovy


@ -1,159 +1,48 @@
@Library('salt@1.1') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'centos'
def distro_version = '7'
def python_version = 'py3'
def test_transport = 'TCP'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--ssh-tests'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '10')),
parameters([
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
])
])
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
// vim: ft=groovy

@Library('salt@master-1.3') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
def distro_name = 'centos'
def distro_version = '7'
def python_version = 'py3'
def nox_env_name = 'runtests-tcp'
def golden_images_branch = 'master'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
parameters([
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
])
])
// Only set milestones on PR builds
if (env.CHANGE_ID) {
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)
// vim: ft=groovy
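The use_spot_instances: true argument presumably takes over what the removed 'Create VM' stage did by hand: drop a spot-instance kitchen override into place, and fall back to on-demand capacity if the spot request fails. Condensed from the stage deleted above, that retry looked essentially like this:

// Condensed from the removed 'Create VM' stage: try spot capacity first, then fall back to on-demand.
retry(3) {
    sh '''
    cp -f ~/workspace/spot.yml .kitchen.local.yml
    bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (
        bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM
        rm .kitchen.local.yml
        bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM
    )
    '''
}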

.ci/kitchen-centos8-py3 Normal file

@ -0,0 +1,48 @@
@Library('salt@master-1.3') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
def distro_name = 'centos'
def distro_version = '8'
def python_version = 'py3'
def nox_env_name = 'runtests-zeromq'
def golden_images_branch = 'master'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
parameters([
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
])
])
// Only set milestones on PR builds
if (env.CHANGE_ID) {
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)
// vim: ft=groovy


@ -1,159 +1,48 @@
@Library('salt@1.1') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'debian'
def distro_version = '10'
def python_version = 'py3'
def test_transport = 'ZeroMQ'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--ssh-tests'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '10')),
parameters([
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
])
])
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
// vim: ft=groovy

@Library('salt@master-1.3') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
def distro_name = 'debian'
def distro_version = '10'
def python_version = 'py3'
def nox_env_name = 'runtests-zeromq'
def golden_images_branch = 'master'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
parameters([
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
])
])
// Only set milestones on PR builds
if (env.CHANGE_ID) {
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)
// vim: ft=groovy


@ -1,159 +1,48 @@
@Library('salt@1.1') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'debian'
def distro_version = '8'
def python_version = 'py2'
def test_transport = 'ZeroMQ'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--ssh-tests'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '10')),
parameters([
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
])
])
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
// vim: ft=groovy

@Library('salt@master-1.3') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
def distro_name = 'debian'
def distro_version = '8'
def python_version = 'py2'
def nox_env_name = 'runtests-zeromq'
def golden_images_branch = 'master'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
parameters([
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
])
])
// Only set milestones on PR builds
if (env.CHANGE_ID) {
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)
// vim: ft=groovy


@ -1,159 +1,48 @@
@Library('salt@1.1') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'debian'
def distro_version = '9'
def python_version = 'py2'
def test_transport = 'ZeroMQ'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--ssh-tests'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '10')),
parameters([
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
])
])
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
// vim: ft=groovy

@Library('salt@master-1.3') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
def distro_name = 'debian'
def distro_version = '9'
def python_version = 'py2'
def nox_env_name = 'runtests-zeromq'
def golden_images_branch = 'master'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
parameters([
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
])
])
// Only set milestones on PR builds
if (env.CHANGE_ID) {
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)
// vim: ft=groovy


@ -1,159 +1,48 @@
@Library('salt@1.1') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'debian'
def distro_version = '9'
def python_version = 'py3'
def test_transport = 'ZeroMQ'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--ssh-tests'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '10')),
parameters([
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
])
])
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)
// vim: ft=groovy
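All of the per-distro pipelines above now reduce to this single runTests(...) call into the salt@master-1.3 shared library; the create/converge/verify/cleanup stages that used to be inlined live in that library instead. The library source is not part of this diff, so the following vars/runTests.groovy is only a hypothetical sketch of how a shared-library step taking these named arguments could be shaped, not the actual implementation.

// vars/runTests.groovy -- hypothetical sketch only; the real step ships in the salt@master-1.3 library
def call(Map options) {
    // Named arguments as passed by the call sites above
    def testrunTimeout = options.testrun_timeout
    // Keep the old convention of one extra hour for artifact downloads
    def globalTimeout = testrunTimeout + 1

    wrappedNode(options.jenkins_slave_label, globalTimeout, '#jenkins-prod-pr') {
        withEnv([
            "NOX_ENV_NAME=${options.nox_env_name}",
            "NOX_PASSTHROUGH_OPTS=${options.nox_passthrough_opts}",
            "GOLDEN_IMAGES_CI_BRANCH=${options.golden_images_branch}",
            "TEST_SUITE=${options.python_version}",
            "TEST_PLATFORM=${options.distro_name}-${options.distro_version}",
            "FORCE_FULL=${options.run_full}",
        ]) {
            // The kitchen create/converge/verify/destroy stages and the coverage
            // upload that the deleted inline pipelines carried would live here.
        }
    }
}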


@@ -1,159 +0,0 @@
@Library('salt@1.1') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'fedora'
def distro_version = '29'
def python_version = 'py2'
def test_transport = 'ZeroMQ'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--ssh-tests'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '10')),
parameters([
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
])
])
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
// vim: ft=groovy


@@ -1,159 +0,0 @@
@Library('salt@1.1') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'fedora'
def distro_version = '29'
def python_version = 'py3'
def test_transport = 'ZeroMQ'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--ssh-tests'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '10')),
parameters([
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
])
])
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
// vim: ft=groovy


@@ -1,159 +1,48 @@
@Library('salt@1.1') _
@Library('salt@master-1.3') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'fedora'
def distro_version = '30'
def python_version = 'py2'
def test_transport = 'ZeroMQ'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_env_name = 'runtests-zeromq'
def golden_images_branch = 'master'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '10')),
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
parameters([
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
])
])
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
// Only set milestones on PR builds
if (env.CHANGE_ID) {
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - concurrent_builds)
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)
// vim: ft=groovy


@@ -1,159 +1,48 @@
@Library('salt@1.1') _
@Library('salt@master-1.3') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'fedora'
def distro_version = '30'
def python_version = 'py3'
def test_transport = 'ZeroMQ'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_env_name = 'runtests-zeromq'
def golden_images_branch = 'master'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '10')),
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
parameters([
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
])
])
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
// Only set milestones on PR builds
if (env.CHANGE_ID) {
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - concurrent_builds)
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)
// vim: ft=groovy

.ci/kitchen-fedora31-py3 Normal file

@@ -0,0 +1,48 @@
@Library('salt@master-1.3') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
def distro_name = 'fedora'
def distro_version = '31'
def python_version = 'py3'
def nox_env_name = 'runtests-zeromq'
def golden_images_branch = 'master'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
parameters([
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
])
])
// Only set milestones on PR builds
if (env.CHANGE_ID) {
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)
// vim: ft=groovy
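Read concretely, the milestone guard in these files does the following; a worked example in plain Groovy with concrete numbers, restating the comments above rather than adding any new behaviour:

// Worked example: PR build #42 (env.CHANGE_ID is set), with concurrent_builds = 1 as above
def concurrent_builds = 1
def buildNumber = 42                        // env.BUILD_NUMBER as int
if (buildNumber > concurrent_builds) {
    milestone(41)                           // build #41 defined this same milestone, so a still-running #41 is cancelled
}
milestone(42)                               // and a later build #43 will cancel #42 the same way through its own milestone(42)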


@@ -0,0 +1,149 @@
@Library('salt@master-1.3') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'macosx'
def distro_version = 'highsierra'
def python_version = 'py2'
def nox_env_name = 'runtests-zeromq'
def golden_images_branch = 'master'
def nox_passthrough_opts = ''
def concurrent_builds = 1
def jenkins_slave_label = 'kitchen-slave-mac'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
parameters([
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
])
])
// Only set milestones on PR builds
if (env.CHANGE_ID) {
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
}
wrappedNode(jenkins_slave_label, global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=${nox_env_name}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"PATH=/Users/parallels/.rbenv/shims:/Users/parallels/.rbenv/bin:/usr/local/bin:/usr/bin:/bin:/usr/sbin:/sbin:/opt/salt/bin:/usr/local/sbin",
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"FORCE_FULL=${params.runFull}",
]) {
stage('VM Cleanup') {
sh '''
for i in `prlctl list -aij|jq -r '.[]|select((.Uptime|tonumber > 86400) and (.State == "running"))|.ID'`
do
prlctl stop $i --kill
done
# don't delete vm's that haven't started yet ((.State == "stopped") and (.Uptime == "0"))
for i in `prlctl list -aij|jq -r '.[]|select((.Uptime|tonumber > 0) and (.State != "running"))|.ID'`
do
prlctl delete $i
done
'''
}
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with vagrant macos --without ec2 windows opennebula docker'
}
stage('Create VM') {
sh '''
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
try {
timeout(time: testrun_timeout, unit: 'HOURS') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.vagrant.d/insecure_private_key; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
def distro_strings = [
distro_name,
distro_version
]
def report_strings = (
[python_version] + nox_env_name.split('-')
).flatten()
uploadCodeCoverage(
report_path: 'artifacts/coverage/coverage.xml',
report_name: "${distro_strings.join('-')}-${report_strings.join('-')}",
report_flags: ([distro_strings.join('')] + report_strings).flatten()
)
}
}
}
}
}
// vim: ft=groovy
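For this highsierra/py2 pipeline, the string plumbing in the Upload Coverage stage above evaluates as follows; this is a quick check of the expressions with this file's values substituted in, nothing beyond what the stage already does:

// Plain Groovy evaluation of the Upload Coverage expressions for this file's values
def distro_name = 'macosx'
def distro_version = 'highsierra'
def python_version = 'py2'
def nox_env_name = 'runtests-zeromq'
def distro_strings = [distro_name, distro_version]
def report_strings = ([python_version] + nox_env_name.split('-')).flatten()
assert report_strings == ['py2', 'runtests', 'zeromq']
assert "${distro_strings.join('-')}-${report_strings.join('-')}".toString() == 'macosx-highsierra-py2-runtests-zeromq'
assert ([distro_strings.join('')] + report_strings).flatten() == ['macosxhighsierra', 'py2', 'runtests', 'zeromq']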


@@ -0,0 +1,149 @@
@Library('salt@master-1.3') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'macosx'
def distro_version = 'highsierra'
def python_version = 'py3'
def nox_env_name = 'runtests-zeromq'
def golden_images_branch = 'master'
def nox_passthrough_opts = ''
def concurrent_builds = 1
def jenkins_slave_label = 'kitchen-slave-mac'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
parameters([
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
])
])
// Only set milestones on PR builds
if (env.CHANGE_ID) {
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
}
wrappedNode(jenkins_slave_label, global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=${nox_env_name}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"PATH=/Users/parallels/.rbenv/shims:/Users/parallels/.rbenv/bin:/usr/local/bin:/usr/bin:/bin:/usr/sbin:/sbin:/opt/salt/bin:/usr/local/sbin",
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"FORCE_FULL=${params.runFull}",
]) {
stage('VM Cleanup') {
sh '''
for i in `prlctl list -aij|jq -r '.[]|select((.Uptime|tonumber > 86400) and (.State == "running"))|.ID'`
do
prlctl stop $i --kill
done
# don't delete vm's that haven't started yet ((.State == "stopped") and (.Uptime == "0"))
for i in `prlctl list -aij|jq -r '.[]|select((.Uptime|tonumber > 0) and (.State != "running"))|.ID'`
do
prlctl delete $i
done
'''
}
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with vagrant macos --without ec2 windows opennebula docker'
}
stage('Create VM') {
sh '''
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
try {
timeout(time: testrun_timeout, unit: 'HOURS') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.vagrant.d/insecure_private_key; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
def distro_strings = [
distro_name,
distro_version
]
def report_strings = (
[python_version] + nox_env_name.split('-')
).flatten()
uploadCodeCoverage(
report_path: 'artifacts/coverage/coverage.xml',
report_name: "${distro_strings.join('-')}-${report_strings.join('-')}",
report_flags: ([distro_strings.join('')] + report_strings).flatten()
)
}
}
}
}
}
// vim: ft=groovy

View file

@@ -1,4 +1,4 @@
@Library('salt@1.1') _
@Library('salt@master-1.3') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
@@ -9,43 +9,44 @@ def global_timeout = testrun_timeout + 1;
def distro_name = 'macosx'
def distro_version = 'mojave'
def python_version = 'py2'
def test_transport = 'ZeroMQ'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_env_name = 'runtests-zeromq'
def golden_images_branch = 'master'
def nox_passthrough_opts = ''
def concurrent_builds = 1
def jenkins_slave_label = 'kitchen-slave-mac'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '10')),
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
parameters([
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
])
])
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
// Only set milestones on PR builds
if (env.CHANGE_ID) {
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
}
wrappedNode('kitchen-slave-mac', global_timeout, '#jenkins-prod-pr') {
wrappedNode(jenkins_slave_label, global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
"NOX_ENV_NAME=${nox_env_name}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
"PATH=/Users/parallels/.rbenv/shims:/Users/parallels/.rbenv/bin:/usr/local/bin:/usr/bin:/bin:/usr/sbin:/sbin:/opt/salt/bin:/usr/local/sbin",
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
stage('VM Cleanup') {
@@ -65,7 +66,6 @@ wrappedNode('kitchen-slave-mac', global_timeout, '#jenkins-prod-pr') {
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
@@ -88,7 +88,7 @@
}
}
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
timeout(time: testrun_timeout, unit: 'HOURS') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.vagrant.d/insecure_private_key; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
@@ -128,15 +128,18 @@
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
def distro_strings = [
distro_name,
distro_version
]
def report_strings = (
[python_version] + nox_env_name.split('-')
).flatten()
uploadCodeCoverage(
report_path: 'artifacts/coverage/coverage.xml',
report_name: "${distro_strings.join('-')}-${report_strings.join('-')}",
report_flags: ([distro_strings.join('')] + report_strings).flatten()
)
}
}
}


@@ -1,4 +1,4 @@
@Library('salt@1.1') _
@Library('salt@master-1.3') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
@@ -9,43 +9,44 @@ def global_timeout = testrun_timeout + 1;
def distro_name = 'macosx'
def distro_version = 'mojave'
def python_version = 'py3'
def test_transport = 'ZeroMQ'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_env_name = 'runtests-zeromq'
def golden_images_branch = 'master'
def nox_passthrough_opts = ''
def concurrent_builds = 1
def jenkins_slave_label = 'kitchen-slave-mac'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '10')),
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
parameters([
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
])
])
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
// Only set milestones on PR builds
if (env.CHANGE_ID) {
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
}
wrappedNode('kitchen-slave-mac', global_timeout, '#jenkins-prod-pr') {
wrappedNode(jenkins_slave_label, global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
"NOX_ENV_NAME=${nox_env_name}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
"PATH=/Users/parallels/.rbenv/shims:/Users/parallels/.rbenv/bin:/usr/local/bin:/usr/bin:/bin:/usr/sbin:/sbin:/opt/salt/bin:/usr/local/sbin",
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
stage('VM Cleanup') {
@@ -65,7 +66,6 @@ wrappedNode('kitchen-slave-mac', global_timeout, '#jenkins-prod-pr') {
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
@@ -88,7 +88,7 @@
}
}
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
timeout(time: testrun_timeout, unit: 'HOURS') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.vagrant.d/insecure_private_key; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
@@ -128,15 +128,18 @@
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
def distro_strings = [
distro_name,
distro_version
]
def report_strings = (
[python_version] + nox_env_name.split('-')
).flatten()
uploadCodeCoverage(
report_path: 'artifacts/coverage/coverage.xml',
report_name: "${distro_strings.join('-')}-${report_strings.join('-')}",
report_flags: ([distro_strings.join('')] + report_strings).flatten()
)
}
}
}


@@ -0,0 +1,149 @@
@Library('salt@master-1.3') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'macosx'
def distro_version = 'sierra'
def python_version = 'py2'
def nox_env_name = 'runtests-zeromq'
def golden_images_branch = 'master'
def nox_passthrough_opts = ''
def concurrent_builds = 1
def jenkins_slave_label = 'kitchen-slave-mac'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
parameters([
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
])
])
// Only set milestones on PR builds
if (env.CHANGE_ID) {
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
}
wrappedNode(jenkins_slave_label, global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=${nox_env_name}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"PATH=/Users/parallels/.rbenv/shims:/Users/parallels/.rbenv/bin:/usr/local/bin:/usr/bin:/bin:/usr/sbin:/sbin:/opt/salt/bin:/usr/local/sbin",
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"FORCE_FULL=${params.runFull}",
]) {
stage('VM Cleanup') {
sh '''
for i in `prlctl list -aij|jq -r '.[]|select((.Uptime|tonumber > 86400) and (.State == "running"))|.ID'`
do
prlctl stop $i --kill
done
# don't delete vm's that haven't started yet ((.State == "stopped") and (.Uptime == "0"))
for i in `prlctl list -aij|jq -r '.[]|select((.Uptime|tonumber > 0) and (.State != "running"))|.ID'`
do
prlctl delete $i
done
'''
}
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with vagrant macos --without ec2 windows opennebula docker'
}
stage('Create VM') {
sh '''
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
try {
timeout(time: testrun_timeout, unit: 'HOURS') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.vagrant.d/insecure_private_key; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
def distro_strings = [
distro_name,
distro_version
]
def report_strings = (
[python_version] + nox_env_name.split('-')
).flatten()
uploadCodeCoverage(
report_path: 'artifacts/coverage/coverage.xml',
report_name: "${distro_strings.join('-')}-${report_strings.join('-')}",
report_flags: ([distro_strings.join('')] + report_strings).flatten()
)
}
}
}
}
}
// vim: ft=groovy

View file

@ -0,0 +1,149 @@
@Library('salt@master-1.3') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'macosx'
def distro_version = 'sierra'
def python_version = 'py3'
def nox_env_name = 'runtests-zeromq'
def golden_images_branch = 'master'
def nox_passthrough_opts = ''
def concurrent_builds = 1
def jenkins_slave_label = 'kitchen-slave-mac'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
parameters([
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
])
])
// Only set milestones on PR builds
if (env.CHANGE_ID) {
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
}
wrappedNode(jenkins_slave_label, global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=${nox_env_name}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"PATH=/Users/parallels/.rbenv/shims:/Users/parallels/.rbenv/bin:/usr/local/bin:/usr/bin:/bin:/usr/sbin:/sbin:/opt/salt/bin:/usr/local/sbin",
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"FORCE_FULL=${params.runFull}",
]) {
stage('VM Cleanup') {
sh '''
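# Kill any Parallels VM that has been running for more than 24 hours (86400 seconds)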
for i in `prlctl list -aij|jq -r '.[]|select((.Uptime|tonumber > 86400) and (.State == "running"))|.ID'`
do
prlctl stop $i --kill
done
# don't delete VMs that haven't started yet ((.State == "stopped") and (.Uptime == "0"))
for i in `prlctl list -aij|jq -r '.[]|select((.Uptime|tonumber > 0) and (.State != "running"))|.ID'`
do
prlctl delete $i
done
'''
}
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with vagrant macos --without ec2 windows opennebula docker'
}
stage('Create VM') {
sh '''
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
sh """
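# Rename this stage's kitchen logs with a -create suffix so later stages do not overwrite them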
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
try {
timeout(time: testrun_timeout, unit: 'HOURS') {
stage('Converge VM') {
sh '''
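# Load Vagrant's insecure private key into a throwaway ssh-agent so kitchen can SSH into the new VM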
ssh-agent /bin/bash -c 'ssh-add ~/.vagrant.d/insecure_private_key; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
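// The trailing echo masks kitchen's exit status here; test failures are surfaced later through the archived JUnit results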
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
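// For this pipeline the report is named 'macosx-sierra-py3-runtests-zeromq' and is
// uploaded with the flags ['macosxsierra', 'py3', 'runtests', 'zeromq']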
def distro_strings = [
distro_name,
distro_version
]
def report_strings = (
[python_version] + nox_env_name.split('-')
).flatten()
uploadCodeCoverage(
report_path: 'artifacts/coverage/coverage.xml',
report_name: "${distro_strings.join('-')}-${report_strings.join('-')}",
report_flags: ([distro_strings.join('')] + report_strings).flatten()
)
}
}
}
}
}
// vim: ft=groovy

View file

@ -1,159 +1,48 @@
@Library('salt@1.1') _
@Library('salt@master-1.3') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'opensuse'
def distro_version = '15'
def python_version = 'py2'
def test_transport = 'ZeroMQ'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_env_name = 'runtests-zeromq'
def golden_images_branch = 'master'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '10')),
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
parameters([
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
])
])
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
// Only set milestones on PR builds
if (env.CHANGE_ID) {
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - concurrent_builds)
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)
// vim: ft=groovy

View file

@ -1,159 +1,48 @@
@Library('salt@1.1') _
@Library('salt@master-1.3') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'opensuse'
def distro_version = '15'
def python_version = 'py3'
def test_transport = 'ZeroMQ'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_env_name = 'runtests-zeromq'
def golden_images_branch = 'master'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '10')),
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
parameters([
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
])
])
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
// Only set milestones on PR builds
if (env.CHANGE_ID) {
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - concurrent_builds)
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)
// vim: ft=groovy

View file

@ -1,159 +1,48 @@
@Library('salt@1.1') _
@Library('salt@master-1.3') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'ubuntu'
def distro_version = '1604'
def python_version = 'py2'
def test_transport = 'ZeroMQ'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_env_name = 'runtests-zeromq'
def golden_images_branch = 'master'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '10')),
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
parameters([
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
])
])
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
// Only set milestones on PR builds
if (env.CHANGE_ID) {
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - concurrent_builds)
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)
// vim: ft=groovy

View file

@ -0,0 +1,48 @@
@Library('salt@master-1.3') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
def distro_name = 'ubuntu'
def distro_version = '1604'
def python_version = 'py2'
def nox_env_name = 'runtests-zeromq-m2crypto'
def golden_images_branch = 'master'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
parameters([
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
])
])
// Only set milestones on PR builds
if (env.CHANGE_ID) {
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)
// vim: ft=groovy

View file

@ -0,0 +1,49 @@
@Library('salt@master-1.3') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
def distro_name = 'ubuntu'
def distro_version = '1604'
def python_version = 'py2'
def nox_env_name = 'runtests-zeromq'
def golden_images_branch = 'master'
def nox_passthrough_opts = '--proxy'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
parameters([
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
])
])
// Only set milestones on PR builds
if (env.CHANGE_ID) {
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label,
extra_codecov_flags: ["proxy"])
// vim: ft=groovy

View file

@ -0,0 +1,48 @@
@Library('salt@master-1.3') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
def distro_name = 'ubuntu'
def distro_version = '1604'
def python_version = 'py2'
def nox_env_name = 'runtests-zeromq-pycryptodomex'
def golden_images_branch = 'master'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
parameters([
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
])
])
// Only set milestones on PR builds
if (env.CHANGE_ID) {
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)
// vim: ft=groovy

View file

@ -1,159 +1,48 @@
@Library('salt@1.1') _
@Library('salt@master-1.3') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'ubuntu'
def distro_version = '1604'
def python_version = 'py2'
def test_transport = 'TCP'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_env_name = 'runtests-tcp'
def golden_images_branch = 'master'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '10')),
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
parameters([
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
])
])
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
// Only set milestones on PR builds
if (env.CHANGE_ID) {
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - concurrent_builds)
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)
// vim: ft=groovy

View file

@ -0,0 +1,48 @@
@Library('salt@master-1.3') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
def distro_name = 'ubuntu'
def distro_version = '1604'
def python_version = 'py2'
def nox_env_name = 'runtests-tornado'
def golden_images_branch = 'master'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
parameters([
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
])
])
// Only set milestones on PR builds
if (env.CHANGE_ID) {
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)
// vim: ft=groovy

View file

@ -1,159 +1,48 @@
@Library('salt@1.1') _
@Library('salt@master-1.3') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'ubuntu'
def distro_version = '1604'
def python_version = 'py3'
def test_transport = 'ZeroMQ'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_env_name = 'runtests-zeromq'
def golden_images_branch = 'master'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '10')),
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
parameters([
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
])
])
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
// Only set milestones on PR builds
if (env.CHANGE_ID) {
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - concurrent_builds)
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)
// vim: ft=groovy

View file

@ -0,0 +1,48 @@
@Library('salt@master-1.3') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
def distro_name = 'ubuntu'
def distro_version = '1604'
def python_version = 'py3'
def nox_env_name = 'runtests-zeromq-m2crypto'
def golden_images_branch = 'master'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
parameters([
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
])
])
// Only set milestones on PR builds
if (env.CHANGE_ID) {
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)
// vim: ft=groovy

View file

@ -0,0 +1,49 @@
@Library('salt@master-1.3') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
def distro_name = 'ubuntu'
def distro_version = '1604'
def python_version = 'py3'
def nox_env_name = 'runtests-zeromq'
def golden_images_branch = 'master'
def nox_passthrough_opts = '--proxy'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
parameters([
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
])
])
// Only set milestones on PR builds
if (env.CHANGE_ID) {
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label,
extra_codecov_flags: ["proxy"])
// vim: ft=groovy

View file

@ -0,0 +1,48 @@
@Library('salt@master-1.3') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
def distro_name = 'ubuntu'
def distro_version = '1604'
def python_version = 'py3'
def nox_env_name = 'runtests-zeromq-pycryptodomex'
def golden_images_branch = 'master'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
parameters([
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
])
])
// Only set milestones on PR builds
if (env.CHANGE_ID) {
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)
// vim: ft=groovy

View file

@ -1,159 +1,48 @@
@Library('salt@1.1') _
@Library('salt@master-1.3') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'ubuntu'
def distro_version = '1604'
def python_version = 'py3'
def test_transport = 'TCP'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_env_name = 'runtests-tcp'
def golden_images_branch = 'master'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '10')),
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
parameters([
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
])
])
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
// Only set milestones on PR builds
if (env.CHANGE_ID) {
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - concurrent_builds)
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
} }
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
} }
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)
// vim: ft=groovy // vim: ft=groovy
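Because the side-by-side rendering above interleaves removed and added lines, the new milestone logic is easier to read reconstructed from the added lines alone. A readability sketch, not the verbatim file (indentation approximated):

// Only set milestones on PR builds
if (env.CHANGE_ID) {
    // Be sure to cancel any previously running builds
    def buildNumber = env.BUILD_NUMBER as int
    if (buildNumber > concurrent_builds) {
        // This will cancel the previous build which also defined a matching milestone
        milestone(buildNumber - concurrent_builds)
    }
    // Define a milestone for this build so that, if another build starts, this one will be aborted
    milestone(buildNumber)
}

Since env.CHANGE_ID is only populated for pull-request builds, branch builds skip the guard entirely and are no longer aborted when a newer build starts.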

View file

@ -1,159 +1,48 @@
@Library('salt@1.1') _ @Library('salt@master-1.3') _
// Define the maximum time, in hours, that a test run should run for // Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6 def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'ubuntu' def distro_name = 'ubuntu'
def distro_version = '1804' def distro_version = '1804'
def python_version = 'py2' def python_version = 'py2'
def test_transport = 'ZeroMQ' def nox_env_name = 'runtests-zeromq'
def salt_target_branch = 'master' def golden_images_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--ssh-tests' def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'
properties([ properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '10')), buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
parameters([ parameters([
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull') booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
]) ])
]) ])
// Be sure to cancel any previously running builds // Only set milestones on PR builds
def buildNumber = env.BUILD_NUMBER as int if (env.CHANGE_ID) {
if (buildNumber > 1) { // Be sure to cancel any previously running builds
// This will cancel the previous build which also defined a matching milestone def buildNumber = env.BUILD_NUMBER as int
milestone(buildNumber - 1) if (buildNumber > concurrent_builds) {
} // This will cancel the previous build which also defined a matching milestone
// Define a milestone for this build so that, if another build starts, this one will be aborted milestone(buildNumber - concurrent_builds)
milestone(buildNumber)
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
} }
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
} }
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)
// vim: ft=groovy // vim: ft=groovy
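Taken together, the added lines of the ubuntu-1804/py2 file above reduce each per-platform pipeline to a block of variables plus a single shared-library call. The following is a sketch assembled from this diff for readability, not the verbatim file (ordering and indentation approximated; the PR-only milestone guard is elided here, see the sketch shown earlier):

@Library('salt@master-1.3') _

// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
def distro_name = 'ubuntu'
def distro_version = '1804'
def python_version = 'py2'
def nox_env_name = 'runtests-zeromq'
def golden_images_branch = 'master'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'

properties([
    buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
    parameters([
        booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
    ])
])

// PR-only milestone guard goes here (see the earlier sketch)

// The Clone/Setup/Create VM/Converge/Run Tests/Download Artefacts/Cleanup
// stages removed in this diff are now handled by the shared library's
// runTests step rather than being repeated in every file
runTests(
    env: env,
    distro_name: distro_name,
    distro_version: distro_version,
    python_version: python_version,
    golden_images_branch: golden_images_branch,
    nox_env_name: nox_env_name,
    nox_passthrough_opts: nox_passthrough_opts,
    testrun_timeout: testrun_timeout,
    run_full: params.runFull,
    use_spot_instances: use_spot_instances,
    jenkins_slave_label: jenkins_slave_label)

// vim: ft=groovy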

View file

@ -1,159 +1,48 @@
@Library('salt@1.1') _ @Library('salt@master-1.3') _
// Define the maximum time, in hours, that a test run should run for // Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6 def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'ubuntu' def distro_name = 'ubuntu'
def distro_version = '1804' def distro_version = '1804'
def python_version = 'py3' def python_version = 'py3'
def test_transport = 'ZeroMQ' def nox_env_name = 'runtests-zeromq'
def salt_target_branch = 'master' def golden_images_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--ssh-tests' def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'
properties([ properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '10')), buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
parameters([ parameters([
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull') booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
]) ])
]) ])
// Be sure to cancel any previously running builds // Only set milestones on PR builds
def buildNumber = env.BUILD_NUMBER as int if (env.CHANGE_ID) {
if (buildNumber > 1) { // Be sure to cancel any previously running builds
// This will cancel the previous build which also defined a matching milestone def buildNumber = env.BUILD_NUMBER as int
milestone(buildNumber - 1) if (buildNumber > concurrent_builds) {
} // This will cancel the previous build which also defined a matching milestone
// Define a milestone for this build so that, if another build starts, this one will be aborted milestone(buildNumber - concurrent_builds)
milestone(buildNumber)
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
} }
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
} }
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)
// vim: ft=groovy // vim: ft=groovy

View file

@ -1,158 +1,48 @@
@Library('salt@1.1') _ @Library('salt@master-1.3') _
// Define the maximum time, in hours, that a test run should run for // Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 8 def testrun_timeout = 8
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'windows' def distro_name = 'windows'
def distro_version = '2016' def distro_version = '2016'
def python_version = 'py2' def python_version = 'py2'
def test_transport = 'ZeroMQ' def nox_env_name = 'runtests-zeromq'
def salt_target_branch = 'master' def golden_images_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--unit' def nox_passthrough_opts = '--unit'
def concurrent_builds = 1
def use_spot_instances = false
def jenkins_slave_label = 'kitchen-slave'
properties([ properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '10')), buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
parameters([ parameters([
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull') booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
]) ])
]) ])
// Be sure to cancel any previously running builds // Only set milestones on PR builds
def buildNumber = env.BUILD_NUMBER as int if (env.CHANGE_ID) {
if (buildNumber > 1) { // Be sure to cancel any previously running builds
// This will cancel the previous build which also defined a matching milestone def buildNumber = env.BUILD_NUMBER as int
milestone(buildNumber - 1) if (buildNumber > concurrent_builds) {
} // This will cancel the previous build which also defined a matching milestone
// Define a milestone for this build so that, if another build starts, this one will be aborted milestone(buildNumber - concurrent_builds)
milestone(buildNumber)
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout, unit: 'HOURS') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
} }
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
} }
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)
// vim: ft=groovy // vim: ft=groovy
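The Windows files receive the same consolidation; per the added lines above, only the per-platform knobs differ from the Linux sketch shown earlier. For the windows-2016/py2 file they are (sketch, values copied from the additions in this diff):

// Windows runs get a longer timeout, unit tests only, and no spot instances
def testrun_timeout = 8
def distro_name = 'windows'
def distro_version = '2016'      // the later files use '2019'
def python_version = 'py2'       // matching py3 pipelines follow
def nox_env_name = 'runtests-zeromq'
def golden_images_branch = 'master'
def nox_passthrough_opts = '--unit'
def concurrent_builds = 1
def use_spot_instances = false
def jenkins_slave_label = 'kitchen-slave'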

View file

@ -1,158 +1,48 @@
@Library('salt@1.1') _ @Library('salt@master-1.3') _
// Define the maximum time, in hours, that a test run should run for // Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 8 def testrun_timeout = 8
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'windows' def distro_name = 'windows'
def distro_version = '2016' def distro_version = '2016'
def python_version = 'py3' def python_version = 'py3'
def test_transport = 'ZeroMQ' def nox_env_name = 'runtests-zeromq'
def salt_target_branch = 'master' def golden_images_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--unit' def nox_passthrough_opts = '--unit'
def concurrent_builds = 1
def use_spot_instances = false
def jenkins_slave_label = 'kitchen-slave'
properties([ properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '10')), buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
parameters([ parameters([
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull') booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
]) ])
]) ])
// Be sure to cancel any previously running builds // Only set milestones on PR builds
def buildNumber = env.BUILD_NUMBER as int if (env.CHANGE_ID) {
if (buildNumber > 1) { // Be sure to cancel any previously running builds
// This will cancel the previous build which also defined a matching milestone def buildNumber = env.BUILD_NUMBER as int
milestone(buildNumber - 1) if (buildNumber > concurrent_builds) {
} // This will cancel the previous build which also defined a matching milestone
// Define a milestone for this build so that, if another build starts, this one will be aborted milestone(buildNumber - concurrent_builds)
milestone(buildNumber)
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout, unit: 'HOURS') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
} }
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
} }
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)
// vim: ft=groovy // vim: ft=groovy

View file

@ -1,158 +1,48 @@
@Library('salt@1.1') _ @Library('salt@master-1.3') _
// Define the maximum time, in hours, that a test run should run for // Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 8 def testrun_timeout = 8
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'windows' def distro_name = 'windows'
def distro_version = '2019' def distro_version = '2019'
def python_version = 'py2' def python_version = 'py2'
def test_transport = 'ZeroMQ' def nox_env_name = 'runtests-zeromq'
def salt_target_branch = 'master' def golden_images_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--unit' def nox_passthrough_opts = '--unit'
def concurrent_builds = 1
def use_spot_instances = false
def jenkins_slave_label = 'kitchen-slave'
properties([ properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '10')), buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
parameters([ parameters([
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull') booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
]) ])
]) ])
// Be sure to cancel any previously running builds // Only set milestones on PR builds
def buildNumber = env.BUILD_NUMBER as int if (env.CHANGE_ID) {
if (buildNumber > 1) { // Be sure to cancel any previously running builds
// This will cancel the previous build which also defined a matching milestone def buildNumber = env.BUILD_NUMBER as int
milestone(buildNumber - 1) if (buildNumber > concurrent_builds) {
} // This will cancel the previous build which also defined a matching milestone
// Define a milestone for this build so that, if another build starts, this one will be aborted milestone(buildNumber - concurrent_builds)
milestone(buildNumber)
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout, unit: 'HOURS') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
} }
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
} }
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)
// vim: ft=groovy // vim: ft=groovy

View file

@ -1,158 +1,48 @@
@Library('salt@1.1') _ @Library('salt@master-1.3') _
// Define the maximum time, in hours, that a test run should run for // Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 8 def testrun_timeout = 8
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'windows' def distro_name = 'windows'
def distro_version = '2019' def distro_version = '2019'
def python_version = 'py3' def python_version = 'py3'
def test_transport = 'ZeroMQ' def nox_env_name = 'runtests-zeromq'
def salt_target_branch = 'master' def golden_images_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--unit' def nox_passthrough_opts = '--unit'
def concurrent_builds = 1
def use_spot_instances = false
def jenkins_slave_label = 'kitchen-slave'
properties([ properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '10')), buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
parameters([ parameters([
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull') booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
]) ])
]) ])
// Be sure to cancel any previously running builds // Only set milestones on PR builds
def buildNumber = env.BUILD_NUMBER as int if (env.CHANGE_ID) {
if (buildNumber > 1) { // Be sure to cancel any previously running builds
// This will cancel the previous build which also defined a matching milestone def buildNumber = env.BUILD_NUMBER as int
milestone(buildNumber - 1) if (buildNumber > concurrent_builds) {
} // This will cancel the previous build which also defined a matching milestone
// Define a milestone for this build so that, if another build starts, this one will be aborted milestone(buildNumber - concurrent_builds)
milestone(buildNumber)
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout, unit: 'HOURS') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
} }
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
} }
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)
// vim: ft=groovy // vim: ft=groovy

View file

@ -10,14 +10,17 @@ properties([
def shell_header def shell_header
// Be sure to cancel any previously running builds // Only set milestones on PR builds
def buildNumber = env.BUILD_NUMBER as int if (env.CHANGE_ID) {
if (buildNumber > 1) { // Be sure to cancel any previously running builds
// This will cancel the previous build which also defined a matching milestone def buildNumber = env.BUILD_NUMBER as int
milestone(buildNumber - 1) if (buildNumber > 1) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
} }
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
def lint_report_issues = [] def lint_report_issues = []
@ -30,7 +33,6 @@ wrappedNode('lint', global_timeout, '#jenkins-prod-pr') {
stage('checkout-scm') { stage('checkout-scm') {
cleanWs notFailBuild: true cleanWs notFailBuild: true
checkout scm checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
} }
// Setup the kitchen required bundle // Setup the kitchen required bundle

View file

@ -3,15 +3,14 @@ codecov:
- drone.saltstack.com - drone.saltstack.com
- jenkinsci.saltstack.com - jenkinsci.saltstack.com
branch: 2019.2 branch: master
notify: notify:
require_ci_to_pass: no require_ci_to_pass: yes # Less spammy. Only notify on passing builds.
ignore: ignore:
- ^*.py$ - ^*.py$ # python files at the repo root, ie, setup.py
- doc/.* - doc/.* # ignore any code under doc/
- tests/.*
coverage: coverage:
round: up round: up
@ -20,30 +19,61 @@ coverage:
status: status:
project: # measuring the overall project coverage project: # measuring the overall project coverage
default: default: false # disable the default status that measures entire project
salt: # declare a new status context "salt"
enabled: yes # must be yes|true to enable this status enabled: yes # must be yes|true to enable this status
if_no_uploads: error # will post commit status of "error" if no coverage reports we uploaded paths: "!tests/" # remove all files in "tests/"
target: auto # will use the coverage from the base commit (pull request base or parent commit) coverage to compare against.
base: auto # will use the pull request base if the commit is on a pull request. If not, the parent commit will be used.
if_no_uploads: error # will post commit status of "error" if no coverage reports were uploaded
# options: success, error, failure # options: success, error, failure
if_not_found: success # if parent is not found report status as success, error, or failure if_not_found: success # if parent is not found report status as success, error, or failure
if_ci_failed: success # if ci fails report status as success, error, or failure if_ci_failed: error # if ci fails report status as success, error, or failure
tests: # declare a new status context "tests"
enabled: yes # must be yes|true to enable this status
#target: 100% # we always want 100% coverage here
target: auto # auto while we get this going
base: auto # will use the pull request base if the commit is on a pull request. If not, the parent commit will be used.
paths: "!salt/" # only include coverage in "tests/" folder
if_no_uploads: error # will post commit status of "error" if no coverage reports were uploaded
# options: success, error, failure
if_not_found: success # if parent is not found report status as success, error, or failure
if_ci_failed: error # if ci fails report status as success, error, or failure
patch: # pull requests only: this commit status will measure the patch: # pull requests only: this commit status will measure the
# entire pull requests Coverage Diff. Checking if the lines # entire pull requests Coverage Diff. Checking if the lines
# adjusted are covered at least X%. # adjusted are covered at least X%.
default: default:
enabled: no # must be yes|true to enable this status enabled: yes # must be yes|true to enable this status
target: 80% # specify the target "X%" coverage to hit target: 100% # Newly added lines must have 100% coverage
if_no_uploads: error # will post commit status of "error" if no coverage reports we uploaded if_no_uploads: error # will post commit status of "error" if no coverage reports were uploaded
# options: success, error, failure # options: success, error, failure
if_not_found: success if_not_found: success
if_ci_failed: success if_ci_failed: error
changes: # if there are any unexpected changes in coverage changes: # if there are any unexpected changes in coverage
default: default:
enabled: no # must be yes|true to enable this status enabled: yes # must be yes|true to enable this status
if_no_uploads: success if_no_uploads: error
if_not_found: success if_not_found: success
if_ci_failed: success if_ci_failed: error
# No commends because we're not yet running the full test suite on PRs flags:
comment: off salt:
paths:
- salt/
tests:
paths:
- tests/
comment:
layout: "reach, diff, flags, files"
after_n_builds: 46 # Only comment on PRs after N builds
# This value is the output of:
# sh -c 'echo "$(ls .ci/ | grep kitchen | wc -l)"'
behavior: new # Comment posting behaviour
# default: update, if exists. Otherwise post new.
# once: update, if exists. Otherwise post new. Skip if deleted.
# new: delete old and post new.
# spammy: post new (do not delete old comments).

View file

@ -1,14 +1,11 @@
[run] [run]
branch = True branch = True
cover_pylib = False cover_pylib = False
source =
salt
parallel = True parallel = True
concurrency = multiprocessing concurrency = multiprocessing
omit = omit =
tests/*.py
setup.py setup.py
salt/daemons/test/* .nox/*
[report] [report]
# Regexes for lines to exclude from consideration # Regexes for lines to exclude from consideration
@ -30,7 +27,3 @@ exclude_lines =
ignore_errors = True ignore_errors = True
[paths]
source =
salt

.github/CODEOWNERS
View file

@ -8,72 +8,58 @@
# This file uses an fnmatch-style matching pattern. # This file uses an fnmatch-style matching pattern.
# Team Boto
salt/*/*boto* @saltstack/team-boto
# Team Core # Team Core
requirements/* @saltstack/team-core * @saltstack/team-core
rfcs/* @saltstack/team-core
salt/auth/* @saltstack/team-core # Team Boto
salt/cache/* @saltstack/team-core salt/*/*boto* @saltstack/team-boto @saltstack/team-core
salt/cli/* @saltstack/team-core
salt/client/* @saltstack/team-core
salt/config/* @saltstack/team-core
salt/daemons/* @saltstack/team-core
salt/pillar/* @saltstack/team-core
salt/loader.py @saltstack/team-core
salt/payload.py @saltstack/team-core
salt/master.py @saltstack/team-core
salt/*/master* @saltstack/team-core
salt/minion.py @saltstack/team-core
salt/*/minion* @saltstack/team-core
# Team Cloud # Team Cloud
salt/cloud/* @saltstack/team-cloud salt/cloud/* @saltstack/team-cloud @saltstack/team-core
salt/utils/openstack/* @saltstack/team-cloud salt/utils/openstack/* @saltstack/team-cloud @saltstack/team-core
salt/utils/aws.py @saltstack/team-cloud salt/utils/aws.py @saltstack/team-cloud @saltstack/team-core
salt/*/*cloud* @saltstack/team-cloud salt/*/*cloud* @saltstack/team-cloud @saltstack/team-core
# Team NetAPI # Team NetAPI
salt/cli/api.py @saltstack/team-netapi salt/cli/api.py @saltstack/team-netapi @saltstack/team-core
salt/client/netapi.py @saltstack/team-netapi salt/client/netapi.py @saltstack/team-netapi @saltstack/team-core
salt/netapi/* @saltstack/team-netapi salt/netapi/* @saltstack/team-netapi @saltstack/team-core
# Team Network # Team Network
salt/proxy/* @saltstack/team-proxy salt/proxy/* @saltstack/team-proxy @saltstack/team-core
# Team SPM # Team SPM
salt/cli/spm.py @saltstack/team-spm salt/cli/spm.py @saltstack/team-spm @saltstack/team-core
salt/spm/* @saltstack/team-spm salt/spm/* @saltstack/team-spm @saltstack/team-core
# Team SSH # Team SSH
salt/cli/ssh.py @saltstack/team-ssh salt/cli/ssh.py @saltstack/team-ssh @saltstack/team-core
salt/client/ssh/* @saltstack/team-ssh salt/client/ssh/* @saltstack/team-ssh @saltstack/team-core
salt/roster/* @saltstack/team-ssh salt/roster/* @saltstack/team-ssh @saltstack/team-core
salt/runners/ssh.py @saltstack/team-ssh salt/runners/ssh.py @saltstack/team-ssh @saltstack/team-core
salt/*/thin.py @saltstack/team-ssh salt/*/thin.py @saltstack/team-ssh @saltstack/team-core
# Team State # Team State
salt/state.py @saltstack/team-state salt/state.py @saltstack/team-state @saltstack/team-core
# Team SUSE # Team SUSE
salt/*/*btrfs* @saltstack/team-suse salt/*/*btrfs* @saltstack/team-suse @saltstack/team-core
salt/*/*kubernetes* @saltstack/team-suse salt/*/*kubernetes* @saltstack/team-suse @saltstack/team-core
salt/*/*pkg* @saltstack/team-suse salt/*/*pkg* @saltstack/team-suse @saltstack/team-core
salt/*/*snapper* @saltstack/team-suse salt/*/*snapper* @saltstack/team-suse @saltstack/team-core
salt/*/*xfs* @saltstack/team-suse salt/*/*xfs* @saltstack/team-suse @saltstack/team-core
salt/*/*zypper* @saltstack/team-suse salt/*/*zypper* @saltstack/team-suse @saltstack/team-core
# Team Transport # Team Transport
salt/transport/* @saltstack/team-transport salt/transport/* @saltstack/team-transport @saltstack/team-core
salt/utils/zeromq.py @saltstack/team-transport salt/utils/zeromq.py @saltstack/team-transport @saltstack/team-core
# Team Windows # Team Windows
salt/*/*win* @saltstack/team-windows salt/*/*win* @saltstack/team-windows @saltstack/team-core
salt/modules/reg.py @saltstack/team-windows salt/modules/reg.py @saltstack/team-windows @saltstack/team-core
salt/states/reg.py @saltstack/team-windows salt/states/reg.py @saltstack/team-windows @saltstack/team-core
tests/*/*win* @saltstack/team-windows tests/*/*win* @saltstack/team-windows @saltstack/team-core
tests/*/test_reg.py @saltstack/team-windows tests/*/test_reg.py @saltstack/team-windows @saltstack/team-core
# Jenkins Integration # Jenkins Integration
.ci/* @saltstack/saltstack-sre-team @saltstack/team-core .ci/* @saltstack/saltstack-sre-team @saltstack/team-core

View file

@ -9,6 +9,8 @@ Remove this section if not relevant
Remove this section if not relevant Remove this section if not relevant
### Tests written? ### Tests written?
**[NOTICE] Bug fixes or features added to Salt require tests.**
Please review the [test documentation](https://docs.saltstack.com/en/latest/topics/tutorials/writing_tests.html) for details on how to implement tests into Salt's test suite.
Yes/No Yes/No

View file

@ -39,7 +39,7 @@ provisioner:
max_retries: 2 max_retries: 2
remote_states: remote_states:
name: git://github.com/saltstack/salt-jenkins.git name: git://github.com/saltstack/salt-jenkins.git
branch: 2019.2 branch: master
repo: git repo: git
testingdir: /testing testingdir: /testing
salt_copy_filter: salt_copy_filter:

View file

@ -6,26 +6,24 @@ repos:
- id: pip-tools-compile - id: pip-tools-compile
alias: compile-linux-py2.7-zmq-requirements alias: compile-linux-py2.7-zmq-requirements
name: Linux Py2.7 ZeroMQ Requirements name: Linux Py2.7 ZeroMQ Requirements
files: ^requirements/((base|zeromq|pytest)\.txt|static/(.*)\.in)$ files: ^requirements/((base|zeromq|pytest)\.txt|static/linux\.in)$
exclude: ^requirements/static/(lint|cloud|docs|osx|windows)\.in$ exclude: ^requirements/static/(lint|cloud|docs|darwin|windows)\.in$
args: args:
- -v - -v
- --py-version=2.7 - --py-version=2.7
- --platform=linux - --platform=linux
- --out-prefix=zeromq
- --include=requirements/base.txt - --include=requirements/base.txt
- --include=requirements/zeromq.txt - --include=requirements/zeromq.txt
- --include=requirements/pytest.txt - --include=requirements/pytest.txt
- --remove-line=^pycrypto==(.*)$ - --remove-line=^pycrypto==(.*)$
- id: pip-tools-compile - id: pip-tools-compile
alias: compile-osx-py2.7-zmq-requirements alias: compile-darwin-py2.7-zmq-requirements
name: OSX Py2.7 ZeroMQ Requirements name: Darwin Py2.7 ZeroMQ Requirements
files: ^(pkg/osx/(req|req_ext)\.txt|requirements/((base|zeromq|pytest)\.txt|static/osx\.in))$ files: ^(pkg/osx/(req|req_ext)\.txt|requirements/((base|zeromq|pytest)\.txt|static/darwin\.in))$
args: args:
- -v - -v
- --py-version=2.7 - --py-version=2.7
- --platform=darwin - --platform=darwin
- --out-prefix=zeromq
- --include=pkg/osx/req.txt - --include=pkg/osx/req.txt
- --include=pkg/osx/req_ext.txt - --include=pkg/osx/req_ext.txt
- --include=requirements/base.txt - --include=requirements/base.txt
@ -41,58 +39,12 @@ repos:
- -v - -v
- --py-version=2.7 - --py-version=2.7
- --platform=windows - --platform=windows
- --out-prefix=zeromq
- --include=pkg/windows/req.txt - --include=pkg/windows/req.txt
- --include=pkg/windows/req_win.txt - --include=pkg/windows/req_win.txt
- --include=requirements/base.txt - --include=requirements/base.txt
- --include=requirements/zeromq.txt - --include=requirements/zeromq.txt
- --include=requirements/pytest.txt - --include=requirements/pytest.txt
- --remove-line=^pycrypto==(.*)$ - --remove-line=^pycrypto==(.*)$
- id: pip-tools-compile
alias: compile-linux-py2.7-raet-requirements
name: Linux Py2.7 RAET Requirements
files: ^requirements/((base|raet|pytest)\.txt|static/(.*)\.in)$
exclude: ^requirements/static/(lint|cloud|docs|osx|windows)\.in$
args:
- -v
- --py-version=2.7
- --platform=linux
- --out-prefix=raet
- --include=requirements/base.txt
- --include=requirements/raet.txt
- --include=requirements/pytest.txt
- --remove-line=^pycrypto==(.*)$
- id: pip-tools-compile
alias: compile-osx-py2.7-raet-requirements
name: OSX Py2.7 RAET Requirements
files: ^(pkg/osx/(req|req_ext)\.txt|requirements/((base|raet|pytest)\.txt|static/osx\.in))$
args:
- -v
- --py-version=2.7
- --out-prefix=raet
- --platform=darwin
- --include=pkg/osx/req.txt
- --include=pkg/osx/req_ext.txt
- --include=requirements/base.txt
- --include=requirements/raet.txt
- --include=requirements/pytest.txt
- --remove-line=^pycrypto==(.*)$
- --passthrough-line-from-input=^pyobjc(.*)$
- id: pip-tools-compile
alias: compile-windows-py2.7-raet-requirements
name: Windows Py2.7 RAET Requirements
files: ^(pkg/windows/(req|req_win)\.txt|requirements/((base|raet|pytest)\.txt|static/windows\.in))$
args:
- -v
- --py-version=2.7
- --out-prefix=raet
- --platform=windows
- --include=pkg/windows/req.txt
- --include=pkg/windows/req_win.txt
- --include=requirements/base.txt
- --include=requirements/raet.txt
- --include=requirements/pytest.txt
- --remove-line=^pycrypto==(.*)$
- id: pip-tools-compile - id: pip-tools-compile
alias: compile-lint-py2.7-requirements alias: compile-lint-py2.7-requirements
@@ -115,32 +67,16 @@ repos:
- id: pip-tools-compile - id: pip-tools-compile
alias: compile-linux-py3.4-zmq-requirements alias: compile-linux-py3.4-zmq-requirements
name: Linux Py3.4 ZeroMQ Requirements name: Linux Py3.4 ZeroMQ Requirements
files: ^requirements/((base|zeromq|pytest)\.txt|static/(.*)\.in)$ files: ^requirements/((base|zeromq|pytest)\.txt|static/linux\.in)$
exclude: ^requirements/static/(centos-6|amzn-2018\.03|lint|cloud|docs|osx|windows)\.in$ exclude: ^requirements/static/(centos-6|amzn-2018\.03|lint|cloud|docs|darwin|windows)\.in$
args: args:
- -v - -v
- --py-version=3.4 - --py-version=3.4
- --platform=linux - --platform=linux
- --out-prefix=zeromq
- --include=requirements/base.txt - --include=requirements/base.txt
- --include=requirements/zeromq.txt - --include=requirements/zeromq.txt
- --include=requirements/pytest.txt - --include=requirements/pytest.txt
- --remove-line=^pycrypto==(.*)$ - --remove-line=^pycrypto==(.*)$
- id: pip-tools-compile
alias: compile-linux-py3.4-raet-requirements
name: Linux Py3.4 RAET Requirements
files: ^requirements/((base|raet|pytest)\.txt|static/(.*)\.in)$
exclude: ^requirements/static/(centos-6|amzn-2018\.03|lint|cloud|docs|osx|windows)\.in$
args:
- -v
- --py-version=3.4
- --platform=linux
- --out-prefix=raet
- --include=requirements/base.txt
- --include=requirements/raet.txt
- --include=requirements/pytest.txt
- --remove-line=^pycrypto==(.*)$
- --remove-line=^enum34==(.*)$
- id: pip-tools-compile - id: pip-tools-compile
alias: compile-cloud-py3.4-requirements alias: compile-cloud-py3.4-requirements
@@ -153,26 +89,24 @@ repos:
- id: pip-tools-compile - id: pip-tools-compile
alias: compile-linux-py3.5-zmq-requirements alias: compile-linux-py3.5-zmq-requirements
name: Linux Py3.5 ZeroMQ Requirements name: Linux Py3.5 ZeroMQ Requirements
files: ^requirements/((base|zeromq|pytest)\.txt|static/(.*)\.in)$ files: ^requirements/((base|zeromq|pytest)\.txt|static/linux\.in)$
exclude: ^requirements/static/(centos-6|amzn-2018\.03|lint|cloud|docs|osx|windows)\.in$ exclude: ^requirements/static/(centos-6|amzn-2018\.03|lint|cloud|docs|darwin|windows)\.in$
args: args:
- -v - -v
- --py-version=3.5 - --py-version=3.5
- --platform=linux - --platform=linux
- --out-prefix=zeromq
- --include=requirements/base.txt - --include=requirements/base.txt
- --include=requirements/zeromq.txt - --include=requirements/zeromq.txt
- --include=requirements/pytest.txt - --include=requirements/pytest.txt
- --remove-line=^pycrypto==(.*)$ - --remove-line=^pycrypto==(.*)$
- id: pip-tools-compile - id: pip-tools-compile
alias: compile-osx-py3.5-zmq-requirements alias: compile-darwin-py3.5-zmq-requirements
name: OSX Py3.5 ZeroMQ Requirements name: Darwin Py3.5 ZeroMQ Requirements
files: ^(pkg/osx/(req|req_ext)\.txt|requirements/((base|zeromq|pytest)\.txt|static/osx\.in))$ files: ^(pkg/osx/(req|req_ext)\.txt|requirements/((base|zeromq|pytest)\.txt|static/darwin\.in))$
args: args:
- -v - -v
- --py-version=3.5 - --py-version=3.5
- --platform=darwin - --platform=darwin
- --out-prefix=zeromq
- --include=pkg/osx/req.txt - --include=pkg/osx/req.txt
- --include=pkg/osx/req_ext.txt - --include=pkg/osx/req_ext.txt
- --include=requirements/base.txt - --include=requirements/base.txt
@@ -188,61 +122,12 @@ repos:
- -v - -v
- --py-version=3.5 - --py-version=3.5
- --platform=windows - --platform=windows
- --out-prefix=zeromq
- --include=pkg/windows/req.txt - --include=pkg/windows/req.txt
- --include=pkg/windows/req_win.txt - --include=pkg/windows/req_win.txt
- --include=requirements/base.txt - --include=requirements/base.txt
- --include=requirements/zeromq.txt - --include=requirements/zeromq.txt
- --include=requirements/pytest.txt - --include=requirements/pytest.txt
- --remove-line=^pycrypto==(.*)$ - --remove-line=^pycrypto==(.*)$
- id: pip-tools-compile
alias: compile-linux-py3.5-raet-requirements
name: Linux Py3.5 RAET Requirements
files: ^requirements/((base|raet|pytest)\.txt|static/(.*)\.in)$
exclude: ^requirements/static/(centos-6|amzn-2018\.03|lint|cloud|docs|osx|windows)\.in$
args:
- -v
- --py-version=3.5
- --platform=linux
- --out-prefix=raet
- --include=requirements/base.txt
- --include=requirements/raet.txt
- --include=requirements/pytest.txt
- --remove-line=^pycrypto==(.*)$
- --remove-line=^enum34==(.*)$
- id: pip-tools-compile
alias: compile-osx-py3.5-raet-requirements
name: OSX Py3.5 RAET Requirements
files: ^(pkg/osx/(req|req_ext)\.txt|requirements/((base|raet|pytest)\.txt|static/osx\.in))$
args:
- -v
- --py-version=3.5
- --platform=darwin
- --out-prefix=raet
- --include=pkg/osx/req.txt
- --include=pkg/osx/req_ext.txt
- --include=requirements/base.txt
- --include=requirements/raet.txt
- --include=requirements/pytest.txt
- --remove-line=^pycrypto==(.*)$
- --remove-line=^enum34==(.*)$
- --passthrough-line-from-input=^pyobjc(.*)$
- id: pip-tools-compile
alias: compile-windows-py3.5-raet-requirements
name: Windows Py3.5 RAET Requirements
files: ^(pkg/windows/(req|req_win)\.txt|requirements/((base|raet|pytest)\.txt|static/windows\.in))$
args:
- -v
- --py-version=3.5
- --platform=windows
- --out-prefix=raet
- --include=pkg/windows/req.txt
- --include=pkg/windows/req_win.txt
- --include=requirements/base.txt
- --include=requirements/raet.txt
- --include=requirements/pytest.txt
- --remove-line=^pycrypto==(.*)$
- --remove-line=^enum34==(.*)$
- id: pip-tools-compile - id: pip-tools-compile
alias: compile-cloud-py3.5-requirements alias: compile-cloud-py3.5-requirements
@@ -264,26 +149,24 @@ repos:
- id: pip-tools-compile - id: pip-tools-compile
alias: compile-linux-py3.6-zmq-requirements alias: compile-linux-py3.6-zmq-requirements
name: Linux Py3.6 ZeroMQ Requirements name: Linux Py3.6 ZeroMQ Requirements
files: ^requirements/((base|zeromq|pytest)\.txt|static/(.*)\.in)$ files: ^requirements/((base|zeromq|pytest)\.txt|static/linux\.in)$
exclude: ^requirements/static/(centos-6|amzn-2018\.03|lint|cloud|docs|osx|windows)\.in$ exclude: ^requirements/static/(centos-6|amzn-2018\.03|lint|cloud|docs|darwin|windows)\.in$
args: args:
- -v - -v
- --py-version=3.6 - --py-version=3.6
- --platform=linux - --platform=linux
- --out-prefix=zeromq
- --include=requirements/base.txt - --include=requirements/base.txt
- --include=requirements/zeromq.txt - --include=requirements/zeromq.txt
- --include=requirements/pytest.txt - --include=requirements/pytest.txt
- --remove-line=^pycrypto==(.*)$ - --remove-line=^pycrypto==(.*)$
- id: pip-tools-compile - id: pip-tools-compile
alias: compile-osx-py3.6-zmq-requirements alias: compile-darwin-py3.6-zmq-requirements
name: OSX Py3.6 ZeroMQ Requirements name: Darwin Py3.6 ZeroMQ Requirements
files: ^(pkg/osx/(req|req_ext)\.txt|requirements/((base|zeromq|pytest)\.txt|static/osx\.in))$ files: ^(pkg/osx/(req|req_ext)\.txt|requirements/((base|zeromq|pytest)\.txt|static/darwin\.in))$
args: args:
- -v - -v
- --py-version=3.6 - --py-version=3.6
- --platform=darwin - --platform=darwin
- --out-prefix=zeromq
- --include=pkg/osx/req.txt - --include=pkg/osx/req.txt
- --include=pkg/osx/req_ext.txt - --include=pkg/osx/req_ext.txt
- --include=requirements/base.txt - --include=requirements/base.txt
@@ -299,61 +182,12 @@ repos:
- -v - -v
- --py-version=3.6 - --py-version=3.6
- --platform=windows - --platform=windows
- --out-prefix=zeromq
- --include=pkg/windows/req.txt - --include=pkg/windows/req.txt
- --include=pkg/windows/req_win.txt - --include=pkg/windows/req_win.txt
- --include=requirements/base.txt - --include=requirements/base.txt
- --include=requirements/zeromq.txt - --include=requirements/zeromq.txt
- --include=requirements/pytest.txt - --include=requirements/pytest.txt
- --remove-line=^pycrypto==(.*)$ - --remove-line=^pycrypto==(.*)$
- id: pip-tools-compile
alias: compile-linux-py3.6-raet-requirements
name: Linux Py3.6 RAET Requirements
files: ^requirements/((base|raet|pytest)\.txt|static/(.*)\.in)$
exclude: ^requirements/static/(centos-6|amzn-2018\.03|lint|cloud|docs|osx|windows)\.in$
args:
- -v
- --py-version=3.6
- --platform=linux
- --out-prefix=raet
- --include=requirements/base.txt
- --include=requirements/raet.txt
- --include=requirements/pytest.txt
- --remove-line=^pycrypto==(.*)$
- --remove-line=^enum34==(.*)$
- id: pip-tools-compile
alias: compile-osx-py3.6-raet-requirements
name: OSX Py3.6 RAET Requirements
files: ^(pkg/osx/(req|req_ext)\.txt|requirements/((base|raet|pytest)\.txt|static/osx\.in))$
args:
- -v
- --py-version=3.6
- --platform=darwin
- --out-prefix=raet
- --include=pkg/osx/req.txt
- --include=pkg/osx/req_ext.txt
- --include=requirements/base.txt
- --include=requirements/raet.txt
- --include=requirements/pytest.txt
- --remove-line=^pycrypto==(.*)$
- --remove-line=^enum34==(.*)$
- --passthrough-line-from-input=^pyobjc(.*)$
- id: pip-tools-compile
alias: compile-windows-py3.6-raet-requirements
name: Windows Py3.6 RAET Requirements
files: ^(pkg/windows/(req|req_win)\.txt|requirements/((base|raet|pytest)\.txt|static/windows\.in))$
args:
- -v
- --py-version=3.6
- --platform=windows
- --out-prefix=raet
- --include=pkg/windows/req.txt
- --include=pkg/windows/req_win.txt
- --include=requirements/base.txt
- --include=requirements/raet.txt
- --include=requirements/pytest.txt
- --remove-line=^pycrypto==(.*)$
- --remove-line=^enum34==(.*)$
- id: pip-tools-compile - id: pip-tools-compile
alias: compile-cloud-py3.6-requirements alias: compile-cloud-py3.6-requirements
@@ -375,26 +209,24 @@ repos:
- id: pip-tools-compile - id: pip-tools-compile
alias: compile-linux-py3.7-zmq-requirements alias: compile-linux-py3.7-zmq-requirements
name: Linux Py3.7 ZeroMQ Requirements name: Linux Py3.7 ZeroMQ Requirements
files: ^requirements/((base|zeromq|pytest)\.txt|static/(.*)\.in)$ files: ^requirements/((base|zeromq|pytest)\.txt|static/linux\.in)$
exclude: ^requirements/static/(centos-6|amzn-2018\.03|lint|cloud|docs|osx|windows)\.in$ exclude: ^requirements/static/(centos-6|amzn-2018\.03|lint|cloud|docs|darwin|windows)\.in$
args: args:
- -v - -v
- --py-version=3.7 - --py-version=3.7
- --platform=linux - --platform=linux
- --out-prefix=zeromq
- --include=requirements/base.txt - --include=requirements/base.txt
- --include=requirements/zeromq.txt - --include=requirements/zeromq.txt
- --include=requirements/pytest.txt - --include=requirements/pytest.txt
- --remove-line=^pycrypto==(.*)$ - --remove-line=^pycrypto==(.*)$
- id: pip-tools-compile - id: pip-tools-compile
alias: compile-osx-py3.7-zmq-requirements alias: compile-darwin-py3.7-zmq-requirements
name: OSX Py3.7 ZeroMQ Requirements name: Darwin Py3.7 ZeroMQ Requirements
files: ^(pkg/osx/(req|req_ext)\.txt|requirements/((base|zeromq|pytest)\.txt|static/osx\.in))$ files: ^(pkg/osx/(req|req_ext)\.txt|requirements/((base|zeromq|pytest)\.txt|static/darwin\.in))$
args: args:
- -v - -v
- --py-version=3.7 - --py-version=3.7
- --platform=darwin - --platform=darwin
- --out-prefix=zeromq
- --include=pkg/osx/req.txt - --include=pkg/osx/req.txt
- --include=pkg/osx/req_ext.txt - --include=pkg/osx/req_ext.txt
- --include=requirements/base.txt - --include=requirements/base.txt
@@ -410,61 +242,12 @@ repos:
- -v - -v
- --py-version=3.7 - --py-version=3.7
- --platform=windows - --platform=windows
- --out-prefix=zeromq
- --include=pkg/windows/req.txt - --include=pkg/windows/req.txt
- --include=pkg/windows/req_win.txt - --include=pkg/windows/req_win.txt
- --include=requirements/base.txt - --include=requirements/base.txt
- --include=requirements/zeromq.txt - --include=requirements/zeromq.txt
- --include=requirements/pytest.txt - --include=requirements/pytest.txt
- --remove-line=^pycrypto==(.*)$ - --remove-line=^pycrypto==(.*)$
- id: pip-tools-compile
alias: compile-linux-py3.7-raet-requirements
name: Linux Py3.7 RAET Requirements
files: ^requirements/((base|raet|pytest)\.txt|static/(.*)\.in)$
exclude: ^requirements/static/(centos-6|amzn-2018\.03|lint|cloud|docs|osx|windows)\.in$
args:
- -v
- --py-version=3.7
- --platform=linux
- --out-prefix=raet
- --include=requirements/base.txt
- --include=requirements/raet.txt
- --include=requirements/pytest.txt
- --remove-line=^pycrypto==(.*)$
- --remove-line=^enum34==(.*)$
- id: pip-tools-compile
alias: compile-osx-py3.7-raet-requirements
name: OSX Py3.7 RAET Requirements
files: ^(pkg/osx/(req|req_ext)\.txt|requirements/((base|raet|pytest)\.txt|static/osx\.in))$
args:
- -v
- --py-version=3.7
- --platform=darwin
- --out-prefix=raet
- --include=pkg/osx/req.txt
- --include=pkg/osx/req_ext.txt
- --include=requirements/base.txt
- --include=requirements/raet.txt
- --include=requirements/pytest.txt
- --remove-line=^pycrypto==(.*)$
- --remove-line=^enum34==(.*)$
- --passthrough-line-from-input=^pyobjc(.*)$
- id: pip-tools-compile
alias: compile-windows-py3.7-raet-requirements
name: Windows Py3.7 RAET Requirements
files: ^(pkg/windows/(req|req_win)\.txt|requirements/((base|raet|pytest)\.txt|static/windows\.in))$
args:
- -v
- --py-version=3.7
- --platform=windows
- --out-prefix=raet
- --include=pkg/windows/req.txt
- --include=pkg/windows/req_win.txt
- --include=requirements/base.txt
- --include=requirements/raet.txt
- --include=requirements/pytest.txt
- --remove-line=^pycrypto==(.*)$
- --remove-line=^enum34==(.*)$
- id: pip-tools-compile - id: pip-tools-compile
alias: compile-cloud-py3.7-requirements alias: compile-cloud-py3.7-requirements

CHANGELOG.md Normal file
View file

@@ -0,0 +1,56 @@
# Changelog
All notable changes to Salt will be documented in this file.
This changelog follows [keepachangelog](https://keepachangelog.com/en/1.0.0/) format, and is intended for human consumption.
This project's versioning is *similar* to [Semantic Versioning](https://semver.org), and is documented in [SEP 14](https://github.com/saltstack/salt-enhancement-proposals/pull/20/files).
Versions are `MAJOR.PATCH`.
## Unreleased (Neon)
### Removed
- [#54943](https://github.com/saltstack/salt/pull/54943) - RAET transport method has been removed per the deprecation schedule - [@s0undt3ch](https://github.com/s0undt3ch)
### Deprecated
### Changed
- [SEP 14](https://github.com/saltstack/salt-enhancement-proposals/pull/20) - Changed to numeric versions.
- [SEP 1](https://github.com/saltstack/salt-enhancement-proposals/blob/master/accepted/0001-changelog-format.md), [SEP 14](https://github.com/saltstack/salt-enhancement-proposals/pull/20) - Adopted keepachangelog format.
### Fixed
### Added
- [#54917](https://github.com/saltstack/salt/pull/54917) - Added get_settings, put_settings and flush_synced methods for Elasticsearch module. - [@Oloremo](https://github.com/Oloremo)
---
## [2019.2.2]
### Changed
- [#54758](https://github.com/saltstack/salt/issues/54758) - Missing sls file during `state.show_states` displays message instead of failing - [@Ch3LL](https://github.com/Ch3LL)
### Fixed
- [#54521](https://github.com/saltstack/salt/issues/54521) - `failhard` during orchestration now fails as expected - [@mattp-](https://github.com/mattp-) / [@Oloremo](https://github.com/Oloremo)
- [#54741](https://github.com/saltstack/salt/issues/54741) - `schedule.run_job` without time element now works as expected - [@garethgreenaway](https://github.com/garethgreenaway)
- [#54755](https://github.com/saltstack/salt/issues/54755) - Pip state ensures pip was imported before trying to remove - [@dwoz](https://github.com/dwoz)
- [#54760](https://github.com/saltstack/salt/issues/54760) - Fix `salt-cloud -Q` for OpenStack driver - [@vdloo](https://github.com/vdloo) / [@Akm0d](https://github.com/Akm0d)
- [#54762](https://github.com/saltstack/salt/issues/54762) - IPv6 addresses with brackets no longer break master/minion communication - [@dhiltonp](https://github.com/dhiltonp)
- [#54765](https://github.com/saltstack/salt/issues/54765) - Masterless jinja imports - [@dwoz](https://github.com/dwoz)
- [#54776](https://github.com/saltstack/salt/issues/54776) - `ping_interval` in minion config no longer prevents startup - [@dwoz](https://github.com/dwoz)
- [#54820](https://github.com/saltstack/salt/issues/54820) - `scheduler.present` no longer always reports changes when scheduler is disabled - [@garethgreenaway](https://github.com/garethgreenaway)
- [#54941](https://github.com/saltstack/salt/issues/54941) - Pillar data is no longer refreshed on every call - [@dwoz](https://github.com/dwoz)
### Added
- [#54919](https://github.com/saltstack/salt/pull/54919) - Added missing `win_wusa` state and module docs - [@twangboy](https://github.com/twangboy)
## [2019.2.1] - 2019-09-25 [YANKED]
- See [old release notes](https://docs.saltstack.com/en/latest/topics/releases/2019.2.1.html)

View file

@@ -155,15 +155,6 @@ ZeroMQ Transport:
pip install -e . pip install -e .
RAET Transport:
.. code-block:: bash
pip install -r requirements/raet.txt
pip install psutil
pip install -e .
Running a self-contained development version Running a self-contained development version
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

View file

@@ -60,7 +60,6 @@ services`_ offerings.
* Facebook - `<https://www.facebook.com/SaltStack/>`_ * Facebook - `<https://www.facebook.com/SaltStack/>`_
* LinkedIn - `<https://www.linkedin.com/company/salt-stack-inc>`_ * LinkedIn - `<https://www.linkedin.com/company/salt-stack-inc>`_
* LinkedIn Group - `<https://www.linkedin.com/groups/4877160>`_ * LinkedIn Group - `<https://www.linkedin.com/groups/4877160>`_
* Google+ - `<https://plus.google.com/b/112856352920437801867/+SaltStackInc/posts>`_
.. _global community: http://www.meetup.com/pro/saltstack/ .. _global community: http://www.meetup.com/pro/saltstack/
.. _SaltConf: http://saltconf.com/ .. _SaltConf: http://saltconf.com/

View file

@@ -391,7 +391,7 @@
#mine_interval: 60 #mine_interval: 60
# Windows platforms lack posix IPC and must rely on slower TCP based inter- # Windows platforms lack posix IPC and must rely on slower TCP based inter-
# process communications. Set ipc_mode to 'tcp' on such systems # process communications. ipc_mode is set to 'tcp' on such systems.
#ipc_mode: ipc #ipc_mode: ipc
# Overwrite the default tcp ports used by the minion when ipc_mode is set to 'tcp' # Overwrite the default tcp ports used by the minion when ipc_mode is set to 'tcp'

View file

@@ -1,79 +0,0 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
:codeauthor: Pedro Algarvio (pedro@algarvio.me)
compile-translation-catalogs
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Compile the existing translation catalogs.
'''
# Import python libs
import os
import sys
import fnmatch
# Import 3rd-party libs
HAS_BABEL = False
try:
from babel.messages import mofile, pofile
HAS_BABEL = True
except ImportError:
try:
import polib
except ImportError:
print(
'You need to install either babel or pofile in order to compile '
'the message catalogs. One of:\n'
' pip install babel\n'
' pip install polib'
)
sys.exit(1)
DOC_DIR = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
LOCALES_DIR = os.path.join(DOC_DIR, 'locale')
def main():
'''
Run the compile code
'''
print('Gathering the translation catalogs to compile...'),
sys.stdout.flush()
entries = {}
for locale in os.listdir(os.path.join(LOCALES_DIR)):
if locale == 'pot':
continue
locale_path = os.path.join(LOCALES_DIR, locale)
entries[locale] = []
for dirpath, _, filenames in os.walk(locale_path):
for filename in fnmatch.filter(filenames, '*.po'):
entries[locale].append(os.path.join(dirpath, filename))
print('DONE')
for locale, po_files in sorted(entries.items()):
lc_messages_path = os.path.join(LOCALES_DIR, locale, 'LC_MESSAGES')
print('\nCompiling the \'{0}\' locale:'.format(locale))
for po_file in sorted(po_files):
relpath = os.path.relpath(po_file, lc_messages_path)
print ' {0}.po -> {0}.mo'.format(relpath.split('.po', 1)[0])
if HAS_BABEL:
catalog = pofile.read_po(open(po_file))
mofile.write_mo(
open(po_file.replace('.po', '.mo'), 'wb'), catalog
)
continue
catalog = polib.pofile(po_file)
catalog.save_as_mofile(fpath=po_file.replace('.po', '.mo'))
print('Done')
if __name__ == '__main__':
main()

View file

@@ -1,53 +0,0 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
:codeauthor: Pedro Algarvio (pedro@algarvio.me)
download-translation-catalog
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Download a translation catalog from Transifex.
'''
# Import python libs
import os
import sys
# Import 3rd-party libs
try:
import txclib.utils
except ImportError:
print(
'The \'transifex-client\' library needs to be installed. '
'Please execute one of \'pip install transifex-client\' or '
'\'easy_install transifex-client\''
)
sys.exit(1)
DOC_DIR = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
LOCALES_DIR = os.path.join(DOC_DIR, 'locale')
def main():
'''
Run the compile code
'''
os.chdir(DOC_DIR)
tx_root = txclib.utils.find_dot_tx()
if len(sys.argv) < 2:
print('You need to pass a locale to this script. For example: '
'pt_PT, zh_CN, ru, etc...')
sys.exit(1)
for locale in sys.argv[1:]:
print('Download \'{0}\' translations catalog...'.format(locale))
txclib.utils.exec_command('pull', ['-l', locale], tx_root)
print('Done')
if __name__ == '__main__':
main()

View file

@@ -1,223 +0,0 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
:codeauthor: Pedro Algarvio (pedro@algarvio.me)
update-transifex-source-translations
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Update the transifex sources configuration file and push the source
'''
# Import python libs
import os
import sys
import time
import logging
import subprocess
import ConfigParser
try:
import txclib.utils
except ImportError:
sys.stdout.write(
'The \'transifex-client\' library needs to be installed. '
'Please execute one of \'pip install transifex-client\' or '
'\'easy_install transifex-client\'\n'
)
sys.exit(1)
DOC_DIR = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
def main():
'''
Run the update code
'''
os.chdir(DOC_DIR)
sys.stdout.write('Extracting translatable strings....\n')
try:
subprocess.check_call(['make', 'gettext'])
except subprocess.CalledProcessError as exc:
sys.stdout.write('An error occurred while extracting the translation '
'strings: {0}\n'.format(exc))
sys.exit(1)
locale_dir = os.path.join(DOC_DIR, 'locale')
pot_dir = os.path.join(DOC_DIR, '_build', 'locale')
tx_root = txclib.utils.find_dot_tx()
tx_config = os.path.join(tx_root, '.tx', 'config')
if not tx_root:
sys.stdout.write(
'Unable to find the \'.tx/\' directory. Unable to continue\n'
)
sys.exit(1)
# We do not want the txclib INFO or WARNING logging
logging.getLogger('txclib').setLevel(logging.ERROR)
sys.stdout.write('Gathering the translation template files...')
sys.stdout.flush()
entries = []
for dirpath, dirnames, filenames in os.walk(pot_dir):
for filename in filenames:
pot_file = os.path.join(dirpath, filename)
base, ext = os.path.splitext(pot_file)
if ext != '.pot':
continue
resource_path = os.path.relpath(base, pot_dir)
try:
import babel.messages.pofile
if not len(babel.messages.pofile.read_po(open(pot_file))):
# Empty pot file, continue
continue
except ImportError:
# No babel package, let's keep on going
pass
resource_name = resource_path.replace(
'\\', '/').replace('/', '--').replace('.', '_')
entries.append((resource_path, resource_name))
sys.stdout.write('Done\n')
# Let's load the resources already present in the configuration file
cfg = ConfigParser.SafeConfigParser()
cfg.read([tx_config])
handled_resources = set(
section for section in
cfg.sections() if section.startswith('salt.')
)
sys.stdout.write('Updating the entries in \'.tx/config\'...\n')
sys.stdout.flush()
total_entries = len(entries)
for idx, (resource_path, resource_name) in enumerate(sorted(entries)):
sys.stdout.write(
'[{0:>{pad}}/{1}] Updating resource for '
'{resource_path}.pot ({resource_name})'.format(
idx + 1,
total_entries,
pad=len(str(total_entries)),
locale_dir=locale_dir,
resource_name=resource_name,
resource_path=resource_path
)
)
sys.stdout.flush()
try:
txclib.utils.exec_command(
'set',
'--auto-local -r salt.{resource_name} '
'{locale_dir}/<lang>/LC_MESSAGES/{resource_path}.po '
'--source-lang en '
'--source-file {pot_dir}/{resource_path}.pot '
'--source-name {resource_path}.rst '
'--execute'.format(
resource_name=resource_name,
resource_path=resource_path,
locale_dir=locale_dir,
pot_dir=pot_dir.rstrip('/')
).split(),
tx_root
)
sys.stdout.write('\n')
if 'salt.{0}'.format(resource_name) in handled_resources:
handled_resources.remove('salt.{0}'.format(resource_name))
except Exception as err:
sys.stdout.write('An error occurred: {0}\n'.format(err))
except KeyboardInterrupt:
sys.stdout.write('\n')
sys.exit(1)
time.sleep(0.025)
if handled_resources:
non_handled_resources = len(handled_resources)
sys.stdout.write(
'Removing old resources from configuration and upstream'
'(if possible)\n'
)
for idx, resource_name in enumerate(sorted(handled_resources)):
sys.stdout.write(
'[{0:>{pad}}/{1}] Removing resource \'{resource_name}\''.format(
idx + 1,
non_handled_resources,
pad=len(str(non_handled_resources)),
resource_name=resource_name,
)
)
sys.stdout.flush()
try:
txclib.utils.exec_command(
'delete',
['-r', resource_name],
tx_root
)
handled_resources.remove(resource_name)
except Exception as err:
sys.stdout.write('An error occurred: {0}\n'.format(err))
finally:
if cfg.has_section(resource_name):
cfg.remove_section(resource_name)
sys.stdout.write('\n')
time.sleep(0.025)
cfg.write(open(tx_config, 'w'))
sys.stdout.write('\n')
# Set the translations file type we're using
txclib.utils.exec_command('set', ['-t', 'PO'], tx_root)
time.sleep(0.025)
if 'TRANSIFEX_NO_PUSH' not in os.environ:
sys.stdout.write('\n')
sys.stdout.write('Pushing translation template files...\n')
for idx, (resource_path, resource_name) in enumerate(sorted(entries)):
sys.stdout.write(
'[{0:>{pad}}/{1}] Pushing resource for '
'{resource_path}.pot ({resource_name})'.format(
idx + 1,
total_entries,
pad=len(str(total_entries)),
locale_dir=locale_dir,
resource_name=resource_name,
resource_path=resource_path
)
)
sys.stdout.flush()
try:
txclib.utils.exec_command(
'push',
'--resource salt.{resource_name} '
'--source '
'--skip '
'--no-interactive'.format(
resource_name=resource_name,
resource_path=resource_path,
locale_dir=locale_dir
).split(),
tx_root
)
sys.stdout.write('\n')
except Exception as err:
sys.stdout.write('An error occurred: {0}\n'.format(err))
except KeyboardInterrupt:
sys.stdout.write('\n')
sys.exit(1)
time.sleep(0.025)
if handled_resources:
sys.stdout.write('=' * 80)
sys.stdout.write(
'\nDon\'t forget to delete the following remote resources:\n')
for resource_name in sorted(handled_resources):
sys.stdout.write(' {0}\n'.format(resource_name))
sys.stdout.write('=' * 80)
sys.stdout.write('\nDONE\n')
if __name__ == '__main__':
main()

File diff suppressed because it is too large

View file

@@ -9,29 +9,14 @@ BUILDDIR = _build
SPHINXLANG = SPHINXLANG =
XELATEX = xelatex XELATEX = xelatex
# ----- Translations Support ------------------------------------------------>
# If language is set, also set translation options
ifeq ($(shell [ "x$(SPHINXLANG)" != "x" ] && echo 0 || echo 1), 0)
TRANSLATIONOPTS = -D language='$(SPHINXLANG)'
else
TRANSLATIONOPTS =
endif
# Reset settings if sphinx-intl is not available
ifeq ($(shell which sphinx-intl >/dev/null 2>&1; echo $$?), 1)
SPHINXLANG =
TRANSLATIONOPTS =
endif
# <---- Translations Support -------------------------------------------------
# Internal variables. # Internal variables.
PAPEROPT_a4 = -D latex_paper_size=a4 PAPEROPT_a4 = -D latex_paper_size=a4
PAPEROPT_letter = -D latex_paper_size=letter PAPEROPT_letter = -D latex_paper_size=letter
ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(TRANSLATIONOPTS) $(SPHINXOPTS) . ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
# the i18n builder cannot share the environment and doctrees with the others # the i18n builder cannot share the environment and doctrees with the others
I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
.PHONY: help clean check_sphinx-build html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext translations download-translations .PHONY: help clean check_sphinx-build html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest
help: help:
@echo "Please use \`make <target>' where <target> is one of" @echo "Please use \`make <target>' where <target> is one of"
@@ -53,7 +38,6 @@ help:
@echo " man to make manual pages" @echo " man to make manual pages"
@echo " texinfo to make Texinfo files" @echo " texinfo to make Texinfo files"
@echo " info to make Texinfo files and run them through makeinfo" @echo " info to make Texinfo files and run them through makeinfo"
@echo " gettext to make PO message catalogs"
@echo " changes to make an overview of all changed/added/deprecated items" @echo " changes to make an overview of all changed/added/deprecated items"
@echo " xml to make Docutils-native XML files" @echo " xml to make Docutils-native XML files"
@echo " pseudoxml to make pseudoxml-XML files for display purposes" @echo " pseudoxml to make pseudoxml-XML files for display purposes"
@@ -68,38 +52,38 @@ clean:
check_sphinx-build: check_sphinx-build:
@which $(SPHINXBUILD) >/dev/null 2>&1 || (echo "The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://www.sphinx-doc.org/en/master/)" >&2; false) @which $(SPHINXBUILD) >/dev/null 2>&1 || (echo "The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://www.sphinx-doc.org/en/master/)" >&2; false)
html: check_sphinx-build translations html: check_sphinx-build
$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
@echo @echo
@echo "Build finished. The HTML pages are in $(BUILDDIR)/html." @echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
dirhtml: check_sphinx-build translations dirhtml: check_sphinx-build
$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
@echo @echo
@echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
singlehtml: check_sphinx-build translations singlehtml: check_sphinx-build
$(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
@echo @echo
@echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
pickle: check_sphinx-build translations pickle: check_sphinx-build
$(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
@echo @echo
@echo "Build finished; now you can process the pickle files." @echo "Build finished; now you can process the pickle files."
json: check_sphinx-build translations json: check_sphinx-build
$(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
@echo @echo
@echo "Build finished; now you can process the JSON files." @echo "Build finished; now you can process the JSON files."
htmlhelp: check_sphinx-build translations htmlhelp: check_sphinx-build
$(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
@echo @echo
@echo "Build finished; now you can run HTML Help Workshop with the" \ @echo "Build finished; now you can run HTML Help Workshop with the" \
".hhp project file in $(BUILDDIR)/htmlhelp." ".hhp project file in $(BUILDDIR)/htmlhelp."
qthelp: check_sphinx-build translations qthelp: check_sphinx-build
$(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
@echo @echo
@echo "Build finished; now you can run "qcollectiongenerator" with the" \ @echo "Build finished; now you can run "qcollectiongenerator" with the" \
@@ -108,7 +92,7 @@ qthelp: check_sphinx-build translations
@echo "To view the help file:" @echo "To view the help file:"
@echo "# assistant -collectionFile $(BUILDDIR)/qthelp/Salt.qhc" @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/Salt.qhc"
devhelp: check_sphinx-build translations devhelp: check_sphinx-build
$(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
@echo @echo
@echo "Build finished." @echo "Build finished."
@@ -117,31 +101,31 @@ devhelp: check_sphinx-build translations
@echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/Salt" @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/Salt"
@echo "# devhelp" @echo "# devhelp"
epub: check_sphinx-build translations epub: check_sphinx-build
$(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
@echo @echo
@echo "Build finished. The epub file is in $(BUILDDIR)/epub." @echo "Build finished. The epub file is in $(BUILDDIR)/epub."
latex: check_sphinx-build translations latex: check_sphinx-build
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
@echo @echo
@echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
@echo "Run \`make' in that directory to run these through (pdf)latex" \ @echo "Run \`make' in that directory to run these through (pdf)latex" \
"(use \`make latexpdf' here to do that automatically)." "(use \`make latexpdf' here to do that automatically)."
latexpdf: check_sphinx-build translations latexpdf: check_sphinx-build
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
@echo "Running LaTeX files through pdflatex..." @echo "Running LaTeX files through pdflatex..."
$(MAKE) -C $(BUILDDIR)/latex all-pdf $(MAKE) -C $(BUILDDIR)/latex all-pdf
@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
latexpdfja: check_sphinx-build translations latexpdfja: check_sphinx-build
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
@echo "Running LaTeX files through platex and dvipdfmx..." @echo "Running LaTeX files through platex and dvipdfmx..."
$(MAKE) -C $(BUILDDIR)/latex all-pdf-ja $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja
@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
pdf: check_sphinx-build translations pdf: check_sphinx-build
@if [ "$(XELATEX)" = "xelatex" ] || [ "x$(XELATEX)" = "x" ]; then \ @if [ "$(XELATEX)" = "xelatex" ] || [ "x$(XELATEX)" = "x" ]; then \
echo "The '$(XELATEX)' command was not found."; \ echo "The '$(XELATEX)' command was not found."; \
fi fi
@@ -150,40 +134,35 @@ pdf: check_sphinx-build translations
$(MAKE) -C $(BUILDDIR)/latex -i "PDFLATEX=latexmk" "LATEXMKOPTS=-xelatex -interaction=nonstopmode -f -quiet" $(MAKE) -C $(BUILDDIR)/latex -i "PDFLATEX=latexmk" "LATEXMKOPTS=-xelatex -interaction=nonstopmode -f -quiet"
@echo "xelatex finished; the PDF files are in $(BUILDDIR)/latex." @echo "xelatex finished; the PDF files are in $(BUILDDIR)/latex."
cheatsheet: translations cheatsheet:
@echo "Running cheatsheet/salt.tex file through xelatex..." @echo "Running cheatsheet/salt.tex file through xelatex..."
cd cheatsheet && xelatex salt.tex && cp salt.pdf ../salt-cheatsheet.pdf cd cheatsheet && xelatex salt.tex && cp salt.pdf ../salt-cheatsheet.pdf
@echo "./salt-cheatsheet.pdf created." @echo "./salt-cheatsheet.pdf created."
text: check_sphinx-build translations text: check_sphinx-build
$(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
@echo @echo
@echo "Build finished. The text files are in $(BUILDDIR)/text." @echo "Build finished. The text files are in $(BUILDDIR)/text."
man: check_sphinx-build translations man: check_sphinx-build
$(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
@echo @echo
@echo "Build finished. The manual pages are in $(BUILDDIR)/man." @echo "Build finished. The manual pages are in $(BUILDDIR)/man."
texinfo: check_sphinx-build translations texinfo: check_sphinx-build
$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
@echo @echo
@echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
@echo "Run \`make' in that directory to run these through makeinfo" \ @echo "Run \`make' in that directory to run these through makeinfo" \
"(use \`make info' here to do that automatically)." "(use \`make info' here to do that automatically)."
info: check_sphinx-build translations info: check_sphinx-build
$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
@echo "Running Texinfo files through makeinfo..." @echo "Running Texinfo files through makeinfo..."
make -C $(BUILDDIR)/texinfo info make -C $(BUILDDIR)/texinfo info
@echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."
gettext: check_sphinx-build changes: check_sphinx-build
$(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
@echo
@echo "Build finished. The message catalogs are in $(BUILDDIR)/locale"
changes: check_sphinx-build translations
$(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
@echo @echo
@echo "The overview file is in $(BUILDDIR)/changes." @echo "The overview file is in $(BUILDDIR)/changes."
@@ -205,27 +184,12 @@ doctest: check_sphinx-build
@echo "Testing of doctests in the sources finished, look at the " \ @echo "Testing of doctests in the sources finished, look at the " \
"results in $(BUILDDIR)/doctest/output.txt." "results in $(BUILDDIR)/doctest/output.txt."
xml: check_sphinx-build translations xml: check_sphinx-build
$(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml
@echo @echo
@echo "Build finished. The XML files are in $(BUILDDIR)/xml." @echo "Build finished. The XML files are in $(BUILDDIR)/xml."
pseudoxml: check_sphinx-build translations pseudoxml: check_sphinx-build
$(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml
@echo @echo
@echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml."
translations:
@if [ "$(SPHINXLANG)" = "en" ] || [ "x$(SPHINXLANG)" = "x" ]; then \
echo "No need to update translations. Skipping..."; \
elif [ ! -d locale/$(SPHINXLANG) ]; then \
echo "The locale directory for $(SPHINXLANG) does not exist"; \
exit 1; \
else \
echo "Compiling exising message catalog for '$(SPHINXLANG)'"; \
.scripts/compile-translation-catalogs; \
fi
download-translations:
@echo "Downloading $(SPHINXLANG) translations"
.scripts/download-translation-catalog $(SPHINXLANG)

doc/_ext/saltrepo.py Normal file
View file

@@ -0,0 +1,24 @@
# -*- coding: utf-8 -*-
'''
saltrepo
~~~~~~~~
SaltStack Repository Sphinx directives
'''
def source_read_handler(app, docname, source):
if '|repo_primary_branch|' in source[0]:
source[0] = source[0].replace(
'|repo_primary_branch|',
app.config.html_context['repo_primary_branch']
)
def setup(app):
app.connect('source-read', source_read_handler)
return {
'version': 'builtin',
'parallel_read_safe': True,
'parallel_write_safe': True,
}
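For illustration, here is a minimal, standalone sketch (not part of this commit) of how the source-read handler in the new doc/_ext/saltrepo.py substitutes the |repo_primary_branch| placeholder; FakeApp is a hypothetical stand-in for the Sphinx application object that normally supplies config.html_context during the 'source-read' event:

# Sketch only: exercise the same substitution that saltrepo.py performs.
# FakeApp is a hypothetical stand-in for the real Sphinx application.
class FakeApp:
    class config:
        html_context = {'repo_primary_branch': 'master'}

def source_read_handler(app, docname, source):
    # Same logic as doc/_ext/saltrepo.py: replace the placeholder with the
    # branch name configured in html_context.
    if '|repo_primary_branch|' in source[0]:
        source[0] = source[0].replace(
            '|repo_primary_branch|',
            app.config.html_context['repo_primary_branch']
        )

source = ['See https://github.com/saltstack/salt/tree/|repo_primary_branch|/doc/']
source_read_handler(FakeApp(), 'topics/development/index', source)
print(source[0])  # -> https://github.com/saltstack/salt/tree/master/doc/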

View file

@@ -264,20 +264,6 @@
<script type="text/javascript" language="javascript">llactid=23943</script> <script type="text/javascript" language="javascript">llactid=23943</script>
<script type="text/javascript" language="javascript" src="http://t6.trackalyzer.com/trackalyze.js"></script> <script type="text/javascript" language="javascript" src="http://t6.trackalyzer.com/trackalyze.js"></script>
<script>
var _gaq = _gaq || [];
var pluginUrl = '//www.google-analytics.com/plugins/ga/inpage_linkid.js';
_gaq.push(['_require', 'inpage_linkid', pluginUrl]);
_gaq.push(['_setAccount', 'UA-26984928-1']);
_gaq.push(['_setDomainName', 'saltstack.com']);
_gaq.push(['_trackPageview']);
(function() {
var ga = document.createElement('script'); ga.type = 'text/javascript'; ga.async = true;
ga.src = ('https:' == document.location.protocol ? 'https://ssl' : 'http://www') + '.google-analytics.com/ga.js';
var s = document.getElementsByTagName('script')[0]; s.parentNode.insertBefore(ga, s);
})();
</script>
{% endif %} {% endif %}
</body> </body>
</html> </html>

View file

@@ -52,7 +52,8 @@
SEARCH_CX: '{{ search_cx }}', SEARCH_CX: '{{ search_cx }}',
COLLAPSE_INDEX: false, COLLAPSE_INDEX: false,
FILE_SUFFIX: '{{ '' if no_search_suffix else file_suffix }}', FILE_SUFFIX: '{{ '' if no_search_suffix else file_suffix }}',
HAS_SOURCE: '{{ has_source|lower }}' HAS_SOURCE: '{{ has_source|lower }}',
REPO_PRIMARY_BRANCH_TAB_NAME: '{{ repo_primary_branch | capitalize }}'
}; };
</script> </script>
{%- for scriptfile in script_files %} {%- for scriptfile in script_files %}
@@ -178,15 +179,18 @@
<div class="row"> <div class="row">
<div class="col-sm-12 col-md-11 col-md-offset-1 col-lg-10 col-lg-offset-1"> <div class="col-sm-12 col-md-11 col-md-offset-1 col-lg-10 col-lg-offset-1">
<nav class="pull-left text-muted"> <nav class="pull-left text-muted">
<a href="https://github.com/saltstack/salt/edit/develop/doc/{{pagename}}.rst">Edit on GitHub</a> <a href="https://github.com/saltstack/salt/edit/{{ repo_primary_branch }}/doc/{{pagename}}.rst">Edit on GitHub</a>
</nav> </nav>
<nav id="localnav"> <nav id="localnav">
<ul class="nav navbar-nav"> <ul class="nav navbar-nav">
{%- block relbar_small %}{{ relbar() }}{% endblock %} {%- block relbar_small %}{{ relbar() }}{% endblock %}
{% if not (build_type == "develop" or build_type == "next") and on_saltstack %} {% if not (build_type == repo_primary_branch or build_type == "next") and on_saltstack %}
<li><a class="icon-dl" href="/en/pdf/Salt-{{ release }}.pdf"><img height="25" width="25" class="nolightbox" src="{{ pathto('_static/images/pdf_icon.svg', 1) }}"></a></li> <li><a class="icon-dl" href="/en/pdf/Salt-{{ release }}.pdf"><img height="25" width="25" class="nolightbox" src="{{ pathto('_static/images/pdf_icon.svg', 1) }}"></a></li>
<li><a class="icon-dl" href="/en/epub/Salt-{{ release }}.epub"><img height="25" width="18" class="nolightbox" src="{{ pathto('_static/images/epub_icon.svg', 1) }}"></a></li> <li><a class="icon-dl" href="/en/epub/Salt-{{ release }}.epub"><img height="25" width="18" class="nolightbox" src="{{ pathto('_static/images/epub_icon.svg', 1) }}"></a></li>
{% elif build_type == repo_primary_branch and on_saltstack %}
<li><a class="icon-dl" href="/en/pdf/Salt-{{ repo_primary_branch }}.pdf"><img height="25" width="25" class="nolightbox" src="{{ pathto('_static/images/pdf_icon.svg', 1) }}"></a></li>
<li><a class="icon-dl" href="/en/epub/Salt-{{ repo_primary_branch }}.epub"><img height="25" width="18" class="nolightbox" src="{{ pathto('_static/images/epub_icon.svg', 1) }}"></a></li>
{% endif %} {% endif %}
</ul> </ul>
</nav> </nav>
@@ -202,9 +206,9 @@
<div class="alert alert-warning dev-notification-text" role="alert"><i class="glyphicon glyphicon-warning-sign"></i> These release notes are for an old release of Salt. This release might contain known security and other issues that are fixed in the <a data-container="body" data-toggle="tooltip" data-placement="bottom" title="Release notes for the latest release" href="{{ release }}.html">latest release</a>.</div> <div class="alert alert-warning dev-notification-text" role="alert"><i class="glyphicon glyphicon-warning-sign"></i> These release notes are for an old release of Salt. This release might contain known security and other issues that are fixed in the <a data-container="body" data-toggle="tooltip" data-placement="bottom" title="Release notes for the latest release" href="{{ release }}.html">latest release</a>.</div>
{% elif build_type == "develop" and on_saltstack %} {% elif build_type == repo_primary_branch and on_saltstack %}
<div class="alert alert-warning dev-notification-text" role="alert"><i class="glyphicon glyphicon-warning-sign"></i> You are viewing docs from the develop branch, some of these features are not yet released.</div> <div class="alert alert-warning dev-notification-text" role="alert"><i class="glyphicon glyphicon-warning-sign"></i> You are viewing docs from the {{ repo_primary_branch }} branch, some of these features are not yet released.</div>
{% elif build_type == "inactive" and on_saltstack %} {% elif build_type == "inactive" and on_saltstack %}
@@ -244,16 +248,16 @@
<p><i>{{ today }}</i></p> <p><i>{{ today }}</i></p>
{% if build_type == "latest" %} {% if build_type == "latest" %}
<p>You are viewing docs for the latest stable release, {{ latest_release }}. Switch to docs for the previous stable release, <a data-container="body" data-toggle="tooltip" data-placement="bottom" title="Docs for the previous stable release" href="/en/{{ previous_release_dir }}/">{{ previous_release }}</a>, or to a recent doc build from the <a data-container="body" data-toggle="tooltip" data-placement="bottom" title="Latest docs from the develop branch" href="/en/develop/">develop</a> branch.</p> <p>You are viewing docs for the latest stable release, {{ latest_release }}. Switch to docs for the previous stable release, <a data-container="body" data-toggle="tooltip" data-placement="bottom" title="Docs for the previous stable release" href="/en/{{ previous_release_dir }}/">{{ previous_release }}</a>, or to a recent doc build from the <a data-container="body" data-toggle="tooltip" data-placement="bottom" title="Latest docs from the {{ repo_primary_branch }} branch" href="/en/{{ repo_primary_branch }}/">{{ repo_primary_branch }}</a> branch.</p>
{% elif build_type == "previous" %} {% elif build_type == "previous" %}
<p>You are viewing docs for the previous stable release, {{ previous_release }}. Switch to docs for the latest stable release, <a data-container="body" data-toggle="tooltip" data-placement="bottom" title="Docs for the latest stable release" href="/en/latest/">{{ latest_release }}</a>, or to a recent doc build from the <a data-container="body" data-toggle="tooltip" data-placement="bottom" title="Latest docs from the develop branch" href="/en/develop/">develop</a> branch.</p> <p>You are viewing docs for the previous stable release, {{ previous_release }}. Switch to docs for the latest stable release, <a data-container="body" data-toggle="tooltip" data-placement="bottom" title="Docs for the latest stable release" href="/en/latest/">{{ latest_release }}</a>, or to a recent doc build from the <a data-container="body" data-toggle="tooltip" data-placement="bottom" title="Latest docs from the {{ repo_primary_branch }} branch" href="/en/{{ repo_primary_branch }}/">{{ repo_primary_branch }}</a> branch.</p>
{% elif build_type == "inactive" %} {% elif build_type == "inactive" %}
<p>You are viewing docs for an inactive release, {{ previous_release }}. Switch to docs for the latest stable release, <a data-container="body" data-toggle="tooltip" data-placement="bottom" title="Docs for the latest stable release" href="/en/latest/">{{ latest_release }}</a>, or to a recent doc build from the <a data-container="body" data-toggle="tooltip" data-placement="bottom" title="Latest docs from the develop branch" href="/en/develop/">develop</a> branch.</p> <p>You are viewing docs for an inactive release, {{ previous_release }}. Switch to docs for the latest stable release, <a data-container="body" data-toggle="tooltip" data-placement="bottom" title="Docs for the latest stable release" href="/en/latest/">{{ latest_release }}</a>, or to a recent doc build from the <a data-container="body" data-toggle="tooltip" data-placement="bottom" title="Latest docs from the {{ repo_primary_branch }} branch" href="/en/{{ repo_primary_branch }}/">{{ repo_primary_branch }}</a> branch.</p>
{% elif build_type == "develop" %} {% elif build_type == repo_primary_branch %}
<p>You are viewing docs built from a recent snapshot of the develop branch. Switch to docs for the latest stable release, <a data-container="body" data-toggle="tooltip" data-placement="bottom" title="Docs for the latest stable release" href="/en/latest/">{{ latest_release }}</a>.</p> <p>You are viewing docs built from a recent snapshot of the {{ repo_primary_branch }} branch. Switch to docs for the latest stable release, <a data-container="body" data-toggle="tooltip" data-placement="bottom" title="Docs for the latest stable release" href="/en/latest/">{{ latest_release }}</a>.</p>
{% elif build_type == "next" %} {% elif build_type == "next" %}
<p>You are viewing preview docs for the next major release, {{ next_release }}. Switch to docs for the latest stable release, <a data-container="body" data-toggle="tooltip" data-placement="bottom" title="Docs for the latest stable release" href="/en/latest/">{{ latest_release }}</a>.</p> <p>You are viewing preview docs for the next major release, {{ next_release }}. Switch to docs for the latest stable release, <a data-container="body" data-toggle="tooltip" data-placement="bottom" title="Docs for the latest stable release" href="/en/latest/">{{ latest_release }}</a>.</p>
@@ -285,12 +289,12 @@
<!-- <a class="ss-logo" href="http://saltstack.com"><img width="250" height="63" class="nolightbox sidebar-logo" src="{{ pathto('_static/images/saltstack_logo.svg', 1) }}"></a> <!-- <a class="ss-logo" href="http://saltstack.com"><img width="250" height="63" class="nolightbox sidebar-logo" src="{{ pathto('_static/images/saltstack_logo.svg', 1) }}"></a>
{% if on_saltstack %} {% if on_saltstack %}
{#
{% if [True, False]|random %} {% if [True, False]|random %}
<a href="http://saltconf.com/register" target="_blank"><img class="nolightbox sidebar-banner center" src="{{ pathto('_static/images/banner-saltconf.png', 1) }}"/></a> <a href="http://saltconf.com/register" target="_blank"><img class="nolightbox sidebar-banner center" src="{{ pathto('_static/images/banner-saltconf.png', 1) }}"/></a>
{% else %} {% else %}
<a href="http://saltstack.com/support" target="_blank"><img class="nolightbox sidebar-banner center" src="{{ pathto('_static/images/banner-support.png', 1) }}"/></a> <a href="http://saltstack.com/support" target="_blank"><img class="nolightbox sidebar-banner center" src="{{ pathto('_static/images/banner-support.png', 1) }}"/></a>
{% endif %}--> {% endif %} #}-->
<a href="https://saltconf.com/menu-ad" target="_blank"><img class="nolightbox sidebar-banner center" src="https://get.saltstack.com/rs/304-PHQ-615/images/Salt-docs-menu-ad-250x63.jpg"/></a> <a href="https://saltconf.com/menu-ad" target="_blank"><img class="nolightbox sidebar-banner center" src="https://get.saltstack.com/rs/304-PHQ-615/images/Salt-docs-menu-ad-250x63.jpg"/></a>
@@ -304,11 +308,11 @@
<div class="releaselinks versions {{ build_type }}"> <div class="releaselinks versions {{ build_type }}">
<a class="btn btn-secondary{% if build_type == "previous" or build_type == "inactive" %} active{% endif %}" id="previous"{% if build_type == "previous" or build_type == "inactive" %} title="View release notes"{% else %} title="Switch to docs for the previous stable release"{% endif %} data-container="body" data-toggle="tooltip" data-placement="bottom" href="/en/{{ previous_release_dir }}/">{{ previous_release }}{% if build_type == "previous" or build_type == "inactive" %} <i class="glyphicon glyphicon-ok"></i>{%- endif %}</a> <a class="btn btn-secondary{% if build_type == "previous" or build_type == "inactive" %} active{% endif %}" id="previous"{% if build_type == "previous" or build_type == "inactive" %} title="View release notes"{% else %} title="Switch to docs for the previous stable release"{% endif %} data-container="body" data-toggle="tooltip" data-placement="bottom" href="/en/{{ previous_release_dir }}/">{{ previous_release }}{% if build_type == "previous" or build_type == "inactive" %} <i class="glyphicon glyphicon-ok"></i>{% endif %}</a>
<a class="btn btn-secondary{% if build_type == "latest" %} active{% endif %}" id="latest"{% if build_type == "latest" %} title="View release notes"{% else %} title="Switch to docs for the latest stable release"{% endif %} data-container="body" data-toggle="tooltip" data-placement="bottom" href="/en/latest/">{{ latest_release }}{% if build_type == "latest" %} <i class="glyphicon glyphicon-ok"></i>{% endif %}</a> <a class="btn btn-secondary{% if build_type == "latest" %} active{% endif %}" id="latest"{% if build_type == "latest" %} title="View release notes"{% else %} title="Switch to docs for the latest stable release"{% endif %} data-container="body" data-toggle="tooltip" data-placement="bottom" href="/en/latest/">{{ latest_release }}{% if build_type == "latest" %} <i class="glyphicon glyphicon-ok"></i>{% endif %}</a>
<a class="btn btn-secondary{% if build_type == "develop" %} active{% endif %}" id="develop"{% if build_type == "develop" %} title="View all release notes"{% endif %} title="Switch to docs built recently from the develop branch" data-container="body" data-toggle="tooltip" data-placement="bottom" href="/en/develop/">Develop{% if build_type == "develop" %} <i class="glyphicon glyphicon-ok"></i>{% endif %}</a> <a class="btn btn-secondary{% if build_type == repo_primary_branch %} active{% endif %}" id="{{ repo_primary_branch }}"{% if build_type == repo_primary_branch %} title="View all release notes"{% endif %} title="Switch to docs built recently from the {{ repo_primary_branch }} branch" data-container="body" data-toggle="tooltip" data-placement="bottom" href="/en/{{ repo_primary_branch }}/">{{ repo_primary_branch | capitalize }}{% if build_type == repo_primary_branch %} <i class="glyphicon glyphicon-ok"></i>{% endif %}</a>
</div> </div>
@@ -376,20 +380,6 @@
<script type="text/javascript" language="javascript">llactid=23943</script> <script type="text/javascript" language="javascript">llactid=23943</script>
<script type="text/javascript" language="javascript" src="https://trackalyzer.com/trackalyze_secure.js"></script> <script type="text/javascript" language="javascript" src="https://trackalyzer.com/trackalyze_secure.js"></script>
<script>
var _gaq = _gaq || [];
var pluginUrl = '//www.google-analytics.com/plugins/ga/inpage_linkid.js';
_gaq.push(['_require', 'inpage_linkid', pluginUrl]);
_gaq.push(['_setAccount', 'UA-26984928-1']);
_gaq.push(['_setDomainName', 'saltstack.com']);
_gaq.push(['_trackPageview']);
(function() {
var ga = document.createElement('script'); ga.type = 'text/javascript'; ga.async = true;
ga.src = ('https:' == document.location.protocol ? 'https://ssl' : 'http://www') + '.google-analytics.com/ga.js';
var s = document.getElementsByTagName('script')[0]; s.parentNode.insertBefore(ga, s);
})();
</script>
{% endif %} {% endif %}
</body> </body>
</html> </html>

View file

@ -125,7 +125,7 @@ $( document ).ready(function() {
window.location.href = window.location.href.replace($currentVer.attr("href"), clickedVer); window.location.href = window.location.href.replace($currentVer.attr("href"), clickedVer);
} }
else { else {
if ($currentVer.text().indexOf("Develop") == -1) { if ($currentVer.text().indexOf(DOCUMENTATION_OPTIONS.REPO_PRIMARY_BRANCH_TAB_NAME) == -1) {
window.location.href = clickedVer + "topics/releases/" + $currentVer.text().trim() + ".html"; window.location.href = clickedVer + "topics/releases/" + $currentVer.text().trim() + ".html";
} }
else window.location.href = clickedVer + "topics/releases/"; else window.location.href = clickedVer + "topics/releases/";

View file

@ -253,9 +253,9 @@ intersphinx_mapping = {
on_saltstack = 'SALT_ON_SALTSTACK' in os.environ on_saltstack = 'SALT_ON_SALTSTACK' in os.environ
project = 'Salt' project = 'Salt'
repo_primary_branch = 'master' # This is the default branch on GitHub for the Salt project
version = salt.version.__version__ version = salt.version.__version__
latest_release = '2019.2.1' # latest release latest_release = '2019.2.2' # latest release
previous_release = '2018.3.4' # latest release from previous branch previous_release = '2018.3.4' # latest release from previous branch
previous_release_dir = '2018.3' # path on web server for previous branch previous_release_dir = '2018.3' # path on web server for previous branch
next_release = '' # next release next_release = '' # next release
@ -268,22 +268,31 @@ if on_saltstack:
copyright = time.strftime("%Y") copyright = time.strftime("%Y")
# < --- START do not merge these settings to other branches START ---> # # < --- START do not merge these settings to other branches START ---> #
build_type = 'latest' # latest, previous, develop, next build_type = repo_primary_branch # latest, previous, master, next
release = latest_release
# < --- END do not merge these settings to other branches END ---> # # < --- END do not merge these settings to other branches END ---> #
# Set google custom search engine # Set google custom search engine
if release == latest_release: if build_type == repo_primary_branch:
release = latest_release
search_cx = '011515552685726825874:v1had6i279q' # master
#search_cx = '011515552685726825874:x17j5zl74g8' # develop
elif build_type == 'next':
release = next_release
search_cx = '011515552685726825874:ht0p8miksrm' # latest
elif build_type == 'previous':
release = previous_release
if release.startswith('2018.3'):
search_cx = '011515552685726825874:vadptdpvyyu' # 2018.3
elif release.startswith('2017.7'):
search_cx = '011515552685726825874:w-hxmnbcpou' # 2017.7
elif release.startswith('2016.11'):
search_cx = '011515552685726825874:dlsj745pvhq' # 2016.11
else:
search_cx = '011515552685726825874:ht0p8miksrm' # latest
else: # latest or something else
release = latest_release
search_cx = '011515552685726825874:ht0p8miksrm' # latest search_cx = '011515552685726825874:ht0p8miksrm' # latest
elif release.startswith('2018.3'):
search_cx = '011515552685726825874:vadptdpvyyu' # 2018.3
elif release.startswith('2017.7'):
search_cx = '011515552685726825874:w-hxmnbcpou' # 2017.7
elif release.startswith('2016.11'):
search_cx = '011515552685726825874:dlsj745pvhq' # 2016.11
else:
search_cx = '011515552685726825874:x17j5zl74g8' # develop
needs_sphinx = '1.3' needs_sphinx = '1.3'
@ -306,6 +315,7 @@ extensions = [
'sphinx.ext.intersphinx', 'sphinx.ext.intersphinx',
'httpdomain', 'httpdomain',
'youtube', 'youtube',
'saltrepo'
#'saltautodoc', # Must be AFTER autodoc #'saltautodoc', # Must be AFTER autodoc
#'shorturls', #'shorturls',
] ]
@ -364,7 +374,7 @@ rst_prolog = """\
# A shortcut for linking to tickets on the GitHub issue tracker # A shortcut for linking to tickets on the GitHub issue tracker
extlinks = { extlinks = {
'blob': ('https://github.com/saltstack/salt/blob/%s/%%s' % 'develop', None), 'blob': ('https://github.com/saltstack/salt/blob/%s/%%s' % repo_primary_branch, None),
'issue': ('https://github.com/saltstack/salt/issues/%s', 'issue #'), 'issue': ('https://github.com/saltstack/salt/issues/%s', 'issue #'),
'pull': ('https://github.com/saltstack/salt/pull/%s', 'PR #'), 'pull': ('https://github.com/saltstack/salt/pull/%s', 'PR #'),
'formula_url': ('https://github.com/saltstack-formulas/%s', ''), 'formula_url': ('https://github.com/saltstack-formulas/%s', ''),
@ -436,6 +446,7 @@ html_context = {
'build_type': build_type, 'build_type': build_type,
'today': today, 'today': today,
'copyright': copyright, 'copyright': copyright,
'repo_primary_branch': repo_primary_branch
} }
html_use_index = True html_use_index = True

View file

@ -392387,7 +392387,7 @@ Rbenv will be installed automatically the first time it is needed and can be
updated later. This module will \fInot\fP automatically install packages which rbenv updated later. This module will \fInot\fP automatically install packages which rbenv
will need to compile the versions of ruby. If your version of ruby fails to will need to compile the versions of ruby. If your version of ruby fails to
install, refer to the ruby\-build documentation to verify you are not missing any install, refer to the ruby\-build documentation to verify you are not missing any
dependencies: \fI\%https://github.com/sstephenson/ruby\-build/wiki\fP dependencies: \fI\%https://github.com/rbenv/ruby\-build/wiki\fP
.sp .sp
If rbenv is run as the root user then it will be installed to /usr/local/rbenv, If rbenv is run as the root user then it will be installed to /usr/local/rbenv,
otherwise it will be installed to the users ~/.rbenv directory. To make otherwise it will be installed to the users ~/.rbenv directory. To make

View file

@ -866,9 +866,8 @@ Default: ``zeromq``
Changes the underlying transport layer. ZeroMQ is the recommended transport Changes the underlying transport layer. ZeroMQ is the recommended transport
while additional transport layers are under development. Supported values are while additional transport layers are under development. Supported values are
``zeromq``, ``raet`` (experimental), and ``tcp`` (experimental). This setting has ``zeromq`` and ``tcp`` (experimental). This setting has a significant impact on
a significant impact on performance and should not be changed unless you know performance and should not be changed unless you know what you are doing!
what you are doing!
.. code-block:: yaml .. code-block:: yaml

View file

@ -769,6 +769,30 @@ Statically assigns grains to the minion.
cabinet: 13 cabinet: 13
cab_u: 14-15 cab_u: 14-15
.. conf_minion:: grains_blacklist
``grains_blacklist``
--------------------
Default: ``[]``
Each grains key will be compared against each of the expressions in this list.
Any keys which match will be filtered from the grains. Exact matches, glob
matches, and regular expressions are supported.
.. note::
Some states and execution modules depend on grains. Filtering may cause
them to be unavailable or run unreliably.
.. versionadded:: Neon
.. code-block:: yaml
grains_blacklist:
- cpu_flags
- zmq*
- ipv[46]
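The exact/glob/regex matching described above can be illustrated with a small, self-contained sketch. This is only an illustration of the documented behaviour, not Salt's internal implementation; the grain values are made up:

.. code-block:: python

    # Illustrative only -- not Salt's internal grains filtering code.
    import fnmatch
    import re

    def filter_grains(grains, blacklist):
        """Drop any grain whose key matches an exact, glob, or regex pattern."""
        filtered = {}
        for key, value in grains.items():
            blocked = any(
                key == pattern
                or fnmatch.fnmatch(key, pattern)
                or re.match(pattern, key)
                for pattern in blacklist
            )
            if not blocked:
                filtered[key] = value
        return filtered

    grains = {"cpu_flags": ["fpu", "vme"], "zmq_version": "4.3.1", "ipv4": ["127.0.0.1"], "os": "Amazon"}
    print(filter_grains(grains, ["cpu_flags", "zmq*", "ipv[46]"]))
    # Only "os" survives the blacklist above.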
.. conf_minion:: grains_cache .. conf_minion:: grains_cache
``grains_cache`` ``grains_cache``
@ -893,6 +917,20 @@ minion. Since this grain is expensive, it is disabled by default.
iscsi_grains: True iscsi_grains: True
.. conf_minion:: nvme_grains
``nvme_grains``
------------------------
Default: ``False``
The ``nvme_grains`` setting will enable the ``nvme_nqn`` grain on the
minion. Since this grain is expensive, it is disabled by default.
.. code-block:: yaml
nvme_grains: True
.. conf_minion:: mine_enabled .. conf_minion:: mine_enabled
``mine_enabled`` ``mine_enabled``
@ -1326,7 +1364,7 @@ creates a new connection for every return to the master.
Default: ``ipc`` Default: ``ipc``
Windows platforms lack POSIX IPC and must rely on slower TCP based inter- Windows platforms lack POSIX IPC and must rely on slower TCP based inter-
process communications. Set ipc_mode to ``tcp`` on such systems. process communications. ``ipc_mode`` is set to ``tcp`` on such systems.
.. code-block:: yaml .. code-block:: yaml
@ -1367,9 +1405,8 @@ Default: ``zeromq``
Changes the underlying transport layer. ZeroMQ is the recommended transport Changes the underlying transport layer. ZeroMQ is the recommended transport
while additional transport layers are under development. Supported values are while additional transport layers are under development. Supported values are
``zeromq``, ``raet`` (experimental), and ``tcp`` (experimental). This setting has ``zeromq`` and ``tcp`` (experimental). This setting has a significant impact
a significant impact on performance and should not be changed unless you know on performance and should not be changed unless you know what you are doing!
what you are doing!
.. code-block:: yaml .. code-block:: yaml

View file

@ -11,7 +11,6 @@ engine modules
:template: autosummary.rst.tmpl :template: autosummary.rst.tmpl
docker_events docker_events
hipchat
http_logstash http_logstash
ircbot ircbot
junos_syslog junos_syslog

View file

@ -1,6 +0,0 @@
salt.engines.hipchat module
===========================
.. automodule:: salt.engines.hipchat
:members:
:undoc-members:

View file

@ -44,8 +44,8 @@ will be skipped if the ``update`` function is run on the fileserver.
Backends for the fileserver are located in `salt/fileserver/`_ (the files not Backends for the fileserver are located in `salt/fileserver/`_ (the files not
named ``__init__.py``). named ``__init__.py``).
.. _`salt/fileserver/__init__.py`: https://github.com/saltstack/salt/tree/develop/salt/fileserver/__init__.py .. _`salt/fileserver/__init__.py`: https://github.com/saltstack/salt/tree/|repo_primary_branch|/salt/fileserver/__init__.py
.. _`salt/fileserver/`: https://github.com/saltstack/salt/tree/develop/salt/fileserver .. _`salt/fileserver/`: https://github.com/saltstack/salt/tree/|repo_primary_branch|/salt/fileserver
Fileclient Fileclient
---------- ----------

View file

@ -171,7 +171,6 @@ execution modules
hashutil hashutil
heat heat
hg hg
hipchat
hosts hosts
http http
ifttt ifttt
@ -362,7 +361,6 @@ execution modules
qemu_nbd qemu_nbd
quota quota
rabbitmq rabbitmq
raet_publish
rallydev rallydev
random_org random_org
rbac_solaris rbac_solaris

View file

@ -1,6 +0,0 @@
====================
salt.modules.hipchat
====================
.. automodule:: salt.modules.hipchat
:members:

View file

@ -1,6 +0,0 @@
=========================
salt.modules.raet_publish
=========================
.. automodule:: salt.modules.raet_publish
:members:

View file

@ -298,7 +298,7 @@ prevent loading if dependencies are not met.
Since ``__virtual__`` is called before the module is loaded, ``__salt__`` will Since ``__virtual__`` is called before the module is loaded, ``__salt__`` will
be unreliable as not all modules will be available at this point in time. The be unreliable as not all modules will be available at this point in time. The
``__pillar`` and ``__grains__`` :ref:`"dunder" dictionaries <dunder-dictionaries>` ``__pillar__`` and ``__grains__`` :ref:`"dunder" dictionaries <dunder-dictionaries>`
are available however. are available however.
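For example, a hypothetical execution module can gate itself on a grain inside ``__virtual__``, since ``__grains__`` is already populated at that point (a sketch only; the module name and grain check are illustrative, and the dunders are injected by the Salt loader):

.. code-block:: python

    # Hypothetical execution module -- __grains__ is injected by the loader.
    __virtualname__ = "my_module"

    def __virtual__():
        # __salt__ is not reliable here; the loader has not finished yet.
        if __grains__.get("kernel") == "Linux":
            return __virtualname__
        return (False, "my_module only runs on Linux")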
.. note:: .. note::

View file

@ -19,7 +19,6 @@ returner modules
elasticsearch_return elasticsearch_return
etcd_return etcd_return
highstate_return highstate_return
hipchat_return
influxdb_return influxdb_return
kafka_return kafka_return
librato_return librato_return

View file

@ -1,6 +0,0 @@
=============================
salt.returners.hipchat_return
=============================
.. automodule:: salt.returners.hipchat_return
:members:

View file

@ -383,4 +383,4 @@ Full List of Returners
.. toctree:: .. toctree::
all/index all/index
.. _`redis`: https://github.com/saltstack/salt/tree/develop/salt/returners/redis_return.py .. _`redis`: https://github.com/saltstack/salt/tree/|repo_primary_branch|/salt/returners/redis_return.py

View file

@ -3,7 +3,7 @@ salt.runners.mattermost
**Note for 2017.7 releases!** **Note for 2017.7 releases!**
Due to the `salt.runners.config <https://github.com/saltstack/salt/blob/develop/salt/runners/config.py>`_ module not being available in this release series, importing the `salt.runners.config <https://github.com/saltstack/salt/blob/develop/salt/runners/config.py>`_ module from the develop branch is required to make this module work. Due to the `salt.runners.config <https://github.com/saltstack/salt/blob/|repo_primary_branch|/salt/runners/config.py>`_ module not being available in this release series, importing the `salt.runners.config <https://github.com/saltstack/salt/blob/|repo_primary_branch|/salt/runners/config.py>`_ module from the |repo_primary_branch| branch is required to make this module work.
Ref: `Mattermost runner failing to retrieve config values due to unavailable config runner #43479 <https://github.com/saltstack/salt/issues/43479>`_ Ref: `Mattermost runner failing to retrieve config values due to unavailable config runner #43479 <https://github.com/saltstack/salt/issues/43479>`_

View file

@ -14,5 +14,6 @@ serializer modules
json json
msgpack msgpack
python python
toml
yaml yaml
yamlex yamlex

View file

@ -0,0 +1,6 @@
=====================
salt.serializers.toml
=====================
.. automodule:: salt.serializers.toml
:members:
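Like the other modules in ``salt.serializers``, this serializer is expected to expose ``serialize`` and ``deserialize`` functions (and it requires the third-party ``toml`` package). A hedged usage sketch under that assumption:

.. code-block:: python

    # Assumes salt.serializers.toml follows the package's common
    # serialize()/deserialize() interface and that the toml library is installed.
    from salt.serializers import toml

    data = {"master": "salt.example.com", "grains_cache": True}
    text = toml.serialize(data)            # dict -> TOML string
    assert toml.deserialize(text) == data  # TOML string -> dict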

View file

@ -115,7 +115,6 @@ state modules
group group
heat heat
hg hg
hipchat
host host
http http
icinga2 icinga2
@ -255,6 +254,7 @@ state modules
rvm rvm
salt_proxy salt_proxy
saltmod saltmod
saltutil
schedule schedule
selinux selinux
serverdensity_device serverdensity_device

View file

@ -1,6 +0,0 @@
===================
salt.states.hipchat
===================
.. automodule:: salt.states.hipchat
:members:

View file

@ -0,0 +1,6 @@
====================
salt.states.saltutil
====================
.. automodule:: salt.states.saltutil
:members:

View file

@ -5,10 +5,10 @@ Ordering States
=============== ===============
The way in which configuration management systems are executed is a hotly The way in which configuration management systems are executed is a hotly
debated topic in the configuration management world. Two debated topic in the configuration management world. Two major philosophies
major philosophies exist on the subject, to either execute in an imperative exist on the subject, to either execute in an imperative fashion where things
fashion where things are executed in the order in which they are defined, or are executed in the order in which they are defined, or in a declarative
in a declarative fashion where dependencies need to be mapped between objects. fashion where dependencies need to be mapped between objects.
Imperative ordering is finite and generally considered easier to write, but Imperative ordering is finite and generally considered easier to write, but
declarative ordering is much more powerful and flexible but generally considered declarative ordering is much more powerful and flexible but generally considered
@ -27,20 +27,17 @@ State Auto Ordering
.. versionadded: 0.17.0 .. versionadded: 0.17.0
Salt always executes states in a finite manner, meaning that they will always Salt always executes states in a finite manner, meaning that they will always
execute in the same order regardless of the system that is executing them. execute in the same order regardless of the system that is executing them. This
But in Salt 0.17.0, the ``state_auto_order`` option was added. This option evaluation order makes it easy to know what order the states will be executed in,
makes states get evaluated in the order in which they are defined in sls but it is important to note that the requisite system will override the ordering
files, including the top.sls file. defined in the files, and the ``order`` option, described below, will also
override the order in which states are executed.
The evaluation order makes it easy to know what order the states will be This ordering system can be disabled in preference of lexicographic (classic)
executed in, but it is important to note that the requisite system will ordering by setting the ``state_auto_order`` option to ``False`` in the master
override the ordering defined in the files, and the ``order`` option described configuration file. Otherwise, ``state_auto_order`` defaults to ``True``.
below will also override the order in which states are defined in sls files.
If the classic ordering is preferred (lexicographic), then set
``state_auto_order`` to ``False`` in the master configuration file. Otherwise,
``state_auto_order`` defaults to ``True``.
How compiler ordering is managed is described further in :ref:`compiler-ordering`.
.. _ordering_requisites: .. _ordering_requisites:

View file

@ -21,7 +21,7 @@ illustrate:
.. code-block:: yaml .. code-block:: yaml
/etc/salt/master: # maps to "name", unless a "name" argument is specified below /etc/salt/master: # maps to "name", unless a "name" argument is specified below
file.managed: # maps to <filename>.<function> - e.g. "managed" in https://github.com/saltstack/salt/tree/develop/salt/states/file.py file.managed: # maps to <filename>.<function> - e.g. "managed" in https://github.com/saltstack/salt/tree/|repo_primary_branch|/salt/states/file.py
- user: root # one of many options passed to the manage function - user: root # one of many options passed to the manage function
- group: root - group: root
- mode: 644 - mode: 644
@ -59,7 +59,7 @@ A well-written state function will follow these steps:
This is an extremely simplified example. Feel free to browse the `source This is an extremely simplified example. Feel free to browse the `source
code`_ for Salt's state modules to see other examples. code`_ for Salt's state modules to see other examples.
.. _`source code`: https://github.com/saltstack/salt/tree/develop/salt/states .. _`source code`: https://github.com/saltstack/salt/tree/|repo_primary_branch|/salt/states
1. Set up the return dictionary and perform any necessary input validation 1. Set up the return dictionary and perform any necessary input validation
(type checking, looking for use of mutually-exclusive arguments, etc.). (type checking, looking for use of mutually-exclusive arguments, etc.).
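A bare-bones skeleton that follows the steps described here might look like the sketch below; the function and argument names are hypothetical, and ``__opts__`` is injected by Salt:

.. code-block:: python

    def managed(name, content=None):
        """Hypothetical state function showing the conventional return dict."""
        ret = {"name": name, "result": False, "changes": {}, "comment": ""}

        # 1. Input validation.
        if content is None:
            ret["comment"] = "The 'content' argument is required"
            return ret

        # Honor test=True runs by predicting changes without applying them.
        if __opts__["test"]:
            ret["result"] = None
            ret["comment"] = "{0} would be updated".format(name)
            return ret

        # ... do the actual work here, recording what changed ...
        ret["changes"] = {name: "updated"}
        ret["result"] = True
        ret["comment"] = "{0} is in the desired state".format(name)
        return ret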

View file

@ -31,7 +31,7 @@ imports, to decide whether to load the module. In most cases, it will return a
filename, then that name should be returned instead of ``True``. An example of filename, then that name should be returned instead of ``True``. An example of
this may be seen in the Azure module: this may be seen in the Azure module:
https://github.com/saltstack/salt/tree/develop/salt/cloud/clouds/msazure.py https://github.com/saltstack/salt/tree/|repo_primary_branch|/salt/cloud/clouds/msazure.py
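A trimmed-down sketch of that pattern (the driver name is hypothetical; ``get_configured_provider()`` is the helper discussed in the next section and is assumed to be defined elsewhere in the module):

.. code-block:: python

    __virtualname__ = "mycloud"  # hypothetical driver name

    def __virtual__():
        # Only load when a matching provider is configured.
        if get_configured_provider() is False:
            return False
        # Return the name rather than True so Salt registers the driver
        # under "mycloud" even if the source filename differs.
        return __virtualname__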
The get_configured_provider() Function The get_configured_provider() Function
-------------------------------------- --------------------------------------
@ -57,7 +57,7 @@ created by the cloud host, wait for it to become available, and then
A good example to follow for writing a cloud driver module based on libcloud A good example to follow for writing a cloud driver module based on libcloud
is the module provided for Linode: is the module provided for Linode:
https://github.com/saltstack/salt/tree/develop/salt/cloud/clouds/linode.py https://github.com/saltstack/salt/tree/|repo_primary_branch|/salt/cloud/clouds/linode.py
The basic flow of a ``create()`` function is as follows: The basic flow of a ``create()`` function is as follows:
@ -183,7 +183,7 @@ imports should be absent from the Salt Cloud module.
A good example of a non-libcloud driver is the DigitalOcean driver: A good example of a non-libcloud driver is the DigitalOcean driver:
https://github.com/saltstack/salt/tree/develop/salt/cloud/clouds/digitalocean.py https://github.com/saltstack/salt/tree/|repo_primary_branch|/salt/cloud/clouds/digitalocean.py
The ``create()`` Function The ``create()`` Function
------------------------- -------------------------

View file

@ -89,7 +89,7 @@ functions include:
A good, well commented example of this process is the Fedora deployment A good, well commented example of this process is the Fedora deployment
script: script:
https://github.com/saltstack/salt/blob/develop/salt/cloud/deploy/Fedora.sh https://github.com/saltstack/salt/blob/|repo_primary_branch|/salt/cloud/deploy/Fedora.sh
A number of legacy deploy scripts are included with the release tarball. None A number of legacy deploy scripts are included with the release tarball. None
of them are as functional or complete as Salt Bootstrap, and are still included of them are as functional or complete as Salt Bootstrap, and are still included

View file

@ -78,12 +78,12 @@ Fork a Repo Guide_>`_ and is well worth reading.
git fetch upstream git fetch upstream
git checkout -b fix-broken-thing upstream/2016.11 git checkout -b fix-broken-thing upstream/2016.11
If you're working on a feature, create your branch from the develop branch. If you're working on a feature, create your branch from the |repo_primary_branch| branch.
.. code-block:: bash .. code-block:: bash
git fetch upstream git fetch upstream
git checkout -b add-cool-feature upstream/develop git checkout -b add-cool-feature upstream/|repo_primary_branch|
#. Edit and commit changes to your branch. #. Edit and commit changes to your branch.
@ -149,7 +149,7 @@ Fork a Repo Guide_>`_ and is well worth reading.
.. code-block:: bash .. code-block:: bash
git fetch upstream git fetch upstream
git rebase upstream/develop add-cool-feature git rebase upstream/|repo_primary_branch| add-cool-feature
git push -u origin add-cool-feature git push -u origin add-cool-feature
If you do rebase, and the push is rejected with a If you do rebase, and the push is rejected with a
@ -185,9 +185,9 @@ Fork a Repo Guide_>`_ and is well worth reading.
https://github.com/my-account/salt/compare/saltstack:2016.11...fix-broken-thing https://github.com/my-account/salt/compare/saltstack:2016.11...fix-broken-thing
If your branch is a feature, choose ``develop`` as the base branch, If your branch is a feature, choose ``|repo_primary_branch|`` as the base branch,
https://github.com/my-account/salt/compare/saltstack:develop...add-cool-feature https://github.com/my-account/salt/compare/saltstack:|repo_primary_branch|...add-cool-feature
#. Review that the proposed changes are what you expect. #. Review that the proposed changes are what you expect.
#. Write a descriptive comment. Include links to related issues (e.g. #. Write a descriptive comment. Include links to related issues (e.g.
@ -219,10 +219,10 @@ Fork a Repo Guide_>`_ and is well worth reading.
Salt's Branch Topology Salt's Branch Topology
---------------------- ----------------------
There are three different kinds of branches in use: develop, main release There are three different kinds of branches in use: |repo_primary_branch|, main release
branches, and dot release branches. branches, and dot release branches.
- All feature work should go into the ``develop`` branch. - All feature work should go into the ``|repo_primary_branch|`` branch.
- Bug fixes and documentation changes should go into the oldest **supported - Bug fixes and documentation changes should go into the oldest **supported
main** release branch affected by the the bug or documentation change (you main** release branch affected by the the bug or documentation change (you
can use the blame button in github to figure out when the bug was introduced). can use the blame button in github to figure out when the bug was introduced).
@ -236,22 +236,22 @@ branches, and dot release branches.
.. note:: .. note::
GitHub will open pull requests against Salt's main branch, ``develop``, GitHub will open pull requests against Salt's main branch, ``|repo_primary_branch|``,
by default. Be sure to check which branch is selected when creating the by default. Be sure to check which branch is selected when creating the
pull request. pull request.
The Develop Branch The |repo_primary_branch| Branch
================== ================================
The ``develop`` branch is unstable and bleeding-edge. Pull requests containing The ``|repo_primary_branch|`` branch is unstable and bleeding-edge. Pull requests containing
feature additions or non-bug-fix changes should be made against the ``develop`` feature additions or non-bug-fix changes should be made against the ``|repo_primary_branch|``
branch. branch.
.. note:: .. note::
If you have a bug fix or documentation change and have already forked your If you have a bug fix or documentation change and have already forked your
working branch from ``develop`` and do not know how to rebase your commits working branch from ``|repo_primary_branch|`` and do not know how to rebase your commits
against another branch, then submit it to ``develop`` anyway. SaltStack's against another branch, then submit it to ``|repo_primary_branch|`` anyway. SaltStack's
development team will be happy to back-port it to the correct branch. development team will be happy to back-port it to the correct branch.
**Please make sure you let the maintainers know that the pull request needs **Please make sure you let the maintainers know that the pull request needs
@ -298,7 +298,7 @@ automatically be "merged-forward" into the newer branches.
For example, a pull request is merged into ``2017.7``. Then, the entire For example, a pull request is merged into ``2017.7``. Then, the entire
``2017.7`` branch is merged-forward into the ``2018.3`` branch, and the ``2017.7`` branch is merged-forward into the ``2018.3`` branch, and the
``2018.3`` branch is merged-forward into the ``develop`` branch. ``2018.3`` branch is merged-forward into the ``|repo_primary_branch|`` branch.
This process makes it easy for contributors to make only one pull-request This process makes it easy for contributors to make only one pull-request
against an older branch, but allows the change to propagate to all **main** against an older branch, but allows the change to propagate to all **main**
@ -338,7 +338,7 @@ message text directly. Only including the text in a pull request will not
close the issue. close the issue.
GitHub will close the referenced issue once the *commit* containing the GitHub will close the referenced issue once the *commit* containing the
magic text is merged into the default branch (``develop``). Any magic text magic text is merged into the default branch (``|repo_primary_branch|``). Any magic text
input only into the pull request description will not be seen at the input only into the pull request description will not be seen at the
Git-level when those commits are merged-forward. In other words, only the Git-level when those commits are merged-forward. In other words, only the
commits are merged-forward and not the pull request text. commits are merged-forward and not the pull request text.
@ -348,7 +348,7 @@ commits are merged-forward and not the pull request text.
Backporting Pull Requests Backporting Pull Requests
========================= =========================
If a bug is fixed on ``develop`` and the bug is also present on a If a bug is fixed on ``|repo_primary_branch|`` and the bug is also present on a
currently-supported release branch, it will need to be back-ported to an currently-supported release branch, it will need to be back-ported to an
applicable branch. applicable branch.
@ -363,11 +363,11 @@ applicable branch.
changes themselves. changes themselves.
It is often easiest to fix a bug on the oldest supported release branch and It is often easiest to fix a bug on the oldest supported release branch and
then merge that branch forward into ``develop`` (as described earlier in this then merge that branch forward into ``|repo_primary_branch|`` (as described earlier in this
document). When that is not possible the fix must be back-ported, or copied, document). When that is not possible the fix must be back-ported, or copied,
into any other affected branches. into any other affected branches.
These steps assume a pull request ``#1234`` has been merged into ``develop``. These steps assume a pull request ``#1234`` has been merged into ``|repo_primary_branch|``.
And ``upstream`` is the name of the remote pointing to the main Salt repo. And ``upstream`` is the name of the remote pointing to the main Salt repo.
#. Identify the oldest supported release branch that is affected by the bug. #. Identify the oldest supported release branch that is affected by the bug.
@ -450,20 +450,20 @@ the name of the main `saltstack/salt`_ repository.
git fetch upstream git fetch upstream
#. Update your copy of the ``develop`` branch. #. Update your copy of the ``|repo_primary_branch|`` branch.
.. code-block:: bash .. code-block:: bash
git checkout develop git checkout |repo_primary_branch|
git merge --ff-only upstream/develop git merge --ff-only upstream/|repo_primary_branch|
If Git complains that a fast-forward merge is not possible, you have local If Git complains that a fast-forward merge is not possible, you have local
commits. commits.
* Run ``git pull --rebase origin develop`` to rebase your changes on top of * Run ``git pull --rebase origin |repo_primary_branch|`` to rebase your changes on top of
the upstream changes. the upstream changes.
* Or, run ``git branch <branch-name>`` to create a new branch with your * Or, run ``git branch <branch-name>`` to create a new branch with your
commits. You will then need to reset your ``develop`` branch before commits. You will then need to reset your ``|repo_primary_branch|`` branch before
updating it with the changes from upstream. updating it with the changes from upstream.
If Git complains that local files will be overwritten, you have changes to If Git complains that local files will be overwritten, you have changes to
@ -474,7 +474,7 @@ the name of the main `saltstack/salt`_ repository.
.. code-block:: bash .. code-block:: bash
git push origin develop git push origin |repo_primary_branch|
#. Repeat the previous two steps for any other branches you work with, such as #. Repeat the previous two steps for any other branches you work with, such as
the current release branch. the current release branch.
@ -551,6 +551,6 @@ Script, see the Bootstrap Script's `Contributing Guidelines`_.
.. _GPG Probot: https://probot.github.io/apps/gpg/ .. _GPG Probot: https://probot.github.io/apps/gpg/
.. _help articles: https://help.github.com/articles/signing-commits-with-gpg/ .. _help articles: https://help.github.com/articles/signing-commits-with-gpg/
.. _GPG Signature Verification feature announcement: https://github.com/blog/2144-gpg-signature-verification .. _GPG Signature Verification feature announcement: https://github.com/blog/2144-gpg-signature-verification
.. _bootstrap-salt.sh: https://github.com/saltstack/salt/blob/develop/salt/cloud/deploy/bootstrap-salt.sh .. _bootstrap-salt.sh: https://github.com/saltstack/salt/blob/|repo_primary_branch|/salt/cloud/deploy/bootstrap-salt.sh
.. _salt-bootstrap repo: https://github.com/saltstack/salt-bootstrap .. _salt-bootstrap repo: https://github.com/saltstack/salt-bootstrap
.. _Contributing Guidelines: https://github.com/saltstack/salt-bootstrap/blob/develop/CONTRIBUTING.md .. _Contributing Guidelines: https://github.com/saltstack/salt-bootstrap/blob/develop/CONTRIBUTING.md

Some files were not shown because too many files have changed in this diff.