Merge branch '2019.2.1' into do_key_test

commit 315eb3562f
Author: Gareth J. Greenaway (committed by GitHub)
Date: 2019-07-01 09:39:45 -07:00
33 changed files with 1115 additions and 281 deletions


@ -1,20 +1,15 @@
// Define the maximum time, in hours, that a test run should run for
def global_timeout = 2
def salt_target_branch = '2019.2.1'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '10')),
[
$class: 'ScannerJobProperty', doNotScan: false
],
[
$class: 'RebuildSettings', autoRebuild: false, rebuildDisabled: false
],
])
def shell_header
timeout(time: global_timeout, unit: 'HOURS') {
node('docs') {
node('docs') {
timeout(time: global_timeout, unit: 'HOURS') {
ansiColor('xterm') {
timestamps {
try {
@ -44,7 +39,7 @@ timeout(time: global_timeout, unit: 'HOURS') {
pyenv install --skip-existing 3.6.8
pyenv shell 3.6.8
python --version
pip install -U https://github.com/s0undt3ch/nox/archive/hotfix/py2.zip#egg=Nox==2018.10.17
pip install -U nox-py2
nox --version
'''
}

.ci/kitchen-amazon2-py2 (new file)

@ -0,0 +1,152 @@
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'amazon'
def distro_version = '2'
def python_version = 'py2'
def salt_target_branch = '2019.2.1'
def golden_images_branch = '2019.2'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '10')),
parameters([
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
])
])
node('kitchen-slave') {
timeout(time: global_timeout, unit: 'HOURS') {
withCredentials([[$class: 'AmazonWebServicesCredentialsBinding',
accessKeyVariable: 'AWS_ACCESS_KEY_ID',
credentialsId: 'AWS_ACCESS_KEY_ID',
secretKeyVariable: 'AWS_SECRET_ACCESS_KEY']]) {
ansiColor('xterm') {
timestamps {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
'NOX_ENV_NAME=runtests-zeromq',
'NOX_ENABLE_FROM_FILENAMES=true',
'NOX_PASSTHROUGH_OPTS=--ssh-tests',
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version}",
'PATH=~/.rbenv/shims:/usr/local/rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/root/bin:/root/bin',
'RBENV_VERSION=2.4.2',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"FORCE_FULL=${params.runFull}",
]) {
// Set the GH status even before cloning the repo
if (env.NODE_NAME.startsWith('jenkins-pr-')) {
stage('github-pending') {
githubNotify credentialsId: 'test-jenkins-credentials',
description: "running ${TEST_SUITE}-${TEST_PLATFORM}...",
status: 'PENDING',
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
}
}
// Checkout the repo
stage('checkout-scm') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('setup-bundle') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 1-15 -n 1); echo "Sleeping $t seconds"; sleep $t
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?;"
'''
}
}
try {
sshagent(credentials: ['jenkins-testing-ssh-key']) {
sh 'ssh-add ~/.ssh/jenkins-testing.pem || ssh-add ~/.ssh/kitchen.pem'
try {
timeout(time: testrun_timeout, unit: 'HOURS') {
stage('Converge VM') {
sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?;"'
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?;"'
}
}
}
} finally {
try {
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
}
archiveArtifacts artifacts: 'artifacts/*,artifacts/**/*'
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?;"
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}"
fi
'''
}
}
}
}
}
}
} catch (Exception e) {
currentBuild.result = 'FAILURE'
} finally {
cleanWs notFailBuild: true
if (currentBuild.resultIsBetterOrEqualTo('SUCCESS')) {
if (env.NODE_NAME.startsWith('jenkins-pr-')) {
githubNotify credentialsId: 'test-jenkins-credentials',
description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has passed",
status: 'SUCCESS',
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
}
} else {
if (env.NODE_NAME.startsWith('jenkins-pr-')) {
githubNotify credentialsId: 'test-jenkins-credentials',
description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has failed",
status: 'FAILURE',
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
}
try {
slackSend channel: "#jenkins-prod-pr",
color: '#FF0000',
message: "FAILED: PR-Job: '${env.JOB_NAME} [${env.BUILD_NUMBER}]' (${env.BUILD_URL})"
} catch (Exception e) {
sh 'echo Failed to send the Slack notification'
}
}
}
}
}
}
}
}
}
// vim: ft=groovy

.ci/kitchen-amazon2-py3 (new file)

@ -0,0 +1,152 @@
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'amazon'
def distro_version = '2'
def python_version = 'py3'
def salt_target_branch = '2019.2.1'
def golden_images_branch = '2019.2'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '10')),
parameters([
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
])
])
node('kitchen-slave') {
timeout(time: global_timeout, unit: 'HOURS') {
withCredentials([[$class: 'AmazonWebServicesCredentialsBinding',
accessKeyVariable: 'AWS_ACCESS_KEY_ID',
credentialsId: 'AWS_ACCESS_KEY_ID',
secretKeyVariable: 'AWS_SECRET_ACCESS_KEY']]) {
ansiColor('xterm') {
timestamps {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
'NOX_ENV_NAME=runtests-zeromq',
'NOX_ENABLE_FROM_FILENAMES=true',
'NOX_PASSTHROUGH_OPTS=--ssh-tests',
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version}",
'PATH=~/.rbenv/shims:/usr/local/rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/root/bin:/root/bin',
'RBENV_VERSION=2.4.2',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"FORCE_FULL=${params.runFull}",
]) {
// Set the GH status even before cloning the repo
if (env.NODE_NAME.startsWith('jenkins-pr-')) {
stage('github-pending') {
githubNotify credentialsId: 'test-jenkins-credentials',
description: "running ${TEST_SUITE}-${TEST_PLATFORM}...",
status: 'PENDING',
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
}
}
// Checkout the repo
stage('checkout-scm') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('setup-bundle') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 1-15 -n 1); echo "Sleeping $t seconds"; sleep $t
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?;"
'''
}
}
try {
sshagent(credentials: ['jenkins-testing-ssh-key']) {
sh 'ssh-add ~/.ssh/jenkins-testing.pem || ssh-add ~/.ssh/kitchen.pem'
try {
timeout(time: testrun_timeout, unit: 'HOURS') {
stage('Converge VM') {
sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?;"'
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?;"'
}
}
}
} finally {
try {
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
}
archiveArtifacts artifacts: 'artifacts/*,artifacts/**/*'
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?;"
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}"
fi
'''
}
}
}
}
}
}
} catch (Exception e) {
currentBuild.result = 'FAILURE'
} finally {
cleanWs notFailBuild: true
if (currentBuild.resultIsBetterOrEqualTo('SUCCESS')) {
if (env.NODE_NAME.startsWith('jenkins-pr-')) {
githubNotify credentialsId: 'test-jenkins-credentials',
description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has passed",
status: 'SUCCESS',
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
}
} else {
if (env.NODE_NAME.startsWith('jenkins-pr-')) {
githubNotify credentialsId: 'test-jenkins-credentials',
description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has failed",
status: 'FAILURE',
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
}
try {
slackSend channel: "#jenkins-prod-pr",
color: '#FF0000',
message: "FAILED: PR-Job: '${env.JOB_NAME} [${env.BUILD_NUMBER}]' (${env.BUILD_URL})"
} catch (Exception e) {
sh 'echo Failed to send the Slack notification'
}
}
}
}
}
}
}
}
}
// vim: ft=groovy


@ -7,23 +7,18 @@ def global_timeout = testrun_timeout + 1;
def distro_name = 'centos'
def distro_version = '6'
def python_version = 'py2'
def salt_target_branch = '2019.2.1'
def golden_images_branch = '2019.2'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '10')),
[
$class: 'ScannerJobProperty', doNotScan: false
],
[
$class: 'RebuildSettings', autoRebuild: false, rebuildDisabled: false
],
parameters([
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
])
])
timeout(time: global_timeout, unit: 'HOURS') {
node('kitchen-slave') {
node('kitchen-slave') {
timeout(time: global_timeout, unit: 'HOURS') {
withCredentials([[$class: 'AmazonWebServicesCredentialsBinding',
accessKeyVariable: 'AWS_ACCESS_KEY_ID',
credentialsId: 'AWS_ACCESS_KEY_ID',
@ -37,6 +32,7 @@ timeout(time: global_timeout, unit: 'HOURS') {
'NOX_ENV_NAME=runtests-zeromq',
'NOX_ENABLE_FROM_FILENAMES=true',
'NOX_PASSTHROUGH_OPTS=--ssh-tests',
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version}",
'PATH=~/.rbenv/shims:/usr/local/rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/root/bin:/root/bin',
@ -58,11 +54,11 @@ timeout(time: global_timeout, unit: 'HOURS') {
stage('checkout-scm') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('setup-bundle') {
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${CHANGE_TARGET}:refs/remotes/origin/${CHANGE_TARGET}'
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
@ -80,11 +76,11 @@ timeout(time: global_timeout, unit: 'HOURS') {
try {
timeout(time: testrun_timeout, unit: 'HOURS') {
stage('Converge VM') {
sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM'
sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?;"'
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM'
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?;"'
}
}
}


@ -7,23 +7,18 @@ def global_timeout = testrun_timeout + 1;
def distro_name = 'centos'
def distro_version = '7'
def python_version = 'py2'
def salt_target_branch = '2019.2.1'
def golden_images_branch = '2019.2'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '10')),
[
$class: 'ScannerJobProperty', doNotScan: false
],
[
$class: 'RebuildSettings', autoRebuild: false, rebuildDisabled: false
],
parameters([
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
])
])
timeout(time: global_timeout, unit: 'HOURS') {
node('kitchen-slave') {
node('kitchen-slave') {
timeout(time: global_timeout, unit: 'HOURS') {
withCredentials([[$class: 'AmazonWebServicesCredentialsBinding',
accessKeyVariable: 'AWS_ACCESS_KEY_ID',
credentialsId: 'AWS_ACCESS_KEY_ID',
@ -37,6 +32,7 @@ timeout(time: global_timeout, unit: 'HOURS') {
'NOX_ENV_NAME=runtests-zeromq',
'NOX_ENABLE_FROM_FILENAMES=true',
'NOX_PASSTHROUGH_OPTS=--ssh-tests',
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version}",
'PATH=~/.rbenv/shims:/usr/local/rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/root/bin:/root/bin',
@ -58,11 +54,11 @@ timeout(time: global_timeout, unit: 'HOURS') {
stage('checkout-scm') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('setup-bundle') {
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${CHANGE_TARGET}:refs/remotes/origin/${CHANGE_TARGET}'
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
@ -80,11 +76,11 @@ timeout(time: global_timeout, unit: 'HOURS') {
try {
timeout(time: testrun_timeout, unit: 'HOURS') {
stage('Converge VM') {
sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM'
sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?;"'
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM'
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?;"'
}
}
}


@ -7,23 +7,18 @@ def global_timeout = testrun_timeout + 1;
def distro_name = 'centos'
def distro_version = '7'
def python_version = 'py3'
def salt_target_branch = '2019.2.1'
def golden_images_branch = '2019.2'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '10')),
[
$class: 'ScannerJobProperty', doNotScan: false
],
[
$class: 'RebuildSettings', autoRebuild: false, rebuildDisabled: false
],
parameters([
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
])
])
timeout(time: global_timeout, unit: 'HOURS') {
node('kitchen-slave') {
node('kitchen-slave') {
timeout(time: global_timeout, unit: 'HOURS') {
withCredentials([[$class: 'AmazonWebServicesCredentialsBinding',
accessKeyVariable: 'AWS_ACCESS_KEY_ID',
credentialsId: 'AWS_ACCESS_KEY_ID',
@ -37,6 +32,7 @@ timeout(time: global_timeout, unit: 'HOURS') {
'NOX_ENV_NAME=runtests-zeromq',
'NOX_ENABLE_FROM_FILENAMES=true',
'NOX_PASSTHROUGH_OPTS=--ssh-tests',
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version}",
'PATH=~/.rbenv/shims:/usr/local/rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/root/bin:/root/bin',
@ -58,11 +54,11 @@ timeout(time: global_timeout, unit: 'HOURS') {
stage('checkout-scm') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('setup-bundle') {
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${CHANGE_TARGET}:refs/remotes/origin/${CHANGE_TARGET}'
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
@ -80,11 +76,11 @@ timeout(time: global_timeout, unit: 'HOURS') {
try {
timeout(time: testrun_timeout, unit: 'HOURS') {
stage('Converge VM') {
sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM'
sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?;"'
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM'
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?;"'
}
}
}


@ -7,23 +7,18 @@ def global_timeout = testrun_timeout + 1;
def distro_name = 'debian'
def distro_version = '8'
def python_version = 'py2'
def salt_target_branch = '2019.2.1'
def golden_images_branch = '2019.2'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '10')),
[
$class: 'ScannerJobProperty', doNotScan: false
],
[
$class: 'RebuildSettings', autoRebuild: false, rebuildDisabled: false
],
parameters([
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
])
])
timeout(time: global_timeout, unit: 'HOURS') {
node('kitchen-slave') {
node('kitchen-slave') {
timeout(time: global_timeout, unit: 'HOURS') {
withCredentials([[$class: 'AmazonWebServicesCredentialsBinding',
accessKeyVariable: 'AWS_ACCESS_KEY_ID',
credentialsId: 'AWS_ACCESS_KEY_ID',
@ -37,6 +32,7 @@ timeout(time: global_timeout, unit: 'HOURS') {
'NOX_ENV_NAME=runtests-zeromq',
'NOX_ENABLE_FROM_FILENAMES=true',
'NOX_PASSTHROUGH_OPTS=--ssh-tests',
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version}",
'PATH=~/.rbenv/shims:/usr/local/rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/root/bin:/root/bin',
@ -58,11 +54,11 @@ timeout(time: global_timeout, unit: 'HOURS') {
stage('checkout-scm') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('setup-bundle') {
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${CHANGE_TARGET}:refs/remotes/origin/${CHANGE_TARGET}'
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
@ -80,11 +76,11 @@ timeout(time: global_timeout, unit: 'HOURS') {
try {
timeout(time: testrun_timeout, unit: 'HOURS') {
stage('Converge VM') {
sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM'
sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?;"'
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM'
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?;"'
}
}
}


@ -7,23 +7,18 @@ def global_timeout = testrun_timeout + 1;
def distro_name = 'debian'
def distro_version = '8'
def python_version = 'py3'
def salt_target_branch = '2019.2.1'
def golden_images_branch = '2019.2'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '10')),
[
$class: 'ScannerJobProperty', doNotScan: false
],
[
$class: 'RebuildSettings', autoRebuild: false, rebuildDisabled: false
],
parameters([
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
])
])
timeout(time: global_timeout, unit: 'HOURS') {
node('kitchen-slave') {
node('kitchen-slave') {
timeout(time: global_timeout, unit: 'HOURS') {
withCredentials([[$class: 'AmazonWebServicesCredentialsBinding',
accessKeyVariable: 'AWS_ACCESS_KEY_ID',
credentialsId: 'AWS_ACCESS_KEY_ID',
@ -37,6 +32,7 @@ timeout(time: global_timeout, unit: 'HOURS') {
'NOX_ENV_NAME=runtests-zeromq',
'NOX_ENABLE_FROM_FILENAMES=true',
'NOX_PASSTHROUGH_OPTS=--ssh-tests',
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version}",
'PATH=~/.rbenv/shims:/usr/local/rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/root/bin:/root/bin',
@ -58,11 +54,11 @@ timeout(time: global_timeout, unit: 'HOURS') {
stage('checkout-scm') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('setup-bundle') {
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${CHANGE_TARGET}:refs/remotes/origin/${CHANGE_TARGET}'
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
@ -80,11 +76,11 @@ timeout(time: global_timeout, unit: 'HOURS') {
try {
timeout(time: testrun_timeout, unit: 'HOURS') {
stage('Converge VM') {
sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM'
sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?;"'
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM'
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?;"'
}
}
}


@ -7,23 +7,18 @@ def global_timeout = testrun_timeout + 1;
def distro_name = 'debian'
def distro_version = '9'
def python_version = 'py2'
def salt_target_branch = '2019.2.1'
def golden_images_branch = '2019.2'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '10')),
[
$class: 'ScannerJobProperty', doNotScan: false
],
[
$class: 'RebuildSettings', autoRebuild: false, rebuildDisabled: false
],
parameters([
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
])
])
timeout(time: global_timeout, unit: 'HOURS') {
node('kitchen-slave') {
node('kitchen-slave') {
timeout(time: global_timeout, unit: 'HOURS') {
withCredentials([[$class: 'AmazonWebServicesCredentialsBinding',
accessKeyVariable: 'AWS_ACCESS_KEY_ID',
credentialsId: 'AWS_ACCESS_KEY_ID',
@ -37,6 +32,7 @@ timeout(time: global_timeout, unit: 'HOURS') {
'NOX_ENV_NAME=runtests-zeromq',
'NOX_ENABLE_FROM_FILENAMES=true',
'NOX_PASSTHROUGH_OPTS=--ssh-tests',
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version}",
'PATH=~/.rbenv/shims:/usr/local/rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/root/bin:/root/bin',
@ -58,11 +54,11 @@ timeout(time: global_timeout, unit: 'HOURS') {
stage('checkout-scm') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('setup-bundle') {
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${CHANGE_TARGET}:refs/remotes/origin/${CHANGE_TARGET}'
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
@ -80,11 +76,11 @@ timeout(time: global_timeout, unit: 'HOURS') {
try {
timeout(time: testrun_timeout, unit: 'HOURS') {
stage('Converge VM') {
sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM'
sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?;"'
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM'
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?;"'
}
}
}


@ -7,23 +7,18 @@ def global_timeout = testrun_timeout + 1;
def distro_name = 'debian'
def distro_version = '9'
def python_version = 'py3'
def salt_target_branch = '2019.2.1'
def golden_images_branch = '2019.2'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '10')),
[
$class: 'ScannerJobProperty', doNotScan: false
],
[
$class: 'RebuildSettings', autoRebuild: false, rebuildDisabled: false
],
parameters([
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
])
])
timeout(time: global_timeout, unit: 'HOURS') {
node('kitchen-slave') {
node('kitchen-slave') {
timeout(time: global_timeout, unit: 'HOURS') {
withCredentials([[$class: 'AmazonWebServicesCredentialsBinding',
accessKeyVariable: 'AWS_ACCESS_KEY_ID',
credentialsId: 'AWS_ACCESS_KEY_ID',
@ -37,6 +32,7 @@ timeout(time: global_timeout, unit: 'HOURS') {
'NOX_ENV_NAME=runtests-zeromq',
'NOX_ENABLE_FROM_FILENAMES=true',
'NOX_PASSTHROUGH_OPTS=--ssh-tests',
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version}",
'PATH=~/.rbenv/shims:/usr/local/rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/root/bin:/root/bin',
@ -58,11 +54,11 @@ timeout(time: global_timeout, unit: 'HOURS') {
stage('checkout-scm') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('setup-bundle') {
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${CHANGE_TARGET}:refs/remotes/origin/${CHANGE_TARGET}'
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
@ -80,11 +76,11 @@ timeout(time: global_timeout, unit: 'HOURS') {
try {
timeout(time: testrun_timeout, unit: 'HOURS') {
stage('Converge VM') {
sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM'
sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?;"'
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM'
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?;"'
}
}
}

.ci/kitchen-fedora29-py2 (new file)

@ -0,0 +1,152 @@
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'fedora'
def distro_version = '29'
def python_version = 'py2'
def salt_target_branch = '2019.2.1'
def golden_images_branch = '2019.2'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '10')),
parameters([
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
])
])
node('kitchen-slave') {
timeout(time: global_timeout, unit: 'HOURS') {
withCredentials([[$class: 'AmazonWebServicesCredentialsBinding',
accessKeyVariable: 'AWS_ACCESS_KEY_ID',
credentialsId: 'AWS_ACCESS_KEY_ID',
secretKeyVariable: 'AWS_SECRET_ACCESS_KEY']]) {
ansiColor('xterm') {
timestamps {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
'NOX_ENV_NAME=runtests-zeromq',
'NOX_ENABLE_FROM_FILENAMES=true',
'NOX_PASSTHROUGH_OPTS=--ssh-tests',
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version}",
'PATH=~/.rbenv/shims:/usr/local/rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/root/bin:/root/bin',
'RBENV_VERSION=2.4.2',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"FORCE_FULL=${params.runFull}",
]) {
// Set the GH status even before cloning the repo
if (env.NODE_NAME.startsWith('jenkins-pr-')) {
stage('github-pending') {
githubNotify credentialsId: 'test-jenkins-credentials',
description: "running ${TEST_SUITE}-${TEST_PLATFORM}...",
status: 'PENDING',
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
}
}
// Checkout the repo
stage('checkout-scm') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('setup-bundle') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 1-15 -n 1); echo "Sleeping $t seconds"; sleep $t
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?;"
'''
}
}
try {
sshagent(credentials: ['jenkins-testing-ssh-key']) {
sh 'ssh-add ~/.ssh/jenkins-testing.pem || ssh-add ~/.ssh/kitchen.pem'
try {
timeout(time: testrun_timeout, unit: 'HOURS') {
stage('Converge VM') {
sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?;"'
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?;"'
}
}
}
} finally {
try {
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
}
archiveArtifacts artifacts: 'artifacts/*,artifacts/**/*'
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?;"
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}"
fi
'''
}
}
}
}
}
}
} catch (Exception e) {
currentBuild.result = 'FAILURE'
} finally {
cleanWs notFailBuild: true
if (currentBuild.resultIsBetterOrEqualTo('SUCCESS')) {
if (env.NODE_NAME.startsWith('jenkins-pr-')) {
githubNotify credentialsId: 'test-jenkins-credentials',
description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has passed",
status: 'SUCCESS',
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
}
} else {
if (env.NODE_NAME.startsWith('jenkins-pr-')) {
githubNotify credentialsId: 'test-jenkins-credentials',
description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has failed",
status: 'FAILURE',
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
}
try {
slackSend channel: "#jenkins-prod-pr",
color: '#FF0000',
message: "FAILED: PR-Job: '${env.JOB_NAME} [${env.BUILD_NUMBER}]' (${env.BUILD_URL})"
} catch (Exception e) {
sh 'echo Failed to send the Slack notification'
}
}
}
}
}
}
}
}
}
// vim: ft=groovy

.ci/kitchen-fedora29-py3 (new file)

@ -0,0 +1,152 @@
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'fedora'
def distro_version = '29'
def python_version = 'py3'
def salt_target_branch = '2019.2.1'
def golden_images_branch = '2019.2'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '10')),
parameters([
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
])
])
node('kitchen-slave') {
timeout(time: global_timeout, unit: 'HOURS') {
withCredentials([[$class: 'AmazonWebServicesCredentialsBinding',
accessKeyVariable: 'AWS_ACCESS_KEY_ID',
credentialsId: 'AWS_ACCESS_KEY_ID',
secretKeyVariable: 'AWS_SECRET_ACCESS_KEY']]) {
ansiColor('xterm') {
timestamps {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
'NOX_ENV_NAME=runtests-zeromq',
'NOX_ENABLE_FROM_FILENAMES=true',
'NOX_PASSTHROUGH_OPTS=--ssh-tests',
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version}",
'PATH=~/.rbenv/shims:/usr/local/rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/root/bin:/root/bin',
'RBENV_VERSION=2.4.2',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"FORCE_FULL=${params.runFull}",
]) {
// Set the GH status even before cloning the repo
if (env.NODE_NAME.startsWith('jenkins-pr-')) {
stage('github-pending') {
githubNotify credentialsId: 'test-jenkins-credentials',
description: "running ${TEST_SUITE}-${TEST_PLATFORM}...",
status: 'PENDING',
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
}
}
// Checkout the repo
stage('checkout-scm') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('setup-bundle') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 1-15 -n 1); echo "Sleeping $t seconds"; sleep $t
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?;"
'''
}
}
try {
sshagent(credentials: ['jenkins-testing-ssh-key']) {
sh 'ssh-add ~/.ssh/jenkins-testing.pem || ssh-add ~/.ssh/kitchen.pem'
try {
timeout(time: testrun_timeout, unit: 'HOURS') {
stage('Converge VM') {
sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?;"'
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?;"'
}
}
}
} finally {
try {
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
}
archiveArtifacts artifacts: 'artifacts/*,artifacts/**/*'
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?;"
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}"
fi
'''
}
}
}
}
}
}
} catch (Exception e) {
currentBuild.result = 'FAILURE'
} finally {
cleanWs notFailBuild: true
if (currentBuild.resultIsBetterOrEqualTo('SUCCESS')) {
if (env.NODE_NAME.startsWith('jenkins-pr-')) {
githubNotify credentialsId: 'test-jenkins-credentials',
description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has passed",
status: 'SUCCESS',
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
}
} else {
if (env.NODE_NAME.startsWith('jenkins-pr-')) {
githubNotify credentialsId: 'test-jenkins-credentials',
description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has failed",
status: 'FAILURE',
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
}
try {
slackSend channel: "#jenkins-prod-pr",
color: '#FF0000',
message: "FAILED: PR-Job: '${env.JOB_NAME} [${env.BUILD_NUMBER}]' (${env.BUILD_URL})"
} catch (Exception e) {
sh 'echo Failed to send the Slack notification'
}
}
}
}
}
}
}
}
}
// vim: ft=groovy


@ -1,4 +1,4 @@
//// Define the maximum time, in hours, that a test run should run for
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
@ -7,23 +7,18 @@ def global_timeout = testrun_timeout + 1;
def distro_name = 'ubuntu'
def distro_version = '1604'
def python_version = 'py2'
def salt_target_branch = '2019.2.1'
def golden_images_branch = '2019.2'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '10')),
[
$class: 'ScannerJobProperty', doNotScan: false
],
[
$class: 'RebuildSettings', autoRebuild: false, rebuildDisabled: false
],
parameters([
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
])
])
timeout(time: global_timeout, unit: 'HOURS') {
node('kitchen-slave') {
node('kitchen-slave') {
timeout(time: global_timeout, unit: 'HOURS') {
withCredentials([[$class: 'AmazonWebServicesCredentialsBinding',
accessKeyVariable: 'AWS_ACCESS_KEY_ID',
credentialsId: 'AWS_ACCESS_KEY_ID',
@ -37,6 +32,7 @@ timeout(time: global_timeout, unit: 'HOURS') {
'NOX_ENV_NAME=runtests-zeromq',
'NOX_ENABLE_FROM_FILENAMES=true',
'NOX_PASSTHROUGH_OPTS=--ssh-tests',
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version}",
'PATH=~/.rbenv/shims:/usr/local/rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/root/bin:/root/bin',
@ -58,11 +54,11 @@ timeout(time: global_timeout, unit: 'HOURS') {
stage('checkout-scm') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('setup-bundle') {
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${CHANGE_TARGET}:refs/remotes/origin/${CHANGE_TARGET}'
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
@ -80,11 +76,11 @@ timeout(time: global_timeout, unit: 'HOURS') {
try {
timeout(time: testrun_timeout, unit: 'HOURS') {
stage('Converge VM') {
sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM'
sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?;"'
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM'
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?;"'
}
}
}


@ -1,4 +1,4 @@
//// Define the maximum time, in hours, that a test run should run for
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
@ -7,23 +7,18 @@ def global_timeout = testrun_timeout + 1;
def distro_name = 'ubuntu'
def distro_version = '1604'
def python_version = 'py3'
def salt_target_branch = '2019.2.1'
def golden_images_branch = '2019.2'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '10')),
[
$class: 'ScannerJobProperty', doNotScan: false
],
[
$class: 'RebuildSettings', autoRebuild: false, rebuildDisabled: false
],
parameters([
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
])
])
timeout(time: global_timeout, unit: 'HOURS') {
node('kitchen-slave') {
node('kitchen-slave') {
timeout(time: global_timeout, unit: 'HOURS') {
withCredentials([[$class: 'AmazonWebServicesCredentialsBinding',
accessKeyVariable: 'AWS_ACCESS_KEY_ID',
credentialsId: 'AWS_ACCESS_KEY_ID',
@ -37,6 +32,7 @@ timeout(time: global_timeout, unit: 'HOURS') {
'NOX_ENV_NAME=runtests-zeromq',
'NOX_ENABLE_FROM_FILENAMES=true',
'NOX_PASSTHROUGH_OPTS=--ssh-tests',
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version}",
'PATH=~/.rbenv/shims:/usr/local/rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/root/bin:/root/bin',
@ -58,11 +54,11 @@ timeout(time: global_timeout, unit: 'HOURS') {
stage('checkout-scm') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('setup-bundle') {
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${CHANGE_TARGET}:refs/remotes/origin/${CHANGE_TARGET}'
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
@ -80,11 +76,11 @@ timeout(time: global_timeout, unit: 'HOURS') {
try {
timeout(time: testrun_timeout, unit: 'HOURS') {
stage('Converge VM') {
sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM'
sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?;"'
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM'
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?;"'
}
}
}


@ -7,23 +7,18 @@ def global_timeout = testrun_timeout + 1;
def distro_name = 'ubuntu'
def distro_version = '1804'
def python_version = 'py2'
def salt_target_branch = '2019.2.1'
def golden_images_branch = '2019.2'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '10')),
[
$class: 'ScannerJobProperty', doNotScan: false
],
[
$class: 'RebuildSettings', autoRebuild: false, rebuildDisabled: false
],
parameters([
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
])
])
timeout(time: global_timeout, unit: 'HOURS') {
node('kitchen-slave') {
node('kitchen-slave') {
timeout(time: global_timeout, unit: 'HOURS') {
withCredentials([[$class: 'AmazonWebServicesCredentialsBinding',
accessKeyVariable: 'AWS_ACCESS_KEY_ID',
credentialsId: 'AWS_ACCESS_KEY_ID',
@ -37,6 +32,7 @@ timeout(time: global_timeout, unit: 'HOURS') {
'NOX_ENV_NAME=runtests-zeromq',
'NOX_ENABLE_FROM_FILENAMES=true',
'NOX_PASSTHROUGH_OPTS=--ssh-tests',
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version}",
'PATH=~/.rbenv/shims:/usr/local/rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/root/bin:/root/bin',
@ -58,11 +54,11 @@ timeout(time: global_timeout, unit: 'HOURS') {
stage('checkout-scm') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('setup-bundle') {
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${CHANGE_TARGET}:refs/remotes/origin/${CHANGE_TARGET}'
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
@ -80,11 +76,11 @@ timeout(time: global_timeout, unit: 'HOURS') {
try {
timeout(time: testrun_timeout, unit: 'HOURS') {
stage('Converge VM') {
sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM'
sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?;"'
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM'
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?;"'
}
}
}


@ -7,23 +7,18 @@ def global_timeout = testrun_timeout + 1;
def distro_name = 'ubuntu'
def distro_version = '1804'
def python_version = 'py3'
def salt_target_branch = '2019.2.1'
def golden_images_branch = '2019.2'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '10')),
[
$class: 'ScannerJobProperty', doNotScan: false
],
[
$class: 'RebuildSettings', autoRebuild: false, rebuildDisabled: false
],
parameters([
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
])
])
timeout(time: global_timeout, unit: 'HOURS') {
node('kitchen-slave') {
node('kitchen-slave') {
timeout(time: global_timeout, unit: 'HOURS') {
withCredentials([[$class: 'AmazonWebServicesCredentialsBinding',
accessKeyVariable: 'AWS_ACCESS_KEY_ID',
credentialsId: 'AWS_ACCESS_KEY_ID',
@ -37,6 +32,7 @@ timeout(time: global_timeout, unit: 'HOURS') {
'NOX_ENV_NAME=runtests-zeromq',
'NOX_ENABLE_FROM_FILENAMES=true',
'NOX_PASSTHROUGH_OPTS=--ssh-tests',
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version}",
'PATH=~/.rbenv/shims:/usr/local/rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/root/bin:/root/bin',
@ -58,11 +54,11 @@ timeout(time: global_timeout, unit: 'HOURS') {
stage('checkout-scm') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('setup-bundle') {
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${CHANGE_TARGET}:refs/remotes/origin/${CHANGE_TARGET}'
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
@ -80,11 +76,11 @@ timeout(time: global_timeout, unit: 'HOURS') {
try {
timeout(time: testrun_timeout, unit: 'HOURS') {
stage('Converge VM') {
sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM'
sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?;"'
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM'
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?;"'
}
}
}


@ -1,5 +1,5 @@
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
def testrun_timeout = 8
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
@ -7,23 +7,18 @@ def global_timeout = testrun_timeout + 1;
def distro_name = 'windows'
def distro_version = '2016'
def python_version = 'py2'
def salt_target_branch = '2019.2.1'
def golden_images_branch = '2019.2'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '10')),
[
$class: 'ScannerJobProperty', doNotScan: false
],
[
$class: 'RebuildSettings', autoRebuild: false, rebuildDisabled: false
],
parameters([
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
])
])
timeout(time: global_timeout, unit: 'HOURS') {
node('kitchen-slave') {
node('kitchen-slave') {
timeout(time: global_timeout, unit: 'HOURS') {
withCredentials([[$class: 'AmazonWebServicesCredentialsBinding',
accessKeyVariable: 'AWS_ACCESS_KEY_ID',
credentialsId: 'AWS_ACCESS_KEY_ID',
@ -36,7 +31,8 @@ timeout(time: global_timeout, unit: 'HOURS') {
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
'NOX_ENV_NAME=runtests-zeromq',
'NOX_ENABLE_FROM_FILENAMES=true',
'NOX_PASSTHROUGH_OPTS=--ssh-tests',
"NOX_PASSTHROUGH_OPTS=--unit",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version}",
'PATH=~/.rbenv/shims:/usr/local/rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/root/bin:/root/bin',
@ -58,11 +54,11 @@ timeout(time: global_timeout, unit: 'HOURS') {
stage('checkout-scm') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('setup-bundle') {
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${CHANGE_TARGET}:refs/remotes/origin/${CHANGE_TARGET}'
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
@ -80,11 +76,11 @@ timeout(time: global_timeout, unit: 'HOURS') {
try {
timeout(time: testrun_timeout, unit: 'HOURS') {
stage('Converge VM') {
sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM'
sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?;"'
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM'
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?;"'
}
}
}


@ -1,5 +1,5 @@
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
def testrun_timeout = 8
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
@ -7,23 +7,18 @@ def global_timeout = testrun_timeout + 1;
def distro_name = 'windows'
def distro_version = '2016'
def python_version = 'py3'
def salt_target_branch = '2019.2.1'
def golden_images_branch = '2019.2'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '10')),
[
$class: 'ScannerJobProperty', doNotScan: false
],
[
$class: 'RebuildSettings', autoRebuild: false, rebuildDisabled: false
],
parameters([
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
])
])
timeout(time: global_timeout, unit: 'HOURS') {
node('kitchen-slave') {
node('kitchen-slave') {
timeout(time: global_timeout, unit: 'HOURS') {
withCredentials([[$class: 'AmazonWebServicesCredentialsBinding',
accessKeyVariable: 'AWS_ACCESS_KEY_ID',
credentialsId: 'AWS_ACCESS_KEY_ID',
@ -36,6 +31,8 @@ timeout(time: global_timeout, unit: 'HOURS') {
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
'NOX_ENV_NAME=runtests-zeromq',
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=--unit",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version}",
'PATH=~/.rbenv/shims:/usr/local/rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/root/bin:/root/bin',
@ -57,11 +54,11 @@ timeout(time: global_timeout, unit: 'HOURS') {
stage('checkout-scm') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('setup-bundle') {
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${CHANGE_TARGET}:refs/remotes/origin/${CHANGE_TARGET}'
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
@ -79,11 +76,11 @@ timeout(time: global_timeout, unit: 'HOURS') {
try {
timeout(time: testrun_timeout, unit: 'HOURS') {
stage('Converge VM') {
sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM'
sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?;"'
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM'
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?;"'
}
}
}

.ci/lint

@ -1,20 +1,15 @@
// Define the maximum time, in hours, that a test run should run for
def global_timeout = 3
def salt_target_branch = '2019.2.1'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '10')),
[
$class: 'ScannerJobProperty', doNotScan: false
],
[
$class: 'RebuildSettings', autoRebuild: false, rebuildDisabled: false
],
])
def shell_header
timeout(time: global_timeout, unit: 'HOURS') {
node('lint') {
node('lint') {
timeout(time: global_timeout, unit: 'HOURS') {
ansiColor('xterm') {
timestamps {
try {
@ -30,35 +25,40 @@ timeout(time: global_timeout, unit: 'HOURS') {
} else {
shell_header = ''
}
// Checkout the repo
stage('checkout-scm') {
cleanWs notFailBuild: true
checkout scm
}
// Setup the kitchen required bundle
stage('Setup') {
sh shell_header + '''
# Need -M to detect renames otherwise they are reported as Delete and Add, need -C to detect copies, -C includes -M
# -M is on by default in git 2.9+
git diff --name-status -l99999 -C "origin/$CHANGE_TARGET" > file-list-status.log
# the -l increases the search limit; let's use awk so we do not need to repeat the search above.
gawk 'BEGIN {FS="\\t"} {if ($1 != "D") {print $NF}}' file-list-status.log > file-list-changed.log
gawk 'BEGIN {FS="\\t"} {if ($1 == "D") {print $NF}}' file-list-status.log > file-list-deleted.log
(git diff --name-status -l99999 -C "origin/$CHANGE_TARGET" "origin/$BRANCH_NAME";echo "---";git diff --name-status -l99999 -C "origin/$BRANCH_NAME";printenv|grep -E '=[0-9a-z]{40,}+$|COMMIT=|BRANCH') > file-list-experiment.log
eval "$(pyenv init -)"
pyenv --version
pyenv install --skip-existing 2.7.15
pyenv shell 2.7.15
python --version
pip install -U https://github.com/s0undt3ch/nox/archive/hotfix/py2.zip#egg=Nox==2018.10.17
nox --version
# Create the required virtualenvs in serial
nox --install-only -e lint-salt
nox --install-only -e lint-tests
'''
withEnv(["SALT_TARGET_BRANCH=${salt_target_branch}"]) {
// Checkout the repo
stage('checkout-scm') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh shell_header + '''
# Need -M to detect renames otherwise they are reported as Delete and Add, need -C to detect copies, -C includes -M
# -M is on by default in git 2.9+
git diff --name-status -l99999 -C "origin/${SALT_TARGET_BRANCH}" > file-list-status.log
# the -l increases the search limit; let's use awk so we do not need to repeat the search above.
gawk 'BEGIN {FS="\\t"} {if ($1 != "D") {print $NF}}' file-list-status.log > file-list-changed.log
gawk 'BEGIN {FS="\\t"} {if ($1 == "D") {print $NF}}' file-list-status.log > file-list-deleted.log
(git diff --name-status -l99999 -C "origin/${SALT_TARGET_BRANCH}" "origin/$BRANCH_NAME";echo "---";git diff --name-status -l99999 -C "origin/$BRANCH_NAME";printenv|grep -E '=[0-9a-z]{40,}+$|COMMIT=|BRANCH') > file-list-experiment.log
eval "$(pyenv init -)"
pyenv --version
pyenv install --skip-existing 2.7.15
pyenv shell 2.7.15
python --version
pip install -U nox-py2
nox --version
# Create the required virtualenvs in serial
nox --install-only -e lint-salt
nox --install-only -e lint-tests
'''
}
archiveArtifacts artifacts: 'file-list-status.log,file-list-changed.log,file-list-deleted.log,file-list-experiment.log'
}
stage('Lint Changes') {
try {
parallel(
@ -94,17 +94,22 @@ timeout(time: global_timeout, unit: 'HOURS') {
}
)
} finally {
archiveArtifacts artifacts: 'pylint-report-*-chg.log', allowEmptyArchive: true
step([$class: 'WarningsPublisher',
parserConfigurations: [[
parserName: 'PyLint',
pattern: 'pylint-report-*-chg.log'
]],
failedTotalAll: '0',
useDeltaValues: false,
canRunOnFailed: true,
usePreviousBuildAsReference: true
])
def changed_logs_pattern = 'pylint-report-*-chg.log'
archiveArtifacts artifacts: changed_logs_pattern, allowEmptyArchive: true
if (env.NODE_NAME.startsWith('jenkins-pr-')) {
step([$class: 'WarningsPublisher',
parserConfigurations: [[
parserName: 'PyLint',
pattern: changed_logs_pattern
]],
failedTotalAll: '0',
useDeltaValues: false,
canRunOnFailed: true,
usePreviousBuildAsReference: true
])
} else {
recordIssues(enabledForFailure: true, tool: pyLint(pattern: changed_logs_pattern, reportEncoding: 'UTF-8'))
}
}
}
stage('Lint Full') {
@ -146,17 +151,22 @@ timeout(time: global_timeout, unit: 'HOURS') {
}
)
} finally {
archiveArtifacts artifacts: 'pylint-report-*-full.log', allowEmptyArchive: true
step([$class: 'WarningsPublisher',
parserConfigurations: [[
parserName: 'PyLint',
pattern: 'pylint-report-*-full.log'
]],
failedTotalAll: '0',
useDeltaValues: false,
canRunOnFailed: true,
usePreviousBuildAsReference: true
])
def full_logs_pattern = 'pylint-report-*-full.log'
archiveArtifacts artifacts: full_logs_pattern, allowEmptyArchive: true
if (env.NODE_NAME.startsWith('jenkins-pr-')) {
step([$class: 'WarningsPublisher',
parserConfigurations: [[
parserName: 'PyLint',
pattern: full_logs_pattern
]],
failedTotalAll: '0',
useDeltaValues: false,
canRunOnFailed: true,
usePreviousBuildAsReference: true
])
} else {
recordIssues(enabledForFailure: true, tool: pyLint(pattern: full_logs_pattern, reportEncoding: 'UTF-8'))
}
}
}
}
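
For reference, the gawk filters in the Setup stages above split the `git diff --name-status` output into changed and deleted file lists; a rough Python equivalent is sketched below (the sample name-status output is hypothetical, not taken from this change set).

    # Rough Python equivalent of the gawk filters above; the sample
    # name-status output is made up for illustration.
    sample = (
        "M\tsalt/utils/event.py\n"
        "D\tsalt/modules/old_module.py\n"
        "R100\tdoc/old.rst\tdoc/new.rst\n"
    )

    changed, deleted = [], []
    for line in sample.splitlines():
        fields = line.split("\t")
        status, path = fields[0], fields[-1]  # $1 and $NF in the gawk scripts
        if status == "D":
            deleted.append(path)   # goes to file-list-deleted.log
        else:
            changed.append(path)   # goes to file-list-changed.log

    print(changed)  # ['salt/utils/event.py', 'doc/new.rst']
    print(deleted)  # ['salt/modules/old_module.py']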

View file

@ -151,7 +151,7 @@
# Store all returns in the given returner.
# Setting this option requires that any returner-specific configuration also
# be set. See various returners in salt/returners for details on required
# configuration values. (See also, event_return_queue below.)
# configuration values. (See also, event_return_queue and event_return_queue_max_seconds below.)
#
#event_return: mysql
@ -161,6 +161,12 @@
# By default, events are not queued.
#event_return_queue: 0
# In some cases enabling event return queueing can be very helpful, but the bus
# may not be busy enough to flush the queue consistently. Setting this to a reasonable
# value (1-30 seconds) will cause the queue to be flushed when the oldest event is older
# than `event_return_queue_max_seconds` regardless of how many events are in the queue.
#event_return_queue_max_seconds: 0
# Only return events matching tags in a whitelist, supports glob matches.
#event_return_whitelist:
# - salt/master/a_tag

View file

@ -4,3 +4,13 @@ In Progress: Salt 2018.3.5 Release Notes
Version 2018.3.5 is an **unreleased** bugfix release for :ref:`2018.3.0 <release-2018-3-0>`.
This release is still in progress and has not been released yet.
Master Configuration Changes
============================
To fix `#53411`_, a new configuration parameter `event_return_queue_max_seconds` is provided.
When this is set to a value greater than 0 and `event_return_queue` is not 0, if the oldest event
in the return queue is older than `event_return_queue_max_seconds`, the queue will be flushed to
returners regardless of how many events are in the queue.
.. _`#53411`: https://github.com/saltstack/salt/issues/53411
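
As a rough illustration only (names and values below are illustrative, not the implementation in salt/utils/event.py), the flush condition described above boils down to:

    # Minimal sketch of the flush condition described in the release note.
    def should_flush(queue_length, oldest_age_seconds,
                     event_return_queue, event_return_queue_max_seconds):
        # Size-based flush: queue reached the configured batch size.
        if queue_length >= event_return_queue:
            return True
        # Age-based flush: only active when max_seconds is greater than 0.
        if (event_return_queue_max_seconds > 0
                and oldest_age_seconds >= event_return_queue_max_seconds):
            return True
        return False

    # With event_return_queue: 35 and event_return_queue_max_seconds: 5,
    # a queue of 3 events whose oldest entry is 6 seconds old is flushed.
    print(should_flush(3, 6, 35, 5))  # True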

View file

@ -434,14 +434,8 @@ class Beacon(object):
Reset the beacons to defaults
'''
self.opts['beacons'] = {}
comment = 'Beacon Reset'
complete = True
# Fire the complete event back along with updated list of beacons
evt = salt.utils.event.get_event('minion', opts=self.opts)
evt.fire_event({'complete': complete, 'comment': comment,
evt.fire_event({'complete': True, 'comment': 'Beacons have been reset',
'beacons': self.opts['beacons']},
tag='/salt/minion/minion_beacon_reset_complete')
return True

View file

@ -550,6 +550,11 @@ VALID_OPTS = {
# returner specified by 'event_return'
'event_return_queue': int,
# The number of seconds that events can languish in the queue before we flush them.
# The goal here is to ensure that, if the bus is not busy enough to reach a total of
# `event_return_queue` events, the queued events won't get stale.
'event_return_queue_max_seconds': int,
# Only forward events to an event returner if it matches one of the tags in this list
'event_return_whitelist': list,

View file

@ -29,6 +29,7 @@ from salt.ext import six
import salt.utils.files
import salt.utils.path
import salt.utils.platform
import salt.utils.stringutils
log = logging.getLogger(__name__)
__virtualname__ = 'at'

View file

@ -621,15 +621,22 @@ def reset(**kwargs):
eventer = salt.utils.event.get_event('minion', opts=__opts__, listen=True)
res = __salt__['event.fire']({'func': 'reset'}, 'manage_beacons')
if res:
wait = kwargs.get('timeout', default_event_wait)
event_ret = eventer.get_event(
tag='/salt/minion/minion_beacon_reset_complete',
wait=kwargs.get('timeout', default_event_wait))
wait=wait)
if event_ret and event_ret['complete']:
ret['result'] = True
ret['comment'] = 'Beacon configuration reset.'
else:
ret['result'] = False
ret['comment'] = 'Something went wrong.'
if event_ret is None:
ret['result'] = False
ret['comment'] = (
'minion reset event not received after {} seconds'
).format(wait)
else:
ret['result'] = False
ret['comment'] = event_ret['comment']
return ret
except KeyError:
# Effectively a no-op, since we can't really return without an event system

View file

@ -23,6 +23,7 @@ import time
# Import salt libs
import salt.utils.files
import salt.utils.path
import salt.utils.stringutils
from salt.exceptions import SaltInvocationError
from salt.utils.versions import LooseVersion as _LooseVersion
@ -722,8 +723,7 @@ def import_key(text=None,
if filename:
try:
with salt.utils.files.flopen(filename, 'rb') as _fp:
lines = _fp.readlines()
text = ''.join(lines)
text = salt.utils.stringutils.to_unicode(_fp.read())
except IOError:
raise SaltInvocationError('filename does not exist.')
@ -1009,21 +1009,22 @@ def sign(user=None,
gnupg_version = _LooseVersion(gnupg.__version__)
if text:
if gnupg_version >= '1.3.1':
if gnupg_version >= _LooseVersion('1.3.1'):
signed_data = gpg.sign(text, default_key=keyid, passphrase=gpg_passphrase)
else:
signed_data = gpg.sign(text, keyid=keyid, passphrase=gpg_passphrase)
elif filename:
with salt.utils.files.flopen(filename, 'rb') as _fp:
if gnupg_version >= '1.3.1':
if gnupg_version >= _LooseVersion('1.3.1'):
signed_data = gpg.sign(text, default_key=keyid, passphrase=gpg_passphrase)
else:
signed_data = gpg.sign_file(_fp, keyid=keyid, passphrase=gpg_passphrase)
if output:
with salt.utils.files.flopen(output, 'w') as fout:
fout.write(signed_data.data)
fout.write(salt.utils.stringutils.to_bytes(signed_data.data))
else:
raise SaltInvocationError('filename or text must be passed.')
return signed_data.data
@ -1193,7 +1194,7 @@ def encrypt(user=None,
# This version does not allow us to encrypt using the
# file stream; we have to read in the contents and encrypt.
with salt.utils.files.flopen(filename, 'rb') as _fp:
_contents = _fp.read()
_contents = salt.utils.stringutils.to_unicode(_fp.read())
result = gpg.encrypt(_contents, recipients, passphrase=gpg_passphrase, output=output)
else:
# This version allows encrypting the file stream
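
For context on the version checks above: comparing version numbers as raw strings orders them lexicographically, so wrapping both sides in LooseVersion, as the change does explicitly, keeps the comparison numeric. A tiny standalone sketch using distutils' LooseVersion, which salt.utils.versions builds on:

    # Why version strings should not be compared as plain strings:
    # lexicographic ordering says '1.10.0' < '1.3.1'.
    from distutils.version import LooseVersion

    print('1.10.0' >= '1.3.1')                              # False (string compare)
    print(LooseVersion('1.10.0') >= LooseVersion('1.3.1'))  # True  (version compare)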

View file

@ -320,12 +320,8 @@ class SaltEvent(object):
sock_dir,
'minion_event_{0}_pull.ipc'.format(id_hash)
)
log.debug(
'{0} PUB socket URI: {1}'.format(self.__class__.__name__, puburi)
)
log.debug(
'{0} PULL socket URI: {1}'.format(self.__class__.__name__, pulluri)
)
log.debug('%s PUB socket URI: %s', self.__class__.__name__, puburi)
log.debug('%s PULL socket URI: %s', self.__class__.__name__, pulluri)
return puburi, pulluri
def subscribe(self, tag=None, match_type=None):
@ -370,9 +366,9 @@ class SaltEvent(object):
with salt.utils.asynchronous.current_ioloop(self.io_loop):
if self.subscriber is None:
self.subscriber = salt.transport.ipc.IPCMessageSubscriber(
self.puburi,
io_loop=self.io_loop
)
self.puburi,
io_loop=self.io_loop
)
try:
self.io_loop.run_sync(
lambda: self.subscriber.connect(timeout=timeout))
@ -382,9 +378,9 @@ class SaltEvent(object):
else:
if self.subscriber is None:
self.subscriber = salt.transport.ipc.IPCMessageSubscriber(
self.puburi,
io_loop=self.io_loop
)
self.puburi,
io_loop=self.io_loop
)
# For the asynchronous case, the connect will be deferred to when
# set_event_handler() is invoked.
@ -982,16 +978,10 @@ class AsyncEventPublisher(object):
epub_uri = epub_sock_path
epull_uri = epull_sock_path
log.debug(
'{0} PUB socket URI: {1}'.format(
self.__class__.__name__, epub_uri
)
)
log.debug(
'{0} PULL socket URI: {1}'.format(
self.__class__.__name__, epull_uri
)
)
log.debug('%s PUB socket URI: %s',
self.__class__.__name__, epub_uri)
log.debug('%s PULL socket URI: %s',
self.__class__.__name__, epull_uri)
minion_sock_dir = self.opts['sock_dir']
@ -1001,7 +991,7 @@ class AsyncEventPublisher(object):
try:
os.makedirs(minion_sock_dir, 0o755)
except OSError as exc:
log.error('Could not create SOCK_DIR: {0}'.format(exc))
log.error('Could not create SOCK_DIR: %s', exc)
# Let's not fail yet and try using the default path
if minion_sock_dir == default_minion_sock_dir:
# We're already trying the default system path, stop now!
@ -1011,7 +1001,7 @@ class AsyncEventPublisher(object):
try:
os.makedirs(default_minion_sock_dir, 0o755)
except OSError as exc:
log.error('Could not create SOCK_DIR: {0}'.format(exc))
log.error('Could not create SOCK_DIR: %s', exc)
# Let's stop at this stage
raise
@ -1027,7 +1017,7 @@ class AsyncEventPublisher(object):
payload_handler=self.handle_publish
)
log.info('Starting pull socket on {0}'.format(epull_uri))
log.info('Starting pull socket on %s', epull_uri)
with salt.utils.files.set_umask(0o177):
self.publisher.start()
self.puller.start()
@ -1192,6 +1182,7 @@ class EventReturn(salt.utils.process.SignalHandlingMultiprocessingProcess):
self.opts = opts
self.event_return_queue = self.opts['event_return_queue']
self.event_return_queue_max_seconds = self.opts.get('event_return_queue_max_seconds', 0)
local_minion_opts = self.opts.copy()
local_minion_opts['file_client'] = 'local'
self.minion = salt.minion.MasterMinion(local_minion_opts)
@ -1227,13 +1218,13 @@ class EventReturn(salt.utils.process.SignalHandlingMultiprocessingProcess):
if isinstance(self.opts['event_return'], list):
# Multiple event returners
for r in self.opts['event_return']:
log.debug('Calling event returner {0}, one of many.'.format(r))
log.debug('Calling event returner %s, one of many.', r)
event_return = '{0}.event_return'.format(r)
self._flush_event_single(event_return)
else:
# Only a single event returner
log.debug('Calling event returner {0}, only one '
'configured.'.format(self.opts['event_return']))
log.debug('Calling event returner %s, only one '
'configured.', self.opts['event_return'])
event_return = '{0}.event_return'.format(
self.opts['event_return']
)
@ -1245,13 +1236,13 @@ class EventReturn(salt.utils.process.SignalHandlingMultiprocessingProcess):
try:
self.minion.returners[event_return](self.event_queue)
except Exception as exc:
log.error('Could not store events - returner \'{0}\' raised '
'exception: {1}'.format(event_return, exc))
log.error('Could not store events - returner \'%s\' raised '
'exception: %s', event_return, exc)
# don't waste processing power unnecessarily on converting a
# potentially huge dataset to a string
if log.level <= logging.DEBUG:
log.debug('Event data that caused an exception: {0}'.format(
self.event_queue))
log.debug('Event data that caused an exception: %s',
self.event_queue)
else:
log.error('Could not store return for event(s) - returner '
'\'%s\' not found.', event_return)
@ -1265,17 +1256,52 @@ class EventReturn(salt.utils.process.SignalHandlingMultiprocessingProcess):
events = self.event.iter_events(full=True)
self.event.fire_event({}, 'salt/event_listen/start')
try:
# events below is a generator; we will iterate until we get the salt/event/exit tag
oldestevent = None
for event in events:
if event['tag'] == 'salt/event/exit':
# We're done eventing
self.stop = True
if self._filter(event):
# This event passed the filter, add it to the queue
self.event_queue.append(event)
if len(self.event_queue) >= self.event_return_queue:
too_long_in_queue = False
# If max_seconds is > 0, then we want to make sure we flush the queue
# every event_return_queue_max_seconds seconds. If it's 0, don't
# apply any of this logic.
if self.event_return_queue_max_seconds > 0:
rightnow = datetime.datetime.now()
if not oldestevent:
oldestevent = rightnow
age_in_seconds = (rightnow - oldestevent).seconds
if age_in_seconds > 0:
log.debug('Oldest event in queue is %s seconds old.', age_in_seconds)
if age_in_seconds >= self.event_return_queue_max_seconds:
too_long_in_queue = True
oldestevent = None
else:
too_long_in_queue = False
if too_long_in_queue:
log.debug('Oldest event has been in queue too long, will flush queue')
# If we are over the max queue size or the oldest item in the queue has been there too long
# then flush the queue
if len(self.event_queue) >= self.event_return_queue or too_long_in_queue:
log.debug('Flushing %s events.', len(self.event_queue))
self.flush_events()
oldestevent = None
if self.stop:
# We saw the salt/event/exit tag, we can stop eventing
break
finally: # flush all we have at this moment
# No matter what, make sure we flush the queue even when we are exiting
# and there will be no more events.
if self.event_queue:
log.debug('Flushing %s events.', len(self.event_queue))
self.flush_events()
def _filter(self, event):
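
Several of the event.py changes above also switch log calls from str.format() to lazy %-style arguments; a short standalone sketch of why that matters (the logger name is arbitrary):

    # With %-style arguments, the logging module only formats the message
    # when the record is actually emitted; str.format() always runs.
    import logging

    logging.basicConfig(level=logging.INFO)
    log = logging.getLogger('sketch')  # arbitrary logger name

    class Costly(object):
        def __str__(self):
            print('expensive __str__ ran')
            return 'costly'

    log.debug('value: {0}'.format(Costly()))  # __str__ runs even though DEBUG is off
    log.debug('value: %s', Costly())          # skipped: message is never formatted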

View file

@ -62,7 +62,9 @@ class SaltCacheLoader(BaseLoader):
def shutdown(cls):
if cls._cached_client is None:
return
cls._cached_client.destroy()
# PillarClient and LocalClient objects do not have a destroy method
if hasattr(cls._cached_client, 'destroy'):
cls._cached_client.destroy()
cls._cached_client = None
def __init__(self, opts, saltenv='base', encoding='utf-8',

View file

@ -63,6 +63,7 @@ class MinionTest(ShellCase, testprogram.TestProgramCase, ShellCaseCommonTestsMix
with salt.utils.files.fopen(self.get_config_file_path(config_file_name), 'r') as fhr:
config = salt.utils.yaml.safe_load(fhr)
config['log_file'] = 'file:///tmp/log/LOG_LOCAL3'
config['id'] = 'issue-7754'
with salt.utils.files.fopen(os.path.join(config_dir, config_file_name), 'w') as fhw:
salt.utils.yaml.safe_dump(config, fhw, default_flow_style=False)

View file

@ -2,17 +2,12 @@
'''
Integration tests for the beacon states
'''
# Import Python Libs
from __future__ import absolute_import, print_function, unicode_literals
# Import Salt Testing Libs
from tests.support.case import ModuleCase
from tests.support.mixins import SaltReturnAssertsMixin
import logging
log = logging.getLogger(__name__)
class BeaconStateTestCase(ModuleCase, SaltReturnAssertsMixin):
'''

View file

@ -1263,11 +1263,11 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin):
if IS_WINDOWS:
ret = self.run_state(
'file.directory', name=tmp_dir, recurse={'mode'},
'file.directory', name=tmp_dir, recurse=['mode'],
follow_symlinks=True, win_owner='Administrators')
else:
ret = self.run_state(
'file.directory', name=tmp_dir, recurse={'mode'},
'file.directory', name=tmp_dir, recurse=['mode'],
file_mode=644, dir_mode=755)
self.assertSaltTrueReturn(ret)

View file

@ -1,17 +1,21 @@
# -*- coding: utf-8 -*-
'''
:codeauthor: :email:`Gareth J. Greenaway <gareth@saltstack.com>`
:codeauthor: :email:`David Murphy <dmurphy@saltstack.com>`
'''
# Import Python libs
from __future__ import absolute_import, print_function, unicode_literals
import os
import shutil
import datetime
import time
# Import Salt Testing Libs
from tests.support.mixins import LoaderModuleMockMixin
# Import Salt Testing libs
from tests.support.helpers import destructiveTest
from tests.support.unit import TestCase, skipIf
from tests.support.paths import TMP
from tests.support.mixins import LoaderModuleMockMixin
from tests.support.mock import (
MagicMock,
patch,
@ -19,11 +23,143 @@ from tests.support.mock import (
NO_MOCK_REASON
)
# Import Salt Libs
import salt.modules.gpg as gpg
# Import Salt libs
import salt.utils.path
import salt.utils.platform
import salt.utils.files
import salt.modules.gpg as gpg
GPG_TEST_KEY_PASSPHRASE = 'testkeypassphrase'
GPG_TEST_KEY_ID = '7416F045'
GPG_TEST_PUB_KEY = """-----BEGIN PGP PUBLIC KEY BLOCK-----
mQGNBFz1dx4BDACph7J5nuWE+zb9rZqTaL8akAnPAli2j6Qtk7BTDzTM9Kq80U2P
O3QRAFBQDODsgmrBTWgeZeNhN6Snz2WrZ8pC0RMK+mCmEt5S49ydWtvWl/xtzPfg
sy8h8OrIjSH1G0NQv9vdBpg2Y9vXLNDCZAqH0/Sh/wCkHT80c4i8TU09UZfx96S6
fFVmB7nHu/ztgiIkC6Fr04WGHlkd50g8r8CFPvwKOTD+rfoMsGppmAC1+y8ajfik
B+tEL88Rv2s4hLU78nQ3iGtdVRSs5Ip0x4O/PYZIDEd8KuBMo+flSzOZj2HCbUzN
MHiTXIbL8hBlwku9LGO7Itks4v2vfDh57JRHykwzGnvOlgXtSSvbayWLohNXtzWq
WtsMKrsrsIUprg1jhIVHnMSZZXMMizlni6MT5vbil2Bgn1g7diNscDnfCD6vlWUH
FRS1XkFFZ5ozu0+lC/5UaTcrjhH8/hQBNJwjsDSPsyKYQm+t7PXqq4SlI3Er7JJW
esK0diZ6reeebl0AEQEAAbQhdGVzdGtleSA8cGFja2FnaW5nQHNhbHRzdGFjay5j
b20+iQHOBBMBCgA4AhsDBQsJCAcCBhUKCQgLAgQWAgMBAh4BAheAFiEEjS1DixNC
naoZrRFuvMeUg3QW8EUFAlz1ekoACgkQvMeUg3QW8EVm1gv/Z5CCqFoF8IXpa3g0
G+9C4gRS0h+tEYtjebAgxBn/dqk6cSNIb1BGDM/sSWxK5/jweN8CF/ojVgP1CFrX
igULnL3g5351L+8feU2ImP2OML438/GcOOJ+iwTKUBRuqxqK8NwcRuuN6YmbjtUw
JSD2+60DlWfGsInyenwGkgBAM44Y6dSAXKsm6dJ/IGVTQIsHE5mKRykrRZ1jyXxw
i1CF8fEyG6fNNb8I8Oiyj52xdYMxBvGmgPpFlF1jRoU+OViGFeUiZ49XFlC8GNCf
boWfXg+PCwZeWUeJ/s6a6iC5HG7iC0XYRiqhzymP8ctrwwm5dPT4ebYNfUSqq8i0
/YG6JGdsQZrq0i0FuceUSfOKVeFvSm+AkCG3kLxxgM925fqBetWiAJRCDYdmy8uc
jm618584hqa/yFMOfwVoLqq8mVg84wyGU6gXo6hXCa/008PITFZD47ILuN/Z8iLm
8Or/aJD5Tq+Wg3r2Di9+Ku8/gcn3bVxwFRjiVPrFwJkyjfMFuQGNBFz1dx4BDADU
Ynz0efLmaKqZIjfxwDf7iVqwcD9b8cMXIv2aa3Y56QDVJBXU5952LiU8XuzBBPq+
4FYQ78UmxSv3Rk6LKb9P2ih2L1PaJuQ1ZkNrQLqab3olpAu/Xe3raGLgCOU0RKJw
EPF3RcKu8ALuRcovfwzXWg8w19QRUPewZdVC4VgslKp8mNLECvdUxxVIDQWf06RZ
uCAfbqdiYReE62QT7NR4lAa1RpfU7Nt149OcQEP8VKTAZgTYyuwhRXFbrDD3Zp58
k5H0nKHNX+w1Ayih/YUk2b3etaBhlcTVAy/73TPfrd3Gl8dtzJZNtUD/eLWdGfP9
mCghmyAqbiQngH2eAMeifIYornynZFVBPBlvnwy7Iouq0V6tIVyNPGp0jcy1j2XT
NRBJyFbvam3hmrRW8A/VOJQ1W7LOKaM/5lh/BarrSLKn0xlL97GTmuSqlS+WrmyM
kU182TUYyUD7Rs3mydnMVS/N4aRxu4ITaTm9vieZLmAPR9vPgo+GwdHEkwm797kA
EQEAAYkBtgQYAQoAIAIbDBYhBI0tQ4sTQp2qGa0RbrzHlIN0FvBFBQJc9XqkAAoJ
ELzHlIN0FvBFlyEL/jVhm2PFj2mCLuKE5/nV4JvxY4Qu4+NCFiEdYK+zUoD36gEJ
3VjHL5dydHuZWcvm+XLW1PseNx2awVs47mjv2iZOLwY6BtfAFWhWEFmBEe6fTFXz
KkDWRst4gm0b0B7S3byoABwcyYNS6RkTfUApK4zdYErbfOLoT+Xa08YQKLVK7fmE
KBnBnnHUvktYTEvhwv9BID+qLnTVSQcjRcXbDQAYm14c7Nyb/SyxcUaUkCk41MVY
+vzNQlFrVc4h2np41X8JbmrsQb37E7lE+h32sJFBU03SGf0vT7SXXQj+UD/DEGay
Gt/8aRa5FGrcJyM5mTdbSgvCp0EjTrdokK5GHwM23cbSTo+nN9BNhIBRc4929SaJ
DVRqOIoJ+eHZdf3gIkMPOA3fBbMYzW65LIxt/p49tHD0c/nioZETycEgGuuYbnrn
IfXHFqiCAxkobIHqUg/BSu1cs8GNgE7BVUXy8JThuzmVdh4Pvd3YN1ouoPyVuDrk
ylirh0aqUQdSeIuJTg==
=yF8M
-----END PGP PUBLIC KEY BLOCK-----
"""
GPG_TEST_PRIV_KEY = """-----BEGIN PGP PRIVATE KEY BLOCK-----
lQWFBFz1dx4BDACph7J5nuWE+zb9rZqTaL8akAnPAli2j6Qtk7BTDzTM9Kq80U2P
O3QRAFBQDODsgmrBTWgeZeNhN6Snz2WrZ8pC0RMK+mCmEt5S49ydWtvWl/xtzPfg
sy8h8OrIjSH1G0NQv9vdBpg2Y9vXLNDCZAqH0/Sh/wCkHT80c4i8TU09UZfx96S6
fFVmB7nHu/ztgiIkC6Fr04WGHlkd50g8r8CFPvwKOTD+rfoMsGppmAC1+y8ajfik
B+tEL88Rv2s4hLU78nQ3iGtdVRSs5Ip0x4O/PYZIDEd8KuBMo+flSzOZj2HCbUzN
MHiTXIbL8hBlwku9LGO7Itks4v2vfDh57JRHykwzGnvOlgXtSSvbayWLohNXtzWq
WtsMKrsrsIUprg1jhIVHnMSZZXMMizlni6MT5vbil2Bgn1g7diNscDnfCD6vlWUH
FRS1XkFFZ5ozu0+lC/5UaTcrjhH8/hQBNJwjsDSPsyKYQm+t7PXqq4SlI3Er7JJW
esK0diZ6reeebl0AEQEAAf4HAwIqiZQqEMAZQ/+u0gE6tBcp52lUhE9fjORqgsY6
C5klAfrnrQyHXYkfjjQMWErSDR5FHeOxOPdZNnhVTBRaUIypLd+Os/YWl5lVO223
znbfK8GJIwHbDFQBSxtkC3WtD8cCqtKXvzApZzqeOhgNcaFJE956ZNlZfsCf0qsm
6xpEq07YiRVga6jJvjIFiPv7wMtdQQ67pEP4/tavLl+yuf6oJko2FjuG3RxrTf/C
CB4tyHCsRwgV7ouEdoSVhjFiDSS5xeWWLPRaXu4ceL0AjClHmdlMJtcpbyXKoh3U
uG5Cnwv9gXh24Qc6uuTWX61u7EhFLUWmKMFOI8dA+INYS8cXU8t6TU6XeV/01c7N
Q1O6QUCOx5VRbWRQuuvQN4f1gZm5QqN2jpNWjoUp2GSoxcHycEVSweEr+TmaouDA
ZOo12gx6dppkiwqS7Feq28qdpiZZPfdl/CvuWHxveNU9OVlexJ6A5PLep053qY+3
OlkvvkOxwmkJk2A3ITb1XngQkZCQDxAqCG6xMYjGIblKqqLqV1/q3pQ1nNbq5/iM
OtoxB7O7kZcyk7fQodk8EUz/ehTAZ5K8EWUETmiH9YlKTBbw8YMYEnuKfUFW6xqT
ROqurJfBlYmZEOxQ3oDVLZSfJQ3g/SXAOTKprB9GKyahM026Y+gfqR7yfwA8ifrH
E+HV4u7n/UjaUgu45LRGLGZQ7slmm79uYcVhBodQ0/eWcow7roHpOCFWTgyY3uhS
xdfuqgkEF8BGjQFaI4VNVeY+3+SM989BagAFuDlJw33fDoFSTPt9C+sqP1t1IvLv
9Eajn55MhO6gUptO3ViFPQE/EkiOrmaAJglu1LHEF/ssqWb/1+RGqF6N0OkKC+gx
iuuTgWl4wfxUsyh2JqIcj+xHRSf3G9OVJYkXaYsSNQ2eoSRlEzzu7Cxi83/qt6Dm
S+ST4wHl2VypfkhbNMq0W1aR8Kubi2Ixvk31ZDyk0uecRf3kNjVwD84WPjDedBsh
5rtCZO5kCAyWooCG41il09HfV9NCTjACCeO+dl4FO5aaLS0JSCBLVtORtwDCCZz+
QhS9CeXC+ook7sIaaiT0xWSnPmhEYE6roqwj4Lq3vvXIgHZjxeJizlGO0OSdTPBw
9wQ5ij/8G6MEGap4thvTohsFGUxHK2xx8Z089kGdmKd4piY/kjtX7AFtLEc0YiDa
w7PTlrqJA9FRta7g/aYVCKBk8G+8dxiErErFgm6RBxnQiJ/lLUAVsJv1cAQ8oyCK
GyDzGXEFk9eQtKGczF4CK8NhOMc9HabnQnzxcMGiVXEn/E3bDqocfeOAWEYft8zJ
sy96EJAk581uZ4CiKOcQW+Zv3N8O7ogdtCF0ZXN0a2V5IDxwYWNrYWdpbmdAc2Fs
dHN0YWNrLmNvbT6JAc4EEwEKADgCGwMFCwkIBwIGFQoJCAsCBBYCAwECHgECF4AW
IQSNLUOLE0KdqhmtEW68x5SDdBbwRQUCXPV6SgAKCRC8x5SDdBbwRWbWC/9nkIKo
WgXwhelreDQb70LiBFLSH60Ri2N5sCDEGf92qTpxI0hvUEYMz+xJbErn+PB43wIX
+iNWA/UIWteKBQucveDnfnUv7x95TYiY/Y4wvjfz8Zw44n6LBMpQFG6rGorw3BxG
643piZuO1TAlIPb7rQOVZ8awifJ6fAaSAEAzjhjp1IBcqybp0n8gZVNAiwcTmYpH
KStFnWPJfHCLUIXx8TIbp801vwjw6LKPnbF1gzEG8aaA+kWUXWNGhT45WIYV5SJn
j1cWULwY0J9uhZ9eD48LBl5ZR4n+zprqILkcbuILRdhGKqHPKY/xy2vDCbl09Ph5
tg19RKqryLT9gbokZ2xBmurSLQW5x5RJ84pV4W9Kb4CQIbeQvHGAz3bl+oF61aIA
lEINh2bLy5yObrXznziGpr/IUw5/BWguqryZWDzjDIZTqBejqFcJr/TTw8hMVkPj
sgu439nyIubw6v9okPlOr5aDevYOL34q7z+ByfdtXHAVGOJU+sXAmTKN8wWdBYYE
XPV3HgEMANRifPR58uZoqpkiN/HAN/uJWrBwP1vxwxci/ZprdjnpANUkFdTn3nYu
JTxe7MEE+r7gVhDvxSbFK/dGTospv0/aKHYvU9om5DVmQ2tAuppveiWkC79d7eto
YuAI5TREonAQ8XdFwq7wAu5Fyi9/DNdaDzDX1BFQ97Bl1ULhWCyUqnyY0sQK91TH
FUgNBZ/TpFm4IB9up2JhF4TrZBPs1HiUBrVGl9Ts23Xj05xAQ/xUpMBmBNjK7CFF
cVusMPdmnnyTkfScoc1f7DUDKKH9hSTZvd61oGGVxNUDL/vdM9+t3caXx23Mlk21
QP94tZ0Z8/2YKCGbICpuJCeAfZ4Ax6J8hiiufKdkVUE8GW+fDLsii6rRXq0hXI08
anSNzLWPZdM1EEnIVu9qbeGatFbwD9U4lDVbss4poz/mWH8FqutIsqfTGUv3sZOa
5KqVL5aubIyRTXzZNRjJQPtGzebJ2cxVL83hpHG7ghNpOb2+J5kuYA9H28+Cj4bB
0cSTCbv3uQARAQAB/gcDAgUPU1tmC3CS/x0qZYicVcMiU5wop6fnbnNkEfUQip8V
qpL64/GpP6X7sJiY2BCo0/5AMPDKlTwPxogMQ6NduZ2AbvJybGC7AQULMkd4Y4H1
nwrDk5HWO5dLVoXRSVw9Dm6oaV4bi6wno9yapkq7AVRnvtNEza47gxmV2iwRoU5H
5ciQTU6nd1TkFNhD4ZwZ25CMqffvbrE2Ie6RsBUr9HlxYIint91rVLkkBGhw8W4t
KushxAZpBOQB0Rqtuak/q+/xyDnvNvU/A9GeKpRrxzwAbIdtW0VjPulDk1tThGDA
kmuxSJ1yxUb+CzA/5YoMXto1OqjUI2hO108xgTVl5hpmckBnwsPtbjrtDYFAqwfq
qF9YAVQ3RfMn3ThZ2oXg+FJMcwd9uVJn2/LZd81Nc6g4UogD1sD2ye2vqDGTEztK
BAdthEcufnUP5UrEixE8CPzAJOjuC5ROU57FXCaSSUfIwXO3UoxvNWcuXDC7RVDz
nsv/Hg2j0pSeFht2NO6Pom+4XHY+LHImPTfXamN6IDsTRJGQW8R7Y131fjPQMn7I
0WjyIiqD4eLo5KQYjL+0bE0JiLRaJnlfbu1uoV3sgi8bwG6WlnLh7eKDErA2P0Zs
r0KX5yGR5Ih2CAMfMmqqYrkEYmNxNbLvL5ie9F35MnvRbqyY/9pl0p1Eah7uGnuK
or13bg801HoZJLBTr4sJTqkwuUztloVyBdM6T5hal+rxX37Wnj1PgD0e0Ydqo6hJ
7WJ/Zjd+0qk90VoiGNRre7tMBYDQ3w3wS+tSta3kxTKj5I4hLZncN+pt9F6o+tgd
YAhWO93DzWjMMUV/jkKTJqwAFAuRlnTwzbBS70N2Z8jrGczV05RV9OH7DRr34noF
O7/Bn0iDpKZdbArtkJZyu4B+MUp/RRiuxn7iWOM2tEjDhUuyHXYYFppFB8fG7r52
VcxH/Sc3VcXB0l2KywrAG2oZfiE8M4NPHuiIHFpcjeK2VLrP2iGLvdlL4IsvtFIU
uLiFi7r0egEi/Ge8ebRF7TtjmhL5Jzi9bbUGuvxSIGZU1HCECq+hHVy45XwKrRTo
AzDIwNjBFmZzL7FI7X16W/6Y11VVmXEmDt9dmmu78bT0z2Bq0Q7K9C7Eq2qzW65z
+4fntFF8BWDs3l5yPKLjg+wlgPPXieHgrUQpZOYCsFJqig3xFZSu1ZMzYdlvyNSF
KAgMPZfi37kAUo8ZiH27SZAA/aTK6b69oEQ6I7CsMJZLRp/gzYvn4NN/DIK3fuYc
jsKB6OR3gWmU7EDf/1EZkO0YK2YiwkSrDALJdUo7ArYR2KIZTUEG9rxDBUD8IyIz
PGdh7sBG4PhOxpQ+SiZyzLzZAJjviQG2BBgBCgAgAhsMFiEEjS1DixNCnaoZrRFu
vMeUg3QW8EUFAlz1eqQACgkQvMeUg3QW8EWXIQv+NWGbY8WPaYIu4oTn+dXgm/Fj
hC7j40IWIR1gr7NSgPfqAQndWMcvl3J0e5lZy+b5ctbU+x43HZrBWzjuaO/aJk4v
BjoG18AVaFYQWYER7p9MVfMqQNZGy3iCbRvQHtLdvKgAHBzJg1LpGRN9QCkrjN1g
Stt84uhP5drTxhAotUrt+YQoGcGecdS+S1hMS+HC/0EgP6oudNVJByNFxdsNABib
Xhzs3Jv9LLFxRpSQKTjUxVj6/M1CUWtVziHaenjVfwluauxBvfsTuUT6HfawkUFT
TdIZ/S9PtJddCP5QP8MQZrIa3/xpFrkUatwnIzmZN1tKC8KnQSNOt2iQrkYfAzbd
xtJOj6c30E2EgFFzj3b1JokNVGo4ign54dl1/eAiQw84Dd8FsxjNbrksjG3+nj20
cPRz+eKhkRPJwSAa65hueuch9ccWqIIDGShsgepSD8FK7VyzwY2ATsFVRfLwlOG7
OZV2Hg+93dg3Wi6g/JW4OuTKWKuHRqpRB1J4i4lO
=WRTN
-----END PGP PRIVATE KEY BLOCK-----
"""
try:
import gnupg # pylint: disable=import-error,unused-import
HAS_GPG = True
@ -31,15 +167,38 @@ except ImportError:
HAS_GPG = False
@skipIf(not salt.utils.path.which('gpg'), 'GPG not installed. Skipping')
@destructiveTest
@skipIf(NO_MOCK, NO_MOCK_REASON)
@skipIf(not salt.utils.platform.is_linux(), 'These tests can only be run on linux')
class GpgTestCase(TestCase, LoaderModuleMockMixin):
'''
Test cases for salt.modules.gpg
Validate the gpg module
'''
def setup_loader_modules(self):
return {gpg: {'__salt__': {}}}
@skipIf(not HAS_GPG, 'GPG Module Unavailable')
def setUp(self):
super(GpgTestCase, self).setUp()
self.gpghome = os.path.join(TMP, 'gpghome')
if not os.path.isdir(self.gpghome):
# left behind... Don't fail because of this!
os.makedirs(self.gpghome)
self.gpgfile_pub = os.path.join(TMP, 'gpgfile.pub')
with salt.utils.files.fopen(self.gpgfile_pub, 'wb') as fp:
fp.write(salt.utils.stringutils.to_bytes(GPG_TEST_PUB_KEY))
self.gpgfile_priv = os.path.join(TMP, 'gpgfile.priv')
with salt.utils.files.fopen(self.gpgfile_priv, 'wb') as fp:
fp.write(salt.utils.stringutils.to_bytes(GPG_TEST_PRIV_KEY))
self.user = 'salt'
@skipIf(not HAS_GPG, 'GPG Module Unavailable')
def tearDown(self):
if os.path.isfile(self.gpgfile_pub):
os.remove(self.gpgfile_pub)
shutil.rmtree(self.gpghome, ignore_errors=True)
super(GpgTestCase, self).tearDown()
@skipIf(not HAS_GPG, 'GPG Module Unavailable')
def test_list_keys(self):
'''
@ -156,7 +315,7 @@ class GpgTestCase(TestCase, LoaderModuleMockMixin):
u'groups': [
u'root'
],
u'home': u'/root',
u'home': self.gpghome,
u'fullname': u'root',
u'homephone': u'',
u'name': u'root'}
@ -202,7 +361,7 @@ class GpgTestCase(TestCase, LoaderModuleMockMixin):
'groups': [
'root'
],
'home': '/root',
'home': self.gpghome,
'fullname': 'root',
'homephone': '',
'name': 'root'}
@ -227,3 +386,35 @@ class GpgTestCase(TestCase, LoaderModuleMockMixin):
with patch.object(gpg, '_search_keys', return_value=_search_result):
ret = gpg.search_keys('person@example.com')
self.assertEqual(ret, _expected_result)
@skipIf(not HAS_GPG, 'GPG Module Unavailable')
def test_gpg_import_pub_key(self):
config_user = MagicMock(return_value='salt')
user_info = MagicMock(return_value={'name': 'salt', 'home': self.gpghome, 'uid': 1000})
with patch.dict(gpg.__salt__, {'config.option': config_user}):
with patch.dict(gpg.__salt__, {'user.info': user_info}):
ret = gpg.import_key(None, self.gpgfile_pub, 'salt', self.gpghome)
self.assertEqual(ret['res'], True)
@skipIf(not HAS_GPG, 'GPG Module Unavailable')
def test_gpg_import_priv_key(self):
config_user = MagicMock(return_value='salt')
user_info = MagicMock(return_value={'name': 'salt', 'home': self.gpghome, 'uid': 1000})
with patch.dict(gpg.__salt__, {'config.option': config_user}):
with patch.dict(gpg.__salt__, {'user.info': user_info}):
ret = gpg.import_key(None, self.gpgfile_priv, 'salt', self.gpghome)
self.assertEqual(ret['res'], True)
@skipIf(not HAS_GPG, 'GPG Module Unavailable')
def test_gpg_sign(self):
config_user = MagicMock(return_value='salt')
user_info = MagicMock(return_value={'name': 'salt', 'home': self.gpghome, 'uid': 1000})
pillar_mock = MagicMock(return_value={'gpg_passphrase': GPG_TEST_KEY_PASSPHRASE})
with patch.dict(gpg.__salt__, {'config.option': config_user}):
with patch.dict(gpg.__salt__, {'user.info': user_info}):
with patch.dict(gpg.__salt__, {'pillar.get': pillar_mock}):
ret = gpg.import_key(None, self.gpgfile_priv, 'salt', self.gpghome)
self.assertEqual(ret['res'], True)
gpg_text_input = 'The quick brown fox jumped over the lazy dog'
gpg_sign_output = gpg.sign(config_user, GPG_TEST_KEY_ID, gpg_text_input, None, None, True, self.gpghome)
self.assertIsNotNone(gpg_sign_output)

View file

@ -122,7 +122,10 @@ class TestSaltCacheLoader(TestCase):
},
'pillar_roots': {
'test': [self.template_dir]
}
},
'extension_modules': os.path.join(
os.path.dirname(os.path.abspath(__file__)),
'extmods'),
}
super(TestSaltCacheLoader, self).setUp()
@ -223,6 +226,36 @@ class TestSaltCacheLoader(TestCase):
result = jinja.get_template('hello_include').render(a='Hi', b='Salt')
self.assertEqual(result, 'Hey world !Hi Salt !')
def test_cached_file_client(self):
'''
Multiple instantiations of SaltCacheLoader use the cached file client
'''
with patch('salt.transport.client.ReqChannel.factory', Mock()):
loader_a = SaltCacheLoader(self.opts)
loader_b = SaltCacheLoader(self.opts)
assert loader_a._file_client is loader_b._file_client
def test_file_client_kwarg(self):
'''
A file client can be passed to SaltCacheLoader, overriding any
cached file client
'''
mfc = MockFileClient()
loader = SaltCacheLoader(self.opts, _file_client=mfc)
assert loader._file_client is mfc
def test_cache_loader_shutdown(self):
'''
The shutdown method can be called without raising an exception when the
file_client does not have a destroy method
'''
mfc = MockFileClient()
assert not hasattr(mfc, 'destroy')
loader = SaltCacheLoader(self.opts, _file_client=mfc)
assert loader._file_client is mfc
# Shutdown method should not raise any exceptions
loader.shutdown()
class TestGetTemplate(TestCase):