Merge branch '2019.2.1' into fix_test_pydsl

This commit is contained in:
Shane Lee 2019-07-02 16:17:23 -06:00 committed by GitHub
commit 1619c687b1
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
50 changed files with 1267 additions and 334 deletions

View file

@@ -1,20 +1,15 @@
// Define the maximum time, in hours, that a test run should run for
def global_timeout = 2
def salt_target_branch = '2019.2.1'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '10')),
[
$class: 'ScannerJobProperty', doNotScan: false
],
[
$class: 'RebuildSettings', autoRebuild: false, rebuildDisabled: false
],
])
def shell_header
timeout(time: global_timeout, unit: 'HOURS') {
node('docs') {
node('docs') {
timeout(time: global_timeout, unit: 'HOURS') {
ansiColor('xterm') {
timestamps {
try {
@ -44,7 +39,7 @@ timeout(time: global_timeout, unit: 'HOURS') {
pyenv install --skip-existing 3.6.8
pyenv shell 3.6.8
python --version
pip install -U https://github.com/s0undt3ch/nox/archive/hotfix/py2.zip#egg=Nox==2018.10.17
pip install -U nox-py2
nox --version
'''
}

152
.ci/kitchen-amazon2-py2 Normal file
View file

@@ -0,0 +1,152 @@
// Jenkins scripted pipeline: create an EC2 test VM with Test Kitchen, converge
// it, run the Salt test suite for one distro/python combination, download and
// archive the test artifacts, destroy the VM, and report the outcome to
// GitHub (and to Slack on failure).

// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
// Distro / python combination under test. These only feed the environment
// variables below (TEST_SUITE, TEST_PLATFORM, CODECOV_FLAGS).
def distro_name = 'amazon'
def distro_version = '2'
def python_version = 'py2'
// Salt branch fetched into the workspace during checkout; the golden-images
// branch is exported as GOLDEN_IMAGES_CI_BRANCH for the kitchen configuration
// (consumed outside this file -- presumably selects the prebuilt VM images;
// confirm).
def salt_target_branch = '2019.2.1'
def golden_images_branch = '2019.2'
properties([
// Keep build records/artifacts for the last 10 builds only.
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '10')),
parameters([
// Exported below as FORCE_FULL; within this script it gates the coverage upload.
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
])
])
// Run on a kitchen-capable agent, bounded by the global timeout, with AWS
// credentials in the environment for the EC2 kitchen driver.
node('kitchen-slave') {
timeout(time: global_timeout, unit: 'HOURS') {
withCredentials([[$class: 'AmazonWebServicesCredentialsBinding',
accessKeyVariable: 'AWS_ACCESS_KEY_ID',
credentialsId: 'AWS_ACCESS_KEY_ID',
secretKeyVariable: 'AWS_SECRET_ACCESS_KEY']]) {
ansiColor('xterm') {
timestamps {
// Environment consumed by kitchen, nox and the codecov upload below.
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
'NOX_ENV_NAME=runtests-zeromq',
'NOX_ENABLE_FROM_FILENAMES=true',
'NOX_PASSTHROUGH_OPTS=--ssh-tests',
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version}",
// NOTE(review): /root/bin appears twice in PATH.
'PATH=~/.rbenv/shims:/usr/local/rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/root/bin:/root/bin',
'RBENV_VERSION=2.4.2',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"FORCE_FULL=${params.runFull}",
]) {
// Set the GH status even before cloning the repo
if (env.NODE_NAME.startsWith('jenkins-pr-')) {
stage('github-pending') {
githubNotify credentialsId: 'test-jenkins-credentials',
description: "running ${TEST_SUITE}-${TEST_PLATFORM}...",
status: 'PENDING',
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
}
}
// Checkout the repo
stage('checkout-scm') {
cleanWs notFailBuild: true
checkout scm
// Fetch the target branch into a remote-tracking ref -- presumably so
// the NOX_ENABLE_FROM_FILENAMES change detection can diff against it;
// confirm against the nox configuration.
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('setup-bundle') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
// Create the VM, with a random 1-15s sleep to stagger concurrent creates.
// NOTE(review): the trailing echo makes the shell step's exit status 0, so
// a kitchen failure here is only logged and never triggers the retry; the
// retry covers step-level failures only -- confirm this is intentional.
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 1-15 -n 1); echo "Sleeping $t seconds"; sleep $t
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?;"
'''
}
}
// From here on the nested try/finally blocks guarantee that artifacts are
// collected and the VM is destroyed however the run ends.
try {
sshagent(credentials: ['jenkins-testing-ssh-key']) {
// Load the testing key, falling back to kitchen.pem when it is absent.
sh 'ssh-add ~/.ssh/jenkins-testing.pem || ssh-add ~/.ssh/kitchen.pem'
try {
// Converge + tests get their own shorter timeout so the artifact
// download below still fits inside the global timeout.
timeout(time: testrun_timeout, unit: 'HOURS') {
stage('Converge VM') {
// Trailing echo: kitchen's exit code is logged, not propagated.
sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?;"'
}
stage('Run Tests') {
// DONT_DOWNLOAD_ARTEFACTS / ONLY_DOWNLOAD_ARTEFACTS (below) split
// verify into a test phase and a download phase; they are consumed
// outside this file, presumably by the kitchen verifier -- confirm.
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?;"'
}
}
}
} finally {
try {
// Best-effort artifact download: '|| exit 0' keeps this stage from
// failing the build when the VM is unreachable.
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
}
archiveArtifacts artifacts: 'artifacts/*,artifacts/**/*'
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
// Always destroy the VM, even when archiving failed.
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?;"
'''
}
// Upload coverage only for full runs that actually produced a report.
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}"
fi
'''
}
}
}
}
}
}
} catch (Exception e) {
// NOTE(review): the exception is swallowed -- only the build result records
// the failure; the error itself is never logged.
currentBuild.result = 'FAILURE'
} finally {
cleanWs notFailBuild: true
// Report the final result: GitHub commit status on PR agents, plus a
// Slack notification on failure.
if (currentBuild.resultIsBetterOrEqualTo('SUCCESS')) {
if (env.NODE_NAME.startsWith('jenkins-pr-')) {
githubNotify credentialsId: 'test-jenkins-credentials',
description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has passed",
status: 'SUCCESS',
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
}
} else {
if (env.NODE_NAME.startsWith('jenkins-pr-')) {
githubNotify credentialsId: 'test-jenkins-credentials',
description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has failed",
status: 'FAILURE',
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
}
// A Slack outage must not change the build result.
try {
slackSend channel: "#jenkins-prod-pr",
color: '#FF0000',
message: "FAILED: PR-Job: '${env.JOB_NAME} [${env.BUILD_NUMBER}]' (${env.BUILD_URL})"
} catch (Exception e) {
sh 'echo Failed to send the Slack notification'
}
}
}
}
}
}
}
}
}
// vim: ft=groovy

152
.ci/kitchen-amazon2-py3 Normal file
View file

@@ -0,0 +1,152 @@
// Jenkins scripted pipeline: create an EC2 test VM with Test Kitchen, converge
// it, run the Salt test suite for one distro/python combination, download and
// archive the test artifacts, destroy the VM, and report the outcome to
// GitHub (and to Slack on failure).

// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
// Distro / python combination under test. These only feed the environment
// variables below (TEST_SUITE, TEST_PLATFORM, CODECOV_FLAGS).
def distro_name = 'amazon'
def distro_version = '2'
def python_version = 'py3'
// Salt branch fetched into the workspace during checkout; the golden-images
// branch is exported as GOLDEN_IMAGES_CI_BRANCH for the kitchen configuration
// (consumed outside this file -- presumably selects the prebuilt VM images;
// confirm).
def salt_target_branch = '2019.2.1'
def golden_images_branch = '2019.2'
properties([
// Keep build records/artifacts for the last 10 builds only.
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '10')),
parameters([
// Exported below as FORCE_FULL; within this script it gates the coverage upload.
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
])
])
// Run on a kitchen-capable agent, bounded by the global timeout, with AWS
// credentials in the environment for the EC2 kitchen driver.
node('kitchen-slave') {
timeout(time: global_timeout, unit: 'HOURS') {
withCredentials([[$class: 'AmazonWebServicesCredentialsBinding',
accessKeyVariable: 'AWS_ACCESS_KEY_ID',
credentialsId: 'AWS_ACCESS_KEY_ID',
secretKeyVariable: 'AWS_SECRET_ACCESS_KEY']]) {
ansiColor('xterm') {
timestamps {
// Environment consumed by kitchen, nox and the codecov upload below.
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
'NOX_ENV_NAME=runtests-zeromq',
'NOX_ENABLE_FROM_FILENAMES=true',
'NOX_PASSTHROUGH_OPTS=--ssh-tests',
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version}",
// NOTE(review): /root/bin appears twice in PATH.
'PATH=~/.rbenv/shims:/usr/local/rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/root/bin:/root/bin',
'RBENV_VERSION=2.4.2',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"FORCE_FULL=${params.runFull}",
]) {
// Set the GH status even before cloning the repo
if (env.NODE_NAME.startsWith('jenkins-pr-')) {
stage('github-pending') {
githubNotify credentialsId: 'test-jenkins-credentials',
description: "running ${TEST_SUITE}-${TEST_PLATFORM}...",
status: 'PENDING',
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
}
}
// Checkout the repo
stage('checkout-scm') {
cleanWs notFailBuild: true
checkout scm
// Fetch the target branch into a remote-tracking ref -- presumably so
// the NOX_ENABLE_FROM_FILENAMES change detection can diff against it;
// confirm against the nox configuration.
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('setup-bundle') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
// Create the VM, with a random 1-15s sleep to stagger concurrent creates.
// NOTE(review): the trailing echo makes the shell step's exit status 0, so
// a kitchen failure here is only logged and never triggers the retry; the
// retry covers step-level failures only -- confirm this is intentional.
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 1-15 -n 1); echo "Sleeping $t seconds"; sleep $t
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?;"
'''
}
}
// From here on the nested try/finally blocks guarantee that artifacts are
// collected and the VM is destroyed however the run ends.
try {
sshagent(credentials: ['jenkins-testing-ssh-key']) {
// Load the testing key, falling back to kitchen.pem when it is absent.
sh 'ssh-add ~/.ssh/jenkins-testing.pem || ssh-add ~/.ssh/kitchen.pem'
try {
// Converge + tests get their own shorter timeout so the artifact
// download below still fits inside the global timeout.
timeout(time: testrun_timeout, unit: 'HOURS') {
stage('Converge VM') {
// Trailing echo: kitchen's exit code is logged, not propagated.
sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?;"'
}
stage('Run Tests') {
// DONT_DOWNLOAD_ARTEFACTS / ONLY_DOWNLOAD_ARTEFACTS (below) split
// verify into a test phase and a download phase; they are consumed
// outside this file, presumably by the kitchen verifier -- confirm.
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?;"'
}
}
}
} finally {
try {
// Best-effort artifact download: '|| exit 0' keeps this stage from
// failing the build when the VM is unreachable.
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
}
archiveArtifacts artifacts: 'artifacts/*,artifacts/**/*'
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
// Always destroy the VM, even when archiving failed.
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?;"
'''
}
// Upload coverage only for full runs that actually produced a report.
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}"
fi
'''
}
}
}
}
}
}
} catch (Exception e) {
// NOTE(review): the exception is swallowed -- only the build result records
// the failure; the error itself is never logged.
currentBuild.result = 'FAILURE'
} finally {
cleanWs notFailBuild: true
// Report the final result: GitHub commit status on PR agents, plus a
// Slack notification on failure.
if (currentBuild.resultIsBetterOrEqualTo('SUCCESS')) {
if (env.NODE_NAME.startsWith('jenkins-pr-')) {
githubNotify credentialsId: 'test-jenkins-credentials',
description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has passed",
status: 'SUCCESS',
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
}
} else {
if (env.NODE_NAME.startsWith('jenkins-pr-')) {
githubNotify credentialsId: 'test-jenkins-credentials',
description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has failed",
status: 'FAILURE',
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
}
// A Slack outage must not change the build result.
try {
slackSend channel: "#jenkins-prod-pr",
color: '#FF0000',
message: "FAILED: PR-Job: '${env.JOB_NAME} [${env.BUILD_NUMBER}]' (${env.BUILD_URL})"
} catch (Exception e) {
sh 'echo Failed to send the Slack notification'
}
}
}
}
}
}
}
}
}
// vim: ft=groovy

View file

@ -7,23 +7,18 @@ def global_timeout = testrun_timeout + 1;
def distro_name = 'centos'
def distro_version = '6'
def python_version = 'py2'
def salt_target_branch = '2019.2.1'
def golden_images_branch = '2019.2'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '10')),
[
$class: 'ScannerJobProperty', doNotScan: false
],
[
$class: 'RebuildSettings', autoRebuild: false, rebuildDisabled: false
],
parameters([
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
])
])
timeout(time: global_timeout, unit: 'HOURS') {
node('kitchen-slave') {
node('kitchen-slave') {
timeout(time: global_timeout, unit: 'HOURS') {
withCredentials([[$class: 'AmazonWebServicesCredentialsBinding',
accessKeyVariable: 'AWS_ACCESS_KEY_ID',
credentialsId: 'AWS_ACCESS_KEY_ID',
@ -37,6 +32,7 @@ timeout(time: global_timeout, unit: 'HOURS') {
'NOX_ENV_NAME=runtests-zeromq',
'NOX_ENABLE_FROM_FILENAMES=true',
'NOX_PASSTHROUGH_OPTS=--ssh-tests',
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version}",
'PATH=~/.rbenv/shims:/usr/local/rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/root/bin:/root/bin',
@ -58,11 +54,11 @@ timeout(time: global_timeout, unit: 'HOURS') {
stage('checkout-scm') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('setup-bundle') {
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${CHANGE_TARGET}:refs/remotes/origin/${CHANGE_TARGET}'
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
@ -80,11 +76,11 @@ timeout(time: global_timeout, unit: 'HOURS') {
try {
timeout(time: testrun_timeout, unit: 'HOURS') {
stage('Converge VM') {
sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM'
sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?;"'
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM'
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?;"'
}
}
}

View file

@ -7,23 +7,18 @@ def global_timeout = testrun_timeout + 1;
def distro_name = 'centos'
def distro_version = '7'
def python_version = 'py2'
def salt_target_branch = '2019.2.1'
def golden_images_branch = '2019.2'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '10')),
[
$class: 'ScannerJobProperty', doNotScan: false
],
[
$class: 'RebuildSettings', autoRebuild: false, rebuildDisabled: false
],
parameters([
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
])
])
timeout(time: global_timeout, unit: 'HOURS') {
node('kitchen-slave') {
node('kitchen-slave') {
timeout(time: global_timeout, unit: 'HOURS') {
withCredentials([[$class: 'AmazonWebServicesCredentialsBinding',
accessKeyVariable: 'AWS_ACCESS_KEY_ID',
credentialsId: 'AWS_ACCESS_KEY_ID',
@ -37,6 +32,7 @@ timeout(time: global_timeout, unit: 'HOURS') {
'NOX_ENV_NAME=runtests-zeromq',
'NOX_ENABLE_FROM_FILENAMES=true',
'NOX_PASSTHROUGH_OPTS=--ssh-tests',
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version}",
'PATH=~/.rbenv/shims:/usr/local/rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/root/bin:/root/bin',
@ -58,11 +54,11 @@ timeout(time: global_timeout, unit: 'HOURS') {
stage('checkout-scm') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('setup-bundle') {
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${CHANGE_TARGET}:refs/remotes/origin/${CHANGE_TARGET}'
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
@ -80,11 +76,11 @@ timeout(time: global_timeout, unit: 'HOURS') {
try {
timeout(time: testrun_timeout, unit: 'HOURS') {
stage('Converge VM') {
sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM'
sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?;"'
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM'
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?;"'
}
}
}

View file

@ -7,23 +7,18 @@ def global_timeout = testrun_timeout + 1;
def distro_name = 'centos'
def distro_version = '7'
def python_version = 'py3'
def salt_target_branch = '2019.2.1'
def golden_images_branch = '2019.2'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '10')),
[
$class: 'ScannerJobProperty', doNotScan: false
],
[
$class: 'RebuildSettings', autoRebuild: false, rebuildDisabled: false
],
parameters([
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
])
])
timeout(time: global_timeout, unit: 'HOURS') {
node('kitchen-slave') {
node('kitchen-slave') {
timeout(time: global_timeout, unit: 'HOURS') {
withCredentials([[$class: 'AmazonWebServicesCredentialsBinding',
accessKeyVariable: 'AWS_ACCESS_KEY_ID',
credentialsId: 'AWS_ACCESS_KEY_ID',
@ -37,6 +32,7 @@ timeout(time: global_timeout, unit: 'HOURS') {
'NOX_ENV_NAME=runtests-zeromq',
'NOX_ENABLE_FROM_FILENAMES=true',
'NOX_PASSTHROUGH_OPTS=--ssh-tests',
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version}",
'PATH=~/.rbenv/shims:/usr/local/rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/root/bin:/root/bin',
@ -58,11 +54,11 @@ timeout(time: global_timeout, unit: 'HOURS') {
stage('checkout-scm') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('setup-bundle') {
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${CHANGE_TARGET}:refs/remotes/origin/${CHANGE_TARGET}'
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
@ -80,11 +76,11 @@ timeout(time: global_timeout, unit: 'HOURS') {
try {
timeout(time: testrun_timeout, unit: 'HOURS') {
stage('Converge VM') {
sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM'
sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?;"'
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM'
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?;"'
}
}
}

View file

@ -7,23 +7,18 @@ def global_timeout = testrun_timeout + 1;
def distro_name = 'debian'
def distro_version = '8'
def python_version = 'py2'
def salt_target_branch = '2019.2.1'
def golden_images_branch = '2019.2'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '10')),
[
$class: 'ScannerJobProperty', doNotScan: false
],
[
$class: 'RebuildSettings', autoRebuild: false, rebuildDisabled: false
],
parameters([
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
])
])
timeout(time: global_timeout, unit: 'HOURS') {
node('kitchen-slave') {
node('kitchen-slave') {
timeout(time: global_timeout, unit: 'HOURS') {
withCredentials([[$class: 'AmazonWebServicesCredentialsBinding',
accessKeyVariable: 'AWS_ACCESS_KEY_ID',
credentialsId: 'AWS_ACCESS_KEY_ID',
@ -37,6 +32,7 @@ timeout(time: global_timeout, unit: 'HOURS') {
'NOX_ENV_NAME=runtests-zeromq',
'NOX_ENABLE_FROM_FILENAMES=true',
'NOX_PASSTHROUGH_OPTS=--ssh-tests',
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version}",
'PATH=~/.rbenv/shims:/usr/local/rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/root/bin:/root/bin',
@ -58,11 +54,11 @@ timeout(time: global_timeout, unit: 'HOURS') {
stage('checkout-scm') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('setup-bundle') {
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${CHANGE_TARGET}:refs/remotes/origin/${CHANGE_TARGET}'
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
@ -80,11 +76,11 @@ timeout(time: global_timeout, unit: 'HOURS') {
try {
timeout(time: testrun_timeout, unit: 'HOURS') {
stage('Converge VM') {
sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM'
sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?;"'
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM'
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?;"'
}
}
}

View file

@ -7,23 +7,18 @@ def global_timeout = testrun_timeout + 1;
def distro_name = 'debian'
def distro_version = '8'
def python_version = 'py3'
def salt_target_branch = '2019.2.1'
def golden_images_branch = '2019.2'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '10')),
[
$class: 'ScannerJobProperty', doNotScan: false
],
[
$class: 'RebuildSettings', autoRebuild: false, rebuildDisabled: false
],
parameters([
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
])
])
timeout(time: global_timeout, unit: 'HOURS') {
node('kitchen-slave') {
node('kitchen-slave') {
timeout(time: global_timeout, unit: 'HOURS') {
withCredentials([[$class: 'AmazonWebServicesCredentialsBinding',
accessKeyVariable: 'AWS_ACCESS_KEY_ID',
credentialsId: 'AWS_ACCESS_KEY_ID',
@ -37,6 +32,7 @@ timeout(time: global_timeout, unit: 'HOURS') {
'NOX_ENV_NAME=runtests-zeromq',
'NOX_ENABLE_FROM_FILENAMES=true',
'NOX_PASSTHROUGH_OPTS=--ssh-tests',
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version}",
'PATH=~/.rbenv/shims:/usr/local/rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/root/bin:/root/bin',
@ -58,11 +54,11 @@ timeout(time: global_timeout, unit: 'HOURS') {
stage('checkout-scm') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('setup-bundle') {
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${CHANGE_TARGET}:refs/remotes/origin/${CHANGE_TARGET}'
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
@ -80,11 +76,11 @@ timeout(time: global_timeout, unit: 'HOURS') {
try {
timeout(time: testrun_timeout, unit: 'HOURS') {
stage('Converge VM') {
sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM'
sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?;"'
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM'
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?;"'
}
}
}

View file

@ -7,23 +7,18 @@ def global_timeout = testrun_timeout + 1;
def distro_name = 'debian'
def distro_version = '9'
def python_version = 'py2'
def salt_target_branch = '2019.2.1'
def golden_images_branch = '2019.2'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '10')),
[
$class: 'ScannerJobProperty', doNotScan: false
],
[
$class: 'RebuildSettings', autoRebuild: false, rebuildDisabled: false
],
parameters([
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
])
])
timeout(time: global_timeout, unit: 'HOURS') {
node('kitchen-slave') {
node('kitchen-slave') {
timeout(time: global_timeout, unit: 'HOURS') {
withCredentials([[$class: 'AmazonWebServicesCredentialsBinding',
accessKeyVariable: 'AWS_ACCESS_KEY_ID',
credentialsId: 'AWS_ACCESS_KEY_ID',
@ -37,6 +32,7 @@ timeout(time: global_timeout, unit: 'HOURS') {
'NOX_ENV_NAME=runtests-zeromq',
'NOX_ENABLE_FROM_FILENAMES=true',
'NOX_PASSTHROUGH_OPTS=--ssh-tests',
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version}",
'PATH=~/.rbenv/shims:/usr/local/rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/root/bin:/root/bin',
@ -58,11 +54,11 @@ timeout(time: global_timeout, unit: 'HOURS') {
stage('checkout-scm') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('setup-bundle') {
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${CHANGE_TARGET}:refs/remotes/origin/${CHANGE_TARGET}'
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
@ -80,11 +76,11 @@ timeout(time: global_timeout, unit: 'HOURS') {
try {
timeout(time: testrun_timeout, unit: 'HOURS') {
stage('Converge VM') {
sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM'
sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?;"'
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM'
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?;"'
}
}
}

View file

@ -7,23 +7,18 @@ def global_timeout = testrun_timeout + 1;
def distro_name = 'debian'
def distro_version = '9'
def python_version = 'py3'
def salt_target_branch = '2019.2.1'
def golden_images_branch = '2019.2'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '10')),
[
$class: 'ScannerJobProperty', doNotScan: false
],
[
$class: 'RebuildSettings', autoRebuild: false, rebuildDisabled: false
],
parameters([
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
])
])
timeout(time: global_timeout, unit: 'HOURS') {
node('kitchen-slave') {
node('kitchen-slave') {
timeout(time: global_timeout, unit: 'HOURS') {
withCredentials([[$class: 'AmazonWebServicesCredentialsBinding',
accessKeyVariable: 'AWS_ACCESS_KEY_ID',
credentialsId: 'AWS_ACCESS_KEY_ID',
@ -37,6 +32,7 @@ timeout(time: global_timeout, unit: 'HOURS') {
'NOX_ENV_NAME=runtests-zeromq',
'NOX_ENABLE_FROM_FILENAMES=true',
'NOX_PASSTHROUGH_OPTS=--ssh-tests',
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version}",
'PATH=~/.rbenv/shims:/usr/local/rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/root/bin:/root/bin',
@ -58,11 +54,11 @@ timeout(time: global_timeout, unit: 'HOURS') {
stage('checkout-scm') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('setup-bundle') {
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${CHANGE_TARGET}:refs/remotes/origin/${CHANGE_TARGET}'
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
@ -80,11 +76,11 @@ timeout(time: global_timeout, unit: 'HOURS') {
try {
timeout(time: testrun_timeout, unit: 'HOURS') {
stage('Converge VM') {
sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM'
sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?;"'
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM'
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?;"'
}
}
}

152
.ci/kitchen-fedora29-py2 Normal file
View file

@@ -0,0 +1,152 @@
// Jenkins scripted pipeline: create an EC2 test VM with Test Kitchen, converge
// it, run the Salt test suite for one distro/python combination, download and
// archive the test artifacts, destroy the VM, and report the outcome to
// GitHub (and to Slack on failure).

// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
// Distro / python combination under test. These only feed the environment
// variables below (TEST_SUITE, TEST_PLATFORM, CODECOV_FLAGS).
def distro_name = 'fedora'
def distro_version = '29'
def python_version = 'py2'
// Salt branch fetched into the workspace during checkout; the golden-images
// branch is exported as GOLDEN_IMAGES_CI_BRANCH for the kitchen configuration
// (consumed outside this file -- presumably selects the prebuilt VM images;
// confirm).
def salt_target_branch = '2019.2.1'
def golden_images_branch = '2019.2'
properties([
// Keep build records/artifacts for the last 10 builds only.
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '10')),
parameters([
// Exported below as FORCE_FULL; within this script it gates the coverage upload.
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
])
])
// Run on a kitchen-capable agent, bounded by the global timeout, with AWS
// credentials in the environment for the EC2 kitchen driver.
node('kitchen-slave') {
timeout(time: global_timeout, unit: 'HOURS') {
withCredentials([[$class: 'AmazonWebServicesCredentialsBinding',
accessKeyVariable: 'AWS_ACCESS_KEY_ID',
credentialsId: 'AWS_ACCESS_KEY_ID',
secretKeyVariable: 'AWS_SECRET_ACCESS_KEY']]) {
ansiColor('xterm') {
timestamps {
// Environment consumed by kitchen, nox and the codecov upload below.
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
'NOX_ENV_NAME=runtests-zeromq',
'NOX_ENABLE_FROM_FILENAMES=true',
'NOX_PASSTHROUGH_OPTS=--ssh-tests',
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version}",
// NOTE(review): /root/bin appears twice in PATH.
'PATH=~/.rbenv/shims:/usr/local/rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/root/bin:/root/bin',
'RBENV_VERSION=2.4.2',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"FORCE_FULL=${params.runFull}",
]) {
// Set the GH status even before cloning the repo
if (env.NODE_NAME.startsWith('jenkins-pr-')) {
stage('github-pending') {
githubNotify credentialsId: 'test-jenkins-credentials',
description: "running ${TEST_SUITE}-${TEST_PLATFORM}...",
status: 'PENDING',
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
}
}
// Checkout the repo
stage('checkout-scm') {
cleanWs notFailBuild: true
checkout scm
// Fetch the target branch into a remote-tracking ref -- presumably so
// the NOX_ENABLE_FROM_FILENAMES change detection can diff against it;
// confirm against the nox configuration.
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('setup-bundle') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
// Create the VM, with a random 1-15s sleep to stagger concurrent creates.
// NOTE(review): the trailing echo makes the shell step's exit status 0, so
// a kitchen failure here is only logged and never triggers the retry; the
// retry covers step-level failures only -- confirm this is intentional.
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 1-15 -n 1); echo "Sleeping $t seconds"; sleep $t
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?;"
'''
}
}
// From here on the nested try/finally blocks guarantee that artifacts are
// collected and the VM is destroyed however the run ends.
try {
sshagent(credentials: ['jenkins-testing-ssh-key']) {
// Load the testing key, falling back to kitchen.pem when it is absent.
sh 'ssh-add ~/.ssh/jenkins-testing.pem || ssh-add ~/.ssh/kitchen.pem'
try {
// Converge + tests get their own shorter timeout so the artifact
// download below still fits inside the global timeout.
timeout(time: testrun_timeout, unit: 'HOURS') {
stage('Converge VM') {
// Trailing echo: kitchen's exit code is logged, not propagated.
sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?;"'
}
stage('Run Tests') {
// DONT_DOWNLOAD_ARTEFACTS / ONLY_DOWNLOAD_ARTEFACTS (below) split
// verify into a test phase and a download phase; they are consumed
// outside this file, presumably by the kitchen verifier -- confirm.
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?;"'
}
}
}
} finally {
try {
// Best-effort artifact download: '|| exit 0' keeps this stage from
// failing the build when the VM is unreachable.
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
}
archiveArtifacts artifacts: 'artifacts/*,artifacts/**/*'
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
// Always destroy the VM, even when archiving failed.
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?;"
'''
}
// Upload coverage only for full runs that actually produced a report.
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}"
fi
'''
}
}
}
}
}
}
} catch (Exception e) {
// NOTE(review): the exception is swallowed -- only the build result records
// the failure; the error itself is never logged.
currentBuild.result = 'FAILURE'
} finally {
cleanWs notFailBuild: true
// Report the final result: GitHub commit status on PR agents, plus a
// Slack notification on failure.
if (currentBuild.resultIsBetterOrEqualTo('SUCCESS')) {
if (env.NODE_NAME.startsWith('jenkins-pr-')) {
githubNotify credentialsId: 'test-jenkins-credentials',
description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has passed",
status: 'SUCCESS',
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
}
} else {
if (env.NODE_NAME.startsWith('jenkins-pr-')) {
githubNotify credentialsId: 'test-jenkins-credentials',
description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has failed",
status: 'FAILURE',
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
}
// A Slack outage must not change the build result.
try {
slackSend channel: "#jenkins-prod-pr",
color: '#FF0000',
message: "FAILED: PR-Job: '${env.JOB_NAME} [${env.BUILD_NUMBER}]' (${env.BUILD_URL})"
} catch (Exception e) {
sh 'echo Failed to send the Slack notification'
}
}
}
}
}
}
}
}
}
// vim: ft=groovy

152
.ci/kitchen-fedora29-py3 Normal file
View file

@ -0,0 +1,152 @@
// Jenkins scripted pipeline for the Salt test suite on Fedora 29 / Python 3.
// Provisions an EC2-backed Test Kitchen VM, converges it, runs the nox-driven
// test suite, then downloads artifacts, destroys the VM and reports status.
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
// Platform/suite selectors; combined below into TEST_SUITE, TEST_PLATFORM and
// the Codecov upload flags.
def distro_name = 'fedora'
def distro_version = '29'
def python_version = 'py3'
// Upstream branch fetched for change detection, and the branch of the
// golden-images repo used when provisioning the kitchen VM.
def salt_target_branch = '2019.2.1'
def golden_images_branch = '2019.2'
properties([
    buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '10')),
    parameters([
        booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
    ])
])
node('kitchen-slave') {
    // Timeout lives inside the node block so queue time waiting for an
    // executor does not count against the pipeline budget.
    timeout(time: global_timeout, unit: 'HOURS') {
        withCredentials([[$class: 'AmazonWebServicesCredentialsBinding',
                          accessKeyVariable: 'AWS_ACCESS_KEY_ID',
                          credentialsId: 'AWS_ACCESS_KEY_ID',
                          secretKeyVariable: 'AWS_SECRET_ACCESS_KEY']]) {
            ansiColor('xterm') {
                timestamps {
                    // Environment consumed by the kitchen/nox tooling on the VM.
                    withEnv([
                        'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
                        'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
                        'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
                        'NOX_ENV_NAME=runtests-zeromq',
                        'NOX_ENABLE_FROM_FILENAMES=true',
                        'NOX_PASSTHROUGH_OPTS=--ssh-tests',
                        "SALT_TARGET_BRANCH=${salt_target_branch}",
                        "GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
                        "CODECOV_FLAGS=${distro_name}${distro_version},${python_version}",
                        'PATH=~/.rbenv/shims:/usr/local/rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/root/bin:/root/bin',
                        'RBENV_VERSION=2.4.2',
                        "TEST_SUITE=${python_version}",
                        "TEST_PLATFORM=${distro_name}-${distro_version}",
                        "FORCE_FULL=${params.runFull}",
                    ]) {
                        // Set the GH status even before cloning the repo
                        if (env.NODE_NAME.startsWith('jenkins-pr-')) {
                            stage('github-pending') {
                                githubNotify credentialsId: 'test-jenkins-credentials',
                                             description: "running ${TEST_SUITE}-${TEST_PLATFORM}...",
                                             status: 'PENDING',
                                             context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
                            }
                        }
                        // Checkout the repo
                        stage('checkout-scm') {
                            cleanWs notFailBuild: true
                            checkout scm
                            // Fetch the target branch so diff-based test selection
                            // (NOX_ENABLE_FROM_FILENAMES) has a merge base to compare against.
                            sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
                        }
                        // Setup the kitchen required bundle
                        stage('setup-bundle') {
                            sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
                        }
                        stage('Create VM') {
                            // Retry VM creation up to 3 times; the random sleep
                            // staggers concurrent jobs hitting EC2 at once.
                            retry(3) {
                                sh '''
                                t=$(shuf -i 1-15 -n 1); echo "Sleeping $t seconds"; sleep $t
                                bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?;"
                                '''
                            }
                        }
                        try {
                            sshagent(credentials: ['jenkins-testing-ssh-key']) {
                                // Either key may be present depending on the slave image.
                                sh 'ssh-add ~/.ssh/jenkins-testing.pem || ssh-add ~/.ssh/kitchen.pem'
                                try {
                                    // Only converge+verify count against the test-run budget;
                                    // artifact download/cleanup happen outside this timeout.
                                    timeout(time: testrun_timeout, unit: 'HOURS') {
                                        stage('Converge VM') {
                                            sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?;"'
                                        }
                                        stage('Run Tests') {
                                            withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
                                                sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?;"'
                                            }
                                        }
                                    }
                                } finally {
                                    try {
                                        // Best-effort artifact pull; `|| exit 0` keeps a failed
                                        // download from masking the real test result.
                                        stage('Download Artefacts') {
                                            withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
                                                sh '''
                                                bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
                                                '''
                                            }
                                        }
                                        archiveArtifacts artifacts: 'artifacts/*,artifacts/**/*'
                                        junit 'artifacts/xml-unittests-output/*.xml'
                                    } finally {
                                        // Always tear the VM down, even if archiving failed.
                                        stage('Cleanup') {
                                            sh '''
                                            bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?;"
                                            '''
                                        }
                                        stage('Upload Coverage') {
                                            script {
                                                withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
                                                    // Upload only for full runs that produced a coverage report.
                                                    sh '''
                                                    if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
                                                        curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}"
                                                    fi
                                                    '''
                                                }
                                            }
                                        }
                                    }
                                }
                            }
                        } catch (Exception e) {
                            // Mark the build failed; the finally block below still
                            // runs to report status. NOTE(review): the exception is
                            // not logged here — only the build result is set.
                            currentBuild.result = 'FAILURE'
                        } finally {
                            cleanWs notFailBuild: true
                            if (currentBuild.resultIsBetterOrEqualTo('SUCCESS')) {
                                if (env.NODE_NAME.startsWith('jenkins-pr-')) {
                                    githubNotify credentialsId: 'test-jenkins-credentials',
                                                 description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has passed",
                                                 status: 'SUCCESS',
                                                 context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
                                }
                            } else {
                                if (env.NODE_NAME.startsWith('jenkins-pr-')) {
                                    githubNotify credentialsId: 'test-jenkins-credentials',
                                                 description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has failed",
                                                 status: 'FAILURE',
                                                 context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
                                }
                                try {
                                    slackSend channel: "#jenkins-prod-pr",
                                              color: '#FF0000',
                                              message: "FAILED: PR-Job: '${env.JOB_NAME} [${env.BUILD_NUMBER}]' (${env.BUILD_URL})"
                                } catch (Exception e) {
                                    // Slack failures must never fail the build.
                                    sh 'echo Failed to send the Slack notification'
                                }
                            }
                        }
                    }
                }
            }
        }
    }
}
// vim: ft=groovy

View file

@ -1,4 +1,4 @@
//// Define the maximum time, in hours, that a test run should run for
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
@ -7,23 +7,18 @@ def global_timeout = testrun_timeout + 1;
def distro_name = 'ubuntu'
def distro_version = '1604'
def python_version = 'py2'
def salt_target_branch = '2019.2.1'
def golden_images_branch = '2019.2'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '10')),
[
$class: 'ScannerJobProperty', doNotScan: false
],
[
$class: 'RebuildSettings', autoRebuild: false, rebuildDisabled: false
],
parameters([
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
])
])
timeout(time: global_timeout, unit: 'HOURS') {
node('kitchen-slave') {
node('kitchen-slave') {
timeout(time: global_timeout, unit: 'HOURS') {
withCredentials([[$class: 'AmazonWebServicesCredentialsBinding',
accessKeyVariable: 'AWS_ACCESS_KEY_ID',
credentialsId: 'AWS_ACCESS_KEY_ID',
@ -37,6 +32,7 @@ timeout(time: global_timeout, unit: 'HOURS') {
'NOX_ENV_NAME=runtests-zeromq',
'NOX_ENABLE_FROM_FILENAMES=true',
'NOX_PASSTHROUGH_OPTS=--ssh-tests',
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version}",
'PATH=~/.rbenv/shims:/usr/local/rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/root/bin:/root/bin',
@ -58,11 +54,11 @@ timeout(time: global_timeout, unit: 'HOURS') {
stage('checkout-scm') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('setup-bundle') {
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${CHANGE_TARGET}:refs/remotes/origin/${CHANGE_TARGET}'
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
@ -80,11 +76,11 @@ timeout(time: global_timeout, unit: 'HOURS') {
try {
timeout(time: testrun_timeout, unit: 'HOURS') {
stage('Converge VM') {
sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM'
sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?;"'
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM'
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?;"'
}
}
}

View file

@ -1,4 +1,4 @@
//// Define the maximum time, in hours, that a test run should run for
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
@ -7,23 +7,18 @@ def global_timeout = testrun_timeout + 1;
def distro_name = 'ubuntu'
def distro_version = '1604'
def python_version = 'py3'
def salt_target_branch = '2019.2.1'
def golden_images_branch = '2019.2'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '10')),
[
$class: 'ScannerJobProperty', doNotScan: false
],
[
$class: 'RebuildSettings', autoRebuild: false, rebuildDisabled: false
],
parameters([
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
])
])
timeout(time: global_timeout, unit: 'HOURS') {
node('kitchen-slave') {
node('kitchen-slave') {
timeout(time: global_timeout, unit: 'HOURS') {
withCredentials([[$class: 'AmazonWebServicesCredentialsBinding',
accessKeyVariable: 'AWS_ACCESS_KEY_ID',
credentialsId: 'AWS_ACCESS_KEY_ID',
@ -37,6 +32,7 @@ timeout(time: global_timeout, unit: 'HOURS') {
'NOX_ENV_NAME=runtests-zeromq',
'NOX_ENABLE_FROM_FILENAMES=true',
'NOX_PASSTHROUGH_OPTS=--ssh-tests',
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version}",
'PATH=~/.rbenv/shims:/usr/local/rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/root/bin:/root/bin',
@ -58,11 +54,11 @@ timeout(time: global_timeout, unit: 'HOURS') {
stage('checkout-scm') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('setup-bundle') {
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${CHANGE_TARGET}:refs/remotes/origin/${CHANGE_TARGET}'
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
@ -80,11 +76,11 @@ timeout(time: global_timeout, unit: 'HOURS') {
try {
timeout(time: testrun_timeout, unit: 'HOURS') {
stage('Converge VM') {
sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM'
sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?;"'
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM'
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?;"'
}
}
}

View file

@ -7,23 +7,18 @@ def global_timeout = testrun_timeout + 1;
def distro_name = 'ubuntu'
def distro_version = '1804'
def python_version = 'py2'
def salt_target_branch = '2019.2.1'
def golden_images_branch = '2019.2'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '10')),
[
$class: 'ScannerJobProperty', doNotScan: false
],
[
$class: 'RebuildSettings', autoRebuild: false, rebuildDisabled: false
],
parameters([
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
])
])
timeout(time: global_timeout, unit: 'HOURS') {
node('kitchen-slave') {
node('kitchen-slave') {
timeout(time: global_timeout, unit: 'HOURS') {
withCredentials([[$class: 'AmazonWebServicesCredentialsBinding',
accessKeyVariable: 'AWS_ACCESS_KEY_ID',
credentialsId: 'AWS_ACCESS_KEY_ID',
@ -37,6 +32,7 @@ timeout(time: global_timeout, unit: 'HOURS') {
'NOX_ENV_NAME=runtests-zeromq',
'NOX_ENABLE_FROM_FILENAMES=true',
'NOX_PASSTHROUGH_OPTS=--ssh-tests',
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version}",
'PATH=~/.rbenv/shims:/usr/local/rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/root/bin:/root/bin',
@ -58,11 +54,11 @@ timeout(time: global_timeout, unit: 'HOURS') {
stage('checkout-scm') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('setup-bundle') {
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${CHANGE_TARGET}:refs/remotes/origin/${CHANGE_TARGET}'
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
@ -80,11 +76,11 @@ timeout(time: global_timeout, unit: 'HOURS') {
try {
timeout(time: testrun_timeout, unit: 'HOURS') {
stage('Converge VM') {
sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM'
sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?;"'
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM'
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?;"'
}
}
}

View file

@ -7,23 +7,18 @@ def global_timeout = testrun_timeout + 1;
def distro_name = 'ubuntu'
def distro_version = '1804'
def python_version = 'py3'
def salt_target_branch = '2019.2.1'
def golden_images_branch = '2019.2'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '10')),
[
$class: 'ScannerJobProperty', doNotScan: false
],
[
$class: 'RebuildSettings', autoRebuild: false, rebuildDisabled: false
],
parameters([
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
])
])
timeout(time: global_timeout, unit: 'HOURS') {
node('kitchen-slave') {
node('kitchen-slave') {
timeout(time: global_timeout, unit: 'HOURS') {
withCredentials([[$class: 'AmazonWebServicesCredentialsBinding',
accessKeyVariable: 'AWS_ACCESS_KEY_ID',
credentialsId: 'AWS_ACCESS_KEY_ID',
@ -37,6 +32,7 @@ timeout(time: global_timeout, unit: 'HOURS') {
'NOX_ENV_NAME=runtests-zeromq',
'NOX_ENABLE_FROM_FILENAMES=true',
'NOX_PASSTHROUGH_OPTS=--ssh-tests',
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version}",
'PATH=~/.rbenv/shims:/usr/local/rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/root/bin:/root/bin',
@ -58,11 +54,11 @@ timeout(time: global_timeout, unit: 'HOURS') {
stage('checkout-scm') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('setup-bundle') {
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${CHANGE_TARGET}:refs/remotes/origin/${CHANGE_TARGET}'
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
@ -80,11 +76,11 @@ timeout(time: global_timeout, unit: 'HOURS') {
try {
timeout(time: testrun_timeout, unit: 'HOURS') {
stage('Converge VM') {
sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM'
sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?;"'
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM'
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?;"'
}
}
}

View file

@ -1,5 +1,5 @@
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
def testrun_timeout = 8
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
@ -7,23 +7,18 @@ def global_timeout = testrun_timeout + 1;
def distro_name = 'windows'
def distro_version = '2016'
def python_version = 'py2'
def salt_target_branch = '2019.2.1'
def golden_images_branch = '2019.2'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '10')),
[
$class: 'ScannerJobProperty', doNotScan: false
],
[
$class: 'RebuildSettings', autoRebuild: false, rebuildDisabled: false
],
parameters([
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
])
])
timeout(time: global_timeout, unit: 'HOURS') {
node('kitchen-slave') {
node('kitchen-slave') {
timeout(time: global_timeout, unit: 'HOURS') {
withCredentials([[$class: 'AmazonWebServicesCredentialsBinding',
accessKeyVariable: 'AWS_ACCESS_KEY_ID',
credentialsId: 'AWS_ACCESS_KEY_ID',
@ -36,7 +31,8 @@ timeout(time: global_timeout, unit: 'HOURS') {
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
'NOX_ENV_NAME=runtests-zeromq',
'NOX_ENABLE_FROM_FILENAMES=true',
'NOX_PASSTHROUGH_OPTS=--ssh-tests',
"NOX_PASSTHROUGH_OPTS=--unit",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version}",
'PATH=~/.rbenv/shims:/usr/local/rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/root/bin:/root/bin',
@ -58,11 +54,11 @@ timeout(time: global_timeout, unit: 'HOURS') {
stage('checkout-scm') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('setup-bundle') {
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${CHANGE_TARGET}:refs/remotes/origin/${CHANGE_TARGET}'
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
@ -80,11 +76,11 @@ timeout(time: global_timeout, unit: 'HOURS') {
try {
timeout(time: testrun_timeout, unit: 'HOURS') {
stage('Converge VM') {
sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM'
sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?;"'
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM'
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?;"'
}
}
}

View file

@ -1,5 +1,5 @@
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
def testrun_timeout = 8
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
@ -7,23 +7,18 @@ def global_timeout = testrun_timeout + 1;
def distro_name = 'windows'
def distro_version = '2016'
def python_version = 'py3'
def salt_target_branch = '2019.2.1'
def golden_images_branch = '2019.2'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '10')),
[
$class: 'ScannerJobProperty', doNotScan: false
],
[
$class: 'RebuildSettings', autoRebuild: false, rebuildDisabled: false
],
parameters([
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
])
])
timeout(time: global_timeout, unit: 'HOURS') {
node('kitchen-slave') {
node('kitchen-slave') {
timeout(time: global_timeout, unit: 'HOURS') {
withCredentials([[$class: 'AmazonWebServicesCredentialsBinding',
accessKeyVariable: 'AWS_ACCESS_KEY_ID',
credentialsId: 'AWS_ACCESS_KEY_ID',
@ -36,6 +31,8 @@ timeout(time: global_timeout, unit: 'HOURS') {
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
'NOX_ENV_NAME=runtests-zeromq',
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=--unit",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version}",
'PATH=~/.rbenv/shims:/usr/local/rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/root/bin:/root/bin',
@ -57,11 +54,11 @@ timeout(time: global_timeout, unit: 'HOURS') {
stage('checkout-scm') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('setup-bundle') {
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${CHANGE_TARGET}:refs/remotes/origin/${CHANGE_TARGET}'
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
@ -79,11 +76,11 @@ timeout(time: global_timeout, unit: 'HOURS') {
try {
timeout(time: testrun_timeout, unit: 'HOURS') {
stage('Converge VM') {
sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM'
sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?;"'
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM'
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?;"'
}
}
}

122
.ci/lint
View file

@ -1,20 +1,15 @@
// Define the maximum time, in hours, that a test run should run for
def global_timeout = 3
def salt_target_branch = '2019.2.1'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '10')),
[
$class: 'ScannerJobProperty', doNotScan: false
],
[
$class: 'RebuildSettings', autoRebuild: false, rebuildDisabled: false
],
])
def shell_header
timeout(time: global_timeout, unit: 'HOURS') {
node('lint') {
node('lint') {
timeout(time: global_timeout, unit: 'HOURS') {
ansiColor('xterm') {
timestamps {
try {
@ -30,35 +25,40 @@ timeout(time: global_timeout, unit: 'HOURS') {
} else {
shell_header = ''
}
// Checkout the repo
stage('checkout-scm') {
cleanWs notFailBuild: true
checkout scm
}
// Setup the kitchen required bundle
stage('Setup') {
sh shell_header + '''
# Need -M to detect renames otherwise they are reported as Delete and Add, need -C to detect copies, -C includes -M
# -M is on by default in git 2.9+
git diff --name-status -l99999 -C "origin/$CHANGE_TARGET" > file-list-status.log
# the -l increase the search limit, lets use awk so we do not need to repeat the search above.
gawk 'BEGIN {FS="\\t"} {if ($1 != "D") {print $NF}}' file-list-status.log > file-list-changed.log
gawk 'BEGIN {FS="\\t"} {if ($1 == "D") {print $NF}}' file-list-status.log > file-list-deleted.log
(git diff --name-status -l99999 -C "origin/$CHANGE_TARGET" "origin/$BRANCH_NAME";echo "---";git diff --name-status -l99999 -C "origin/$BRANCH_NAME";printenv|grep -E '=[0-9a-z]{40,}+$|COMMIT=|BRANCH') > file-list-experiment.log
eval "$(pyenv init -)"
pyenv --version
pyenv install --skip-existing 2.7.15
pyenv shell 2.7.15
python --version
pip install -U https://github.com/s0undt3ch/nox/archive/hotfix/py2.zip#egg=Nox==2018.10.17
nox --version
# Create the required virtualenvs in serial
nox --install-only -e lint-salt
nox --install-only -e lint-tests
'''
withEnv(["SALT_TARGET_BRANCH=${salt_target_branch}"]) {
// Checkout the repo
stage('checkout-scm') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh shell_header + '''
# Need -M to detect renames otherwise they are reported as Delete and Add, need -C to detect copies, -C includes -M
# -M is on by default in git 2.9+
git diff --name-status -l99999 -C "origin/${SALT_TARGET_BRANCH}" > file-list-status.log
# the -l increase the search limit, lets use awk so we do not need to repeat the search above.
gawk 'BEGIN {FS="\\t"} {if ($1 != "D") {print $NF}}' file-list-status.log > file-list-changed.log
gawk 'BEGIN {FS="\\t"} {if ($1 == "D") {print $NF}}' file-list-status.log > file-list-deleted.log
(git diff --name-status -l99999 -C "origin/${SALT_TARGET_BRANCH}" "origin/$BRANCH_NAME";echo "---";git diff --name-status -l99999 -C "origin/$BRANCH_NAME";printenv|grep -E '=[0-9a-z]{40,}+$|COMMIT=|BRANCH') > file-list-experiment.log
eval "$(pyenv init -)"
pyenv --version
pyenv install --skip-existing 2.7.15
pyenv shell 2.7.15
python --version
pip install -U nox-py2
nox --version
# Create the required virtualenvs in serial
nox --install-only -e lint-salt
nox --install-only -e lint-tests
'''
}
archiveArtifacts artifacts: 'file-list-status.log,file-list-changed.log,file-list-deleted.log,file-list-experiment.log'
}
stage('Lint Changes') {
try {
parallel(
@ -94,17 +94,22 @@ timeout(time: global_timeout, unit: 'HOURS') {
}
)
} finally {
archiveArtifacts artifacts: 'pylint-report-*-chg.log', allowEmptyArchive: true
step([$class: 'WarningsPublisher',
parserConfigurations: [[
parserName: 'PyLint',
pattern: 'pylint-report-*-chg.log'
]],
failedTotalAll: '0',
useDeltaValues: false,
canRunOnFailed: true,
usePreviousBuildAsReference: true
])
def changed_logs_pattern = 'pylint-report-*-chg.log'
archiveArtifacts artifacts: changed_logs_pattern, allowEmptyArchive: true
if (env.NODE_NAME.startsWith('jenkins-pr-')) {
step([$class: 'WarningsPublisher',
parserConfigurations: [[
parserName: 'PyLint',
pattern: changed_logs_pattern
]],
failedTotalAll: '0',
useDeltaValues: false,
canRunOnFailed: true,
usePreviousBuildAsReference: true
])
} else {
recordIssues(enabledForFailure: true, tool: pyLint(pattern: changed_logs_pattern, reportEncoding: 'UTF-8'))
}
}
}
stage('Lint Full') {
@ -146,17 +151,22 @@ timeout(time: global_timeout, unit: 'HOURS') {
}
)
} finally {
archiveArtifacts artifacts: 'pylint-report-*-full.log', allowEmptyArchive: true
step([$class: 'WarningsPublisher',
parserConfigurations: [[
parserName: 'PyLint',
pattern: 'pylint-report-*-full.log'
]],
failedTotalAll: '0',
useDeltaValues: false,
canRunOnFailed: true,
usePreviousBuildAsReference: true
])
def full_logs_pattern = 'pylint-report-*-full.log'
archiveArtifacts artifacts: full_logs_pattern, allowEmptyArchive: true
if (env.NODE_NAME.startsWith('jenkins-pr-')) {
step([$class: 'WarningsPublisher',
parserConfigurations: [[
parserName: 'PyLint',
pattern: full_logs_pattern
]],
failedTotalAll: '0',
useDeltaValues: false,
canRunOnFailed: true,
usePreviousBuildAsReference: true
])
} else {
recordIssues(enabledForFailure: true, tool: pyLint(pattern: full_logs_pattern, reportEncoding: 'UTF-8'))
}
}
}
}

View file

@ -151,7 +151,7 @@
# Store all returns in the given returner.
# Setting this option requires that any returner-specific configuration also
# be set. See various returners in salt/returners for details on required
# configuration values. (See also, event_return_queue below.)
# configuration values. (See also, event_return_queue, and event_return_queue_max_seconds below.)
#
#event_return: mysql
@ -161,6 +161,12 @@
# By default, events are not queued.
#event_return_queue: 0
# In some cases enabling event return queueing can be very helpful, but the bus
# may not be busy enough to flush the queue consistently. Setting this to a reasonable
# value (1-30 seconds) will cause the queue to be flushed when the oldest event is older
# than `event_return_queue_max_seconds` regardless of how many events are in the queue.
#event_return_queue_max_seconds: 0
# Only return events matching tags in a whitelist, supports glob matches.
#event_return_whitelist:
# - salt/master/a_tag

View file

@ -4,3 +4,13 @@ In Progress: Salt 2018.3.5 Release Notes
Version 2018.3.5 is an **unreleased** bugfix release for :ref:`2018.3.0 <release-2018-3-0>`.
This release is still in progress and has not been released yet.
Master Configuration Changes
============================
To fix `#53411`_ a new configuration parameter `event_return_queue_max_seconds` is provided.
When this is set to a value greater than 0 and `event_return_queue` is not 0, if the oldest event
in the return queue is older than `event_return_queue_max_seconds`, the queue will be flushed to
returners regardless of how many events are in the queue.
.. _`#53411`: https://github.com/saltstack/salt/issues/53411

View file

@ -434,3 +434,8 @@ class Beacon(object):
Reset the beacons to defaults
'''
self.opts['beacons'] = {}
evt = salt.utils.event.get_event('minion', opts=self.opts)
evt.fire_event({'complete': True, 'comment': 'Beacons have been reset',
'beacons': self.opts['beacons']},
tag='/salt/minion/minion_beacon_reset_complete')
return True

View file

@ -550,6 +550,11 @@ VALID_OPTS = {
# returner specified by 'event_return'
'event_return_queue': int,
# The number of seconds that events can languish in the queue before we flush them.
# The goal here is to ensure that if the bus is not busy enough to accumulate a total
# of `event_return_queue` events, the queued events still won't get stale.
'event_return_queue_max_seconds': int,
# Only forward events to an event returner if it matches one of the tags in this list
'event_return_whitelist': list,

View file

@ -389,7 +389,7 @@ def _file_lists(load, form):
rel_dest = _translate_sep(
os.path.relpath(
os.path.realpath(os.path.normpath(joined)),
fs_root
os.path.realpath(fs_root)
)
)
log.trace(

View file

@ -1706,7 +1706,7 @@ class LazyLoader(salt.utils.lazy.LazyDict):
# enforce depends
try:
Depends.enforce_dependencies(self._dict, self.tag)
Depends.enforce_dependencies(self._dict, self.tag, name)
except RuntimeError as exc:
log.info(
'Depends.enforce_dependencies() failed for the following '

View file

@ -29,6 +29,7 @@ from salt.ext import six
import salt.utils.files
import salt.utils.path
import salt.utils.platform
import salt.utils.stringutils
log = logging.getLogger(__name__)
__virtualname__ = 'at'

View file

@ -614,22 +614,29 @@ def reset(**kwargs):
ret = {'comment': [],
'result': True}
if 'test' in kwargs and kwargs['test']:
if kwargs.get('test'):
ret['comment'] = 'Beacons would be reset.'
else:
try:
eventer = salt.utils.event.get_event('minion', opts=__opts__, listen=True)
res = __salt__['event.fire']({'func': 'reset'}, 'manage_beacons')
if res:
wait = kwargs.get('timeout', default_event_wait)
event_ret = eventer.get_event(
tag='/salt/minion/minion_beacon_reset_complete',
wait=kwargs.get('timeout', default_event_wait))
wait=wait)
if event_ret and event_ret['complete']:
ret['result'] = True
ret['comment'] = 'Beacon configuration reset.'
else:
ret['result'] = False
ret['comment'] = event_ret['comment']
if event_ret is None:
ret['result'] = False
ret['comment'] = (
'minion reset event not recieved after {} seconds'
).format(wait)
else:
ret['result'] = False
ret['comment'] = event_ret['comment']
return ret
except KeyError:
# Effectively a no-op, since we can't really return without an event system

View file

@ -23,6 +23,7 @@ import time
# Import salt libs
import salt.utils.files
import salt.utils.path
import salt.utils.stringutils
from salt.exceptions import SaltInvocationError
from salt.utils.versions import LooseVersion as _LooseVersion
@ -722,8 +723,7 @@ def import_key(text=None,
if filename:
try:
with salt.utils.files.flopen(filename, 'rb') as _fp:
lines = _fp.readlines()
text = ''.join(lines)
text = salt.utils.stringutils.to_unicode(_fp.read())
except IOError:
raise SaltInvocationError('filename does not exist.')
@ -1009,21 +1009,22 @@ def sign(user=None,
gnupg_version = _LooseVersion(gnupg.__version__)
if text:
if gnupg_version >= '1.3.1':
if gnupg_version >= _LooseVersion('1.3.1'):
signed_data = gpg.sign(text, default_key=keyid, passphrase=gpg_passphrase)
else:
signed_data = gpg.sign(text, keyid=keyid, passphrase=gpg_passphrase)
elif filename:
with salt.utils.files.flopen(filename, 'rb') as _fp:
if gnupg_version >= '1.3.1':
if gnupg_version >= _LooseVersion('1.3.1'):
signed_data = gpg.sign(text, default_key=keyid, passphrase=gpg_passphrase)
else:
signed_data = gpg.sign_file(_fp, keyid=keyid, passphrase=gpg_passphrase)
if output:
with salt.utils.files.flopen(output, 'w') as fout:
fout.write(signed_data.data)
fout.write(salt.utils.stringutils.to_bytes(signed_data.data))
else:
raise SaltInvocationError('filename or text must be passed.')
return signed_data.data
@ -1193,7 +1194,7 @@ def encrypt(user=None,
# This version does not allow us to encrypt using the
# file stream # have to read in the contents and encrypt.
with salt.utils.files.flopen(filename, 'rb') as _fp:
_contents = _fp.read()
_contents = salt.utils.stringutils.to_unicode(_fp.read())
result = gpg.encrypt(_contents, recipients, passphrase=gpg_passphrase, output=output)
else:
# This version allows encrypting the file stream

View file

@ -55,11 +55,16 @@ def install(app_id, enable=True):
salt '*' assistive.install com.smileonmymac.textexpander
'''
ge_el_capitan = True if _LooseVersion(__grains__['osrelease']) >= salt.utils.stringutils.to_str('10.11') else False
ge_mojave = True if _LooseVersion(__grains__['osrelease']) >= salt.utils.stringutils.to_str('10.14') else False
client_type = _client_type(app_id)
enable_str = '1' if enable else '0'
cmd = 'sqlite3 "/Library/Application Support/com.apple.TCC/TCC.db" ' \
'"INSERT or REPLACE INTO access VALUES(\'kTCCServiceAccessibility\',\'{0}\',{1},{2},1,NULL{3})"'.\
format(app_id, client_type, enable_str, ',NULL' if ge_el_capitan else '')
'"INSERT or REPLACE INTO access VALUES(\'kTCCServiceAccessibility\',\'{0}\',{1},{2},1,NULL{3}{4})"'.\
format(app_id,
client_type,
enable_str,
',NULL' if ge_el_capitan else '',
",NULL,NULL,NULL,NULL,''" if ge_mojave else '')
call = __salt__['cmd.run_all'](
cmd,

View file

@ -132,7 +132,7 @@ class Depends(object):
return retcode
@classmethod
def enforce_dependencies(cls, functions, kind):
def enforce_dependencies(cls, functions, kind, tgt_mod):
'''
This is a class global method to enforce the dependencies that you
currently know about.
@ -141,6 +141,8 @@ class Depends(object):
'''
for dependency, dependent_dict in six.iteritems(cls.dependency_dict[kind]):
for (mod_name, func_name), (frame, params) in six.iteritems(dependent_dict):
if mod_name != tgt_mod:
continue
if 'retcode' in params or 'nonzero_retcode' in params:
try:
retcode = cls.run_command(dependency, mod_name, func_name)

View file

@ -320,12 +320,8 @@ class SaltEvent(object):
sock_dir,
'minion_event_{0}_pull.ipc'.format(id_hash)
)
log.debug(
'{0} PUB socket URI: {1}'.format(self.__class__.__name__, puburi)
)
log.debug(
'{0} PULL socket URI: {1}'.format(self.__class__.__name__, pulluri)
)
log.debug('%s PUB socket URI: %s', self.__class__.__name__, puburi)
log.debug('%s PULL socket URI: %s', self.__class__.__name__, pulluri)
return puburi, pulluri
def subscribe(self, tag=None, match_type=None):
@ -370,9 +366,9 @@ class SaltEvent(object):
with salt.utils.asynchronous.current_ioloop(self.io_loop):
if self.subscriber is None:
self.subscriber = salt.transport.ipc.IPCMessageSubscriber(
self.puburi,
io_loop=self.io_loop
)
self.puburi,
io_loop=self.io_loop
)
try:
self.io_loop.run_sync(
lambda: self.subscriber.connect(timeout=timeout))
@ -382,9 +378,9 @@ class SaltEvent(object):
else:
if self.subscriber is None:
self.subscriber = salt.transport.ipc.IPCMessageSubscriber(
self.puburi,
io_loop=self.io_loop
)
self.puburi,
io_loop=self.io_loop
)
# For the asynchronous case, the connect will be defered to when
# set_event_handler() is invoked.
@ -982,16 +978,10 @@ class AsyncEventPublisher(object):
epub_uri = epub_sock_path
epull_uri = epull_sock_path
log.debug(
'{0} PUB socket URI: {1}'.format(
self.__class__.__name__, epub_uri
)
)
log.debug(
'{0} PULL socket URI: {1}'.format(
self.__class__.__name__, epull_uri
)
)
log.debug('%s PUB socket URI: %s',
self.__class__.__name__, epub_uri)
log.debug('%s PULL socket URI: %s',
self.__class__.__name__, epull_uri)
minion_sock_dir = self.opts['sock_dir']
@ -1001,7 +991,7 @@ class AsyncEventPublisher(object):
try:
os.makedirs(minion_sock_dir, 0o755)
except OSError as exc:
log.error('Could not create SOCK_DIR: {0}'.format(exc))
log.error('Could not create SOCK_DIR: %s', exc)
# Let's not fail yet and try using the default path
if minion_sock_dir == default_minion_sock_dir:
# We're already trying the default system path, stop now!
@ -1011,7 +1001,7 @@ class AsyncEventPublisher(object):
try:
os.makedirs(default_minion_sock_dir, 0o755)
except OSError as exc:
log.error('Could not create SOCK_DIR: {0}'.format(exc))
log.error('Could not create SOCK_DIR: %s', exc)
# Let's stop at this stage
raise
@ -1027,7 +1017,7 @@ class AsyncEventPublisher(object):
payload_handler=self.handle_publish
)
log.info('Starting pull socket on {0}'.format(epull_uri))
log.info('Starting pull socket on %s', epull_uri)
with salt.utils.files.set_umask(0o177):
self.publisher.start()
self.puller.start()
@ -1192,6 +1182,7 @@ class EventReturn(salt.utils.process.SignalHandlingMultiprocessingProcess):
self.opts = opts
self.event_return_queue = self.opts['event_return_queue']
self.event_return_queue_max_seconds = self.opts.get('event_return_queue_max_seconds', 0)
local_minion_opts = self.opts.copy()
local_minion_opts['file_client'] = 'local'
self.minion = salt.minion.MasterMinion(local_minion_opts)
@ -1227,13 +1218,13 @@ class EventReturn(salt.utils.process.SignalHandlingMultiprocessingProcess):
if isinstance(self.opts['event_return'], list):
# Multiple event returners
for r in self.opts['event_return']:
log.debug('Calling event returner {0}, one of many.'.format(r))
log.debug('Calling event returner %s, one of many.', r)
event_return = '{0}.event_return'.format(r)
self._flush_event_single(event_return)
else:
# Only a single event returner
log.debug('Calling event returner {0}, only one '
'configured.'.format(self.opts['event_return']))
log.debug('Calling event returner %s, only one '
'configured.', self.opts['event_return'])
event_return = '{0}.event_return'.format(
self.opts['event_return']
)
@ -1245,13 +1236,13 @@ class EventReturn(salt.utils.process.SignalHandlingMultiprocessingProcess):
try:
self.minion.returners[event_return](self.event_queue)
except Exception as exc:
log.error('Could not store events - returner \'{0}\' raised '
'exception: {1}'.format(event_return, exc))
log.error('Could not store events - returner \'%s\' raised '
'exception: %s', event_return, exc)
# don't waste processing power unnecessarily on converting a
# potentially huge dataset to a string
if log.level <= logging.DEBUG:
log.debug('Event data that caused an exception: {0}'.format(
self.event_queue))
log.debug('Event data that caused an exception: %s',
self.event_queue)
else:
log.error('Could not store return for event(s) - returner '
'\'%s\' not found.', event_return)
@ -1265,17 +1256,52 @@ class EventReturn(salt.utils.process.SignalHandlingMultiprocessingProcess):
events = self.event.iter_events(full=True)
self.event.fire_event({}, 'salt/event_listen/start')
try:
# events below is a generator, we will iterate until we get the salt/event/exit tag
oldestevent = None
for event in events:
if event['tag'] == 'salt/event/exit':
# We're done eventing
self.stop = True
if self._filter(event):
# This event passed the filter, add it to the queue
self.event_queue.append(event)
if len(self.event_queue) >= self.event_return_queue:
too_long_in_queue = False
# if max_seconds is >0, then we want to make sure we flush the queue
# every event_return_queue_max_seconds seconds, If it's 0, don't
# apply any of this logic
if self.event_return_queue_max_seconds > 0:
rightnow = datetime.datetime.now()
if not oldestevent:
oldestevent = rightnow
age_in_seconds = (rightnow - oldestevent).seconds
if age_in_seconds > 0:
log.debug('Oldest event in queue is %s seconds old.', age_in_seconds)
if age_in_seconds >= self.event_return_queue_max_seconds:
too_long_in_queue = True
oldestevent = None
else:
too_long_in_queue = False
if too_long_in_queue:
log.debug('Oldest event has been in queue too long, will flush queue')
# If we are over the max queue size or the oldest item in the queue has been there too long
# then flush the queue
if len(self.event_queue) >= self.event_return_queue or too_long_in_queue:
log.debug('Flushing %s events.', len(self.event_queue))
self.flush_events()
oldestevent = None
if self.stop:
# We saw the salt/event/exit tag, we can stop eventing
break
finally: # flush all we have at this moment
# No matter what, make sure we flush the queue even when we are exiting
# and there will be no more events.
if self.event_queue:
log.debug('Flushing %s events.', len(self.event_queue))
self.flush_events()
def _filter(self, event):

View file

@ -5,6 +5,7 @@ Jinja loading utils to enable a more powerful backend for jinja templates
# Import python libs
from __future__ import absolute_import, unicode_literals
import atexit
import collections
import logging
import os.path
@ -54,8 +55,20 @@ class SaltCacheLoader(BaseLoader):
Templates are cached like regular salt states
and only loaded once per loader instance.
'''
_cached_client = None
@classmethod
def shutdown(cls):
if cls._cached_client is None:
return
# PillarClient and LocalClient objects do not have a destroy method
if hasattr(cls._cached_client, 'destroy'):
cls._cached_client.destroy()
cls._cached_client = None
def __init__(self, opts, saltenv='base', encoding='utf-8',
pillar_rend=False):
pillar_rend=False, _file_client=None):
self.opts = opts
self.saltenv = saltenv
self.encoding = encoding
@ -69,7 +82,7 @@ class SaltCacheLoader(BaseLoader):
self.searchpath = [os.path.join(opts['cachedir'], 'files', saltenv)]
log.debug('Jinja search path: %s', self.searchpath)
self.cached = []
self._file_client = None
self._file_client = _file_client
# Instantiate the fileclient
self.file_client()
@ -77,9 +90,14 @@ class SaltCacheLoader(BaseLoader):
'''
Return a file client. Instantiates on first call.
'''
if not self._file_client:
self._file_client = salt.fileclient.get_file_client(
self.opts, self.pillar_rend)
# If there was no file_client passed to the class, create a cache_client
# and use that. This avoids opening a new file_client every time this
# class is instantiated
if self._file_client is None:
if not SaltCacheLoader._cached_client:
SaltCacheLoader._cached_client = salt.fileclient.get_file_client(
self.opts, self.pillar_rend)
self._file_client = SaltCacheLoader._cached_client
return self._file_client
def cache_file(self, template):
@ -171,6 +189,9 @@ class SaltCacheLoader(BaseLoader):
raise TemplateNotFound(template)
atexit.register(SaltCacheLoader.shutdown)
class PrintableDict(OrderedDict):
'''
Ensures that dict str() and repr() are YAML friendly.

View file

@ -47,6 +47,37 @@ TARGET_REX = re.compile(
)
def _nodegroup_regex(nodegroup, words, opers):
opers_set = set(opers)
ret = words
if (set(ret) - opers_set) == set(ret):
# No compound operators found in nodegroup definition. Check for
# group type specifiers
group_type_re = re.compile('^[A-Z]@')
regex_chars = ['(', '[', '{', '\\', '?', '}', ']', ')']
if not [x for x in ret if '*' in x or group_type_re.match(x)]:
# No group type specifiers and no wildcards.
# Treat this as an expression.
if [x for x in ret if x in [x for y in regex_chars if y in x]]:
joined = 'E@' + ','.join(ret)
log.debug(
'Nodegroup \'%s\' (%s) detected as an expression. '
'Assuming compound matching syntax of \'%s\'',
nodegroup, ret, joined
)
else:
# Treat this as a list of nodenames.
joined = 'L@' + ','.join(ret)
log.debug(
'Nodegroup \'%s\' (%s) detected as list of nodenames. '
'Assuming compound matching syntax of \'%s\'',
nodegroup, ret, joined
)
# Return data must be a list of compound matching components
# to be fed into compound matcher. Enclose return data in list.
return [joined]
def parse_target(target_expression):
'''Parse `target_expressing` splitting it into `engine`, `delimiter`,
`pattern` - returns a dict'''
@ -141,36 +172,16 @@ def nodegroup_comp(nodegroup, nodegroups, skip=None, first_call=True):
# Only return list form if a nodegroup was expanded. Otherwise return
# the original string to conserve backwards compat
if expanded_nodegroup or not first_call:
if not first_call:
joined = _nodegroup_regex(nodegroup, words, opers)
if joined:
return joined
return ret
else:
opers_set = set(opers)
ret = words
if (set(ret) - opers_set) == set(ret):
# No compound operators found in nodegroup definition. Check for
# group type specifiers
group_type_re = re.compile('^[A-Z]@')
regex_chars = ['(', '[', '{', '\\', '?', '}', ']', ')']
if not [x for x in ret if '*' in x or group_type_re.match(x)]:
# No group type specifiers and no wildcards.
# Treat this as an expression.
if [x for x in ret if x in [x for y in regex_chars if y in x]]:
joined = 'E@' + ','.join(ret)
log.debug(
'Nodegroup \'%s\' (%s) detected as an expression. '
'Assuming compound matching syntax of \'%s\'',
nodegroup, ret, joined
)
else:
# Treat this as a list of nodenames.
joined = 'L@' + ','.join(ret)
log.debug(
'Nodegroup \'%s\' (%s) detected as list of nodenames. '
'Assuming compound matching syntax of \'%s\'',
nodegroup, ret, joined
)
# Return data must be a list of compound matching components
# to be fed into compound matcher. Enclose return data in list.
return [joined]
joined = _nodegroup_regex(nodegroup, ret, opers)
if joined:
return joined
log.debug(
'No nested nodegroups detected. Using original nodegroup '

View file

@ -102,15 +102,15 @@ class DigitalOceanTest(ShellCase):
pub = 'ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAAAQQDDHr/jh2Jy4yALcK4JyWbVkPRaWmhck3IgCoeOO3z1e2dBowLh64QAM+Qb72pxekALga2oi4GvT+TlWNhzPH4V example'
finger_print = '3b:16:bf:e4:8b:00:8b:b8:59:8c:a9:d3:f0:19:45:fa'
_key = self.run_cloud('-f create_key {0} name="MyPubKey" public_key="{1}"'.format(PROVIDER_NAME, pub))
# Upload public key
self.assertIn(
finger_print,
[i.strip() for i in _key]
)
try:
_key = self.run_cloud('-f create_key {0} name="MyPubKey" public_key="{1}"'.format(PROVIDER_NAME, pub))
# Upload public key
self.assertIn(
finger_print,
[i.strip() for i in _key]
)
# List all keys
list_keypairs = self.run_cloud('-f list_keypairs {0}'.format(PROVIDER_NAME))

View file

@ -70,14 +70,22 @@ nodegroups:
min: minion
sub_min: sub_minion
mins: N@min or N@sub_min
list_nodegroup:
- 'minion'
- 'sub_minion'
multiline_nodegroup:
- 'minion'
- 'or'
- 'sub_minion'
one_minion_list:
- 'minion'
redundant_minions: N@min or N@mins
nodegroup_loop_a: N@nodegroup_loop_b
nodegroup_loop_b: N@nodegroup_loop_a
missing_minion: L@minion,ghostminion
list_group: N@multiline_nodegroup
one_list_group: N@one_minion_list
list_group2: N@list_nodegroup
mysql.host: localhost

View file

@ -286,6 +286,7 @@ class CPModuleTest(ModuleCase):
self.assertNotIn('bacon', data)
@skipIf(not SSL3_SUPPORT, 'Requires python with SSL3 support')
@skipIf(salt.utils.platform.is_darwin() and six.PY2, 'This test hangs on OS X on Py2')
@with_tempfile()
def test_get_url_https(self, tgt):
'''
@ -305,6 +306,7 @@ class CPModuleTest(ModuleCase):
self.assertNotIn('AYBABTU', data)
@skipIf(not SSL3_SUPPORT, 'Requires python with SSL3 support')
@skipIf(salt.utils.platform.is_darwin() and six.PY2, 'This test hangs on OS X on Py2')
def test_get_url_https_dest_empty(self):
'''
cp.get_url with https:// source given and destination omitted.
@ -322,6 +324,7 @@ class CPModuleTest(ModuleCase):
self.assertNotIn('AYBABTU', data)
@skipIf(not SSL3_SUPPORT, 'Requires python with SSL3 support')
@skipIf(salt.utils.platform.is_darwin() and six.PY2, 'This test hangs on OS X on Py2')
def test_get_url_https_no_dest(self):
'''
cp.get_url with https:// source given and destination set as None
@ -397,6 +400,7 @@ class CPModuleTest(ModuleCase):
self.assertEqual(ret, False)
@skipIf(not SSL3_SUPPORT, 'Requires python with SSL3 support')
@skipIf(salt.utils.platform.is_darwin() and six.PY2, 'This test hangs on OS X on Py2')
def test_get_file_str_https(self):
'''
cp.get_file_str with https:// source given

View file

@ -17,7 +17,7 @@ import salt.utils.platform
@skip_if_not_root
@flaky
@flaky(attempts=10)
@skipIf(not salt.utils.platform.is_darwin(), 'Test only available on macOS')
@skipIf(not salt.utils.path.which('systemsetup'), '\'systemsetup\' binary not found in $PATH')
class MacPowerModuleTest(ModuleCase):
@ -142,7 +142,7 @@ class MacPowerModuleTest(ModuleCase):
@skip_if_not_root
@flaky
@flaky(attempts=10)
@skipIf(not salt.utils.platform.is_darwin(), 'Test only available on macOS')
@skipIf(not salt.utils.path.which('systemsetup'), '\'systemsetup\' binary not found in $PATH')
class MacPowerModuleTestSleepOnPowerButton(ModuleCase):
@ -193,7 +193,7 @@ class MacPowerModuleTestSleepOnPowerButton(ModuleCase):
@skip_if_not_root
@flaky
@flaky(attempts=10)
@skipIf(not salt.utils.platform.is_darwin(), 'Test only available on macOS')
@skipIf(not salt.utils.path.which('systemsetup'), '\'systemsetup\' binary not found in $PATH')
class MacPowerModuleTestRestartPowerFailure(ModuleCase):
@ -243,7 +243,7 @@ class MacPowerModuleTestRestartPowerFailure(ModuleCase):
@skip_if_not_root
@flaky
@flaky(attempts=10)
@skipIf(not salt.utils.platform.is_darwin(), 'Test only available on macOS')
@skipIf(not salt.utils.path.which('systemsetup'), '\'systemsetup\' binary not found in $PATH')
class MacPowerModuleTestWakeOnNet(ModuleCase):
@ -290,7 +290,7 @@ class MacPowerModuleTestWakeOnNet(ModuleCase):
@skip_if_not_root
@flaky
@flaky(attempts=10)
@skipIf(not salt.utils.platform.is_darwin(), 'Test only available on macOS')
@skipIf(not salt.utils.path.which('systemsetup'), '\'systemsetup\' binary not found in $PATH')
class MacPowerModuleTestWakeOnModem(ModuleCase):

View file

@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
'''
integration tests for mac_system
integration tests for mac_shadow
'''
# Import Python libs
@ -40,7 +40,7 @@ NO_USER = __random_string()
@skipIf(not salt.utils.path.which('pwpolicy'), '\'pwpolicy\' binary not found in $PATH')
class MacShadowModuleTest(ModuleCase):
'''
Validate the mac_system module
Validate the mac_shadow module
'''
def setUp(self):

View file

@ -34,7 +34,7 @@ SET_SUBNET_NAME = __random_string()
@skip_if_not_root
@flaky(attempts=8)
@flaky(attempts=10)
@skipIf(not salt.utils.platform.is_darwin(), 'Test only available on macOS')
@skipIf(not salt.utils.path.which('systemsetup'), '\'systemsetup\' binary not found in $PATH')
class MacSystemModuleTest(ModuleCase):

View file

@ -1864,6 +1864,7 @@ class StateModuleTest(ModuleCase, SaltReturnAssertsMixin):
pass
@skipIf(sys.platform.startswith('win'), 'Skipped until parallel states can be fixed on Windows')
@skipIf(salt.utils.platform.is_darwin() and six.PY2, 'This test hangs on OS X on Py2')
def test_parallel_state_with_long_tag(self):
'''
This tests the case where the state being executed has a long ID dec or

View file

@ -153,6 +153,23 @@ class MatchTest(ShellCase, ShellCaseCommonTestsMixin):
self.assertTrue(minion_in_returns('minion', data))
self.assertTrue(minion_in_returns('sub_minion', data))
def test_nodegroup_list(self):
data = self.run_salt('-N list_group test.ping')
self.assertTrue(minion_in_returns('minion', data))
self.assertTrue(minion_in_returns('sub_minion', data))
data = self.run_salt('-N list_group2 test.ping')
self.assertTrue(minion_in_returns('minion', data))
self.assertTrue(minion_in_returns('sub_minion', data))
data = self.run_salt('-N one_list_group test.ping')
self.assertTrue(minion_in_returns('minion', data))
self.assertFalse(minion_in_returns('sub_minion', data))
data = self.run_salt('-N one_minion_list test.ping')
self.assertTrue(minion_in_returns('minion', data))
self.assertFalse(minion_in_returns('sub_minion', data))
def test_glob(self):
'''
test salt glob matcher

View file

@ -63,6 +63,7 @@ class MinionTest(ShellCase, testprogram.TestProgramCase, ShellCaseCommonTestsMix
with salt.utils.files.fopen(self.get_config_file_path(config_file_name), 'r') as fhr:
config = salt.utils.yaml.safe_load(fhr)
config['log_file'] = 'file:///tmp/log/LOG_LOCAL3'
config['id'] = 'issue-7754'
with salt.utils.files.fopen(os.path.join(config_dir, config_file_name), 'w') as fhw:
salt.utils.yaml.safe_dump(config, fhw, default_flow_style=False)

View file

@ -2,17 +2,12 @@
'''
Integration tests for the beacon states
'''
# Import Python Libs
from __future__ import absolute_import, print_function, unicode_literals
# Import Salt Testing Libs
from tests.support.case import ModuleCase
from tests.support.mixins import SaltReturnAssertsMixin
import logging
log = logging.getLogger(__name__)
class BeaconStateTestCase(ModuleCase, SaltReturnAssertsMixin):
'''

View file

@ -74,6 +74,9 @@ else:
FILEPILLARDEF = '/tmp/filepillar-defaultvalue'
FILEPILLARGIT = '/tmp/filepillar-bar'
TEST_SYSTEM_USER = 'test_system_user'
TEST_SYSTEM_GROUP = 'test_system_group'
def _test_managed_file_mode_keep_helper(testcase, local=False):
'''
@ -1260,11 +1263,11 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin):
if IS_WINDOWS:
ret = self.run_state(
'file.directory', name=tmp_dir, recurse={'mode'},
'file.directory', name=tmp_dir, recurse=['mode'],
follow_symlinks=True, win_owner='Administrators')
else:
ret = self.run_state(
'file.directory', name=tmp_dir, recurse={'mode'},
'file.directory', name=tmp_dir, recurse=['mode'],
file_mode=644, dir_mode=755)
self.assertSaltTrueReturn(ret)
@ -2300,6 +2303,7 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin):
self.assertEqual([salt.utils.stringutils.to_str(line) for line in expected], contents)
@with_tempdir()
@skipIf(salt.utils.platform.is_darwin() and six.PY2, 'This test hangs on OS X on Py2')
def test_issue_11003_immutable_lazy_proxy_sum(self, base_dir):
# causes the Import-Module ServerManager error on Windows
template_path = os.path.join(TMP_STATE_TREE, 'issue-11003.sls')
@ -2343,7 +2347,7 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin):
with salt.utils.files.fopen(template_path, 'w') as fp_:
fp_.write(os.linesep.join(sls_template).format(testcase_filedest))
ret = self.run_function('state.sls', mods='issue-11003')
ret = self.run_function('state.sls', mods='issue-11003', timeout=600)
for name, step in six.iteritems(ret):
self.assertSaltTrueReturn({name: step})
with salt.utils.files.fopen(testcase_filedest) as fp_:
@ -2476,7 +2480,7 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin):
@skip_if_not_root
@skipIf(not HAS_PWD, "pwd not available. Skipping test")
@skipIf(not HAS_GRP, "grp not available. Skipping test")
@with_system_user_and_group('user12209', 'group12209',
@with_system_user_and_group(TEST_SYSTEM_USER, TEST_SYSTEM_GROUP,
on_existing='delete', delete=True)
@with_tempdir()
def test_issue_12209_follow_symlinks(self, tempdir, user, group):
@ -2512,7 +2516,7 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin):
@skip_if_not_root
@skipIf(not HAS_PWD, "pwd not available. Skipping test")
@skipIf(not HAS_GRP, "grp not available. Skipping test")
@with_system_user_and_group('user12209', 'group12209',
@with_system_user_and_group(TEST_SYSTEM_USER, TEST_SYSTEM_GROUP,
on_existing='delete', delete=True)
@with_tempdir()
def test_issue_12209_no_follow_symlinks(self, tempdir, user, group):
@ -2637,7 +2641,7 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin):
@skip_if_not_root
@skipIf(not HAS_PWD, "pwd not available. Skipping test")
@skipIf(not HAS_GRP, "grp not available. Skipping test")
@with_system_user_and_group('test_setuid_user', 'test_setuid_group',
@with_system_user_and_group(TEST_SYSTEM_USER, TEST_SYSTEM_GROUP,
on_existing='delete', delete=True)
def test_owner_after_setuid(self, user, group):
@ -2694,7 +2698,7 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin):
@skip_if_not_root
@skipIf(not HAS_PWD, "pwd not available. Skipping test")
@skipIf(not HAS_GRP, "grp not available. Skipping test")
@with_system_user_and_group('user12209', 'group12209',
@with_system_user_and_group(TEST_SYSTEM_USER, TEST_SYSTEM_GROUP,
on_existing='delete', delete=True)
@with_tempdir()
def test_issue_48336_file_managed_mode_setuid(self, tempdir, user, group):

View file

@ -497,6 +497,7 @@ class PipStateTest(ModuleCase, SaltReturnAssertsMixin):
if os.path.isfile(requirements_file):
os.unlink(requirements_file)
@skipIf(salt.utils.platform.is_darwin() and six.PY2, 'This test hangs on OS X on Py2')
def test_22359_pip_installed_unless_does_not_trigger_warnings(self):
# This test case should be moved to a format_call unit test specific to
# the state internal keywords
@ -509,12 +510,12 @@ class PipStateTest(ModuleCase, SaltReturnAssertsMixin):
)
)
false_cmd = '/bin/false'
false_cmd = salt.utils.path.which('false')
if salt.utils.platform.is_windows():
false_cmd = 'exit 1 >nul'
try:
ret = self.run_state(
'pip.installed', name='pep8', bin_env=venv_dir, unless=false_cmd
'pip.installed', name='pep8', bin_env=venv_dir, unless=false_cmd, timeout=600
)
self.assertSaltTrueReturn(ret)
self.assertNotIn('warnings', next(six.itervalues(ret)))

View file

@ -9,6 +9,13 @@ from __future__ import absolute_import, unicode_literals, print_function
# Import Salt Testing libs
from tests.support.case import ModuleCase
from tests.support.helpers import flaky
from tests.support.unit import skipIf
# Import Salt libs
import salt.utils.platform
# Import 3rd-party libs
from salt.ext import six
class TestJinjaRenderer(ModuleCase):
@ -24,6 +31,7 @@ class TestJinjaRenderer(ModuleCase):
self.assertTrue(state_ret['result'])
@flaky
@skipIf(salt.utils.platform.is_darwin() and six.PY2, 'This test hangs on OS X on Py2')
def test_salt_contains_function(self):
'''
Test if we are able to check if a function exists inside the "salt"

View file

@ -1,17 +1,21 @@
# -*- coding: utf-8 -*-
'''
:codeauthor: :email:`Gareth J. Greenaway <gareth@saltstack.com>`
:codeauthor: :email:`David Murphy <dmurphy@saltstack.com>`
'''
# Import Python libs
from __future__ import absolute_import, print_function, unicode_literals
import os
import shutil
import datetime
import time
# Import Salt Testing Libs
from tests.support.mixins import LoaderModuleMockMixin
# Import Salt Testing libs
from tests.support.helpers import destructiveTest
from tests.support.unit import TestCase, skipIf
from tests.support.paths import TMP
from tests.support.mixins import LoaderModuleMockMixin
from tests.support.mock import (
MagicMock,
patch,
@ -19,11 +23,143 @@ from tests.support.mock import (
NO_MOCK_REASON
)
# Import Salt Libs
import salt.modules.gpg as gpg
# Import Salt libs
import salt.utils.path
import salt.utils.platform
import salt.utils.files
import salt.modules.gpg as gpg
GPG_TEST_KEY_PASSPHRASE = 'testkeypassphrase'
GPG_TEST_KEY_ID = '7416F045'
GPG_TEST_PUB_KEY = """-----BEGIN PGP PUBLIC KEY BLOCK-----
mQGNBFz1dx4BDACph7J5nuWE+zb9rZqTaL8akAnPAli2j6Qtk7BTDzTM9Kq80U2P
O3QRAFBQDODsgmrBTWgeZeNhN6Snz2WrZ8pC0RMK+mCmEt5S49ydWtvWl/xtzPfg
sy8h8OrIjSH1G0NQv9vdBpg2Y9vXLNDCZAqH0/Sh/wCkHT80c4i8TU09UZfx96S6
fFVmB7nHu/ztgiIkC6Fr04WGHlkd50g8r8CFPvwKOTD+rfoMsGppmAC1+y8ajfik
B+tEL88Rv2s4hLU78nQ3iGtdVRSs5Ip0x4O/PYZIDEd8KuBMo+flSzOZj2HCbUzN
MHiTXIbL8hBlwku9LGO7Itks4v2vfDh57JRHykwzGnvOlgXtSSvbayWLohNXtzWq
WtsMKrsrsIUprg1jhIVHnMSZZXMMizlni6MT5vbil2Bgn1g7diNscDnfCD6vlWUH
FRS1XkFFZ5ozu0+lC/5UaTcrjhH8/hQBNJwjsDSPsyKYQm+t7PXqq4SlI3Er7JJW
esK0diZ6reeebl0AEQEAAbQhdGVzdGtleSA8cGFja2FnaW5nQHNhbHRzdGFjay5j
b20+iQHOBBMBCgA4AhsDBQsJCAcCBhUKCQgLAgQWAgMBAh4BAheAFiEEjS1DixNC
naoZrRFuvMeUg3QW8EUFAlz1ekoACgkQvMeUg3QW8EVm1gv/Z5CCqFoF8IXpa3g0
G+9C4gRS0h+tEYtjebAgxBn/dqk6cSNIb1BGDM/sSWxK5/jweN8CF/ojVgP1CFrX
igULnL3g5351L+8feU2ImP2OML438/GcOOJ+iwTKUBRuqxqK8NwcRuuN6YmbjtUw
JSD2+60DlWfGsInyenwGkgBAM44Y6dSAXKsm6dJ/IGVTQIsHE5mKRykrRZ1jyXxw
i1CF8fEyG6fNNb8I8Oiyj52xdYMxBvGmgPpFlF1jRoU+OViGFeUiZ49XFlC8GNCf
boWfXg+PCwZeWUeJ/s6a6iC5HG7iC0XYRiqhzymP8ctrwwm5dPT4ebYNfUSqq8i0
/YG6JGdsQZrq0i0FuceUSfOKVeFvSm+AkCG3kLxxgM925fqBetWiAJRCDYdmy8uc
jm618584hqa/yFMOfwVoLqq8mVg84wyGU6gXo6hXCa/008PITFZD47ILuN/Z8iLm
8Or/aJD5Tq+Wg3r2Di9+Ku8/gcn3bVxwFRjiVPrFwJkyjfMFuQGNBFz1dx4BDADU
Ynz0efLmaKqZIjfxwDf7iVqwcD9b8cMXIv2aa3Y56QDVJBXU5952LiU8XuzBBPq+
4FYQ78UmxSv3Rk6LKb9P2ih2L1PaJuQ1ZkNrQLqab3olpAu/Xe3raGLgCOU0RKJw
EPF3RcKu8ALuRcovfwzXWg8w19QRUPewZdVC4VgslKp8mNLECvdUxxVIDQWf06RZ
uCAfbqdiYReE62QT7NR4lAa1RpfU7Nt149OcQEP8VKTAZgTYyuwhRXFbrDD3Zp58
k5H0nKHNX+w1Ayih/YUk2b3etaBhlcTVAy/73TPfrd3Gl8dtzJZNtUD/eLWdGfP9
mCghmyAqbiQngH2eAMeifIYornynZFVBPBlvnwy7Iouq0V6tIVyNPGp0jcy1j2XT
NRBJyFbvam3hmrRW8A/VOJQ1W7LOKaM/5lh/BarrSLKn0xlL97GTmuSqlS+WrmyM
kU182TUYyUD7Rs3mydnMVS/N4aRxu4ITaTm9vieZLmAPR9vPgo+GwdHEkwm797kA
EQEAAYkBtgQYAQoAIAIbDBYhBI0tQ4sTQp2qGa0RbrzHlIN0FvBFBQJc9XqkAAoJ
ELzHlIN0FvBFlyEL/jVhm2PFj2mCLuKE5/nV4JvxY4Qu4+NCFiEdYK+zUoD36gEJ
3VjHL5dydHuZWcvm+XLW1PseNx2awVs47mjv2iZOLwY6BtfAFWhWEFmBEe6fTFXz
KkDWRst4gm0b0B7S3byoABwcyYNS6RkTfUApK4zdYErbfOLoT+Xa08YQKLVK7fmE
KBnBnnHUvktYTEvhwv9BID+qLnTVSQcjRcXbDQAYm14c7Nyb/SyxcUaUkCk41MVY
+vzNQlFrVc4h2np41X8JbmrsQb37E7lE+h32sJFBU03SGf0vT7SXXQj+UD/DEGay
Gt/8aRa5FGrcJyM5mTdbSgvCp0EjTrdokK5GHwM23cbSTo+nN9BNhIBRc4929SaJ
DVRqOIoJ+eHZdf3gIkMPOA3fBbMYzW65LIxt/p49tHD0c/nioZETycEgGuuYbnrn
IfXHFqiCAxkobIHqUg/BSu1cs8GNgE7BVUXy8JThuzmVdh4Pvd3YN1ouoPyVuDrk
ylirh0aqUQdSeIuJTg==
=yF8M
-----END PGP PUBLIC KEY BLOCK-----
"""
GPG_TEST_PRIV_KEY = """-----BEGIN PGP PRIVATE KEY BLOCK-----
lQWFBFz1dx4BDACph7J5nuWE+zb9rZqTaL8akAnPAli2j6Qtk7BTDzTM9Kq80U2P
O3QRAFBQDODsgmrBTWgeZeNhN6Snz2WrZ8pC0RMK+mCmEt5S49ydWtvWl/xtzPfg
sy8h8OrIjSH1G0NQv9vdBpg2Y9vXLNDCZAqH0/Sh/wCkHT80c4i8TU09UZfx96S6
fFVmB7nHu/ztgiIkC6Fr04WGHlkd50g8r8CFPvwKOTD+rfoMsGppmAC1+y8ajfik
B+tEL88Rv2s4hLU78nQ3iGtdVRSs5Ip0x4O/PYZIDEd8KuBMo+flSzOZj2HCbUzN
MHiTXIbL8hBlwku9LGO7Itks4v2vfDh57JRHykwzGnvOlgXtSSvbayWLohNXtzWq
WtsMKrsrsIUprg1jhIVHnMSZZXMMizlni6MT5vbil2Bgn1g7diNscDnfCD6vlWUH
FRS1XkFFZ5ozu0+lC/5UaTcrjhH8/hQBNJwjsDSPsyKYQm+t7PXqq4SlI3Er7JJW
esK0diZ6reeebl0AEQEAAf4HAwIqiZQqEMAZQ/+u0gE6tBcp52lUhE9fjORqgsY6
C5klAfrnrQyHXYkfjjQMWErSDR5FHeOxOPdZNnhVTBRaUIypLd+Os/YWl5lVO223
znbfK8GJIwHbDFQBSxtkC3WtD8cCqtKXvzApZzqeOhgNcaFJE956ZNlZfsCf0qsm
6xpEq07YiRVga6jJvjIFiPv7wMtdQQ67pEP4/tavLl+yuf6oJko2FjuG3RxrTf/C
CB4tyHCsRwgV7ouEdoSVhjFiDSS5xeWWLPRaXu4ceL0AjClHmdlMJtcpbyXKoh3U
uG5Cnwv9gXh24Qc6uuTWX61u7EhFLUWmKMFOI8dA+INYS8cXU8t6TU6XeV/01c7N
Q1O6QUCOx5VRbWRQuuvQN4f1gZm5QqN2jpNWjoUp2GSoxcHycEVSweEr+TmaouDA
ZOo12gx6dppkiwqS7Feq28qdpiZZPfdl/CvuWHxveNU9OVlexJ6A5PLep053qY+3
OlkvvkOxwmkJk2A3ITb1XngQkZCQDxAqCG6xMYjGIblKqqLqV1/q3pQ1nNbq5/iM
OtoxB7O7kZcyk7fQodk8EUz/ehTAZ5K8EWUETmiH9YlKTBbw8YMYEnuKfUFW6xqT
ROqurJfBlYmZEOxQ3oDVLZSfJQ3g/SXAOTKprB9GKyahM026Y+gfqR7yfwA8ifrH
E+HV4u7n/UjaUgu45LRGLGZQ7slmm79uYcVhBodQ0/eWcow7roHpOCFWTgyY3uhS
xdfuqgkEF8BGjQFaI4VNVeY+3+SM989BagAFuDlJw33fDoFSTPt9C+sqP1t1IvLv
9Eajn55MhO6gUptO3ViFPQE/EkiOrmaAJglu1LHEF/ssqWb/1+RGqF6N0OkKC+gx
iuuTgWl4wfxUsyh2JqIcj+xHRSf3G9OVJYkXaYsSNQ2eoSRlEzzu7Cxi83/qt6Dm
S+ST4wHl2VypfkhbNMq0W1aR8Kubi2Ixvk31ZDyk0uecRf3kNjVwD84WPjDedBsh
5rtCZO5kCAyWooCG41il09HfV9NCTjACCeO+dl4FO5aaLS0JSCBLVtORtwDCCZz+
QhS9CeXC+ook7sIaaiT0xWSnPmhEYE6roqwj4Lq3vvXIgHZjxeJizlGO0OSdTPBw
9wQ5ij/8G6MEGap4thvTohsFGUxHK2xx8Z089kGdmKd4piY/kjtX7AFtLEc0YiDa
w7PTlrqJA9FRta7g/aYVCKBk8G+8dxiErErFgm6RBxnQiJ/lLUAVsJv1cAQ8oyCK
GyDzGXEFk9eQtKGczF4CK8NhOMc9HabnQnzxcMGiVXEn/E3bDqocfeOAWEYft8zJ
sy96EJAk581uZ4CiKOcQW+Zv3N8O7ogdtCF0ZXN0a2V5IDxwYWNrYWdpbmdAc2Fs
dHN0YWNrLmNvbT6JAc4EEwEKADgCGwMFCwkIBwIGFQoJCAsCBBYCAwECHgECF4AW
IQSNLUOLE0KdqhmtEW68x5SDdBbwRQUCXPV6SgAKCRC8x5SDdBbwRWbWC/9nkIKo
WgXwhelreDQb70LiBFLSH60Ri2N5sCDEGf92qTpxI0hvUEYMz+xJbErn+PB43wIX
+iNWA/UIWteKBQucveDnfnUv7x95TYiY/Y4wvjfz8Zw44n6LBMpQFG6rGorw3BxG
643piZuO1TAlIPb7rQOVZ8awifJ6fAaSAEAzjhjp1IBcqybp0n8gZVNAiwcTmYpH
KStFnWPJfHCLUIXx8TIbp801vwjw6LKPnbF1gzEG8aaA+kWUXWNGhT45WIYV5SJn
j1cWULwY0J9uhZ9eD48LBl5ZR4n+zprqILkcbuILRdhGKqHPKY/xy2vDCbl09Ph5
tg19RKqryLT9gbokZ2xBmurSLQW5x5RJ84pV4W9Kb4CQIbeQvHGAz3bl+oF61aIA
lEINh2bLy5yObrXznziGpr/IUw5/BWguqryZWDzjDIZTqBejqFcJr/TTw8hMVkPj
sgu439nyIubw6v9okPlOr5aDevYOL34q7z+ByfdtXHAVGOJU+sXAmTKN8wWdBYYE
XPV3HgEMANRifPR58uZoqpkiN/HAN/uJWrBwP1vxwxci/ZprdjnpANUkFdTn3nYu
JTxe7MEE+r7gVhDvxSbFK/dGTospv0/aKHYvU9om5DVmQ2tAuppveiWkC79d7eto
YuAI5TREonAQ8XdFwq7wAu5Fyi9/DNdaDzDX1BFQ97Bl1ULhWCyUqnyY0sQK91TH
FUgNBZ/TpFm4IB9up2JhF4TrZBPs1HiUBrVGl9Ts23Xj05xAQ/xUpMBmBNjK7CFF
cVusMPdmnnyTkfScoc1f7DUDKKH9hSTZvd61oGGVxNUDL/vdM9+t3caXx23Mlk21
QP94tZ0Z8/2YKCGbICpuJCeAfZ4Ax6J8hiiufKdkVUE8GW+fDLsii6rRXq0hXI08
anSNzLWPZdM1EEnIVu9qbeGatFbwD9U4lDVbss4poz/mWH8FqutIsqfTGUv3sZOa
5KqVL5aubIyRTXzZNRjJQPtGzebJ2cxVL83hpHG7ghNpOb2+J5kuYA9H28+Cj4bB
0cSTCbv3uQARAQAB/gcDAgUPU1tmC3CS/x0qZYicVcMiU5wop6fnbnNkEfUQip8V
qpL64/GpP6X7sJiY2BCo0/5AMPDKlTwPxogMQ6NduZ2AbvJybGC7AQULMkd4Y4H1
nwrDk5HWO5dLVoXRSVw9Dm6oaV4bi6wno9yapkq7AVRnvtNEza47gxmV2iwRoU5H
5ciQTU6nd1TkFNhD4ZwZ25CMqffvbrE2Ie6RsBUr9HlxYIint91rVLkkBGhw8W4t
KushxAZpBOQB0Rqtuak/q+/xyDnvNvU/A9GeKpRrxzwAbIdtW0VjPulDk1tThGDA
kmuxSJ1yxUb+CzA/5YoMXto1OqjUI2hO108xgTVl5hpmckBnwsPtbjrtDYFAqwfq
qF9YAVQ3RfMn3ThZ2oXg+FJMcwd9uVJn2/LZd81Nc6g4UogD1sD2ye2vqDGTEztK
BAdthEcufnUP5UrEixE8CPzAJOjuC5ROU57FXCaSSUfIwXO3UoxvNWcuXDC7RVDz
nsv/Hg2j0pSeFht2NO6Pom+4XHY+LHImPTfXamN6IDsTRJGQW8R7Y131fjPQMn7I
0WjyIiqD4eLo5KQYjL+0bE0JiLRaJnlfbu1uoV3sgi8bwG6WlnLh7eKDErA2P0Zs
r0KX5yGR5Ih2CAMfMmqqYrkEYmNxNbLvL5ie9F35MnvRbqyY/9pl0p1Eah7uGnuK
or13bg801HoZJLBTr4sJTqkwuUztloVyBdM6T5hal+rxX37Wnj1PgD0e0Ydqo6hJ
7WJ/Zjd+0qk90VoiGNRre7tMBYDQ3w3wS+tSta3kxTKj5I4hLZncN+pt9F6o+tgd
YAhWO93DzWjMMUV/jkKTJqwAFAuRlnTwzbBS70N2Z8jrGczV05RV9OH7DRr34noF
O7/Bn0iDpKZdbArtkJZyu4B+MUp/RRiuxn7iWOM2tEjDhUuyHXYYFppFB8fG7r52
VcxH/Sc3VcXB0l2KywrAG2oZfiE8M4NPHuiIHFpcjeK2VLrP2iGLvdlL4IsvtFIU
uLiFi7r0egEi/Ge8ebRF7TtjmhL5Jzi9bbUGuvxSIGZU1HCECq+hHVy45XwKrRTo
AzDIwNjBFmZzL7FI7X16W/6Y11VVmXEmDt9dmmu78bT0z2Bq0Q7K9C7Eq2qzW65z
+4fntFF8BWDs3l5yPKLjg+wlgPPXieHgrUQpZOYCsFJqig3xFZSu1ZMzYdlvyNSF
KAgMPZfi37kAUo8ZiH27SZAA/aTK6b69oEQ6I7CsMJZLRp/gzYvn4NN/DIK3fuYc
jsKB6OR3gWmU7EDf/1EZkO0YK2YiwkSrDALJdUo7ArYR2KIZTUEG9rxDBUD8IyIz
PGdh7sBG4PhOxpQ+SiZyzLzZAJjviQG2BBgBCgAgAhsMFiEEjS1DixNCnaoZrRFu
vMeUg3QW8EUFAlz1eqQACgkQvMeUg3QW8EWXIQv+NWGbY8WPaYIu4oTn+dXgm/Fj
hC7j40IWIR1gr7NSgPfqAQndWMcvl3J0e5lZy+b5ctbU+x43HZrBWzjuaO/aJk4v
BjoG18AVaFYQWYER7p9MVfMqQNZGy3iCbRvQHtLdvKgAHBzJg1LpGRN9QCkrjN1g
Stt84uhP5drTxhAotUrt+YQoGcGecdS+S1hMS+HC/0EgP6oudNVJByNFxdsNABib
Xhzs3Jv9LLFxRpSQKTjUxVj6/M1CUWtVziHaenjVfwluauxBvfsTuUT6HfawkUFT
TdIZ/S9PtJddCP5QP8MQZrIa3/xpFrkUatwnIzmZN1tKC8KnQSNOt2iQrkYfAzbd
xtJOj6c30E2EgFFzj3b1JokNVGo4ign54dl1/eAiQw84Dd8FsxjNbrksjG3+nj20
cPRz+eKhkRPJwSAa65hueuch9ccWqIIDGShsgepSD8FK7VyzwY2ATsFVRfLwlOG7
OZV2Hg+93dg3Wi6g/JW4OuTKWKuHRqpRB1J4i4lO
=WRTN
-----END PGP PRIVATE KEY BLOCK-----
"""
try:
import gnupg # pylint: disable=import-error,unused-import
HAS_GPG = True
@ -31,15 +167,38 @@ except ImportError:
HAS_GPG = False
@skipIf(not salt.utils.path.which('gpg'), 'GPG not installed. Skipping')
@destructiveTest
@skipIf(NO_MOCK, NO_MOCK_REASON)
@skipIf(not salt.utils.platform.is_linux(), 'These tests can only be run on linux')
class GpgTestCase(TestCase, LoaderModuleMockMixin):
'''
Test cases for salt.modules.gpg
Validate the gpg module
'''
def setup_loader_modules(self):
    # Load the gpg execution module with an empty __salt__ dunder; the
    # individual tests patch in the module functions they need via
    # patch.dict(gpg.__salt__, ...).
    return {gpg: {'__salt__': {}}}
@skipIf(not HAS_GPG, 'GPG Module Unavailable')
def setUp(self):
    '''
    Create a scratch GPG home directory under TMP and write the armored
    test public and private keys to disk for the tests to import.
    '''
    super(GpgTestCase, self).setUp()
    self.gpghome = os.path.join(TMP, 'gpghome')
    # A keyring directory may have been left behind by an earlier,
    # aborted run... Don't fail because of this!
    if not os.path.isdir(self.gpghome):
        os.makedirs(self.gpghome)
    self.gpgfile_pub = os.path.join(TMP, 'gpgfile.pub')
    self.gpgfile_priv = os.path.join(TMP, 'gpgfile.priv')
    # Dump both armored keys into the temp area.
    for path, armored_key in ((self.gpgfile_pub, GPG_TEST_PUB_KEY),
                              (self.gpgfile_priv, GPG_TEST_PRIV_KEY)):
        with salt.utils.files.fopen(path, 'wb') as fp:
            fp.write(salt.utils.stringutils.to_bytes(armored_key))
    self.user = 'salt'
@skipIf(not HAS_GPG, 'GPG Module Unavailable')
def tearDown(self):
    '''
    Remove the exported key files and the scratch GPG home created by
    setUp.
    '''
    # Remove BOTH key files written by setUp. The original cleanup only
    # removed the public key, leaking gpgfile.priv into TMP after every
    # run.
    for keyfile in (self.gpgfile_pub, self.gpgfile_priv):
        if os.path.isfile(keyfile):
            os.remove(keyfile)
    # Best-effort removal of the keyring directory; gpg-agent may still
    # hold sockets inside it, hence ignore_errors.
    shutil.rmtree(self.gpghome, ignore_errors=True)
    super(GpgTestCase, self).tearDown()
@skipIf(not HAS_GPG, 'GPG Module Unavailable')
def test_list_keys(self):
'''
@ -156,7 +315,7 @@ class GpgTestCase(TestCase, LoaderModuleMockMixin):
u'groups': [
u'root'
],
u'home': u'/root',
u'home': self.gpghome,
u'fullname': u'root',
u'homephone': u'',
u'name': u'root'}
@ -202,7 +361,7 @@ class GpgTestCase(TestCase, LoaderModuleMockMixin):
'groups': [
'root'
],
'home': '/root',
'home': self.gpghome,
'fullname': 'root',
'homephone': '',
'name': 'root'}
@ -227,3 +386,35 @@ class GpgTestCase(TestCase, LoaderModuleMockMixin):
with patch.object(gpg, '_search_keys', return_value=_search_result):
ret = gpg.search_keys('person@example.com')
self.assertEqual(ret, _expected_result)
@skipIf(not HAS_GPG, 'GPG Module Unavailable')
def test_gpg_import_pub_key(self):
    '''
    Importing the test public key into the scratch keyring reports
    success.
    '''
    salt_mocks = {
        'config.option': MagicMock(return_value='salt'),
        'user.info': MagicMock(return_value={'name': 'salt',
                                             'home': self.gpghome,
                                             'uid': 1000}),
    }
    with patch.dict(gpg.__salt__, salt_mocks):
        result = gpg.import_key(None, self.gpgfile_pub, 'salt', self.gpghome)
        self.assertEqual(result['res'], True)
@skipIf(not HAS_GPG, 'GPG Module Unavailable')
def test_gpg_import_priv_key(self):
    '''
    Importing the test private key into the scratch keyring reports
    success.
    '''
    salt_mocks = {
        'config.option': MagicMock(return_value='salt'),
        'user.info': MagicMock(return_value={'name': 'salt',
                                             'home': self.gpghome,
                                             'uid': 1000}),
    }
    with patch.dict(gpg.__salt__, salt_mocks):
        result = gpg.import_key(None, self.gpgfile_priv, 'salt', self.gpghome)
        self.assertEqual(result['res'], True)
@skipIf(not HAS_GPG, 'GPG Module Unavailable')
def test_gpg_sign(self):
    '''
    After importing the private key, signing a message with it produces
    non-None output.
    '''
    config_user = MagicMock(return_value='salt')
    salt_mocks = {
        'config.option': config_user,
        'user.info': MagicMock(return_value={'name': 'salt',
                                             'home': self.gpghome,
                                             'uid': 1000}),
        'pillar.get': MagicMock(
            return_value={'gpg_passphrase': GPG_TEST_KEY_PASSPHRASE}),
    }
    with patch.dict(gpg.__salt__, salt_mocks):
        imported = gpg.import_key(None, self.gpgfile_priv, 'salt',
                                  self.gpghome)
        self.assertEqual(imported['res'], True)
        message = 'The quick brown fox jumped over the lazy dog'
        # NOTE(review): config_user (a MagicMock, not a user name) is
        # passed as the ``user`` argument, mirroring the original test;
        # presumably gpg.sign resolves the user via config.option in
        # that case -- confirm against salt.modules.gpg.sign.
        signed = gpg.sign(config_user, GPG_TEST_KEY_ID, message, None,
                          None, True, self.gpghome)
        self.assertIsNotNone(signed)

View file

@ -122,7 +122,10 @@ class TestSaltCacheLoader(TestCase):
},
'pillar_roots': {
'test': [self.template_dir]
}
},
'extension_modules': os.path.join(
os.path.dirname(os.path.abspath(__file__)),
'extmods'),
}
super(TestSaltCacheLoader, self).setUp()
@ -223,6 +226,36 @@ class TestSaltCacheLoader(TestCase):
result = jinja.get_template('hello_include').render(a='Hi', b='Salt')
self.assertEqual(result, 'Hey world !Hi Salt !')
def test_cached_file_client(self):
    '''
    Multiple instantiations of SaltCacheLoader use the cached file client
    '''
    with patch('salt.transport.client.ReqChannel.factory', Mock()):
        first_loader = SaltCacheLoader(self.opts)
        second_loader = SaltCacheLoader(self.opts)
        # Both loaders must share one and the same client object.
        self.assertIs(first_loader._file_client,
                      second_loader._file_client)
def test_file_client_kwarg(self):
    '''
    A file client passed via the ``_file_client`` keyword argument
    overrides any cached file client.
    '''
    explicit_client = MockFileClient()
    loader = SaltCacheLoader(self.opts, _file_client=explicit_client)
    self.assertIs(loader._file_client, explicit_client)
def test_cache_loader_shutdown(self):
    '''
    The shutdown method can be called without raising an exception when
    the file_client does not have a destroy method
    '''
    bare_client = MockFileClient()
    # Precondition: the mock client really lacks a destroy() method.
    self.assertFalse(hasattr(bare_client, 'destroy'))
    loader = SaltCacheLoader(self.opts, _file_client=bare_client)
    self.assertIs(loader._file_client, bare_client)
    # Must complete without raising despite the missing destroy().
    loader.shutdown()
class TestGetTemplate(TestCase):

View file

@ -19,6 +19,9 @@ NODEGROUPS = {
'group2': ['G@foo:bar', 'or', 'web1*'],
'group3': ['N@group1', 'or', 'N@group2'],
'group4': ['host4', 'host5', 'host6'],
'group5': 'N@group4',
'group6': 'N@group3',
'group7': ['host1']
}
EXPECTED = {
@ -26,6 +29,10 @@ EXPECTED = {
'group2': ['G@foo:bar', 'or', 'web1*'],
'group3': ['(', '(', 'L@host1,host2,host3', ')', 'or', '(', 'G@foo:bar', 'or', 'web1*', ')', ')'],
'group4': ['L@host4,host5,host6'],
'group5': ['(', 'L@host4,host5,host6', ')'],
'group6': ['(', '(', '(', 'L@host1,host2,host3', ')', 'or', '(',
'G@foo:bar', 'or', 'web1*', ')', ')', ')'],
'group7': ['L@host1']
}

View file

@ -495,6 +495,7 @@ class SSHThinTestCase(TestCase):
@patch('salt.utils.thin.zipfile', MagicMock())
@patch('salt.utils.thin.os.getcwd', MagicMock())
@patch('salt.utils.thin.os.chdir', MagicMock())
@patch('salt.utils.thin.os.close', MagicMock())
@patch('salt.utils.thin.tempfile.mkdtemp', MagicMock())
@patch('salt.utils.thin.tempfile.mkstemp', MagicMock(return_value=(3, ".temporary")))
@patch('salt.utils.thin.shutil', MagicMock())