Mirror of https://github.com/saltstack/salt.git (synced 2025-04-17 10:10:20 +00:00)

Commit 4bed6e90c7: Merge remote-tracking branch 'origin/2018.3' into patch-3

67 changed files with 2900 additions and 2087 deletions

.ci/docs: 148 lines changed
@@ -1,69 +1,87 @@
pipeline {
agent {
label 'docs'
}
options {
timestamps()
ansiColor('xterm')
timeout(time: 2, unit: 'HOURS')
}
environment {
PYENV_ROOT = "/usr/local/pyenv"
PATH = "$PYENV_ROOT/bin:$PATH"
PY_COLORS = 1
SPHINXOPTS = "-W"
}
stages {
stage('github-pending') {
steps {
githubNotify credentialsId: 'test-jenkins-credentials',
description: 'Testing docs...',
status: 'PENDING',
context: "jenkins/pr/docs"
// Define the maximum time, in hours, that a test run should run for
def global_timeout = 2
def salt_target_branch = '2018.3'

properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '10')),
])

def shell_header

timeout(time: global_timeout, unit: 'HOURS') {
node('docs') {
ansiColor('xterm') {
timestamps {
try {
// Set the GH status even before cloning the repo
if (env.NODE_NAME.startsWith('jenkins-pr-')) {
stage('github-pending') {
githubNotify credentialsId: 'test-jenkins-credentials',
description: 'Testing docs...',
status: 'PENDING',
context: "jenkins/pr/docs"
}
shell_header = 'export PYENV_ROOT="/usr/local/pyenv"\nexport PATH="$PYENV_ROOT/bin:$PATH"'
} else {
shell_header = ''
}
// Checkout the repo
stage('checkout-scm') {
cleanWs notFailBuild: true
checkout scm
}

// Setup the kitchen required bundle
stage('Setup') {
sh shell_header + '''
eval "$(pyenv init -)"
pyenv --version
pyenv install --skip-existing 3.6.8
pyenv shell 3.6.8
python --version
pip install -U nox-py2
nox --version
'''
}

stage('Build') {
sh shell_header + '''
eval "$(pyenv init -)"
pyenv shell 3.6.8
nox -e docs
'''
archiveArtifacts artifacts: 'doc/doc-archive.tar.gz'
}
} catch (Exception e) {
currentBuild.result = 'FAILURE'
} finally {
cleanWs notFailBuild: true
if (currentBuild.resultIsBetterOrEqualTo('SUCCESS')) {
if (env.NODE_NAME.startsWith('jenkins-pr-')) {
githubNotify credentialsId: 'test-jenkins-credentials',
description: 'The docs job has passed',
status: 'SUCCESS',
context: "jenkins/pr/docs"
}
} else {
if (env.NODE_NAME.startsWith('jenkins-pr-')) {
githubNotify credentialsId: 'test-jenkins-credentials',
description: 'The docs job has failed',
status: 'FAILURE',
context: "jenkins/pr/docs"
}
try {
slackSend channel: "#jenkins-prod-pr",
color: '#FF0000',
message: "FAILED: PR-Job: '${env.JOB_NAME} [${env.BUILD_NUMBER}]' (${env.BUILD_URL})"
} catch (Exception e) {
sh 'echo Failed to send the Slack notification'
}
}
}
}
}
}
stage('setup') {
steps {
sh '''
eval "$(pyenv init -)"
pyenv --version
pyenv install --skip-existing 3.6.8
pyenv shell 3.6.8
python --version
pip install -U https://github.com/s0undt3ch/nox/archive/hotfix/py2.zip#egg=Nox==2018.10.17
nox --version
'''
}
}
stage('build') {
steps {
sh '''
eval "$(pyenv init -)"
pyenv shell 3.6.8
nox -e docs
'''
archiveArtifacts artifacts: 'doc/doc-archive.tar.gz'
}
}
}
post {
always {
cleanWs()
}
success {
githubNotify credentialsId: 'test-jenkins-credentials',
description: 'The docs job has passed',
status: 'SUCCESS',
context: "jenkins/pr/docs"
}
failure {
githubNotify credentialsId: 'test-jenkins-credentials',
description: 'The docs job has failed',
status: 'FAILURE',
context: "jenkins/pr/docs"
slackSend channel: "#jenkins-prod-pr",
color: '#FF0000',
message: "FAILED: PR-Job: '${env.JOB_NAME} [${env.BUILD_NUMBER}]' (${env.BUILD_URL})"
}
}
}

// vim: ft=groovy
@@ -4,111 +4,135 @@ def testrun_timeout = 6
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;

def distro_name = 'centos'
def distro_version = '6'
def python_version = 'py2'
def salt_target_branch = '2018.3'
def golden_images_branch = '2018.3'

properties([
[
$class: 'ScannerJobProperty', doNotScan: false
],
[
$class: 'RebuildSettings', autoRebuild: false, rebuildDisabled: false
],
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '10')),
parameters([
booleanParam(defaultValue: false, description: 'Run full test suite', name: 'runFull')
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
])
])

timeout(time: global_timeout, unit: 'HOURS') {
node('kitchen-slave') {
timestamps {
withCredentials([[$class: 'AmazonWebServicesCredentialsBinding',
accessKeyVariable: 'AWS_ACCESS_KEY_ID',
credentialsId: 'AWS_ACCESS_KEY_ID',
secretKeyVariable: 'AWS_SECRET_ACCESS_KEY']]) {
ansiColor('xterm') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
'NOX_ENV_NAME=runtests-zeromq',
'NOX_PASSTHROUGH_OPTS=--ssh-tests',
'NOX_ENABLE_FROM_FILENAMES=true',
'GOLDEN_IMAGES_CI_BRANCH=2018.3',
'CODECOV_FLAGS=centos6,py2',
'PATH=/usr/local/rbenv/shims/:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/root/bin:/root/bin',
'RBENV_VERSION=2.4.2',
'TEST_SUITE=py2',
'TEST_PLATFORM=centos-6',
'PY_COLORS=1',
"FORCE_FULL=${params.runFull}",
]) {
// Set the GH status even before cloning the repo
stage('github-pending') {
githubNotify credentialsId: 'test-jenkins-credentials',
description: "running ${TEST_SUITE}-${TEST_PLATFORM}...",
status: 'PENDING',
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
}
// Checkout the repo
stage('checkout-scm') {
cleanWs notFailBuild: true
checkout scm
}
try {
timestamps {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
'NOX_ENV_NAME=runtests-zeromq',
'NOX_ENABLE_FROM_FILENAMES=true',
'NOX_PASSTHROUGH_OPTS=--ssh-tests',
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version}",
'PATH=~/.rbenv/shims:/usr/local/rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/root/bin:/root/bin',
'RBENV_VERSION=2.4.2',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"FORCE_FULL=${params.runFull}",
]) {
// Set the GH status even before cloning the repo
if (env.NODE_NAME.startsWith('jenkins-pr-')) {
stage('github-pending') {
githubNotify credentialsId: 'test-jenkins-credentials',
description: "running ${TEST_SUITE}-${TEST_PLATFORM}...",
status: 'PENDING',
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
}
}
// Checkout the repo
stage('checkout-scm') {
cleanWs notFailBuild: true
checkout scm
}

// Setup the kitchen required bundle
stage('setup-bundle') {
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
try {
stage('run kitchen') {
timeout(time: testrun_timeout, unit: 'HOURS') {
withCredentials([
[$class: 'AmazonWebServicesCredentialsBinding', accessKeyVariable: 'AWS_ACCESS_KEY_ID', credentialsId: 'AWS_ACCESS_KEY_ID', secretKeyVariable: 'AWS_SECRET_ACCESS_KEY']
]) {
sshagent(credentials: ['jenkins-testing-ssh-key']) {
sh 'ssh-add ~/.ssh/jenkins-testing.pem'
sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM || bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM'
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM'
}
}
}
}
} finally {
stage('cleanup kitchen') {
script {
withCredentials([
[$class: 'AmazonWebServicesCredentialsBinding', accessKeyVariable: 'AWS_ACCESS_KEY_ID', credentialsId: 'AWS_ACCESS_KEY_ID', secretKeyVariable: 'AWS_SECRET_ACCESS_KEY']
]) {
sshagent(credentials: ['jenkins-testing-ssh-key']) {
sh 'ssh-add ~/.ssh/jenkins-testing.pem'
sh 'bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM'
}
}
}
archiveArtifacts artifacts: 'artifacts/*,artifacts/**/*'
}
stage('report code coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}"
fi
'''
}
}

stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 1-15 -n 1); echo "Sleeping $t seconds"; sleep $t
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?;"
'''
}
}
} catch (Exception e) {
currentBuild.result = 'FAILURE'
} finally {
try {
junit 'artifacts/xml-unittests-output/*.xml'
sshagent(credentials: ['jenkins-testing-ssh-key']) {
sh 'ssh-add ~/.ssh/jenkins-testing.pem || ssh-add ~/.ssh/kitchen.pem'
try {
timeout(time: testrun_timeout, unit: 'HOURS') {
stage('Converge VM') {
sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM'
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM'
}
}
}
} finally {
try {
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
}
archiveArtifacts artifacts: 'artifacts/*,artifacts/**/*'
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?;"
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}"
fi
'''
}
}
}
}
}
}
} catch (Exception e) {
currentBuild.result = 'FAILURE'
} finally {
cleanWs notFailBuild: true
if (currentBuild.resultIsBetterOrEqualTo('SUCCESS')) {
githubNotify credentialsId: 'test-jenkins-credentials',
description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has passed",
status: 'SUCCESS',
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
if (env.NODE_NAME.startsWith('jenkins-pr-')) {
githubNotify credentialsId: 'test-jenkins-credentials',
description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has passed",
status: 'SUCCESS',
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
}
} else {
githubNotify credentialsId: 'test-jenkins-credentials',
description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has failed",
status: 'FAILURE',
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
if (env.NODE_NAME.startsWith('jenkins-pr-')) {
githubNotify credentialsId: 'test-jenkins-credentials',
description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has failed",
status: 'FAILURE',
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
}
try {
slackSend channel: "#jenkins-prod-pr",
color: '#FF0000',
@@ -125,4 +149,4 @@ timeout(time: global_timeout, unit: 'HOURS') {
}
}

// vi: ft=groovy
// vim: ft=groovy
@@ -4,111 +4,135 @@ def testrun_timeout = 6
|
|||
// hour to allow for artifacts to be downloaded, if possible.
|
||||
def global_timeout = testrun_timeout + 1;
|
||||
|
||||
def distro_name = 'centos'
|
||||
def distro_version = '7'
|
||||
def python_version = 'py2'
|
||||
def salt_target_branch = '2018.3'
|
||||
def golden_images_branch = '2018.3'
|
||||
|
||||
properties([
|
||||
[
|
||||
$class: 'ScannerJobProperty', doNotScan: false
|
||||
],
|
||||
[
|
||||
$class: 'RebuildSettings', autoRebuild: false, rebuildDisabled: false
|
||||
],
|
||||
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '10')),
|
||||
parameters([
|
||||
booleanParam(defaultValue: false, description: 'Run full test suite', name: 'runFull')
|
||||
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
|
||||
])
|
||||
])
|
||||
|
||||
timeout(time: global_timeout, unit: 'HOURS') {
|
||||
node('kitchen-slave') {
|
||||
timestamps {
|
||||
withCredentials([[$class: 'AmazonWebServicesCredentialsBinding',
|
||||
accessKeyVariable: 'AWS_ACCESS_KEY_ID',
|
||||
credentialsId: 'AWS_ACCESS_KEY_ID',
|
||||
secretKeyVariable: 'AWS_SECRET_ACCESS_KEY']]) {
|
||||
ansiColor('xterm') {
|
||||
withEnv([
|
||||
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
|
||||
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
|
||||
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
|
||||
'NOX_ENV_NAME=runtests-zeromq',
|
||||
'NOX_PASSTHROUGH_OPTS=--ssh-tests',
|
||||
'NOX_ENABLE_FROM_FILENAMES=true',
|
||||
'GOLDEN_IMAGES_CI_BRANCH=2018.3',
|
||||
'CODECOV_FLAGS=centos7,py2',
|
||||
'PATH=/usr/local/rbenv/shims/:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/root/bin:/root/bin',
|
||||
'RBENV_VERSION=2.4.2',
|
||||
'TEST_SUITE=py2',
|
||||
'TEST_PLATFORM=centos-7',
|
||||
'PY_COLORS=1',
|
||||
"FORCE_FULL=${params.runFull}",
|
||||
]) {
|
||||
// Set the GH status even before cloning the repo
|
||||
stage('github-pending') {
|
||||
githubNotify credentialsId: 'test-jenkins-credentials',
|
||||
description: "running ${TEST_SUITE}-${TEST_PLATFORM}...",
|
||||
status: 'PENDING',
|
||||
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
|
||||
}
|
||||
// Checkout the repo
|
||||
stage('checkout-scm') {
|
||||
cleanWs notFailBuild: true
|
||||
checkout scm
|
||||
}
|
||||
try {
|
||||
timestamps {
|
||||
withEnv([
|
||||
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
|
||||
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
|
||||
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
|
||||
'NOX_ENV_NAME=runtests-zeromq',
|
||||
'NOX_ENABLE_FROM_FILENAMES=true',
|
||||
'NOX_PASSTHROUGH_OPTS=--ssh-tests',
|
||||
"SALT_TARGET_BRANCH=${salt_target_branch}",
|
||||
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
|
||||
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version}",
|
||||
'PATH=~/.rbenv/shims:/usr/local/rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/root/bin:/root/bin',
|
||||
'RBENV_VERSION=2.4.2',
|
||||
"TEST_SUITE=${python_version}",
|
||||
"TEST_PLATFORM=${distro_name}-${distro_version}",
|
||||
"FORCE_FULL=${params.runFull}",
|
||||
]) {
|
||||
// Set the GH status even before cloning the repo
|
||||
if (env.NODE_NAME.startsWith('jenkins-pr-')) {
|
||||
stage('github-pending') {
|
||||
githubNotify credentialsId: 'test-jenkins-credentials',
|
||||
description: "running ${TEST_SUITE}-${TEST_PLATFORM}...",
|
||||
status: 'PENDING',
|
||||
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
|
||||
}
|
||||
}
|
||||
// Checkout the repo
|
||||
stage('checkout-scm') {
|
||||
cleanWs notFailBuild: true
|
||||
checkout scm
|
||||
}
|
||||
|
||||
// Setup the kitchen required bundle
|
||||
stage('setup-bundle') {
|
||||
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
|
||||
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
|
||||
}
|
||||
try {
|
||||
stage('run kitchen') {
|
||||
timeout(time: testrun_timeout, unit: 'HOURS') {
|
||||
withCredentials([
|
||||
[$class: 'AmazonWebServicesCredentialsBinding', accessKeyVariable: 'AWS_ACCESS_KEY_ID', credentialsId: 'AWS_ACCESS_KEY_ID', secretKeyVariable: 'AWS_SECRET_ACCESS_KEY']
|
||||
]) {
|
||||
sshagent(credentials: ['jenkins-testing-ssh-key']) {
|
||||
sh 'ssh-add ~/.ssh/jenkins-testing.pem'
|
||||
sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM || bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM'
|
||||
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM'
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} finally {
|
||||
stage('cleanup kitchen') {
|
||||
script {
|
||||
withCredentials([
|
||||
[$class: 'AmazonWebServicesCredentialsBinding', accessKeyVariable: 'AWS_ACCESS_KEY_ID', credentialsId: 'AWS_ACCESS_KEY_ID', secretKeyVariable: 'AWS_SECRET_ACCESS_KEY']
|
||||
]) {
|
||||
sshagent(credentials: ['jenkins-testing-ssh-key']) {
|
||||
sh 'ssh-add ~/.ssh/jenkins-testing.pem'
|
||||
sh 'bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM'
|
||||
}
|
||||
}
|
||||
}
|
||||
archiveArtifacts artifacts: 'artifacts/*,artifacts/**/*'
|
||||
}
|
||||
stage('report code coverage') {
|
||||
script {
|
||||
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
|
||||
sh '''
|
||||
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
|
||||
curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}"
|
||||
fi
|
||||
'''
|
||||
}
|
||||
}
|
||||
|
||||
stage('Create VM') {
|
||||
retry(3) {
|
||||
sh '''
|
||||
t=$(shuf -i 1-15 -n 1); echo "Sleeping $t seconds"; sleep $t
|
||||
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?;"
|
||||
'''
|
||||
}
|
||||
}
|
||||
} catch (Exception e) {
|
||||
currentBuild.result = 'FAILURE'
|
||||
} finally {
|
||||
try {
|
||||
junit 'artifacts/xml-unittests-output/*.xml'
|
||||
sshagent(credentials: ['jenkins-testing-ssh-key']) {
|
||||
sh 'ssh-add ~/.ssh/jenkins-testing.pem || ssh-add ~/.ssh/kitchen.pem'
|
||||
try {
|
||||
timeout(time: testrun_timeout, unit: 'HOURS') {
|
||||
stage('Converge VM') {
|
||||
sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM'
|
||||
}
|
||||
stage('Run Tests') {
|
||||
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
|
||||
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM'
|
||||
}
|
||||
}
|
||||
}
|
||||
} finally {
|
||||
try {
|
||||
stage('Download Artefacts') {
|
||||
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
|
||||
sh '''
|
||||
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
|
||||
'''
|
||||
}
|
||||
}
|
||||
archiveArtifacts artifacts: 'artifacts/*,artifacts/**/*'
|
||||
junit 'artifacts/xml-unittests-output/*.xml'
|
||||
} finally {
|
||||
stage('Cleanup') {
|
||||
sh '''
|
||||
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?;"
|
||||
'''
|
||||
}
|
||||
stage('Upload Coverage') {
|
||||
script {
|
||||
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
|
||||
sh '''
|
||||
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
|
||||
curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}"
|
||||
fi
|
||||
'''
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (Exception e) {
|
||||
currentBuild.result = 'FAILURE'
|
||||
} finally {
|
||||
cleanWs notFailBuild: true
|
||||
if (currentBuild.resultIsBetterOrEqualTo('SUCCESS')) {
|
||||
githubNotify credentialsId: 'test-jenkins-credentials',
|
||||
description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has passed",
|
||||
status: 'SUCCESS',
|
||||
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
|
||||
if (env.NODE_NAME.startsWith('jenkins-pr-')) {
|
||||
githubNotify credentialsId: 'test-jenkins-credentials',
|
||||
description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has passed",
|
||||
status: 'SUCCESS',
|
||||
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
|
||||
}
|
||||
} else {
|
||||
githubNotify credentialsId: 'test-jenkins-credentials',
|
||||
description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has failed",
|
||||
status: 'FAILURE',
|
||||
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
|
||||
if (env.NODE_NAME.startsWith('jenkins-pr-')) {
|
||||
githubNotify credentialsId: 'test-jenkins-credentials',
|
||||
description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has failed",
|
||||
status: 'FAILURE',
|
||||
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
|
||||
}
|
||||
try {
|
||||
slackSend channel: "#jenkins-prod-pr",
|
||||
color: '#FF0000',
|
||||
@@ -125,4 +149,4 @@ timeout(time: global_timeout, unit: 'HOURS') {
}
}

// vi: ft=groovy
// vim: ft=groovy
@@ -4,111 +4,135 @@ def testrun_timeout = 6
|
|||
// hour to allow for artifacts to be downloaded, if possible.
|
||||
def global_timeout = testrun_timeout + 1;
|
||||
|
||||
def distro_name = 'centos'
|
||||
def distro_version = '7'
|
||||
def python_version = 'py3'
|
||||
def salt_target_branch = '2018.3'
|
||||
def golden_images_branch = '2018.3'
|
||||
|
||||
properties([
|
||||
[
|
||||
$class: 'ScannerJobProperty', doNotScan: false
|
||||
],
|
||||
[
|
||||
$class: 'RebuildSettings', autoRebuild: false, rebuildDisabled: false
|
||||
],
|
||||
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '10')),
|
||||
parameters([
|
||||
booleanParam(defaultValue: false, description: 'Run full test suite', name: 'runFull')
|
||||
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
|
||||
])
|
||||
])
|
||||
|
||||
timeout(time: global_timeout, unit: 'HOURS') {
|
||||
node('kitchen-slave') {
|
||||
timestamps {
|
||||
withCredentials([[$class: 'AmazonWebServicesCredentialsBinding',
|
||||
accessKeyVariable: 'AWS_ACCESS_KEY_ID',
|
||||
credentialsId: 'AWS_ACCESS_KEY_ID',
|
||||
secretKeyVariable: 'AWS_SECRET_ACCESS_KEY']]) {
|
||||
ansiColor('xterm') {
|
||||
withEnv([
|
||||
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
|
||||
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
|
||||
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
|
||||
'NOX_ENV_NAME=runtests-zeromq',
|
||||
'NOX_PASSTHROUGH_OPTS=--ssh-tests',
|
||||
'NOX_ENABLE_FROM_FILENAMES=true',
|
||||
'GOLDEN_IMAGES_CI_BRANCH=2018.3',
|
||||
'CODECOV_FLAGS=centos7,py3',
|
||||
'PATH=/usr/local/rbenv/shims/:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/root/bin:/root/bin',
|
||||
'RBENV_VERSION=2.4.2',
|
||||
'TEST_SUITE=py3',
|
||||
'TEST_PLATFORM=centos-7',
|
||||
'PY_COLORS=1',
|
||||
"FORCE_FULL=${params.runFull}",
|
||||
]) {
|
||||
// Set the GH status even before cloning the repo
|
||||
stage('github-pending') {
|
||||
githubNotify credentialsId: 'test-jenkins-credentials',
|
||||
description: "running ${TEST_SUITE}-${TEST_PLATFORM}...",
|
||||
status: 'PENDING',
|
||||
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
|
||||
}
|
||||
// Checkout the repo
|
||||
stage('checkout-scm') {
|
||||
cleanWs notFailBuild: true
|
||||
checkout scm
|
||||
}
|
||||
try {
|
||||
timestamps {
|
||||
withEnv([
|
||||
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
|
||||
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
|
||||
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
|
||||
'NOX_ENV_NAME=runtests-zeromq',
|
||||
'NOX_ENABLE_FROM_FILENAMES=true',
|
||||
'NOX_PASSTHROUGH_OPTS=--ssh-tests',
|
||||
"SALT_TARGET_BRANCH=${salt_target_branch}",
|
||||
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
|
||||
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version}",
|
||||
'PATH=~/.rbenv/shims:/usr/local/rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/root/bin:/root/bin',
|
||||
'RBENV_VERSION=2.4.2',
|
||||
"TEST_SUITE=${python_version}",
|
||||
"TEST_PLATFORM=${distro_name}-${distro_version}",
|
||||
"FORCE_FULL=${params.runFull}",
|
||||
]) {
|
||||
// Set the GH status even before cloning the repo
|
||||
if (env.NODE_NAME.startsWith('jenkins-pr-')) {
|
||||
stage('github-pending') {
|
||||
githubNotify credentialsId: 'test-jenkins-credentials',
|
||||
description: "running ${TEST_SUITE}-${TEST_PLATFORM}...",
|
||||
status: 'PENDING',
|
||||
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
|
||||
}
|
||||
}
|
||||
// Checkout the repo
|
||||
stage('checkout-scm') {
|
||||
cleanWs notFailBuild: true
|
||||
checkout scm
|
||||
}
|
||||
|
||||
// Setup the kitchen required bundle
|
||||
stage('setup-bundle') {
|
||||
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
|
||||
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
|
||||
}
|
||||
try {
|
||||
stage('run kitchen') {
|
||||
timeout(time: testrun_timeout, unit: 'HOURS') {
|
||||
withCredentials([
|
||||
[$class: 'AmazonWebServicesCredentialsBinding', accessKeyVariable: 'AWS_ACCESS_KEY_ID', credentialsId: 'AWS_ACCESS_KEY_ID', secretKeyVariable: 'AWS_SECRET_ACCESS_KEY']
|
||||
]) {
|
||||
sshagent(credentials: ['jenkins-testing-ssh-key']) {
|
||||
sh 'ssh-add ~/.ssh/jenkins-testing.pem'
|
||||
sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM || bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM'
|
||||
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM'
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} finally {
|
||||
stage('cleanup kitchen') {
|
||||
script {
|
||||
withCredentials([
|
||||
[$class: 'AmazonWebServicesCredentialsBinding', accessKeyVariable: 'AWS_ACCESS_KEY_ID', credentialsId: 'AWS_ACCESS_KEY_ID', secretKeyVariable: 'AWS_SECRET_ACCESS_KEY']
|
||||
]) {
|
||||
sshagent(credentials: ['jenkins-testing-ssh-key']) {
|
||||
sh 'ssh-add ~/.ssh/jenkins-testing.pem'
|
||||
sh 'bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM'
|
||||
}
|
||||
}
|
||||
}
|
||||
archiveArtifacts artifacts: 'artifacts/*,artifacts/**/*'
|
||||
}
|
||||
stage('report code coverage') {
|
||||
script {
|
||||
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
|
||||
sh '''
|
||||
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
|
||||
curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}"
|
||||
fi
|
||||
'''
|
||||
}
|
||||
}
|
||||
|
||||
stage('Create VM') {
|
||||
retry(3) {
|
||||
sh '''
|
||||
t=$(shuf -i 1-15 -n 1); echo "Sleeping $t seconds"; sleep $t
|
||||
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?;"
|
||||
'''
|
||||
}
|
||||
}
|
||||
} catch (Exception e) {
|
||||
currentBuild.result = 'FAILURE'
|
||||
} finally {
|
||||
try {
|
||||
junit 'artifacts/xml-unittests-output/*.xml'
|
||||
sshagent(credentials: ['jenkins-testing-ssh-key']) {
|
||||
sh 'ssh-add ~/.ssh/jenkins-testing.pem || ssh-add ~/.ssh/kitchen.pem'
|
||||
try {
|
||||
timeout(time: testrun_timeout, unit: 'HOURS') {
|
||||
stage('Converge VM') {
|
||||
sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM'
|
||||
}
|
||||
stage('Run Tests') {
|
||||
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
|
||||
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM'
|
||||
}
|
||||
}
|
||||
}
|
||||
} finally {
|
||||
try {
|
||||
stage('Download Artefacts') {
|
||||
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
|
||||
sh '''
|
||||
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
|
||||
'''
|
||||
}
|
||||
}
|
||||
archiveArtifacts artifacts: 'artifacts/*,artifacts/**/*'
|
||||
junit 'artifacts/xml-unittests-output/*.xml'
|
||||
} finally {
|
||||
stage('Cleanup') {
|
||||
sh '''
|
||||
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?;"
|
||||
'''
|
||||
}
|
||||
stage('Upload Coverage') {
|
||||
script {
|
||||
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
|
||||
sh '''
|
||||
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
|
||||
curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}"
|
||||
fi
|
||||
'''
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (Exception e) {
|
||||
currentBuild.result = 'FAILURE'
|
||||
} finally {
|
||||
cleanWs notFailBuild: true
|
||||
if (currentBuild.resultIsBetterOrEqualTo('SUCCESS')) {
|
||||
githubNotify credentialsId: 'test-jenkins-credentials',
|
||||
description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has passed",
|
||||
status: 'SUCCESS',
|
||||
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
|
||||
if (env.NODE_NAME.startsWith('jenkins-pr-')) {
|
||||
githubNotify credentialsId: 'test-jenkins-credentials',
|
||||
description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has passed",
|
||||
status: 'SUCCESS',
|
||||
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
|
||||
}
|
||||
} else {
|
||||
githubNotify credentialsId: 'test-jenkins-credentials',
|
||||
description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has failed",
|
||||
status: 'FAILURE',
|
||||
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
|
||||
if (env.NODE_NAME.startsWith('jenkins-pr-')) {
|
||||
githubNotify credentialsId: 'test-jenkins-credentials',
|
||||
description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has failed",
|
||||
status: 'FAILURE',
|
||||
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
|
||||
}
|
||||
try {
|
||||
slackSend channel: "#jenkins-prod-pr",
|
||||
color: '#FF0000',
|
||||
@@ -125,4 +149,4 @@ timeout(time: global_timeout, unit: 'HOURS') {
}
}

// vi: ft=groovy
// vim: ft=groovy
@@ -4,111 +4,135 @@ def testrun_timeout = 6
|
|||
// hour to allow for artifacts to be downloaded, if possible.
|
||||
def global_timeout = testrun_timeout + 1;
|
||||
|
||||
def distro_name = 'debian'
|
||||
def distro_version = '8'
|
||||
def python_version = 'py2'
|
||||
def salt_target_branch = '2018.3'
|
||||
def golden_images_branch = '2018.3'
|
||||
|
||||
properties([
|
||||
[
|
||||
$class: 'ScannerJobProperty', doNotScan: false
|
||||
],
|
||||
[
|
||||
$class: 'RebuildSettings', autoRebuild: false, rebuildDisabled: false
|
||||
],
|
||||
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '10')),
|
||||
parameters([
|
||||
booleanParam(defaultValue: false, description: 'Run full test suite', name: 'runFull')
|
||||
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
|
||||
])
|
||||
])
|
||||
|
||||
timeout(time: global_timeout, unit: 'HOURS') {
|
||||
node('kitchen-slave') {
|
||||
timestamps {
|
||||
withCredentials([[$class: 'AmazonWebServicesCredentialsBinding',
|
||||
accessKeyVariable: 'AWS_ACCESS_KEY_ID',
|
||||
credentialsId: 'AWS_ACCESS_KEY_ID',
|
||||
secretKeyVariable: 'AWS_SECRET_ACCESS_KEY']]) {
|
||||
ansiColor('xterm') {
|
||||
withEnv([
|
||||
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
|
||||
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
|
||||
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
|
||||
'NOX_ENV_NAME=runtests-zeromq',
|
||||
'NOX_PASSTHROUGH_OPTS=--ssh-tests',
|
||||
'NOX_ENABLE_FROM_FILENAMES=true',
|
||||
'GOLDEN_IMAGES_CI_BRANCH=2018.3',
|
||||
'CODECOV_FLAGS=debian8,py2',
|
||||
'PATH=/usr/local/rbenv/shims/:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/root/bin:/root/bin',
|
||||
'RBENV_VERSION=2.4.2',
|
||||
'TEST_SUITE=py2',
|
||||
'TEST_PLATFORM=debian-8',
|
||||
'PY_COLORS=1',
|
||||
"FORCE_FULL=${params.runFull}",
|
||||
]) {
|
||||
// Set the GH status even before cloning the repo
|
||||
stage('github-pending') {
|
||||
githubNotify credentialsId: 'test-jenkins-credentials',
|
||||
description: "running ${TEST_SUITE}-${TEST_PLATFORM}...",
|
||||
status: 'PENDING',
|
||||
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
|
||||
}
|
||||
// Checkout the repo
|
||||
stage('checkout-scm') {
|
||||
cleanWs notFailBuild: true
|
||||
checkout scm
|
||||
}
|
||||
try {
|
||||
timestamps {
|
||||
withEnv([
|
||||
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
|
||||
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
|
||||
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
|
||||
'NOX_ENV_NAME=runtests-zeromq',
|
||||
'NOX_ENABLE_FROM_FILENAMES=true',
|
||||
'NOX_PASSTHROUGH_OPTS=--ssh-tests',
|
||||
"SALT_TARGET_BRANCH=${salt_target_branch}",
|
||||
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
|
||||
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version}",
|
||||
'PATH=~/.rbenv/shims:/usr/local/rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/root/bin:/root/bin',
|
||||
'RBENV_VERSION=2.4.2',
|
||||
"TEST_SUITE=${python_version}",
|
||||
"TEST_PLATFORM=${distro_name}-${distro_version}",
|
||||
"FORCE_FULL=${params.runFull}",
|
||||
]) {
|
||||
// Set the GH status even before cloning the repo
|
||||
if (env.NODE_NAME.startsWith('jenkins-pr-')) {
|
||||
stage('github-pending') {
|
||||
githubNotify credentialsId: 'test-jenkins-credentials',
|
||||
description: "running ${TEST_SUITE}-${TEST_PLATFORM}...",
|
||||
status: 'PENDING',
|
||||
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
|
||||
}
|
||||
}
|
||||
// Checkout the repo
|
||||
stage('checkout-scm') {
|
||||
cleanWs notFailBuild: true
|
||||
checkout scm
|
||||
}
|
||||
|
||||
// Setup the kitchen required bundle
|
||||
stage('setup-bundle') {
|
||||
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
|
||||
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
|
||||
}
|
||||
try {
|
||||
stage('run kitchen') {
|
||||
timeout(time: testrun_timeout, unit: 'HOURS') {
|
||||
withCredentials([
|
||||
[$class: 'AmazonWebServicesCredentialsBinding', accessKeyVariable: 'AWS_ACCESS_KEY_ID', credentialsId: 'AWS_ACCESS_KEY_ID', secretKeyVariable: 'AWS_SECRET_ACCESS_KEY']
|
||||
]) {
|
||||
sshagent(credentials: ['jenkins-testing-ssh-key']) {
|
||||
sh 'ssh-add ~/.ssh/jenkins-testing.pem'
|
||||
sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM || bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM'
|
||||
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM'
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} finally {
|
||||
stage('cleanup kitchen') {
|
||||
script {
|
||||
withCredentials([
|
||||
[$class: 'AmazonWebServicesCredentialsBinding', accessKeyVariable: 'AWS_ACCESS_KEY_ID', credentialsId: 'AWS_ACCESS_KEY_ID', secretKeyVariable: 'AWS_SECRET_ACCESS_KEY']
|
||||
]) {
|
||||
sshagent(credentials: ['jenkins-testing-ssh-key']) {
|
||||
sh 'ssh-add ~/.ssh/jenkins-testing.pem'
|
||||
sh 'bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM'
|
||||
}
|
||||
}
|
||||
}
|
||||
archiveArtifacts artifacts: 'artifacts/*,artifacts/**/*'
|
||||
}
|
||||
stage('report code coverage') {
|
||||
script {
|
||||
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
|
||||
sh '''
|
||||
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
|
||||
curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}"
|
||||
fi
|
||||
'''
|
||||
}
|
||||
}
|
||||
|
||||
stage('Create VM') {
|
||||
retry(3) {
|
||||
sh '''
|
||||
t=$(shuf -i 1-15 -n 1); echo "Sleeping $t seconds"; sleep $t
|
||||
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?;"
|
||||
'''
|
||||
}
|
||||
}
|
||||
} catch (Exception e) {
|
||||
currentBuild.result = 'FAILURE'
|
||||
} finally {
|
||||
try {
|
||||
junit 'artifacts/xml-unittests-output/*.xml'
|
||||
sshagent(credentials: ['jenkins-testing-ssh-key']) {
|
||||
sh 'ssh-add ~/.ssh/jenkins-testing.pem || ssh-add ~/.ssh/kitchen.pem'
|
||||
try {
|
||||
timeout(time: testrun_timeout, unit: 'HOURS') {
|
||||
stage('Converge VM') {
|
||||
sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM'
|
||||
}
|
||||
stage('Run Tests') {
|
||||
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
|
||||
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM'
|
||||
}
|
||||
}
|
||||
}
|
||||
} finally {
|
||||
try {
|
||||
stage('Download Artefacts') {
|
||||
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
|
||||
sh '''
|
||||
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
|
||||
'''
|
||||
}
|
||||
}
|
||||
archiveArtifacts artifacts: 'artifacts/*,artifacts/**/*'
|
||||
junit 'artifacts/xml-unittests-output/*.xml'
|
||||
} finally {
|
||||
stage('Cleanup') {
|
||||
sh '''
|
||||
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?;"
|
||||
'''
|
||||
}
|
||||
stage('Upload Coverage') {
|
||||
script {
|
||||
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
|
||||
sh '''
|
||||
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
|
||||
curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}"
|
||||
fi
|
||||
'''
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (Exception e) {
|
||||
currentBuild.result = 'FAILURE'
|
||||
} finally {
|
||||
cleanWs notFailBuild: true
|
||||
if (currentBuild.resultIsBetterOrEqualTo('SUCCESS')) {
|
||||
githubNotify credentialsId: 'test-jenkins-credentials',
|
||||
description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has passed",
|
||||
status: 'SUCCESS',
|
||||
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
|
||||
if (env.NODE_NAME.startsWith('jenkins-pr-')) {
|
||||
githubNotify credentialsId: 'test-jenkins-credentials',
|
||||
description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has passed",
|
||||
status: 'SUCCESS',
|
||||
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
|
||||
}
|
||||
} else {
|
||||
githubNotify credentialsId: 'test-jenkins-credentials',
|
||||
description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has failed",
|
||||
status: 'FAILURE',
|
||||
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
|
||||
if (env.NODE_NAME.startsWith('jenkins-pr-')) {
|
||||
githubNotify credentialsId: 'test-jenkins-credentials',
|
||||
description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has failed",
|
||||
status: 'FAILURE',
|
||||
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
|
||||
}
|
||||
try {
|
||||
slackSend channel: "#jenkins-prod-pr",
|
||||
color: '#FF0000',
|
||||
@@ -125,4 +149,4 @@ timeout(time: global_timeout, unit: 'HOURS') {
}
}

// vi: ft=groovy
// vim: ft=groovy
@@ -4,111 +4,135 @@ def testrun_timeout = 6
|
|||
// hour to allow for artifacts to be downloaded, if possible.
|
||||
def global_timeout = testrun_timeout + 1;
|
||||
|
||||
def distro_name = 'debian'
|
||||
def distro_version = '8'
|
||||
def python_version = 'py3'
|
||||
def salt_target_branch = '2018.3'
|
||||
def golden_images_branch = '2018.3'
|
||||
|
||||
properties([
|
||||
[
|
||||
$class: 'ScannerJobProperty', doNotScan: false
|
||||
],
|
||||
[
|
||||
$class: 'RebuildSettings', autoRebuild: false, rebuildDisabled: false
|
||||
],
|
||||
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '10')),
|
||||
parameters([
|
||||
booleanParam(defaultValue: false, description: 'Run full test suite', name: 'runFull')
|
||||
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
|
||||
])
|
||||
])
|
||||
|
||||
timeout(time: global_timeout, unit: 'HOURS') {
|
||||
node('kitchen-slave') {
|
||||
timestamps {
|
||||
withCredentials([[$class: 'AmazonWebServicesCredentialsBinding',
|
||||
accessKeyVariable: 'AWS_ACCESS_KEY_ID',
|
||||
credentialsId: 'AWS_ACCESS_KEY_ID',
|
||||
secretKeyVariable: 'AWS_SECRET_ACCESS_KEY']]) {
|
||||
ansiColor('xterm') {
|
||||
withEnv([
|
||||
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
|
||||
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
|
||||
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
|
||||
'NOX_ENV_NAME=runtests-zeromq',
|
||||
'NOX_PASSTHROUGH_OPTS=--ssh-tests',
|
||||
'NOX_ENABLE_FROM_FILENAMES=true',
|
||||
'GOLDEN_IMAGES_CI_BRANCH=2018.3',
|
||||
'CODECOV_FLAGS=debian8,py3',
|
||||
'PATH=/usr/local/rbenv/shims/:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/root/bin:/root/bin',
|
||||
'RBENV_VERSION=2.4.2',
|
||||
'TEST_SUITE=py3',
|
||||
'TEST_PLATFORM=debian-8',
|
||||
'PY_COLORS=1',
|
||||
"FORCE_FULL=${params.runFull}",
|
||||
]) {
|
||||
// Set the GH status even before cloning the repo
|
||||
stage('github-pending') {
|
||||
githubNotify credentialsId: 'test-jenkins-credentials',
|
||||
description: "running ${TEST_SUITE}-${TEST_PLATFORM}...",
|
||||
status: 'PENDING',
|
||||
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
|
||||
}
|
||||
// Checkout the repo
|
||||
stage('checkout-scm') {
|
||||
cleanWs notFailBuild: true
|
||||
checkout scm
|
||||
}
|
||||
try {
|
||||
timestamps {
|
||||
withEnv([
|
||||
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
|
||||
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
|
||||
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
|
||||
'NOX_ENV_NAME=runtests-zeromq',
|
||||
'NOX_ENABLE_FROM_FILENAMES=true',
|
||||
'NOX_PASSTHROUGH_OPTS=--ssh-tests',
|
||||
"SALT_TARGET_BRANCH=${salt_target_branch}",
|
||||
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
|
||||
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version}",
|
||||
'PATH=~/.rbenv/shims:/usr/local/rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/root/bin:/root/bin',
|
||||
'RBENV_VERSION=2.4.2',
|
||||
"TEST_SUITE=${python_version}",
|
||||
"TEST_PLATFORM=${distro_name}-${distro_version}",
|
||||
"FORCE_FULL=${params.runFull}",
|
||||
]) {
|
||||
// Set the GH status even before cloning the repo
|
||||
if (env.NODE_NAME.startsWith('jenkins-pr-')) {
|
||||
stage('github-pending') {
|
||||
githubNotify credentialsId: 'test-jenkins-credentials',
|
||||
description: "running ${TEST_SUITE}-${TEST_PLATFORM}...",
|
||||
status: 'PENDING',
|
||||
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
|
||||
}
|
||||
}
|
||||
// Checkout the repo
|
||||
stage('checkout-scm') {
|
||||
cleanWs notFailBuild: true
|
||||
checkout scm
|
||||
}
|
||||
|
||||
// Setup the kitchen required bundle
|
||||
stage('setup-bundle') {
|
||||
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
|
||||
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
|
||||
}
|
||||
try {
|
||||
stage('run kitchen') {
|
||||
timeout(time: testrun_timeout, unit: 'HOURS') {
|
||||
withCredentials([
|
||||
[$class: 'AmazonWebServicesCredentialsBinding', accessKeyVariable: 'AWS_ACCESS_KEY_ID', credentialsId: 'AWS_ACCESS_KEY_ID', secretKeyVariable: 'AWS_SECRET_ACCESS_KEY']
|
||||
]) {
|
||||
sshagent(credentials: ['jenkins-testing-ssh-key']) {
|
||||
sh 'ssh-add ~/.ssh/jenkins-testing.pem'
|
||||
sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM || bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM'
|
||||
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM'
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} finally {
|
||||
stage('cleanup kitchen') {
|
||||
script {
|
||||
withCredentials([
|
||||
[$class: 'AmazonWebServicesCredentialsBinding', accessKeyVariable: 'AWS_ACCESS_KEY_ID', credentialsId: 'AWS_ACCESS_KEY_ID', secretKeyVariable: 'AWS_SECRET_ACCESS_KEY']
|
||||
]) {
|
||||
sshagent(credentials: ['jenkins-testing-ssh-key']) {
|
||||
sh 'ssh-add ~/.ssh/jenkins-testing.pem'
|
||||
sh 'bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM'
|
||||
}
|
||||
}
|
||||
}
|
||||
archiveArtifacts artifacts: 'artifacts/*,artifacts/**/*'
|
||||
}
|
||||
stage('report code coverage') {
|
||||
script {
|
||||
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
|
||||
sh '''
|
||||
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
|
||||
curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}"
|
||||
fi
|
||||
'''
|
||||
}
|
||||
}
|
||||
|
||||
stage('Create VM') {
|
||||
retry(3) {
|
||||
sh '''
|
||||
t=$(shuf -i 1-15 -n 1); echo "Sleeping $t seconds"; sleep $t
|
||||
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?;"
|
||||
'''
|
||||
}
|
||||
}
|
||||
} catch (Exception e) {
|
||||
currentBuild.result = 'FAILURE'
|
||||
} finally {
|
||||
try {
|
||||
junit 'artifacts/xml-unittests-output/*.xml'
|
||||
sshagent(credentials: ['jenkins-testing-ssh-key']) {
|
||||
sh 'ssh-add ~/.ssh/jenkins-testing.pem || ssh-add ~/.ssh/kitchen.pem'
|
||||
try {
|
||||
timeout(time: testrun_timeout, unit: 'HOURS') {
|
||||
stage('Converge VM') {
|
||||
sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM'
|
||||
}
|
||||
stage('Run Tests') {
|
||||
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
|
||||
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM'
|
||||
}
|
||||
}
|
||||
}
|
||||
} finally {
|
||||
try {
|
||||
stage('Download Artefacts') {
|
||||
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
|
||||
sh '''
|
||||
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
|
||||
'''
|
||||
}
|
||||
}
|
||||
archiveArtifacts artifacts: 'artifacts/*,artifacts/**/*'
|
||||
junit 'artifacts/xml-unittests-output/*.xml'
|
||||
} finally {
|
||||
stage('Cleanup') {
|
||||
sh '''
|
||||
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?;"
|
||||
'''
|
||||
}
|
||||
stage('Upload Coverage') {
|
||||
script {
|
||||
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
|
||||
sh '''
|
||||
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
|
||||
curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}"
|
||||
fi
|
||||
'''
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (Exception e) {
|
||||
currentBuild.result = 'FAILURE'
|
||||
} finally {
|
||||
cleanWs notFailBuild: true
|
||||
if (currentBuild.resultIsBetterOrEqualTo('SUCCESS')) {
|
||||
githubNotify credentialsId: 'test-jenkins-credentials',
|
||||
description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has passed",
|
||||
status: 'SUCCESS',
|
||||
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
|
||||
if (env.NODE_NAME.startsWith('jenkins-pr-')) {
|
||||
githubNotify credentialsId: 'test-jenkins-credentials',
|
||||
description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has passed",
|
||||
status: 'SUCCESS',
|
||||
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
|
||||
}
|
||||
} else {
|
||||
githubNotify credentialsId: 'test-jenkins-credentials',
|
||||
description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has failed",
|
||||
status: 'FAILURE',
|
||||
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
|
||||
if (env.NODE_NAME.startsWith('jenkins-pr-')) {
|
||||
githubNotify credentialsId: 'test-jenkins-credentials',
|
||||
description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has failed",
|
||||
status: 'FAILURE',
|
||||
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
|
||||
}
|
||||
try {
|
||||
slackSend channel: "#jenkins-prod-pr",
|
||||
color: '#FF0000',
|
||||
@@ -125,4 +149,4 @@ timeout(time: global_timeout, unit: 'HOURS') {
}
}

// vi: ft=groovy
// vim: ft=groovy
@@ -4,111 +4,135 @@ def testrun_timeout = 6
|
|||
// hour to allow for artifacts to be downloaded, if possible.
|
||||
def global_timeout = testrun_timeout + 1;
|
||||
|
||||
def distro_name = 'debian'
|
||||
def distro_version = '9'
|
||||
def python_version = 'py2'
|
||||
def salt_target_branch = '2018.3'
|
||||
def golden_images_branch = '2018.3'
|
||||
|
||||
properties([
|
||||
[
|
||||
$class: 'ScannerJobProperty', doNotScan: false
|
||||
],
|
||||
[
|
||||
$class: 'RebuildSettings', autoRebuild: false, rebuildDisabled: false
|
||||
],
|
||||
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '10')),
|
||||
parameters([
|
||||
booleanParam(defaultValue: false, description: 'Run full test suite', name: 'runFull')
|
||||
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
|
||||
])
|
||||
])
|
||||
|
||||
timeout(time: global_timeout, unit: 'HOURS') {
|
||||
node('kitchen-slave') {
|
||||
timestamps {
|
||||
withCredentials([[$class: 'AmazonWebServicesCredentialsBinding',
|
||||
accessKeyVariable: 'AWS_ACCESS_KEY_ID',
|
||||
credentialsId: 'AWS_ACCESS_KEY_ID',
|
||||
secretKeyVariable: 'AWS_SECRET_ACCESS_KEY']]) {
|
||||
ansiColor('xterm') {
|
||||
withEnv([
|
||||
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
|
||||
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
|
||||
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
|
||||
'NOX_ENV_NAME=runtests-zeromq',
|
||||
'NOX_PASSTHROUGH_OPTS=--ssh-tests',
|
||||
'NOX_ENABLE_FROM_FILENAMES=true',
|
||||
'GOLDEN_IMAGES_CI_BRANCH=2018.3',
|
||||
'CODECOV_FLAGS=debian9,py2',
|
||||
'PATH=/usr/local/rbenv/shims/:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/root/bin:/root/bin',
|
||||
'RBENV_VERSION=2.4.2',
|
||||
'TEST_SUITE=py2',
|
||||
'TEST_PLATFORM=debian-9',
|
||||
'PY_COLORS=1',
|
||||
"FORCE_FULL=${params.runFull}",
|
||||
]) {
|
||||
// Set the GH status even before cloning the repo
|
||||
stage('github-pending') {
|
||||
githubNotify credentialsId: 'test-jenkins-credentials',
|
||||
description: "running ${TEST_SUITE}-${TEST_PLATFORM}...",
|
||||
status: 'PENDING',
|
||||
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
|
||||
}
|
||||
// Checkout the repo
|
||||
stage('checkout-scm') {
|
||||
cleanWs notFailBuild: true
|
||||
checkout scm
|
||||
}
|
||||
try {
|
||||
timestamps {
|
||||
withEnv([
|
||||
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
|
||||
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
|
||||
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
|
||||
'NOX_ENV_NAME=runtests-zeromq',
|
||||
'NOX_ENABLE_FROM_FILENAMES=true',
|
||||
'NOX_PASSTHROUGH_OPTS=--ssh-tests',
|
||||
"SALT_TARGET_BRANCH=${salt_target_branch}",
|
||||
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
|
||||
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version}",
|
||||
'PATH=~/.rbenv/shims:/usr/local/rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/root/bin:/root/bin',
|
||||
'RBENV_VERSION=2.4.2',
|
||||
"TEST_SUITE=${python_version}",
|
||||
"TEST_PLATFORM=${distro_name}-${distro_version}",
|
||||
"FORCE_FULL=${params.runFull}",
|
||||
]) {
|
||||
// Set the GH status even before cloning the repo
|
||||
if (env.NODE_NAME.startsWith('jenkins-pr-')) {
|
||||
stage('github-pending') {
|
||||
githubNotify credentialsId: 'test-jenkins-credentials',
|
||||
description: "running ${TEST_SUITE}-${TEST_PLATFORM}...",
|
||||
status: 'PENDING',
|
||||
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
|
||||
}
|
||||
}
|
||||
// Checkout the repo
|
||||
stage('checkout-scm') {
|
||||
cleanWs notFailBuild: true
|
||||
checkout scm
|
||||
}
|
||||
|
||||
// Setup the kitchen required bundle
|
||||
stage('setup-bundle') {
|
||||
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
|
||||
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
|
||||
}
|
||||
try {
|
||||
stage('run kitchen') {
|
||||
timeout(time: testrun_timeout, unit: 'HOURS') {
|
||||
withCredentials([
|
||||
[$class: 'AmazonWebServicesCredentialsBinding', accessKeyVariable: 'AWS_ACCESS_KEY_ID', credentialsId: 'AWS_ACCESS_KEY_ID', secretKeyVariable: 'AWS_SECRET_ACCESS_KEY']
|
||||
]) {
|
||||
sshagent(credentials: ['jenkins-testing-ssh-key']) {
|
||||
sh 'ssh-add ~/.ssh/jenkins-testing.pem'
|
||||
sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM || bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM'
|
||||
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM'
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} finally {
|
||||
stage('cleanup kitchen') {
|
||||
script {
|
||||
withCredentials([
|
||||
[$class: 'AmazonWebServicesCredentialsBinding', accessKeyVariable: 'AWS_ACCESS_KEY_ID', credentialsId: 'AWS_ACCESS_KEY_ID', secretKeyVariable: 'AWS_SECRET_ACCESS_KEY']
|
||||
]) {
|
||||
sshagent(credentials: ['jenkins-testing-ssh-key']) {
|
||||
sh 'ssh-add ~/.ssh/jenkins-testing.pem'
|
||||
sh 'bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM'
|
||||
}
|
||||
}
|
||||
}
|
||||
archiveArtifacts artifacts: 'artifacts/*,artifacts/**/*'
|
||||
}
|
||||
stage('report code coverage') {
|
||||
script {
|
||||
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
|
||||
sh '''
|
||||
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
|
||||
curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}"
|
||||
fi
|
||||
'''
|
||||
}
|
||||
}
|
||||
|
||||
stage('Create VM') {
|
||||
retry(3) {
|
||||
sh '''
|
||||
t=$(shuf -i 1-15 -n 1); echo "Sleeping $t seconds"; sleep $t
|
||||
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?;"
|
||||
'''
|
||||
}
|
||||
}
|
||||
} catch (Exception e) {
|
||||
currentBuild.result = 'FAILURE'
|
||||
} finally {
|
||||
try {
|
||||
junit 'artifacts/xml-unittests-output/*.xml'
|
||||
sshagent(credentials: ['jenkins-testing-ssh-key']) {
|
||||
sh 'ssh-add ~/.ssh/jenkins-testing.pem || ssh-add ~/.ssh/kitchen.pem'
|
||||
try {
|
||||
timeout(time: testrun_timeout, unit: 'HOURS') {
|
||||
stage('Converge VM') {
|
||||
sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM'
|
||||
}
|
||||
stage('Run Tests') {
|
||||
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
|
||||
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM'
|
||||
}
|
||||
}
|
||||
}
|
||||
} finally {
|
||||
try {
|
||||
stage('Download Artefacts') {
|
||||
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
|
||||
sh '''
|
||||
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
|
||||
'''
|
||||
}
|
||||
}
|
||||
archiveArtifacts artifacts: 'artifacts/*,artifacts/**/*'
|
||||
junit 'artifacts/xml-unittests-output/*.xml'
|
||||
} finally {
|
||||
stage('Cleanup') {
|
||||
sh '''
|
||||
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?;"
|
||||
'''
|
||||
}
|
||||
stage('Upload Coverage') {
|
||||
script {
|
||||
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
|
||||
sh '''
|
||||
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
|
||||
curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}"
|
||||
fi
|
||||
'''
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (Exception e) {
|
||||
currentBuild.result = 'FAILURE'
|
||||
} finally {
|
||||
cleanWs notFailBuild: true
|
||||
if (currentBuild.resultIsBetterOrEqualTo('SUCCESS')) {
|
||||
githubNotify credentialsId: 'test-jenkins-credentials',
|
||||
description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has passed",
|
||||
status: 'SUCCESS',
|
||||
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
|
||||
if (env.NODE_NAME.startsWith('jenkins-pr-')) {
|
||||
githubNotify credentialsId: 'test-jenkins-credentials',
|
||||
description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has passed",
|
||||
status: 'SUCCESS',
|
||||
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
|
||||
}
|
||||
} else {
|
||||
githubNotify credentialsId: 'test-jenkins-credentials',
|
||||
description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has failed",
|
||||
status: 'FAILURE',
|
||||
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
|
||||
if (env.NODE_NAME.startsWith('jenkins-pr-')) {
|
||||
githubNotify credentialsId: 'test-jenkins-credentials',
|
||||
description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has failed",
|
||||
status: 'FAILURE',
|
||||
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
|
||||
}
|
||||
try {
|
||||
slackSend channel: "#jenkins-prod-pr",
|
||||
color: '#FF0000',
|
||||
|
@@ -125,4 +149,4 @@ timeout(time: global_timeout, unit: 'HOURS') {
|
|||
}
|
||||
}
|
||||
|
||||
// vi: ft=groovy
|
||||
// vim: ft=groovy
|
||||
|
|
|
@@ -4,111 +4,135 @@ def testrun_timeout = 6
|
|||
// hour to allow for artifacts to be downloaded, if possible.
|
||||
def global_timeout = testrun_timeout + 1;
|
||||
|
||||
def distro_name = 'debian'
|
||||
def distro_version = '9'
|
||||
def python_version = 'py3'
|
||||
def salt_target_branch = '2018.3'
|
||||
def golden_images_branch = '2018.3'
|
||||
|
||||
properties([
|
||||
[
|
||||
$class: 'ScannerJobProperty', doNotScan: false
|
||||
],
|
||||
[
|
||||
$class: 'RebuildSettings', autoRebuild: false, rebuildDisabled: false
|
||||
],
|
||||
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '10')),
|
||||
parameters([
|
||||
booleanParam(defaultValue: false, description: 'Run full test suite', name: 'runFull')
|
||||
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
|
||||
])
|
||||
])
|
||||
|
||||
timeout(time: global_timeout, unit: 'HOURS') {
|
||||
node('kitchen-slave') {
|
||||
timestamps {
|
||||
withCredentials([[$class: 'AmazonWebServicesCredentialsBinding',
|
||||
accessKeyVariable: 'AWS_ACCESS_KEY_ID',
|
||||
credentialsId: 'AWS_ACCESS_KEY_ID',
|
||||
secretKeyVariable: 'AWS_SECRET_ACCESS_KEY']]) {
|
||||
ansiColor('xterm') {
|
||||
withEnv([
|
||||
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
|
||||
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
|
||||
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
|
||||
'NOX_ENV_NAME=runtests-zeromq',
|
||||
'NOX_PASSTHROUGH_OPTS=--ssh-tests',
|
||||
'NOX_ENABLE_FROM_FILENAMES=true',
|
||||
'GOLDEN_IMAGES_CI_BRANCH=2018.3',
|
||||
'CODECOV_FLAGS=debian9,py3',
|
||||
'PATH=/usr/local/rbenv/shims/:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/root/bin:/root/bin',
|
||||
'RBENV_VERSION=2.4.2',
|
||||
'TEST_SUITE=py3',
|
||||
'TEST_PLATFORM=debian-9',
|
||||
'PY_COLORS=1',
|
||||
"FORCE_FULL=${params.runFull}",
|
||||
]) {
|
||||
// Set the GH status even before cloning the repo
|
||||
stage('github-pending') {
|
||||
githubNotify credentialsId: 'test-jenkins-credentials',
|
||||
description: "running ${TEST_SUITE}-${TEST_PLATFORM}...",
|
||||
status: 'PENDING',
|
||||
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
|
||||
}
|
||||
// Checkout the repo
|
||||
stage('checkout-scm') {
|
||||
cleanWs notFailBuild: true
|
||||
checkout scm
|
||||
}
|
||||
try {
|
||||
timestamps {
|
||||
withEnv([
|
||||
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
|
||||
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
|
||||
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
|
||||
'NOX_ENV_NAME=runtests-zeromq',
|
||||
'NOX_ENABLE_FROM_FILENAMES=true',
|
||||
'NOX_PASSTHROUGH_OPTS=--ssh-tests',
|
||||
"SALT_TARGET_BRANCH=${salt_target_branch}",
|
||||
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
|
||||
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version}",
|
||||
'PATH=~/.rbenv/shims:/usr/local/rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/root/bin:/root/bin',
|
||||
'RBENV_VERSION=2.4.2',
|
||||
"TEST_SUITE=${python_version}",
|
||||
"TEST_PLATFORM=${distro_name}-${distro_version}",
|
||||
"FORCE_FULL=${params.runFull}",
|
||||
]) {
|
||||
// Set the GH status even before cloning the repo
|
||||
if (env.NODE_NAME.startsWith('jenkins-pr-')) {
|
||||
stage('github-pending') {
|
||||
githubNotify credentialsId: 'test-jenkins-credentials',
|
||||
description: "running ${TEST_SUITE}-${TEST_PLATFORM}...",
|
||||
status: 'PENDING',
|
||||
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
|
||||
}
|
||||
}
|
||||
// Checkout the repo
|
||||
stage('checkout-scm') {
|
||||
cleanWs notFailBuild: true
|
||||
checkout scm
|
||||
}
|
||||
|
||||
// Setup the kitchen required bundle
|
||||
stage('setup-bundle') {
|
||||
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
|
||||
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
|
||||
}
|
||||
try {
|
||||
stage('run kitchen') {
|
||||
timeout(time: testrun_timeout, unit: 'HOURS') {
|
||||
withCredentials([
|
||||
[$class: 'AmazonWebServicesCredentialsBinding', accessKeyVariable: 'AWS_ACCESS_KEY_ID', credentialsId: 'AWS_ACCESS_KEY_ID', secretKeyVariable: 'AWS_SECRET_ACCESS_KEY']
|
||||
]) {
|
||||
sshagent(credentials: ['jenkins-testing-ssh-key']) {
|
||||
sh 'ssh-add ~/.ssh/jenkins-testing.pem'
|
||||
sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM || bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM'
|
||||
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM'
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} finally {
|
||||
stage('cleanup kitchen') {
|
||||
script {
|
||||
withCredentials([
|
||||
[$class: 'AmazonWebServicesCredentialsBinding', accessKeyVariable: 'AWS_ACCESS_KEY_ID', credentialsId: 'AWS_ACCESS_KEY_ID', secretKeyVariable: 'AWS_SECRET_ACCESS_KEY']
|
||||
]) {
|
||||
sshagent(credentials: ['jenkins-testing-ssh-key']) {
|
||||
sh 'ssh-add ~/.ssh/jenkins-testing.pem'
|
||||
sh 'bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM'
|
||||
}
|
||||
}
|
||||
}
|
||||
archiveArtifacts artifacts: 'artifacts/*,artifacts/**/*'
|
||||
}
|
||||
stage('report code coverage') {
|
||||
script {
|
||||
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
|
||||
sh '''
|
||||
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
|
||||
curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}"
|
||||
fi
|
||||
'''
|
||||
}
|
||||
}
|
||||
|
||||
stage('Create VM') {
|
||||
retry(3) {
|
||||
sh '''
|
||||
t=$(shuf -i 1-15 -n 1); echo "Sleeping $t seconds"; sleep $t
|
||||
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?;"
|
||||
'''
|
||||
}
|
||||
}
|
||||
} catch (Exception e) {
|
||||
currentBuild.result = 'FAILURE'
|
||||
} finally {
|
||||
try {
|
||||
junit 'artifacts/xml-unittests-output/*.xml'
|
||||
sshagent(credentials: ['jenkins-testing-ssh-key']) {
|
||||
sh 'ssh-add ~/.ssh/jenkins-testing.pem || ssh-add ~/.ssh/kitchen.pem'
|
||||
try {
|
||||
timeout(time: testrun_timeout, unit: 'HOURS') {
|
||||
stage('Converge VM') {
|
||||
sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM'
|
||||
}
|
||||
stage('Run Tests') {
|
||||
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
|
||||
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM'
|
||||
}
|
||||
}
|
||||
}
|
||||
} finally {
|
||||
try {
|
||||
stage('Download Artefacts') {
|
||||
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
|
||||
sh '''
|
||||
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
|
||||
'''
|
||||
}
|
||||
}
|
||||
archiveArtifacts artifacts: 'artifacts/*,artifacts/**/*'
|
||||
junit 'artifacts/xml-unittests-output/*.xml'
|
||||
} finally {
|
||||
stage('Cleanup') {
|
||||
sh '''
|
||||
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?;"
|
||||
'''
|
||||
}
|
||||
stage('Upload Coverage') {
|
||||
script {
|
||||
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
|
||||
sh '''
|
||||
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
|
||||
curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}"
|
||||
fi
|
||||
'''
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (Exception e) {
|
||||
currentBuild.result = 'FAILURE'
|
||||
} finally {
|
||||
cleanWs notFailBuild: true
|
||||
if (currentBuild.resultIsBetterOrEqualTo('SUCCESS')) {
|
||||
githubNotify credentialsId: 'test-jenkins-credentials',
|
||||
description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has passed",
|
||||
status: 'SUCCESS',
|
||||
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
|
||||
if (env.NODE_NAME.startsWith('jenkins-pr-')) {
|
||||
githubNotify credentialsId: 'test-jenkins-credentials',
|
||||
description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has passed",
|
||||
status: 'SUCCESS',
|
||||
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
|
||||
}
|
||||
} else {
|
||||
githubNotify credentialsId: 'test-jenkins-credentials',
|
||||
description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has failed",
|
||||
status: 'FAILURE',
|
||||
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
|
||||
if (env.NODE_NAME.startsWith('jenkins-pr-')) {
|
||||
githubNotify credentialsId: 'test-jenkins-credentials',
|
||||
description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has failed",
|
||||
status: 'FAILURE',
|
||||
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
|
||||
}
|
||||
try {
|
||||
slackSend channel: "#jenkins-prod-pr",
|
||||
color: '#FF0000',
|
||||
|
@@ -125,4 +149,4 @@ timeout(time: global_timeout, unit: 'HOURS') {
|
|||
}
|
||||
}
|
||||
|
||||
// vi: ft=groovy
|
||||
// vim: ft=groovy
|
||||
|
|
|
@@ -1,128 +0,0 @@
|
|||
// Define the maximum time, in hours, that a test run should run for
|
||||
def testrun_timeout = 6
|
||||
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
|
||||
// hour to allow for artifacts to be downloaded, if possible.
|
||||
def global_timeout = testrun_timeout + 1;
|
||||
|
||||
properties([
|
||||
[
|
||||
$class: 'ScannerJobProperty', doNotScan: false
|
||||
],
|
||||
[
|
||||
$class: 'RebuildSettings', autoRebuild: false, rebuildDisabled: false
|
||||
],
|
||||
parameters([
|
||||
booleanParam(defaultValue: false, description: 'Run full test suite', name: 'runFull')
|
||||
])
|
||||
])
|
||||
timeout(time: global_timeout, unit: 'HOURS') {
|
||||
node('kitchen-slave') {
|
||||
timestamps {
|
||||
ansiColor('xterm') {
|
||||
withEnv([
|
||||
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
|
||||
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
|
||||
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
|
||||
'NOX_ENV_NAME=runtests-zeromq',
|
||||
'NOX_PASSTHROUGH_OPTS=--ssh-tests',
|
||||
'NOX_ENABLE_FROM_FILENAMES=true',
|
||||
'GOLDEN_IMAGES_CI_BRANCH=2018.3',
|
||||
'CODECOV_FLAGS=fedora28,py2',
|
||||
'PATH=/usr/local/rbenv/shims/:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/root/bin:/root/bin',
|
||||
'RBENV_VERSION=2.4.2',
|
||||
'TEST_SUITE=py2',
|
||||
'TEST_PLATFORM=fedora-28',
|
||||
'PY_COLORS=1',
|
||||
"FORCE_FULL=${params.runFull}",
|
||||
]) {
|
||||
// Set the GH status even before cloning the repo
|
||||
stage('github-pending') {
|
||||
githubNotify credentialsId: 'test-jenkins-credentials',
|
||||
description: "running ${TEST_SUITE}-${TEST_PLATFORM}...",
|
||||
status: 'PENDING',
|
||||
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
|
||||
}
|
||||
// Checkout the repo
|
||||
stage('checkout-scm') {
|
||||
cleanWs notFailBuild: true
|
||||
checkout scm
|
||||
}
|
||||
try {
|
||||
// Setup the kitchen required bundle
|
||||
stage('setup-bundle') {
|
||||
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
|
||||
}
|
||||
try {
|
||||
stage('run kitchen') {
|
||||
timeout(time: testrun_timeout, unit: 'HOURS') {
|
||||
withCredentials([
|
||||
[$class: 'AmazonWebServicesCredentialsBinding', accessKeyVariable: 'AWS_ACCESS_KEY_ID', credentialsId: 'AWS_ACCESS_KEY_ID', secretKeyVariable: 'AWS_SECRET_ACCESS_KEY']
|
||||
]) {
|
||||
sshagent(credentials: ['jenkins-testing-ssh-key']) {
|
||||
sh 'ssh-add ~/.ssh/jenkins-testing.pem'
|
||||
sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM || bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM'
|
||||
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM'
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} finally {
|
||||
stage('cleanup kitchen') {
|
||||
script {
|
||||
withCredentials([
|
||||
[$class: 'AmazonWebServicesCredentialsBinding', accessKeyVariable: 'AWS_ACCESS_KEY_ID', credentialsId: 'AWS_ACCESS_KEY_ID', secretKeyVariable: 'AWS_SECRET_ACCESS_KEY']
|
||||
]) {
|
||||
sshagent(credentials: ['jenkins-testing-ssh-key']) {
|
||||
sh 'ssh-add ~/.ssh/jenkins-testing.pem'
|
||||
sh 'bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM'
|
||||
}
|
||||
}
|
||||
}
|
||||
archiveArtifacts artifacts: 'artifacts/*,artifacts/**/*'
|
||||
}
|
||||
stage('report code coverage') {
|
||||
script {
|
||||
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
|
||||
sh '''
|
||||
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
|
||||
curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}"
|
||||
fi
|
||||
'''
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (Exception e) {
|
||||
currentBuild.result = 'FAILURE'
|
||||
} finally {
|
||||
try {
|
||||
junit 'artifacts/xml-unittests-output/*.xml'
|
||||
} finally {
|
||||
cleanWs notFailBuild: true
|
||||
if (currentBuild.resultIsBetterOrEqualTo('SUCCESS')) {
|
||||
githubNotify credentialsId: 'test-jenkins-credentials',
|
||||
description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has passed",
|
||||
status: 'SUCCESS',
|
||||
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
|
||||
} else {
|
||||
githubNotify credentialsId: 'test-jenkins-credentials',
|
||||
description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has failed",
|
||||
status: 'FAILURE',
|
||||
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
|
||||
try {
|
||||
slackSend channel: "#jenkins-prod-pr",
|
||||
color: '#FF0000',
|
||||
message: "FAILED: PR-Job: '${env.JOB_NAME} [${env.BUILD_NUMBER}]' (${env.BUILD_URL})"
|
||||
} catch (Exception e) {
|
||||
sh 'echo Failed to send the Slack notification'
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// vi: ft=groovy
|
|
@@ -1,128 +0,0 @@
|
|||
// Define the maximum time, in hours, that a test run should run for
|
||||
def testrun_timeout = 6
|
||||
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
|
||||
// hour to allow for artifacts to be downloaded, if possible.
|
||||
def global_timeout = testrun_timeout + 1;
|
||||
|
||||
properties([
|
||||
[
|
||||
$class: 'ScannerJobProperty', doNotScan: false
|
||||
],
|
||||
[
|
||||
$class: 'RebuildSettings', autoRebuild: false, rebuildDisabled: false
|
||||
],
|
||||
parameters([
|
||||
booleanParam(defaultValue: false, description: 'Run full test suite', name: 'runFull')
|
||||
])
|
||||
])
|
||||
timeout(time: global_timeout, unit: 'HOURS') {
|
||||
node('kitchen-slave') {
|
||||
timestamps {
|
||||
ansiColor('xterm') {
|
||||
withEnv([
|
||||
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
|
||||
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
|
||||
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
|
||||
'NOX_ENV_NAME=runtests-zeromq',
|
||||
'NOX_PASSTHROUGH_OPTS=--ssh-tests',
|
||||
'NOX_ENABLE_FROM_FILENAMES=true',
|
||||
'GOLDEN_IMAGES_CI_BRANCH=2018.3',
|
||||
'CODECOV_FLAGS=fedora28,py3',
|
||||
'PATH=/usr/local/rbenv/shims/:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/root/bin:/root/bin',
|
||||
'RBENV_VERSION=2.4.2',
|
||||
'TEST_SUITE=py3',
|
||||
'TEST_PLATFORM=fedora-28',
|
||||
'PY_COLORS=1',
|
||||
"FORCE_FULL=${params.runFull}",
|
||||
]) {
|
||||
// Set the GH status even before cloning the repo
|
||||
stage('github-pending') {
|
||||
githubNotify credentialsId: 'test-jenkins-credentials',
|
||||
description: "running ${TEST_SUITE}-${TEST_PLATFORM}...",
|
||||
status: 'PENDING',
|
||||
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
|
||||
}
|
||||
// Checkout the repo
|
||||
stage('checkout-scm') {
|
||||
cleanWs notFailBuild: true
|
||||
checkout scm
|
||||
}
|
||||
try {
|
||||
// Setup the kitchen required bundle
|
||||
stage('setup-bundle') {
|
||||
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
|
||||
}
|
||||
try {
|
||||
stage('run kitchen') {
|
||||
timeout(time: testrun_timeout, unit: 'HOURS') {
|
||||
withCredentials([
|
||||
[$class: 'AmazonWebServicesCredentialsBinding', accessKeyVariable: 'AWS_ACCESS_KEY_ID', credentialsId: 'AWS_ACCESS_KEY_ID', secretKeyVariable: 'AWS_SECRET_ACCESS_KEY']
|
||||
]) {
|
||||
sshagent(credentials: ['jenkins-testing-ssh-key']) {
|
||||
sh 'ssh-add ~/.ssh/jenkins-testing.pem'
|
||||
sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM || bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM'
|
||||
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM'
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} finally {
|
||||
stage('cleanup kitchen') {
|
||||
script {
|
||||
withCredentials([
|
||||
[$class: 'AmazonWebServicesCredentialsBinding', accessKeyVariable: 'AWS_ACCESS_KEY_ID', credentialsId: 'AWS_ACCESS_KEY_ID', secretKeyVariable: 'AWS_SECRET_ACCESS_KEY']
|
||||
]) {
|
||||
sshagent(credentials: ['jenkins-testing-ssh-key']) {
|
||||
sh 'ssh-add ~/.ssh/jenkins-testing.pem'
|
||||
sh 'bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM'
|
||||
}
|
||||
}
|
||||
}
|
||||
archiveArtifacts artifacts: 'artifacts/*,artifacts/**/*'
|
||||
}
|
||||
stage('report code coverage') {
|
||||
script {
|
||||
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
|
||||
sh '''
|
||||
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
|
||||
curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}"
|
||||
fi
|
||||
'''
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (Exception e) {
|
||||
currentBuild.result = 'FAILURE'
|
||||
} finally {
|
||||
try {
|
||||
junit 'artifacts/xml-unittests-output/*.xml'
|
||||
} finally {
|
||||
cleanWs notFailBuild: true
|
||||
if (currentBuild.resultIsBetterOrEqualTo('SUCCESS')) {
|
||||
githubNotify credentialsId: 'test-jenkins-credentials',
|
||||
description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has passed",
|
||||
status: 'SUCCESS',
|
||||
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
|
||||
} else {
|
||||
githubNotify credentialsId: 'test-jenkins-credentials',
|
||||
description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has failed",
|
||||
status: 'FAILURE',
|
||||
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
|
||||
try {
|
||||
slackSend channel: "#jenkins-prod-pr",
|
||||
color: '#FF0000',
|
||||
message: "FAILED: PR-Job: '${env.JOB_NAME} [${env.BUILD_NUMBER}]' (${env.BUILD_URL})"
|
||||
} catch (Exception e) {
|
||||
sh 'echo Failed to send the Slack notification'
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// vi: ft=groovy
|
152
.ci/kitchen-fedora29-py2
Normal file
|
@@ -0,0 +1,152 @@
|
|||
// Define the maximum time, in hours, that a test run should run for
|
||||
def testrun_timeout = 6
|
||||
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
|
||||
// hour to allow for artifacts to be downloaded, if possible.
|
||||
def global_timeout = testrun_timeout + 1;
|
||||
|
||||
def distro_name = 'fedora'
|
||||
def distro_version = '29'
|
||||
def python_version = 'py2'
|
||||
def salt_target_branch = '2018.3'
|
||||
def golden_images_branch = '2018.3'
|
||||
|
||||
properties([
|
||||
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '10')),
|
||||
parameters([
|
||||
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
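// runFull is exported to the shell below as FORCE_FULL and gates the coverage upload.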
|
||||
])
|
||||
])
|
||||
|
||||
timeout(time: global_timeout, unit: 'HOURS') {
|
||||
node('kitchen-slave') {
|
||||
withCredentials([[$class: 'AmazonWebServicesCredentialsBinding',
|
||||
accessKeyVariable: 'AWS_ACCESS_KEY_ID',
|
||||
credentialsId: 'AWS_ACCESS_KEY_ID',
|
||||
secretKeyVariable: 'AWS_SECRET_ACCESS_KEY']]) {
|
||||
ansiColor('xterm') {
|
||||
timestamps {
|
||||
withEnv([
|
||||
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
|
||||
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
|
||||
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
|
||||
'NOX_ENV_NAME=runtests-zeromq',
|
||||
'NOX_ENABLE_FROM_FILENAMES=true',
|
||||
'NOX_PASSTHROUGH_OPTS=--ssh-tests',
|
||||
"SALT_TARGET_BRANCH=${salt_target_branch}",
|
||||
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
|
||||
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version}",
|
||||
'PATH=~/.rbenv/shims:/usr/local/rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/root/bin:/root/bin',
|
||||
'RBENV_VERSION=2.4.2',
|
||||
"TEST_SUITE=${python_version}",
|
||||
"TEST_PLATFORM=${distro_name}-${distro_version}",
|
||||
"FORCE_FULL=${params.runFull}",
|
||||
]) {
|
||||
// Set the GH status even before cloning the repo
|
||||
if (env.NODE_NAME.startsWith('jenkins-pr-')) {
|
||||
stage('github-pending') {
|
||||
githubNotify credentialsId: 'test-jenkins-credentials',
|
||||
description: "running ${TEST_SUITE}-${TEST_PLATFORM}...",
|
||||
status: 'PENDING',
|
||||
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
|
||||
}
|
||||
}
|
||||
// Checkout the repo
|
||||
stage('checkout-scm') {
|
||||
cleanWs notFailBuild: true
|
||||
checkout scm
|
||||
}
|
||||
|
||||
// Setup the kitchen required bundle
|
||||
stage('setup-bundle') {
|
||||
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
|
||||
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
|
||||
}
|
||||
|
||||
stage('Create VM') {
|
||||
retry(3) {
|
||||
sh '''
|
||||
t=$(shuf -i 1-15 -n 1); echo "Sleeping $t seconds"; sleep $t
|
||||
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?;"
|
||||
'''
|
||||
}
|
||||
}
|
||||
try {
|
||||
sshagent(credentials: ['jenkins-testing-ssh-key']) {
|
||||
sh 'ssh-add ~/.ssh/jenkins-testing.pem || ssh-add ~/.ssh/kitchen.pem'
|
||||
try {
|
||||
timeout(time: testrun_timeout, unit: 'HOURS') {
|
||||
stage('Converge VM') {
|
||||
sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM'
|
||||
}
|
||||
stage('Run Tests') {
|
||||
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
|
||||
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM'
|
||||
}
|
||||
}
|
||||
}
|
||||
} finally {
|
||||
try {
|
||||
stage('Download Artefacts') {
|
||||
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
|
||||
sh '''
|
||||
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
|
||||
'''
|
||||
}
|
||||
}
|
||||
archiveArtifacts artifacts: 'artifacts/*,artifacts/**/*'
|
||||
junit 'artifacts/xml-unittests-output/*.xml'
|
||||
} finally {
|
||||
stage('Cleanup') {
|
||||
sh '''
|
||||
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?;"
|
||||
'''
|
||||
}
|
||||
stage('Upload Coverage') {
|
||||
script {
|
||||
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
|
||||
sh '''
|
||||
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
|
||||
curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}"
|
||||
fi
|
||||
'''
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (Exception e) {
|
||||
currentBuild.result = 'FAILURE'
|
||||
} finally {
|
||||
cleanWs notFailBuild: true
|
||||
if (currentBuild.resultIsBetterOrEqualTo('SUCCESS')) {
|
||||
if (env.NODE_NAME.startsWith('jenkins-pr-')) {
|
||||
githubNotify credentialsId: 'test-jenkins-credentials',
|
||||
description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has passed",
|
||||
status: 'SUCCESS',
|
||||
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
|
||||
}
|
||||
} else {
|
||||
if (env.NODE_NAME.startsWith('jenkins-pr-')) {
|
||||
githubNotify credentialsId: 'test-jenkins-credentials',
|
||||
description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has failed",
|
||||
status: 'FAILURE',
|
||||
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
|
||||
}
|
||||
try {
|
||||
slackSend channel: "#jenkins-prod-pr",
|
||||
color: '#FF0000',
|
||||
message: "FAILED: PR-Job: '${env.JOB_NAME} [${env.BUILD_NUMBER}]' (${env.BUILD_URL})"
|
||||
} catch (Exception e) {
|
||||
sh 'echo Failed to send the Slack notification'
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// vim: ft=groovy
|
152
.ci/kitchen-fedora29-py3
Normal file
|
@@ -0,0 +1,152 @@
|
|||
// Define the maximum time, in hours, that a test run should run for
|
||||
def testrun_timeout = 6
|
||||
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
|
||||
// hour to allow for artifacts to be downloaded, if possible.
|
||||
def global_timeout = testrun_timeout + 1;
|
||||
|
||||
def distro_name = 'fedora'
|
||||
def distro_version = '29'
|
||||
def python_version = 'py3'
|
||||
def salt_target_branch = '2018.3'
|
||||
def golden_images_branch = '2018.3'
|
||||
|
||||
properties([
|
||||
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '10')),
|
||||
parameters([
|
||||
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
|
||||
])
|
||||
])
|
||||
|
||||
timeout(time: global_timeout, unit: 'HOURS') {
|
||||
node('kitchen-slave') {
|
||||
withCredentials([[$class: 'AmazonWebServicesCredentialsBinding',
|
||||
accessKeyVariable: 'AWS_ACCESS_KEY_ID',
|
||||
credentialsId: 'AWS_ACCESS_KEY_ID',
|
||||
secretKeyVariable: 'AWS_SECRET_ACCESS_KEY']]) {
|
||||
ansiColor('xterm') {
|
||||
timestamps {
|
||||
withEnv([
|
||||
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
|
||||
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
|
||||
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
|
||||
'NOX_ENV_NAME=runtests-zeromq',
|
||||
'NOX_ENABLE_FROM_FILENAMES=true',
|
||||
'NOX_PASSTHROUGH_OPTS=--ssh-tests',
|
||||
"SALT_TARGET_BRANCH=${salt_target_branch}",
|
||||
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
|
||||
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version}",
|
||||
'PATH=~/.rbenv/shims:/usr/local/rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/root/bin:/root/bin',
|
||||
'RBENV_VERSION=2.4.2',
|
||||
"TEST_SUITE=${python_version}",
|
||||
"TEST_PLATFORM=${distro_name}-${distro_version}",
|
||||
"FORCE_FULL=${params.runFull}",
|
||||
]) {
|
||||
// Set the GH status even before cloning the repo
|
||||
if (env.NODE_NAME.startsWith('jenkins-pr-')) {
|
||||
stage('github-pending') {
|
||||
githubNotify credentialsId: 'test-jenkins-credentials',
|
||||
description: "running ${TEST_SUITE}-${TEST_PLATFORM}...",
|
||||
status: 'PENDING',
|
||||
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
|
||||
}
|
||||
}
|
||||
// Checkout the repo
|
||||
stage('checkout-scm') {
|
||||
cleanWs notFailBuild: true
|
||||
checkout scm
|
||||
}
|
||||
|
||||
// Setup the kitchen required bundle
|
||||
stage('setup-bundle') {
|
||||
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
|
||||
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
|
||||
}
|
||||
|
||||
stage('Create VM') {
|
||||
retry(3) {
|
||||
sh '''
|
||||
t=$(shuf -i 1-15 -n 1); echo "Sleeping $t seconds"; sleep $t
|
||||
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?;"
|
||||
'''
|
||||
}
|
||||
}
|
||||
try {
|
||||
sshagent(credentials: ['jenkins-testing-ssh-key']) {
|
||||
sh 'ssh-add ~/.ssh/jenkins-testing.pem || ssh-add ~/.ssh/kitchen.pem'
|
||||
try {
|
||||
timeout(time: testrun_timeout, unit: 'HOURS') {
|
||||
stage('Converge VM') {
|
||||
sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM'
|
||||
}
|
||||
stage('Run Tests') {
|
||||
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
|
||||
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM'
|
||||
}
|
||||
}
|
||||
}
|
||||
} finally {
|
||||
try {
|
||||
stage('Download Artefacts') {
|
||||
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
|
||||
sh '''
|
||||
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
|
||||
'''
|
||||
}
|
||||
}
|
||||
archiveArtifacts artifacts: 'artifacts/*,artifacts/**/*'
|
||||
junit 'artifacts/xml-unittests-output/*.xml'
|
||||
} finally {
|
||||
stage('Cleanup') {
|
||||
sh '''
|
||||
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?;"
|
||||
'''
|
||||
}
|
||||
stage('Upload Coverage') {
|
||||
script {
|
||||
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
|
||||
sh '''
|
||||
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
|
||||
curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}"
|
||||
fi
|
||||
'''
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (Exception e) {
|
||||
currentBuild.result = 'FAILURE'
|
||||
} finally {
|
||||
cleanWs notFailBuild: true
|
||||
if (currentBuild.resultIsBetterOrEqualTo('SUCCESS')) {
|
||||
if (env.NODE_NAME.startsWith('jenkins-pr-')) {
|
||||
githubNotify credentialsId: 'test-jenkins-credentials',
|
||||
description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has passed",
|
||||
status: 'SUCCESS',
|
||||
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
|
||||
}
|
||||
} else {
|
||||
if (env.NODE_NAME.startsWith('jenkins-pr-')) {
|
||||
githubNotify credentialsId: 'test-jenkins-credentials',
|
||||
description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has failed",
|
||||
status: 'FAILURE',
|
||||
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
|
||||
}
|
||||
try {
|
||||
slackSend channel: "#jenkins-prod-pr",
|
||||
color: '#FF0000',
|
||||
message: "FAILED: PR-Job: '${env.JOB_NAME} [${env.BUILD_NUMBER}]' (${env.BUILD_URL})"
|
||||
} catch (Exception e) {
|
||||
sh 'echo Failed to send the Slack notification'
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// vim: ft=groovy
|
|
@@ -4,111 +4,135 @@ def testrun_timeout = 6
|
|||
// hour to allow for artifacts to be downloaded, if possible.
|
||||
def global_timeout = testrun_timeout + 1;
|
||||
|
||||
def distro_name = 'ubuntu'
|
||||
def distro_version = '1604'
|
||||
def python_version = 'py2'
|
||||
def salt_target_branch = '2018.3'
|
||||
def golden_images_branch = '2018.3'
|
||||
|
||||
properties([
|
||||
[
|
||||
$class: 'ScannerJobProperty', doNotScan: false
|
||||
],
|
||||
[
|
||||
$class: 'RebuildSettings', autoRebuild: false, rebuildDisabled: false
|
||||
],
|
||||
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '10')),
|
||||
parameters([
|
||||
booleanParam(defaultValue: false, description: 'Run full test suite', name: 'runFull')
|
||||
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
|
||||
])
|
||||
])
|
||||
|
||||
timeout(time: global_timeout, unit: 'HOURS') {
|
||||
node('kitchen-slave') {
|
||||
timestamps {
|
||||
withCredentials([[$class: 'AmazonWebServicesCredentialsBinding',
|
||||
accessKeyVariable: 'AWS_ACCESS_KEY_ID',
|
||||
credentialsId: 'AWS_ACCESS_KEY_ID',
|
||||
secretKeyVariable: 'AWS_SECRET_ACCESS_KEY']]) {
|
||||
ansiColor('xterm') {
|
||||
withEnv([
|
||||
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
|
||||
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
|
||||
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
|
||||
'NOX_ENV_NAME=runtests-zeromq',
|
||||
'NOX_PASSTHROUGH_OPTS=--ssh-tests',
|
||||
'NOX_ENABLE_FROM_FILENAMES=true',
|
||||
'GOLDEN_IMAGES_CI_BRANCH=2018.3',
|
||||
'CODECOV_FLAGS=ubuntu1604,py2',
|
||||
'PATH=/usr/local/rbenv/shims/:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/root/bin:/root/bin',
|
||||
'RBENV_VERSION=2.4.2',
|
||||
'TEST_SUITE=py2',
|
||||
'TEST_PLATFORM=ubuntu-1604',
|
||||
'PY_COLORS=1',
|
||||
"FORCE_FULL=${params.runFull}",
|
||||
]) {
|
||||
// Set the GH status even before cloning the repo
|
||||
stage('github-pending') {
|
||||
githubNotify credentialsId: 'test-jenkins-credentials',
|
||||
description: "running ${TEST_SUITE}-${TEST_PLATFORM}...",
|
||||
status: 'PENDING',
|
||||
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
|
||||
}
|
||||
// Checkout the repo
|
||||
stage('checkout-scm') {
|
||||
cleanWs notFailBuild: true
|
||||
checkout scm
|
||||
}
|
||||
try {
|
||||
timestamps {
|
||||
withEnv([
|
||||
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
|
||||
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
|
||||
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
|
||||
'NOX_ENV_NAME=runtests-zeromq',
|
||||
'NOX_ENABLE_FROM_FILENAMES=true',
|
||||
'NOX_PASSTHROUGH_OPTS=--ssh-tests',
|
||||
"SALT_TARGET_BRANCH=${salt_target_branch}",
|
||||
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
|
||||
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version}",
|
||||
'PATH=~/.rbenv/shims:/usr/local/rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/root/bin:/root/bin',
|
||||
'RBENV_VERSION=2.4.2',
|
||||
"TEST_SUITE=${python_version}",
|
||||
"TEST_PLATFORM=${distro_name}-${distro_version}",
|
||||
"FORCE_FULL=${params.runFull}",
|
||||
]) {
|
||||
// Set the GH status even before cloning the repo
|
||||
if (env.NODE_NAME.startsWith('jenkins-pr-')) {
|
||||
stage('github-pending') {
|
||||
githubNotify credentialsId: 'test-jenkins-credentials',
|
||||
description: "running ${TEST_SUITE}-${TEST_PLATFORM}...",
|
||||
status: 'PENDING',
|
||||
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
|
||||
}
|
||||
}
|
||||
// Checkout the repo
|
||||
stage('checkout-scm') {
|
||||
cleanWs notFailBuild: true
|
||||
checkout scm
|
||||
}
|
||||
|
||||
// Setup the kitchen required bundle
|
||||
stage('setup-bundle') {
|
||||
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
|
||||
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
|
||||
}
|
||||
try {
|
||||
stage('run kitchen') {
|
||||
timeout(time: testrun_timeout, unit: 'HOURS') {
|
||||
withCredentials([
|
||||
[$class: 'AmazonWebServicesCredentialsBinding', accessKeyVariable: 'AWS_ACCESS_KEY_ID', credentialsId: 'AWS_ACCESS_KEY_ID', secretKeyVariable: 'AWS_SECRET_ACCESS_KEY']
|
||||
]) {
|
||||
sshagent(credentials: ['jenkins-testing-ssh-key']) {
|
||||
sh 'ssh-add ~/.ssh/jenkins-testing.pem'
|
||||
sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM || bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM'
|
||||
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM'
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} finally {
|
||||
stage('cleanup kitchen') {
|
||||
script {
|
||||
withCredentials([
|
||||
[$class: 'AmazonWebServicesCredentialsBinding', accessKeyVariable: 'AWS_ACCESS_KEY_ID', credentialsId: 'AWS_ACCESS_KEY_ID', secretKeyVariable: 'AWS_SECRET_ACCESS_KEY']
|
||||
]) {
|
||||
sshagent(credentials: ['jenkins-testing-ssh-key']) {
|
||||
sh 'ssh-add ~/.ssh/jenkins-testing.pem'
|
||||
sh 'bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM'
|
||||
}
|
||||
}
|
||||
}
|
||||
archiveArtifacts artifacts: 'artifacts/*,artifacts/**/*'
|
||||
}
|
||||
stage('report code coverage') {
|
||||
script {
|
||||
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
|
||||
sh '''
|
||||
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
|
||||
curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}"
|
||||
fi
|
||||
'''
|
||||
}
|
||||
}
|
||||
|
||||
stage('Create VM') {
|
||||
retry(3) {
|
||||
sh '''
|
||||
t=$(shuf -i 1-15 -n 1); echo "Sleeping $t seconds"; sleep $t
|
||||
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?;"
|
||||
'''
|
||||
}
|
||||
}
|
||||
} catch (Exception e) {
|
||||
currentBuild.result = 'FAILURE'
|
||||
} finally {
|
||||
try {
|
||||
junit 'artifacts/xml-unittests-output/*.xml'
|
||||
sshagent(credentials: ['jenkins-testing-ssh-key']) {
|
||||
sh 'ssh-add ~/.ssh/jenkins-testing.pem || ssh-add ~/.ssh/kitchen.pem'
|
||||
try {
|
||||
timeout(time: testrun_timeout, unit: 'HOURS') {
|
||||
stage('Converge VM') {
|
||||
sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM'
|
||||
}
|
||||
stage('Run Tests') {
|
||||
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
|
||||
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM'
|
||||
}
|
||||
}
|
||||
}
|
||||
} finally {
|
||||
try {
|
||||
stage('Download Artefacts') {
|
||||
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
|
||||
sh '''
|
||||
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
|
||||
'''
|
||||
}
|
||||
}
|
||||
archiveArtifacts artifacts: 'artifacts/*,artifacts/**/*'
|
||||
junit 'artifacts/xml-unittests-output/*.xml'
|
||||
} finally {
|
||||
stage('Cleanup') {
|
||||
sh '''
|
||||
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?;"
|
||||
'''
|
||||
}
|
||||
stage('Upload Coverage') {
|
||||
script {
|
||||
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
|
||||
sh '''
|
||||
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
|
||||
curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}"
|
||||
fi
|
||||
'''
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (Exception e) {
|
||||
currentBuild.result = 'FAILURE'
|
||||
} finally {
|
||||
cleanWs notFailBuild: true
|
||||
if (currentBuild.resultIsBetterOrEqualTo('SUCCESS')) {
|
||||
githubNotify credentialsId: 'test-jenkins-credentials',
|
||||
description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has passed",
|
||||
status: 'SUCCESS',
|
||||
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
|
||||
if (env.NODE_NAME.startsWith('jenkins-pr-')) {
|
||||
githubNotify credentialsId: 'test-jenkins-credentials',
|
||||
description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has passed",
|
||||
status: 'SUCCESS',
|
||||
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
|
||||
}
|
||||
} else {
|
||||
githubNotify credentialsId: 'test-jenkins-credentials',
|
||||
description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has failed",
|
||||
status: 'FAILURE',
|
||||
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
|
||||
if (env.NODE_NAME.startsWith('jenkins-pr-')) {
|
||||
githubNotify credentialsId: 'test-jenkins-credentials',
|
||||
description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has failed",
|
||||
status: 'FAILURE',
|
||||
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
|
||||
}
|
||||
try {
|
||||
slackSend channel: "#jenkins-prod-pr",
|
||||
color: '#FF0000',
|
||||
|
@@ -125,4 +149,4 @@ timeout(time: global_timeout, unit: 'HOURS') {
|
|||
}
|
||||
}
|
||||
|
||||
// vi: ft=groovy
|
||||
// vim: ft=groovy
|
||||
|
|
|
@@ -4,111 +4,135 @@ def testrun_timeout = 6
|
|||
// hour to allow for artifacts to be downloaded, if possible.
|
||||
def global_timeout = testrun_timeout + 1;
|
||||
|
||||
def distro_name = 'ubuntu'
|
||||
def distro_version = '1604'
|
||||
def python_version = 'py3'
|
||||
def salt_target_branch = '2018.3'
|
||||
def golden_images_branch = '2018.3'
|
||||
|
||||
properties([
|
||||
[
|
||||
$class: 'ScannerJobProperty', doNotScan: false
|
||||
],
|
||||
[
|
||||
$class: 'RebuildSettings', autoRebuild: false, rebuildDisabled: false
|
||||
],
|
||||
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '10')),
|
||||
parameters([
|
||||
booleanParam(defaultValue: false, description: 'Run full test suite', name: 'runFull')
|
||||
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
|
||||
])
|
||||
])
|
||||
|
||||
timeout(time: global_timeout, unit: 'HOURS') {
|
||||
node('kitchen-slave') {
|
||||
timestamps {
|
||||
withCredentials([[$class: 'AmazonWebServicesCredentialsBinding',
|
||||
accessKeyVariable: 'AWS_ACCESS_KEY_ID',
|
||||
credentialsId: 'AWS_ACCESS_KEY_ID',
|
||||
secretKeyVariable: 'AWS_SECRET_ACCESS_KEY']]) {
|
||||
ansiColor('xterm') {
|
||||
withEnv([
|
||||
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
|
||||
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
|
||||
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
|
||||
'NOX_ENV_NAME=runtests-zeromq',
|
||||
'NOX_PASSTHROUGH_OPTS=--ssh-tests',
|
||||
'NOX_ENABLE_FROM_FILENAMES=true',
|
||||
'GOLDEN_IMAGES_CI_BRANCH=2018.3',
|
||||
'CODECOV_FLAGS=ubuntu1604,py3',
|
||||
'PATH=/usr/local/rbenv/shims/:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/root/bin:/root/bin',
|
||||
'RBENV_VERSION=2.4.2',
|
||||
'TEST_SUITE=py3',
|
||||
'TEST_PLATFORM=ubuntu-1604',
|
||||
'PY_COLORS=1',
|
||||
"FORCE_FULL=${params.runFull}",
|
||||
]) {
|
||||
// Set the GH status even before cloning the repo
|
||||
stage('github-pending') {
|
||||
githubNotify credentialsId: 'test-jenkins-credentials',
|
||||
description: "running ${TEST_SUITE}-${TEST_PLATFORM}...",
|
||||
status: 'PENDING',
|
||||
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
|
||||
}
|
||||
// Checkout the repo
|
||||
stage('checkout-scm') {
|
||||
cleanWs notFailBuild: true
|
||||
checkout scm
|
||||
}
|
||||
try {
|
||||
timestamps {
|
||||
withEnv([
|
||||
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
|
||||
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
|
||||
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
|
||||
'NOX_ENV_NAME=runtests-zeromq',
|
||||
'NOX_ENABLE_FROM_FILENAMES=true',
|
||||
'NOX_PASSTHROUGH_OPTS=--ssh-tests',
|
||||
"SALT_TARGET_BRANCH=${salt_target_branch}",
|
||||
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
|
||||
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version}",
|
||||
'PATH=~/.rbenv/shims:/usr/local/rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/root/bin:/root/bin',
|
||||
'RBENV_VERSION=2.4.2',
|
||||
"TEST_SUITE=${python_version}",
|
||||
"TEST_PLATFORM=${distro_name}-${distro_version}",
|
||||
"FORCE_FULL=${params.runFull}",
|
||||
]) {
|
||||
// Set the GH status even before cloning the repo
|
||||
if (env.NODE_NAME.startsWith('jenkins-pr-')) {
|
||||
stage('github-pending') {
|
||||
githubNotify credentialsId: 'test-jenkins-credentials',
|
||||
description: "running ${TEST_SUITE}-${TEST_PLATFORM}...",
|
||||
status: 'PENDING',
|
||||
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
|
||||
}
|
||||
}
|
||||
// Checkout the repo
|
||||
stage('checkout-scm') {
|
||||
cleanWs notFailBuild: true
|
||||
checkout scm
|
||||
}
|
||||
|
||||
// Setup the kitchen required bundle
|
||||
stage('setup-bundle') {
|
||||
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
|
||||
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
|
||||
}
|
||||
try {
|
||||
stage('run kitchen') {
|
||||
timeout(time: testrun_timeout, unit: 'HOURS') {
|
||||
withCredentials([
|
||||
[$class: 'AmazonWebServicesCredentialsBinding', accessKeyVariable: 'AWS_ACCESS_KEY_ID', credentialsId: 'AWS_ACCESS_KEY_ID', secretKeyVariable: 'AWS_SECRET_ACCESS_KEY']
|
||||
]) {
|
||||
sshagent(credentials: ['jenkins-testing-ssh-key']) {
|
||||
sh 'ssh-add ~/.ssh/jenkins-testing.pem'
|
||||
sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM || bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM'
|
||||
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM'
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} finally {
|
||||
stage('cleanup kitchen') {
|
||||
script {
|
||||
withCredentials([
|
||||
[$class: 'AmazonWebServicesCredentialsBinding', accessKeyVariable: 'AWS_ACCESS_KEY_ID', credentialsId: 'AWS_ACCESS_KEY_ID', secretKeyVariable: 'AWS_SECRET_ACCESS_KEY']
|
||||
]) {
|
||||
sshagent(credentials: ['jenkins-testing-ssh-key']) {
|
||||
sh 'ssh-add ~/.ssh/jenkins-testing.pem'
|
||||
sh 'bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM'
|
||||
}
|
||||
}
|
||||
}
|
||||
archiveArtifacts artifacts: 'artifacts/*,artifacts/**/*'
|
||||
}
|
||||
stage('report code coverage') {
|
||||
script {
|
||||
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
|
||||
sh '''
|
||||
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
|
||||
curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}"
|
||||
fi
|
||||
'''
|
||||
}
|
||||
}
|
||||
|
||||
stage('Create VM') {
|
||||
retry(3) {
|
||||
sh '''
|
||||
t=$(shuf -i 1-15 -n 1); echo "Sleeping $t seconds"; sleep $t
|
||||
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?;"
|
||||
'''
|
||||
}
|
||||
}
|
||||
} catch (Exception e) {
|
||||
currentBuild.result = 'FAILURE'
|
||||
} finally {
|
||||
try {
|
||||
junit 'artifacts/xml-unittests-output/*.xml'
|
||||
sshagent(credentials: ['jenkins-testing-ssh-key']) {
|
||||
sh 'ssh-add ~/.ssh/jenkins-testing.pem || ssh-add ~/.ssh/kitchen.pem'
|
||||
try {
|
||||
timeout(time: testrun_timeout, unit: 'HOURS') {
|
||||
stage('Converge VM') {
|
||||
sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM'
|
||||
}
|
||||
stage('Run Tests') {
|
||||
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
|
||||
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM'
|
||||
}
|
||||
}
|
||||
}
|
||||
} finally {
|
||||
try {
|
||||
stage('Download Artefacts') {
|
||||
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
|
||||
sh '''
|
||||
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
|
||||
'''
|
||||
}
|
||||
}
|
||||
archiveArtifacts artifacts: 'artifacts/*,artifacts/**/*'
|
||||
junit 'artifacts/xml-unittests-output/*.xml'
|
||||
} finally {
|
||||
stage('Cleanup') {
|
||||
sh '''
|
||||
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?;"
|
||||
'''
|
||||
}
|
||||
stage('Upload Coverage') {
|
||||
script {
|
||||
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
|
||||
sh '''
|
||||
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
|
||||
curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}"
|
||||
fi
|
||||
'''
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (Exception e) {
|
||||
currentBuild.result = 'FAILURE'
|
||||
} finally {
|
||||
cleanWs notFailBuild: true
|
||||
if (currentBuild.resultIsBetterOrEqualTo('SUCCESS')) {
|
||||
githubNotify credentialsId: 'test-jenkins-credentials',
|
||||
description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has passed",
|
||||
status: 'SUCCESS',
|
||||
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
|
||||
if (env.NODE_NAME.startsWith('jenkins-pr-')) {
|
||||
githubNotify credentialsId: 'test-jenkins-credentials',
|
||||
description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has passed",
|
||||
status: 'SUCCESS',
|
||||
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
|
||||
}
|
||||
} else {
|
||||
githubNotify credentialsId: 'test-jenkins-credentials',
|
||||
description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has failed",
|
||||
status: 'FAILURE',
|
||||
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
|
||||
if (env.NODE_NAME.startsWith('jenkins-pr-')) {
|
||||
githubNotify credentialsId: 'test-jenkins-credentials',
|
||||
description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has failed",
|
||||
status: 'FAILURE',
|
||||
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
|
||||
}
|
||||
try {
|
||||
slackSend channel: "#jenkins-prod-pr",
|
||||
color: '#FF0000',
|
||||
|
@@ -125,4 +149,4 @@ timeout(time: global_timeout, unit: 'HOURS') {
|
|||
}
|
||||
}
|
||||
|
||||
// vi: ft=groovy
|
||||
// vim: ft=groovy
|
||||
|
|
|
@@ -4,111 +4,135 @@ def testrun_timeout = 6
|
|||
// hour to allow for artifacts to be downloaded, if possible.
|
||||
def global_timeout = testrun_timeout + 1;
|
||||
|
||||
def distro_name = 'ubuntu'
|
||||
def distro_version = '1804'
|
||||
def python_version = 'py2'
|
||||
def salt_target_branch = '2018.3'
|
||||
def golden_images_branch = '2018.3'
|
||||
|
||||
properties([
|
||||
[
|
||||
$class: 'ScannerJobProperty', doNotScan: false
|
||||
],
|
||||
[
|
||||
$class: 'RebuildSettings', autoRebuild: false, rebuildDisabled: false
|
||||
],
|
||||
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '10')),
|
||||
parameters([
|
||||
booleanParam(defaultValue: false, description: 'Run full test suite', name: 'runFull')
|
||||
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
|
||||
])
|
||||
])
|
||||
|
||||
timeout(time: global_timeout, unit: 'HOURS') {
|
||||
node('kitchen-slave') {
|
||||
timestamps {
|
||||
withCredentials([[$class: 'AmazonWebServicesCredentialsBinding',
|
||||
accessKeyVariable: 'AWS_ACCESS_KEY_ID',
|
||||
credentialsId: 'AWS_ACCESS_KEY_ID',
|
||||
secretKeyVariable: 'AWS_SECRET_ACCESS_KEY']]) {
|
||||
ansiColor('xterm') {
|
||||
withEnv([
|
||||
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
|
||||
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
|
||||
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
|
||||
'NOX_ENV_NAME=runtests-zeromq',
|
||||
'NOX_PASSTHROUGH_OPTS=--ssh-tests',
|
||||
'NOX_ENABLE_FROM_FILENAMES=true',
|
||||
'GOLDEN_IMAGES_CI_BRANCH=2018.3',
|
||||
'CODECOV_FLAGS=ubuntu1804,py2',
|
||||
'PATH=/usr/local/rbenv/shims/:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/root/bin:/root/bin',
|
||||
'RBENV_VERSION=2.4.2',
|
||||
'TEST_SUITE=py2',
|
||||
'TEST_PLATFORM=ubuntu-1804',
|
||||
'PY_COLORS=1',
|
||||
"FORCE_FULL=${params.runFull}",
|
||||
]) {
|
||||
// Set the GH status even before cloning the repo
|
||||
stage('github-pending') {
|
||||
githubNotify credentialsId: 'test-jenkins-credentials',
|
||||
description: "running ${TEST_SUITE}-${TEST_PLATFORM}...",
|
||||
status: 'PENDING',
|
||||
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
|
||||
}
|
||||
// Checkout the repo
|
||||
stage('checkout-scm') {
|
||||
cleanWs notFailBuild: true
|
||||
checkout scm
|
||||
}
|
||||
try {
|
||||
timestamps {
|
||||
withEnv([
|
||||
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
|
||||
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
|
||||
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
|
||||
'NOX_ENV_NAME=runtests-zeromq',
|
||||
'NOX_ENABLE_FROM_FILENAMES=true',
|
||||
'NOX_PASSTHROUGH_OPTS=--ssh-tests',
|
||||
"SALT_TARGET_BRANCH=${salt_target_branch}",
|
||||
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
|
||||
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version}",
|
||||
'PATH=~/.rbenv/shims:/usr/local/rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/root/bin:/root/bin',
|
||||
'RBENV_VERSION=2.4.2',
|
||||
"TEST_SUITE=${python_version}",
|
||||
"TEST_PLATFORM=${distro_name}-${distro_version}",
|
||||
"FORCE_FULL=${params.runFull}",
|
||||
]) {
|
||||
// Set the GH status even before cloning the repo
|
||||
if (env.NODE_NAME.startsWith('jenkins-pr-')) {
|
||||
stage('github-pending') {
|
||||
githubNotify credentialsId: 'test-jenkins-credentials',
|
||||
description: "running ${TEST_SUITE}-${TEST_PLATFORM}...",
|
||||
status: 'PENDING',
|
||||
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
|
||||
}
|
||||
}
|
||||
// Checkout the repo
|
||||
stage('checkout-scm') {
|
||||
cleanWs notFailBuild: true
|
||||
checkout scm
|
||||
}
|
||||
|
||||
// Setup the kitchen required bundle
|
||||
stage('setup-bundle') {
|
||||
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
|
||||
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
|
||||
}
|
||||
try {
|
||||
stage('run kitchen') {
|
||||
timeout(time: testrun_timeout, unit: 'HOURS') {
|
||||
withCredentials([
|
||||
[$class: 'AmazonWebServicesCredentialsBinding', accessKeyVariable: 'AWS_ACCESS_KEY_ID', credentialsId: 'AWS_ACCESS_KEY_ID', secretKeyVariable: 'AWS_SECRET_ACCESS_KEY']
|
||||
]) {
|
||||
sshagent(credentials: ['jenkins-testing-ssh-key']) {
|
||||
sh 'ssh-add ~/.ssh/jenkins-testing.pem'
|
||||
sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM || bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM'
|
||||
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM'
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} finally {
|
||||
stage('cleanup kitchen') {
|
||||
script {
|
||||
withCredentials([
|
||||
[$class: 'AmazonWebServicesCredentialsBinding', accessKeyVariable: 'AWS_ACCESS_KEY_ID', credentialsId: 'AWS_ACCESS_KEY_ID', secretKeyVariable: 'AWS_SECRET_ACCESS_KEY']
|
||||
]) {
|
||||
sshagent(credentials: ['jenkins-testing-ssh-key']) {
|
||||
sh 'ssh-add ~/.ssh/jenkins-testing.pem'
|
||||
sh 'bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM'
|
||||
}
|
||||
}
|
||||
}
|
||||
archiveArtifacts artifacts: 'artifacts/*,artifacts/**/*'
|
||||
}
|
||||
stage('report code coverage') {
|
||||
script {
|
||||
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
|
||||
sh '''
|
||||
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
|
||||
curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}"
|
||||
fi
|
||||
'''
|
||||
}
|
||||
}
|
||||
|
||||
stage('Create VM') {
|
||||
retry(3) {
|
||||
sh '''
|
||||
t=$(shuf -i 1-15 -n 1); echo "Sleeping $t seconds"; sleep $t
|
||||
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?;"
|
||||
'''
|
||||
}
|
||||
}
|
||||
} catch (Exception e) {
|
||||
currentBuild.result = 'FAILURE'
|
||||
} finally {
|
||||
try {
|
||||
junit 'artifacts/xml-unittests-output/*.xml'
|
||||
sshagent(credentials: ['jenkins-testing-ssh-key']) {
|
||||
sh 'ssh-add ~/.ssh/jenkins-testing.pem || ssh-add ~/.ssh/kitchen.pem'
|
||||
try {
|
||||
timeout(time: testrun_timeout, unit: 'HOURS') {
|
||||
stage('Converge VM') {
|
||||
sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM'
|
||||
}
|
||||
stage('Run Tests') {
|
||||
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
|
||||
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM'
|
||||
}
|
||||
}
|
||||
}
|
||||
} finally {
|
||||
try {
|
||||
stage('Download Artefacts') {
|
||||
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
|
||||
sh '''
|
||||
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
|
||||
'''
|
||||
}
|
||||
}
|
||||
archiveArtifacts artifacts: 'artifacts/*,artifacts/**/*'
|
||||
junit 'artifacts/xml-unittests-output/*.xml'
|
||||
} finally {
|
||||
stage('Cleanup') {
|
||||
sh '''
|
||||
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?;"
|
||||
'''
|
||||
}
|
||||
stage('Upload Coverage') {
|
||||
script {
|
||||
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
|
||||
sh '''
|
||||
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
|
||||
curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}"
|
||||
fi
|
||||
'''
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (Exception e) {
|
||||
currentBuild.result = 'FAILURE'
|
||||
} finally {
|
||||
cleanWs notFailBuild: true
|
||||
if (currentBuild.resultIsBetterOrEqualTo('SUCCESS')) {
|
||||
githubNotify credentialsId: 'test-jenkins-credentials',
|
||||
description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has passed",
|
||||
status: 'SUCCESS',
|
||||
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
|
||||
if (env.NODE_NAME.startsWith('jenkins-pr-')) {
|
||||
githubNotify credentialsId: 'test-jenkins-credentials',
|
||||
description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has passed",
|
||||
status: 'SUCCESS',
|
||||
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
|
||||
}
|
||||
} else {
|
||||
githubNotify credentialsId: 'test-jenkins-credentials',
|
||||
description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has failed",
|
||||
status: 'FAILURE',
|
||||
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
|
||||
if (env.NODE_NAME.startsWith('jenkins-pr-')) {
|
||||
githubNotify credentialsId: 'test-jenkins-credentials',
|
||||
description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has failed",
|
||||
status: 'FAILURE',
|
||||
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
|
||||
}
|
||||
try {
|
||||
slackSend channel: "#jenkins-prod-pr",
|
||||
color: '#FF0000',
|
||||
|
@@ -125,4 +149,4 @@ timeout(time: global_timeout, unit: 'HOURS') {
|
|||
}
|
||||
}
|
||||
|
||||
// vi: ft=groovy
|
||||
// vim: ft=groovy
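All of the kitchen Jenkinsfiles touched in this commit share the same scripted-pipeline skeleton: an outer build timeout, a labelled node, timestamps and ANSI colour, a withEnv block for the job matrix, and a try/catch/finally that records the result and drives the GitHub status. The following is only a minimal Groovy sketch of that nesting; the node label, environment values and stage bodies are illustrative, not the exact values from the repo.

// Minimal sketch of the shared scripted-pipeline skeleton (names are illustrative).
def sketch_timeout = 7

timeout(time: sketch_timeout, unit: 'HOURS') {
    node('kitchen-slave') {
        timestamps {
            ansiColor('xterm') {
                withEnv(['TEST_SUITE=py3', 'TEST_PLATFORM=example-1804']) {
                    try {
                        stage('checkout-scm') {
                            cleanWs notFailBuild: true
                            checkout scm
                        }
                        stage('run kitchen') {
                            sh 'echo "kitchen create/converge/verify would run here"'
                        }
                    } catch (Exception e) {
                        // Record the failure but fall through so cleanup and notifications still run
                        currentBuild.result = 'FAILURE'
                    } finally {
                        cleanWs notFailBuild: true
                        // githubNotify / slackSend are issued here based on currentBuild's result
                    }
                }
            }
        }
    }
}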
@@ -4,111 +4,135 @@ def testrun_timeout = 6
|
|||
// hour to allow for artifacts to be downloaded, if possible.
|
||||
def global_timeout = testrun_timeout + 1;
|
||||
|
||||
def distro_name = 'ubuntu'
|
||||
def distro_version = '1804'
|
||||
def python_version = 'py3'
|
||||
def salt_target_branch = '2018.3'
|
||||
def golden_images_branch = '2018.3'
|
||||
|
||||
properties([
|
||||
[
|
||||
$class: 'ScannerJobProperty', doNotScan: false
|
||||
],
|
||||
[
|
||||
$class: 'RebuildSettings', autoRebuild: false, rebuildDisabled: false
|
||||
],
|
||||
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '10')),
|
||||
parameters([
|
||||
booleanParam(defaultValue: false, description: 'Run full test suite', name: 'runFull')
|
||||
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
|
||||
])
|
||||
])
|
||||
|
||||
timeout(time: global_timeout, unit: 'HOURS') {
|
||||
node('kitchen-slave') {
|
||||
timestamps {
|
||||
withCredentials([[$class: 'AmazonWebServicesCredentialsBinding',
|
||||
accessKeyVariable: 'AWS_ACCESS_KEY_ID',
|
||||
credentialsId: 'AWS_ACCESS_KEY_ID',
|
||||
secretKeyVariable: 'AWS_SECRET_ACCESS_KEY']]) {
|
||||
ansiColor('xterm') {
|
||||
withEnv([
|
||||
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
|
||||
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
|
||||
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
|
||||
'NOX_ENV_NAME=runtests-zeromq',
|
||||
'NOX_PASSTHROUGH_OPTS=--ssh-tests',
|
||||
'NOX_ENABLE_FROM_FILENAMES=true',
|
||||
'GOLDEN_IMAGES_CI_BRANCH=2018.3',
|
||||
'CODECOV_FLAGS=ubuntu1804,py3',
|
||||
'PATH=/usr/local/rbenv/shims/:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/root/bin:/root/bin',
|
||||
'RBENV_VERSION=2.4.2',
|
||||
'TEST_SUITE=py3',
|
||||
'TEST_PLATFORM=ubuntu-1804',
|
||||
'PY_COLORS=1',
|
||||
"FORCE_FULL=${params.runFull}",
|
||||
]) {
|
||||
// Set the GH status even before cloning the repo
|
||||
stage('github-pending') {
|
||||
githubNotify credentialsId: 'test-jenkins-credentials',
|
||||
description: "running ${TEST_SUITE}-${TEST_PLATFORM}...",
|
||||
status: 'PENDING',
|
||||
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
|
||||
}
|
||||
// Checkout the repo
|
||||
stage('checkout-scm') {
|
||||
cleanWs notFailBuild: true
|
||||
checkout scm
|
||||
}
|
||||
try {
|
||||
timestamps {
|
||||
withEnv([
|
||||
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
|
||||
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
|
||||
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
|
||||
'NOX_ENV_NAME=runtests-zeromq',
|
||||
'NOX_ENABLE_FROM_FILENAMES=true',
|
||||
'NOX_PASSTHROUGH_OPTS=--ssh-tests',
|
||||
"SALT_TARGET_BRANCH=${salt_target_branch}",
|
||||
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
|
||||
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version}",
|
||||
'PATH=~/.rbenv/shims:/usr/local/rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/root/bin:/root/bin',
|
||||
'RBENV_VERSION=2.4.2',
|
||||
"TEST_SUITE=${python_version}",
|
||||
"TEST_PLATFORM=${distro_name}-${distro_version}",
|
||||
"FORCE_FULL=${params.runFull}",
|
||||
]) {
|
||||
// Set the GH status even before cloning the repo
|
||||
if (env.NODE_NAME.startsWith('jenkins-pr-')) {
|
||||
stage('github-pending') {
|
||||
githubNotify credentialsId: 'test-jenkins-credentials',
|
||||
description: "running ${TEST_SUITE}-${TEST_PLATFORM}...",
|
||||
status: 'PENDING',
|
||||
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
|
||||
}
|
||||
}
|
||||
// Checkout the repo
|
||||
stage('checkout-scm') {
|
||||
cleanWs notFailBuild: true
|
||||
checkout scm
|
||||
}
|
||||
|
||||
// Setup the kitchen required bundle
|
||||
stage('setup-bundle') {
|
||||
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
|
||||
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
|
||||
}
|
||||
try {
|
||||
stage('run kitchen') {
|
||||
timeout(time: testrun_timeout, unit: 'HOURS') {
|
||||
withCredentials([
|
||||
[$class: 'AmazonWebServicesCredentialsBinding', accessKeyVariable: 'AWS_ACCESS_KEY_ID', credentialsId: 'AWS_ACCESS_KEY_ID', secretKeyVariable: 'AWS_SECRET_ACCESS_KEY']
|
||||
]) {
|
||||
sshagent(credentials: ['jenkins-testing-ssh-key']) {
|
||||
sh 'ssh-add ~/.ssh/jenkins-testing.pem'
|
||||
sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM || bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM'
|
||||
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM'
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} finally {
|
||||
stage('cleanup kitchen') {
|
||||
script {
|
||||
withCredentials([
|
||||
[$class: 'AmazonWebServicesCredentialsBinding', accessKeyVariable: 'AWS_ACCESS_KEY_ID', credentialsId: 'AWS_ACCESS_KEY_ID', secretKeyVariable: 'AWS_SECRET_ACCESS_KEY']
|
||||
]) {
|
||||
sshagent(credentials: ['jenkins-testing-ssh-key']) {
|
||||
sh 'ssh-add ~/.ssh/jenkins-testing.pem'
|
||||
sh 'bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM'
|
||||
}
|
||||
}
|
||||
}
|
||||
archiveArtifacts artifacts: 'artifacts/*,artifacts/**/*'
|
||||
}
|
||||
stage('report code coverage') {
|
||||
script {
|
||||
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
|
||||
sh '''
|
||||
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
|
||||
curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}"
|
||||
fi
|
||||
'''
|
||||
}
|
||||
}
|
||||
|
||||
stage('Create VM') {
|
||||
retry(3) {
|
||||
sh '''
|
||||
t=$(shuf -i 1-15 -n 1); echo "Sleeping $t seconds"; sleep $t
|
||||
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?;"
|
||||
'''
|
||||
}
|
||||
}
|
||||
} catch (Exception e) {
|
||||
currentBuild.result = 'FAILURE'
|
||||
} finally {
|
||||
try {
|
||||
junit 'artifacts/xml-unittests-output/*.xml'
|
||||
sshagent(credentials: ['jenkins-testing-ssh-key']) {
|
||||
sh 'ssh-add ~/.ssh/jenkins-testing.pem || ssh-add ~/.ssh/kitchen.pem'
|
||||
try {
|
||||
timeout(time: testrun_timeout, unit: 'HOURS') {
|
||||
stage('Converge VM') {
|
||||
sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM'
|
||||
}
|
||||
stage('Run Tests') {
|
||||
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
|
||||
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM'
|
||||
}
|
||||
}
|
||||
}
|
||||
} finally {
|
||||
try {
|
||||
stage('Download Artefacts') {
|
||||
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
|
||||
sh '''
|
||||
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
|
||||
'''
|
||||
}
|
||||
}
|
||||
archiveArtifacts artifacts: 'artifacts/*,artifacts/**/*'
|
||||
junit 'artifacts/xml-unittests-output/*.xml'
|
||||
} finally {
|
||||
stage('Cleanup') {
|
||||
sh '''
|
||||
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?;"
|
||||
'''
|
||||
}
|
||||
stage('Upload Coverage') {
|
||||
script {
|
||||
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
|
||||
sh '''
|
||||
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
|
||||
curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}"
|
||||
fi
|
||||
'''
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (Exception e) {
|
||||
currentBuild.result = 'FAILURE'
|
||||
} finally {
|
||||
cleanWs notFailBuild: true
|
||||
if (currentBuild.resultIsBetterOrEqualTo('SUCCESS')) {
|
||||
githubNotify credentialsId: 'test-jenkins-credentials',
|
||||
description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has passed",
|
||||
status: 'SUCCESS',
|
||||
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
|
||||
if (env.NODE_NAME.startsWith('jenkins-pr-')) {
|
||||
githubNotify credentialsId: 'test-jenkins-credentials',
|
||||
description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has passed",
|
||||
status: 'SUCCESS',
|
||||
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
|
||||
}
|
||||
} else {
|
||||
githubNotify credentialsId: 'test-jenkins-credentials',
|
||||
description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has failed",
|
||||
status: 'FAILURE',
|
||||
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
|
||||
if (env.NODE_NAME.startsWith('jenkins-pr-')) {
|
||||
githubNotify credentialsId: 'test-jenkins-credentials',
|
||||
description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has failed",
|
||||
status: 'FAILURE',
|
||||
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
|
||||
}
|
||||
try {
|
||||
slackSend channel: "#jenkins-prod-pr",
|
||||
color: '#FF0000',
|
||||
|
@@ -125,4 +149,4 @@ timeout(time: global_timeout, unit: 'HOURS') {
|
|||
}
|
||||
}
|
||||
|
||||
// vi: ft=groovy
|
||||
// vim: ft=groovy
@@ -4,110 +4,135 @@ def testrun_timeout = 8
|
|||
// hour to allow for artifacts to be downloaded, if possible.
|
||||
def global_timeout = testrun_timeout + 1;
|
||||
|
||||
def distro_name = 'windows'
|
||||
def distro_version = '2016'
|
||||
def python_version = 'py2'
|
||||
def salt_target_branch = '2018.3'
|
||||
def golden_images_branch = '2018.3'
|
||||
|
||||
properties([
|
||||
[
|
||||
$class: 'ScannerJobProperty', doNotScan: false
|
||||
],
|
||||
[
|
||||
$class: 'RebuildSettings', autoRebuild: false, rebuildDisabled: false
|
||||
],
|
||||
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '10')),
|
||||
parameters([
|
||||
booleanParam(defaultValue: false, description: 'Run full test suite', name: 'runFull')
|
||||
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
|
||||
])
|
||||
])
|
||||
|
||||
timeout(time: global_timeout, unit: 'HOURS') {
|
||||
node('kitchen-slave') {
|
||||
timestamps {
|
||||
withCredentials([[$class: 'AmazonWebServicesCredentialsBinding',
|
||||
accessKeyVariable: 'AWS_ACCESS_KEY_ID',
|
||||
credentialsId: 'AWS_ACCESS_KEY_ID',
|
||||
secretKeyVariable: 'AWS_SECRET_ACCESS_KEY']]) {
|
||||
ansiColor('xterm') {
|
||||
withEnv([
|
||||
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
|
||||
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-win-verifier.yml',
|
||||
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
|
||||
'NOX_ENV_NAME=runtests-zeromq',
|
||||
'NOX_ENABLE_FROM_FILENAMES=true',
|
||||
'GOLDEN_IMAGES_CI_BRANCH=2018.3',
|
||||
'CODECOV_FLAGS=windows2016,py2',
|
||||
'PATH=/usr/local/rbenv/shims/:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/root/bin:/root/bin',
|
||||
'RBENV_VERSION=2.4.2',
|
||||
'TEST_SUITE=py2',
|
||||
'TEST_PLATFORM=windows-2016',
|
||||
'PY_COLORS=1',
|
||||
"FORCE_FULL=${params.runFull}",
|
||||
]) {
|
||||
// Set the GH status even before cloning the repo
|
||||
stage('github-pending') {
|
||||
githubNotify credentialsId: 'test-jenkins-credentials',
|
||||
description: "running ${TEST_SUITE}-${TEST_PLATFORM}...",
|
||||
status: 'PENDING',
|
||||
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
|
||||
}
|
||||
// Checkout the repo
|
||||
stage('checkout-scm') {
|
||||
cleanWs notFailBuild: true
|
||||
checkout scm
|
||||
}
|
||||
try {
|
||||
timestamps {
|
||||
withEnv([
|
||||
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
|
||||
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
|
||||
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
|
||||
'NOX_ENV_NAME=runtests-zeromq',
|
||||
'NOX_ENABLE_FROM_FILENAMES=true',
|
||||
"NOX_PASSTHROUGH_OPTS=--unit",
|
||||
"SALT_TARGET_BRANCH=${salt_target_branch}",
|
||||
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
|
||||
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version}",
|
||||
'PATH=~/.rbenv/shims:/usr/local/rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/root/bin:/root/bin',
|
||||
'RBENV_VERSION=2.4.2',
|
||||
"TEST_SUITE=${python_version}",
|
||||
"TEST_PLATFORM=${distro_name}-${distro_version}",
|
||||
"FORCE_FULL=${params.runFull}",
|
||||
]) {
|
||||
// Set the GH status even before cloning the repo
|
||||
if (env.NODE_NAME.startsWith('jenkins-pr-')) {
|
||||
stage('github-pending') {
|
||||
githubNotify credentialsId: 'test-jenkins-credentials',
|
||||
description: "running ${TEST_SUITE}-${TEST_PLATFORM}...",
|
||||
status: 'PENDING',
|
||||
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
|
||||
}
|
||||
}
|
||||
// Checkout the repo
|
||||
stage('checkout-scm') {
|
||||
cleanWs notFailBuild: true
|
||||
checkout scm
|
||||
}
|
||||
|
||||
// Setup the kitchen required bundle
|
||||
stage('setup-bundle') {
|
||||
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
|
||||
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
|
||||
}
|
||||
try {
|
||||
stage('run kitchen') {
|
||||
timeout(time: testrun_timeout, unit: 'HOURS') {
|
||||
withCredentials([
|
||||
[$class: 'AmazonWebServicesCredentialsBinding', accessKeyVariable: 'AWS_ACCESS_KEY_ID', credentialsId: 'AWS_ACCESS_KEY_ID', secretKeyVariable: 'AWS_SECRET_ACCESS_KEY']
|
||||
]) {
|
||||
sshagent(credentials: ['jenkins-testing-ssh-key']) {
|
||||
sh 'ssh-add ~/.ssh/jenkins-testing.pem'
|
||||
sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM || bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM'
|
||||
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM'
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} finally {
|
||||
stage('cleanup kitchen') {
|
||||
script {
|
||||
withCredentials([
|
||||
[$class: 'AmazonWebServicesCredentialsBinding', accessKeyVariable: 'AWS_ACCESS_KEY_ID', credentialsId: 'AWS_ACCESS_KEY_ID', secretKeyVariable: 'AWS_SECRET_ACCESS_KEY']
|
||||
]) {
|
||||
sshagent(credentials: ['jenkins-testing-ssh-key']) {
|
||||
sh 'ssh-add ~/.ssh/jenkins-testing.pem'
|
||||
sh 'bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM'
|
||||
}
|
||||
}
|
||||
}
|
||||
archiveArtifacts artifacts: 'artifacts/*,artifacts/**/*'
|
||||
}
|
||||
stage('report code coverage') {
|
||||
script {
|
||||
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
|
||||
sh '''
|
||||
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
|
||||
curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}"
|
||||
fi
|
||||
'''
|
||||
}
|
||||
}
|
||||
|
||||
stage('Create VM') {
|
||||
retry(3) {
|
||||
sh '''
|
||||
t=$(shuf -i 1-15 -n 1); echo "Sleeping $t seconds"; sleep $t
|
||||
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?;"
|
||||
'''
|
||||
}
|
||||
}
|
||||
} catch (Exception e) {
|
||||
currentBuild.result = 'FAILURE'
|
||||
} finally {
|
||||
try {
|
||||
junit 'artifacts/xml-unittests-output/*.xml'
|
||||
sshagent(credentials: ['jenkins-testing-ssh-key']) {
|
||||
sh 'ssh-add ~/.ssh/jenkins-testing.pem || ssh-add ~/.ssh/kitchen.pem'
|
||||
try {
|
||||
timeout(time: testrun_timeout, unit: 'HOURS') {
|
||||
stage('Converge VM') {
|
||||
sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM'
|
||||
}
|
||||
stage('Run Tests') {
|
||||
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
|
||||
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM'
|
||||
}
|
||||
}
|
||||
}
|
||||
} finally {
|
||||
try {
|
||||
stage('Download Artefacts') {
|
||||
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
|
||||
sh '''
|
||||
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
|
||||
'''
|
||||
}
|
||||
}
|
||||
archiveArtifacts artifacts: 'artifacts/*,artifacts/**/*'
|
||||
junit 'artifacts/xml-unittests-output/*.xml'
|
||||
} finally {
|
||||
stage('Cleanup') {
|
||||
sh '''
|
||||
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?;"
|
||||
'''
|
||||
}
|
||||
stage('Upload Coverage') {
|
||||
script {
|
||||
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
|
||||
sh '''
|
||||
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
|
||||
curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}"
|
||||
fi
|
||||
'''
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (Exception e) {
|
||||
currentBuild.result = 'FAILURE'
|
||||
} finally {
|
||||
cleanWs notFailBuild: true
|
||||
if (currentBuild.resultIsBetterOrEqualTo('SUCCESS')) {
|
||||
githubNotify credentialsId: 'test-jenkins-credentials',
|
||||
description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has passed",
|
||||
status: 'SUCCESS',
|
||||
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
|
||||
if (env.NODE_NAME.startsWith('jenkins-pr-')) {
|
||||
githubNotify credentialsId: 'test-jenkins-credentials',
|
||||
description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has passed",
|
||||
status: 'SUCCESS',
|
||||
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
|
||||
}
|
||||
} else {
|
||||
githubNotify credentialsId: 'test-jenkins-credentials',
|
||||
description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has failed",
|
||||
status: 'FAILURE',
|
||||
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
|
||||
if (env.NODE_NAME.startsWith('jenkins-pr-')) {
|
||||
githubNotify credentialsId: 'test-jenkins-credentials',
|
||||
description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has failed",
|
||||
status: 'FAILURE',
|
||||
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
|
||||
}
|
||||
try {
|
||||
slackSend channel: "#jenkins-prod-pr",
|
||||
color: '#FF0000',
|
||||
|
@@ -124,4 +149,4 @@ timeout(time: global_timeout, unit: 'HOURS') {
|
|||
}
|
||||
}
|
||||
|
||||
// vi: ft=groovy
|
||||
// vim: ft=groovy
@@ -4,110 +4,135 @@ def testrun_timeout = 8
|
|||
// hour to allow for artifacts to be downloaded, if possible.
|
||||
def global_timeout = testrun_timeout + 1;
|
||||
|
||||
def distro_name = 'windows'
|
||||
def distro_version = '2016'
|
||||
def python_version = 'py3'
|
||||
def salt_target_branch = '2018.3'
|
||||
def golden_images_branch = '2018.3'
|
||||
|
||||
properties([
|
||||
[
|
||||
$class: 'ScannerJobProperty', doNotScan: false
|
||||
],
|
||||
[
|
||||
$class: 'RebuildSettings', autoRebuild: false, rebuildDisabled: false
|
||||
],
|
||||
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '10')),
|
||||
parameters([
|
||||
booleanParam(defaultValue: false, description: 'Run full test suite', name: 'runFull')
|
||||
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
|
||||
])
|
||||
])
|
||||
|
||||
timeout(time: global_timeout, unit: 'HOURS') {
|
||||
node('kitchen-slave') {
|
||||
timestamps {
|
||||
withCredentials([[$class: 'AmazonWebServicesCredentialsBinding',
|
||||
accessKeyVariable: 'AWS_ACCESS_KEY_ID',
|
||||
credentialsId: 'AWS_ACCESS_KEY_ID',
|
||||
secretKeyVariable: 'AWS_SECRET_ACCESS_KEY']]) {
|
||||
ansiColor('xterm') {
|
||||
withEnv([
|
||||
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
|
||||
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-win-verifier.yml',
|
||||
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
|
||||
'NOX_ENV_NAME=runtests-zeromq',
|
||||
'NOX_ENABLE_FROM_FILENAMES=true',
|
||||
'GOLDEN_IMAGES_CI_BRANCH=2018.3',
|
||||
'CODECOV_FLAGS=windows2016,py3',
|
||||
'PATH=/usr/local/rbenv/shims/:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/root/bin:/root/bin',
|
||||
'RBENV_VERSION=2.4.2',
|
||||
'TEST_SUITE=py3',
|
||||
'TEST_PLATFORM=windows-2016',
|
||||
'PY_COLORS=1',
|
||||
"FORCE_FULL=${params.runFull}",
|
||||
]) {
|
||||
// Set the GH status even before cloning the repo
|
||||
stage('github-pending') {
|
||||
githubNotify credentialsId: 'test-jenkins-credentials',
|
||||
description: "running ${TEST_SUITE}-${TEST_PLATFORM}...",
|
||||
status: 'PENDING',
|
||||
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
|
||||
}
|
||||
// Checkout the repo
|
||||
stage('checkout-scm') {
|
||||
cleanWs notFailBuild: true
|
||||
checkout scm
|
||||
}
|
||||
try {
|
||||
timestamps {
|
||||
withEnv([
|
||||
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
|
||||
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
|
||||
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
|
||||
'NOX_ENV_NAME=runtests-zeromq',
|
||||
'NOX_ENABLE_FROM_FILENAMES=true',
|
||||
"NOX_PASSTHROUGH_OPTS=--unit",
|
||||
"SALT_TARGET_BRANCH=${salt_target_branch}",
|
||||
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
|
||||
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version}",
|
||||
'PATH=~/.rbenv/shims:/usr/local/rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/root/bin:/root/bin',
|
||||
'RBENV_VERSION=2.4.2',
|
||||
"TEST_SUITE=${python_version}",
|
||||
"TEST_PLATFORM=${distro_name}-${distro_version}",
|
||||
"FORCE_FULL=${params.runFull}",
|
||||
]) {
|
||||
// Set the GH status even before cloning the repo
|
||||
if (env.NODE_NAME.startsWith('jenkins-pr-')) {
|
||||
stage('github-pending') {
|
||||
githubNotify credentialsId: 'test-jenkins-credentials',
|
||||
description: "running ${TEST_SUITE}-${TEST_PLATFORM}...",
|
||||
status: 'PENDING',
|
||||
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
|
||||
}
|
||||
}
|
||||
// Checkout the repo
|
||||
stage('checkout-scm') {
|
||||
cleanWs notFailBuild: true
|
||||
checkout scm
|
||||
}
|
||||
|
||||
// Setup the kitchen required bundle
|
||||
stage('setup-bundle') {
|
||||
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
|
||||
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
|
||||
}
|
||||
try {
|
||||
stage('run kitchen') {
|
||||
timeout(time: testrun_timeout, unit: 'HOURS') {
|
||||
withCredentials([
|
||||
[$class: 'AmazonWebServicesCredentialsBinding', accessKeyVariable: 'AWS_ACCESS_KEY_ID', credentialsId: 'AWS_ACCESS_KEY_ID', secretKeyVariable: 'AWS_SECRET_ACCESS_KEY']
|
||||
]) {
|
||||
sshagent(credentials: ['jenkins-testing-ssh-key']) {
|
||||
sh 'ssh-add ~/.ssh/jenkins-testing.pem'
|
||||
sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM || bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM'
|
||||
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM'
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} finally {
|
||||
stage('cleanup kitchen') {
|
||||
script {
|
||||
withCredentials([
|
||||
[$class: 'AmazonWebServicesCredentialsBinding', accessKeyVariable: 'AWS_ACCESS_KEY_ID', credentialsId: 'AWS_ACCESS_KEY_ID', secretKeyVariable: 'AWS_SECRET_ACCESS_KEY']
|
||||
]) {
|
||||
sshagent(credentials: ['jenkins-testing-ssh-key']) {
|
||||
sh 'ssh-add ~/.ssh/jenkins-testing.pem'
|
||||
sh 'bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM'
|
||||
}
|
||||
}
|
||||
}
|
||||
archiveArtifacts artifacts: 'artifacts/*,artifacts/**/*'
|
||||
}
|
||||
stage('report code coverage') {
|
||||
script {
|
||||
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
|
||||
sh '''
|
||||
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
|
||||
curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}"
|
||||
fi
|
||||
'''
|
||||
}
|
||||
}
|
||||
|
||||
stage('Create VM') {
|
||||
retry(3) {
|
||||
sh '''
|
||||
t=$(shuf -i 1-15 -n 1); echo "Sleeping $t seconds"; sleep $t
|
||||
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?;"
|
||||
'''
|
||||
}
|
||||
}
|
||||
} catch (Exception e) {
|
||||
currentBuild.result = 'FAILURE'
|
||||
} finally {
|
||||
try {
|
||||
junit 'artifacts/xml-unittests-output/*.xml'
|
||||
sshagent(credentials: ['jenkins-testing-ssh-key']) {
|
||||
sh 'ssh-add ~/.ssh/jenkins-testing.pem || ssh-add ~/.ssh/kitchen.pem'
|
||||
try {
|
||||
timeout(time: testrun_timeout, unit: 'HOURS') {
|
||||
stage('Converge VM') {
|
||||
sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM'
|
||||
}
|
||||
stage('Run Tests') {
|
||||
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
|
||||
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM'
|
||||
}
|
||||
}
|
||||
}
|
||||
} finally {
|
||||
try {
|
||||
stage('Download Artefacts') {
|
||||
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
|
||||
sh '''
|
||||
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
|
||||
'''
|
||||
}
|
||||
}
|
||||
archiveArtifacts artifacts: 'artifacts/*,artifacts/**/*'
|
||||
junit 'artifacts/xml-unittests-output/*.xml'
|
||||
} finally {
|
||||
stage('Cleanup') {
|
||||
sh '''
|
||||
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?;"
|
||||
'''
|
||||
}
|
||||
stage('Upload Coverage') {
|
||||
script {
|
||||
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
|
||||
sh '''
|
||||
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
|
||||
curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}"
|
||||
fi
|
||||
'''
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (Exception e) {
|
||||
currentBuild.result = 'FAILURE'
|
||||
} finally {
|
||||
cleanWs notFailBuild: true
|
||||
if (currentBuild.resultIsBetterOrEqualTo('SUCCESS')) {
|
||||
githubNotify credentialsId: 'test-jenkins-credentials',
|
||||
description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has passed",
|
||||
status: 'SUCCESS',
|
||||
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
|
||||
if (env.NODE_NAME.startsWith('jenkins-pr-')) {
|
||||
githubNotify credentialsId: 'test-jenkins-credentials',
|
||||
description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has passed",
|
||||
status: 'SUCCESS',
|
||||
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
|
||||
}
|
||||
} else {
|
||||
githubNotify credentialsId: 'test-jenkins-credentials',
|
||||
description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has failed",
|
||||
status: 'FAILURE',
|
||||
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
|
||||
if (env.NODE_NAME.startsWith('jenkins-pr-')) {
|
||||
githubNotify credentialsId: 'test-jenkins-credentials',
|
||||
description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has failed",
|
||||
status: 'FAILURE',
|
||||
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
|
||||
}
|
||||
try {
|
||||
slackSend channel: "#jenkins-prod-pr",
|
||||
color: '#FF0000',
|
||||
|
@@ -124,4 +149,4 @@ timeout(time: global_timeout, unit: 'HOURS') {
|
|||
}
|
||||
}
|
||||
|
||||
// vi: ft=groovy
|
||||
// vim: ft=groovy
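Another pattern the kitchen jobs above have in common is the VM lifecycle: create with retries and a short random sleep (presumably to stagger simultaneous cloud API calls), converge and run the tests inside the per-run timeout, then always collect artifacts and destroy the instance in the finally blocks. Below is a condensed, illustrative sketch of that ordering, reusing the kitchen commands from the diff; it is a fragment that would sit inside the node/withEnv context shown after the first Jenkinsfile, and the timeout value is illustrative.

// Condensed sketch of the create / test / collect / destroy ordering.
stage('Create VM') {
    retry(3) {
        // The random sleep spreads out concurrent "kitchen create" calls across parallel jobs
        sh 't=$(shuf -i 1-15 -n 1); sleep $t; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM'
    }
}
try {
    timeout(time: 6, unit: 'HOURS') {
        sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM'
        withEnv(['DONT_DOWNLOAD_ARTEFACTS=1']) {
            sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM'
        }
    }
} finally {
    withEnv(['ONLY_DOWNLOAD_ARTEFACTS=1']) {
        // A second verify pass only pulls the artifacts back from the instance
        sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0'
    }
    archiveArtifacts artifacts: 'artifacts/*,artifacts/**/*'
    junit 'artifacts/xml-unittests-output/*.xml'
    sh 'bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM'
}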
362
.ci/lint
|
@@ -1,175 +1,205 @@
|
|||
pipeline {
|
||||
agent { label 'pr-lint-slave' }
|
||||
options {
|
||||
timestamps()
|
||||
ansiColor('xterm')
|
||||
timeout(time: 3, unit: 'HOURS')
|
||||
}
|
||||
environment {
|
||||
PYENV_ROOT = "/usr/local/pyenv"
|
||||
PATH = "$PYENV_ROOT/bin:$PATH"
|
||||
PY_COLORS = 1
|
||||
}
|
||||
stages {
|
||||
stage('github-pending') {
|
||||
steps {
|
||||
githubNotify credentialsId: 'test-jenkins-credentials',
|
||||
description: 'Python lint on changes begins...',
|
||||
status: 'PENDING',
|
||||
context: "jenkins/pr/lint"
|
||||
}
|
||||
}
|
||||
stage('setup') {
|
||||
steps {
|
||||
sh '''
|
||||
# Need -M to detect renames, otherwise they are reported as Delete and Add; need -C to detect copies (-C includes -M)
|
||||
# -M is on by default in git 2.9+
|
||||
git diff --name-status -l99999 -C "origin/$CHANGE_TARGET" > file-list-status.log
|
||||
# the -l increases the search limit; let's use awk so we do not need to repeat the search above.
|
||||
gawk 'BEGIN {FS="\\t"} {if ($1 != "D") {print $NF}}' file-list-status.log > file-list-changed.log
|
||||
gawk 'BEGIN {FS="\\t"} {if ($1 == "D") {print $NF}}' file-list-status.log > file-list-deleted.log
|
||||
(git diff --name-status -l99999 -C "origin/$CHANGE_TARGET" "origin/$BRANCH_NAME";echo "---";git diff --name-status -l99999 -C "origin/$BRANCH_NAME";printenv|grep -E '=[0-9a-z]{40,}+$|COMMIT=|BRANCH') > file-list-experiment.log
|
||||
eval "$(pyenv init -)"
|
||||
pyenv --version
|
||||
pyenv install --skip-existing 2.7.15
|
||||
pyenv shell 2.7.15
|
||||
python --version
|
||||
pip install -U https://github.com/s0undt3ch/nox/archive/hotfix/py2.zip#egg=Nox==2018.10.17
|
||||
nox --version
|
||||
# Create the required virtualenvs in serial
|
||||
nox --install-only -e lint-salt
|
||||
nox --install-only -e lint-tests
|
||||
'''
|
||||
archiveArtifacts artifacts: 'file-list-status.log,file-list-changed.log,file-list-deleted.log,file-list-experiment.log'
|
||||
}
|
||||
}
|
||||
stage('linting chg') {
|
||||
parallel {
|
||||
stage('lint salt chg') {
|
||||
when {
|
||||
expression { return readFile('file-list-changed.log') =~ /(?i)(^|\n)(salt\/.*\.py|setup\.py)\n/ }
|
||||
// Define the maximum time, in hours, that a test run should run for
|
||||
def global_timeout = 3
|
||||
def salt_target_branch = '2018.3'
|
||||
|
||||
properties([
|
||||
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '10')),
|
||||
])
|
||||
|
||||
def shell_header
|
||||
|
||||
timeout(time: global_timeout, unit: 'HOURS') {
|
||||
node('lint') {
|
||||
ansiColor('xterm') {
|
||||
timestamps {
|
||||
try {
|
||||
// Set the GH status even before cloning the repo
|
||||
if (env.NODE_NAME.startsWith('jenkins-pr-')) {
|
||||
stage('github-pending') {
|
||||
githubNotify credentialsId: 'test-jenkins-credentials',
|
||||
description: 'Python lint begins...',
|
||||
status: 'PENDING',
|
||||
context: "jenkins/pr/lint"
|
||||
}
|
||||
shell_header = 'export PYENV_ROOT="/usr/local/pyenv"\nexport PATH="$PYENV_ROOT/bin:$PATH"'
|
||||
} else {
|
||||
shell_header = ''
|
||||
}
|
||||
steps {
|
||||
sh '''
|
||||
eval "$(pyenv init - --no-rehash)"
|
||||
pyenv shell 2.7.15
|
||||
EC=254
|
||||
export PYLINT_REPORT=pylint-report-salt-chg.log
|
||||
grep -Ei '^salt/.*\\.py$|^setup\\.py$' file-list-changed.log | xargs -r '--delimiter=\\n' nox -e lint-salt --
|
||||
EC=$?
|
||||
exit $EC
|
||||
'''
|
||||
|
||||
withEnv(["SALT_TARGET_BRANCH=${salt_target_branch}"]) {
|
||||
// Checkout the repo
|
||||
stage('checkout-scm') {
|
||||
cleanWs notFailBuild: true
|
||||
checkout scm
|
||||
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
|
||||
}
|
||||
|
||||
// Setup the kitchen required bundle
|
||||
stage('Setup') {
|
||||
sh shell_header + '''
|
||||
# Need -M to detect renames, otherwise they are reported as Delete and Add; need -C to detect copies (-C includes -M)
|
||||
# -M is on by default in git 2.9+
|
||||
git diff --name-status -l99999 -C "origin/${SALT_TARGET_BRANCH}" > file-list-status.log
|
||||
# the -l increases the search limit; let's use awk so we do not need to repeat the search above.
|
||||
gawk 'BEGIN {FS="\\t"} {if ($1 != "D") {print $NF}}' file-list-status.log > file-list-changed.log
|
||||
gawk 'BEGIN {FS="\\t"} {if ($1 == "D") {print $NF}}' file-list-status.log > file-list-deleted.log
|
||||
(git diff --name-status -l99999 -C "origin/${SALT_TARGET_BRANCH}" "origin/$BRANCH_NAME";echo "---";git diff --name-status -l99999 -C "origin/$BRANCH_NAME";printenv|grep -E '=[0-9a-z]{40,}+$|COMMIT=|BRANCH') > file-list-experiment.log
|
||||
eval "$(pyenv init -)"
|
||||
pyenv --version
|
||||
pyenv install --skip-existing 2.7.15
|
||||
pyenv shell 2.7.15
|
||||
python --version
|
||||
pip install -U nox-py2
|
||||
nox --version
|
||||
# Create the required virtualenvs in serial
|
||||
nox --install-only -e lint-salt
|
||||
nox --install-only -e lint-tests
|
||||
'''
|
||||
}
|
||||
archiveArtifacts artifacts: 'file-list-status.log,file-list-changed.log,file-list-deleted.log,file-list-experiment.log'
|
||||
}
|
||||
}
|
||||
stage('lint test chg') {
|
||||
when {
|
||||
expression { return readFile('file-list-changed.log') =~ /(?i)(^|\n)tests\/.*\.py\n/ }
|
||||
|
||||
stage('Lint Changes') {
|
||||
try {
|
||||
parallel(
|
||||
lintSalt: {
|
||||
stage('Lint Salt Changes') {
|
||||
if (readFile('file-list-changed.log') =~ /(?i)(^|\n)(salt\/.*\.py|setup\.py)\n/) {
|
||||
sh shell_header + '''
|
||||
eval "$(pyenv init - --no-rehash)"
|
||||
pyenv shell 2.7.15
|
||||
EC=254
|
||||
export PYLINT_REPORT=pylint-report-salt-chg.log
|
||||
grep -Ei '^salt/.*\\.py$|^setup\\.py$' file-list-changed.log | xargs -r '--delimiter=\\n' nox -e lint-salt --
|
||||
EC=$?
|
||||
exit $EC
|
||||
'''
|
||||
}
|
||||
}
|
||||
},
|
||||
lintTests: {
|
||||
stage('Lint Test Changes') {
|
||||
if (readFile('file-list-changed.log') =~ /(?i)(^|\n)tests\/.*\.py\n/) {
|
||||
sh shell_header + '''
|
||||
eval "$(pyenv init - --no-rehash)"
|
||||
pyenv shell 2.7.15
|
||||
EC=254
|
||||
export PYLINT_REPORT=pylint-report-tests-chg.log
|
||||
grep -Ei '^tests/.*\\.py$' file-list-changed.log | xargs -r '--delimiter=\\n' nox -e lint-tests --
|
||||
EC=$?
|
||||
exit $EC
|
||||
'''
|
||||
}
|
||||
}
|
||||
}
|
||||
)
|
||||
} finally {
|
||||
def changed_logs_pattern = 'pylint-report-*-chg.log'
|
||||
archiveArtifacts artifacts: changed_logs_pattern, allowEmptyArchive: true
|
||||
if (env.NODE_NAME.startsWith('jenkins-pr-')) {
|
||||
step([$class: 'WarningsPublisher',
|
||||
parserConfigurations: [[
|
||||
parserName: 'PyLint',
|
||||
pattern: changed_logs_pattern
|
||||
]],
|
||||
failedTotalAll: '0',
|
||||
useDeltaValues: false,
|
||||
canRunOnFailed: true,
|
||||
usePreviousBuildAsReference: true
|
||||
])
|
||||
} else {
|
||||
recordIssues(enabledForFailure: true, tool: pyLint(pattern: changed_logs_pattern, reportEncoding: 'UTF-8'))
|
||||
}
|
||||
}
|
||||
}
|
||||
steps {
|
||||
sh '''
|
||||
eval "$(pyenv init - --no-rehash)"
|
||||
pyenv shell 2.7.15
|
||||
EC=254
|
||||
export PYLINT_REPORT=pylint-report-tests-chg.log
|
||||
grep -Ei '^tests/.*\\.py$' file-list-changed.log | xargs -r '--delimiter=\\n' nox -e lint-tests --
|
||||
EC=$?
|
||||
exit $EC
|
||||
'''
|
||||
stage('Lint Full') {
|
||||
if (env.CHANGE_BRANCH =~ /(?i)^merge[._-]/) {
|
||||
// perform a full lint if this is a merge forward and the change-only lint passed.
|
||||
try {
|
||||
if (env.NODE_NAME.startsWith('jenkins-pr-')) {
|
||||
githubNotify credentialsId: 'test-jenkins-credentials',
|
||||
description: 'Python lint on everything begins...',
|
||||
status: 'PENDING',
|
||||
context: "jenkins/pr/lint"
|
||||
}
|
||||
parallel(
|
||||
lintSaltFull: {
|
||||
stage('Lint Salt Full') {
|
||||
sh shell_header + '''
|
||||
eval "$(pyenv init - --no-rehash)"
|
||||
pyenv shell 2.7.15
|
||||
EC=254
|
||||
export PYLINT_REPORT=pylint-report-salt-full.log
|
||||
nox -e lint-salt
|
||||
EC=$?
|
||||
exit $EC
|
||||
'''
|
||||
}
|
||||
},
|
||||
lintTestsFull: {
|
||||
stage('Lint Tests Full') {
|
||||
sh shell_header + '''
|
||||
eval "$(pyenv init - --no-rehash)"
|
||||
pyenv shell 2.7.15
|
||||
EC=254
|
||||
export PYLINT_REPORT=pylint-report-tests-full.log
|
||||
nox -e lint-tests
|
||||
EC=$?
|
||||
exit $EC
|
||||
'''
|
||||
}
|
||||
}
|
||||
)
|
||||
} finally {
|
||||
def full_logs_pattern = 'pylint-report-*-full.log'
|
||||
archiveArtifacts artifacts: full_logs_pattern, allowEmptyArchive: true
|
||||
if (env.NODE_NAME.startsWith('jenkins-pr-')) {
|
||||
step([$class: 'WarningsPublisher',
|
||||
parserConfigurations: [[
|
||||
parserName: 'PyLint',
|
||||
pattern: full_logs_pattern
|
||||
]],
|
||||
failedTotalAll: '0',
|
||||
useDeltaValues: false,
|
||||
canRunOnFailed: true,
|
||||
usePreviousBuildAsReference: true
|
||||
])
|
||||
} else {
|
||||
recordIssues(enabledForFailure: true, tool: pyLint(pattern: full_logs_pattern, reportEncoding: 'UTF-8'))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (Exception e) {
|
||||
currentBuild.result = 'FAILURE'
|
||||
} finally {
|
||||
cleanWs notFailBuild: true
|
||||
if (currentBuild.resultIsBetterOrEqualTo('SUCCESS')) {
|
||||
if (env.NODE_NAME.startsWith('jenkins-pr-')) {
|
||||
githubNotify credentialsId: 'test-jenkins-credentials',
|
||||
description: 'The lint test passed',
|
||||
status: 'SUCCESS',
|
||||
context: "jenkins/pr/lint"
|
||||
}
|
||||
} else {
|
||||
if (env.NODE_NAME.startsWith('jenkins-pr-')) {
|
||||
githubNotify credentialsId: 'test-jenkins-credentials',
|
||||
description: 'The lint test failed',
|
||||
status: 'FAILURE',
|
||||
context: "jenkins/pr/lint"
|
||||
}
|
||||
try {
|
||||
slackSend channel: "#jenkins-prod-pr",
|
||||
color: '#FF0000',
|
||||
message: "FAILED: PR-Job: '${env.JOB_NAME} [${env.BUILD_NUMBER}]' (${env.BUILD_URL})"
|
||||
} catch (Exception e) {
|
||||
sh 'echo Failed to send the Slack notification'
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
post {
|
||||
always {
|
||||
archiveArtifacts artifacts: 'pylint-report-*-chg.log', allowEmptyArchive: true
|
||||
step([$class: 'WarningsPublisher',
|
||||
parserConfigurations: [[
|
||||
parserName: 'PyLint',
|
||||
pattern: 'pylint-report-*-chg.log'
|
||||
]],
|
||||
failedTotalAll: '0',
|
||||
useDeltaValues: false,
|
||||
canRunOnFailed: true,
|
||||
usePreviousBuildAsReference: true
|
||||
])
|
||||
}
|
||||
}
|
||||
}
|
||||
stage('linting all') {
|
||||
// perform a full lint if this is a merge forward and the change-only lint passed.
|
||||
when {
|
||||
expression { return env.CHANGE_BRANCH =~ /(?i)^merge[._-]/ }
|
||||
}
|
||||
parallel {
|
||||
stage('setup full') {
|
||||
steps {
|
||||
githubNotify credentialsId: 'test-jenkins-credentials',
|
||||
description: 'Python lint on everything begins...',
|
||||
status: 'PENDING',
|
||||
context: "jenkins/pr/lint"
|
||||
}
|
||||
}
|
||||
stage('lint salt full') {
|
||||
steps {
|
||||
sh '''
|
||||
eval "$(pyenv init - --no-rehash)"
|
||||
pyenv shell 2.7.15
|
||||
EC=254
|
||||
export PYLINT_REPORT=pylint-report-salt-full.log
|
||||
nox -e lint-salt
|
||||
EC=$?
|
||||
exit $EC
|
||||
'''
|
||||
}
|
||||
}
|
||||
stage('lint test full') {
|
||||
steps {
|
||||
sh '''
|
||||
eval "$(pyenv init - --no-rehash)"
|
||||
pyenv shell 2.7.15
|
||||
EC=254
|
||||
export PYLINT_REPORT=pylint-report-tests-full.log
|
||||
nox -e lint-tests
|
||||
EC=$?
|
||||
exit $EC
|
||||
'''
|
||||
}
|
||||
}
|
||||
}
|
||||
post {
|
||||
always {
|
||||
archiveArtifacts artifacts: 'pylint-report-*-full.log', allowEmptyArchive: true
|
||||
step([$class: 'WarningsPublisher',
|
||||
parserConfigurations: [[
|
||||
parserName: 'PyLint',
|
||||
pattern: 'pylint-report-*-full.log'
|
||||
]],
|
||||
failedTotalAll: '0',
|
||||
useDeltaValues: false,
|
||||
canRunOnFailed: true,
|
||||
usePreviousBuildAsReference: true
|
||||
])
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
post {
|
||||
always {
|
||||
cleanWs()
|
||||
}
|
||||
success {
|
||||
githubNotify credentialsId: 'test-jenkins-credentials',
|
||||
description: 'Python lint test has passed',
|
||||
status: 'SUCCESS',
|
||||
context: "jenkins/pr/lint"
|
||||
}
|
||||
failure {
|
||||
githubNotify credentialsId: 'test-jenkins-credentials',
|
||||
description: 'Python lint test has failed',
|
||||
status: 'FAILURE',
|
||||
context: "jenkins/pr/lint"
|
||||
slackSend channel: "#jenkins-prod-pr",
|
||||
color: '#FF0000',
|
||||
message: "FAILED: PR-Job: '${env.JOB_NAME} [${env.BUILD_NUMBER}]' (${env.BUILD_URL})"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// vim: ft=groovy
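The lint job's file lists come from git diff --name-status with copy/rename detection: each output line is a status letter (A, M, D, Rnnn, Cnnn) followed by one or two tab-separated paths, so the gawk filters test $1 and print $NF to get the surviving path. Below is a condensed sketch of how those lists are built and then fed to nox, wrapped in a Groovy sh step for context; the target branch is hard-coded here purely for illustration.

// Sketch of the changed-file detection feeding the lint sessions (branch name is illustrative).
node('lint') {
    sh '''
    # -C detects copies and renames (e.g. R100 old -> new); the destination path is always the last field
    git diff --name-status -l99999 -C "origin/2018.3" > file-list-status.log
    # Paths that still exist after the change set
    gawk 'BEGIN {FS="\\t"} {if ($1 != "D") {print $NF}}' file-list-status.log > file-list-changed.log
    # Paths that were deleted
    gawk 'BEGIN {FS="\\t"} {if ($1 == "D") {print $NF}}' file-list-status.log > file-list-deleted.log
    # Only lint the Python sources that actually changed
    grep -Ei '^salt/.*\\.py$|^setup\\.py$' file-list-changed.log | xargs -r '--delimiter=\\n' nox -e lint-salt --
    '''
}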
@@ -3,40 +3,6 @@ repos:
|
|||
- repo: https://github.com/saltstack/pip-tools-compile-impersonate
|
||||
rev: master
|
||||
hooks:
|
||||
- id: pip-tools-compile
|
||||
alias: compile-linux-py2.7-tcp-requirements
|
||||
name: Linux Py2.7 TCP Requirements
|
||||
files: ^requirements/((base|pytest)\.txt|static/(.*)\.in)$
|
||||
exclude: ^requirements/static/(lint|cloud|docs|osx|windows|)\.in$
|
||||
args:
|
||||
- -v
|
||||
- --py-version=2.7
|
||||
- --platform=linux
|
||||
- --out-prefix=tcp
|
||||
- --include=requirements/base.txt
|
||||
- --include=requirements/pytest.txt
|
||||
- id: pip-tools-compile
|
||||
alias: compile-osx-py2.7-tcp-requirements
|
||||
name: OSX Py2.7 TCP Requirements
|
||||
files: ^requirements/((base|pytest)\.txt|static/osx\.in)$
|
||||
args:
|
||||
- -v
|
||||
- --py-version=2.7
|
||||
- --platform=darwin
|
||||
- --out-prefix=tcp
|
||||
- --include=requirements/base.txt
|
||||
- --include=requirements/pytest.txt
|
||||
- id: pip-tools-compile
|
||||
alias: compile-windows-py2.7-tcp-requirements
|
||||
name: Windows Py2.7 TCP Requirements
|
||||
files: ^requirements/((base|pytest)\.txt|static/windows\.in)$
|
||||
args:
|
||||
- -v
|
||||
- --py-version=2.7
|
||||
- --platform=windows
|
||||
- --out-prefix=tcp
|
||||
- --include=requirements/base.txt
|
||||
- --include=requirements/pytest.txt
|
||||
- id: pip-tools-compile
|
||||
alias: compile-linux-py2.7-zmq-requirements
|
||||
name: Linux Py2.7 ZeroMQ Requirements
|
||||
|
@@ -54,12 +20,14 @@ repos:
|
|||
- id: pip-tools-compile
|
||||
alias: compile-osx-py2.7-zmq-requirements
|
||||
name: OSX Py2.7 ZeroMQ Requirements
|
||||
files: ^requirements/((base|zeromq|pytest)\.txt|static/osx\.in)$
|
||||
files: ^(pkg/osx/(req|req_ext)\.txt|requirements/((base|zeromq|pytest)\.txt|static/osx\.in))$
|
||||
args:
|
||||
- -v
|
||||
- --py-version=2.7
|
||||
- --platform=darwin
|
||||
- --out-prefix=zeromq
|
||||
- --include=pkg/osx/req.txt
|
||||
- --include=pkg/osx/req_ext.txt
|
||||
- --include=requirements/base.txt
|
||||
- --include=requirements/zeromq.txt
|
||||
- --include=requirements/pytest.txt
|
||||
|
@@ -67,12 +35,14 @@ repos:
|
|||
- id: pip-tools-compile
|
||||
alias: compile-windows-py2.7-zmq-requirements
|
||||
name: Windows Py2.7 ZeroMQ Requirements
|
||||
files: ^requirements/((base|zeromq|pytest)\.txt|static/windows\.in)$
|
||||
files: ^(pkg/windows/(req|req_win)\.txt|requirements/((base|zeromq|pytest)\.txt|static/windows\.in))$
|
||||
args:
|
||||
- -v
|
||||
- --py-version=2.7
|
||||
- --platform=windows
|
||||
- --out-prefix=zeromq
|
||||
- --include=pkg/windows/req.txt
|
||||
- --include=pkg/windows/req_win.txt
|
||||
- --include=requirements/base.txt
|
||||
- --include=requirements/zeromq.txt
|
||||
- --include=requirements/pytest.txt
|
||||
|
@@ -94,12 +64,14 @@ repos:
|
|||
- id: pip-tools-compile
|
||||
alias: compile-osx-py2.7-raet-requirements
|
||||
name: OSX Py2.7 RAET Requirements
|
||||
files: ^requirements/((base|raet|pytest)\.txt|static/osx\.in)$
|
||||
files: ^(pkg/osx/(req|req_ext)\.txt|requirements/((base|raet|pytest)\.txt|static/osx\.in))$
|
||||
args:
|
||||
- -v
|
||||
- --py-version=2.7
|
||||
- --out-prefix=raet
|
||||
- --platform=darwin
|
||||
- --include=pkg/osx/req.txt
|
||||
- --include=pkg/osx/req_ext.txt
|
||||
- --include=requirements/base.txt
|
||||
- --include=requirements/raet.txt
|
||||
- --include=requirements/pytest.txt
|
||||
|
@@ -107,12 +79,14 @@ repos:
|
|||
- id: pip-tools-compile
|
||||
alias: compile-windows-py2.7-raet-requirements
|
||||
name: Windows Py2.7 RAET Requirements
|
||||
files: ^requirements/((base|raet|pytest)\.txt|static/windows\.in)$
|
||||
files: ^(pkg/windows/(req|req_win)\.txt|requirements/((base|raet|pytest)\.txt|static/windows\.in))$
|
||||
args:
|
||||
- -v
|
||||
- --py-version=2.7
|
||||
- --out-prefix=raet
|
||||
- --platform=windows
|
||||
- --include=pkg/windows/req.txt
|
||||
- --include=pkg/windows/req_win.txt
|
||||
- --include=requirements/base.txt
|
||||
- --include=requirements/raet.txt
|
||||
- --include=requirements/pytest.txt
|
||||
|
@@ -136,23 +110,11 @@ repos:
|
|||
- --py-version=2.7
|
||||
|
||||
|
||||
- id: pip-tools-compile
|
||||
alias: compile-linux-py3.4-tcp-requirements
|
||||
name: Linux Py3.4 TCP Requirements
|
||||
files: ^requirements/((base|pytest)\.txt|static/(.*)\.in)$
|
||||
exclude: ^requirements/static/(centos-6|ubuntu-14.04|lint|cloud|docs|osx|windows)\.in$
|
||||
args:
|
||||
- -v
|
||||
- --py-version=3.4
|
||||
- --platform=linux
|
||||
- --out-prefix=tcp
|
||||
- --include=requirements/base.txt
|
||||
- --include=requirements/pytest.txt
|
||||
- id: pip-tools-compile
|
||||
alias: compile-linux-py3.4-zmq-requirements
|
||||
name: Linux Py3.4 ZeroMQ Requirements
|
||||
files: ^requirements/((base|zeromq|pytest)\.txt|static/(.*)\.in)$
|
||||
exclude: ^requirements/static/(centos-6|ubuntu-14.04|lint|cloud|docs|osx|windows)\.in$
|
||||
exclude: ^requirements/static/(centos-6|amzn-2018\.03|lint|cloud|docs|osx|windows)\.in$
|
||||
args:
|
||||
- -v
|
||||
- --py-version=3.4
|
||||
|
@@ -166,7 +128,7 @@ repos:
|
|||
alias: compile-linux-py3.4-raet-requirements
|
||||
name: Linux Py3.4 RAET Requirements
|
||||
files: ^requirements/((base|raet|pytest)\.txt|static/(.*)\.in)$
|
||||
exclude: ^requirements/static/(centos-6|ubuntu-14.04|lint|cloud|docs|osx|windows)\.in$
|
||||
exclude: ^requirements/static/(centos-6|amzn-2018\.03|lint|cloud|docs|osx|windows)\.in$
|
||||
args:
|
||||
- -v
|
||||
- --py-version=3.4
|
||||
|
@@ -186,45 +148,11 @@ repos:
|
|||
- -v
|
||||
- --py-version=3.4
|
||||
|
||||
- id: pip-tools-compile
|
||||
alias: compile-linux-py3.5-tcp-requirements
|
||||
name: Linux Py3.5 TCP Requirements
|
||||
files: ^requirements/((base|pytest)\.txt|static/(.*)\.in)$
|
||||
exclude: ^requirements/static/(centos-6|ubuntu-14.04|lint|cloud|docs|osx|windows)\.in$
|
||||
args:
|
||||
- -v
|
||||
- --py-version=3.5
|
||||
- --platform=linux
|
||||
- --out-prefix=tcp
|
||||
- --include=requirements/base.txt
|
||||
- --include=requirements/pytest.txt
|
||||
- id: pip-tools-compile
|
||||
alias: compile-osx-py3.5-tcp-requirements
|
||||
name: OSX Py3.5 TCP Requirements
|
||||
files: ^requirements/((base|pytest)\.txt|static/osx\.in)$
|
||||
args:
|
||||
- -v
|
||||
- --py-version=3.5
|
||||
- --platform=darwin
|
||||
- --out-prefix=tcp
|
||||
- --include=requirements/base.txt
|
||||
- --include=requirements/pytest.txt
|
||||
- id: pip-tools-compile
|
||||
alias: compile-windows-py3.5-tcp-requirements
|
||||
name: Windows Py3.5 TCP Requirements
|
||||
files: ^requirements/((base|pytest)\.txt|static/windows\.in)$
|
||||
args:
|
||||
- -v
|
||||
- --py-version=3.5
|
||||
- --platform=windows
|
||||
- --out-prefix=tcp
|
||||
- --include=requirements/base.txt
|
||||
- --include=requirements/pytest.txt
|
||||
- id: pip-tools-compile
|
||||
alias: compile-linux-py3.5-zmq-requirements
|
||||
name: Linux Py3.5 ZeroMQ Requirements
|
||||
files: ^requirements/((base|zeromq|pytest)\.txt|static/(.*)\.in)$
|
||||
exclude: ^requirements/static/(centos-6|ubuntu-14.04|lint|cloud|docs|osx|windows)\.in$
|
||||
exclude: ^requirements/static/(centos-6|amzn-2018\.03|lint|cloud|docs|osx|windows)\.in$
|
||||
args:
|
||||
- -v
|
||||
- --py-version=3.5
|
||||
|
@@ -237,12 +165,14 @@ repos:
|
|||
- id: pip-tools-compile
|
||||
alias: compile-osx-py3.5-zmq-requirements
|
||||
name: OSX Py3.5 ZeroMQ Requirements
|
||||
files: ^requirements/((base|zeromq|pytest)\.txt|static/osx\.in)$
|
||||
files: ^(pkg/osx/(req|req_ext)\.txt|requirements/((base|zeromq|pytest)\.txt|static/osx\.in))$
|
||||
args:
|
||||
- -v
|
||||
- --py-version=3.5
|
||||
- --platform=darwin
|
||||
- --out-prefix=zeromq
|
||||
- --include=pkg/osx/req.txt
|
||||
- --include=pkg/osx/req_ext.txt
|
||||
- --include=requirements/base.txt
|
||||
- --include=requirements/zeromq.txt
|
||||
- --include=requirements/pytest.txt
|
||||
|
@@ -250,12 +180,14 @@ repos:
|
|||
- id: pip-tools-compile
|
||||
alias: compile-windows-py3.5-zmq-requirements
|
||||
name: Windows Py3.5 ZeroMQ Requirements
|
||||
files: ^requirements/((base|zeromq|pytest)\.txt|static/windows\.in)$
|
||||
files: ^(pkg/windows/(req|req_win)\.txt|requirements/((base|zeromq|pytest)\.txt|static/windows\.in))$
|
||||
args:
|
||||
- -v
|
||||
- --py-version=3.5
|
||||
- --platform=windows
|
||||
- --out-prefix=zeromq
|
||||
- --include=pkg/windows/req.txt
|
||||
- --include=pkg/windows/req_win.txt
|
||||
- --include=requirements/base.txt
|
||||
- --include=requirements/zeromq.txt
|
||||
- --include=requirements/pytest.txt
|
||||
|
@@ -264,7 +196,7 @@ repos:
|
|||
alias: compile-linux-py3.5-raet-requirements
|
||||
name: Linux Py3.5 RAET Requirements
|
||||
files: ^requirements/((base|raet|pytest)\.txt|static/(.*)\.in)$
|
||||
exclude: ^requirements/static/(centos-6|ubuntu-14.04|lint|cloud|docs|osx|windows)\.in$
|
||||
exclude: ^requirements/static/(centos-6|amzn-2018\.03|lint|cloud|docs|osx|windows)\.in$
|
||||
args:
|
||||
- -v
|
||||
- --py-version=3.5
|
||||
|
@@ -278,12 +210,14 @@ repos:
|
|||
- id: pip-tools-compile
|
||||
alias: compile-osx-py3.5-raet-requirements
|
||||
name: OSX Py3.5 RAET Requirements
|
||||
files: ^requirements/((base|zeromq|pytest)\.txt|static/osx\.in)$
|
||||
files: ^(pkg/osx/(req|req_ext)\.txt|requirements/((base|raet|pytest)\.txt|static/osx\.in))$
|
||||
args:
|
||||
- -v
|
||||
- --py-version=3.5
|
||||
- --platform=darwin
|
||||
- --out-prefix=raet
|
||||
- --include=pkg/osx/req.txt
|
||||
- --include=pkg/osx/req_ext.txt
|
||||
- --include=requirements/base.txt
|
||||
- --include=requirements/raet.txt
|
||||
- --include=requirements/pytest.txt
|
||||
|
@@ -292,12 +226,14 @@ repos:
|
|||
- id: pip-tools-compile
|
||||
alias: compile-windows-py3.5-raet-requirements
|
||||
name: Windows Py3.5 RAET Requirements
|
||||
files: ^requirements/((base|zeromq|pytest)\.txt|static/windows\.in)$
|
||||
files: ^(pkg/windows/(req|req_win)\.txt|requirements/((base|raet|pytest)\.txt|static/windows\.in))$
|
||||
args:
|
||||
- -v
|
||||
- --py-version=3.5
|
||||
- --platform=windows
|
||||
- --out-prefix=raet
|
||||
- --include=pkg/windows/req.txt
|
||||
- --include=pkg/windows/req_win.txt
|
||||
- --include=requirements/base.txt
|
||||
- --include=requirements/raet.txt
|
||||
- --include=requirements/pytest.txt
|
||||
|
@@ -313,33 +249,19 @@ repos:
|
|||
- --py-version=3.5
|
||||
|
||||
- id: pip-tools-compile
|
||||
alias: compile-linux-py3.6-tcp-requirements
|
||||
name: Linux Py3.6 TCP Requirements
|
||||
files: ^requirements/((base|pytest)\.txt|static/(.*)\.in)$
|
||||
exclude: ^requirements/static/(centos-6|ubuntu-14.04|lint|cloud|docs|osx|windows)\.in$
|
||||
alias: compile-doc-requirements
|
||||
name: Docs Py3.5 Requirements
|
||||
files: ^requirements/((base|zeromq|pytest)\.txt|static/docs\.in)$
|
||||
args:
|
||||
- -v
|
||||
- --py-version=3.6
|
||||
- --py-version=3.5
|
||||
- --platform=linux
|
||||
- --out-prefix=tcp
|
||||
- --include=requirements/base.txt
|
||||
- --include=requirements/pytest.txt
|
||||
- id: pip-tools-compile
|
||||
alias: compile-osx-py3.6-tcp-requirements
|
||||
name: OSX Py3.6 TCP Requirements
|
||||
files: ^requirements/((base|pytest)\.txt|static/osx\.in)$
|
||||
args:
|
||||
- -v
|
||||
- --py-version=3.6
|
||||
- --platform=darwin
|
||||
- --out-prefix=tcp
|
||||
- --include=requirements/base.txt
|
||||
- --include=requirements/pytest.txt
|
||||
|
||||
- id: pip-tools-compile
|
||||
alias: compile-linux-py3.6-zmq-requirements
|
||||
name: Linux Py3.6 ZeroMQ Requirements
|
||||
files: ^requirements/((base|zeromq|pytest)\.txt|static/(.*)\.in)$
|
||||
exclude: ^requirements/static/(centos-6|ubuntu-14.04|lint|cloud|docs|osx|windows)\.in$
|
||||
exclude: ^requirements/static/(centos-6|amzn-2018\.03|lint|cloud|docs|osx|windows)\.in$
|
||||
args:
|
||||
- -v
|
||||
- --py-version=3.6
|
||||
|
@@ -352,12 +274,14 @@ repos:
|
|||
- id: pip-tools-compile
|
||||
alias: compile-osx-py3.6-zmq-requirements
|
||||
name: OSX Py3.6 ZeroMQ Requirements
|
||||
files: ^requirements/((base|zeromq|pytest)\.txt|static/osx\.in)$
|
||||
files: ^(pkg/osx/(req|req_ext)\.txt|requirements/((base|zeromq|pytest)\.txt|static/osx\.in))$
|
||||
args:
|
||||
- -v
|
||||
- --py-version=3.6
|
||||
- --platform=darwin
|
||||
- --out-prefix=zeromq
|
||||
- --include=pkg/osx/req.txt
|
||||
- --include=pkg/osx/req_ext.txt
|
||||
- --include=requirements/base.txt
|
||||
- --include=requirements/zeromq.txt
|
||||
- --include=requirements/pytest.txt
|
||||
|
@@ -365,12 +289,14 @@ repos:
|
|||
- id: pip-tools-compile
|
||||
alias: compile-windows-py3.6-zmq-requirements
|
||||
name: Windows Py3.6 ZeroMQ Requirements
|
||||
files: ^requirements/((base|zeromq|pytest)\.txt|static/windows\.in)$
|
||||
files: ^(pkg/windows/(req|req_win)\.txt|requirements/((base|zeromq|pytest)\.txt|static/windows\.in))$
|
||||
args:
|
||||
- -v
|
||||
- --py-version=3.6
|
||||
- --platform=windows
|
||||
- --out-prefix=zeromq
|
||||
- --include=pkg/windows/req.txt
|
||||
- --include=pkg/windows/req_win.txt
|
||||
- --include=requirements/base.txt
|
||||
- --include=requirements/zeromq.txt
|
||||
- --include=requirements/pytest.txt
|
||||
|
@@ -379,7 +305,7 @@ repos:
|
|||
alias: compile-linux-py3.6-raet-requirements
|
||||
name: Linux Py3.6 RAET Requirements
|
||||
files: ^requirements/((base|raet|pytest)\.txt|static/(.*)\.in)$
|
||||
exclude: ^requirements/static/(centos-6|ubuntu-14.04|lint|cloud|docs|osx|windows)\.in$
|
||||
exclude: ^requirements/static/(centos-6|amzn-2018\.03|lint|cloud|docs|osx|windows)\.in$
|
||||
args:
|
||||
- -v
|
||||
- --py-version=3.6
|
||||
|
@@ -393,12 +319,14 @@ repos:
|
|||
- id: pip-tools-compile
|
||||
alias: compile-osx-py3.6-raet-requirements
|
||||
name: OSX Py3.6 RAET Requirements
|
||||
files: ^requirements/((base|zeromq|pytest)\.txt|static/osx\.in)$
|
||||
files: ^(pkg/osx/(req|req_ext)\.txt|requirements/((base|raet|pytest)\.txt|static/osx\.in))$
|
||||
args:
|
||||
- -v
|
||||
- --py-version=3.6
|
||||
- --platform=darwin
|
||||
- --out-prefix=raet
|
||||
- --include=pkg/osx/req.txt
|
||||
- --include=pkg/osx/req_ext.txt
|
||||
- --include=requirements/base.txt
|
||||
- --include=requirements/raet.txt
|
||||
- --include=requirements/pytest.txt
|
||||
|
@@ -407,12 +335,14 @@ repos:
|
|||
- id: pip-tools-compile
|
||||
alias: compile-windows-py3.6-raet-requirements
|
||||
name: Windows Py3.6 RAET Requirements
|
||||
files: ^requirements/((base|zeromq|pytest)\.txt|static/windows\.in)$
|
||||
files: ^(pkg/windows/(req|req_win)\.txt|requirements/((base|raet|pytest)\.txt|static/windows\.in))$
|
||||
args:
|
||||
- -v
|
||||
- --py-version=3.6
|
||||
- --platform=windows
|
||||
- --out-prefix=raet
|
||||
- --include=pkg/windows/req.txt
|
||||
- --include=pkg/windows/req_win.txt
|
||||
- --include=requirements/base.txt
|
||||
- --include=requirements/raet.txt
|
||||
- --include=requirements/pytest.txt
|
||||
|
@@ -436,34 +366,11 @@ repos:
    - --py-version=3.6
    - --platform=linux

- id: pip-tools-compile
  alias: compile-linux-py3.7-tcp-requirements
  name: Linux Py3.7 TCP Requirements
  files: ^requirements/((base|pytest)\.txt|static/(.*)\.in)$
  exclude: ^requirements/static/(centos-6|ubuntu-14.04|lint|cloud|docs|osx|windows)\.in$
  args:
    - -v
    - --py-version=3.7
    - --platform=linux
    - --out-prefix=tcp
    - --include=requirements/base.txt
    - --include=requirements/pytest.txt
- id: pip-tools-compile
  alias: compile-osx-py3.7-tcp-requirements
  name: OSX Py3.7 TCP Requirements
  files: ^requirements/((base|pytest)\.txt|static/osx\.in)$
  args:
    - -v
    - --py-version=3.7
    - --platform=darwin
    - --out-prefix=tcp
    - --include=requirements/base.txt
    - --include=requirements/pytest.txt
- id: pip-tools-compile
  alias: compile-linux-py3.7-zmq-requirements
  name: Linux Py3.7 ZeroMQ Requirements
  files: ^requirements/((base|zeromq|pytest)\.txt|static/(.*)\.in)$
  exclude: ^requirements/static/(centos-6|ubuntu-14.04|lint|cloud|docs|osx|windows)\.in$
  exclude: ^requirements/static/(centos-6|amzn-2018\.03|lint|cloud|docs|osx|windows)\.in$
  args:
    - -v
    - --py-version=3.7
@@ -476,12 +383,14 @@ repos:
- id: pip-tools-compile
  alias: compile-osx-py3.7-zmq-requirements
  name: OSX Py3.7 ZeroMQ Requirements
  files: ^requirements/((base|zeromq|pytest)\.txt|static/osx\.in)$
  files: ^(pkg/osx/(req|req_ext)\.txt|requirements/((base|zeromq|pytest)\.txt|static/osx\.in))$
  args:
    - -v
    - --py-version=3.7
    - --platform=darwin
    - --out-prefix=zeromq
    - --include=pkg/osx/req.txt
    - --include=pkg/osx/req_ext.txt
    - --include=requirements/base.txt
    - --include=requirements/zeromq.txt
    - --include=requirements/pytest.txt
@@ -489,12 +398,14 @@ repos:
- id: pip-tools-compile
  alias: compile-windows-py3.7-zmq-requirements
  name: Windows Py3.7 ZeroMQ Requirements
  files: ^requirements/((base|zeromq|pytest)\.txt|static/windows\.in)$
  files: ^(pkg/windows/(req|req_win)\.txt|requirements/((base|zeromq|pytest)\.txt|static/windows\.in))$
  args:
    - -v
    - --py-version=3.7
    - --platform=windows
    - --out-prefix=zeromq
    - --include=pkg/windows/req.txt
    - --include=pkg/windows/req_win.txt
    - --include=requirements/base.txt
    - --include=requirements/zeromq.txt
    - --include=requirements/pytest.txt
@@ -503,7 +414,7 @@ repos:
  alias: compile-linux-py3.7-raet-requirements
  name: Linux Py3.7 RAET Requirements
  files: ^requirements/((base|raet|pytest)\.txt|static/(.*)\.in)$
  exclude: ^requirements/static/(centos-6|ubuntu-14.04|lint|cloud|docs|osx|windows)\.in$
  exclude: ^requirements/static/(centos-6|amzn-2018\.03|lint|cloud|docs|osx|windows)\.in$
  args:
    - -v
    - --py-version=3.7
@@ -517,12 +428,14 @@ repos:
- id: pip-tools-compile
  alias: compile-osx-py3.7-raet-requirements
  name: OSX Py3.7 RAET Requirements
  files: ^requirements/((base|zeromq|pytest)\.txt|static/osx\.in)$
  files: ^(pkg/osx/(req|req_ext)\.txt|requirements/((base|raet|pytest)\.txt|static/osx\.in))$
  args:
    - -v
    - --py-version=3.7
    - --platform=darwin
    - --out-prefix=raet
    - --include=pkg/osx/req.txt
    - --include=pkg/osx/req_ext.txt
    - --include=requirements/base.txt
    - --include=requirements/raet.txt
    - --include=requirements/pytest.txt
@@ -531,12 +444,14 @@ repos:
- id: pip-tools-compile
  alias: compile-windows-py3.7-raet-requirements
  name: Windows Py3.7 RAET Requirements
  files: ^requirements/((base|zeromq|pytest)\.txt|static/windows\.in)$
  files: ^(pkg/windows/(req|req_win)\.txt|requirements/((base|raet|pytest)\.txt|static/windows\.in))$
  args:
    - -v
    - --py-version=3.7
    - --platform=windows
    - --out-prefix=raet
    - --include=pkg/windows/req.txt
    - --include=pkg/windows/req_win.txt
    - --include=requirements/base.txt
    - --include=requirements/raet.txt
    - --include=requirements/pytest.txt
@@ -550,3 +465,12 @@ repos:
  args:
    - -v
    - --py-version=3.7

- id: pip-tools-compile
  alias: compile-doc-requirements
  name: Docs Py3.7 Requirements
  files: ^requirements/((base|zeromq|pytest)\.txt|static/docs\.in)$
  args:
    - -v
    - --py-version=3.7
    - --platform=linux

@@ -26,7 +26,8 @@ load-plugins=saltpylint.pep8,
             saltpylint.smartup,
             saltpylint.minpyver,
             saltpylint.blacklist,
             saltpylint.thirdparty
             saltpylint.thirdparty,
             saltpylint.dunder_del

# Use multiple processes to speed up Pylint.
# Don't bump this values on PyLint 1.4.0 - Know bug that ignores the passed --rcfile

@@ -23,7 +23,8 @@ load-plugins=saltpylint.pep8,
             saltpylint.smartup,
             saltpylint.minpyver,
             saltpylint.blacklist,
             saltpylint.thirdparty
             saltpylint.thirdparty,
             saltpylint.dunder_del

# Use multiple processes to speed up Pylint.
# Don't bump this values on PyLint 1.4.0 - Know bug that ignores the passed --rcfile

@@ -99,8 +99,8 @@ Salt loader modules use a series of globally available dunder variables,
``__salt__``, ``__opts__``, ``__pillar__``, etc. To facilitate testing these
modules a mixin class was created, ``LoaderModuleMockMixin`` which can be found
in ``tests/support/mixins.py``. The reason for the existence of this class is
because historiclly and because it was easier, one would add these dunder
variables directly on the imported module. This however, introduces unexpected
because historically one would add these dunder
variables directly on the imported module. This, however, introduces unexpected
behavior when running the full test suite since those attributes would not be
removed once we were done testing the module and would therefore leak to other
modules being tested with unpredictable results. This is the kind of work that

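As a reading aid for the paragraph above, here is a minimal sketch of how ``LoaderModuleMockMixin`` is meant to be used so that the dunders are injected only for the duration of each test rather than set directly on the imported module. The module ``salt.modules.example`` and the dunder values are hypothetical placeholders; ``setup_loader_modules`` is the hook the mixin expects, but this is an illustrative sketch under those assumptions, not an excerpt from the test suite.

    # Hypothetical loader-module test using LoaderModuleMockMixin.
    # salt.modules.example and the pillar/opts data below are placeholders.
    from tests.support.mixins import LoaderModuleMockMixin
    from tests.support.unit import TestCase

    import salt.modules.example as example_module


    class ExampleModuleTestCase(TestCase, LoaderModuleMockMixin):

        def setup_loader_modules(self):
            # Dunders declared here are attached to the module only while a
            # test in this case runs, so they cannot leak into other modules.
            return {
                example_module: {
                    '__opts__': {'test': False},
                    '__pillar__': {'role': 'minion'},
                }
            }

        def test_something(self):
            # The injected dunders are visible on the module inside the test.
            self.assertFalse(example_module.__opts__['test'])
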
86
noxfile.py
@@ -25,6 +25,8 @@ if __name__ == '__main__':
import nox
from nox.command import CommandFailed

IS_PY3 = sys.version_info > (2,)

# Be verbose when runing under a CI context
PIP_INSTALL_SILENT = (os.environ.get('JENKINS_URL') or os.environ.get('CI') or os.environ.get('DRONE')) is None

@@ -55,15 +57,22 @@ def _get_session_python_version_info(session):
    try:
        version_info = session._runner._real_python_version_info
    except AttributeError:
        session_py_version = session.run(
            'python', '-c'
            'import sys; sys.stdout.write("{}.{}.{}".format(*sys.version_info))',
            silent=True,
            log=False,
            bypass_install_only=True
        )
        version_info = tuple(int(part) for part in session_py_version.split('.') if part.isdigit())
        session._runner._real_python_version_info = version_info
        old_install_only_value = session._runner.global_config.install_only
        try:
            # Force install only to be false for the following chunk of code
            # For additional information as to why see:
            # https://github.com/theacodes/nox/pull/181
            session._runner.global_config.install_only = False
            session_py_version = session.run(
                'python', '-c'
                'import sys; sys.stdout.write("{}.{}.{}".format(*sys.version_info))',
                silent=True,
                log=False,
            )
            version_info = tuple(int(part) for part in session_py_version.split('.') if part.isdigit())
            session._runner._real_python_version_info = version_info
        finally:
            session._runner.global_config.install_only = old_install_only_value
    return version_info


@@ -71,14 +80,21 @@ def _get_session_python_site_packages_dir(session):
    try:
        site_packages_dir = session._runner._site_packages_dir
    except AttributeError:
        site_packages_dir = session.run(
            'python', '-c'
            'import sys; from distutils.sysconfig import get_python_lib; sys.stdout.write(get_python_lib())',
            silent=True,
            log=False,
            bypass_install_only=True
        )
        session._runner._site_packages_dir = site_packages_dir
        old_install_only_value = session._runner.global_config.install_only
        try:
            # Force install only to be false for the following chunk of code
            # For additional information as to why see:
            # https://github.com/theacodes/nox/pull/181
            session._runner.global_config.install_only = False
            site_packages_dir = session.run(
                'python', '-c'
                'import sys; from distutils.sysconfig import get_python_lib; sys.stdout.write(get_python_lib())',
                silent=True,
                log=False,
            )
            session._runner._site_packages_dir = site_packages_dir
        finally:
            session._runner.global_config.install_only = old_install_only_value
    return site_packages_dir


@@ -94,11 +110,19 @@ def _get_distro_info(session):
        distro = session._runner._distro
    except AttributeError:
        # The distro package doesn't output anything for Windows
        session.install('--progress-bar=off', 'distro', silent=PIP_INSTALL_SILENT)
        output = session.run('distro', '-j', silent=True, bypass_install_only=True)
        distro = json.loads(output.strip())
        session.log('Distro information:\n%s', pprint.pformat(distro))
        session._runner._distro = distro
        old_install_only_value = session._runner.global_config.install_only
        try:
            # Force install only to be false for the following chunk of code
            # For additional information as to why see:
            # https://github.com/theacodes/nox/pull/181
            session._runner.global_config.install_only = False
            session.install('--progress-bar=off', 'distro', silent=PIP_INSTALL_SILENT)
            output = session.run('distro', '-j', silent=True)
            distro = json.loads(output.strip())
            session.log('Distro information:\n%s', pprint.pformat(distro))
            session._runner._distro = distro
        finally:
            session._runner.global_config.install_only = old_install_only_value
    return distro


@@ -837,8 +861,12 @@ def _lint(session, rcfile, flags, paths):
        raise
    finally:
        stdout.seek(0)
        contents = stdout.read().encode('utf-8')
        contents = stdout.read()
        if contents:
            if IS_PY3:
                contents = contents.decode('utf-8')
            else:
                contents = contents.encode('utf-8')
            sys.stdout.write(contents)
            sys.stdout.flush()
            if pylint_report_path:
@@ -888,14 +916,20 @@ def lint_tests(session):
    _lint(session, '.testing.pylintrc', flags, paths)


@nox.session(python='3.6')
@nox.session(python='3')
def docs(session):
    '''
    Build Salt's Documentation
    '''
    session.install('--progress-bar=off', '-r', 'requirements/static/py3.6/docs.txt', silent=PIP_INSTALL_SILENT)
    pydir = _get_pydir(session)
    if pydir == 'py3.4':
        session.error('Sphinx only runs on Python >= 3.5')
    session.install(
        '--progress-bar=off',
        '-r', 'requirements/static/{}/docs.txt'.format(pydir),
        silent=PIP_INSTALL_SILENT)
    os.chdir('doc/')
    session.run('make', 'clean', external=True)
    session.run('make', 'html', external=True)
    session.run('make', 'html', 'SPHINXOPTS=-W', external=True)
    session.run('tar', '-czvf', 'doc-archive.tar.gz', '_build/html')
    os.chdir('..')

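Note how the noxfile hunks above repeat the same save/override/restore dance around ``global_config.install_only`` in each helper. Purely as an illustration of that pattern (this is a hypothetical refactor sketch, not code from this commit, and the ``_force_install_only_off`` name is invented), it could be factored into a context manager:

    import contextlib


    @contextlib.contextmanager
    def _force_install_only_off(session):
        # Hypothetical helper wrapping the "force install_only to False, then
        # restore it" pattern used by _get_session_python_version_info,
        # _get_session_python_site_packages_dir and _get_distro_info
        # (see https://github.com/theacodes/nox/pull/181 for the background).
        old_install_only_value = session._runner.global_config.install_only
        try:
            session._runner.global_config.install_only = False
            yield session
        finally:
            session._runner.global_config.install_only = old_install_only_value
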
@@ -2,31 +2,31 @@ apache-libcloud==2.2.1
backports.ssl_match_hostname==3.5.0.1
backports_abc==0.5
certifi
cffi==1.11.2
cffi==1.12.2
CherryPy==13.0.0
click==6.7
enum34==1.1.6
futures==3.1.1
futures==3.2.0 ; python_version < "3.0"
gitdb==0.6.4
GitPython==2.1.7
idna==2.6
ipaddress==1.0.18
Jinja2==2.10
gitpython==2.1.11
idna==2.8
ipaddress==1.0.22
jinja2==2.10.1
linode-python==1.1.1
Mako==1.0.7
MarkupSafe==1.0
msgpack-python==0.4.8
pyasn1==0.4.2
pycparser==2.18
pycrypto==2.6.1
python-dateutil==2.6.1
python-gnupg==0.4.1
PyYAML==3.12
pyzmq==17.0.0
markupsafe==1.1.1
msgpack-python==0.5.6
pyasn1==0.4.5
pycparser==2.19
pycryptodome==3.8.1
python-dateutil==2.8.0
python-gnupg==0.4.4
pyyaml==3.13
pyzmq==18.0.1
requests==2.21.0
singledispatch==3.4.0.3
six==1.11.0
six==1.12.0
smmap==0.9.0
timelib==0.2.4
tornado==4.5.2
tornado==4.5.3
vultr==1.0rc1

@@ -1,2 +1,2 @@
cryptography==2.1.4
pyOpenSSL==17.5.0
cryptography==2.6.1
pyopenssl==19.0.0

@@ -1,39 +1,38 @@
-r req_win.txt
backports-abc==0.5
backports.ssl-match-hostname==3.5.0.1
certifi
cffi==1.10.0
backports.ssl-match-hostname==3.7.0.1
certifi==2019.3.9
cffi==1.12.2
CherryPy==10.2.1
cryptography==2.1.4
cryptography==2.6.1
enum34==1.1.6
futures==3.1.1
futures==3.2.0 ; python_version < "3.0"
gitdb==0.6.4
GitPython==2.1.3
idna==2.5
idna==2.8
ioloop==0.1a0
ipaddress==1.0.18
Jinja2==2.9.6
libnacl==1.6.1 # required by the nacl module
ipaddress==1.0.22
jinja2==2.10.1
libnacl==1.6.1
lxml==3.7.3
Mako==1.0.6
MarkupSafe==1.0
msgpack-python==0.4.8
psutil==5.2.2
pyasn1==0.2.3
pycparser==2.17
pycrypto==2.6.1
markupsafe==1.1.1
msgpack-python==0.5.6
psutil==5.6.1
pyasn1==0.4.5
pycparser==2.19
pycryptodomex==3.8.1
pycurl==7.43.0
PyMySQL==0.7.11
pyOpenSSL==17.5.0
#python-certifi-win32==1.2
python-dateutil==2.6.1
python-gnupg==0.4.1
pyopenssl==19.0.0
python-dateutil==2.8.0
python-gnupg==0.4.4
pythonnet==2.3.0
PyYAML==3.12
pyzmq==16.0.3
pyyaml==3.13
pyzmq==18.0.1
requests==2.21.0
singledispatch==3.4.0.3
smmap==0.9.0
timelib==0.2.4
tornado==4.5.1
wheel==0.30.0a0
tornado==4.5.3
wheel==0.33.4

@@ -1,7 +1,7 @@
-r base.txt

mock>=2.0.0
SaltPyLint>=v2017.3.6
SaltPyLint>=v2019.6.7
testinfra>=1.7.0,!=1.17.0

# httpretty Needs to be here for now even though it's a dependency of boto.

@@ -1,3 +1,3 @@
# Lint requirements
pylint==1.6.5
SaltPyLint>=v2017.3.6
SaltPyLint>=v2019.6.7

@@ -1,5 +1,4 @@
# This is a compilation of requirements installed on salt-jenkins git.salt state run
apache-libcloud==2.0.0
boto3
boto>=2.46.0
cffi

@@ -14,6 +14,6 @@ mccabe==0.6.1 # via pylint
modernize==0.5 # via saltpylint
pycodestyle==2.5.0 # via saltpylint
pylint==1.6.5
saltpylint==2019.1.11
saltpylint==2019.6.7
six==1.12.0 # via astroid, pylint
wrapt==1.11.1 # via astroid

|
@ -2,15 +2,16 @@
|
|||
# This file is autogenerated by pip-compile
|
||||
# To update, run:
|
||||
#
|
||||
# pip-compile -o requirements/static/py2.7/raet-osx.txt -v requirements/base.txt requirements/raet.txt requirements/pytest.txt requirements/static/osx.in
|
||||
# pip-compile -o requirements/static/py2.7/raet-osx.txt -v pkg/osx/req.txt pkg/osx/req_ext.txt requirements/base.txt requirements/raet.txt requirements/pytest.txt requirements/static/osx.in
|
||||
#
|
||||
apache-libcloud==2.0.0
|
||||
apache-libcloud==2.2.1
|
||||
asn1crypto==0.24.0 # via cryptography
|
||||
atomicwrites==1.3.0 # via pytest
|
||||
attrs==19.1.0 # via pytest
|
||||
aws-xray-sdk==0.95 # via moto
|
||||
backports-abc==0.5 # via tornado
|
||||
backports.ssl-match-hostname==3.7.0.1 # via docker, websocket-client
|
||||
backports-abc==0.5
|
||||
backports.functools-lru-cache==1.5 # via cheroot, jaraco.functools
|
||||
backports.ssl-match-hostname==3.5.0.1
|
||||
backports.tempfile==1.0 # via moto
|
||||
backports.weakref==1.0.post1 # via backports.tempfile
|
||||
bcrypt==3.1.6 # via paramiko
|
||||
|
@ -18,30 +19,36 @@ boto3==1.9.132
|
|||
boto==2.49.0
|
||||
botocore==1.12.132 # via boto3, moto, s3transfer
|
||||
cachetools==3.1.0 # via google-auth
|
||||
certifi==2019.3.9 # via kubernetes, requests, tornado
|
||||
certifi==2019.3.9
|
||||
cffi==1.12.2
|
||||
chardet==3.0.4 # via requests
|
||||
cheroot==6.5.5 # via cherrypy
|
||||
cherrypy==13.0.0
|
||||
click==6.7
|
||||
clustershell==1.8.1
|
||||
cookies==2.2.1 # via responses
|
||||
coverage==4.5.3 # via pytest-cov
|
||||
croniter==0.3.29
|
||||
cryptography==2.6.1 # via moto, paramiko, pylxd, pyopenssl
|
||||
cryptography==2.6.1
|
||||
dnspython==1.16.0
|
||||
docker-pycreds==0.4.0 # via docker
|
||||
docker==3.7.2
|
||||
docutils==0.14 # via botocore
|
||||
ecdsa==0.13.2 # via python-jose
|
||||
enum34==1.1.6 # via cryptography, raet
|
||||
enum34==1.1.6
|
||||
funcsigs==1.0.2 # via mock, pytest
|
||||
functools32==3.2.3.post2 # via jsonschema
|
||||
future==0.17.1 # via python-jose
|
||||
futures==3.2.0 ; python_version < "3.0"
|
||||
gitdb2==2.0.5 # via gitpython
|
||||
gitdb==0.6.4
|
||||
gitpython==2.1.11
|
||||
google-auth==1.6.3 # via kubernetes
|
||||
idna==2.8 # via requests
|
||||
idna==2.8
|
||||
ioflo==1.7.5
|
||||
ipaddress==1.0.22 # via cryptography, docker, kubernetes
|
||||
ipaddress==1.0.22
|
||||
jaraco.classes==2.0 # via cherrypy
|
||||
jaraco.functools==2.0 # via tempora
|
||||
jinja2==2.10.1
|
||||
jmespath==0.9.4 # via boto3, botocore
|
||||
jsondiff==1.1.1 # via moto
|
||||
|
@ -52,7 +59,9 @@ jxmlease==1.0.1
|
|||
keyring==5.7.1
|
||||
kubernetes==3.0.0
|
||||
libnacl==1.6.1
|
||||
linode-python==1.1.1
|
||||
lxml==4.3.3 # via junos-eznc, ncclient
|
||||
mako==1.0.7
|
||||
markupsafe==1.1.1
|
||||
meld3==1.0.2 # via supervisor
|
||||
mock==2.0.0 ; python_version < "3.6"
|
||||
|
@ -66,12 +75,13 @@ paramiko==2.4.2 # via junos-eznc, ncclient, scp
|
|||
pathlib2==2.3.3 # via pytest
|
||||
pbr==5.1.3 # via mock, pylxd
|
||||
pluggy==0.9.0 # via pytest
|
||||
portend==2.4 # via cherrypy
|
||||
psutil==5.6.1
|
||||
py==1.8.0 # via pytest
|
||||
pyaml==19.4.1 # via moto
|
||||
pyasn1-modules==0.2.4 # via google-auth
|
||||
pyasn1==0.4.5 # via paramiko, pyasn1-modules, rsa
|
||||
pycparser==2.19 # via cffi
|
||||
pyasn1==0.4.5
|
||||
pycparser==2.19
|
||||
pycryptodome==3.8.1
|
||||
pylxd==2.2.9
|
||||
pynacl==1.3.0 # via paramiko
|
||||
|
@ -84,13 +94,14 @@ pytest-salt==2018.12.8
|
|||
pytest-tempdir==2018.8.11
|
||||
pytest-timeout==1.3.3
|
||||
pytest==4.4.1
|
||||
python-dateutil==2.8.0 # via botocore, croniter, kubernetes, moto, pylxd
|
||||
python-dateutil==2.8.0
|
||||
python-etcd==0.4.5
|
||||
python-gnupg==0.4.4
|
||||
python-jose==2.0.2 # via moto
|
||||
pytz==2019.1 # via moto
|
||||
pytz==2019.1 # via moto, tempora
|
||||
pyvmomi==6.7.1.2018.12
|
||||
pyyaml==3.13
|
||||
pyzmq==18.0.1
|
||||
raet==0.6.8
|
||||
requests-toolbelt==0.9.1 # via pylxd
|
||||
requests-unixsocket==0.1.5 # via pylxd
|
||||
|
@ -104,15 +115,18 @@ scandir==1.10.0 # via pathlib2
|
|||
scp==0.13.2 # via junos-eznc
|
||||
selectors2==2.0.1 # via ncclient
|
||||
setproctitle==1.1.10
|
||||
singledispatch==3.4.0.3 # via tornado
|
||||
six==1.12.0 # via bcrypt, cryptography, docker, docker-pycreds, google-auth, junos-eznc, kubernetes, mock, more-itertools, moto, ncclient, pathlib2, pylxd, pynacl, pyopenssl, pytest, python-dateutil, python-jose, pyvmomi, raet, responses, salttesting, singledispatch, websocket-client
|
||||
singledispatch==3.4.0.3
|
||||
six==1.12.0
|
||||
smmap2==2.0.5 # via gitdb2
|
||||
smmap==0.9.0
|
||||
strict-rfc3339==0.7
|
||||
supervisor==3.3.5 ; python_version < "3"
|
||||
tempora==1.14.1 # via portend
|
||||
timelib==0.2.4
|
||||
tornado==4.5.3 ; python_version < "3"
|
||||
urllib3==1.24.2 # via botocore, kubernetes, python-etcd, requests, requests-unixsocket
|
||||
virtualenv==16.4.3
|
||||
vultr==1.0rc1
|
||||
websocket-client==0.40.0 # via docker, kubernetes
|
||||
werkzeug==0.15.2 # via moto
|
||||
wrapt==1.11.1 # via aws-xray-sdk
|
||||
|
|
|
@ -2,42 +2,50 @@
|
|||
# This file is autogenerated by pip-compile
|
||||
# To update, run:
|
||||
#
|
||||
# pip-compile -o requirements/static/py2.7/raet-windows.txt -v requirements/base.txt requirements/raet.txt requirements/pytest.txt requirements/static/windows.in
|
||||
# pip-compile -o requirements/static/py2.7/raet-windows.txt -v pkg/windows/req.txt pkg/windows/req_win.txt requirements/base.txt requirements/raet.txt requirements/pytest.txt requirements/static/windows.in
|
||||
#
|
||||
asn1crypto==0.24.0 # via cryptography
|
||||
atomicwrites==1.3.0 # via pytest
|
||||
attrs==19.1.0 # via pytest
|
||||
aws-xray-sdk==0.95 # via moto
|
||||
backports-abc==0.5 # via tornado
|
||||
backports.ssl-match-hostname==3.7.0.1 # via docker, websocket-client
|
||||
backports-abc==0.5
|
||||
backports.functools-lru-cache==1.5 # via cheroot, jaraco.functools
|
||||
backports.ssl-match-hostname==3.7.0.1
|
||||
backports.tempfile==1.0 # via moto
|
||||
backports.weakref==1.0.post1 # via backports.tempfile
|
||||
boto3==1.9.132
|
||||
boto==2.49.0
|
||||
botocore==1.12.132 # via boto3, moto, s3transfer
|
||||
cachetools==3.1.0 # via google-auth
|
||||
certifi==2019.3.9 # via kubernetes, requests, tornado
|
||||
cffi==1.12.2 # via cryptography, pygit2
|
||||
certifi==2019.3.9
|
||||
cffi==1.12.2
|
||||
chardet==3.0.4 # via requests
|
||||
cheroot==6.5.5 # via cherrypy
|
||||
cherrypy==10.2.1
|
||||
colorama==0.4.1 # via pytest
|
||||
cookies==2.2.1 # via responses
|
||||
coverage==4.5.3 # via pytest-cov
|
||||
cryptography==2.6.1 # via moto, pyopenssl
|
||||
cryptography==2.6.1
|
||||
dmidecode==0.9.0
|
||||
dnspython==1.16.0
|
||||
docker-pycreds==0.4.0 # via docker
|
||||
docker==2.7.0
|
||||
docutils==0.14 # via botocore
|
||||
ecdsa==0.13.2 # via python-jose
|
||||
enum34==1.1.6 # via cryptography, raet
|
||||
enum34==1.1.6
|
||||
funcsigs==1.0.2 # via mock, pytest
|
||||
functools32==3.2.3.post2 # via jsonschema
|
||||
future==0.17.1 # via python-jose
|
||||
futures==3.2.0 ; python_version < "3.0"
|
||||
gitdb2==2.0.5 # via gitpython
|
||||
gitdb==0.6.4
|
||||
gitpython==2.1.3
|
||||
google-auth==1.6.3 # via kubernetes
|
||||
idna==2.8 # via requests
|
||||
idna==2.8
|
||||
ioflo==1.7.5
|
||||
ipaddress==1.0.22 # via cryptography, docker, kubernetes
|
||||
ioloop==0.1a0
|
||||
ipaddress==1.0.22
|
||||
jaraco.functools==2.0 # via tempora
|
||||
jinja2==2.10.1
|
||||
jmespath==0.9.4 # via boto3, botocore
|
||||
jsondiff==1.1.1 # via moto
|
||||
|
@ -46,6 +54,8 @@ jsonschema==2.6.0
|
|||
keyring==5.7.1
|
||||
kubernetes==3.0.0
|
||||
libnacl==1.6.1
|
||||
lxml==3.7.3
|
||||
mako==1.0.6
|
||||
markupsafe==1.1.1
|
||||
meld3==1.0.2 # via supervisor
|
||||
mock==2.0.0 ; python_version < "3.6"
|
||||
|
@ -57,15 +67,20 @@ patch==1.16
|
|||
pathlib2==2.3.3 # via pytest
|
||||
pbr==5.1.3 # via mock
|
||||
pluggy==0.9.0 # via pytest
|
||||
portend==2.4 # via cherrypy
|
||||
psutil==5.6.1
|
||||
py==1.8.0 # via pytest
|
||||
pyaml==19.4.1 # via moto
|
||||
pyasn1-modules==0.2.4 # via google-auth
|
||||
pyasn1==0.4.5 # via pyasn1-modules, rsa
|
||||
pycparser==2.19 # via cffi
|
||||
pyasn1==0.4.5
|
||||
pycparser==2.19
|
||||
pycryptodome==3.8.1 # via python-jose
|
||||
pycryptodomex==3.8.1
|
||||
pycurl==7.43.0
|
||||
pygit2==0.28.1
|
||||
pymysql==0.7.11
|
||||
pyopenssl==19.0.0
|
||||
pypiwin32==223 # via cherrypy
|
||||
pytest-cov==2.6.1
|
||||
pytest-helpers-namespace==2019.1.8
|
||||
pytest-salt-runtests-bridge==2019.1.30
|
||||
|
@ -73,14 +88,16 @@ pytest-salt==2018.12.8
|
|||
pytest-tempdir==2018.8.11
|
||||
pytest-timeout==1.3.3
|
||||
pytest==4.4.1
|
||||
python-dateutil==2.8.0 # via botocore, kubernetes, moto
|
||||
python-dateutil==2.8.0
|
||||
python-etcd==0.4.5
|
||||
python-gnupg==0.4.4
|
||||
python-jose==2.0.2 # via moto
|
||||
pytz==2019.1 # via moto
|
||||
pythonnet==2.3.0
|
||||
pytz==2019.1 # via moto, tempora
|
||||
pyvmomi==6.7.1.2018.12
|
||||
pywin32==223
|
||||
pyyaml==3.13
|
||||
pyzmq==18.0.1
|
||||
raet==0.6.8
|
||||
requests==2.21.0
|
||||
responses==0.10.6 # via moto
|
||||
|
@ -91,16 +108,20 @@ salttesting==2017.6.1
|
|||
scandir==1.10.0 # via pathlib2
|
||||
sed==0.3.1
|
||||
setproctitle==1.1.10
|
||||
singledispatch==3.4.0.3 # via tornado
|
||||
six==1.12.0 # via cryptography, docker, docker-pycreds, google-auth, kubernetes, mock, more-itertools, moto, pathlib2, pygit2, pyopenssl, pytest, python-dateutil, python-jose, pyvmomi, raet, responses, salttesting, singledispatch, websocket-client
|
||||
singledispatch==3.4.0.3
|
||||
six==1.12.0 # via cheroot, cherrypy, cryptography, docker, docker-pycreds, google-auth, kubernetes, mock, more-itertools, moto, pathlib2, pygit2, pyopenssl, pytest, python-dateutil, python-jose, pyvmomi, raet, responses, salttesting, singledispatch, tempora, websocket-client
|
||||
smmap2==2.0.5 # via gitdb2
|
||||
smmap==0.9.0
|
||||
strict-rfc3339==0.7
|
||||
supervisor==3.3.5 ; python_version < "3"
|
||||
tempora==1.14.1 # via portend
|
||||
timelib==0.2.4
|
||||
tornado==4.5.3 ; python_version < "3"
|
||||
urllib3==1.24.2 # via botocore, kubernetes, python-etcd, requests
|
||||
virtualenv==16.4.3
|
||||
websocket-client==0.40.0 # via docker, kubernetes
|
||||
werkzeug==0.15.2 # via moto
|
||||
wheel==0.33.4
|
||||
wmi==1.4.9
|
||||
wrapt==1.11.1 # via aws-xray-sdk
|
||||
xmltodict==0.12.0 # via moto
|
||||
|
|
|
@ -2,15 +2,16 @@
|
|||
# This file is autogenerated by pip-compile
|
||||
# To update, run:
|
||||
#
|
||||
# pip-compile -o requirements/static/py2.7/zeromq-osx.txt -v requirements/base.txt requirements/zeromq.txt requirements/pytest.txt requirements/static/osx.in
|
||||
# pip-compile -o requirements/static/py2.7/zeromq-osx.txt -v pkg/osx/req.txt pkg/osx/req_ext.txt requirements/base.txt requirements/zeromq.txt requirements/pytest.txt requirements/static/osx.in
|
||||
#
|
||||
apache-libcloud==2.0.0
|
||||
apache-libcloud==2.2.1
|
||||
asn1crypto==0.24.0 # via cryptography
|
||||
atomicwrites==1.3.0 # via pytest
|
||||
attrs==19.1.0 # via pytest
|
||||
aws-xray-sdk==0.95 # via moto
|
||||
backports-abc==0.5 # via tornado
|
||||
backports.ssl-match-hostname==3.7.0.1 # via docker, websocket-client
|
||||
backports-abc==0.5
|
||||
backports.functools-lru-cache==1.5 # via cheroot, jaraco.functools
|
||||
backports.ssl-match-hostname==3.5.0.1
|
||||
backports.tempfile==1.0 # via moto
|
||||
backports.weakref==1.0.post1 # via backports.tempfile
|
||||
bcrypt==3.1.6 # via paramiko
|
||||
|
@ -18,29 +19,35 @@ boto3==1.9.132
|
|||
boto==2.49.0
|
||||
botocore==1.12.132 # via boto3, moto, s3transfer
|
||||
cachetools==3.1.0 # via google-auth
|
||||
certifi==2019.3.9 # via kubernetes, requests, tornado
|
||||
certifi==2019.3.9
|
||||
cffi==1.12.2
|
||||
chardet==3.0.4 # via requests
|
||||
cheroot==6.5.5 # via cherrypy
|
||||
cherrypy==13.0.0
|
||||
click==6.7
|
||||
clustershell==1.8.1
|
||||
cookies==2.2.1 # via responses
|
||||
coverage==4.5.3 # via pytest-cov
|
||||
croniter==0.3.29
|
||||
cryptography==2.6.1 # via moto, paramiko, pylxd, pyopenssl
|
||||
cryptography==2.6.1
|
||||
dnspython==1.16.0
|
||||
docker-pycreds==0.4.0 # via docker
|
||||
docker==3.7.2
|
||||
docutils==0.14 # via botocore
|
||||
ecdsa==0.13.2 # via python-jose
|
||||
enum34==1.1.6 # via cryptography
|
||||
enum34==1.1.6
|
||||
funcsigs==1.0.2 # via mock, pytest
|
||||
functools32==3.2.3.post2 # via jsonschema
|
||||
future==0.17.1 # via python-jose
|
||||
futures==3.2.0 ; python_version < "3.0"
|
||||
gitdb2==2.0.5 # via gitpython
|
||||
gitdb==0.6.4
|
||||
gitpython==2.1.11
|
||||
google-auth==1.6.3 # via kubernetes
|
||||
idna==2.8 # via requests
|
||||
ipaddress==1.0.22 # via cryptography, docker, kubernetes
|
||||
idna==2.8
|
||||
ipaddress==1.0.22
|
||||
jaraco.classes==2.0 # via cherrypy
|
||||
jaraco.functools==2.0 # via tempora
|
||||
jinja2==2.10.1
|
||||
jmespath==0.9.4 # via boto3, botocore
|
||||
jsondiff==1.1.1 # via moto
|
||||
|
@ -50,7 +57,9 @@ junos-eznc==2.2.0
|
|||
jxmlease==1.0.1
|
||||
keyring==5.7.1
|
||||
kubernetes==3.0.0
|
||||
linode-python==1.1.1
|
||||
lxml==4.3.3 # via junos-eznc, ncclient
|
||||
mako==1.0.7
|
||||
markupsafe==1.1.1
|
||||
meld3==1.0.2 # via supervisor
|
||||
mock==2.0.0 ; python_version < "3.6"
|
||||
|
@ -64,12 +73,13 @@ paramiko==2.4.2 # via junos-eznc, ncclient, scp
|
|||
pathlib2==2.3.3 # via pytest
|
||||
pbr==5.1.3 # via mock, pylxd
|
||||
pluggy==0.9.0 # via pytest
|
||||
portend==2.4 # via cherrypy
|
||||
psutil==5.6.1
|
||||
py==1.8.0 # via pytest
|
||||
pyaml==19.4.1 # via moto
|
||||
pyasn1-modules==0.2.4 # via google-auth
|
||||
pyasn1==0.4.5 # via paramiko, pyasn1-modules, rsa
|
||||
pycparser==2.19 # via cffi
|
||||
pyasn1==0.4.5
|
||||
pycparser==2.19
|
||||
# Next line explicitly commented out by pip-tools-compile because of the following regex: '^pycrypto==(.*)$'
|
||||
# pycrypto==2.6.1 ; sys_platform != "win32"
|
||||
pycryptodome==3.8.1
|
||||
|
@ -84,11 +94,11 @@ pytest-salt==2018.12.8
|
|||
pytest-tempdir==2018.8.11
|
||||
pytest-timeout==1.3.3
|
||||
pytest==4.4.1
|
||||
python-dateutil==2.8.0 # via botocore, croniter, kubernetes, moto, pylxd
|
||||
python-dateutil==2.8.0
|
||||
python-etcd==0.4.5
|
||||
python-gnupg==0.4.4
|
||||
python-jose==2.0.2 # via moto
|
||||
pytz==2019.1 # via moto
|
||||
pytz==2019.1 # via moto, tempora
|
||||
pyvmomi==6.7.1.2018.12
|
||||
pyyaml==3.13
|
||||
pyzmq==18.0.1 ; python_version != "3.4"
|
||||
|
@ -104,15 +114,18 @@ scandir==1.10.0 # via pathlib2
|
|||
scp==0.13.2 # via junos-eznc
|
||||
selectors2==2.0.1 # via ncclient
|
||||
setproctitle==1.1.10
|
||||
singledispatch==3.4.0.3 # via tornado
|
||||
six==1.12.0 # via bcrypt, cryptography, docker, docker-pycreds, google-auth, junos-eznc, kubernetes, mock, more-itertools, moto, ncclient, pathlib2, pylxd, pynacl, pyopenssl, pytest, python-dateutil, python-jose, pyvmomi, responses, salttesting, singledispatch, websocket-client
|
||||
singledispatch==3.4.0.3
|
||||
six==1.12.0
|
||||
smmap2==2.0.5 # via gitdb2
|
||||
smmap==0.9.0
|
||||
strict-rfc3339==0.7
|
||||
supervisor==3.3.5 ; python_version < "3"
|
||||
tempora==1.14.1 # via portend
|
||||
timelib==0.2.4
|
||||
tornado==4.5.3 ; python_version < "3"
|
||||
urllib3==1.24.2 # via botocore, kubernetes, python-etcd, requests, requests-unixsocket
|
||||
virtualenv==16.4.3
|
||||
vultr==1.0rc1
|
||||
websocket-client==0.40.0 # via docker, kubernetes
|
||||
werkzeug==0.15.2 # via moto
|
||||
wrapt==1.11.1 # via aws-xray-sdk
|
||||
|
|
|
@ -2,41 +2,49 @@
|
|||
# This file is autogenerated by pip-compile
|
||||
# To update, run:
|
||||
#
|
||||
# pip-compile -o requirements/static/py2.7/zeromq-windows.txt -v requirements/base.txt requirements/zeromq.txt requirements/pytest.txt requirements/static/windows.in
|
||||
# pip-compile -o requirements/static/py2.7/zeromq-windows.txt -v pkg/windows/req.txt pkg/windows/req_win.txt requirements/base.txt requirements/zeromq.txt requirements/pytest.txt requirements/static/windows.in
|
||||
#
|
||||
asn1crypto==0.24.0 # via cryptography
|
||||
atomicwrites==1.3.0 # via pytest
|
||||
attrs==19.1.0 # via pytest
|
||||
aws-xray-sdk==0.95 # via moto
|
||||
backports-abc==0.5 # via tornado
|
||||
backports.ssl-match-hostname==3.7.0.1 # via docker, websocket-client
|
||||
backports-abc==0.5
|
||||
backports.functools-lru-cache==1.5 # via cheroot, jaraco.functools
|
||||
backports.ssl-match-hostname==3.7.0.1
|
||||
backports.tempfile==1.0 # via moto
|
||||
backports.weakref==1.0.post1 # via backports.tempfile
|
||||
boto3==1.9.132
|
||||
boto==2.49.0
|
||||
botocore==1.12.132 # via boto3, moto, s3transfer
|
||||
cachetools==3.1.0 # via google-auth
|
||||
certifi==2019.3.9 # via kubernetes, requests, tornado
|
||||
cffi==1.12.2 # via cryptography, pygit2
|
||||
certifi==2019.3.9
|
||||
cffi==1.12.2
|
||||
chardet==3.0.4 # via requests
|
||||
cheroot==6.5.5 # via cherrypy
|
||||
cherrypy==10.2.1
|
||||
colorama==0.4.1 # via pytest
|
||||
cookies==2.2.1 # via responses
|
||||
coverage==4.5.3 # via pytest-cov
|
||||
cryptography==2.6.1 # via moto, pyopenssl
|
||||
cryptography==2.6.1
|
||||
dmidecode==0.9.0
|
||||
dnspython==1.16.0
|
||||
docker-pycreds==0.4.0 # via docker
|
||||
docker==2.7.0
|
||||
docutils==0.14 # via botocore
|
||||
ecdsa==0.13.2 # via python-jose
|
||||
enum34==1.1.6 # via cryptography
|
||||
enum34==1.1.6
|
||||
funcsigs==1.0.2 # via mock, pytest
|
||||
functools32==3.2.3.post2 # via jsonschema
|
||||
future==0.17.1 # via python-jose
|
||||
futures==3.2.0 ; python_version < "3.0"
|
||||
gitdb2==2.0.5 # via gitpython
|
||||
gitdb==0.6.4
|
||||
gitpython==2.1.3
|
||||
google-auth==1.6.3 # via kubernetes
|
||||
idna==2.8 # via requests
|
||||
ipaddress==1.0.22 # via cryptography, docker, kubernetes
|
||||
idna==2.8
|
||||
ioloop==0.1a0
|
||||
ipaddress==1.0.22
|
||||
jaraco.functools==2.0 # via tempora
|
||||
jinja2==2.10.1
|
||||
jmespath==0.9.4 # via boto3, botocore
|
||||
jsondiff==1.1.1 # via moto
|
||||
|
@ -44,6 +52,9 @@ jsonpickle==1.1 # via aws-xray-sdk
|
|||
jsonschema==2.6.0
|
||||
keyring==5.7.1
|
||||
kubernetes==3.0.0
|
||||
libnacl==1.6.1
|
||||
lxml==3.7.3
|
||||
mako==1.0.6
|
||||
markupsafe==1.1.1
|
||||
meld3==1.0.2 # via supervisor
|
||||
mock==2.0.0 ; python_version < "3.6"
|
||||
|
@ -55,16 +66,20 @@ patch==1.16
|
|||
pathlib2==2.3.3 # via pytest
|
||||
pbr==5.1.3 # via mock
|
||||
pluggy==0.9.0 # via pytest
|
||||
portend==2.4 # via cherrypy
|
||||
psutil==5.6.1
|
||||
py==1.8.0 # via pytest
|
||||
pyaml==19.4.1 # via moto
|
||||
pyasn1-modules==0.2.4 # via google-auth
|
||||
pyasn1==0.4.5 # via pyasn1-modules, rsa
|
||||
pycparser==2.19 # via cffi
|
||||
pyasn1==0.4.5
|
||||
pycparser==2.19
|
||||
pycryptodome==3.8.1 # via python-jose
|
||||
pycryptodomex==3.8.1 ; sys_platform == "win32"
|
||||
pycurl==7.43.0
|
||||
pygit2==0.28.1
|
||||
pymysql==0.7.11
|
||||
pyopenssl==19.0.0
|
||||
pypiwin32==223 # via cherrypy
|
||||
pytest-cov==2.6.1
|
||||
pytest-helpers-namespace==2019.1.8
|
||||
pytest-salt-runtests-bridge==2019.1.30
|
||||
|
@ -72,11 +87,12 @@ pytest-salt==2018.12.8
|
|||
pytest-tempdir==2018.8.11
|
||||
pytest-timeout==1.3.3
|
||||
pytest==4.4.1
|
||||
python-dateutil==2.8.0 # via botocore, kubernetes, moto
|
||||
python-dateutil==2.8.0
|
||||
python-etcd==0.4.5
|
||||
python-gnupg==0.4.4
|
||||
python-jose==2.0.2 # via moto
|
||||
pytz==2019.1 # via moto
|
||||
pythonnet==2.3.0
|
||||
pytz==2019.1 # via moto, tempora
|
||||
pyvmomi==6.7.1.2018.12
|
||||
pywin32==223
|
||||
pyyaml==3.13
|
||||
|
@ -90,16 +106,20 @@ salttesting==2017.6.1
|
|||
scandir==1.10.0 # via pathlib2
|
||||
sed==0.3.1
|
||||
setproctitle==1.1.10
|
||||
singledispatch==3.4.0.3 # via tornado
|
||||
six==1.12.0 # via cryptography, docker, docker-pycreds, google-auth, kubernetes, mock, more-itertools, moto, pathlib2, pygit2, pyopenssl, pytest, python-dateutil, python-jose, pyvmomi, responses, salttesting, singledispatch, websocket-client
|
||||
singledispatch==3.4.0.3
|
||||
six==1.12.0 # via cheroot, cherrypy, cryptography, docker, docker-pycreds, google-auth, kubernetes, mock, more-itertools, moto, pathlib2, pygit2, pyopenssl, pytest, python-dateutil, python-jose, pyvmomi, responses, salttesting, singledispatch, tempora, websocket-client
|
||||
smmap2==2.0.5 # via gitdb2
|
||||
smmap==0.9.0
|
||||
strict-rfc3339==0.7
|
||||
supervisor==3.3.5 ; python_version < "3"
|
||||
tempora==1.14.1 # via portend
|
||||
timelib==0.2.4
|
||||
tornado==4.5.3 ; python_version < "3"
|
||||
urllib3==1.24.2 # via botocore, kubernetes, python-etcd, requests
|
||||
virtualenv==16.4.3
|
||||
websocket-client==0.40.0 # via docker, kubernetes
|
||||
werkzeug==0.15.2 # via moto
|
||||
wheel==0.33.4
|
||||
wmi==1.4.9
|
||||
wrapt==1.11.1 # via aws-xray-sdk
|
||||
xmltodict==0.12.0 # via moto
|
||||
|
|
30
requirements/static/py3.5/docs.txt
Normal file
@@ -0,0 +1,30 @@
#
# This file is autogenerated by pip-compile
# To update, run:
#
# pip-compile -o requirements/static/py3.5/docs.txt -v requirements/static/docs.in
#
alabaster==0.7.12 # via sphinx
babel==2.7.0 # via sphinx
certifi==2019.3.9 # via requests
chardet==3.0.4 # via requests
docutils==0.14 # via sphinx
idna==2.8 # via requests
imagesize==1.1.0 # via sphinx
jinja2==2.10.1 # via sphinx
markupsafe==1.1.1 # via jinja2
packaging==19.0 # via sphinx
pygments==2.4.2 # via sphinx
pyparsing==2.4.0 # via packaging
pytz==2019.1 # via babel
requests==2.22.0 # via sphinx
six==1.12.0 # via packaging
snowballstemmer==1.2.1 # via sphinx
sphinx==2.0.1
sphinxcontrib-applehelp==1.0.1 # via sphinx
sphinxcontrib-devhelp==1.0.1 # via sphinx
sphinxcontrib-htmlhelp==1.0.2 # via sphinx
sphinxcontrib-jsmath==1.0.1 # via sphinx
sphinxcontrib-qthelp==1.0.2 # via sphinx
sphinxcontrib-serializinghtml==1.1.3 # via sphinx
urllib3==1.25.3 # via requests

@ -2,41 +2,48 @@
|
|||
# This file is autogenerated by pip-compile
|
||||
# To update, run:
|
||||
#
|
||||
# pip-compile -o requirements/static/py3.5/raet-osx.txt -v requirements/base.txt requirements/raet.txt requirements/pytest.txt requirements/static/osx.in
|
||||
# pip-compile -o requirements/static/py3.5/raet-osx.txt -v pkg/osx/req.txt pkg/osx/req_ext.txt requirements/base.txt requirements/raet.txt requirements/pytest.txt requirements/static/osx.in
|
||||
#
|
||||
apache-libcloud==2.0.0
|
||||
apache-libcloud==2.2.1
|
||||
asn1crypto==0.24.0 # via cryptography
|
||||
atomicwrites==1.3.0 # via pytest
|
||||
attrs==19.1.0 # via pytest
|
||||
aws-xray-sdk==0.95 # via moto
|
||||
backports-abc==0.5 # via tornado
|
||||
backports.ssl-match-hostname==3.7.0.1 # via websocket-client
|
||||
backports-abc==0.5
|
||||
backports.functools-lru-cache==1.5 # via cheroot
|
||||
backports.ssl_match_hostname==3.5.0.1
|
||||
bcrypt==3.1.6 # via paramiko
|
||||
boto3==1.9.132
|
||||
boto==2.49.0
|
||||
botocore==1.12.132 # via boto3, moto, s3transfer
|
||||
cachetools==3.1.0 # via google-auth
|
||||
certifi==2019.3.9 # via kubernetes, requests, tornado
|
||||
certifi==2019.3.9
|
||||
cffi==1.12.2
|
||||
chardet==3.0.4 # via requests
|
||||
cheroot==6.5.5 # via cherrypy
|
||||
cherrypy==13.0.0
|
||||
click==6.7
|
||||
clustershell==1.8.1
|
||||
coverage==4.5.3 # via pytest-cov
|
||||
croniter==0.3.29
|
||||
cryptography==2.6.1 # via moto, paramiko, pylxd, pyopenssl
|
||||
cryptography==2.6.1
|
||||
dnspython==1.16.0
|
||||
docker-pycreds==0.4.0 # via docker
|
||||
docker==3.7.2
|
||||
docutils==0.14 # via botocore
|
||||
ecdsa==0.13.2 # via python-jose
|
||||
# Next line explicitly commented out by pip-tools-compile because of the following regex: '^enum34==(.*)$'
|
||||
# enum34==1.1.6 # via raet
|
||||
# enum34==1.1.6
|
||||
future==0.17.1 # via python-jose
|
||||
gitdb2==2.0.5 # via gitpython
|
||||
gitdb==0.6.4
|
||||
gitpython==2.1.11
|
||||
google-auth==1.6.3 # via kubernetes
|
||||
idna==2.8 # via requests
|
||||
idna==2.8
|
||||
ioflo==1.7.5
|
||||
ipaddress==1.0.22 # via kubernetes
|
||||
ipaddress==1.0.22
|
||||
jaraco.classes==2.0 # via cherrypy
|
||||
jaraco.functools==2.0 # via tempora
|
||||
jinja2==2.10.1
|
||||
jmespath==0.9.4 # via boto3, botocore
|
||||
jsondiff==1.1.1 # via moto
|
||||
|
@ -47,7 +54,9 @@ jxmlease==1.0.1
|
|||
keyring==5.7.1
|
||||
kubernetes==3.0.0
|
||||
libnacl==1.6.1
|
||||
linode-python==1.1.1
|
||||
lxml==4.3.3 # via junos-eznc, ncclient
|
||||
mako==1.0.7
|
||||
markupsafe==1.1.1
|
||||
mock==2.0.0 ; python_version < "3.6"
|
||||
more-itertools==5.0.0
|
||||
|
@ -60,12 +69,13 @@ paramiko==2.4.2 # via junos-eznc, ncclient, scp
|
|||
pathlib2==2.3.3 # via pytest
|
||||
pbr==5.1.3 # via mock, pylxd
|
||||
pluggy==0.9.0 # via pytest
|
||||
portend==2.4 # via cherrypy
|
||||
psutil==5.6.1
|
||||
py==1.8.0 # via pytest
|
||||
pyaml==19.4.1 # via moto
|
||||
pyasn1-modules==0.2.4 # via google-auth
|
||||
pyasn1==0.4.5 # via paramiko, pyasn1-modules, rsa
|
||||
pycparser==2.19 # via cffi
|
||||
pyasn1==0.4.5
|
||||
pycparser==2.19
|
||||
pycryptodome==3.8.1
|
||||
pylxd==2.2.9
|
||||
pynacl==1.3.0 # via paramiko
|
||||
|
@ -78,13 +88,14 @@ pytest-salt==2018.12.8
|
|||
pytest-tempdir==2018.8.11
|
||||
pytest-timeout==1.3.3
|
||||
pytest==4.4.1
|
||||
python-dateutil==2.8.0 # via botocore, croniter, kubernetes, moto, pylxd
|
||||
python-dateutil==2.8.0
|
||||
python-etcd==0.4.5
|
||||
python-gnupg==0.4.4
|
||||
python-jose==2.0.2 # via moto
|
||||
pytz==2019.1 # via moto
|
||||
pytz==2019.1 # via moto, tempora
|
||||
pyvmomi==6.7.1.2018.12
|
||||
pyyaml==3.13
|
||||
pyzmq==18.0.1
|
||||
raet==0.6.8
|
||||
requests-toolbelt==0.9.1 # via pylxd
|
||||
requests-unixsocket==0.1.5 # via pylxd
|
||||
|
@ -96,14 +107,17 @@ s3transfer==0.2.0 # via boto3
|
|||
salttesting==2017.6.1
|
||||
scp==0.13.2 # via junos-eznc
|
||||
setproctitle==1.1.10
|
||||
singledispatch==3.4.0.3 # via tornado
|
||||
six==1.12.0 # via bcrypt, cryptography, docker, docker-pycreds, google-auth, junos-eznc, kubernetes, mock, more-itertools, moto, ncclient, pathlib2, pylxd, pynacl, pyopenssl, pytest, python-dateutil, python-jose, pyvmomi, raet, responses, salttesting, singledispatch, websocket-client
|
||||
singledispatch==3.4.0.3
|
||||
six==1.12.0
|
||||
smmap2==2.0.5 # via gitdb2
|
||||
smmap==0.9.0
|
||||
strict-rfc3339==0.7
|
||||
tempora==1.14.1 # via portend
|
||||
timelib==0.2.4
|
||||
tornado==4.5.3 ; python_version >= "3.4"
|
||||
urllib3==1.24.2 # via botocore, kubernetes, python-etcd, requests, requests-unixsocket
|
||||
virtualenv==16.4.3
|
||||
vultr==1.0rc1
|
||||
websocket-client==0.40.0 # via docker, kubernetes
|
||||
werkzeug==0.15.2 # via moto
|
||||
wrapt==1.11.1 # via aws-xray-sdk
|
||||
|
|
|
@ -2,23 +2,27 @@
|
|||
# This file is autogenerated by pip-compile
|
||||
# To update, run:
|
||||
#
|
||||
# pip-compile -o requirements/static/py3.5/raet-windows.txt -v requirements/base.txt requirements/raet.txt requirements/pytest.txt requirements/static/windows.in
|
||||
# pip-compile -o requirements/static/py3.5/raet-windows.txt -v pkg/windows/req.txt pkg/windows/req_win.txt requirements/base.txt requirements/raet.txt requirements/pytest.txt requirements/static/windows.in
|
||||
#
|
||||
asn1crypto==0.24.0 # via cryptography
|
||||
atomicwrites==1.3.0 # via pytest
|
||||
attrs==19.1.0 # via pytest
|
||||
aws-xray-sdk==0.95 # via moto
|
||||
backports.ssl-match-hostname==3.7.0.1 # via websocket-client
|
||||
backports-abc==0.5
|
||||
backports.functools-lru-cache==1.5 # via cheroot
|
||||
backports.ssl-match-hostname==3.7.0.1
|
||||
boto3==1.9.132
|
||||
boto==2.49.0
|
||||
botocore==1.12.132 # via boto3, moto, s3transfer
|
||||
cachetools==3.1.0 # via google-auth
|
||||
certifi==2019.3.9 # via kubernetes, requests
|
||||
cffi==1.12.2 # via cryptography, pygit2
|
||||
certifi==2019.3.9
|
||||
cffi==1.12.2
|
||||
chardet==3.0.4 # via requests
|
||||
cheroot==6.5.5 # via cherrypy
|
||||
cherrypy==10.2.1
|
||||
colorama==0.4.1 # via pytest
|
||||
coverage==4.5.3 # via pytest-cov
|
||||
cryptography==2.6.1 # via moto, pyopenssl
|
||||
cryptography==2.6.1
|
||||
dmidecode==0.9.0
|
||||
dnspython==1.16.0
|
||||
docker-pycreds==0.4.0 # via docker
|
||||
|
@ -26,12 +30,17 @@ docker==2.7.0
|
|||
docutils==0.14 # via botocore
|
||||
ecdsa==0.13.2 # via python-jose
|
||||
# Next line explicitly commented out by pip-tools-compile because of the following regex: '^enum34==(.*)$'
|
||||
# enum34==1.1.6 # via raet
|
||||
# enum34==1.1.6
|
||||
future==0.17.1 # via python-jose
|
||||
gitdb2==2.0.5 # via gitpython
|
||||
gitdb==0.6.4
|
||||
gitpython==2.1.3
|
||||
google-auth==1.6.3 # via kubernetes
|
||||
idna==2.8 # via requests
|
||||
idna==2.8
|
||||
ioflo==1.7.5
|
||||
ipaddress==1.0.22 # via kubernetes
|
||||
ioloop==0.1a0
|
||||
ipaddress==1.0.22
|
||||
jaraco.functools==2.0 # via tempora
|
||||
jinja2==2.10.1
|
||||
jmespath==0.9.4 # via boto3, botocore
|
||||
jsondiff==1.1.1 # via moto
|
||||
|
@ -40,6 +49,8 @@ jsonschema==2.6.0
|
|||
keyring==5.7.1
|
||||
kubernetes==3.0.0
|
||||
libnacl==1.6.1
|
||||
lxml==3.7.3
|
||||
mako==1.0.6
|
||||
markupsafe==1.1.1
|
||||
mock==2.0.0 ; python_version < "3.6"
|
||||
more-itertools==5.0.0
|
||||
|
@ -50,15 +61,20 @@ patch==1.16
|
|||
pathlib2==2.3.3 # via pytest
|
||||
pbr==5.1.3 # via mock
|
||||
pluggy==0.9.0 # via pytest
|
||||
portend==2.4 # via cherrypy
|
||||
psutil==5.6.1
|
||||
py==1.8.0 # via pytest
|
||||
pyaml==19.4.1 # via moto
|
||||
pyasn1-modules==0.2.4 # via google-auth
|
||||
pyasn1==0.4.5 # via pyasn1-modules, rsa
|
||||
pycparser==2.19 # via cffi
|
||||
pyasn1==0.4.5
|
||||
pycparser==2.19
|
||||
pycryptodome==3.8.1 # via python-jose
|
||||
pycryptodomex==3.8.1
|
||||
pycurl==7.43.0
|
||||
pygit2==0.28.1
|
||||
pymysql==0.7.11
|
||||
pyopenssl==19.0.0
|
||||
pypiwin32==223 # via cherrypy
|
||||
pytest-cov==2.6.1
|
||||
pytest-helpers-namespace==2019.1.8
|
||||
pytest-salt-runtests-bridge==2019.1.30
|
||||
|
@ -66,14 +82,16 @@ pytest-salt==2018.12.8
|
|||
pytest-tempdir==2018.8.11
|
||||
pytest-timeout==1.3.3
|
||||
pytest==4.4.1
|
||||
python-dateutil==2.8.0 # via botocore, kubernetes, moto
|
||||
python-dateutil==2.8.0
|
||||
python-etcd==0.4.5
|
||||
python-gnupg==0.4.4
|
||||
python-jose==2.0.2 # via moto
|
||||
pytz==2019.1 # via moto
|
||||
pythonnet==2.3.0
|
||||
pytz==2019.1 # via moto, tempora
|
||||
pyvmomi==6.7.1.2018.12
|
||||
pywin32==223
|
||||
pyyaml==3.13
|
||||
pyzmq==18.0.1
|
||||
raet==0.6.8
|
||||
requests==2.21.0
|
||||
responses==0.10.6 # via moto
|
||||
|
@ -83,14 +101,19 @@ s3transfer==0.2.0 # via boto3
|
|||
salttesting==2017.6.1
|
||||
sed==0.3.1
|
||||
setproctitle==1.1.10
|
||||
six==1.12.0 # via cryptography, docker, docker-pycreds, google-auth, kubernetes, mock, more-itertools, moto, pathlib2, pygit2, pyopenssl, pytest, python-dateutil, python-jose, pyvmomi, raet, responses, salttesting, websocket-client
|
||||
singledispatch==3.4.0.3
|
||||
six==1.12.0 # via cheroot, cherrypy, cryptography, docker, docker-pycreds, google-auth, kubernetes, mock, more-itertools, moto, pathlib2, pygit2, pyopenssl, pytest, python-dateutil, python-jose, pyvmomi, raet, responses, salttesting, singledispatch, tempora, websocket-client
|
||||
smmap2==2.0.5 # via gitdb2
|
||||
smmap==0.9.0
|
||||
strict-rfc3339==0.7
|
||||
tempora==1.14.1 # via portend
|
||||
timelib==0.2.4
|
||||
tornado==4.5.3 ; python_version >= "3.4"
|
||||
urllib3==1.24.2 # via botocore, kubernetes, python-etcd, requests
|
||||
virtualenv==16.4.3
|
||||
websocket-client==0.40.0 # via docker, kubernetes
|
||||
werkzeug==0.15.2 # via moto
|
||||
wheel==0.33.4
|
||||
wmi==1.4.9
|
||||
wrapt==1.11.1 # via aws-xray-sdk
|
||||
xmltodict==0.12.0 # via moto
|
||||
|
|
|
@ -2,38 +2,46 @@
|
|||
# This file is autogenerated by pip-compile
|
||||
# To update, run:
|
||||
#
|
||||
# pip-compile -o requirements/static/py3.5/zeromq-osx.txt -v requirements/base.txt requirements/zeromq.txt requirements/pytest.txt requirements/static/osx.in
|
||||
# pip-compile -o requirements/static/py3.5/zeromq-osx.txt -v pkg/osx/req.txt pkg/osx/req_ext.txt requirements/base.txt requirements/zeromq.txt requirements/pytest.txt requirements/static/osx.in
|
||||
#
|
||||
apache-libcloud==2.0.0
|
||||
apache-libcloud==2.2.1
|
||||
asn1crypto==0.24.0 # via cryptography
|
||||
atomicwrites==1.3.0 # via pytest
|
||||
attrs==19.1.0 # via pytest
|
||||
aws-xray-sdk==0.95 # via moto
|
||||
backports-abc==0.5 # via tornado
|
||||
backports.ssl-match-hostname==3.7.0.1 # via websocket-client
|
||||
backports-abc==0.5
|
||||
backports.functools-lru-cache==1.5 # via cheroot
|
||||
backports.ssl_match_hostname==3.5.0.1
|
||||
bcrypt==3.1.6 # via paramiko
|
||||
boto3==1.9.132
|
||||
boto==2.49.0
|
||||
botocore==1.12.132 # via boto3, moto, s3transfer
|
||||
cachetools==3.1.0 # via google-auth
|
||||
certifi==2019.3.9 # via kubernetes, requests, tornado
|
||||
certifi==2019.3.9
|
||||
cffi==1.12.2
|
||||
chardet==3.0.4 # via requests
|
||||
cheroot==6.5.5 # via cherrypy
|
||||
cherrypy==13.0.0
|
||||
click==6.7
|
||||
clustershell==1.8.1
|
||||
coverage==4.5.3 # via pytest-cov
|
||||
croniter==0.3.29
|
||||
cryptography==2.6.1 # via moto, paramiko, pylxd, pyopenssl
|
||||
cryptography==2.6.1
|
||||
dnspython==1.16.0
|
||||
docker-pycreds==0.4.0 # via docker
|
||||
docker==3.7.2
|
||||
docutils==0.14 # via botocore
|
||||
ecdsa==0.13.2 # via python-jose
|
||||
enum34==1.1.6
|
||||
future==0.17.1 # via python-jose
|
||||
gitdb2==2.0.5 # via gitpython
|
||||
gitdb==0.6.4
|
||||
gitpython==2.1.11
|
||||
google-auth==1.6.3 # via kubernetes
|
||||
idna==2.8 # via requests
|
||||
ipaddress==1.0.22 # via kubernetes
|
||||
idna==2.8
|
||||
ipaddress==1.0.22
|
||||
jaraco.classes==2.0 # via cherrypy
|
||||
jaraco.functools==2.0 # via tempora
|
||||
jinja2==2.10.1
|
||||
jmespath==0.9.4 # via boto3, botocore
|
||||
jsondiff==1.1.1 # via moto
|
||||
|
@ -43,7 +51,9 @@ junos-eznc==2.2.0
|
|||
jxmlease==1.0.1
|
||||
keyring==5.7.1
|
||||
kubernetes==3.0.0
|
||||
linode-python==1.1.1
|
||||
lxml==4.3.3 # via junos-eznc, ncclient
|
||||
mako==1.0.7
|
||||
markupsafe==1.1.1
|
||||
mock==2.0.0 ; python_version < "3.6"
|
||||
more-itertools==5.0.0
|
||||
|
@ -56,12 +66,13 @@ paramiko==2.4.2 # via junos-eznc, ncclient, scp
|
|||
pathlib2==2.3.3 # via pytest
|
||||
pbr==5.1.3 # via mock, pylxd
|
||||
pluggy==0.9.0 # via pytest
|
||||
portend==2.4 # via cherrypy
|
||||
psutil==5.6.1
|
||||
py==1.8.0 # via pytest
|
||||
pyaml==19.4.1 # via moto
|
||||
pyasn1-modules==0.2.4 # via google-auth
|
||||
pyasn1==0.4.5 # via paramiko, pyasn1-modules, rsa
|
||||
pycparser==2.19 # via cffi
|
||||
pyasn1==0.4.5
|
||||
pycparser==2.19
|
||||
# Next line explicitly commented out by pip-tools-compile because of the following regex: '^pycrypto==(.*)$'
|
||||
# pycrypto==2.6.1 ; sys_platform != "win32"
|
||||
pycryptodome==3.8.1
|
||||
|
@ -76,11 +87,11 @@ pytest-salt==2018.12.8
|
|||
pytest-tempdir==2018.8.11
|
||||
pytest-timeout==1.3.3
|
||||
pytest==4.4.1
|
||||
python-dateutil==2.8.0 # via botocore, croniter, kubernetes, moto, pylxd
|
||||
python-dateutil==2.8.0
|
||||
python-etcd==0.4.5
|
||||
python-gnupg==0.4.4
|
||||
python-jose==2.0.2 # via moto
|
||||
pytz==2019.1 # via moto
|
||||
pytz==2019.1 # via moto, tempora
|
||||
pyvmomi==6.7.1.2018.12
|
||||
pyyaml==3.13
|
||||
pyzmq==18.0.1 ; python_version != "3.4"
|
||||
|
@ -94,14 +105,17 @@ s3transfer==0.2.0 # via boto3
|
|||
salttesting==2017.6.1
|
||||
scp==0.13.2 # via junos-eznc
|
||||
setproctitle==1.1.10
|
||||
singledispatch==3.4.0.3 # via tornado
|
||||
six==1.12.0 # via bcrypt, cryptography, docker, docker-pycreds, google-auth, junos-eznc, kubernetes, mock, more-itertools, moto, ncclient, pathlib2, pylxd, pynacl, pyopenssl, pytest, python-dateutil, python-jose, pyvmomi, responses, salttesting, singledispatch, websocket-client
|
||||
singledispatch==3.4.0.3
|
||||
six==1.12.0
|
||||
smmap2==2.0.5 # via gitdb2
|
||||
smmap==0.9.0
|
||||
strict-rfc3339==0.7
|
||||
tempora==1.14.1 # via portend
|
||||
timelib==0.2.4
|
||||
tornado==4.5.3 ; python_version >= "3.4"
|
||||
urllib3==1.24.2 # via botocore, kubernetes, python-etcd, requests, requests-unixsocket
|
||||
virtualenv==16.4.3
|
||||
vultr==1.0rc1
|
||||
websocket-client==0.40.0 # via docker, kubernetes
|
||||
werkzeug==0.15.2 # via moto
|
||||
wrapt==1.11.1 # via aws-xray-sdk
|
||||
|
|
|
@ -2,33 +2,43 @@
|
|||
# This file is autogenerated by pip-compile
|
||||
# To update, run:
|
||||
#
|
||||
# pip-compile -o requirements/static/py3.5/zeromq-windows.txt -v requirements/base.txt requirements/zeromq.txt requirements/pytest.txt requirements/static/windows.in
|
||||
# pip-compile -o requirements/static/py3.5/zeromq-windows.txt -v pkg/windows/req.txt pkg/windows/req_win.txt requirements/base.txt requirements/zeromq.txt requirements/pytest.txt requirements/static/windows.in
|
||||
#
|
||||
asn1crypto==0.24.0 # via cryptography
|
||||
atomicwrites==1.3.0 # via pytest
|
||||
attrs==19.1.0 # via pytest
|
||||
aws-xray-sdk==0.95 # via moto
|
||||
backports.ssl-match-hostname==3.7.0.1 # via websocket-client
|
||||
backports-abc==0.5
|
||||
backports.functools-lru-cache==1.5 # via cheroot
|
||||
backports.ssl-match-hostname==3.7.0.1
|
||||
boto3==1.9.132
|
||||
boto==2.49.0
|
||||
botocore==1.12.132 # via boto3, moto, s3transfer
|
||||
cachetools==3.1.0 # via google-auth
|
||||
certifi==2019.3.9 # via kubernetes, requests
|
||||
cffi==1.12.2 # via cryptography, pygit2
|
||||
certifi==2019.3.9
|
||||
cffi==1.12.2
|
||||
chardet==3.0.4 # via requests
|
||||
cheroot==6.5.5 # via cherrypy
|
||||
cherrypy==10.2.1
|
||||
colorama==0.4.1 # via pytest
|
||||
coverage==4.5.3 # via pytest-cov
|
||||
cryptography==2.6.1 # via moto, pyopenssl
|
||||
cryptography==2.6.1
|
||||
dmidecode==0.9.0
|
||||
dnspython==1.16.0
|
||||
docker-pycreds==0.4.0 # via docker
|
||||
docker==2.7.0
|
||||
docutils==0.14 # via botocore
|
||||
ecdsa==0.13.2 # via python-jose
|
||||
enum34==1.1.6
|
||||
future==0.17.1 # via python-jose
|
||||
gitdb2==2.0.5 # via gitpython
|
||||
gitdb==0.6.4
|
||||
gitpython==2.1.3
|
||||
google-auth==1.6.3 # via kubernetes
|
||||
idna==2.8 # via requests
|
||||
ipaddress==1.0.22 # via kubernetes
|
||||
idna==2.8
|
||||
ioloop==0.1a0
|
||||
ipaddress==1.0.22
|
||||
jaraco.functools==2.0 # via tempora
|
||||
jinja2==2.10.1
|
||||
jmespath==0.9.4 # via boto3, botocore
|
||||
jsondiff==1.1.1 # via moto
|
||||
|
@ -36,6 +46,9 @@ jsonpickle==1.1 # via aws-xray-sdk
|
|||
jsonschema==2.6.0
|
||||
keyring==5.7.1
|
||||
kubernetes==3.0.0
|
||||
libnacl==1.6.1
|
||||
lxml==3.7.3
|
||||
mako==1.0.6
|
||||
markupsafe==1.1.1
|
||||
mock==2.0.0 ; python_version < "3.6"
|
||||
more-itertools==5.0.0
|
||||
|
@ -46,16 +59,20 @@ patch==1.16
|
|||
pathlib2==2.3.3 # via pytest
|
||||
pbr==5.1.3 # via mock
|
||||
pluggy==0.9.0 # via pytest
|
||||
portend==2.4 # via cherrypy
|
||||
psutil==5.6.1
|
||||
py==1.8.0 # via pytest
|
||||
pyaml==19.4.1 # via moto
|
||||
pyasn1-modules==0.2.4 # via google-auth
|
||||
pyasn1==0.4.5 # via pyasn1-modules, rsa
|
||||
pycparser==2.19 # via cffi
|
||||
pyasn1==0.4.5
|
||||
pycparser==2.19
|
||||
pycryptodome==3.8.1 # via python-jose
|
||||
pycryptodomex==3.8.1 ; sys_platform == "win32"
|
||||
pycurl==7.43.0
|
||||
pygit2==0.28.1
|
||||
pymysql==0.7.11
|
||||
pyopenssl==19.0.0
|
||||
pypiwin32==223 # via cherrypy
|
||||
pytest-cov==2.6.1
|
||||
pytest-helpers-namespace==2019.1.8
|
||||
pytest-salt-runtests-bridge==2019.1.30
|
||||
|
@ -63,11 +80,12 @@ pytest-salt==2018.12.8
|
|||
pytest-tempdir==2018.8.11
|
||||
pytest-timeout==1.3.3
|
||||
pytest==4.4.1
|
||||
python-dateutil==2.8.0 # via botocore, kubernetes, moto
|
||||
python-dateutil==2.8.0
|
||||
python-etcd==0.4.5
|
||||
python-gnupg==0.4.4
|
||||
python-jose==2.0.2 # via moto
|
||||
pytz==2019.1 # via moto
|
||||
pythonnet==2.3.0
|
||||
pytz==2019.1 # via moto, tempora
|
||||
pyvmomi==6.7.1.2018.12
|
||||
pywin32==223
|
||||
pyyaml==3.13
|
||||
|
@ -80,14 +98,19 @@ s3transfer==0.2.0 # via boto3
|
|||
salttesting==2017.6.1
|
||||
sed==0.3.1
|
||||
setproctitle==1.1.10
|
||||
six==1.12.0 # via cryptography, docker, docker-pycreds, google-auth, kubernetes, mock, more-itertools, moto, pathlib2, pygit2, pyopenssl, pytest, python-dateutil, python-jose, pyvmomi, responses, salttesting, websocket-client
|
||||
singledispatch==3.4.0.3
|
||||
six==1.12.0 # via cheroot, cherrypy, cryptography, docker, docker-pycreds, google-auth, kubernetes, mock, more-itertools, moto, pathlib2, pygit2, pyopenssl, pytest, python-dateutil, python-jose, pyvmomi, responses, salttesting, singledispatch, tempora, websocket-client
|
||||
smmap2==2.0.5 # via gitdb2
|
||||
smmap==0.9.0
|
||||
strict-rfc3339==0.7
|
||||
tempora==1.14.1 # via portend
|
||||
timelib==0.2.4
|
||||
tornado==4.5.3 ; python_version >= "3.4"
|
||||
urllib3==1.24.2 # via botocore, kubernetes, python-etcd, requests
|
||||
virtualenv==16.4.3
|
||||
websocket-client==0.40.0 # via docker, kubernetes
|
||||
werkzeug==0.15.2 # via moto
|
||||
wheel==0.33.4
|
||||
wmi==1.4.9
|
||||
wrapt==1.11.1 # via aws-xray-sdk
|
||||
xmltodict==0.12.0 # via moto
|
||||
|
|
|
@ -2,41 +2,48 @@
|
|||
# This file is autogenerated by pip-compile
|
||||
# To update, run:
|
||||
#
|
||||
# pip-compile -o requirements/static/py3.6/raet-osx.txt -v requirements/base.txt requirements/raet.txt requirements/pytest.txt requirements/static/osx.in
|
||||
# pip-compile -o requirements/static/py3.6/raet-osx.txt -v pkg/osx/req.txt pkg/osx/req_ext.txt requirements/base.txt requirements/raet.txt requirements/pytest.txt requirements/static/osx.in
|
||||
#
|
||||
apache-libcloud==2.0.0
|
||||
apache-libcloud==2.2.1
|
||||
asn1crypto==0.24.0 # via cryptography
|
||||
atomicwrites==1.3.0 # via pytest
|
||||
attrs==19.1.0 # via pytest
|
||||
aws-xray-sdk==0.95 # via moto
|
||||
backports-abc==0.5 # via tornado
|
||||
backports.ssl-match-hostname==3.7.0.1 # via websocket-client
|
||||
backports-abc==0.5
|
||||
backports.functools-lru-cache==1.5 # via cheroot
|
||||
backports.ssl_match_hostname==3.5.0.1
|
||||
bcrypt==3.1.6 # via paramiko
|
||||
boto3==1.9.132
|
||||
boto==2.49.0
|
||||
botocore==1.12.132 # via boto3, moto, s3transfer
|
||||
cachetools==3.1.0 # via google-auth
|
||||
certifi==2019.3.9 # via kubernetes, requests, tornado
|
||||
certifi==2019.3.9
|
||||
cffi==1.12.2
|
||||
chardet==3.0.4 # via requests
|
||||
cheroot==6.5.5 # via cherrypy
|
||||
cherrypy==13.0.0
|
||||
click==6.7
|
||||
clustershell==1.8.1
|
||||
coverage==4.5.3 # via pytest-cov
|
||||
croniter==0.3.29
|
||||
cryptography==2.6.1 # via moto, paramiko, pylxd, pyopenssl
|
||||
cryptography==2.6.1
|
||||
dnspython==1.16.0
|
||||
docker-pycreds==0.4.0 # via docker
|
||||
docker==3.7.2
|
||||
docutils==0.14 # via botocore
|
||||
ecdsa==0.13.2 # via python-jose
|
||||
# Next line explicitly commented out by pip-tools-compile because of the following regex: '^enum34==(.*)$'
|
||||
# enum34==1.1.6 # via raet
|
||||
# enum34==1.1.6
|
||||
future==0.17.1 # via python-jose
|
||||
gitdb2==2.0.5 # via gitpython
|
||||
gitdb==0.6.4
|
||||
gitpython==2.1.11
|
||||
google-auth==1.6.3 # via kubernetes
|
||||
idna==2.8 # via requests
|
||||
idna==2.8
|
||||
ioflo==1.7.5
|
||||
ipaddress==1.0.22 # via kubernetes
|
||||
ipaddress==1.0.22
|
||||
jaraco.classes==2.0 # via cherrypy
|
||||
jaraco.functools==2.0 # via tempora
|
||||
jinja2==2.10.1
|
||||
jmespath==0.9.4 # via boto3, botocore
|
||||
jsondiff==1.1.1 # via moto
|
||||
|
@ -47,7 +54,9 @@ jxmlease==1.0.1
|
|||
keyring==5.7.1
|
||||
kubernetes==3.0.0
|
||||
libnacl==1.6.1
|
||||
linode-python==1.1.1
|
||||
lxml==4.3.3 # via junos-eznc, ncclient
|
||||
mako==1.0.7
|
||||
markupsafe==1.1.1
|
||||
mock==2.0.0 # via moto
|
||||
more-itertools==5.0.0
|
||||
|
@ -59,12 +68,13 @@ netaddr==0.7.19 # via junos-eznc
|
|||
paramiko==2.4.2 # via junos-eznc, ncclient, scp
|
||||
pbr==5.1.3 # via mock, pylxd
|
||||
pluggy==0.9.0 # via pytest
|
||||
portend==2.4 # via cherrypy
|
||||
psutil==5.6.1
|
||||
py==1.8.0 # via pytest
|
||||
pyaml==19.4.1 # via moto
|
||||
pyasn1-modules==0.2.4 # via google-auth
|
||||
pyasn1==0.4.5 # via paramiko, pyasn1-modules, rsa
|
||||
pycparser==2.19 # via cffi
|
||||
pyasn1==0.4.5
|
||||
pycparser==2.19
|
||||
pycryptodome==3.8.1
|
||||
pylxd==2.2.9
|
||||
pynacl==1.3.0 # via paramiko
|
||||
|
@ -77,13 +87,14 @@ pytest-salt==2018.12.8
|
|||
pytest-tempdir==2018.8.11
|
||||
pytest-timeout==1.3.3
|
||||
pytest==4.4.1
|
||||
python-dateutil==2.8.0 # via botocore, croniter, kubernetes, moto, pylxd
|
||||
python-dateutil==2.8.0
|
||||
python-etcd==0.4.5
|
||||
python-gnupg==0.4.4
|
||||
python-jose==2.0.2 # via moto
|
||||
pytz==2019.1 # via moto
|
||||
pytz==2019.1 # via moto, tempora
|
||||
pyvmomi==6.7.1.2018.12
|
||||
pyyaml==3.13
|
||||
pyzmq==18.0.1
|
||||
raet==0.6.8
|
||||
requests-toolbelt==0.9.1 # via pylxd
|
||||
requests-unixsocket==0.1.5 # via pylxd
|
||||
|
@ -95,14 +106,17 @@ s3transfer==0.2.0 # via boto3
|
|||
salttesting==2017.6.1
|
||||
scp==0.13.2 # via junos-eznc
|
||||
setproctitle==1.1.10
|
||||
singledispatch==3.4.0.3 # via tornado
|
||||
six==1.12.0 # via bcrypt, cryptography, docker, docker-pycreds, google-auth, junos-eznc, kubernetes, mock, more-itertools, moto, ncclient, pylxd, pynacl, pyopenssl, pytest, python-dateutil, python-jose, pyvmomi, raet, responses, salttesting, singledispatch, websocket-client
|
||||
singledispatch==3.4.0.3
|
||||
six==1.12.0
|
||||
smmap2==2.0.5 # via gitdb2
|
||||
smmap==0.9.0
|
||||
strict-rfc3339==0.7
|
||||
tempora==1.14.1 # via portend
|
||||
timelib==0.2.4
|
||||
tornado==4.5.3 ; python_version >= "3.4"
|
||||
urllib3==1.24.2 # via botocore, kubernetes, python-etcd, requests, requests-unixsocket
|
||||
virtualenv==16.4.3
|
||||
vultr==1.0rc1
|
||||
websocket-client==0.40.0 # via docker, kubernetes
|
||||
werkzeug==0.15.2 # via moto
|
||||
wrapt==1.11.1 # via aws-xray-sdk
|
||||
|
|
|
@ -2,23 +2,27 @@
|
|||
# This file is autogenerated by pip-compile
|
||||
# To update, run:
|
||||
#
|
||||
# pip-compile -o requirements/static/py3.6/raet-windows.txt -v requirements/base.txt requirements/raet.txt requirements/pytest.txt requirements/static/windows.in
|
||||
# pip-compile -o requirements/static/py3.6/raet-windows.txt -v pkg/windows/req.txt pkg/windows/req_win.txt requirements/base.txt requirements/raet.txt requirements/pytest.txt requirements/static/windows.in
|
||||
#
|
||||
asn1crypto==0.24.0 # via cryptography
|
||||
atomicwrites==1.3.0 # via pytest
|
||||
attrs==19.1.0 # via pytest
|
||||
aws-xray-sdk==0.95 # via moto
|
||||
backports.ssl-match-hostname==3.7.0.1 # via websocket-client
|
||||
backports-abc==0.5
|
||||
backports.functools-lru-cache==1.5 # via cheroot
|
||||
backports.ssl-match-hostname==3.7.0.1
|
||||
boto3==1.9.132
|
||||
boto==2.49.0
|
||||
botocore==1.12.132 # via boto3, moto, s3transfer
|
||||
cachetools==3.1.0 # via google-auth
|
||||
certifi==2019.3.9 # via kubernetes, requests
|
||||
cffi==1.12.2 # via cryptography, pygit2
|
||||
certifi==2019.3.9
|
||||
cffi==1.12.2
|
||||
chardet==3.0.4 # via requests
|
||||
cheroot==6.5.5 # via cherrypy
|
||||
cherrypy==10.2.1
|
||||
colorama==0.4.1 # via pytest
|
||||
coverage==4.5.3 # via pytest-cov
|
||||
cryptography==2.6.1 # via moto, pyopenssl
|
||||
cryptography==2.6.1
|
||||
dmidecode==0.9.0
|
||||
dnspython==1.16.0
|
||||
docker-pycreds==0.4.0 # via docker
|
||||
|
@ -26,12 +30,17 @@ docker==2.7.0
|
|||
docutils==0.14 # via botocore
|
||||
ecdsa==0.13.2 # via python-jose
|
||||
# Next line explicitly commented out by pip-tools-compile because of the following regex: '^enum34==(.*)$'
|
||||
# enum34==1.1.6 # via raet
|
||||
# enum34==1.1.6
|
||||
future==0.17.1 # via python-jose
|
||||
gitdb2==2.0.5 # via gitpython
|
||||
gitdb==0.6.4
|
||||
gitpython==2.1.3
|
||||
google-auth==1.6.3 # via kubernetes
|
||||
idna==2.8 # via requests
|
||||
idna==2.8
|
||||
ioflo==1.7.5
|
||||
ipaddress==1.0.22 # via kubernetes
|
||||
ioloop==0.1a0
|
||||
ipaddress==1.0.22
|
||||
jaraco.functools==2.0 # via tempora
|
||||
jinja2==2.10.1
|
||||
jmespath==0.9.4 # via boto3, botocore
|
||||
jsondiff==1.1.1 # via moto
|
||||
|
@ -40,6 +49,8 @@ jsonschema==2.6.0
|
|||
keyring==5.7.1
|
||||
kubernetes==3.0.0
|
||||
libnacl==1.6.1
|
||||
lxml==3.7.3
|
||||
mako==1.0.6
|
||||
markupsafe==1.1.1
|
||||
mock==2.0.0 # via moto
|
||||
more-itertools==5.0.0
|
||||
|
@ -49,15 +60,20 @@ msgpack==0.6.1
|
|||
patch==1.16
|
||||
pbr==5.1.3 # via mock
|
||||
pluggy==0.9.0 # via pytest
|
||||
portend==2.4 # via cherrypy
|
||||
psutil==5.6.1
|
||||
py==1.8.0 # via pytest
|
||||
pyaml==19.4.1 # via moto
|
||||
pyasn1-modules==0.2.4 # via google-auth
|
||||
pyasn1==0.4.5 # via pyasn1-modules, rsa
|
||||
pycparser==2.19 # via cffi
|
||||
pyasn1==0.4.5
|
||||
pycparser==2.19
|
||||
pycryptodome==3.8.1 # via python-jose
|
||||
pycryptodomex==3.8.1
|
||||
pycurl==7.43.0
|
||||
pygit2==0.28.1
|
||||
pymysql==0.7.11
|
||||
pyopenssl==19.0.0
|
||||
pypiwin32==223 # via cherrypy
|
||||
pytest-cov==2.6.1
|
||||
pytest-helpers-namespace==2019.1.8
|
||||
pytest-salt-runtests-bridge==2019.1.30
|
||||
|
@ -65,14 +81,16 @@ pytest-salt==2018.12.8
|
|||
pytest-tempdir==2018.8.11
|
||||
pytest-timeout==1.3.3
|
||||
pytest==4.4.1
|
||||
python-dateutil==2.8.0 # via botocore, kubernetes, moto
|
||||
python-dateutil==2.8.0
|
||||
python-etcd==0.4.5
|
||||
python-gnupg==0.4.4
|
||||
python-jose==2.0.2 # via moto
|
||||
pytz==2019.1 # via moto
|
||||
pythonnet==2.3.0
|
||||
pytz==2019.1 # via moto, tempora
|
||||
pyvmomi==6.7.1.2018.12
|
||||
pywin32==223
|
||||
pyyaml==3.13
|
||||
pyzmq==18.0.1
|
||||
raet==0.6.8
|
||||
requests==2.21.0
|
||||
responses==0.10.6 # via moto
|
||||
|
@ -82,14 +100,19 @@ s3transfer==0.2.0 # via boto3
|
|||
salttesting==2017.6.1
|
||||
sed==0.3.1
|
||||
setproctitle==1.1.10
|
||||
six==1.12.0 # via cryptography, docker, docker-pycreds, google-auth, kubernetes, mock, more-itertools, moto, pygit2, pyopenssl, pytest, python-dateutil, python-jose, pyvmomi, raet, responses, salttesting, websocket-client
|
||||
singledispatch==3.4.0.3
|
||||
six==1.12.0 # via cheroot, cherrypy, cryptography, docker, docker-pycreds, google-auth, kubernetes, mock, more-itertools, moto, pygit2, pyopenssl, pytest, python-dateutil, python-jose, pyvmomi, raet, responses, salttesting, singledispatch, tempora, websocket-client
|
||||
smmap2==2.0.5 # via gitdb2
|
||||
smmap==0.9.0
|
||||
strict-rfc3339==0.7
|
||||
tempora==1.14.1 # via portend
|
||||
timelib==0.2.4
|
||||
tornado==4.5.3 ; python_version >= "3.4"
|
||||
urllib3==1.24.2 # via botocore, kubernetes, python-etcd, requests
|
||||
virtualenv==16.4.3
|
||||
websocket-client==0.40.0 # via docker, kubernetes
|
||||
werkzeug==0.15.2 # via moto
|
||||
wheel==0.33.4
|
||||
wmi==1.4.9
|
||||
wrapt==1.11.1 # via aws-xray-sdk
|
||||
xmltodict==0.12.0 # via moto
|
||||
|
|
|
@ -2,38 +2,46 @@
|
|||
# This file is autogenerated by pip-compile
|
||||
# To update, run:
|
||||
#
|
||||
# pip-compile -o requirements/static/py3.6/zeromq-osx.txt -v requirements/base.txt requirements/zeromq.txt requirements/pytest.txt requirements/static/osx.in
|
||||
# pip-compile -o requirements/static/py3.6/zeromq-osx.txt -v pkg/osx/req.txt pkg/osx/req_ext.txt requirements/base.txt requirements/zeromq.txt requirements/pytest.txt requirements/static/osx.in
|
||||
#
|
||||
apache-libcloud==2.0.0
|
||||
apache-libcloud==2.2.1
|
||||
asn1crypto==0.24.0 # via cryptography
|
||||
atomicwrites==1.3.0 # via pytest
|
||||
attrs==19.1.0 # via pytest
|
||||
aws-xray-sdk==0.95 # via moto
|
||||
backports-abc==0.5 # via tornado
|
||||
backports.ssl-match-hostname==3.7.0.1 # via websocket-client
|
||||
backports-abc==0.5
|
||||
backports.functools-lru-cache==1.5 # via cheroot
|
||||
backports.ssl_match_hostname==3.5.0.1
|
||||
bcrypt==3.1.6 # via paramiko
|
||||
boto3==1.9.132
|
||||
boto==2.49.0
|
||||
botocore==1.12.132 # via boto3, moto, s3transfer
|
||||
cachetools==3.1.0 # via google-auth
|
||||
certifi==2019.3.9 # via kubernetes, requests, tornado
|
||||
certifi==2019.3.9
|
||||
cffi==1.12.2
|
||||
chardet==3.0.4 # via requests
|
||||
cheroot==6.5.5 # via cherrypy
|
||||
cherrypy==13.0.0
|
||||
click==6.7
|
||||
clustershell==1.8.1
|
||||
coverage==4.5.3 # via pytest-cov
|
||||
croniter==0.3.29
|
||||
cryptography==2.6.1 # via moto, paramiko, pylxd, pyopenssl
|
||||
cryptography==2.6.1
|
||||
dnspython==1.16.0
|
||||
docker-pycreds==0.4.0 # via docker
|
||||
docker==3.7.2
|
||||
docutils==0.14 # via botocore
|
||||
ecdsa==0.13.2 # via python-jose
|
||||
enum34==1.1.6
|
||||
future==0.17.1 # via python-jose
|
||||
gitdb2==2.0.5 # via gitpython
|
||||
gitdb==0.6.4
|
||||
gitpython==2.1.11
|
||||
google-auth==1.6.3 # via kubernetes
|
||||
idna==2.8 # via requests
|
||||
ipaddress==1.0.22 # via kubernetes
|
||||
idna==2.8
|
||||
ipaddress==1.0.22
|
||||
jaraco.classes==2.0 # via cherrypy
|
||||
jaraco.functools==2.0 # via tempora
|
||||
jinja2==2.10.1
|
||||
jmespath==0.9.4 # via boto3, botocore
|
||||
jsondiff==1.1.1 # via moto
|
||||
|
@ -43,7 +51,9 @@ junos-eznc==2.2.0
|
|||
jxmlease==1.0.1
|
||||
keyring==5.7.1
|
||||
kubernetes==3.0.0
|
||||
linode-python==1.1.1
|
||||
lxml==4.3.3 # via junos-eznc, ncclient
|
||||
mako==1.0.7
|
||||
markupsafe==1.1.1
|
||||
mock==2.0.0 # via moto
|
||||
more-itertools==5.0.0
|
||||
|
@ -55,12 +65,13 @@ netaddr==0.7.19 # via junos-eznc
|
|||
paramiko==2.4.2 # via junos-eznc, ncclient, scp
|
||||
pbr==5.1.3 # via mock, pylxd
|
||||
pluggy==0.9.0 # via pytest
|
||||
portend==2.4 # via cherrypy
|
||||
psutil==5.6.1
|
||||
py==1.8.0 # via pytest
|
||||
pyaml==19.4.1 # via moto
|
||||
pyasn1-modules==0.2.4 # via google-auth
|
||||
pyasn1==0.4.5 # via paramiko, pyasn1-modules, rsa
|
||||
pycparser==2.19 # via cffi
|
||||
pyasn1==0.4.5
|
||||
pycparser==2.19
|
||||
# Next line explicitly commented out by pip-tools-compile because of the following regex: '^pycrypto==(.*)$'
|
||||
# pycrypto==2.6.1 ; sys_platform != "win32"
|
||||
pycryptodome==3.8.1
|
||||
|
@ -75,11 +86,11 @@ pytest-salt==2018.12.8
|
|||
pytest-tempdir==2018.8.11
|
||||
pytest-timeout==1.3.3
|
||||
pytest==4.4.1
|
||||
python-dateutil==2.8.0 # via botocore, croniter, kubernetes, moto, pylxd
|
||||
python-dateutil==2.8.0
|
||||
python-etcd==0.4.5
|
||||
python-gnupg==0.4.4
|
||||
python-jose==2.0.2 # via moto
|
||||
pytz==2019.1 # via moto
|
||||
pytz==2019.1 # via moto, tempora
|
||||
pyvmomi==6.7.1.2018.12
|
||||
pyyaml==3.13
|
||||
pyzmq==18.0.1 ; python_version != "3.4"
|
||||
|
@ -93,14 +104,17 @@ s3transfer==0.2.0 # via boto3
|
|||
salttesting==2017.6.1
|
||||
scp==0.13.2 # via junos-eznc
|
||||
setproctitle==1.1.10
|
||||
singledispatch==3.4.0.3 # via tornado
|
||||
six==1.12.0 # via bcrypt, cryptography, docker, docker-pycreds, google-auth, junos-eznc, kubernetes, mock, more-itertools, moto, ncclient, pylxd, pynacl, pyopenssl, pytest, python-dateutil, python-jose, pyvmomi, responses, salttesting, singledispatch, websocket-client
|
||||
singledispatch==3.4.0.3
|
||||
six==1.12.0
|
||||
smmap2==2.0.5 # via gitdb2
|
||||
smmap==0.9.0
|
||||
strict-rfc3339==0.7
|
||||
tempora==1.14.1 # via portend
|
||||
timelib==0.2.4
|
||||
tornado==4.5.3 ; python_version >= "3.4"
|
||||
urllib3==1.24.2 # via botocore, kubernetes, python-etcd, requests, requests-unixsocket
|
||||
virtualenv==16.4.3
|
||||
vultr==1.0rc1
|
||||
websocket-client==0.40.0 # via docker, kubernetes
|
||||
werkzeug==0.15.2 # via moto
|
||||
wrapt==1.11.1 # via aws-xray-sdk
|
||||
|
|
|
@ -2,33 +2,43 @@
|
|||
# This file is autogenerated by pip-compile
|
||||
# To update, run:
|
||||
#
|
||||
# pip-compile -o requirements/static/py3.6/zeromq-windows.txt -v requirements/base.txt requirements/zeromq.txt requirements/pytest.txt requirements/static/windows.in
|
||||
# pip-compile -o requirements/static/py3.6/zeromq-windows.txt -v pkg/windows/req.txt pkg/windows/req_win.txt requirements/base.txt requirements/zeromq.txt requirements/pytest.txt requirements/static/windows.in
|
||||
#
|
||||
asn1crypto==0.24.0 # via cryptography
|
||||
atomicwrites==1.3.0 # via pytest
|
||||
attrs==19.1.0 # via pytest
|
||||
aws-xray-sdk==0.95 # via moto
|
||||
backports.ssl-match-hostname==3.7.0.1 # via websocket-client
|
||||
backports-abc==0.5
|
||||
backports.functools-lru-cache==1.5 # via cheroot
|
||||
backports.ssl-match-hostname==3.7.0.1
|
||||
boto3==1.9.132
|
||||
boto==2.49.0
|
||||
botocore==1.12.132 # via boto3, moto, s3transfer
|
||||
cachetools==3.1.0 # via google-auth
|
||||
certifi==2019.3.9 # via kubernetes, requests
|
||||
cffi==1.12.2 # via cryptography, pygit2
|
||||
certifi==2019.3.9
|
||||
cffi==1.12.2
|
||||
chardet==3.0.4 # via requests
|
||||
cheroot==6.5.5 # via cherrypy
|
||||
cherrypy==10.2.1
|
||||
colorama==0.4.1 # via pytest
|
||||
coverage==4.5.3 # via pytest-cov
|
||||
cryptography==2.6.1 # via moto, pyopenssl
|
||||
cryptography==2.6.1
|
||||
dmidecode==0.9.0
|
||||
dnspython==1.16.0
|
||||
docker-pycreds==0.4.0 # via docker
|
||||
docker==2.7.0
|
||||
docutils==0.14 # via botocore
|
||||
ecdsa==0.13.2 # via python-jose
|
||||
enum34==1.1.6
|
||||
future==0.17.1 # via python-jose
|
||||
gitdb2==2.0.5 # via gitpython
|
||||
gitdb==0.6.4
|
||||
gitpython==2.1.3
|
||||
google-auth==1.6.3 # via kubernetes
|
||||
idna==2.8 # via requests
|
||||
ipaddress==1.0.22 # via kubernetes
|
||||
idna==2.8
|
||||
ioloop==0.1a0
|
||||
ipaddress==1.0.22
|
||||
jaraco.functools==2.0 # via tempora
|
||||
jinja2==2.10.1
|
||||
jmespath==0.9.4 # via boto3, botocore
|
||||
jsondiff==1.1.1 # via moto
|
||||
|
@ -36,6 +46,9 @@ jsonpickle==1.1 # via aws-xray-sdk
|
|||
jsonschema==2.6.0
|
||||
keyring==5.7.1
|
||||
kubernetes==3.0.0
|
||||
libnacl==1.6.1
|
||||
lxml==3.7.3
|
||||
mako==1.0.6
|
||||
markupsafe==1.1.1
|
||||
mock==2.0.0 # via moto
|
||||
more-itertools==5.0.0
|
||||
|
@ -45,16 +58,20 @@ msgpack==0.6.1
|
|||
patch==1.16
|
||||
pbr==5.1.3 # via mock
|
||||
pluggy==0.9.0 # via pytest
|
||||
portend==2.4 # via cherrypy
|
||||
psutil==5.6.1
|
||||
py==1.8.0 # via pytest
|
||||
pyaml==19.4.1 # via moto
|
||||
pyasn1-modules==0.2.4 # via google-auth
|
||||
pyasn1==0.4.5 # via pyasn1-modules, rsa
|
||||
pycparser==2.19 # via cffi
|
||||
pyasn1==0.4.5
|
||||
pycparser==2.19
|
||||
pycryptodome==3.8.1 # via python-jose
|
||||
pycryptodomex==3.8.1 ; sys_platform == "win32"
|
||||
pycurl==7.43.0
|
||||
pygit2==0.28.1
|
||||
pymysql==0.7.11
|
||||
pyopenssl==19.0.0
|
||||
pypiwin32==223 # via cherrypy
|
||||
pytest-cov==2.6.1
|
||||
pytest-helpers-namespace==2019.1.8
|
||||
pytest-salt-runtests-bridge==2019.1.30
|
||||
|
@ -62,11 +79,12 @@ pytest-salt==2018.12.8
|
|||
pytest-tempdir==2018.8.11
|
||||
pytest-timeout==1.3.3
|
||||
pytest==4.4.1
|
||||
python-dateutil==2.8.0 # via botocore, kubernetes, moto
|
||||
python-dateutil==2.8.0
|
||||
python-etcd==0.4.5
|
||||
python-gnupg==0.4.4
|
||||
python-jose==2.0.2 # via moto
|
||||
pytz==2019.1 # via moto
|
||||
pythonnet==2.3.0
|
||||
pytz==2019.1 # via moto, tempora
|
||||
pyvmomi==6.7.1.2018.12
|
||||
pywin32==223
|
||||
pyyaml==3.13
|
||||
|
@ -79,14 +97,19 @@ s3transfer==0.2.0 # via boto3
|
|||
salttesting==2017.6.1
|
||||
sed==0.3.1
|
||||
setproctitle==1.1.10
|
||||
six==1.12.0 # via cryptography, docker, docker-pycreds, google-auth, kubernetes, mock, more-itertools, moto, pygit2, pyopenssl, pytest, python-dateutil, python-jose, pyvmomi, responses, salttesting, websocket-client
|
||||
singledispatch==3.4.0.3
|
||||
six==1.12.0 # via cheroot, cherrypy, cryptography, docker, docker-pycreds, google-auth, kubernetes, mock, more-itertools, moto, pygit2, pyopenssl, pytest, python-dateutil, python-jose, pyvmomi, responses, salttesting, singledispatch, tempora, websocket-client
|
||||
smmap2==2.0.5 # via gitdb2
|
||||
smmap==0.9.0
|
||||
strict-rfc3339==0.7
|
||||
tempora==1.14.1 # via portend
|
||||
timelib==0.2.4
|
||||
tornado==4.5.3 ; python_version >= "3.4"
|
||||
urllib3==1.24.2 # via botocore, kubernetes, python-etcd, requests
|
||||
virtualenv==16.4.3
|
||||
websocket-client==0.40.0 # via docker, kubernetes
|
||||
werkzeug==0.15.2 # via moto
|
||||
wheel==0.33.4
|
||||
wmi==1.4.9
|
||||
wrapt==1.11.1 # via aws-xray-sdk
|
||||
xmltodict==0.12.0 # via moto
|
||||
|
|
requirements/static/py3.7/docs.txt (new file, 30 lines)
@@ -0,0 +1,30 @@
#
# This file is autogenerated by pip-compile
# To update, run:
#
# pip-compile -o requirements/static/py3.7/docs.txt -v requirements/static/docs.in
#
alabaster==0.7.12 # via sphinx
babel==2.7.0 # via sphinx
certifi==2019.3.9 # via requests
chardet==3.0.4 # via requests
docutils==0.14 # via sphinx
idna==2.8 # via requests
imagesize==1.1.0 # via sphinx
jinja2==2.10.1 # via sphinx
markupsafe==1.1.1 # via jinja2
packaging==19.0 # via sphinx
pygments==2.4.2 # via sphinx
pyparsing==2.4.0 # via packaging
pytz==2019.1 # via babel
requests==2.22.0 # via sphinx
six==1.12.0 # via packaging
snowballstemmer==1.2.1 # via sphinx
sphinx==2.0.1
sphinxcontrib-applehelp==1.0.1 # via sphinx
sphinxcontrib-devhelp==1.0.1 # via sphinx
sphinxcontrib-htmlhelp==1.0.2 # via sphinx
sphinxcontrib-jsmath==1.0.1 # via sphinx
sphinxcontrib-qthelp==1.0.2 # via sphinx
sphinxcontrib-serializinghtml==1.1.3 # via sphinx
urllib3==1.25.3 # via requests
@ -2,41 +2,48 @@
|
|||
# This file is autogenerated by pip-compile
|
||||
# To update, run:
|
||||
#
|
||||
# pip-compile -o requirements/static/py3.7/raet-osx.txt -v requirements/base.txt requirements/raet.txt requirements/pytest.txt requirements/static/osx.in
|
||||
# pip-compile -o requirements/static/py3.7/raet-osx.txt -v pkg/osx/req.txt pkg/osx/req_ext.txt requirements/base.txt requirements/raet.txt requirements/pytest.txt requirements/static/osx.in
|
||||
#
|
||||
apache-libcloud==2.0.0
|
||||
apache-libcloud==2.2.1
|
||||
asn1crypto==0.24.0 # via cryptography
|
||||
atomicwrites==1.3.0 # via pytest
|
||||
attrs==19.1.0 # via pytest
|
||||
aws-xray-sdk==0.95 # via moto
|
||||
backports-abc==0.5 # via tornado
|
||||
backports.ssl-match-hostname==3.7.0.1 # via websocket-client
|
||||
backports-abc==0.5
|
||||
backports.functools-lru-cache==1.5 # via cheroot
|
||||
backports.ssl_match_hostname==3.5.0.1
|
||||
bcrypt==3.1.6 # via paramiko
|
||||
boto3==1.9.132
|
||||
boto==2.49.0
|
||||
botocore==1.12.132 # via boto3, moto, s3transfer
|
||||
cachetools==3.1.0 # via google-auth
|
||||
certifi==2019.3.9 # via kubernetes, requests, tornado
|
||||
certifi==2019.3.9
|
||||
cffi==1.12.2
|
||||
chardet==3.0.4 # via requests
|
||||
cheroot==6.5.5 # via cherrypy
|
||||
cherrypy==13.0.0
|
||||
click==6.7
|
||||
clustershell==1.8.1
|
||||
coverage==4.5.3 # via pytest-cov
|
||||
croniter==0.3.29
|
||||
cryptography==2.6.1 # via moto, paramiko, pylxd, pyopenssl
|
||||
cryptography==2.6.1
|
||||
dnspython==1.16.0
|
||||
docker-pycreds==0.4.0 # via docker
|
||||
docker==3.7.2
|
||||
docutils==0.14 # via botocore
|
||||
ecdsa==0.13.2 # via python-jose
|
||||
# Next line explicitly commented out by pip-tools-compile because of the following regex: '^enum34==(.*)$'
|
||||
# enum34==1.1.6 # via raet
|
||||
# enum34==1.1.6
|
||||
future==0.17.1 # via python-jose
|
||||
gitdb2==2.0.5 # via gitpython
|
||||
gitdb==0.6.4
|
||||
gitpython==2.1.11
|
||||
google-auth==1.6.3 # via kubernetes
|
||||
idna==2.8 # via requests
|
||||
idna==2.8
|
||||
ioflo==1.7.5
|
||||
ipaddress==1.0.22 # via kubernetes
|
||||
ipaddress==1.0.22
|
||||
jaraco.classes==2.0 # via cherrypy
|
||||
jaraco.functools==2.0 # via tempora
|
||||
jinja2==2.10.1
|
||||
jmespath==0.9.4 # via boto3, botocore
|
||||
jsondiff==1.1.1 # via moto
|
||||
|
@ -47,7 +54,9 @@ jxmlease==1.0.1
|
|||
keyring==5.7.1
|
||||
kubernetes==3.0.0
|
||||
libnacl==1.6.1
|
||||
linode-python==1.1.1
|
||||
lxml==4.3.3 # via junos-eznc, ncclient
|
||||
mako==1.0.7
|
||||
markupsafe==1.1.1
|
||||
mock==2.0.0 # via moto
|
||||
more-itertools==5.0.0
|
||||
|
@ -59,12 +68,13 @@ netaddr==0.7.19 # via junos-eznc
|
|||
paramiko==2.4.2 # via junos-eznc, ncclient, scp
|
||||
pbr==5.1.3 # via mock, pylxd
|
||||
pluggy==0.9.0 # via pytest
|
||||
portend==2.4 # via cherrypy
|
||||
psutil==5.6.1
|
||||
py==1.8.0 # via pytest
|
||||
pyaml==19.4.1 # via moto
|
||||
pyasn1-modules==0.2.4 # via google-auth
|
||||
pyasn1==0.4.5 # via paramiko, pyasn1-modules, rsa
|
||||
pycparser==2.19 # via cffi
|
||||
pyasn1==0.4.5
|
||||
pycparser==2.19
|
||||
pycryptodome==3.8.1
|
||||
pylxd==2.2.9
|
||||
pynacl==1.3.0 # via paramiko
|
||||
|
@ -77,13 +87,14 @@ pytest-salt==2018.12.8
|
|||
pytest-tempdir==2018.8.11
|
||||
pytest-timeout==1.3.3
|
||||
pytest==4.4.1
|
||||
python-dateutil==2.8.0 # via botocore, croniter, kubernetes, moto, pylxd
|
||||
python-dateutil==2.8.0
|
||||
python-etcd==0.4.5
|
||||
python-gnupg==0.4.4
|
||||
python-jose==2.0.2 # via moto
|
||||
pytz==2019.1 # via moto
|
||||
pytz==2019.1 # via moto, tempora
|
||||
pyvmomi==6.7.1.2018.12
|
||||
pyyaml==3.13
|
||||
pyzmq==18.0.1
|
||||
raet==0.6.8
|
||||
requests-toolbelt==0.9.1 # via pylxd
|
||||
requests-unixsocket==0.1.5 # via pylxd
|
||||
|
@ -95,14 +106,17 @@ s3transfer==0.2.0 # via boto3
|
|||
salttesting==2017.6.1
|
||||
scp==0.13.2 # via junos-eznc
|
||||
setproctitle==1.1.10
|
||||
singledispatch==3.4.0.3 # via tornado
|
||||
six==1.12.0 # via bcrypt, cryptography, docker, docker-pycreds, google-auth, junos-eznc, kubernetes, mock, more-itertools, moto, ncclient, pylxd, pynacl, pyopenssl, pytest, python-dateutil, python-jose, pyvmomi, raet, responses, salttesting, singledispatch, websocket-client
|
||||
singledispatch==3.4.0.3
|
||||
six==1.12.0
|
||||
smmap2==2.0.5 # via gitdb2
|
||||
smmap==0.9.0
|
||||
strict-rfc3339==0.7
|
||||
tempora==1.14.1 # via portend
|
||||
timelib==0.2.4
|
||||
tornado==4.5.3 ; python_version >= "3.4"
|
||||
urllib3==1.24.2 # via botocore, kubernetes, python-etcd, requests, requests-unixsocket
|
||||
virtualenv==16.4.3
|
||||
vultr==1.0rc1
|
||||
websocket-client==0.40.0 # via docker, kubernetes
|
||||
werkzeug==0.15.2 # via moto
|
||||
wrapt==1.11.1 # via aws-xray-sdk
|
||||
|
|
|
@ -2,24 +2,27 @@
|
|||
# This file is autogenerated by pip-compile
|
||||
# To update, run:
|
||||
#
|
||||
# pip-compile -o requirements/static/py3.7/raet-windows.txt -v requirements/base.txt requirements/raet.txt requirements/pytest.txt requirements/static/windows.in
|
||||
# pip-compile -o requirements/static/py3.7/raet-windows.txt -v pkg/windows/req.txt pkg/windows/req_win.txt requirements/base.txt requirements/raet.txt requirements/pytest.txt requirements/static/windows.in
|
||||
#
|
||||
asn1crypto==0.24.0 # via cryptography
|
||||
atomicwrites==1.3.0 # via pytest
|
||||
attrs==19.1.0 # via pytest
|
||||
aws-xray-sdk==0.95 # via moto
|
||||
backports-abc==0.5 # via tornado
|
||||
backports.ssl-match-hostname==3.7.0.1 # via websocket-client
|
||||
backports-abc==0.5
|
||||
backports.functools-lru-cache==1.5 # via cheroot
|
||||
backports.ssl-match-hostname==3.7.0.1
|
||||
boto3==1.9.132
|
||||
boto==2.49.0
|
||||
botocore==1.12.132 # via boto3, moto, s3transfer
|
||||
cachetools==3.1.0 # via google-auth
|
||||
certifi==2019.3.9 # via kubernetes, requests, tornado
|
||||
cffi==1.12.2 # via cryptography, pygit2
|
||||
certifi==2019.3.9
|
||||
cffi==1.12.2
|
||||
chardet==3.0.4 # via requests
|
||||
cheroot==6.5.5 # via cherrypy
|
||||
cherrypy==10.2.1
|
||||
colorama==0.4.1 # via pytest
|
||||
coverage==4.5.3 # via pytest-cov
|
||||
cryptography==2.6.1 # via moto, pyopenssl
|
||||
cryptography==2.6.1
|
||||
dmidecode==0.9.0
|
||||
dnspython==1.16.0
|
||||
docker-pycreds==0.4.0 # via docker
|
||||
|
@ -27,12 +30,17 @@ docker==2.7.0
|
|||
docutils==0.14 # via botocore
|
||||
ecdsa==0.13.2 # via python-jose
|
||||
# Next line explicitly commented out by pip-tools-compile because of the following regex: '^enum34==(.*)$'
|
||||
# enum34==1.1.6 # via raet
|
||||
# enum34==1.1.6
|
||||
future==0.17.1 # via python-jose
|
||||
gitdb2==2.0.5 # via gitpython
|
||||
gitdb==0.6.4
|
||||
gitpython==2.1.3
|
||||
google-auth==1.6.3 # via kubernetes
|
||||
idna==2.8 # via requests
|
||||
idna==2.8
|
||||
ioflo==1.7.5
|
||||
ipaddress==1.0.22 # via kubernetes
|
||||
ioloop==0.1a0
|
||||
ipaddress==1.0.22
|
||||
jaraco.functools==2.0 # via tempora
|
||||
jinja2==2.10.1
|
||||
jmespath==0.9.4 # via boto3, botocore
|
||||
jsondiff==1.1.1 # via moto
|
||||
|
@ -41,6 +49,8 @@ jsonschema==2.6.0
|
|||
keyring==5.7.1
|
||||
kubernetes==3.0.0
|
||||
libnacl==1.6.1
|
||||
lxml==3.7.3
|
||||
mako==1.0.6
|
||||
markupsafe==1.1.1
|
||||
mock==2.0.0 # via moto
|
||||
more-itertools==5.0.0
|
||||
|
@ -50,15 +60,20 @@ msgpack==0.6.1
|
|||
patch==1.16
|
||||
pbr==5.1.3 # via mock
|
||||
pluggy==0.9.0 # via pytest
|
||||
portend==2.4 # via cherrypy
|
||||
psutil==5.6.1
|
||||
py==1.8.0 # via pytest
|
||||
pyaml==19.4.1 # via moto
|
||||
pyasn1-modules==0.2.4 # via google-auth
|
||||
pyasn1==0.4.5 # via pyasn1-modules, rsa
|
||||
pycparser==2.19 # via cffi
|
||||
pyasn1==0.4.5
|
||||
pycparser==2.19
|
||||
pycryptodome==3.8.1 # via python-jose
|
||||
pycryptodomex==3.8.1
|
||||
pycurl==7.43.0
|
||||
pygit2==0.28.1
|
||||
pymysql==0.7.11
|
||||
pyopenssl==19.0.0
|
||||
pypiwin32==223 # via cherrypy
|
||||
pytest-cov==2.6.1
|
||||
pytest-helpers-namespace==2019.1.8
|
||||
pytest-salt-runtests-bridge==2019.1.30
|
||||
|
@ -66,14 +81,16 @@ pytest-salt==2018.12.8
|
|||
pytest-tempdir==2018.8.11
|
||||
pytest-timeout==1.3.3
|
||||
pytest==4.4.1
|
||||
python-dateutil==2.8.0 # via botocore, kubernetes, moto
|
||||
python-dateutil==2.8.0
|
||||
python-etcd==0.4.5
|
||||
python-gnupg==0.4.4
|
||||
python-jose==2.0.2 # via moto
|
||||
pytz==2019.1 # via moto
|
||||
pythonnet==2.3.0
|
||||
pytz==2019.1 # via moto, tempora
|
||||
pyvmomi==6.7.1.2018.12
|
||||
pywin32==223
|
||||
pyyaml==3.13
|
||||
pyzmq==18.0.1
|
||||
raet==0.6.8
|
||||
requests==2.21.0
|
||||
responses==0.10.6 # via moto
|
||||
|
@ -83,15 +100,19 @@ s3transfer==0.2.0 # via boto3
|
|||
salttesting==2017.6.1
|
||||
sed==0.3.1
|
||||
setproctitle==1.1.10
|
||||
singledispatch==3.4.0.3 # via tornado
|
||||
six==1.12.0 # via cryptography, docker, docker-pycreds, google-auth, kubernetes, mock, more-itertools, moto, pygit2, pyopenssl, pytest, python-dateutil, python-jose, pyvmomi, raet, responses, salttesting, singledispatch, websocket-client
|
||||
singledispatch==3.4.0.3
|
||||
six==1.12.0 # via cheroot, cherrypy, cryptography, docker, docker-pycreds, google-auth, kubernetes, mock, more-itertools, moto, pygit2, pyopenssl, pytest, python-dateutil, python-jose, pyvmomi, raet, responses, salttesting, singledispatch, tempora, websocket-client
|
||||
smmap2==2.0.5 # via gitdb2
|
||||
smmap==0.9.0
|
||||
strict-rfc3339==0.7
|
||||
tempora==1.14.1 # via portend
|
||||
timelib==0.2.4
|
||||
tornado==4.5.3 ; python_version >= "3.4"
|
||||
urllib3==1.24.2 # via botocore, kubernetes, python-etcd, requests
|
||||
virtualenv==16.4.3
|
||||
websocket-client==0.40.0 # via docker, kubernetes
|
||||
werkzeug==0.15.2 # via moto
|
||||
wheel==0.33.4
|
||||
wmi==1.4.9
|
||||
wrapt==1.11.1 # via aws-xray-sdk
|
||||
xmltodict==0.12.0 # via moto
|
||||
|
|
|
@ -2,38 +2,46 @@
|
|||
# This file is autogenerated by pip-compile
|
||||
# To update, run:
|
||||
#
|
||||
# pip-compile -o requirements/static/py3.7/zeromq-osx.txt -v requirements/base.txt requirements/zeromq.txt requirements/pytest.txt requirements/static/osx.in
|
||||
# pip-compile -o requirements/static/py3.7/zeromq-osx.txt -v pkg/osx/req.txt pkg/osx/req_ext.txt requirements/base.txt requirements/zeromq.txt requirements/pytest.txt requirements/static/osx.in
|
||||
#
|
||||
apache-libcloud==2.0.0
|
||||
apache-libcloud==2.2.1
|
||||
asn1crypto==0.24.0 # via cryptography
|
||||
atomicwrites==1.3.0 # via pytest
|
||||
attrs==19.1.0 # via pytest
|
||||
aws-xray-sdk==0.95 # via moto
|
||||
backports-abc==0.5 # via tornado
|
||||
backports.ssl-match-hostname==3.7.0.1 # via websocket-client
|
||||
backports-abc==0.5
|
||||
backports.functools-lru-cache==1.5 # via cheroot
|
||||
backports.ssl_match_hostname==3.5.0.1
|
||||
bcrypt==3.1.6 # via paramiko
|
||||
boto3==1.9.132
|
||||
boto==2.49.0
|
||||
botocore==1.12.132 # via boto3, moto, s3transfer
|
||||
cachetools==3.1.0 # via google-auth
|
||||
certifi==2019.3.9 # via kubernetes, requests, tornado
|
||||
certifi==2019.3.9
|
||||
cffi==1.12.2
|
||||
chardet==3.0.4 # via requests
|
||||
cheroot==6.5.5 # via cherrypy
|
||||
cherrypy==13.0.0
|
||||
click==6.7
|
||||
clustershell==1.8.1
|
||||
coverage==4.5.3 # via pytest-cov
|
||||
croniter==0.3.29
|
||||
cryptography==2.6.1 # via moto, paramiko, pylxd, pyopenssl
|
||||
cryptography==2.6.1
|
||||
dnspython==1.16.0
|
||||
docker-pycreds==0.4.0 # via docker
|
||||
docker==3.7.2
|
||||
docutils==0.14 # via botocore
|
||||
ecdsa==0.13.2 # via python-jose
|
||||
enum34==1.1.6
|
||||
future==0.17.1 # via python-jose
|
||||
gitdb2==2.0.5 # via gitpython
|
||||
gitdb==0.6.4
|
||||
gitpython==2.1.11
|
||||
google-auth==1.6.3 # via kubernetes
|
||||
idna==2.8 # via requests
|
||||
ipaddress==1.0.22 # via kubernetes
|
||||
idna==2.8
|
||||
ipaddress==1.0.22
|
||||
jaraco.classes==2.0 # via cherrypy
|
||||
jaraco.functools==2.0 # via tempora
|
||||
jinja2==2.10.1
|
||||
jmespath==0.9.4 # via boto3, botocore
|
||||
jsondiff==1.1.1 # via moto
|
||||
|
@ -43,7 +51,9 @@ junos-eznc==2.2.0
|
|||
jxmlease==1.0.1
|
||||
keyring==5.7.1
|
||||
kubernetes==3.0.0
|
||||
linode-python==1.1.1
|
||||
lxml==4.3.3 # via junos-eznc, ncclient
|
||||
mako==1.0.7
|
||||
markupsafe==1.1.1
|
||||
mock==2.0.0 # via moto
|
||||
more-itertools==5.0.0
|
||||
|
@ -55,12 +65,13 @@ netaddr==0.7.19 # via junos-eznc
|
|||
paramiko==2.4.2 # via junos-eznc, ncclient, scp
|
||||
pbr==5.1.3 # via mock, pylxd
|
||||
pluggy==0.9.0 # via pytest
|
||||
portend==2.4 # via cherrypy
|
||||
psutil==5.6.1
|
||||
py==1.8.0 # via pytest
|
||||
pyaml==19.4.1 # via moto
|
||||
pyasn1-modules==0.2.4 # via google-auth
|
||||
pyasn1==0.4.5 # via paramiko, pyasn1-modules, rsa
|
||||
pycparser==2.19 # via cffi
|
||||
pyasn1==0.4.5
|
||||
pycparser==2.19
|
||||
# Next line explicitly commented out by pip-tools-compile because of the following regex: '^pycrypto==(.*)$'
|
||||
# pycrypto==2.6.1 ; sys_platform != "win32"
|
||||
pycryptodome==3.8.1
|
||||
|
@ -75,11 +86,11 @@ pytest-salt==2018.12.8
|
|||
pytest-tempdir==2018.8.11
|
||||
pytest-timeout==1.3.3
|
||||
pytest==4.4.1
|
||||
python-dateutil==2.8.0 # via botocore, croniter, kubernetes, moto, pylxd
|
||||
python-dateutil==2.8.0
|
||||
python-etcd==0.4.5
|
||||
python-gnupg==0.4.4
|
||||
python-jose==2.0.2 # via moto
|
||||
pytz==2019.1 # via moto
|
||||
pytz==2019.1 # via moto, tempora
|
||||
pyvmomi==6.7.1.2018.12
|
||||
pyyaml==3.13
|
||||
pyzmq==18.0.1 ; python_version != "3.4"
|
||||
|
@ -93,14 +104,17 @@ s3transfer==0.2.0 # via boto3
|
|||
salttesting==2017.6.1
|
||||
scp==0.13.2 # via junos-eznc
|
||||
setproctitle==1.1.10
|
||||
singledispatch==3.4.0.3 # via tornado
|
||||
six==1.12.0 # via bcrypt, cryptography, docker, docker-pycreds, google-auth, junos-eznc, kubernetes, mock, more-itertools, moto, ncclient, pylxd, pynacl, pyopenssl, pytest, python-dateutil, python-jose, pyvmomi, responses, salttesting, singledispatch, websocket-client
|
||||
singledispatch==3.4.0.3
|
||||
six==1.12.0
|
||||
smmap2==2.0.5 # via gitdb2
|
||||
smmap==0.9.0
|
||||
strict-rfc3339==0.7
|
||||
tempora==1.14.1 # via portend
|
||||
timelib==0.2.4
|
||||
tornado==4.5.3 ; python_version >= "3.4"
|
||||
urllib3==1.24.2 # via botocore, kubernetes, python-etcd, requests, requests-unixsocket
|
||||
virtualenv==16.4.3
|
||||
vultr==1.0rc1
|
||||
websocket-client==0.40.0 # via docker, kubernetes
|
||||
werkzeug==0.15.2 # via moto
|
||||
wrapt==1.11.1 # via aws-xray-sdk
|
||||
|
|
|
@ -2,34 +2,43 @@
|
|||
# This file is autogenerated by pip-compile
|
||||
# To update, run:
|
||||
#
|
||||
# pip-compile -o requirements/static/py3.7/zeromq-windows.txt -v requirements/base.txt requirements/zeromq.txt requirements/pytest.txt requirements/static/windows.in
|
||||
# pip-compile -o requirements/static/py3.7/zeromq-windows.txt -v pkg/windows/req.txt pkg/windows/req_win.txt requirements/base.txt requirements/zeromq.txt requirements/pytest.txt requirements/static/windows.in
|
||||
#
|
||||
asn1crypto==0.24.0 # via cryptography
|
||||
atomicwrites==1.3.0 # via pytest
|
||||
attrs==19.1.0 # via pytest
|
||||
aws-xray-sdk==0.95 # via moto
|
||||
backports-abc==0.5 # via tornado
|
||||
backports.ssl-match-hostname==3.7.0.1 # via websocket-client
|
||||
backports-abc==0.5
|
||||
backports.functools-lru-cache==1.5 # via cheroot
|
||||
backports.ssl-match-hostname==3.7.0.1
|
||||
boto3==1.9.132
|
||||
boto==2.49.0
|
||||
botocore==1.12.132 # via boto3, moto, s3transfer
|
||||
cachetools==3.1.0 # via google-auth
|
||||
certifi==2019.3.9 # via kubernetes, requests, tornado
|
||||
cffi==1.12.2 # via cryptography, pygit2
|
||||
certifi==2019.3.9
|
||||
cffi==1.12.2
|
||||
chardet==3.0.4 # via requests
|
||||
cheroot==6.5.5 # via cherrypy
|
||||
cherrypy==10.2.1
|
||||
colorama==0.4.1 # via pytest
|
||||
coverage==4.5.3 # via pytest-cov
|
||||
cryptography==2.6.1 # via moto, pyopenssl
|
||||
cryptography==2.6.1
|
||||
dmidecode==0.9.0
|
||||
dnspython==1.16.0
|
||||
docker-pycreds==0.4.0 # via docker
|
||||
docker==2.7.0
|
||||
docutils==0.14 # via botocore
|
||||
ecdsa==0.13.2 # via python-jose
|
||||
enum34==1.1.6
|
||||
future==0.17.1 # via python-jose
|
||||
gitdb2==2.0.5 # via gitpython
|
||||
gitdb==0.6.4
|
||||
gitpython==2.1.3
|
||||
google-auth==1.6.3 # via kubernetes
|
||||
idna==2.8 # via requests
|
||||
ipaddress==1.0.22 # via kubernetes
|
||||
idna==2.8
|
||||
ioloop==0.1a0
|
||||
ipaddress==1.0.22
|
||||
jaraco.functools==2.0 # via tempora
|
||||
jinja2==2.10.1
|
||||
jmespath==0.9.4 # via boto3, botocore
|
||||
jsondiff==1.1.1 # via moto
|
||||
|
@ -37,6 +46,9 @@ jsonpickle==1.1 # via aws-xray-sdk
|
|||
jsonschema==2.6.0
|
||||
keyring==5.7.1
|
||||
kubernetes==3.0.0
|
||||
libnacl==1.6.1
|
||||
lxml==3.7.3
|
||||
mako==1.0.6
|
||||
markupsafe==1.1.1
|
||||
mock==2.0.0 # via moto
|
||||
more-itertools==5.0.0
|
||||
|
@ -46,16 +58,20 @@ msgpack==0.6.1
|
|||
patch==1.16
|
||||
pbr==5.1.3 # via mock
|
||||
pluggy==0.9.0 # via pytest
|
||||
portend==2.4 # via cherrypy
|
||||
psutil==5.6.1
|
||||
py==1.8.0 # via pytest
|
||||
pyaml==19.4.1 # via moto
|
||||
pyasn1-modules==0.2.4 # via google-auth
|
||||
pyasn1==0.4.5 # via pyasn1-modules, rsa
|
||||
pycparser==2.19 # via cffi
|
||||
pyasn1==0.4.5
|
||||
pycparser==2.19
|
||||
pycryptodome==3.8.1 # via python-jose
|
||||
pycryptodomex==3.8.1 ; sys_platform == "win32"
|
||||
pycurl==7.43.0
|
||||
pygit2==0.28.1
|
||||
pymysql==0.7.11
|
||||
pyopenssl==19.0.0
|
||||
pypiwin32==223 # via cherrypy
|
||||
pytest-cov==2.6.1
|
||||
pytest-helpers-namespace==2019.1.8
|
||||
pytest-salt-runtests-bridge==2019.1.30
|
||||
|
@ -63,11 +79,12 @@ pytest-salt==2018.12.8
|
|||
pytest-tempdir==2018.8.11
|
||||
pytest-timeout==1.3.3
|
||||
pytest==4.4.1
|
||||
python-dateutil==2.8.0 # via botocore, kubernetes, moto
|
||||
python-dateutil==2.8.0
|
||||
python-etcd==0.4.5
|
||||
python-gnupg==0.4.4
|
||||
python-jose==2.0.2 # via moto
|
||||
pytz==2019.1 # via moto
|
||||
pythonnet==2.3.0
|
||||
pytz==2019.1 # via moto, tempora
|
||||
pyvmomi==6.7.1.2018.12
|
||||
pywin32==223
|
||||
pyyaml==3.13
|
||||
|
@ -80,15 +97,19 @@ s3transfer==0.2.0 # via boto3
|
|||
salttesting==2017.6.1
|
||||
sed==0.3.1
|
||||
setproctitle==1.1.10
|
||||
singledispatch==3.4.0.3 # via tornado
|
||||
six==1.12.0 # via cryptography, docker, docker-pycreds, google-auth, kubernetes, mock, more-itertools, moto, pygit2, pyopenssl, pytest, python-dateutil, python-jose, pyvmomi, responses, salttesting, singledispatch, websocket-client
|
||||
singledispatch==3.4.0.3
|
||||
six==1.12.0 # via cheroot, cherrypy, cryptography, docker, docker-pycreds, google-auth, kubernetes, mock, more-itertools, moto, pygit2, pyopenssl, pytest, python-dateutil, python-jose, pyvmomi, responses, salttesting, singledispatch, tempora, websocket-client
|
||||
smmap2==2.0.5 # via gitdb2
|
||||
smmap==0.9.0
|
||||
strict-rfc3339==0.7
|
||||
tempora==1.14.1 # via portend
|
||||
timelib==0.2.4
|
||||
tornado==4.5.3 ; python_version >= "3.4"
|
||||
urllib3==1.24.2 # via botocore, kubernetes, python-etcd, requests
|
||||
virtualenv==16.4.3
|
||||
websocket-client==0.40.0 # via docker, kubernetes
|
||||
werkzeug==0.15.2 # via moto
|
||||
wheel==0.33.4
|
||||
wmi==1.4.9
|
||||
wrapt==1.11.1 # via aws-xray-sdk
|
||||
xmltodict==0.12.0 # via moto
|
||||
|
|
|
@@ -2004,6 +2004,7 @@ class LocalClient(object):
        raise tornado.gen.Return({'jid': payload['load']['jid'],
                                  'minions': payload['load']['minions']})

    # pylint: disable=W1701
    def __del__(self):
        # This IS really necessary!
        # When running tests, if self.events is not destroyed, we leak 2
@@ -2011,6 +2012,7 @@ class LocalClient(object):
        if hasattr(self, 'event'):
            # The call below will take care of calling 'self.event.destroy()'
            del self.event
    # pylint: enable=W1701

    def _clean_up_subscriptions(self, job_id):
        if self.opts.get('order_masters'):
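
The two hunks above, like most of the transport and event hunks further down, wrap an existing __del__ finalizer in # pylint: disable=W1701 / # pylint: enable=W1701 so the linter's warning about defining __del__ is silenced without removing the cleanup code. A minimal sketch of the pattern, using a hypothetical ManagedResource class that is not part of Salt:

import logging

log = logging.getLogger(__name__)


class ManagedResource(object):

    def __init__(self):
        self.closed = False

    def close(self):
        # Explicit close() remains the primary cleanup path.
        if not self.closed:
            self.closed = True
            log.debug('resource closed')

    # pylint: disable=W1701
    def __del__(self):
        # Last-resort cleanup, mirroring the pragma placement in the hunks above.
        self.close()
    # pylint: enable=W1701


if __name__ == '__main__':
    logging.basicConfig(level=logging.DEBUG)
    ManagedResource().close()
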
@@ -182,8 +182,10 @@ class SaltRaetWorkerSetup(ioflo.base.deeding.Deed):
        self.stack.value.transmit(init, self.stack.value.fetchUidByName(manor_yard.name))
        self.stack.value.serviceAll()

    # pylint: disable=W1701
    def __del__(self):
        self.stack.server.close()
    # pylint: enable=W1701


class SaltRaetWorkerRouter(ioflo.base.deeding.Deed):
@@ -1067,13 +1067,12 @@ def __process_multiprocessing_logging_queue(opts, queue):
    setup_extended_logging(opts)
    while True:
        try:
            record_dict = queue.get()
            if record_dict is None:
            record = queue.get()
            if record is None:
                # A sentinel to stop processing the queue
                break
            # Just log everything, filtering will happen on the main process
            # logging handlers
            record = logging.makeLogRecord(record_dict)
            logger = logging.getLogger(record.name)
            logger.handle(record)
        except (EOFError, KeyboardInterrupt, SystemExit):
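
The hunk above changes the queue consumer to handle logging.LogRecord objects pulled straight off the multiprocessing queue, dropping the old logging.makeLogRecord(record_dict) round-trip while keeping the None sentinel. A standalone sketch of that consumer shape, assuming plain stdlib logging and multiprocessing rather than Salt's own setup helpers:

import logging
import multiprocessing


def process_logging_queue(queue):
    # Pull fully-formed LogRecord objects; a None entry stops the loop.
    while True:
        record = queue.get()
        if record is None:
            break
        # Filtering happens on the handlers of the receiving process.
        logging.getLogger(record.name).handle(record)


if __name__ == '__main__':
    logging.basicConfig(level=logging.DEBUG)
    queue = multiprocessing.Queue()
    queue.put(logging.LogRecord('demo', logging.INFO, __file__, 1,
                                'hello from the queue', None, None))
    queue.put(None)  # sentinel
    process_logging_queue(queue)
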
@@ -916,8 +916,10 @@ class ReqServer(salt.utils.process.SignalHandlingMultiprocessingProcess):
        self.process_manager.send_signal_to_processes(signum)
        self.process_manager.kill_children()

    # pylint: disable=W1701
    def __del__(self):
        self.destroy()
    # pylint: enable=W1701


class MWorker(salt.utils.process.SignalHandlingMultiprocessingProcess):
@@ -941,8 +941,10 @@ class MinionManager(MinionBase):
        self.process_manager = ProcessManager(name='MultiMinionProcessManager')
        self.io_loop.spawn_callback(self.process_manager.run, **{'asynchronous': True})  # Tornado backward compat

    # pylint: disable=W1701
    def __del__(self):
        self.destroy()
    # pylint: enable=W1701

    def _bind(self):
        # start up the event publisher, so we can see events during startup
@@ -2742,8 +2744,10 @@ class Minion(MinionBase):
        for cb in six.itervalues(self.periodic_callbacks):
            cb.stop()

    # pylint: disable=W1701
    def __del__(self):
        self.destroy()
    # pylint: enable=W1701


class Syndic(Minion):
@@ -12,13 +12,22 @@ import time

# Import Salt libs
import salt.utils.http
from salt.exceptions import CommandExecutionError

# Import 3rd-party libs
from salt.ext import six


def query(url, **kwargs):
    '''
    .. versionadded:: 2015.5.0

    Query a resource, and decode the return data

    .. versionadded:: 2015.5.0
    raise_error : True
        If ``False``, and if a connection cannot be made, the error will be
        suppressed and the body of the return will simply be ``None``.


    CLI Example:

@@ -35,7 +44,10 @@ def query(url, **kwargs):
        opts.update(kwargs['opts'])
        del kwargs['opts']

    return salt.utils.http.query(url=url, opts=opts, **kwargs)
    try:
        return salt.utils.http.query(url=url, opts=opts, **kwargs)
    except Exception as exc:
        raise CommandExecutionError(six.text_type(exc))


def wait_for_successful_query(url, wait_for=300, **kwargs):
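
The two hunks above document the new raise_error pass-through and re-raise any other failure from salt.utils.http.query as CommandExecutionError. A small usage sketch, assuming a Salt checkout is importable and that nothing is listening on the local port used here:

import salt.utils.http

# With this change applied, raise_error=False swallows the connection error
# and the body comes back as None, matching the new unit test at the end of
# this diff.
print(salt.utils.http.query('http://127.0.0.1:1/', raise_error=False))

From the CLI the same behaviour would look something like salt '*' http.query http://127.0.0.1:1/ raise_error=False (illustrative target only).
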
@@ -400,5 +400,7 @@ class SREQ(object):
        if self.context.closed is False:
            self.context.term()

    # pylint: disable=W1701
    def __del__(self):
        self.destroy()
    # pylint: enable=W1701
@@ -46,7 +46,8 @@ optional. The following ssl options are simply for illustration purposes:

Should you wish the returner data to be cleaned out every so often, set
`keep_jobs` to the number of hours for the jobs to live in the tables.
Setting it to `0` or leaving it unset will cause the data to stay in the tables.
Setting it to `0` will cause the data to stay in the tables. The default
setting for `keep_jobs` is set to `24`.

Should you wish to archive jobs in a different table for later processing,
set `archive_jobs` to True. Salt will create 3 archive tables
@@ -216,8 +216,10 @@ class IPCServer(object):
        if hasattr(self.sock, 'close'):
            self.sock.close()

    # pylint: disable=W1701
    def __del__(self):
        self.close()
    # pylint: enable=W1701


class IPCClient(object):
@@ -323,6 +325,7 @@ class IPCClient(object):

            yield tornado.gen.sleep(1)

    # pylint: disable=W1701
    def __del__(self):
        try:
            self.close()
@@ -334,6 +337,7 @@ class IPCClient(object):
            # This is raised when Python's GC has collected objects which
            # would be needed when calling self.close()
            pass
    # pylint: enable=W1701

    def close(self):
        '''
@@ -548,8 +552,10 @@ class IPCMessagePublisher(object):
        if hasattr(self.sock, 'close'):
            self.sock.close()

    # pylint: disable=W1701
    def __del__(self):
        self.close()
    # pylint: enable=W1701


class IPCMessageSubscriber(IPCClient):
@@ -701,6 +707,8 @@ class IPCMessageSubscriber(IPCClient):
        if exc and not isinstance(exc, StreamClosedError):
            log.error("Read future returned exception %r", exc)

    # pylint: disable=W1701
    def __del__(self):
        if IPCMessageSubscriber in globals():
            self.close()
    # pylint: enable=W1701
@@ -175,8 +175,10 @@ if USE_LOAD_BALANCER:
            self._socket.close()
            self._socket = None

        # pylint: disable=W1701
        def __del__(self):
            self.close()
        # pylint: enable=W1701

        def run(self):
            '''
@@ -288,8 +290,10 @@ class AsyncTCPReqChannel(salt.transport.client.ReqChannel):
        self._closing = True
        self.message_client.close()

    # pylint: disable=W1701
    def __del__(self):
        self.close()
    # pylint: enable=W1701

    def _package_load(self, load):
        return {
@@ -395,8 +399,10 @@ class AsyncTCPPubChannel(salt.transport.mixins.auth.AESPubClientMixin, salt.tran
        if hasattr(self, 'message_client'):
            self.message_client.close()

    # pylint: disable=W1701
    def __del__(self):
        self.close()
    # pylint: enable=W1701

    def _package_load(self, load):
        return {
@@ -574,8 +580,10 @@ class TCPReqServerChannel(salt.transport.mixins.auth.AESReqServerMixin, salt.tra
        except Exception as exc:
            log.exception('TCPReqServerChannel close generated an exception: %s', str(exc))

    # pylint: disable=W1701
    def __del__(self):
        self.close()
    # pylint: enable=W1701

    def pre_fork(self, process_manager):
        '''
@@ -820,8 +828,10 @@ class SaltMessageClientPool(salt.transport.MessageClientPool):
    def __init__(self, opts, args=None, kwargs=None):
        super(SaltMessageClientPool, self).__init__(SaltMessageClient, opts, args=args, kwargs=kwargs)

    # pylint: disable=W1701
    def __del__(self):
        self.close()
    # pylint: enable=W1701

    def close(self):
        for message_client in self.message_clients:
@@ -926,8 +936,10 @@ class SaltMessageClient(object):
        self.connect_callback = None
        self.disconnect_callback = None

    # pylint: disable=W1701
    def __del__(self):
        self.close()
    # pylint: enable=W1701

    def connect(self):
        '''
@@ -1167,8 +1179,10 @@ class Subscriber(object):
            # 'StreamClosedError' when the stream is closed.
            self._read_until_future.exception()

    # pylint: disable=W1701
    def __del__(self):
        self.close()
    # pylint: enable=W1701


class PubServer(tornado.tcpserver.TCPServer, object):
@@ -1207,8 +1221,10 @@ class PubServer(tornado.tcpserver.TCPServer, object):
            return
        self._closing = True

    # pylint: disable=W1701
    def __del__(self):
        self.close()
    # pylint: enable=W1701

    def _add_client_present(self, client):
        id_ = client.id_
@@ -204,6 +204,7 @@ class AsyncZeroMQReqChannel(salt.transport.client.ReqChannel):
            args=(self.opts, self.master_uri,),
            kwargs={'io_loop': self._io_loop})

    # pylint: disable=W1701
    def __del__(self):
        '''
        Since the message_client creates sockets and assigns them to the IOLoop we have to
@@ -211,6 +212,7 @@ class AsyncZeroMQReqChannel(salt.transport.client.ReqChannel):
        '''
        if hasattr(self, 'message_client'):
            self.message_client.destroy()
    # pylint: enable=W1701

    @property
    def master_uri(self):
@@ -437,8 +439,10 @@ class AsyncZeroMQPubChannel(salt.transport.mixins.auth.AESPubClientMixin, salt.t
        if hasattr(self, 'context') and self.context.closed is False:
            self.context.term()

    # pylint: disable=W1701
    def __del__(self):
        self.destroy()
    # pylint: enable=W1701

    # TODO: this is the time to see if we are connected, maybe use the req channel to guess?
    @tornado.gen.coroutine
@@ -989,8 +993,10 @@ class AsyncReqMessageClientPool(salt.transport.MessageClientPool):
    def __init__(self, opts, args=None, kwargs=None):
        super(AsyncReqMessageClientPool, self).__init__(AsyncReqMessageClient, opts, args=args, kwargs=kwargs)

    # pylint: disable=W1701
    def __del__(self):
        self.destroy()
    # pylint: enable=W1701

    def destroy(self):
        for message_client in self.message_clients:
@@ -1060,8 +1066,10 @@ class AsyncReqMessageClient(object):
        if self.context.closed is False:
            self.context.term()

    # pylint: disable=W1701
    def __del__(self):
        self.destroy()
    # pylint: enable=W1701

    def _init_socket(self):
        if hasattr(self, 'stream'):
@@ -76,6 +76,7 @@ class SyncWrapper(object):
            self.io_loop.start()
        return future.result()

    # pylint: disable=W1701
    def __del__(self):
        '''
        On deletion of the asynchronous wrapper, make sure to clean up the asynchronous stuff
@@ -92,3 +93,4 @@ class SyncWrapper(object):
        elif hasattr(self, 'io_loop'):
            self.io_loop.close()
            del self.io_loop
    # pylint: enable=W1701
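
The SyncWrapper hunks make __del__ close and then drop its private IOLoop instead of leaving that to garbage collection. A rough sketch of the same explicit-cleanup idea against a bare Tornado loop (Tornado is pinned in the requirements files above; the loop here is created only for illustration):

import tornado.ioloop

io_loop = tornado.ioloop.IOLoop()
try:
    # Hand io_loop to whatever synchronous wrapper needs it here.
    pass
finally:
    io_loop.close()  # release the loop's file descriptors deterministically
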
@@ -876,6 +876,7 @@ class SaltEvent(object):
        # This will handle reconnects
        return self.subscriber.read_async(event_handler)

    # pylint: disable=W1701
    def __del__(self):
        # skip exceptions in destroy-- since destroy() doesn't cover interpreter
        # shutdown-- where globals start going missing
@@ -883,6 +884,7 @@ class SaltEvent(object):
            self.destroy()
        except Exception:
            pass
    # pylint: enable=W1701

    def __enter__(self):
        return self
@@ -1071,8 +1073,10 @@ class AsyncEventPublisher(object):
        if hasattr(self, 'puller'):
            self.puller.close()

    # pylint: disable=W1701
    def __del__(self):
        self.close()
    # pylint: enable=W1701


class EventPublisher(salt.utils.process.SignalHandlingMultiprocessingProcess):
@@ -1173,8 +1177,10 @@ class EventPublisher(salt.utils.process.SignalHandlingMultiprocessingProcess):
        self.close()
        super(EventPublisher, self)._handle_signals(signum, sigframe)

    # pylint: disable=W1701
    def __del__(self):
        self.close()
    # pylint: enable=W1701


class EventReturn(salt.utils.process.SignalHandlingMultiprocessingProcess):
@@ -104,6 +104,8 @@ def __decompressContent(coding, pgctnt):
    Currently supports identity/none, deflate, and gzip, which should
    cover 99%+ of the content on the internet.
    '''
    if not pgctnt:
        return pgctnt

    log.trace("Decompressing %s byte content with compression type: %s", len(pgctnt), coding)

@@ -122,9 +124,6 @@ def __decompressContent(coding, pgctnt):
    elif coding == "compress":
        raise ValueError("LZW compression is not currently supported")

    elif coding == 'identity':
        pass

    log.trace("Content size after decompression: %s", len(pgctnt))
    return pgctnt

@@ -465,7 +464,7 @@ def query(url,
                not isinstance(result_text, six.text_type):
            result_text = result_text.decode(res_params['charset'])
        if six.PY3 and isinstance(result_text, bytes):
            result_text = result.body.decode('utf-8')
            result_text = result_text.decode('utf-8')
        ret['body'] = result_text
    else:
        # Tornado
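
The first two hunks above add an early return for empty bodies in __decompressContent and keep explicit identity/compress branches, and the last one decodes the already-extracted result_text instead of re-reading result.body. A self-contained sketch of the same decompress-by-Content-Encoding idea using only the standard library (raw-deflate handling is simplified and not Salt's actual helper):

import gzip
import zlib


def decompress_content(coding, body):
    # Mirror the guard added above: empty bodies pass straight through.
    if not body:
        return body
    if coding == 'gzip':
        return gzip.decompress(body)
    if coding == 'deflate':
        return zlib.decompress(body)
    if coding == 'compress':
        raise ValueError('LZW compression is not currently supported')
    # 'identity', 'none' or anything unknown: return the body untouched.
    return body


if __name__ == '__main__':
    print(decompress_content('gzip', gzip.compress(b'payload')))
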
@@ -115,9 +115,11 @@ class RSAX931Signer(object):
        if not libcrypto.PEM_read_bio_RSAPrivateKey(self._bio, pointer(self._rsa), None, None):
            raise ValueError('invalid RSA private key')

    # pylint: disable=W1701
    def __del__(self):
        libcrypto.BIO_free(self._bio)
        libcrypto.RSA_free(self._rsa)
    # pylint: enable=W1701

    def sign(self, msg):
        '''
@@ -153,9 +155,11 @@ class RSAX931Verifier(object):
        if not libcrypto.PEM_read_bio_RSA_PUBKEY(self._bio, pointer(self._rsa), None, None):
            raise ValueError('invalid RSA public key')

    # pylint: disable=W1701
    def __del__(self):
        libcrypto.BIO_free(self._bio)
        libcrypto.RSA_free(self._rsa)
    # pylint: enable=W1701

    def verify(self, signed):
        '''
@@ -931,6 +931,7 @@ class Terminal(object):
    # <---- Linux Methods ----------------------------------------------------

    # ----- Cleanup!!! ------------------------------------------------------>
    # pylint: disable=W1701
    def __del__(self, _maxsize=sys.maxsize, _active=_ACTIVE):  # pylint: disable=W0102
        # I've disabled W0102 above which is regarding a dangerous default
        # value of [] for _ACTIVE, though, this is how Python itself handles
@@ -945,5 +946,6 @@ class Terminal(object):
        if self.isalive() and _ACTIVE is not None:
            # Child is still running, keep us alive until we can wait on it.
            _ACTIVE.append(self)
    # pylint: enable=W1701
    # <---- Cleanup!!! -------------------------------------------------------
# <---- Platform Specific Methods --------------------------------------------
@@ -28,15 +28,25 @@ class MinionBlackoutTestCase(ModuleCase):

    @classmethod
    def setUpClass(cls):
        cls.top_pillar = os.path.join(RUNTIME_VARS.TMP_PILLAR_TREE, 'top.sls')
        cls.blackout_pillar = os.path.join(RUNTIME_VARS.TMP_PILLAR_TREE, 'blackout.sls')

    @classmethod
    def tearDownClass(cls):
        if os.path.exists(cls.top_pillar):
            os.unlink(cls.top_pillar)
        del cls.top_pillar
        if os.path.exists(cls.blackout_pillar):
            os.unlink(cls.blackout_pillar)
        del cls.blackout_pillar

    def setUp(self):
        with salt.utils.files.fopen(self.top_pillar, 'w') as wfh:
            wfh.write(textwrap.dedent('''\
                base:
                  '*':
                    - blackout
                '''))
        with salt.utils.files.fopen(self.blackout_pillar, 'w') as wfh:
            wfh.write('minion_blackout: False')
        self.addCleanup(self.cleanup_blackout_pillar)
@@ -49,6 +59,8 @@ class MinionBlackoutTestCase(ModuleCase):
        self.wait_for_all_jobs()

    def cleanup_blackout_pillar(self):
        if os.path.exists(self.top_pillar):
            os.unlink(self.top_pillar)
        if os.path.exists(self.blackout_pillar):
            os.unlink(self.blackout_pillar)
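
The test above now builds its pillar paths once per class, removes them in tearDownClass, and registers a per-test addCleanup hook as well. A minimal sketch of that fixture layout with stand-in paths (the temp directory and file name are illustrative, not Salt's):

import os
import tempfile
import unittest


class PillarFixtureExample(unittest.TestCase):

    @classmethod
    def setUpClass(cls):
        cls.pillar_dir = tempfile.mkdtemp()
        cls.blackout_pillar = os.path.join(cls.pillar_dir, 'blackout.sls')

    @classmethod
    def tearDownClass(cls):
        if os.path.exists(cls.blackout_pillar):
            os.unlink(cls.blackout_pillar)
        os.rmdir(cls.pillar_dir)
        del cls.blackout_pillar

    def setUp(self):
        with open(self.blackout_pillar, 'w') as wfh:
            wfh.write('minion_blackout: False\n')
        self.addCleanup(self.cleanup_blackout_pillar)

    def cleanup_blackout_pillar(self):
        if os.path.exists(self.blackout_pillar):
            os.unlink(self.blackout_pillar)

    def test_pillar_file_written(self):
        self.assertTrue(os.path.exists(self.blackout_pillar))


if __name__ == '__main__':
    unittest.main()
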
@@ -5,6 +5,8 @@

# Import Salt Libs
from __future__ import absolute_import, unicode_literals, print_function
import socket
from contextlib import closing

# Import Salt Testing Libs
from tests.support.unit import TestCase, skipIf
@@ -90,3 +92,19 @@ class HTTPTestCase(TestCase):
        mock_ret = 'foo=XXXXXXXXXX&foo=XXXXXXXXXX&api_key=testing&'
        ret = http._sanitize_url_components(mock_component_list, 'foo')
        self.assertEqual(ret, mock_ret)

    def test_query_null_response(self):
        '''
        This tests that we get a null response when raise_error=False and the
        host/port cannot be reached.
        '''
        host = '127.0.0.1'

        # Find unused port
        with closing(socket.socket(socket.AF_INET, socket.SOCK_STREAM)) as sock:
            sock.bind((host, 0))
            port = sock.getsockname()[1]

        url = 'http://{host}:{port}/'.format(host=host, port=port)
        result = http.query(url, raise_error=False)
        assert result == {'body': None}, result