diff --git a/.coveragerc b/.coveragerc index f0fb46b6fce..a713d6773b2 100644 --- a/.coveragerc +++ b/.coveragerc @@ -32,14 +32,18 @@ ignore_errors = True [paths] salt = salt/ + /tmp/testing/salt/ /tmp/kitchen/testing/salt/ /private/tmp/kitchen/testing/salt/ + C:\Windows\Temp\testing\salt C:\Users\admini~1\AppData\Local\Temp\kitchen\testing\salt\ C:\Users\Administrator\AppData\Local\Temp\kitchen\testing\salt\ tests = tests/ + /tmp/testing/tests/ /tmp/kitchen/testing/tests/ /private/tmp/kitchen/testing/tests/ + C:\Windows\Temp\testing\tests C:\Users\admini~1\AppData\Local\Temp\kitchen\testing\tests\ C:\Users\Administrator\AppData\Local\Temp\kitchen\testing\tests\ extension_modules = diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml new file mode 100644 index 00000000000..9b34b2630bf --- /dev/null +++ b/.github/workflows/ci.yml @@ -0,0 +1,152 @@ +name: CI + +on: + - push + - pull_request + +permissions: + contents: read + +jobs: + get-changed-files: + name: Get Changed Files + runs-on: ubuntu-latest + permissions: + contents: read # for dorny/paths-filter to fetch a list of changed files + pull-requests: read # for dorny/paths-filter to read pull requests + outputs: + changed-files: ${{ toJSON(steps.changed-files.outputs) }} + steps: + - uses: actions/checkout@v3 + - name: Get Changed Files + id: changed-files + uses: dorny/paths-filter@v2 + with: + token: ${{ github.token }} + list-files: json + filters: | + repo: + - added|modified: + - '**' + deleted: + - deleted: + - '**' + docs: + - doc/** + salt: + - added|modified: + - setup.py + - noxfile.py + - salt/**/*.py + - tasks/**/*.py + - tools/**/*.py + tests: + - added|modified: + - tests/**/*.py + pylintrc: + - added|modified: + - .pylintrc + + pre-commit: + name: Pre-Commit + uses: ./.github/workflows/pre-commit-action.yml + needs: + - get-changed-files + with: + changed-files: ${{ needs.get-changed-files.outputs.changed-files }} + + docs: + name: Build Docs + uses: ./.github/workflows/docs-action.yml + needs: + - get-changed-files + with: + changed-files: ${{ needs.get-changed-files.outputs.changed-files }} + + lint: + name: Lint + uses: ./.github/workflows/lint-action.yml + needs: + - get-changed-files + with: + changed-files: ${{ needs.get-changed-files.outputs.changed-files }} + + debian-11: + name: Debian 11 + needs: + - get-changed-files + uses: ./.github/workflows/test-action.yml + with: + distro-slug: debian-11 + nox-session: ci-test-3 + changed-files: ${{ needs.get-changed-files.outputs.changed-files }} + + debian-11-arm64: + name: Debian 11 ARM64 + needs: + - get-changed-files + uses: ./.github/workflows/test-action.yml + with: + distro-slug: debian-11-arm64 + nox-session: ci-test-3 + changed-files: ${{ needs.get-changed-files.outputs.changed-files }} + + ubuntu-2204: + name: Ubuntu 22.04 + needs: + - get-changed-files + uses: ./.github/workflows/test-action.yml + with: + distro-slug: ubuntu-22.04 + nox-session: ci-test-3 + changed-files: ${{ needs.get-changed-files.outputs.changed-files }} + + windows-2019: + name: Windows 2019 + needs: + - get-changed-files + uses: ./.github/workflows/test-action.yml + with: + distro-slug: windows-2019 + nox-session: ci-test-3 + changed-files: ${{ needs.get-changed-files.outputs.changed-files }} + + + set-pipeline-exit-status: + # This step is just so we can make github require this step, to pass checks + # on a pull request instead of requiring all + name: Set the ${{ github.workflow }} Pipeline Exit Status + runs-on: ubuntu-latest + if: always() + needs: + - pre-commit + - docs 
+      - lint
+      - debian-11
+      - debian-11-arm64
+      - ubuntu-2204
+      - windows-2019
+    steps:
+      - name: Download Exit Status Files
+        if: always()
+        uses: actions/download-artifact@v3
+        with:
+          name: exitstatus
+          path: exitstatus
+
+      - name: Delete Exit Status Artifacts
+        if: always()
+        uses: geekyeggo/delete-artifact@v1
+        with:
+          name: exitstatus
+          failOnError: false
+
+      - name: Set Pipeline Exit Status
+        run: |
+          tree exitstatus
+          grep -RE 'failure|cancelled' exitstatus/ && exit 1 || exit 0
+
+      - name: Done
+        if: always()
+        run:
+          echo "All workflows finished"
diff --git a/.github/workflows/docs-action.yml b/.github/workflows/docs-action.yml
new file mode 100644
index 00000000000..01dd84f2f11
--- /dev/null
+++ b/.github/workflows/docs-action.yml
@@ -0,0 +1,132 @@
+name: Docs
+
+on:
+  workflow_call:
+    inputs:
+      changed-files:
+        required: true
+        type: string
+        description: JSON string containing information about changed files
+
+jobs:
+  Salt:
+    name: Build Salt Documentation
+    runs-on: ubuntu-latest
+    if: ${{ fromJSON(inputs.changed-files)['docs'] == 'true' }}
+
+    container:
+      image: python:3.8.6-slim-buster
+
+    steps:
+      - name: Install System Deps
+        run: |
+          echo "deb http://deb.debian.org/debian buster-backports main" >> /etc/apt/sources.list
+          apt-get update
+          apt-get install -y enchant git gcc imagemagick make zlib1g-dev libc-dev libffi-dev g++ libxml2 libxml2-dev libxslt-dev libcurl4-openssl-dev libssl-dev libgnutls28-dev xz-utils
+          apt-get install -y git/buster-backports
+
+      - uses: actions/checkout@v3
+        with:
+          fetch-depth: 0
+
+      - name: Install Nox
+        env:
+          PIP_EXTRA_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/
+        run: |
+          python -m pip install --upgrade pip
+          pip install nox
+
+      - name: Install Python Requirements
+        env:
+          PIP_EXTRA_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/
+        run:
+          nox --install-only --forcecolor -e 'docs-html(compress=False, clean=True)'
+
+      - name: Build Docs
+        env:
+          SKIP_REQUIREMENTS_INSTALL: YES
+        run: |
+          nox --forcecolor -e 'docs-html(compress=False, clean=True)'
+
+      - name: Store Generated Documentation
+        uses: actions/upload-artifact@v3
+        with:
+          name: salt-html-docs
+          path: doc/_build/html
+          if-no-files-found: error
+
+      - name: Set Exit Status
+        if: always()
+        run: |
+          mkdir exitstatus
+          echo "${{ job.status }}" > exitstatus/${{ github.job }}-docs-html
+
+      - name: Upload Exit Status
+        if: always()
+        uses: actions/upload-artifact@v3
+        with:
+          name: exitstatus
+          path: exitstatus
+          if-no-files-found: error
+
+  Manpages:
+    name: Build Salt man Pages
+    runs-on: ubuntu-latest
+    if: ${{ fromJSON(inputs.changed-files)['docs'] == 'true' }}
+
+    container:
+      image: python:3.8.6-slim-buster
+
+    steps:
+      - name: Install System Deps
+        run: |
+          echo "deb http://deb.debian.org/debian buster-backports main" >> /etc/apt/sources.list
+          apt-get update
+          apt-get install -y enchant git gcc imagemagick make zlib1g-dev libc-dev libffi-dev g++ libxml2 libxml2-dev libxslt-dev libcurl4-openssl-dev libssl-dev libgnutls28-dev
+          apt-get install -y git/buster-backports
+
+      - uses: actions/checkout@v3
+
+      - name: Install Nox
+        if: github.event_name == 'push' || fromJSON(inputs.changed-files)['docs'] == 'true'
+        env:
+          PIP_EXTRA_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/
+        run: |
+          python -m pip install --upgrade pip
+          pip install nox
+
+      - name: Install Python Requirements
+        if: github.event_name == 'push' || fromJSON(inputs.changed-files)['docs'] == 'true'
+        env:
+          PIP_EXTRA_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/
+        run:
+          nox
--install-only --forcecolor -e 'docs-man(compress=False, update=False, clean=True)' + + - name: Build Manpages + if: github.event_name == 'push' || fromJSON(inputs.changed-files)['docs'] == 'true' + env: + SKIP_REQUIREMENTS_INSTALL: YES + run: | + nox --forcecolor -e 'docs-man(compress=False, update=False, clean=True)' + + - name: Store Generated Documentation + if: github.event_name == 'push' || fromJSON(inputs.changed-files)['docs'] == 'true' + uses: actions/upload-artifact@v3 + with: + name: salt-man-pages + path: doc/_build/man + if-no-files-found: error + + - name: Set Exit Status + if: always() + run: | + mkdir exitstatus + echo "${{ job.status }}" > exitstatus/${{ github.job }}-docs-man + + - name: Upload Exit Status + if: always() + uses: actions/upload-artifact@v3 + with: + name: exitstatus + path: exitstatus + if-no-files-found: error diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml deleted file mode 100644 index f30dd4d594b..00000000000 --- a/.github/workflows/docs.yml +++ /dev/null @@ -1,125 +0,0 @@ -name: Docs - -on: [push, pull_request] - -permissions: - contents: read - -jobs: - Salt: - permissions: - contents: read # for dorny/paths-filter to fetch a list of changed files - pull-requests: read # for dorny/paths-filter to read pull requests - name: Build Salt Documentation - runs-on: ubuntu-latest - - container: - image: python:3.8.6-slim-buster - - steps: - - - name: Install System Deps - run: | - echo "deb http://deb.debian.org/debian buster-backports main" >> /etc/apt/sources.list - apt-get update - apt-get install -y enchant git gcc imagemagick make zlib1g-dev libc-dev libffi-dev g++ libxml2 libxml2-dev libxslt-dev libcurl4-openssl-dev libssl-dev libgnutls28-dev xz-utils - apt-get install -y git/buster-backports - - - uses: actions/checkout@v3 - with: - fetch-depth: 0 - - - name: Install Nox - env: - PIP_EXTRA_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/ - run: | - python -m pip install --upgrade pip - pip install nox - - - id: changed-files - name: Get Changed Files - uses: dorny/paths-filter@v2 - with: - token: ${{ github.token }} - list-files: json - filters: | - docs: - - doc/** - - - name: Install Python Requirements - env: - PIP_EXTRA_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/ - run: - nox --install-only --forcecolor -e 'docs-html(compress=False, clean=True)' - - - name: Build Docs - env: - SKIP_REQUIREMENTS_INSTALL: YES - run: | - nox --forcecolor -e 'docs-html(compress=False, clean=True)' - - - name: Store Generated Documentation - uses: actions/upload-artifact@v3 - with: - name: salt-html-docs - path: doc/_build/html - - Manpages: - permissions: - contents: read # for dorny/paths-filter to fetch a list of changed files - pull-requests: read # for dorny/paths-filter to read pull requests - name: Build Salt man Pages - runs-on: ubuntu-latest - - container: - image: python:3.8.6-slim-buster - - steps: - - - name: Install System Deps - run: | - echo "deb http://deb.debian.org/debian buster-backports main" >> /etc/apt/sources.list - apt-get update - apt-get install -y enchant git gcc imagemagick make zlib1g-dev libc-dev libffi-dev g++ libxml2 libxml2-dev libxslt-dev libcurl4-openssl-dev libssl-dev libgnutls28-dev - apt-get install -y git/buster-backports - - - uses: actions/checkout@v3 - - - id: changed-files - name: Get Changed Files - uses: dorny/paths-filter@v2 - with: - token: ${{ github.token }} - list-files: json - filters: | - docs: - - doc/** - - - name: Install Nox - if: github.event_name == 'push' 
|| steps.changed-files.outputs.docs == 'true' - env: - PIP_EXTRA_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/ - run: | - python -m pip install --upgrade pip - pip install nox - - - name: Install Python Requirements - if: github.event_name == 'push' || steps.changed-files.outputs.docs == 'true' - env: - PIP_EXTRA_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/ - run: - nox --install-only --forcecolor -e 'docs-man(compress=False, update=False, clean=True)' - - - name: Build Manpages - if: github.event_name == 'push' || steps.changed-files.outputs.docs == 'true' - env: - SKIP_REQUIREMENTS_INSTALL: YES - run: | - nox --forcecolor -e 'docs-man(compress=False, update=False, clean=True)' - - - name: Store Generated Documentation - if: github.event_name == 'push' || steps.changed-files.outputs.docs == 'true' - uses: actions/upload-artifact@v3 - with: - name: salt-man-pages - path: doc/_build/man diff --git a/.github/workflows/lint-action.yml b/.github/workflows/lint-action.yml new file mode 100644 index 00000000000..eded68dbadd --- /dev/null +++ b/.github/workflows/lint-action.yml @@ -0,0 +1,128 @@ +name: Lint + +on: + workflow_call: + inputs: + changed-files: + required: true + type: string + description: JSON string containing information about changed files + +jobs: + Salt: + name: Lint Salt's Source Code + runs-on: ubuntu-latest + if: ${{ fromJSON(inputs.changed-files)['salt'] == 'true' || fromJSON(inputs.changed-files)['pylintrc'] == 'true' }} + + container: + image: python:3.8.6-slim-buster + + steps: + - name: Install System Deps + run: | + echo "deb http://deb.debian.org/debian buster-backports main" >> /etc/apt/sources.list + apt-get update + apt-get install -y enchant git gcc make zlib1g-dev libc-dev libffi-dev g++ libxml2 libxml2-dev libxslt-dev libcurl4-openssl-dev libssl-dev libgnutls28-dev + apt-get install -y git/buster-backports + + - uses: actions/checkout@v3 + + - name: Install Nox + env: + PIP_EXTRA_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/ + run: | + python -m pip install --upgrade pip + pip install nox + + - name: Install Python Requirements + env: + PIP_EXTRA_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/ + run: + nox --install-only --forcecolor -e lint-salt + + - name: Lint Changed Files + if: github.event_name == 'pull_request' && fromJSON(inputs.changed-files)['salt'] == 'true' && fromJSON(inputs.changed-files)['pylintrc'] != 'true' + env: + SKIP_REQUIREMENTS_INSTALL: YES + run: | + nox --forcecolor -e lint-salt -- ${{ join(fromJSON(inputs.changed-files)['salt_files'], ' ') }} + + - name: Lint ALL Files + if: github.event_name != 'pull_request' || fromJSON(inputs.changed-files)['pylintrc'] == 'true' + env: + SKIP_REQUIREMENTS_INSTALL: YES + run: | + nox --forcecolor -e lint-salt + + - name: Set Exit Status + if: always() + run: | + mkdir exitstatus + echo "${{ job.status }}" > exitstatus/${{ github.job }}-lint-salt + + - name: Upload Exit Status + if: always() + uses: actions/upload-artifact@v3 + with: + name: exitstatus + path: exitstatus + if-no-files-found: error + + Tests: + name: Lint Salt's Test Suite + runs-on: ubuntu-latest + if: ${{ fromJSON(inputs.changed-files)['tests'] == 'true' || fromJSON(inputs.changed-files)['pylintrc'] == 'true' }} + + container: + image: python:3.8.6-slim-buster + + steps: + - name: Install System Deps + run: | + echo "deb http://deb.debian.org/debian buster-backports main" >> /etc/apt/sources.list + apt-get update + apt-get install -y enchant git gcc make 
zlib1g-dev libc-dev libffi-dev g++ libxml2 libxml2-dev libxslt-dev libcurl4-openssl-dev libssl-dev libgnutls28-dev + apt-get install -y git/buster-backports + + - uses: actions/checkout@v3 + + - name: Install Nox + env: + PIP_EXTRA_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/ + run: | + python -m pip install --upgrade pip + pip install nox + + - name: Install Python Requirements + env: + PIP_EXTRA_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/ + run: + nox --install-only --forcecolor -e lint-tests + + - name: Lint Changed Files + if: github.event_name == 'pull_request' && fromJSON(inputs.changed-files)['tests'] == 'true' && fromJSON(inputs.changed-files)['pylintrc'] != 'true' + env: + SKIP_REQUIREMENTS_INSTALL: YES + run: | + nox --forcecolor -e lint-tests -- ${{ join(fromJSON(inputs.changed-files)['tests_files'], ' ') }} + + - name: Lint ALL Files + if: github.event_name != 'pull_request' || fromJSON(inputs.changed-files)['pylintrc'] == 'true' + env: + SKIP_REQUIREMENTS_INSTALL: YES + run: | + nox --forcecolor -e lint-tests + + - name: Set Exit Status + if: always() + run: | + mkdir exitstatus + echo "${{ job.status }}" > exitstatus/${{ github.job }}-lint-salt + + - name: Upload Exit Status + if: always() + uses: actions/upload-artifact@v3 + with: + name: exitstatus + path: exitstatus + if-no-files-found: error diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml deleted file mode 100644 index 3044748f875..00000000000 --- a/.github/workflows/lint.yml +++ /dev/null @@ -1,134 +0,0 @@ -name: Lint - -on: [push, pull_request] - -permissions: - contents: read - -jobs: - Salt: - permissions: - contents: read # for dorny/paths-filter to fetch a list of changed files - pull-requests: read # for dorny/paths-filter to read pull requests - name: Lint Salt's Source Code - runs-on: ubuntu-latest - - container: - image: python:3.8.6-slim-buster - - steps: - - - name: Install System Deps - run: | - echo "deb http://deb.debian.org/debian buster-backports main" >> /etc/apt/sources.list - apt-get update - apt-get install -y enchant git gcc make zlib1g-dev libc-dev libffi-dev g++ libxml2 libxml2-dev libxslt-dev libcurl4-openssl-dev libssl-dev libgnutls28-dev - apt-get install -y git/buster-backports - - - uses: actions/checkout@v3 - - - name: Install Nox - env: - PIP_EXTRA_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/ - run: | - python -m pip install --upgrade pip - pip install nox - - - id: changed-files - name: Get Changed Files - uses: dorny/paths-filter@v2 - with: - token: ${{ github.token }} - list-files: json - filters: | - salt: - - added|modified: - - setup.py - - noxfile.py - - salt/**/*.py - - tasks/**/*.py - rcfile: - - added|modified: - - .pylintrc - - - name: Install Python Requirements - env: - PIP_EXTRA_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/ - run: - nox --install-only --forcecolor -e lint-salt - - - name: Lint Changed Files - if: github.event_name == 'pull_request' && steps.changed-files.outputs.salt == 'true' - env: - SKIP_REQUIREMENTS_INSTALL: YES - run: | - nox --forcecolor -e lint-salt -- ${{ join(fromJSON(steps.changed-files.outputs.salt_files), ' ') }} - - - name: Lint ALL Files - if: steps.changed-files.outputs.salt == 'true' || steps.changed-files.outputs.rcfile == 'true' - env: - SKIP_REQUIREMENTS_INSTALL: YES - run: | - nox --forcecolor -e lint-salt - - Tests: - permissions: - contents: read # for dorny/paths-filter to fetch a list of changed files - pull-requests: read # for 
dorny/paths-filter to read pull requests - name: Lint Salt's Test Suite - runs-on: ubuntu-latest - - container: - image: python:3.8.6-slim-buster - - steps: - - - name: Install System Deps - run: | - echo "deb http://deb.debian.org/debian buster-backports main" >> /etc/apt/sources.list - apt-get update - apt-get install -y enchant git gcc make zlib1g-dev libc-dev libffi-dev g++ libxml2 libxml2-dev libxslt-dev libcurl4-openssl-dev libssl-dev libgnutls28-dev - apt-get install -y git/buster-backports - - - uses: actions/checkout@v3 - - - name: Install Nox - env: - PIP_EXTRA_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/ - run: | - python -m pip install --upgrade pip - pip install nox - - - id: changed-files - name: Get Changed Files - uses: dorny/paths-filter@v2 - with: - token: ${{ github.token }} - list-files: json - filters: | - tests: - - added|modified: - - tests/**/*.py - rcfile: - - added|modified: - - .pylintrc - - - name: Install Python Requirements - env: - PIP_EXTRA_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/ - run: - nox --install-only --forcecolor -e lint-tests - - - name: Lint Changed Files - if: github.event_name == 'pull_request' && steps.changed-files.outputs.tests == 'true' - env: - SKIP_REQUIREMENTS_INSTALL: YES - run: | - nox --forcecolor -e lint-tests -- ${{ join(fromJSON(steps.changed-files.outputs.tests_files), ' ') }} - - - name: Lint ALL Files - if: steps.changed-files.outputs.tests == 'true' || steps.changed-files.outputs.rcfile == 'true' - env: - SKIP_REQUIREMENTS_INSTALL: YES - run: | - nox --forcecolor -e lint-tests diff --git a/.github/workflows/pre-commit-action.yml b/.github/workflows/pre-commit-action.yml new file mode 100644 index 00000000000..b0c2cea21f2 --- /dev/null +++ b/.github/workflows/pre-commit-action.yml @@ -0,0 +1,72 @@ +name: Pre-Commit + +on: + workflow_call: + inputs: + changed-files: + required: true + type: string + description: JSON string containing information about changed files + +jobs: + Pre-Commit: + name: Run Pre-Commit Against Salt + + runs-on: ubuntu-latest + + container: + image: python:3.8.6-slim-buster + + steps: + + - name: Install System Deps + run: | + echo "deb http://deb.debian.org/debian buster-backports main" >> /etc/apt/sources.list + apt-get update + apt-get install -y enchant git gcc make zlib1g-dev libc-dev libffi-dev g++ libxml2 libxml2-dev libxslt-dev libcurl4-openssl-dev libssl-dev libgnutls28-dev + apt-get install -y git/buster-backports + + - uses: actions/checkout@v3 + + - name: Install Pre-Commit + env: + PIP_EXTRA_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/ + run: | + python -m pip install --upgrade pip + pip install pre-commit + pre-commit install --install-hooks + + - name: Check ALL Files On Branch + if: github.event_name != 'pull_request' + env: + SKIP: lint-salt,lint-tests,remove-import-headers,rstcheck + run: | + pre-commit run --show-diff-on-failure --color=always --all-files + + - name: Check Changed Files On PR + if: github.event_name == 'pull_request' && fromJSON(inputs.changed-files)['repo'] == 'true' + env: + SKIP: lint-salt,lint-tests + run: | + pre-commit run --show-diff-on-failure --color=always --files ${{ join(fromJSON(inputs.changed-files)['repo_files'], ' ') }} + + - name: Check Docs On Deleted Files + if: fromJSON(inputs.changed-files)['deleted'] == 'true' + env: + PIP_EXTRA_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/ + run: | + pre-commit run --show-diff-on-failure --color=always check-docs --files ${{ 
join(fromJSON(inputs.changed-files)['deleted_files'], ' ') }} + + - name: Set Exit Status + if: always() + run: | + mkdir exitstatus + echo "${{ job.status }}" > exitstatus/${{ github.job }}-pre-commit + + - name: Upload Exit Status + if: always() + uses: actions/upload-artifact@v3 + with: + name: exitstatus + path: exitstatus + if-no-files-found: error diff --git a/.github/workflows/pre-commit.yml b/.github/workflows/pre-commit.yml deleted file mode 100644 index 36c804a8fb8..00000000000 --- a/.github/workflows/pre-commit.yml +++ /dev/null @@ -1,72 +0,0 @@ -name: Pre-Commit - -on: [push, pull_request] - -permissions: - contents: read - -jobs: - Pre-Commit: - permissions: - contents: read # for dorny/paths-filter to fetch a list of changed files - pull-requests: read # for dorny/paths-filter to read pull requests - name: Run Pre-Commit Against Salt - - runs-on: ubuntu-latest - - container: - image: python:3.8.6-slim-buster - - steps: - - - name: Install System Deps - run: | - echo "deb http://deb.debian.org/debian buster-backports main" >> /etc/apt/sources.list - apt-get update - apt-get install -y enchant git gcc make zlib1g-dev libc-dev libffi-dev g++ libxml2 libxml2-dev libxslt-dev libcurl4-openssl-dev libssl-dev libgnutls28-dev - apt-get install -y git/buster-backports - - - uses: actions/checkout@v3 - - - name: Install Pre-Commit - env: - PIP_EXTRA_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/ - run: | - python -m pip install --upgrade pip - pip install pre-commit - pre-commit install --install-hooks - - - id: changed-files - name: Get Changed Files - uses: dorny/paths-filter@v2 - with: - token: ${{ github.token }} - list-files: json - filters: | - repo: - - added|modified: - - '**' - deleted: - - deleted: - - '**' - - - name: Check ALL Files On Branch - if: github.event_name != 'pull_request' - env: - SKIP: lint-salt,lint-tests,remove-import-headers,rstcheck - run: | - pre-commit run --show-diff-on-failure --color=always --all-files - - - name: Check Changed Files On PR - if: github.event_name == 'pull_request' && steps.changed-files.outputs.repo == 'true' - env: - SKIP: lint-salt,lint-tests - run: | - pre-commit run --show-diff-on-failure --color=always --files ${{ join(fromJSON(steps.changed-files.outputs.repo_files), ' ') }} - - - name: Check Docs On Deleted Files - if: steps.changed-files.outputs.deleted == 'true' - env: - PIP_EXTRA_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/ - run: | - pre-commit run --show-diff-on-failure --color=always check-docs --files ${{ join(fromJSON(steps.changed-files.outputs.deleted_files), ' ') }} diff --git a/.github/workflows/test-action.yml b/.github/workflows/test-action.yml new file mode 100644 index 00000000000..eee8cbe0504 --- /dev/null +++ b/.github/workflows/test-action.yml @@ -0,0 +1,291 @@ +name: Test Artifact + +on: + workflow_call: + inputs: + distro-slug: + required: true + type: string + description: The OS slug to run tests against + nox-session: + required: true + type: string + description: The nox session to run + changed-files: + required: true + type: string + description: JSON string containing information about changed files + + +env: + NOX_VERSION: "2022.8.7" + COLUMNS: 160 + +jobs: + + dependencies: + name: Setup Test Dependencies + runs-on: + - self-hosted + timeout-minutes: 90 + steps: + - name: Checkout Source Code + uses: actions/checkout@v3 + + - name: Cache nox.${{ inputs.distro-slug }}.tar.* for session ${{ inputs.nox-session }} + uses: actions/cache@v3 + with: + path: nox.${{ 
inputs.distro-slug }}.tar.*
+          key: testrun-deps|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ hashFiles('requirements/**/*.txt') }}
+
+      # Check the existence of nox.{distro-slug}.tar.*.
+      # If it exists, it means the cache was hit and we don't have to regenerate it again
+      - name: Check nox.${{ inputs.distro-slug }}.tar.* Exists
+        id: check-nox-tarball
+        uses: andstor/file-existence-action@v1
+        with:
+          files: "nox.${{ inputs.distro-slug }}.tar.*"
+
+      - name: PyPi Proxy
+        if: steps.check-nox-tarball.outputs.files_exists != 'true'
+        run: |
+          sed -i '7s;^;--index-url=https://pypi-proxy.saltstack.net/root/local/+simple/ --extra-index-url=https://pypi.org/simple\n;' requirements/static/ci/*/*.txt
+
+      - name: Install Python Tools Scripts
+        if: steps.check-nox-tarball.outputs.files_exists != 'true'
+        run: |
+          python3 -m pip install -r requirements/static/ci/py3.10/tools.txt
+
+      - name: Start VM
+        if: steps.check-nox-tarball.outputs.files_exists != 'true'
+        id: spin-up-vm
+        run: |
+          tools vm create ${{ inputs.distro-slug }}
+
+      - name: Upload Checkout To VM
+        if: steps.check-nox-tarball.outputs.files_exists != 'true'
+        run: |
+          tools vm rsync ${{ inputs.distro-slug }}
+
+      - name: Install Dependencies
+        if: steps.check-nox-tarball.outputs.files_exists != 'true'
+        run: |
+          tools vm install-dependencies --nox-session=${{ inputs.nox-session }} ${{ inputs.distro-slug }}
+
+      - name: Compress .nox Directory
+        if: steps.check-nox-tarball.outputs.files_exists != 'true'
+        run: |
+          tools vm compress-dependencies ${{ inputs.distro-slug }}
+
+      - name: Download Compressed .nox Directory
+        if: steps.check-nox-tarball.outputs.files_exists != 'true'
+        run: |
+          tools vm download-dependencies ${{ inputs.distro-slug }}
+
+      - name: Destroy VM
+        if: always()
+        run: |
+          tools vm destroy ${{ inputs.distro-slug }} || true
+
+      - name: Set Exit Status
+        if: always()
+        run: |
+          mkdir exitstatus
+          echo "${{ job.status }}" > exitstatus/${{ github.job }}-${{ inputs.distro-slug }}-deps
+
+      - name: Upload Exit Status
+        if: always()
+        uses: actions/upload-artifact@v3
+        with:
+          name: exitstatus
+          path: exitstatus
+          if-no-files-found: error
+
+  test:
+    name: Test
+    runs-on:
+      - self-hosted
+    timeout-minutes: 240  # 4 Hours
+    needs:
+      - dependencies
+    strategy:
+      fail-fast: false
+      matrix:
+        tests-chunk:
+          - unit
+          - functional
+          - integration
+          - scenarios
+
+    steps:
+      - name: Checkout Source Code
+        uses: actions/checkout@v3
+
+      - name: Download cached nox.${{ inputs.distro-slug }}.tar.* for session ${{ inputs.nox-session }}
+        uses: actions/cache@v3
+        with:
+          path: nox.${{ inputs.distro-slug }}.tar.*
+          key: testrun-deps|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ hashFiles('requirements/**/*.txt') }}
+
+      - name: PyPi Proxy
+        run: |
+          sed -i '7s;^;--index-url=https://pypi-proxy.saltstack.net/root/local/+simple/ --extra-index-url=https://pypi.org/simple\n;' requirements/static/ci/*/*.txt
+
+      - name: Install Python Tools Scripts
+        run: |
+          python3 -m pip install -r requirements/static/ci/py3.10/tools.txt
+          python3 -m pip install 'nox==${{ env.NOX_VERSION }}'
+
+      - name: Start VM
+        id: spin-up-vm
+        run: |
+          tools vm create ${{ inputs.distro-slug }}
+
+      - name: Upload Checkout To VM
+        run: |
+          tools vm rsync ${{ inputs.distro-slug }}
+
+      - name: Decompress .nox Directory
+        run: |
+          tools vm decompress-dependencies ${{ inputs.distro-slug }}
+
+      - name: Run Tests
+        id: run-tests
+        run: |
+          tools vm test --print-tests-selection --skip-requirements-install \
+            --nox-session=${{ inputs.nox-session }} --rerun-failures ${{ inputs.distro-slug }} ${{ matrix.tests-chunk }}
+
+      - name: Combine Coverage Reports
+        if: always() && steps.spin-up-vm.conclusion == 'success' && steps.run-tests.conclusion != 'cancelled' && steps.run-tests.conclusion != 'skipped'
+        run: |
+          tools vm combine-coverage ${{ inputs.distro-slug }}
+
+      - name: Download Test Run Artifacts
+        id: download-artifacts-from-vm
+        if: always() && steps.spin-up-vm.conclusion == 'success' && steps.run-tests.conclusion != 'cancelled' && steps.run-tests.conclusion != 'skipped'
+        run: |
+          tools vm download-artifacts ${{ inputs.distro-slug }}
+          tree -a artifacts
+          mv artifacts/coverage/.coverage artifacts/coverage/.coverage.${{ inputs.distro-slug }}.${{ inputs.nox-session }}.${{ matrix.tests-chunk }}
+
+      - name: Destroy VM
+        if: always()
+        run: |
+          tools vm destroy ${{ inputs.distro-slug }} || true
+
+      - name: Upload Test Run Artifacts
+        if: always() && steps.download-artifacts-from-vm.conclusion == 'success'
+        uses: actions/upload-artifact@v3
+        with:
+          name: artifacts-${{ inputs.distro-slug }}-${{ inputs.nox-session }}
+          path: artifacts
+
+#      - name: Publish Test Report
+#        uses: mikepenz/action-junit-report@v3
+#        if: always() && steps.download-artifacts-from-vm.conclusion == 'success'
+#        with:
+#          check_name: Test Results(${{ inputs.distro-slug }} ${{ matrix.tests-chunk }})
+#          report_paths: 'artifacts/xml-unittests-output/*.xml'
+
+      - name: Report Salt Code Coverage(${{ matrix.tests-chunk }})
+        if: always() && steps.download-artifacts-from-vm.conclusion == 'success'
+        run: |
+          nox --force-color -e report-coverage -- salt
+
+      - name: Report Tests Code Coverage(${{ matrix.tests-chunk }})
+        if: always() && steps.download-artifacts-from-vm.conclusion == 'success'
+        run: |
+          nox --force-color -e report-coverage -- tests
+
+      - name: Report Combined Code Coverage(${{ matrix.tests-chunk }})
+        if: always() && steps.download-artifacts-from-vm.conclusion == 'success'
+        run: |
+          nox --force-color -e report-coverage
+
+      - name: Set Exit Status
+        if: always()
+        run: |
+          mkdir exitstatus
+          echo "${{ job.status }}" > exitstatus/${{ github.job }}-${{ inputs.distro-slug }}-${{ inputs.nox-session }}-${{ matrix.tests-chunk }}-tests
+
+      - name: Upload Exit Status
+        if: always()
+        uses: actions/upload-artifact@v3
+        with:
+          name: exitstatus
+          path: exitstatus
+          if-no-files-found: error
+
+  report:
+    name: Reports for ${{ inputs.distro-slug }}
+    runs-on: ubuntu-latest
+    needs: test
+    if: always() && needs.test.result != 'cancelled' && needs.test.result != 'skipped'
+
+    steps:
+      - name: Checkout Source Code
+        uses: actions/checkout@v3
+
+      - name: Download Test Run Artifacts
+        id: download-test-run-artifacts
+        uses: actions/download-artifact@v3
+        with:
+          name: artifacts-${{ inputs.distro-slug }}-${{ inputs.nox-session }}
+          path: artifacts
+
+      - name: Show Test Run Artifacts
+        if: always() && steps.download-test-run-artifacts.conclusion == 'success'
+        run: |
+          tree -a artifacts
+
+      - name: Upload Code Coverage DB
+        if: always() && steps.download-test-run-artifacts.conclusion == 'success'
+        uses: actions/upload-artifact@v3
+        with:
+          name: code-coverage
+          path: artifacts/coverage
+
+      - name: Set up Python 3.10
+        uses: actions/setup-python@v4
+        with:
+          python-version: "3.10"
+
+      - name: Install Nox
+        run: |
+          python3 -m pip install 'nox==${{ env.NOX_VERSION }}'
+
+      - name: Report Salt Code Coverage
+        run: |
+          nox --force-color -e report-coverage -- salt
+
+      -
name: Report Tests Code Coverage + run: | + nox --force-color -e report-coverage -- tests + + - name: Report Combined Code Coverage + run: | + nox --force-color -e report-coverage + + - name: Publish Test Report + uses: mikepenz/action-junit-report@v3 + # always run even if the previous steps fails + if: always() && steps.download-test-run-artifacts.conclusion == 'success' + with: + check_name: Overall Test Results(${{ inputs.distro-slug }}) + report_paths: 'artifacts/xml-unittests-output/*.xml' + + - name: Set Exit Status + if: always() + run: | + mkdir exitstatus + echo "${{ job.status }}" > exitstatus/${{ github.job }}-${{ inputs.distro-slug }}-${{ inputs.nox-session }}-report + + - name: Upload Exit Status + if: always() + uses: actions/upload-artifact@v3 + with: + name: exitstatus + path: exitstatus + if-no-files-found: error diff --git a/.gitignore b/.gitignore index e9ad3cb0059..de8a0d6a047 100644 --- a/.gitignore +++ b/.gitignore @@ -121,3 +121,9 @@ requirements/static/*/py*/*.log # Vim's default session file Session.vim + +# VM task state directory +.vms-state/ + +# Nox requirements archives +nox.*.tar.bzip2 diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 4055f1f504e..6be77561285 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1027,6 +1027,30 @@ repos: - requirements/static/ci/invoke.in # <---- Invoke ----------------------------------------------------------------------------------------------------- + # ----- Tools ----------------------------------------------------------------------------------------------------> + - id: pip-tools-compile + alias: compile-ci-tools-3.9-requirements + name: Linux CI Py3.9 Tools Requirements + files: ^requirements/static/ci/(tools\.in|py3.9/(tools|linux)\.txt)$ + pass_filenames: false + args: + - -v + - --py-version=3.9 + - --pip-args=--constraint=requirements/static/ci/py{py_version}/linux.txt + - requirements/static/ci/tools.in + + - id: pip-tools-compile + alias: compile-ci-tools-3.10-requirements + name: Linux CI Py3.10 Tools Requirements + files: ^requirements/static/ci/(tools\.in|py3.10/(tools|linux)\.txt)$ + pass_filenames: false + args: + - -v + - --py-version=3.10 + - --pip-args=--constraint=requirements/static/ci/py{py_version}/linux.txt + - requirements/static/ci/tools.in + # <---- Tools ----------------------------------------------------------------------------------------------------- + # ----- Local Hooks -----------------------------------------------------------------------------------------------> - repo: local hooks: @@ -1245,6 +1269,20 @@ repos: - jinja2==3.0.3 - msgpack==1.0.3 + - repo: https://github.com/pre-commit/mirrors-mypy + rev: v0.971 + hooks: + - id: mypy + alias: mypy-tools + name: Run mypy against tools + files: ^tools/.*\.py$ + #args: [--strict] + additional_dependencies: + - attrs + - rich + - types-attrs + - types-pyyaml + - repo: https://github.com/saltstack/mirrors-nox rev: v2021.6.12 hooks: diff --git a/.pylintrc b/.pylintrc index fe9357cdb20..77b23940867 100644 --- a/.pylintrc +++ b/.pylintrc @@ -694,7 +694,8 @@ allowed-3rd-party-modules=msgpack, copy, base64, tempfile, - fnmatch + fnmatch, + ptscripts [EXCEPTIONS] diff --git a/cicd/images.yml b/cicd/images.yml new file mode 100644 index 00000000000..7421fceb5e8 --- /dev/null +++ b/cicd/images.yml @@ -0,0 +1,13 @@ +debian-11: + ami: ami-0446131071e8012bc + ssh_username: admin +debian-11-arm64: + ami: ami-00900a8ea49344c7d + ssh_username: admin +ubuntu-22.04: + ami: ami-0cfac30ab1d41cf6c + ssh_username: ubuntu 
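+# Values omitted below fall back to the AMIConfig defaults in tools/vm.py
+# (timeouts are in seconds).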
+windows-2019: + ami: ami-0b4f5ddce805b3c17 + ssh_username: Administrator + connect_timeout: 600 diff --git a/noxfile.py b/noxfile.py index 375f7cf504b..ab4cb437a6c 100644 --- a/noxfile.py +++ b/noxfile.py @@ -32,6 +32,16 @@ CI_RUN = ( or os.environ.get("DRONE") is not None ) PIP_INSTALL_SILENT = CI_RUN is False +PRINT_TEST_SELECTION = os.environ.get("PRINT_TEST_SELECTION") +if PRINT_TEST_SELECTION is None: + PRINT_TEST_SELECTION = CI_RUN +else: + PRINT_TEST_SELECTION = PRINT_TEST_SELECTION == "1" +PRINT_SYSTEM_INFO = os.environ.get("PRINT_SYSTEM_INFO") +if PRINT_SYSTEM_INFO is None: + PRINT_SYSTEM_INFO = CI_RUN +else: + PRINT_SYSTEM_INFO = PRINT_SYSTEM_INFO == "1" SKIP_REQUIREMENTS_INSTALL = os.environ.get("SKIP_REQUIREMENTS_INSTALL", "0") == "1" EXTRA_REQUIREMENTS_INSTALL = os.environ.get("EXTRA_REQUIREMENTS_INSTALL") COVERAGE_REQUIREMENT = os.environ.get("COVERAGE_REQUIREMENT") @@ -407,10 +417,7 @@ def _run_with_coverage(session, *test_cmd, env=None): def _report_coverage(session): - if SKIP_REQUIREMENTS_INSTALL is False: - session.install( - "--progress-bar=off", COVERAGE_REQUIREMENT, silent=PIP_INSTALL_SILENT - ) + _install_coverage_requirement(session) env = { # The full path to the .coverage data file. Makes sure we always write @@ -945,6 +952,7 @@ def _pytest(session, coverage, cmd_args): "--show-capture=no", "-ra", "-s", + "-vv", "--showlocals", ] for arg in cmd_args: @@ -954,7 +962,10 @@ def _pytest(session, coverage, cmd_args): args.append("--log-file={}".format(RUNTESTS_LOGFILE)) args.extend(cmd_args) - if CI_RUN: + if PRINT_SYSTEM_INFO and "--sysinfo" not in args: + args.append("--sysinfo") + + if PRINT_TEST_SELECTION: # We'll print out the collected tests on CI runs. # This will show a full list of what tests are going to run, in the right order, which, in case # of a test suite hang, helps us pinpoint which test is hanging @@ -978,11 +989,173 @@ def _pytest(session, coverage, cmd_args): session.run("python", "-m", "pytest", *args, env=env) +def _ci_test(session, transport): + # Install requirements + _install_requirements(session, transport) + chunks = { + "unit": [ + "tests/unit", + "tests/pytests/unit", + ], + "functional": [ + "tests/pytests/functional", + ], + "scenarios": ["tests/pytests/scenarios"], + } + + if not session.posargs: + chunk_cmd = [] + junit_report_filename = "test-results" + runtests_log_filename = "runtests" + else: + chunk = session.posargs.pop(0) + if chunk in ["unit", "functional", "integration", "scenarios", "all"]: + if chunk == "all": + chunk_cmd = [] + junit_report_filename = "test-results" + runtests_log_filename = "runtests" + elif chunk == "integration": + chunk_cmd = [] + for values in chunks.values(): + for value in values: + chunk_cmd.append(f"--ignore={value}") + junit_report_filename = f"test-results-{chunk}" + runtests_log_filename = f"runtests-{chunk}" + else: + chunk_cmd = chunks[chunk] + junit_report_filename = f"test-results-{chunk}" + runtests_log_filename = f"runtests-{chunk}" + if session.posargs: + if session.posargs[0] == "--": + session.posargs.pop(0) + chunk_cmd.extend(session.posargs) + else: + chunk_cmd = [chunk] + session.posargs + junit_report_filename = "test-results" + runtests_log_filename = "runtests" + + rerun_failures = os.environ.get("RERUN_FAILURES", "0") == "1" + + track_code_coverage = os.environ.get("CI_TRACK_COVERAGE", "1") == "1" + + common_pytest_args = [ + "--color=yes", + "--run-slow", + "--ssh-tests", + "--sys-stats", + "--run-destructive", + "--output-columns=120", + ] + try: + pytest_args = ( + 
common_pytest_args[:]
+            + [
+                f"--junitxml=artifacts/xml-unittests-output/{junit_report_filename}.xml",
+                f"--log-file=artifacts/logs/{runtests_log_filename}.log",
+            ]
+            + chunk_cmd
+        )
+        _pytest(session, track_code_coverage, pytest_args)
+    except CommandFailed:
+        if rerun_failures is False:
+            raise
+
+        # Don't print the system information, nor the test selection, on reruns
+        global PRINT_TEST_SELECTION
+        global PRINT_SYSTEM_INFO
+        PRINT_TEST_SELECTION = False
+        PRINT_SYSTEM_INFO = False
+
+        pytest_args = (
+            common_pytest_args[:]
+            + [
+                "--lf",
+                f"--junitxml=artifacts/xml-unittests-output/{junit_report_filename}-rerun.xml",
+                f"--log-file=artifacts/logs/{runtests_log_filename}-rerun.log",
+            ]
+            + chunk_cmd
+        )
+        _pytest(session, track_code_coverage, pytest_args)
+
+
+@nox.session(python=_PYTHON_VERSIONS, name="ci-test")
+def ci_test(session):
+    _ci_test(session, "zeromq")
+
+
+@nox.session(python=_PYTHON_VERSIONS, name="ci-test-tcp")
+def ci_test_tcp(session):
+    _ci_test(session, "tcp")
+
+
 @nox.session(python="3", name="report-coverage")
 def report_coverage(session):
     _report_coverage(session)
 
 
+@nox.session(python=False, name="decompress-dependencies")
+def decompress_dependencies(session):
+    if not session.posargs:
+        session.error(
+            "Please pass the distro-slug to run tests against. "
+            "Check cicd/images.yml for what's available."
+        )
+    distro_slug = session.posargs.pop(0)
+    if IS_WINDOWS:
+        nox_dependencies_tarball = f"nox.{distro_slug}.tar.gz"
+    else:
+        nox_dependencies_tarball = f"nox.{distro_slug}.tar.xz"
+    nox_dependencies_tarball_path = REPO_ROOT / nox_dependencies_tarball
+    if not nox_dependencies_tarball_path.exists():
+        session.error(
+            f"The {nox_dependencies_tarball} file "
+            "does not exist. Not decompressing anything."
+        )
+
+    session_run_always(session, "tar", "xpf", nox_dependencies_tarball)
+    nox_dependencies_tarball_path.unlink()
+
+
+@nox.session(python=False, name="compress-dependencies")
+def compress_dependencies(session):
+    if not session.posargs:
+        session.error(
+            "Please pass the distro-slug to run tests against. "
+            "Check cicd/images.yml for what's available."
+        )
+    distro_slug = session.posargs.pop(0)
+    if IS_WINDOWS:
+        nox_dependencies_tarball = f"nox.{distro_slug}.tar.gz"
+    else:
+        nox_dependencies_tarball = f"nox.{distro_slug}.tar.xz"
+    nox_dependencies_tarball_path = REPO_ROOT / nox_dependencies_tarball
+    if nox_dependencies_tarball_path.exists():
+        session_warn(
+            session, f"Found existing {nox_dependencies_tarball}. Deleting it."
+        )
+        nox_dependencies_tarball_path.unlink()
+
+    session_run_always(session, "tar", "-caf", nox_dependencies_tarball, ".nox")
+
+
+@nox.session(python="3", name="combine-coverage")
+def combine_coverage(session):
+    _install_coverage_requirement(session)
+    env = {
+        # The full path to the .coverage data file.
Makes sure we always write + # them to the same directory + "COVERAGE_FILE": str(COVERAGE_FILE), + } + + # Always combine and generate the XML coverage report + try: + session.run("coverage", "combine", env=env) + except CommandFailed: + # Sometimes some of the coverage files are corrupt which would trigger a CommandFailed + # exception + pass + + class Tee: """ Python class to mimic linux tee behaviour diff --git a/requirements/static/ci/py3.10/tools.txt b/requirements/static/ci/py3.10/tools.txt new file mode 100644 index 00000000000..074f48a6b9b --- /dev/null +++ b/requirements/static/ci/py3.10/tools.txt @@ -0,0 +1,36 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --output-file=requirements/static/ci/py3.10/tools.txt --pip-args='--constraint=requirements/static/ci/py3.10/linux.txt' requirements/static/ci/tools.in +# +attrs==22.1.0 + # via + # -r requirements/static/ci/tools.in + # python-tools-scripts +boto3==1.24.80 + # via -r requirements/static/ci/tools.in +botocore==1.27.80 + # via + # boto3 + # s3transfer +commonmark==0.9.1 + # via rich +jmespath==1.0.1 + # via + # boto3 + # botocore +pygments==2.13.0 + # via rich +python-dateutil==2.8.2 + # via botocore +python-tools-scripts==0.9.0rc3 + # via -r requirements/static/ci/tools.in +rich==12.5.1 + # via python-tools-scripts +s3transfer==0.6.0 + # via boto3 +six==1.16.0 + # via python-dateutil +urllib3==1.26.12 + # via botocore diff --git a/requirements/static/ci/py3.9/tools.txt b/requirements/static/ci/py3.9/tools.txt new file mode 100644 index 00000000000..340c40c0c2b --- /dev/null +++ b/requirements/static/ci/py3.9/tools.txt @@ -0,0 +1,36 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --output-file=requirements/static/ci/py3.9/tools.txt --pip-args='--constraint=requirements/static/ci/py3.9/linux.txt' requirements/static/ci/tools.in +# +attrs==22.1.0 + # via + # -r requirements/static/ci/tools.in + # python-tools-scripts +boto3==1.24.80 + # via -r requirements/static/ci/tools.in +botocore==1.27.80 + # via + # boto3 + # s3transfer +commonmark==0.9.1 + # via rich +jmespath==1.0.1 + # via + # boto3 + # botocore +pygments==2.13.0 + # via rich +python-dateutil==2.8.2 + # via botocore +python-tools-scripts==0.9.0rc3 + # via -r requirements/static/ci/tools.in +rich==12.5.1 + # via python-tools-scripts +s3transfer==0.6.0 + # via boto3 +six==1.16.0 + # via python-dateutil +urllib3==1.26.12 + # via botocore diff --git a/requirements/static/ci/tools.in b/requirements/static/ci/tools.in new file mode 100644 index 00000000000..cb798beaba9 --- /dev/null +++ b/requirements/static/ci/tools.in @@ -0,0 +1,3 @@ +python-tools-scripts >= 0.9.0rc3 +attrs +boto3 diff --git a/setup.cfg b/setup.cfg index c3559b9aa15..ae12e49a0c1 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,3 +1,12 @@ [sdist] owner = root group = root + +[mypy] +python_version = 3.9 +show_error_codes = True +warn_return_any = True +warn_unused_configs = True + +[mypy.tools] +ignore_missing_imports = True diff --git a/tools/__init__.py b/tools/__init__.py new file mode 100644 index 00000000000..66a6b7293ca --- /dev/null +++ b/tools/__init__.py @@ -0,0 +1,6 @@ +import logging + +import tools.vm + +for name in ("boto3", "botocore", "urllib3"): + logging.getLogger(name).setLevel(logging.INFO) diff --git a/tools/vm.py b/tools/vm.py new file mode 100644 index 00000000000..1e5ee2049c7 --- /dev/null +++ b/tools/vm.py @@ -0,0 +1,1009 @@ +""" +These commands are used to create/destroy VMs, sync the local checkout +to the VM 
and to run commands on the VM. +""" +from __future__ import annotations + +import hashlib +import logging +import os +import pathlib +import platform +import random +import shutil +import subprocess +import sys +import textwrap +import time +from datetime import datetime +from functools import lru_cache +from typing import TYPE_CHECKING, cast + +from ptscripts import Context, command_group + +try: + import attr + import boto3 + import yaml + from rich.progress import Progress +except ImportError: + print( + "\nPlease run 'python -m pip install -r " + "requirements/static/ci/py{}.{}/tools.txt'\n".format(*sys.version_info), + file=sys.stderr, + flush=True, + ) + raise + + +if TYPE_CHECKING: + # pylint: disable=no-name-in-module + from boto3.resources.factory.ec2 import Instance + + try: + from io import TextIO # type: ignore[attr-defined] + except ImportError: + from io import TextIOBase as TextIO + # pylint: enable=no-name-in-module + +log = logging.getLogger(__name__) + +REPO_ROOT = pathlib.Path(__file__).parent.parent +STATE_DIR = REPO_ROOT / ".vms-state" +with REPO_ROOT.joinpath("cicd", "images.yml").open() as rfh: + AMIS = yaml.safe_load(rfh) +REPO_CHECKOUT_ID = hashlib.sha256( + "|".join(list(platform.uname()) + [str(REPO_ROOT)]).encode() +).hexdigest() + +# Define the command group +vm = command_group(name="vm", help="VM Related Commands", description=__doc__) +vm.add_argument("--region", help="The AWS region.", default="eu-central-1") + + +@vm.command( + arguments={ + "name": { + "help": "The VM Name", + "metavar": "VM_NAME", + "choices": list(AMIS), + }, + "key_name": { + "help": "The SSH key name.", + }, + "instance_type": { + "help": "The instance type to use.", + }, + } +) +def create( + ctx: Context, + name: str, + key_name: str = None, + instance_type: str = None, +): + """ + Create VM. + """ + vm = VM(ctx=ctx, name=name, region_name=ctx.parser.options.region) + vm.create(key_name=key_name, instance_type=instance_type) + + +@vm.command( + arguments={ + "name": { + "help": "The VM Name", + "metavar": "VM_NAME", + }, + } +) +def destroy(ctx: Context, name: str): + """ + Destroy VM. + """ + vm = VM(ctx=ctx, name=name, region_name=ctx.parser.options.region) + vm.destroy() + + +@vm.command( + arguments={ + "name": { + "help": "The VM Name", + "metavar": "VM_NAME", + }, + "command": { + "help": "Command to run in VM", + "nargs": "*", + }, + "sudo": { + "help": "Run command as sudo", + "action": "store_true", + }, + } +) +def ssh(ctx: Context, name: str, command: list[str], sudo: bool = False): + """ + SSH into the VM, or run 'command' in VM + """ + vm = VM(ctx=ctx, name=name, region_name=ctx.parser.options.region) + vm.run(command, sudo=sudo) + + +@vm.command( + arguments={ + "name": { + "help": "The VM Name", + "metavar": "VM_NAME", + }, + } +) +def rsync(ctx: Context, name: str): + """ + Sync local checkout to VM. 
+ """ + vm = VM(ctx=ctx, name=name, region_name=ctx.parser.options.region) + vm.upload_checkout() + + +@vm.command( + arguments={ + "name": { + "help": "The VM Name", + "metavar": "VM_NAME", + }, + "nox_session": { + "flags": [ + "-e", + "--nox-session", + ], + "help": "The nox session name to run in the VM", + }, + "nox_session_args": { + "help": "Extra CLI arguments to pass to pytest", + "nargs": "*", + "metavar": "NOX_SESSION_ARGS", + }, + "rerun_failures": { + "help": "Re-run test failures", + "action": "store_true", + }, + "skip_requirements_install": { + "help": "Skip requirements installation", + "action": "store_true", + "flags": [ + "--sri", + "--skip-requirements-install", + ], + }, + "print_tests_selection": { + "help": "Print the tests selection", + "action": "store_true", + "flags": [ + "--pts", + "--print-tests-selection", + ], + }, + } +) +def test( + ctx: Context, + name: str, + nox_session_args: list[str] = None, + nox_session: str = "ci-test-3", + rerun_failures: bool = False, + skip_requirements_install: bool = False, + print_tests_selection: bool = False, +): + """ + Run test in the VM. + """ + vm = VM(ctx=ctx, name=name, region_name=ctx.parser.options.region) + env = [] + if rerun_failures: + env.append("RERUN_FAILURES=1") + if print_tests_selection: + env.append("PRINT_TEST_SELECTION=1") + if ( + skip_requirements_install + or os.environ.get("SKIP_REQUIREMENTS_INSTALL", "0") == "1" + ): + env.append("SKIP_REQUIREMENTS_INSTALL=1") + vm.run_nox(nox_session=nox_session, session_args=nox_session_args, env=env) + + +@vm.command( + name="install-dependencies", + arguments={ + "name": { + "help": "The VM Name", + "metavar": "VM_NAME", + }, + "nox_session": { + "flags": [ + "-e", + "--nox-session", + ], + "help": "The nox environ to run in the VM", + }, + }, +) +def install_dependencies(ctx: Context, name: str, nox_session: str = "ci-test-3"): + """ + Install test dependencies on VM. + """ + vm = VM(ctx=ctx, name=name, region_name=ctx.parser.options.region) + vm.install_dependencies(nox_session) + + +@vm.command( + name="compress-dependencies", + arguments={ + "name": { + "help": "The VM Name", + "metavar": "VM_NAME", + }, + }, +) +def compress_dependencies(ctx: Context, name: str): + """ + Compress the .nox/ directory in the VM. + """ + vm = VM(ctx=ctx, name=name, region_name=ctx.parser.options.region) + vm.compress_dependencies() + + +@vm.command( + name="decompress-dependencies", + arguments={ + "name": { + "help": "The VM Name", + "metavar": "VM_NAME", + }, + }, +) +def decompress_dependencies(ctx: Context, name: str): + """ + Decompress a dependencies archive into the .nox/ directory in the VM. + """ + vm = VM(ctx=ctx, name=name, region_name=ctx.parser.options.region) + vm.decompress_dependencies() + + +@vm.command( + name="download-dependencies", + arguments={ + "name": { + "help": "The VM Name", + "metavar": "VM_NAME", + }, + }, +) +def download_dependencies(ctx: Context, name: str): + """ + Download a compressed .nox/ directory from VM. + """ + vm = VM(ctx=ctx, name=name, region_name=ctx.parser.options.region) + vm.download_dependencies() + + +@vm.command( + name="combine-coverage", + arguments={ + "name": { + "help": "The VM Name", + "metavar": "VM_NAME", + }, + }, +) +def combine_coverage(ctx: Context, name: str): + """ + Combine the several code coverage files into a single one in the VM. 
+ """ + vm = VM(ctx=ctx, name=name, region_name=ctx.parser.options.region) + vm.combine_coverage() + + +@vm.command( + name="download-artifacts", + arguments={ + "name": { + "help": "The VM Name", + "metavar": "VM_NAME", + }, + }, +) +def download_artifacts(ctx: Context, name: str): + """ + Download test artifacts from VM. + """ + vm = VM(ctx=ctx, name=name, region_name=ctx.parser.options.region) + vm.download_artifacts() + + +@attr.s(frozen=True, kw_only=True) +class AMIConfig: + ami: str = attr.ib() + ssh_username: str = attr.ib() + create_timeout: int = attr.ib(default=5 * 60) + connect_timeout: int = attr.ib(default=10 * 60) + terminate_timeout: int = attr.ib(default=5 * 60) + upload_path: str = attr.ib(default=None) + + +@attr.s(slots=True, kw_only=True, hash=True, repr=False) +class VM: + ctx: Context = attr.ib() + name: str = attr.ib() + region_name: str = attr.ib(default=None) + # Internal + config: AMIConfig = attr.ib(init=False) + instance: Instance = attr.ib(init=False, hash=False, default=None) + state_dir: pathlib.Path = attr.ib(init=False) + ssh_config_file: pathlib.Path = attr.ib(init=False) + + def __attrs_post_init__(self): + self.read_state() + if self.is_running: + self.write_ssh_config() + + @config.default + def _config_default(self): + config = AMIConfig(**AMIS[self.name]) + log.info(f"Loaded VM Configuration:\n{config}") + return config + + @state_dir.default + def _state_dir_default(self): + state_dir = STATE_DIR / self.name + state_dir.mkdir(parents=True, exist_ok=True) + return state_dir + + @ssh_config_file.default + def _ssh_config_file_default(self): + return self.state_dir / "ssh-config" + + def read_state(self): + self.get_ec2_resource.cache_clear() + instance = None + ec2_region_path = self.state_dir / "ec2-region" + if ec2_region_path.exists(): + self.region_name = ec2_region_path.read_text().strip() + instance_id_path = self.state_dir / "instance-id" + if instance_id_path.exists(): + instance_id = instance_id_path.read_text().strip() + _instance = self.ec2.Instance(instance_id) + + if _instance.state["Name"] == "running": + instance = _instance + else: + filters = [ + {"Name": "tag:vm-name", "Values": [self.name]}, + {"Name": "tag:instance-client-id", "Values": [REPO_CHECKOUT_ID]}, + ] + log.info(f"Checking existing instance of {self.name}({self.config.ami})...") + instances = list( + self.ec2.instances.filter( + Filters=filters, + ) + ) + for _instance in instances: + if _instance.state["Name"] == "running": + instance = _instance + break + if instance: + self.instance = instance + + def write_state(self): + ec2_region_path = self.state_dir / "ec2-region" + if self.region_name: + ec2_region_path.write_text(self.region_name) + instance_id_path = self.state_dir / "instance-id" + if self.id: + instance_id_path.write_text(self.id) + self.write_ssh_config() + + def write_ssh_config(self): + if self.ssh_config_file.exists(): + return + ssh_config = textwrap.dedent( + f"""\ + Host {self.name} + Hostname {self.instance.public_ip_address or self.instance.private_ip_address} + User {self.config.ssh_username} + Compression=yes + LogLevel=FATAL + StrictHostKeyChecking=no + UserKnownHostsFile=/dev/null + ForwardAgent=yes + """ + ) + self.ssh_config_file.write_text(ssh_config) + + def create(self, key_name=None, instance_type=None): + if self.is_running: + log.info(f"{self!r} is already running...") + return + self.get_ec2_resource.cache_clear() + + create_timeout = self.config.create_timeout + create_timeout_progress = 0 + ssh_connection_timeout = 
self.config.connect_timeout + ssh_connection_timeout_progress = 0 + + network_interfaces = None + if key_name is None: + key_name = "github-actions" + log.info("Starting CI configured VM") + else: + # This is a developer running + log.info("Starting Developer configured VM") + + # Grab the public subnet of the vpc used on the template + client = boto3.client("ec2", region_name=self.region_name) + data = client.describe_launch_template_versions( + LaunchTemplateName=self.config.ami + ) + # The newest template comes first + template_data = data["LaunchTemplateVersions"][0]["LaunchTemplateData"] + subnet_id = template_data["NetworkInterfaces"][0]["SubnetId"] + # Grab the subnet instance + subnet = self.ec2.Subnet(subnet_id) + # Grabt the VPC instance from the subnet + vpc = self.ec2.Vpc(subnet.vpc_id) + # Collect all public subnets on the VPC + public_subnets = [] + for subnet in vpc.subnets.all(): + for tag in subnet.tags: + if tag["Key"] != "Name": + continue + if "-public-" in tag["Value"]: + public_subnets.append(subnet) + break + # Randomly choose one of the subnets + chosen_public_subnet = random.choice(public_subnets) + # Override the launch template network interfaces config + network_interfaces = [ + { + "AssociatePublicIpAddress": True, + "DeleteOnTermination": True, + "DeviceIndex": 0, + "SubnetId": chosen_public_subnet.id, + } + ] + + progress = Progress( + # transient=True, + expand=True, + ) + create_tast = progress.add_task( + f"Starting {self!r} in {self.region_name!r} with ssh key named {key_name!r}...", + total=create_timeout, + ) + connect_task = progress.add_task( + "Waiting for ssh connection", + start=False, + total=ssh_connection_timeout, + ) + + tags = [ + {"Key": "vm-name", "Value": self.name}, + {"Key": "instance-client-id", "Value": REPO_CHECKOUT_ID}, + ] + if os.environ.get("CI") is not None: + job = os.environ["GITHUB_JOB"] + ref = os.environ["GITHUB_REF"] + repo = os.environ["GITHUB_REPOSITORY"] + actor = ( + os.environ.get("GITHUB_TRIGGERING_ACTOR") or os.environ["GITHUB_ACTOR"] + ) + if "pull" in ref: + ref = f"pr-{ref.split('/')[2]}" + elif "tags" in ref: + ref = f"tag-{ref.split('/')[-1]}" + else: + ref = ref.split("/")[-1] + name = f"{self.name} - {repo} - {ref} - {job} - {actor}" + for key in os.environ: + if not key.startswith("GITHUB_"): + continue + if key in ( + "GITHUB_ACTIONS", + "GITHUB_API_URL", + "GITHUB_ENV", + "GITHUB_EVENT_PATH", + "GITHUB_GRAPHQL_URL", + "GITHUB_OUTPUT", + "GITHUB_PATH", + "GITHUB_REPOSITORY_OWNER", + "GITHUB_RETENTION_DAYS", + "GITHUB_STATE", + "GITHUB_STEP_SUMMARY", + ): + continue + value = os.environ.get(key) + if not value: + continue + tags.append( + { + "Key": f"gh:{key}", + "Value": value, + } + ) + else: + name = f"{self.name} started on {datetime.utcnow()}" + tags.append( + { + "Key": "Name", + "Value": name, + } + ) + with progress: + start = time.time() + create_kwargs = dict( + ImageId=self.config.ami, + MinCount=1, + MaxCount=1, + KeyName=key_name, + TagSpecifications=[ + { + "ResourceType": "instance", + "Tags": tags, + } + ], + LaunchTemplate={"LaunchTemplateName": self.config.ami}, + ) + if instance_type: + # The user provided a custom instance type + create_kwargs["InstanceType"] = instance_type + if network_interfaces is not None: + # This is a developer configured VM + create_kwargs["NetworkInterfaces"] = network_interfaces + + # Create the VM + response = self.ec2.create_instances(**create_kwargs) + for _instance in response: + self.instance = _instance + stop = time.time() + create_timeout_progress += stop 
+
+    def destroy(self):
+        try:
+            if not self.is_running:
+                log.info(f"{self!r} is not running...")
+                return
+            timeout = self.config.terminate_timeout
+            timeout_progress = 0
+            progress = Progress(
+                # transient=True,
+                expand=True,
+            )
+            task = progress.add_task(f"Terminating {self!r}...", total=timeout)
+            self.instance.terminate()
+            try:
+                with progress:
+                    while timeout_progress <= timeout:
+                        start = time.time()
+                        time.sleep(1)
+                        if self.state == "terminated":
+                            progress.update(
+                                task,
+                                description=f"{self!r} terminated.",
+                                completed=timeout,
+                            )
+                            break
+                        timeout_progress += time.time() - start
+                        progress.update(
+                            task,
+                            description=f"Terminating {self!r}...",
+                            completed=timeout_progress,
+                        )
+                    else:
+                        progress.update(
+                            task,
+                            description=f"Failed to terminate {self!r}.",
+                            completed=timeout,
+                        )
+            except KeyboardInterrupt:
+                pass
+        finally:
+            shutil.rmtree(self.state_dir, ignore_errors=True)
+            self.instance = None
+
+    def upload_checkout(self, verbose=True):
+        rsync_flags = [
+            "--delete",
+            "--no-group",
+            "--no-owner",
+            "--exclude",
+            ".nox/",
+            "--exclude",
+            ".pytest_cache/",
+            "--exclude",
+            "artifacts/",
+            "--exclude",
+            f"{STATE_DIR.relative_to(REPO_ROOT)}{os.path.sep}",
+            "--exclude",
+            "*.py~",
+        ]
+        # Local repo path
+        source = f"{REPO_ROOT}{os.path.sep}"
+        # Remote repo path
+        remote_path = self.upload_path.as_posix()
+        if self.is_windows:
+            for drive in ("c:", "C:"):
+                remote_path = remote_path.replace(drive, "/cygdrive/c")
+        destination = f"{self.name}:{remote_path}"
+        description = "Rsync local checkout to VM..."
+        self.rsync(source, destination, description, rsync_flags)
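+
+    # NOTE: hedged sketch of the Windows path handling used by the rsync
+    # helpers in this class. rsync on the Windows images appears to run under
+    # cygwin (inferred from the "/cygdrive/c" replacement, not otherwise
+    # stated in this file), so a Windows upload path has to be rewritten into
+    # cygwin notation first:
+    #
+    #   >>> import pathlib
+    #   >>> remote_path = pathlib.PureWindowsPath(r"c:\Windows\Temp\testing").as_posix()
+    #   >>> for drive in ("c:", "C:"):
+    #   ...     remote_path = remote_path.replace(drive, "/cygdrive/c")
+    #   >>> remote_path
+    #   '/cygdrive/c/Windows/Temp/testing'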
+
+    def run(
+        self,
+        command: list[str],
+        check: bool = True,
+        sudo: bool = False,
+        capture: bool = False,
+        pseudo_terminal: bool = False,
+        env: list[str] | None = None,
+        log_command_level: int = logging.INFO,
+    ):
+        if not self.is_running:
+            self.ctx.exit(1, message=f"{self!r} is not running")
+        self.write_ssh_config()
+        stdout: TextIO | int
+        stderr: TextIO | int
+        if capture:
+            stdout = subprocess.PIPE
+            stderr = subprocess.PIPE
+        else:
+            stdout = sys.stdout
+            stderr = sys.stderr
+        try:
+            ssh_command = self.ssh_command_args(
+                *command,
+                sudo=sudo,
+                pseudo_terminal=pseudo_terminal,
+                env=env,
+                log_command_level=log_command_level,
+            )
+            return subprocess.run(
+                ssh_command,
+                stdin=sys.stdin,
+                stdout=stdout,
+                stderr=stderr,
+                universal_newlines=True,
+                shell=False,
+                check=check,
+                bufsize=0,
+            )
+        except (KeyboardInterrupt, SystemExit):
+            # Interrupted before completion; callers get None instead of a
+            # CompletedProcess
+            pass
+
+    def run_nox(
+        self,
+        nox_session: str,
+        session_args: list[str] | None = None,
+        nox_args: list[str] | None = None,
+        env: list[str] | None = None,
+    ):
+        cmd = [
+            "nox",
+            "--force-color",
+            "-f",
+            f"{self.upload_path.joinpath('noxfile.py').as_posix()}",
+            "-e",
+            nox_session,
+        ]
+        if nox_args:
+            cmd += nox_args
+        if session_args:
+            cmd += ["--"] + session_args
+        if env is None:
+            env = []
+        if "CI" in os.environ:
+            env.append(f"CI={os.environ['CI']}")
+        if self.is_windows is False:
+            sudo = True
+        else:
+            sudo = False
+        ret = self.run(
+            cmd, sudo=sudo, check=False, capture=False, pseudo_terminal=True, env=env
+        )
+        self.ctx.exit(ret.returncode)
+
+    def combine_coverage(self):
+        """
+        Combine the code coverage databases
+        """
+        self.run_nox("combine-coverage", session_args=[self.name])
+
+    def compress_dependencies(self):
+        """
+        Compress .nox/ into nox.<vm-name>.tar.* in the VM
+        """
+        self.run_nox("compress-dependencies", session_args=[self.name])
+
+    def decompress_dependencies(self):
+        """
+        Decompress nox.<vm-name>.tar.* if it exists in the VM
+        """
+        self.run_nox("decompress-dependencies", session_args=[self.name])
+
+    def download_dependencies(self):
+        """
+        Download nox.<vm-name>.tar.* from the VM
+        """
+        if self.is_windows:
+            dependencies_filename = f"nox.{self.name}.tar.gz"
+        else:
+            dependencies_filename = f"nox.{self.name}.tar.xz"
+        remote_path = self.upload_path.joinpath(dependencies_filename).as_posix()
+        if self.is_windows:
+            for drive in ("c:", "C:"):
+                remote_path = remote_path.replace(drive, "/cygdrive/c")
+        source = f"{self.name}:{remote_path}"
+        destination = "."
+        description = f"Downloading {dependencies_filename} ..."
+        self.rsync(source, destination, description)
+
+    def download_artifacts(self):
+        """
+        Download the artifacts/ directory from the VM
+        """
+        remote_path = self.upload_path.joinpath("artifacts").as_posix()
+        if self.is_windows:
+            for drive in ("c:", "C:"):
+                remote_path = remote_path.replace(drive, "/cygdrive/c")
+        source = f"{self.name}:{remote_path}/"
+        destination = "artifacts/"
+        description = f"Downloading {source} ..."
+        self.rsync(source, destination, description)
+
+    def rsync(self, source, destination, description, rsync_flags: list[str] | None = None):
+        """
+        Rsync source into destination while showing progress.
+        """
+        rsync = shutil.which("rsync")
+        if not rsync:
+            self.ctx.exit(1, "Could not find the 'rsync' binary")
+        if TYPE_CHECKING:
+            assert rsync
+        cmd: list[str] = [
+            rsync,
+            "-az",
+            "--info=none,progress2",
+            "-e",
+            " ".join(
+                self.ssh_command_args(
+                    include_vm_target=False, log_command_level=logging.NOTSET
+                )
+            ),
+        ]
+        if rsync_flags:
+            cmd.extend(rsync_flags)
+        cmd.extend(
+            [
+                source,
+                destination,
+            ]
+        )
+        log.info(f"Running {' '.join(cmd)!r}")  # type: ignore[arg-type]
+        progress = Progress(transient=True, expand=True)
+        task = progress.add_task(description, total=100)
+        with progress:
+            proc = subprocess.Popen(cmd, bufsize=1, stdout=subprocess.PIPE, text=True)
+            completed = 0
+            while proc.poll() is None:
+                if TYPE_CHECKING:
+                    assert proc.stdout
+                parts = proc.stdout.readline().strip().split()
+                if parts:
+                    completed = int(parts[1][:-1])
+                    progress.update(task, completed=completed)
+            progress.update(task, completed=100)
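+
+    # NOTE: a hedged reading of the progress parsing above. With
+    # ``--info=none,progress2`` rsync emits whole-transfer progress lines that
+    # look roughly like:
+    #
+    #       1,234,567  42%    1.23MB/s    0:00:12
+    #
+    # so ``parts[1]`` is the percentage column and ``parts[1][:-1]`` drops the
+    # trailing ``%`` before the value is handed to the rich progress bar. The
+    # exact column layout is rsync behavior, not something this file asserts.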
+ """ + rsync = shutil.which("rsync") + if not rsync: + self.ctx.exit(1, "Could find the 'rsync' binary") + if TYPE_CHECKING: + assert rsync + cmd: list[str] = [ + rsync, + "-az", + "--info=none,progress2", + "-e", + " ".join( + self.ssh_command_args( + include_vm_target=False, log_command_level=logging.NOTSET + ) + ), + ] + if rsync_flags: + cmd.extend(rsync_flags) + cmd.extend( + [ + source, + destination, + ] + ) + log.info(f"Running {' '.join(cmd)!r}") # type: ignore[arg-type] + progress = Progress(transient=True, expand=True) + task = progress.add_task(description, total=100) + with progress: + proc = subprocess.Popen(cmd, bufsize=1, stdout=subprocess.PIPE, text=True) + completed = 0 + while proc.poll() is None: + if TYPE_CHECKING: + assert proc.stdout + parts = proc.stdout.readline().strip().split() + if parts: + completed = int(parts[1][:-1]) + progress.update(task, completed=completed) + progress.update(task, completed=100) + + def install_dependencies(self, nox_session: str): + """ + Install test dependencies in VM. + """ + return self.run_nox( + nox_session, + nox_args=["--install-only"], + env=["PRINT_TEST_SELECTION=0", "PRINT_SYSTEM_INFO=0"], + ) + + def __repr__(self): + return ( + f"VM(name={self.name!r}, ami={self.config.ami!r}, id={self.id!r}, " + f"region={self.region_name!r} state={self.state!r})" + ) + + def ssh_command_args( + self, + *command: str, + sudo: bool = False, + include_vm_target: bool = True, + pseudo_terminal: bool = False, + env: list[str] = None, + log_command_level: int = logging.INFO, + ) -> list[str]: + ssh = shutil.which("ssh") + if TYPE_CHECKING: + assert ssh + _ssh_command_args = [ + ssh, + "-a", + "-F", + str(self.ssh_config_file.relative_to(REPO_ROOT)), + ] + if pseudo_terminal is True: + _ssh_command_args.append("-t") + if include_vm_target: + _ssh_command_args.append(self.name) + remote_command = [] + if command: + remote_command.append("--") + if sudo: + remote_command.append("sudo") + if env: + remote_command.append("env") + remote_command.extend(env) + remote_command.extend(list(command)) + log.log( + log_command_level, + f"Running {' '.join(remote_command[1:])!r} in {self.name}", + ) + _ssh_command_args.extend(remote_command) + return _ssh_command_args + + @property + def is_windows(self): + return "windows" in self.name + + @lru_cache(maxsize=1) + def get_ec2_resource(self): + return boto3.resource("ec2", region_name=self.region_name) + + @property + def ec2(self): + return self.get_ec2_resource() + + @property + def id(self) -> str | None: + if self.is_running: + return cast(str, self.instance.id) + return None + + @property + def is_running(self) -> bool: + if self.instance is None: + return False + running: bool = self.state == "running" + return running + + @property + def state(self) -> str | None: + _state: str | None = None + if self.instance: + self.instance.reload() + _state = self.instance.state["Name"] + return _state + + @property + def tempdir(self): + return self.get_remote_tempdir() + + @lru_cache(maxsize=1) + def get_remote_tempdir(self): + cmd = [ + "-c", + "import sys,tempfile; sys.stdout.write(tempfile.gettempdir()); sys.stdout.flush();", + ] + if self.is_windows is False: + cmd.insert(0, "python3") + else: + cmd.insert(0, "python") + ret = self.run(cmd, capture=True, check=False) + if ret.returncode != 0: + self.ctx.exit(ret.returncode, ret.stderr.strip()) + return ret.stdout.strip() + + @property + def upload_path(self): + return self.get_remote_upload_path() + + @lru_cache(maxsize=1) + def get_remote_upload_path(self): 
+
+    @lru_cache(maxsize=1)
+    def get_remote_upload_path(self):
+        if self.config.upload_path:
+            return pathlib.Path(self.config.upload_path)
+        if self.is_windows:
+            return pathlib.PureWindowsPath(r"c:\Windows\Temp\testing")
+        return pathlib.Path("/tmp/testing")
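+
+    # NOTE: hedged end-to-end sketch of how these helpers compose, assuming a
+    # ``vm`` instance obtained from this module's CLI plumbing (the exact
+    # construction lives elsewhere in this file):
+    #
+    #   vm.create(key_name=None)        # boots the instance, waits for ssh
+    #   vm.upload_checkout()            # rsyncs the repo to vm.upload_path
+    #   vm.install_dependencies("ci-test-3")
+    #   vm.run_nox("ci-test-3")         # exits with the nox session's code
+    #   vm.download_artifacts()
+    #   vm.destroy()
+    #
+    # Since ``run_nox`` calls ``self.ctx.exit(...)``, in practice the later
+    # steps run as separate invocations of the tool rather than one linear
+    # script.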