diff --git a/.github/workflows/check-api-for-breaking-changes.yml b/.github/workflows/check-api-for-breaking-changes.yml index 243342ee..f31fbab0 100644 --- a/.github/workflows/check-api-for-breaking-changes.yml +++ b/.github/workflows/check-api-for-breaking-changes.yml @@ -3,40 +3,8 @@ name: Check Public API for Breaking Changes on: pull_request: branches: [main] - workflow_call: - inputs: - package-name: - description: The name of the package to check. - required: true - type: string jobs: check-api-for-breaking-changes: - name: Check API for breaking changes - runs-on: ubuntu-latest - env: - PACKAGE_NAME: ${{ inputs.package-name || 'tm_devices' }} - steps: - - uses: actions/checkout@v4 - with: - fetch-depth: 0 - - name: Set up Python - uses: actions/setup-python@v5 - with: - python-version: x # any version - check-latest: true - - name: Install package to check - run: | - pip install --upgrade . - pip install griffe - - name: Check API for breaking changes - continue-on-error: true - run: | - echo "## Breaking API Changes" > breaking_changes.md - echo "\`\`\`" >> breaking_changes.md - griffe check --format=verbose --against=$(git rev-parse origin/main) --search=src "$PACKAGE_NAME" 2>&1 | tee -a breaking_changes.md - - name: Finish writing summary file - run: echo "\`\`\`" >> breaking_changes.md - - uses: actions/upload-artifact@v4 - with: - name: breaking_changes - path: breaking_changes.md + uses: tektronix/python-package-ci-cd/.github/workflows/_reusable-check-api-for-breaking-changes.yml@v1.0.0 + with: + package-name: tm_devices diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml index 1691e278..76f76e36 100644 --- a/.github/workflows/codeql-analysis.yml +++ b/.github/workflows/codeql-analysis.yml @@ -4,32 +4,16 @@ on: push: branches: [main] pull_request: - # The branches below must be a subset of the branches above branches: [main] schedule: - cron: 17 16 * * 4 - workflow_call: jobs: analyze: - name: Analyze - runs-on: ubuntu-latest + uses: tektronix/python-package-ci-cd/.github/workflows/_reusable-codeql-analysis.yml@v1.0.0 + with: + languages-array: '["python", "javascript"]' + codeql-queries: security-extended,security-and-quality permissions: actions: read contents: read security-events: write - strategy: - fail-fast: false - matrix: - language: [python, javascript] - steps: - - name: Checkout repository - uses: actions/checkout@v4 - - name: Initialize CodeQL - uses: github/codeql-action/init@v3 - with: - languages: ${{ matrix.language }} - queries: security-extended,security-and-quality - - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@v3 - with: - category: /language:${{matrix.language}} diff --git a/.github/workflows/dependency-review.yml b/.github/workflows/dependency-review.yml index 7960fad6..dab66770 100644 --- a/.github/workflows/dependency-review.yml +++ b/.github/workflows/dependency-review.yml @@ -3,7 +3,6 @@ name: Pull Request Dependency Review on: pull_request: branches: [main] - workflow_call: permissions: contents: read pull-requests: write diff --git a/.github/workflows/enforce-community-standards.yml b/.github/workflows/enforce-community-standards.yml new file mode 100644 index 00000000..c13b0f3b --- /dev/null +++ b/.github/workflows/enforce-community-standards.yml @@ -0,0 +1,10 @@ +--- +name: Enforce Open Source Community Standards +on: + push: + branches: [main] + pull_request: + branches: [main] +jobs: + enforce-community-standards: + uses: 
tektronix/python-package-ci-cd/.github/workflows/_reusable-enforce-community-standards.yml@v1.0.0 diff --git a/.github/workflows/package-build.yml b/.github/workflows/package-build.yml index a2155b16..60954a49 100644 --- a/.github/workflows/package-build.yml +++ b/.github/workflows/package-build.yml @@ -6,68 +6,18 @@ on: tags: ['*'] pull_request: branches: [main] - workflow_call: - inputs: - package-name: - description: The name of the package being installed. - required: true - type: string -env: - PACKAGE_NAME: ${{ inputs.package-name || 'tm_devices' }} +# Cancel running jobs for the same workflow and branch. +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: ${{ github.ref != 'refs/heads/main' }} jobs: - # Verify the package builds fine - build-package: - name: Build package - runs-on: ubuntu-latest - environment: package-build + package-build: + uses: tektronix/python-package-ci-cd/.github/workflows/_reusable-package-build.yml@v1.0.0 + with: + package-name: tm_devices + python-versions-array: '["3.8", "3.9", "3.10", "3.11", "3.12"]' # when updating this, make sure to update all workflows that use this strategy + operating-systems-array: '["ubuntu", "windows", "macos"]' permissions: + contents: read id-token: write attestations: write - steps: - - uses: actions/checkout@v4 - with: - fetch-depth: 0 - - uses: hynek/build-and-inspect-python-package@v2.8.0 - id: build-pkg - with: - attest-build-provenance-github: ${{ !(github.event.pull_request.head.repo.fork || github.event.workflow_call.pull_request.head.repo.fork) && github.actor != 'dependabot[bot]' }} - install-package: - name: Install package - needs: build-package - runs-on: ${{ matrix.platform }} - strategy: - fail-fast: false - matrix: - platform: [ubuntu-latest, windows-latest, macos-latest] - python-version: ['3.8', '3.9', '3.10', '3.11', '3.12'] # when updating this, make sure to update all workflows that use this strategy - steps: - - name: Download built packages - uses: actions/download-artifact@v4 - with: - name: Packages - path: dist - - name: Set up Python - uses: actions/setup-python@v5 - with: - python-version: ${{ matrix.python-version }} - check-latest: true - - name: Test installing wheel - shell: bash - run: pip install dist/*.whl - - name: Uninstall wheel - run: pip uninstall --yes "${{ env.PACKAGE_NAME }}" - - name: Test installing tarball - shell: bash - run: pip install dist/*.tar.gz - - name: Uninstall tarball - run: pip uninstall --yes "${{ env.PACKAGE_NAME }}" - # Check that all jobs passed - check-build-and-install-passed: - if: ${{ !cancelled() }} - needs: [build-package, install-package] - runs-on: ubuntu-latest - steps: - - name: Decide whether the needed jobs succeeded or failed - uses: re-actors/alls-green@release/v1 - with: - jobs: ${{ toJSON(needs) }} diff --git a/.github/workflows/package-release.yml b/.github/workflows/package-release.yml index 385e943f..566e90d3 100644 --- a/.github/workflows/package-release.yml +++ b/.github/workflows/package-release.yml @@ -3,208 +3,36 @@ name: Publish to GitHub & PyPI on: workflow_dispatch: inputs: - release_level: + release-level: type: choice required: true description: | - Select the release level, + Select the release level: patch for backward compatible minor changes and bug fixes, minor for backward compatible larger changes, major for non-backward compatible changes. options: [patch, minor, major] - workflow_call: - inputs: - package-name: - description: The name of the package to use to gate uploads. 
- required: true - type: string - release_level: - description: | - Select the release level, - patch for backward compatible minor changes and bug fixes, - minor for backward compatible larger changes, - major for non-backward compatible changes. - required: true - type: string concurrency: group: pypi -env: - REPO_NAME: tektronix/${{ inputs.package-name || 'tm_devices' }} - PACKAGE_NAME: ${{ inputs.package-name || 'tm_devices' }} jobs: - print-inputs: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - - name: Set up Python - uses: actions/setup-python@v5 - with: - python-version: x - - name: Check for unreleased entries in the Changelog - run: python scripts/check_unreleased_changelog_items.py - - name: Create summary of workflow inputs and incoming changes - run: | - echo "## Workflow Inputs" >> $GITHUB_STEP_SUMMARY - echo "- release_level: ${{ inputs.release_level }}" >> $GITHUB_STEP_SUMMARY - echo "## Incoming Changes" >> $GITHUB_STEP_SUMMARY - cat python_semantic_release_templates/.previous_release_notes_for_template.md >> $GITHUB_STEP_SUMMARY - - name: Set outputs - id: variables - run: echo "repo-name=$REPO_NAME" >> $GITHUB_OUTPUT - outputs: - repo-name: ${{ steps.variables.outputs.repo-name }} - # This job requires a Personal Access Token (Classic) with - # the public_repo permission. It also needs a private/public - # ssh key pair that can be used for signing. The public key must - # be attached to the account as an SSH signing key. - pypi-version: - name: Update package version - needs: [print-inputs] - if: github.repository == needs.print-inputs.outputs.repo-name && github.ref == - 'refs/heads/main' - runs-on: ubuntu-latest - environment: package-release-gate + package-release: + uses: tektronix/python-package-ci-cd/.github/workflows/_reusable-package-release.yml@v1.0.0 + with: + package-name: tm_devices + repo-name: tektronix/tm_devices + commit-user-name: ${{ vars.TEK_OPENSOURCE_NAME }} + commit-user-email: ${{ vars.TEK_OPENSOURCE_EMAIL }} + release-level: ${{ inputs.release-level }} + build-and-publish-python-package: true + python-versions-array: '["3.8", "3.9", "3.10", "3.11", "3.12"]' # when updating this, make sure to update all workflows that use this strategy + operating-systems-array: '["ubuntu", "windows", "macos"]' + previous-changelog-filepath: python_semantic_release_templates/.previous_changelog_for_template.md + previous-release-notes-filepath: python_semantic_release_templates/.previous_release_notes_for_template.md permissions: - id-token: write contents: write - steps: - - uses: actions/checkout@v4 - with: - fetch-depth: 0 - token: ${{ secrets.TEK_OPENSOURCE_TOKEN }} - - name: Set up Python - uses: actions/setup-python@v5 - with: - python-version: x - check-latest: true - - name: Check for unreleased entries in the Changelog and copy files to templates - run: | - python scripts/check_unreleased_changelog_items.py - git config --global tag.gpgSign true - - name: Python Semantic Release - uses: python-semantic-release/python-semantic-release@v9.8.3 - id: release - with: - force: ${{ inputs.release_level }} - root_options: -v --strict - github_token: ${{ secrets.TEK_OPENSOURCE_TOKEN }} - git_committer_email: ${{ vars.TEK_OPENSOURCE_EMAIL }} - git_committer_name: ${{ vars.TEK_OPENSOURCE_NAME }} - ssh_public_signing_key: ${{ secrets.TEK_OPENSOURCE_SSH_SIGNING_KEY_PUBLIC }} - ssh_private_signing_key: ${{ secrets.TEK_OPENSOURCE_SSH_SIGNING_KEY_PRIVATE }} - outputs: - built-version: ${{ steps.release.outputs.version }} - pypi-build: - name: Build 
package - needs: [print-inputs, pypi-version] - if: github.repository == needs.print-inputs.outputs.repo-name && github.ref == - 'refs/heads/main' - runs-on: ubuntu-latest - permissions: id-token: write attestations: write - steps: - - uses: actions/checkout@v4 - with: - ref: main # Make sure to check out the latest commit on main, not the original commit that triggered the workflow - fetch-depth: 0 - - name: Build package - uses: hynek/build-and-inspect-python-package@v2.8.0 - with: - attest-build-provenance-github: 'true' - upload-testpypi: - name: Upload package to TestPyPI - needs: [print-inputs, pypi-build] - if: github.repository == needs.print-inputs.outputs.repo-name && github.ref == - 'refs/heads/main' - runs-on: ubuntu-latest - environment: package-testpypi - permissions: - id-token: write - steps: - - name: Download built packages - uses: actions/download-artifact@v4 - with: - name: Packages - path: dist - - name: Upload package to Test PyPI - uses: pypa/gh-action-pypi-publish@v1.9.0 - with: - repository-url: https://test.pypi.org/legacy/ - upload-pypi: - name: Upload package to PyPI - needs: [print-inputs, upload-testpypi] - if: github.repository == needs.print-inputs.outputs.repo-name && github.ref == - 'refs/heads/main' - runs-on: ubuntu-latest - environment: package-release - permissions: - id-token: write - steps: - - name: Download built packages - uses: actions/download-artifact@v4 - with: - name: Packages - path: dist - - name: Upload package to PyPI - uses: pypa/gh-action-pypi-publish@v1.9.0 - upload-github: - name: Upload package to GitHub Release - needs: [print-inputs, upload-pypi] - if: github.repository == needs.print-inputs.outputs.repo-name && github.ref == - 'refs/heads/main' - runs-on: ubuntu-latest - permissions: - id-token: write - contents: write - steps: - - uses: actions/checkout@v4 - with: - ref: main # Make sure to check out the latest commit on main, not the original commit that triggered the workflow - fetch-depth: 0 - - name: Download built packages - uses: actions/download-artifact@v4 - with: - name: Packages - path: dist - - name: Publish package distributions to GitHub Releases - uses: python-semantic-release/upload-to-gh-release@main - with: - root_options: -v --strict - github_token: ${{ secrets.GITHUB_TOKEN }} - pypi-install: - name: Install package - needs: - - print-inputs - - pypi-version - - pypi-build - - upload-testpypi - - upload-pypi - - upload-github - if: github.repository == needs.print-inputs.outputs.repo-name && github.ref == - 'refs/heads/main' - runs-on: ${{ matrix.platform }} - strategy: - fail-fast: false - matrix: - platform: [ubuntu-latest, windows-latest, macos-latest] - python-version: ['3.8', '3.9', '3.10', '3.11', '3.12'] # when updating this, make sure to update all workflows that use this strategy - index_urls: - - '' - - ' --index-url=https://test.pypi.org/simple/ --extra-index-url=https://pypi.org/simple' - steps: - - name: Set up Python - uses: actions/setup-python@v5 - with: - python-version: ${{ matrix.python-version }} - check-latest: true - - name: Test installing package - # A retry is used to allow for some downtime before the package is installable - uses: nick-fields/retry@v3 - with: - timeout_minutes: 10 - max_attempts: 5 - retry_wait_seconds: 30 - warning_on_retry: false - command: pip install${{ matrix.index_urls }} "${{ env.PACKAGE_NAME }}==${{ - needs.pypi-version.outputs.built-version }}" + secrets: + checkout-token: ${{ secrets.TEK_OPENSOURCE_TOKEN }} + ssh-signing-key-private: ${{ 
secrets.TEK_OPENSOURCE_SSH_SIGNING_KEY_PRIVATE }} + ssh-signing-key-public: ${{ secrets.TEK_OPENSOURCE_SSH_SIGNING_KEY_PUBLIC }} diff --git a/.github/workflows/package-testpypi.yml b/.github/workflows/package-testpypi.yml index 294cc597..a806b25d 100644 --- a/.github/workflows/package-testpypi.yml +++ b/.github/workflows/package-testpypi.yml @@ -1,100 +1,17 @@ --- -# Upload to Test PyPI on every push to main name: Publish to TestPyPI on: push: branches: [main] - workflow_call: - inputs: - package-name: - description: The name of the package to use to gate uploads. - required: true - type: string concurrency: group: pypi -env: - REPO_NAME: tektronix/${{ inputs.package-name || 'tm_devices' }} - PACKAGE_NAME: ${{ inputs.package-name || 'tm_devices' }} jobs: - job-variables: - name: Set variables for other jobs to use - runs-on: ubuntu-latest - steps: - - name: Set variables - id: variables - run: echo "repo-name=$REPO_NAME" >> $GITHUB_OUTPUT - outputs: - repo-name: ${{ steps.variables.outputs.repo-name }} - test-pypi-build: - name: Build package with unique version for test.pypi.org - needs: [job-variables] - if: github.repository == needs.job-variables.outputs.repo-name - runs-on: ubuntu-latest + package-testpypi: + uses: tektronix/python-package-ci-cd/.github/workflows/_reusable-package-testpypi.yml@v1.0.0 + with: + package-name: tm_devices + repo-name: tektronix/tm_devices permissions: + contents: read id-token: write attestations: write - steps: - - uses: actions/checkout@v4 - with: - fetch-depth: 0 - - name: Set up Python - uses: actions/setup-python@v5 - with: - python-version: x - check-latest: true - - name: Install workflow dependencies - run: pip install -r scripts/requirements.txt - - name: Create unique package version - id: create-version - run: | - CURRENT_VERSION=$(python scripts/pypi_latest_version.py --package="$PACKAGE_NAME" --index=test.pypi) - echo CURRENT_VERSION: $CURRENT_VERSION - NEW_VERSION=$(python scripts/create_post_version_for_testpypi.py --version=$CURRENT_VERSION) - echo NEW_VERSION: $NEW_VERSION - python scripts/project_version.py --set-version=$NEW_VERSION - echo "NEW_VERSION=$NEW_VERSION" >> $GITHUB_OUTPUT - - name: Build package - uses: hynek/build-and-inspect-python-package@v2.8.0 - with: - attest-build-provenance-github: 'true' - outputs: - built-version: ${{ steps.create-version.outputs.NEW_VERSION }} - test-pypi-upload: - name: Upload package to test.pypi.org - needs: [job-variables, test-pypi-build] - if: github.repository == needs.job-variables.outputs.repo-name - runs-on: ubuntu-latest - environment: package-testpypi - permissions: - id-token: write - steps: - - name: Download built packages - uses: actions/download-artifact@v4 - with: - name: Packages - path: dist - - name: Upload package to Test PyPI - uses: pypa/gh-action-pypi-publish@v1.9.0 - with: - repository-url: https://test.pypi.org/legacy/ - test-pypi-install: - name: Install package from test.pypi.org - needs: [job-variables, test-pypi-build, test-pypi-upload] - if: github.repository == needs.job-variables.outputs.repo-name - runs-on: ubuntu-latest - steps: - - name: Set up Python - uses: actions/setup-python@v5 - with: - python-version: x - check-latest: true - - name: Test installing from test.pypi.org - # A retry is used to allow for some downtime before the package is installable - uses: nick-fields/retry@v3 - with: - timeout_minutes: 10 - max_attempts: 5 - retry_wait_seconds: 30 - warning_on_retry: false - command: pip install --index-url=https://test.pypi.org/simple/ 
--extra-index-url=https://pypi.org/simple - "$PACKAGE_NAME==${{ needs.test-pypi-build.outputs.built-version }}" diff --git a/.github/workflows/publish-api-comparison.yml b/.github/workflows/publish-api-comparison.yml index 12b5c062..13de9bcb 100644 --- a/.github/workflows/publish-api-comparison.yml +++ b/.github/workflows/publish-api-comparison.yml @@ -4,83 +4,9 @@ on: workflow_run: workflows: [Check Public API for Breaking Changes] types: [completed] - workflow_call: jobs: - publish-test-results: - runs-on: ubuntu-latest - if: ${{ github.event.workflow_run.event == 'pull_request' && !contains(fromJSON('["skipped", "cancelled", "failed"]'), github.event.workflow_run.conclusion) }} + publish-api-comparison: + uses: tektronix/python-package-ci-cd/.github/workflows/_reusable-publish-api-comparison.yml@v1.0.0 permissions: checks: write pull-requests: write - steps: - - name: Download and Extract Artifacts - uses: dawidd6/action-download-artifact@v6 - with: - run_id: ${{ github.event.workflow_run.id }} - name: breaking_changes - path: artifacts - - name: Check for breaking changes - run: | - if grep -Pzl '\n```\n```' artifacts/breaking_changes.md; then - echo "BREAKING_CHANGES=false" >> $GITHUB_ENV - else - echo "BREAKING_CHANGES=true" >> $GITHUB_ENV - fi - - name: Fetch PR number - id: pr - uses: actions/github-script@v7 - with: - script: | - const maxAttempts = 5; - let attempt = 0; - let pullRequestNumber; - while (attempt < maxAttempts) { - try { - const response = await github.rest.search.issuesAndPullRequests({ - q: 'repo:${{ github.repository }} is:pr sha:${{ github.event.workflow_run.head_sha }}', - per_page: 1, - }); - const items = response.data.items; - if (items.length < 1) { - throw new Error('No PRs found'); - } - pullRequestNumber = items[0].number; - console.info("Pull request number is", pullRequestNumber); - break; // Exit loop on success - } catch (error) { - console.error(`Attempt ${attempt + 1} failed:`, error.message); - if (attempt < maxAttempts - 1) { // Check if not last attempt - console.log(`Waiting for 2 minutes before retrying...`); - await new Promise(resolve => setTimeout(resolve, 120000)); // Wait for 2 minutes - } - } - attempt++; - } - if (!pullRequestNumber) { - core.setFailed("Failed to fetch PR number after 5 attempts"); - } - return pullRequestNumber; - - name: Publish API Breaking Changes Check Results - uses: marocchino/sticky-pull-request-comment@v2 - if: ${{ env.BREAKING_CHANGES == 'true' }} - with: - header: breaking-api-changes - number: ${{ steps.pr.outputs.result }} - recreate: true - path: artifacts/breaking_changes.md - - name: Add workflow link to comment - if: ${{ env.BREAKING_CHANGES == 'true' }} - uses: marocchino/sticky-pull-request-comment@v2 - with: - header: breaking-api-changes - number: ${{ steps.pr.outputs.result }} - append: true - message: |- -
-            Link to workflow run
- - name: Delete comment if no breaking changes are found - if: ${{ env.BREAKING_CHANGES == 'false' }} - uses: marocchino/sticky-pull-request-comment@v2 - with: - header: breaking-api-changes - number: ${{ steps.pr.outputs.result }} - delete: true diff --git a/.github/workflows/publish-test-results.yml b/.github/workflows/publish-test-results.yml index 41c5fbe3..e8413528 100644 --- a/.github/workflows/publish-test-results.yml +++ b/.github/workflows/publish-test-results.yml @@ -4,71 +4,11 @@ on: workflow_run: workflows: [Test code] types: [completed] - workflow_call: jobs: publish-test-results: - runs-on: ubuntu-latest - if: ${{ github.event.workflow_run.event == 'pull_request' && !contains(fromJSON('["skipped", "cancelled"]'), github.event.workflow_run.conclusion) }} - strategy: - fail-fast: false - matrix: - os_name: [ubuntu, windows, macos] + uses: tektronix/python-package-ci-cd/.github/workflows/_reusable-publish-test-results.yml@v1.0.0 + with: + operating-systems-array: '["ubuntu", "windows", "macos"]' permissions: checks: write pull-requests: write - steps: - - name: Download and Extract Artifacts - uses: dawidd6/action-download-artifact@v6 - with: - run_id: ${{ github.event.workflow_run.id }} - name: artifact_${{ matrix.os_name }}_tests - path: artifacts - - name: Fetch PR number - id: pr - uses: actions/github-script@v7 - with: - script: | - const maxAttempts = 5; - let attempt = 0; - let pullRequestNumber; - while (attempt < maxAttempts) { - try { - const response = await github.rest.search.issuesAndPullRequests({ - q: 'repo:${{ github.repository }} is:pr sha:${{ github.event.workflow_run.head_sha }}', - per_page: 1, - }); - const items = response.data.items; - if (items.length < 1) { - throw new Error('No PRs found'); - } - pullRequestNumber = items[0].number; - console.info("Pull request number is", pullRequestNumber); - break; // Exit loop on success - } catch (error) { - console.error(`Attempt ${attempt + 1} failed:`, error.message); - if (attempt < maxAttempts - 1) { // Check if not last attempt - console.log(`Waiting for 2 minutes before retrying...`); - await new Promise(resolve => setTimeout(resolve, 120000)); // Wait for 2 minutes - } - } - attempt++; - } - if (!pullRequestNumber) { - core.setFailed("Failed to fetch PR number after 5 attempts"); - } - return pullRequestNumber; - - name: Publish Test Results - uses: marocchino/sticky-pull-request-comment@v2 - with: - header: test-results-${{ matrix.os_name }} - number: ${{ steps.pr.outputs.result }} - recreate: true - path: artifacts/.results_tests/github_report.md - - name: Add workflow link to comment - uses: marocchino/sticky-pull-request-comment@v2 - with: - header: test-results-${{ matrix.os_name }} - number: ${{ steps.pr.outputs.result }} - append: true - message: |- -
-            Link to workflow run
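The `operating-systems-array` and `python-versions-array` values passed to these reusable workflows are JSON-encoded strings rather than YAML lists because `workflow_call` inputs only accept scalar types (string, number, boolean). Inside the called workflow they are typically expanded back into a job matrix with `fromJSON`. A minimal sketch of that pattern follows; the workflow, input, and job names here are illustrative only, not taken from tektronix/python-package-ci-cd:

# Sketch: expanding a JSON-string input into a job matrix inside a
# reusable workflow. Names are hypothetical.
on:
  workflow_call:
    inputs:
      operating-systems-array:
        description: A JSON-encoded array of operating system names.
        required: true
        type: string  # workflow_call inputs cannot be lists, so a JSON string is passed
jobs:
  example-matrix-job:
    strategy:
      matrix:
        os-name: ${{ fromJSON(inputs.operating-systems-array) }}  # '["ubuntu", "windows", "macos"]' -> 3 matrix jobs
    runs-on: ${{ matrix.os-name }}-latest
    steps:
      - run: echo "Running on ${{ matrix.os-name }}"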
diff --git a/.github/workflows/sbom-scan.yml b/.github/workflows/sbom-scan.yml index 0fc093f5..bbd9a355 100644 --- a/.github/workflows/sbom-scan.yml +++ b/.github/workflows/sbom-scan.yml @@ -7,48 +7,11 @@ on: branches: [main] release: types: [published] - workflow_call: jobs: - create-and-scan-sbom: - runs-on: ubuntu-latest + sbom-scan: + uses: tektronix/python-package-ci-cd/.github/workflows/_reusable-sbom-scan.yml@v1.0.0 permissions: security-events: write contents: write id-token: write attestations: write - steps: - - uses: actions/checkout@v4 - - uses: actions/setup-python@v5 - with: - python-version: x # any version - - name: Create lockfile - run: | - pip install poetry - poetry lock - - name: Create SBOM - uses: anchore/sbom-action@v0 - with: - format: spdx-json - output-file: ${{ github.event.repository.name }}-sbom.spdx.json - - uses: actions/attest-build-provenance@v1 - if: ${{ !(github.event.pull_request.head.repo.fork || github.event.workflow_call.pull_request.head.repo.fork) && github.actor != 'dependabot[bot]' }} - with: - subject-path: ${{ github.event.repository.name }}-sbom.spdx.json - - name: Scan SBOM - uses: anchore/scan-action@v4 - id: scan - with: - sbom: ${{ github.event.repository.name }}-sbom.spdx.json - fail-build: true - severity-cutoff: low - - name: Upload SBOM scan SARIF report as a workflow artifact - uses: actions/upload-artifact@v4 - with: - name: sarif_artifact - path: ${{ steps.scan.outputs.sarif }} - if-no-files-found: error - - name: Upload SBOM scan SARIF report to GitHub UI Security tab - if: ${{ github.event_name != 'pull_request' }} - uses: github/codeql-action/upload-sarif@v3 - with: - sarif_file: ${{ steps.scan.outputs.sarif }} diff --git a/.github/workflows/tek-repo-lint.yml b/.github/workflows/tek-repo-lint.yml deleted file mode 100644 index 3f121da4..00000000 --- a/.github/workflows/tek-repo-lint.yml +++ /dev/null @@ -1,47 +0,0 @@ ---- -name: tek-repo-lint -on: - push: - branches: [main] - pull_request: - branches: [main] - workflow_dispatch: - workflow_call: -# IMPORTANT: Any new jobs need to be added to the check-repo-lint-passed job to ensure they correctly gate code changes -jobs: - enforce-community-standards: - runs-on: ubuntu-latest - strategy: - fail-fast: false - matrix: - filename: - - .github/CODEOWNERS - - README.@(md|rst) - - CODE_OF_CONDUCT.@(md|rst) - - CONTRIBUTING.@(md|rst) - - LICENSE.@(md|rst) - - SECURITY.@(md|rst) - - .github/ISSUE_TEMPLATE/bug_report.yml - - .github/ISSUE_TEMPLATE/feature_request.yml - - .github/PULL_REQUEST_TEMPLATE.md - - .github/dependabot.yml - - .github/workflows/codeql-analysis.yml - steps: - - uses: actions/checkout@v4 - - name: Ensure ${{ matrix.filename }} exists - uses: andstor/file-existence-action@v3 - with: - files: ${{ matrix.filename }} - ignore_case: false - follow_symbolic_links: false - fail: true # Set the step to fail if the file doesn't exist - # Check that all jobs passed - check-repo-lint-passed: - if: ${{ !cancelled() }} - needs: [enforce-community-standards] - runs-on: ubuntu-latest - steps: - - name: Decide whether the needed jobs succeeded or failed - uses: re-actors/alls-green@release/v1 - with: - jobs: ${{ toJSON(needs) }} diff --git a/.github/workflows/test-code.yml b/.github/workflows/test-code.yml index d3826123..748ae64d 100644 --- a/.github/workflows/test-code.yml +++ b/.github/workflows/test-code.yml @@ -5,118 +5,16 @@ on: branches: [main] pull_request: branches: [main] - workflow_call: - inputs: - repo-name: - description: The name of the repository to use to gate 
Codecov uploads. - required: true - type: string -# Cancel running jobs for the same workflow and branch. concurrency: group: ${{ github.workflow }}-${{ github.ref }} cancel-in-progress: ${{ github.ref != 'refs/heads/main' }} -# IMPORTANT: Any new jobs need to be added to the check-tests-passed job to ensure they correctly gate code changes jobs: - # Basic testing & linting - test-general: - runs-on: ${{ matrix.platform }} - strategy: - fail-fast: false - matrix: - platform: [ubuntu-latest, windows-latest, macos-latest] - python-version: ['3.8', '3.9', '3.10', '3.11', '3.12'] # when updating this, make sure to update all workflows that use this strategy - steps: - - uses: actions/checkout@v4 - - uses: actions/setup-node@v4 - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v5 - with: - python-version: ${{ matrix.python-version }} - check-latest: true - - name: Install dependencies - run: python -m pip install tox tox-gh-actions - - name: Run tox - run: tox -v - - uses: actions/upload-artifact@v4 - if: ${{ !cancelled() }} - with: - name: artifact_${{ matrix.platform }}_${{ matrix.python-version }}_tests_and_linting - path: | - .results_*/** - .coverage* - # Quick testing with coverage (no linting) - test-fast: - runs-on: ${{ matrix.os_name }}-latest - env: - REPO_NAME: tektronix/${{ inputs.repo-name || 'tm_devices' }} - pytest_report_title: Test Results (${{ matrix.os_name }}) - strategy: - fail-fast: false - matrix: - os_name: [ubuntu, windows, macos] - steps: - - uses: actions/checkout@v4 - - if: ${{ inputs.node-version }} - uses: actions/setup-node@v4 - with: - node-version: ${{ inputs.node-version }} - - if: ${{ inputs.node-version }} - name: Install non-python documentation dependencies - run: | - npm install -g @mermaid-js/mermaid-cli - sudo apt install --no-install-recommends --assume-yes graphviz - - name: Set up Python - uses: actions/setup-python@v5 - with: - python-version: x # any version - check-latest: true - - name: Install tox - run: python -m pip install tox - - name: Test - run: tox -ve tests - - uses: actions/upload-artifact@v4 - if: ${{ !cancelled() }} - with: - name: artifact_${{ matrix.os_name }}_tests - path: | - .results_*/** - .coverage* - - name: Upload coverage to Codecov - uses: codecov/codecov-action@v4.5.0 - if: ${{ github.repository == env.REPO_NAME && !cancelled() }} - with: - token: ${{ secrets.CODECOV_TOKEN }} - files: ./.coverage_tests.xml - name: codecov-${{ matrix.os_name }} - fail_ci_if_error: true - verbose: true - create-job-summary: - name: Test Results - if: ${{ !cancelled() }} - needs: test-fast - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - - uses: actions/download-artifact@v4 - with: - path: artifacts - - name: Generate Summary - uses: phoenix-actions/test-reporting@v15 - with: - name: Test Results - only-summary: false - output-to: step-summary - path: artifacts/**/.results_tests/results.xml - reporter: java-junit - fail-on-error: false - max-annotations: 0 - # Check that all jobs passed - check-tests-passed: - if: ${{ !cancelled() }} - needs: [test-general, test-fast, create-job-summary] - runs-on: ubuntu-latest - steps: - - name: Decide whether the needed jobs succeeded or failed - uses: re-actors/alls-green@release/v1 - with: - jobs: ${{ toJSON(needs) }} + test-code: + uses: tektronix/python-package-ci-cd/.github/workflows/_reusable-test-code.yml@v1.0.0 + with: + repo-name: tektronix/tm_devices + operating-systems-array: '["ubuntu", "windows", "macos"]' + python-versions-array: '["3.8", "3.9", 
"3.10", "3.11", "3.12"]' # when updating this, make sure to update all workflows that use this strategy + upload-to-codecov: true + secrets: + codecov-token: ${{ secrets.CODECOV_TOKEN }} diff --git a/.github/workflows/test-docs.yml b/.github/workflows/test-docs.yml index 038e5ba2..da2fd59e 100644 --- a/.github/workflows/test-docs.yml +++ b/.github/workflows/test-docs.yml @@ -5,67 +5,13 @@ on: branches: [main] pull_request: branches: [main] - workflow_call: # In order to use this workflow, the caller must have a tox configuration that includes both a "docs" and "doctests" environment. - inputs: - node-version: - description: The version of Node.js to install. This workflow also installs - mermaid-cli via npm and graphviz via apt. - required: true - type: number - python-version: - description: The version of Python to use for this workflow. - required: true - type: string -# Cancel running jobs for the same workflow and branch. concurrency: group: ${{ github.workflow }}-${{ github.ref }} cancel-in-progress: ${{ github.ref != 'refs/heads/main' }} -# IMPORTANT: Any new jobs need to be added to the check-docs-passed job to ensure they correctly gate code changes jobs: test-docs: - runs-on: ubuntu-latest - strategy: - fail-fast: false - matrix: - tox_env: [docs, doctests] - env: - NODE_VERSION: ${{ inputs.node-version || 20 }} # The node version needs to stay in sync with .readthedocs.yml - PYTHON_VERSION: ${{ inputs.node-version || '3.11' }} # This needs to stay in sync with .readthedocs.yml and the tox config in pyproject.toml - steps: - - uses: actions/checkout@v4 - - uses: actions/setup-node@v4 - with: - node-version: ${{ env.NODE_VERSION }} - - name: Install non-python documentation dependencies - run: | - npm install --global @mermaid-js/mermaid-cli - sudo apt install --no-install-recommends --assume-yes graphviz - - name: Set up Python - uses: actions/setup-python@v5 - with: - python-version: ${{ env.PYTHON_VERSION }} - - name: Install tox - run: python -m pip install tox - - name: Test - run: tox -ve ${{ matrix.tox_env }} - - uses: thedoctor0/zip-release@0.7.6 - if: ${{ !cancelled() }} - with: - type: zip - filename: ${{ matrix.tox_env }}_results.zip - path: .results_${{ matrix.tox_env }}/ - - uses: actions/upload-artifact@v4 - if: ${{ !cancelled() }} - with: - name: artifact_${{ matrix.tox_env }} - path: ${{ matrix.tox_env }}_results.zip - # Check that all jobs passed - check-docs-passed: - if: ${{ !cancelled() }} - needs: [test-docs] - runs-on: ubuntu-latest - steps: - - name: Decide whether the needed jobs succeeded or failed - uses: re-actors/alls-green@release/v1 - with: - jobs: ${{ toJSON(needs) }} + uses: tektronix/python-package-ci-cd/.github/workflows/_reusable-test-docs.yml@v1.0.0 + with: + node-version: 20 # The node version needs to stay in sync with .readthedocs.yml + python-version: '3.11' # This needs to stay in sync with .readthedocs.yml and the tox config in pyproject.toml + tox-env-array: '["docs", "doctests"]' diff --git a/.github/workflows/update-python-and-pre-commit-dependencies.yml b/.github/workflows/update-python-and-pre-commit-dependencies.yml index a68551ea..1cfb7d92 100644 --- a/.github/workflows/update-python-and-pre-commit-dependencies.yml +++ b/.github/workflows/update-python-and-pre-commit-dependencies.yml @@ -3,49 +3,20 @@ name: Update python linting dependencies in-sync with pre-commit on: pull_request: branches: [main] - workflow_call: - inputs: - pre-commit-hook-skip-list: - description: A comma-separated list of pre-commit hooks to skip during this - 
job. - required: false - type: string jobs: - update-python-and-pre-commit-deps: - name: Update python linters and pre-commit dependencies - runs-on: ubuntu-latest - if: ${{ github.actor == 'dependabot[bot]' && contains(github.head_ref, '/pip/') }} + update-python-and-pre-commit-dependencies: + uses: tektronix/python-package-ci-cd/.github/workflows/_reusable-update-python-and-pre-commit-dependencies.yml@v1.0.0 + with: + commit-user-name: ${{ vars.TEK_OPENSOURCE_NAME }} + commit-user-email: ${{ vars.TEK_OPENSOURCE_EMAIL }} + dependency-dict: '{"dev": ["pylint", "pyright"], "tests": ["ruff"]}' + update-pre-commit: true + run-pre-commit: true + pre-commit-hook-skip-list: pylint,pyright,pyright-verifytypes,pyroma,poetry-audit + export-dependency-groups: docs,tests permissions: contents: write - steps: - - uses: actions/checkout@v4 - with: - fetch-depth: 0 - ref: ${{ github.head_ref }} - token: ${{ secrets.TEK_OPENSOURCE_TOKEN }} - - name: Set up Python - uses: actions/setup-python@v5 - with: - python-version: x # any version - check-latest: true - - name: Install workflow dependencies - run: pip install -r scripts/requirements.txt - - uses: crazy-max/ghaction-import-gpg@v6 - with: - gpg_private_key: ${{ secrets.TEK_OPENSOURCE_GPG_SIGNING_KEY_PRIVATE }} - passphrase: ${{ secrets.TEK_OPENSOURCE_GPG_SIGNING_KEY_PASSPHRASE }} - git_user_signingkey: true - git_commit_gpgsign: true - - name: Run updater script - run: python scripts/update_development_dependencies.py --no-install - - name: Run pre-commit - continue-on-error: true - env: - SKIP: ${{ inputs.pre-commit-hook-skip-list || 'pylint,pyright,pyright-verifytypes,pyroma,poetry-audit' }} - run: python -m pre_commit run --all - - uses: stefanzweifel/git-auto-commit-action@v5 - with: - commit_message: 'ci: Update python linters and pre-commit dependencies.' 
- commit_user_name: ${{ vars.TEK_OPENSOURCE_NAME }} - commit_user_email: ${{ vars.TEK_OPENSOURCE_EMAIL }} - commit_author: ${{ vars.TEK_OPENSOURCE_NAME }} <${{ vars.TEK_OPENSOURCE_EMAIL }}> + secrets: + checkout-token: ${{ secrets.TEK_OPENSOURCE_TOKEN }} + gpg-signing-key-private: ${{ secrets.TEK_OPENSOURCE_GPG_SIGNING_KEY_PRIVATE }} + gpg-signing-key-passphrase: ${{ secrets.TEK_OPENSOURCE_GPG_SIGNING_KEY_PASSPHRASE }} diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index a557b518..4d66886d 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -39,7 +39,7 @@ repos: - id: remove-tabs - id: forbid-tabs - repo: https://github.com/python-jsonschema/check-jsonschema - rev: e2dde74d0702d15f4f43e4f4fb93e301b4bc1e30 # frozen: 0.29.1 + rev: 5c70e3d884fdbe99af42f2714a444e39f321498d # frozen: 0.29.2 hooks: - id: check-readthedocs - id: check-dependabot @@ -59,11 +59,9 @@ repos: additional_dependencies: [black==24.4.2] # This may need to be updated/removed in the future once ruff supports formatting python code blocks in markdown args: [--line-length=100] - repo: https://github.com/lyz-code/yamlfix - rev: 47039c9bf8039e81f092c9777a1bc8be32fb7870 # frozen: 1.16.0 + rev: 8072181c0f2eab9f2dd8db2eb3b9556d7cd0bd74 # frozen: 1.17.0 hooks: - id: yamlfix - additional_dependencies: - - maison<2.0.0 # TODO: get this working # - repo: https://github.com/motet-a/jinjalint # rev: "0.5" @@ -141,7 +139,7 @@ repos: always_run: true args: [audit, --json, --ignore-code=CVE-2019-8341] - repo: https://github.com/astral-sh/ruff-pre-commit - rev: 8b5112a3b2ad121439a2092f8ff548c0d80f2514 # frozen: v0.6.1 + rev: 24d039e647a08707e6cb31e75e01844eeff925e7 # frozen: v0.6.2 hooks: - id: ruff args: [--fix, --exit-non-zero-on-fix] diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index fc5bf474..e386dde7 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -180,26 +180,6 @@ Ready to contribute? Here's how to set up `tm_devices` for local development. [Conventional Commits](https://www.conventionalcommits.org/en/v1.0.0/) website for more details on this format. -## Updating requirements and commit hooks - -To update the required python packages and commit hooks run the following -commands: - -!!! note - Always remember to activate the virtual environment before attempting to run tests or other code. - - ```console - # Linux - source .venv/bin/activate - - # Windows - .venv\Scripts\activate.bat - ``` - -```console -python scripts/update_development_dependencies.py -``` - ## Pull Request Guidelines Before you submit a pull request, check that it meets these guidelines: diff --git a/README.md b/README.md index 8d4c8919..0fb7d296 100644 --- a/README.md +++ b/README.md @@ -81,59 +81,59 @@ _Device Support Levels_ | Type | Series/Model | Command
<br/>Type | Basic<br/>Control | Python API<br/>Validation<br/>
Status | | ------ | ------------------------ | --------------- | ---------------- | ---------------------------------- | -| AFGs | **AFG3000** | PI | ✅ | | -| | **AFG31xxx** | PI | ✅ | | -| AWGs | **AWG5000** | PI | ✅ | | -| | **AWG5200** | PI | ✅ | | -| | **AWG7000** | PI | ✅ | ✅ | -| | **AWG70000** | PI | ✅ | ✅ | -| Scopes | **2 Series MSO** | PI | ✅ | ✅ | -| | **3 Series MDO** | PI | ✅ | | -| | **4 Series MSO** | PI | ✅ | ✅ | -| | **4 Series B MSO** | PI | ✅ | ✅ | -| | **5 Series MSO** | PI | ✅ | ✅ | -| | **5 Series B MSO** | PI | ✅ | ✅ | -| | **5 Series MSO (LP)** | PI | ✅ | ✅ | -| | **6 Series MSO** | PI | ✅ | ✅ | -| | **6 Series B MSO** | PI | ✅ | ✅ | -| | **6 Series LPD** | PI | ✅ | ✅ | -| | **MSO2000/B** | PI | ✅ | | -| | **DPO2000/B** | PI | ✅ | | -| | **MDO3000** | PI | ✅ | ✅ | -| | **MDO4000/B/C** | PI | ✅ | ✅ | -| | **MSO4000/B** | PI | ✅ | ✅ | -| | **DPO4000/B** | PI | ✅ | ✅ | -| | **MSO5000/B** | PI | ✅ | ✅ | -| | **DPO5000/B** | PI | ✅ | ✅ | -| | **DPO7000/C** | PI | ✅ | ✅ | -| | **DPO70000/C/D/DX/SX** | PI | ✅ | ✅ | -| | **DSA70000/C/D** | PI | ✅ | ✅ | -| | **MSO70000/C/DX** | PI | ✅ | ✅ | -| | **TSOVu** | PI | ✅ | | -| | **TekScope** | PI | ✅ | ✅ | -| PSUs | **2200** | PI | ✅ | | -| | **2220** | PI | ✅ | | -| | **2230** | PI | ✅ | | -| | **2231** | PI | ✅ | | -| | **2280S** | PI | ✅ | | -| | **2281S** | PI | ✅ | | -| SMUs | **24xx Standard** | PI | ✅ | | -| | **24xx Interactive** | TSP | ✅ | ✅ | -| | **26xxB** | TSP | ✅ | 🚧 | -| | **2636B** | TSP | ✅ | ✅ | -| | **Model 2601B-PULSE** | TSP | ✅ | | -| | **Model 2606B** | TSP | ✅ | 🚧 | -| | **2651A** | TSP | ✅ | ✅ | -| | **2657A** | TSP | ✅ | | -| | **6430 (electrometer)** | PI | ✅ | | -| | **6514 (electrometer)** | PI | ✅ | | -| | **6517B (electrometer)** | PI | ✅ | | -| MTs | **TMT4** | API | ✅ | | -| DMMs | **DMM6500** | TSP | ✅ | | -| | **DMM7510** | TSP | ✅ | | -| | **DMM7512** | TSP | ✅ | | -| DAQs | **DAQ6510** | TSP | ✅ | | -| SSs | **3706A** | TSP | ✅ | | +| AFGs | **AFG3000** | PI | ✅ | | +| | **AFG31xxx** | PI | ✅ | | +| AWGs | **AWG5000** | PI | ✅ | | +| | **AWG5200** | PI | ✅ | | +| | **AWG7000** | PI | ✅ | ✅ | +| | **AWG70000** | PI | ✅ | ✅ | +| Scopes | **2 Series MSO** | PI | ✅ | ✅ | +| | **3 Series MDO** | PI | ✅ | | +| | **4 Series MSO** | PI | ✅ | ✅ | +| | **4 Series B MSO** | PI | ✅ | ✅ | +| | **5 Series MSO** | PI | ✅ | ✅ | +| | **5 Series B MSO** | PI | ✅ | ✅ | +| | **5 Series MSO (LP)** | PI | ✅ | ✅ | +| | **6 Series MSO** | PI | ✅ | ✅ | +| | **6 Series B MSO** | PI | ✅ | ✅ | +| | **6 Series LPD** | PI | ✅ | ✅ | +| | **MSO2000/B** | PI | ✅ | | +| | **DPO2000/B** | PI | ✅ | | +| | **MDO3000** | PI | ✅ | ✅ | +| | **MDO4000/B/C** | PI | ✅ | ✅ | +| | **MSO4000/B** | PI | ✅ | ✅ | +| | **DPO4000/B** | PI | ✅ | ✅ | +| | **MSO5000/B** | PI | ✅ | ✅ | +| | **DPO5000/B** | PI | ✅ | ✅ | +| | **DPO7000/C** | PI | ✅ | ✅ | +| | **DPO70000/C/D/DX/SX** | PI | ✅ | ✅ | +| | **DSA70000/C/D** | PI | ✅ | ✅ | +| | **MSO70000/C/DX** | PI | ✅ | ✅ | +| | **TSOVu** | PI | ✅ | | +| | **TekScope** | PI | ✅ | ✅ | +| PSUs | **2200** | PI | ✅ | | +| | **2220** | PI | ✅ | | +| | **2230** | PI | ✅ | | +| | **2231** | PI | ✅ | | +| | **2280S** | PI | ✅ | | +| | **2281S** | PI | ✅ | | +| SMUs | **24xx Standard** | PI | ✅ | | +| | **24xx Interactive** | TSP | ✅ | ✅ | +| | **26xxB** | TSP | ✅ | 🚧 | +| | **2636B** | TSP | ✅ | ✅ | +| | **Model 2601B-PULSE** | TSP | ✅ | | +| | **Model 2606B** | TSP | ✅ | 🚧 | +| | **2651A** | TSP | ✅ | ✅ | +| | **2657A** | TSP | ✅ | | +| | **6430 (electrometer)** | PI | ✅ | | +| | **6514 (electrometer)** | PI | 
✅ | | +| | **6517B (electrometer)** | PI | ✅ | | +| MTs | **TMT4** | API | ✅ | | +| DMMs | **DMM6500** | TSP | ✅ | | +| | **DMM7510** | TSP | ✅ | | +| | **DMM7512** | TSP | ✅ | | +| DAQs | **DAQ6510** | TSP | ✅ | | +| SSs | **3706A** | TSP | ✅ | | @@ -147,8 +147,8 @@ _Software Solution Support Levels_ | Software
<br/>Solution | Command<br/>Type | Basic<br/>Control | Python API<br/>Validation<br/>
Status | | -------------------- | --------------- | ---------------- | ---------------------------------- | -| DPOJET | PI | ✅ | ✅ | -| SourceXpress | PI | ✅ | ✅ | +| DPOJET | PI | ✅ | ✅ | +| SourceXpress | PI | ✅ | ✅ | diff --git a/pyproject.toml b/pyproject.toml index 8127aae6..ec1ebba1 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -92,15 +92,8 @@ urllib3 = "^2.0" zeroconf = "^0.132.2" [tool.poetry.group.dev.dependencies] -docformatter = {extras = ["tomli"], version = "1.7.5"} docutils = "^0.20" # TODO: remove this when the minimum Python version is >=3.9 docutils-stubs = "^0.0.22" -maison = "^1.4.3" # yamlfix is broken with v2.0+ -matplotlib = [ - {python = ">=3.9", version = "^3.8"}, - {python = "3.8", version = "^3.7"} -] -memory-profiler = "^0.61.0" nodeenv = "^1.9.1" pip = "^24.0" poetry = "^1.8.0" @@ -111,21 +104,15 @@ pre-commit = [ {python = ">=3.9", version = "^3.7"}, {python = "3.8", version = "^3.5"} ] -pyclean = "^3.0.0" pylint = "3.2.6" pyright = "1.1.376" pyroma = "^4.2" -python-semantic-release = "^9.6.0" -ruff = "0.6.1" -toml-sort = "^0.23.0" tox = "^4.0" tox-gh-actions = "^3.1.0" twine = "^5.0.0" types-python-dateutil = "^2.9" types-pyyaml = "^6.0" types-requests = "^2.31" -wheel = "^0.44" -yamlfix = "^1.16.0" [tool.poetry.group.docs.dependencies] black = "^24.4.2" @@ -161,7 +148,7 @@ pytest-env = "^1.1.3" pytest-github-report = "^0.0.1" pytest-html = "^4.1.1" pytest-order = "^1.2.1" -ruff = "0.6.1" +ruff = "0.6.2" tomli = "^2.0.1" [tool.poetry.scripts] @@ -440,8 +427,8 @@ passenv = PYRIGHT_PYTHON_GLOBAL_NODE # If set outside of tox, this will cause python-pyright to use nodeenv to install node rather than use the system node setenv = DOC_PYTHON_VERSION = python3.11 # Keep this in sync with .readthedocs.yml and any CI scripts - # Skip pre-commit checks that are not needed - SKIP = file-contents-sorter + # Skip pre-commit checks that are not needed (yamlfix should be removed from this list once Python 3.8 support is dropped) + SKIP = file-contents-sorter,yamlfix commands_pre = poetry install --no-root --without=main commands = diff --git a/python_semantic_release_templates/.release_notes.md.j2 b/python_semantic_release_templates/.release_notes.md.j2 index 86b17467..8d0767fe 100644 --- a/python_semantic_release_templates/.release_notes.md.j2 +++ b/python_semantic_release_templates/.release_notes.md.j2 @@ -1,7 +1,9 @@ {%- import ".macros.j2" as macros %} {%- call(output) macros.populate_variables() %} - {%- filter replace("Things to be included in the next release go here.", "# " + output[0] + " (" + output[1] + ")" + output[2])|replace("##", "#") %} - {%- include ".previous_release_notes_for_template.md" %} + {%- filter replace("## Unreleased", "# " + output[0] + " (" + output[1] + ")" + output[2])|replace("##", "#") %} + {%- filter replace("Things to be included in the next release go here.\n\n", "") %} + {%- include ".previous_release_notes_for_template.md" %} + {%- endfilter %} {%- endfilter %} {% endcall %} diff --git a/python_semantic_release_templates/CHANGELOG.md.j2 b/python_semantic_release_templates/CHANGELOG.md.j2 index 318ce170..73bf2c1e 100644 --- a/python_semantic_release_templates/CHANGELOG.md.j2 +++ b/python_semantic_release_templates/CHANGELOG.md.j2 @@ -1,7 +1,9 @@ {%- import ".macros.j2" as macros %} {%- call(output) macros.populate_variables() %} - {%- filter replace("Things to be included in the next release go here.", "Things to be included in the next release go here.\n\n---\n\n## " + output[0] + " (" + output[1] + ")" + output[2]) %} - {%- 
include ".previous_changelog_for_template.md" %} + {%- filter replace("## Unreleased", "## Unreleased\n\nThings to be included in the next release go here.\n\n---\n\n## " + output[0] + " (" + output[1] + ")" + output[2]) %} + {%- filter replace("Things to be included in the next release go here.\n\n", "") %} + {%- include ".previous_changelog_for_template.md" %} + {%- endfilter %} {%- endfilter %} {% endcall %} diff --git a/scripts/check_unreleased_changelog_items.py b/scripts/check_unreleased_changelog_items.py deleted file mode 100644 index e69d7617..00000000 --- a/scripts/check_unreleased_changelog_items.py +++ /dev/null @@ -1,70 +0,0 @@ -"""This script will check for unreleased entries in the CHANGELOG.md file. - -It will exit with a non-zero exit code if there are no unreleased entries. - -It will also copy the necessary files into the template directory to properly render the CHANGELOG -and Release Notes. -""" - -import pathlib -import re -import shutil - -CHANGELOG_FILEPATH = pathlib.Path(__file__).parent.parent / "CHANGELOG.md" -TEMPLATE_CHANGELOG_FILEPATH = ( - pathlib.Path(__file__).parent.parent - / "python_semantic_release_templates" - / ".previous_changelog_for_template.md" -) -TEMPLATE_RELEASE_NOTES_FILEPATH = ( - pathlib.Path(__file__).parent.parent - / "python_semantic_release_templates" - / ".previous_release_notes_for_template.md" -) - - -def main() -> None: - """Check for entries in the Unreleased section of the CHANGELOG.md file. - - Raises: - SystemExit: Indicates no new entries were found. - """ - release_notes_content = "" - found_entries = False - with CHANGELOG_FILEPATH.open(mode="r", encoding="utf-8") as changelog_file: - tracking_unreleased = False - tracking_entries = False - for line in changelog_file: - if line.startswith(("___", "---")): - tracking_unreleased = False - tracking_entries = False - if tracking_unreleased: - release_notes_content += line - if line.startswith("## Unreleased"): - tracking_unreleased = True - if tracking_unreleased and line.startswith( - ( - "### Added\n", - "### Changed\n", - "### Deprecated\n", - "### Removed\n", - "### Fixed\n", - "### Security\n", - ) - ): - tracking_entries = True - if tracking_entries and not found_entries: - found_entries = bool(re.match(r"^- \w+", line)) - - if not found_entries: - msg = f"No unreleased entries were found in {CHANGELOG_FILEPATH}." - raise SystemExit(msg) - - # Copy the files to the correct location - shutil.copy(CHANGELOG_FILEPATH, TEMPLATE_CHANGELOG_FILEPATH) - with TEMPLATE_RELEASE_NOTES_FILEPATH.open("w", encoding="utf-8") as template_release_notes: - template_release_notes.write(release_notes_content.strip() + "\n") - - -if __name__ == "__main__": - main() diff --git a/scripts/contributor_setup.py b/scripts/contributor_setup.py index 42d5ddae..eb16dbcf 100644 --- a/scripts/contributor_setup.py +++ b/scripts/contributor_setup.py @@ -5,90 +5,38 @@ from __future__ import annotations -import argparse import glob import os import platform import shlex -import shutil import subprocess import sys from pathlib import Path -from typing import Dict, List, Optional, Union RUNNING_ON_LINUX = platform.system().upper() != "WINDOWS" +RUNNING_IN_VIRTUALENV = sys.prefix != sys.base_prefix -def parse_arguments() -> argparse.Namespace: - """Parse the command line arguments. - - Returns: - The parsed Namespace. 
- """ - parser = argparse.ArgumentParser() - parser.add_argument( - "--reset", - action="store_true", - dest="reset", - help="Specify if the virtual environment should be completely reset", - ) - - return parser.parse_args() - - -def running_in_virtualenv() -> bool: - """Check if the current script is being executed in a virtual environment. - - Returns: - Boolean indicating if the script is running in a virtualenv - """ - return sys.prefix != sys.base_prefix - - -def create_virtual_environment( - virtual_env_dir: Union[str, os.PathLike[str]], reset_env: bool -) -> None: +def create_virtual_environment(virtual_env_dir: str | os.PathLike[str]) -> None: """Create a virtual environment. Args: virtual_env_dir: The directory where the virtual environment should be created - reset_env: Indicate if the virtual environment should be completely reset """ - virtual_env_dir = Path(virtual_env_dir) - added_newline = False - if ( - reset_env - and virtual_env_dir.exists() - and not sys.prefix.startswith(str(virtual_env_dir.resolve())) - and not running_in_virtualenv() - ): - if not added_newline: - added_newline = True - print() - print(f"Removing virtualenv located at '{virtual_env_dir}'") - shutil.rmtree(virtual_env_dir) - if not virtual_env_dir.exists() and not running_in_virtualenv(): - if not added_newline: - print() - print(f"Creating virtualenv located at '{virtual_env_dir}'") - _run_cmd_in_subprocess(f"{sys.executable} -m venv {virtual_env_dir}") - - -def _run_cmd_in_subprocess(command: str, env_dict: Optional[Dict[str, str]] = None) -> None: + print(f"\nCreating virtualenv located at '{virtual_env_dir}'") + _run_cmd_in_subprocess(f"{sys.executable} -m venv {virtual_env_dir} --clear") + + +def _run_cmd_in_subprocess(command: str) -> None: """Run the given command in a subprocess. Args: command: The command string to send. - env_dict: A mapping of environment variables to use in the subprocess. """ command = command.replace("\\", "/") - if RUNNING_ON_LINUX: - command_to_send: Union[str, List[str]] = shlex.split(command) - else: - command_to_send = command print(f"\nExecuting command: {command}") - subprocess.check_call(command_to_send, env=env_dict) # noqa: S603 + subprocess.check_call(shlex.split(command)) # noqa: S603 def main() -> None: @@ -97,15 +45,17 @@ def main() -> None: Raises: SystemExit: Indicates that the setup failed for some reason. """ - starting_dir = os.getcwd() + starting_dir = Path.cwd() try: - args = parse_arguments() - if running_in_virtualenv(): + if RUNNING_IN_VIRTUALENV: raise IndexError - if sys.version_info < (3, 8): # noqa: UP036 + # This requires contributors to use newer versions of Python even + # though the package supports older versions. + if sys.version_info < (3, 9): msg = ( "Unable to set up the environment. " - "Please use a Python version greater than or equal to 3.8." + "Please use a Python version greater than 3.8 for " + "local development on this package." ) raise SystemExit(msg) # Windows systems require the 64 bit python @@ -113,9 +63,9 @@ def main() -> None: msg = "Unable to set up the environment. Please use a 64-bit Python version." 
raise SystemExit(msg) # Create the virtual environment - virtual_env_dir = os.path.sep.join([starting_dir, ".venv"]) - create_virtual_environment(virtual_env_dir, args.reset) - os.environ["VIRTUAL_ENV"] = virtual_env_dir + virtual_env_dir = starting_dir / ".venv" + create_virtual_environment(virtual_env_dir) + os.environ["VIRTUAL_ENV"] = virtual_env_dir.as_posix() # Delete the previous poetry lock file lock_file = Path(starting_dir) / "poetry.lock" @@ -137,7 +87,7 @@ def main() -> None: f"{python_executable} -m pip install -U pip wheel poetry", f"{python_executable} -m poetry install", f"{python_executable} -m nodeenv --python-virtualenv --clean-src", - f"{python_executable} -m pre_commit install", + f"{python_executable} -m pre_commit install --install-hooks", f"{python_executable} -m tox -e tests", ) for command in commands_to_send: diff --git a/scripts/create_post_version_for_testpypi.py b/scripts/create_post_version_for_testpypi.py deleted file mode 100644 index 3c3f530d..00000000 --- a/scripts/create_post_version_for_testpypi.py +++ /dev/null @@ -1,40 +0,0 @@ -"""Create a post-release version for test.pypi.org.""" - -import argparse - -from poetry.core.constraints.version import Version - - -def parse_arguments() -> argparse.Namespace: - """Parse the command line arguments. - - Returns: - The parsed Namespace. - """ - parser = argparse.ArgumentParser() - parser.add_argument( - "--version", - required=True, - type=Version.parse, - action="store", - dest="version", - help="Provide the current, latest version of the package", - ) - - return parser.parse_args() - - -def main() -> None: - """Create and print a post-release version string for test.pypi.org.""" - args = parse_arguments() - version: Version = args.version - - new_post_release_num = 1 - if version.post: - new_post_release_num += version.post.number - - print(f"{'.'.join(str(x) for x in version.parts)}.post{new_post_release_num}") - - -if __name__ == "__main__": - main() diff --git a/scripts/project_version.py b/scripts/project_version.py deleted file mode 100644 index 4bf7a17d..00000000 --- a/scripts/project_version.py +++ /dev/null @@ -1,54 +0,0 @@ -"""This script modifies or gets the current project version in the pyproject.toml file.""" - -import argparse -import pathlib - -import tomli -import tomli_w - -from poetry.core.constraints.version import Version - -PYPROJECT_FILE = pathlib.Path(f"{pathlib.Path(__file__).parent}/../pyproject.toml") - - -def parse_arguments() -> argparse.Namespace: - """Parse the command line arguments. - - Returns: - The parsed Namespace. 
- """ - parser = argparse.ArgumentParser() - parser.add_argument( - "--set-version", - required=False, - type=Version.parse, - action="store", - dest="set_version", - help="Provide the version to write to the pyproject.toml file", - ) - - return parser.parse_args() - - -def main() -> None: - """Modify or get the project version.""" - args = parse_arguments() - new_version: Version = args.set_version - - # Read in the current data - with open(PYPROJECT_FILE, "rb") as file_handle: - pyproject_data = tomli.load(file_handle) - - if new_version: - # Modify the version value - pyproject_data["tool"]["poetry"]["version"] = new_version.to_string() - - # Write back the data to the file - with open(PYPROJECT_FILE, "wb") as file_handle: - tomli_w.dump(pyproject_data, file_handle) - else: - print(pyproject_data["tool"]["poetry"]["version"]) - - -if __name__ == "__main__": - main() diff --git a/scripts/pypi_latest_version.py b/scripts/pypi_latest_version.py deleted file mode 100644 index 907743f5..00000000 --- a/scripts/pypi_latest_version.py +++ /dev/null @@ -1,76 +0,0 @@ -"""Get the latest version from the index server.""" - -import argparse -import json - -import requests - -from poetry.core.constraints.version import Version - - -def parse_arguments() -> argparse.Namespace: - """Parse the command line arguments. - - Returns: - The parsed Namespace. - """ - parser = argparse.ArgumentParser() - parser.add_argument( - "--package", - required=True, - action="store", - dest="package", - help="Provide the package to get the latest version for", - ) - parser.add_argument( - "--index", - action="store", - dest="index", - choices=["pypi", "test.pypi"], - default="pypi", - help="Provide the index to query for the latest version, one of (pypi|test.pypi)", - ) - - return parser.parse_args() - - -def get_latest_version(package_name: str, index: str) -> str: - """Get the latest version of the provided package. - - Args: - package_name: The name of the package to get the latest version of. - index: The index to check for the package, one of (pypi|test.pypi). - - Returns: - A string containing the latest version of the package from the given index. - - Raises: - SystemExit: Indicates there were no versions for the package. - """ - # This code mirrors code found in src/tm_devices/helpers/functions.py, - # in the check_for_update() function. - # If this code is updated, the helper function should be updated too. - url = f"https://{index}.org/pypi/{package_name}/json" - try: - response = requests.get(url, timeout=10) - releases = json.loads(response.text)["releases"] - version_list = sorted(releases, key=Version.parse, reverse=True) - latest_version = version_list[0] - except (IndexError, json.decoder.JSONDecodeError) as error: - msg = f"There were no versions found for the {package_name} package." 
- raise SystemExit(msg) from error - - return latest_version - - -def main() -> None: - """Get the latest version of the provided package.""" - args = parse_arguments() - package = args.package - index = args.index - latest_version = get_latest_version(package, index) - print(latest_version) - - -if __name__ == "__main__": - main() diff --git a/scripts/requirements.txt b/scripts/requirements.txt deleted file mode 100644 index ef3ea208..00000000 --- a/scripts/requirements.txt +++ /dev/null @@ -1,10 +0,0 @@ -maison<2.0.0 -poetry -poetry-core -poetry-plugin-export -pre-commit -requests -toml-sort -tomli -tomli_w -yamlfix diff --git a/scripts/update_development_dependencies.py b/scripts/update_development_dependencies.py deleted file mode 100644 index 3a714492..00000000 --- a/scripts/update_development_dependencies.py +++ /dev/null @@ -1,124 +0,0 @@ -"""Update the development dependencies. - -This script will update the development dependencies that are pinned in the pyproject.toml and .pre- -commit-config.yaml files. -""" - -import argparse -import contextlib -import shlex -import subprocess -import sys -import warnings - -from pathlib import Path -from typing import List - -from yamlfix import fix_files # pyright: ignore[reportUnknownVariableType] - -from pypi_latest_version import get_latest_version - -DEPENDENCIES_TO_UPDATE = { - "dev": ( - "docformatter[tomli]", - "pylint", - "pyright", - "ruff", - ), - "tests": ("ruff",), -} - - -def parse_arguments() -> argparse.Namespace: - """Parse the command line arguments. - - Returns: - The parsed Namespace. - """ - parser = argparse.ArgumentParser() - parser.add_argument( - "--no-install", - action="store_true", - dest="no_install", - help="Indicate if packages should not be installed via poetry (Primarily used in CI).", - ) - - return parser.parse_args() - - -def _run_cmd_in_subprocess(command: str) -> None: - """Run the given command in a subprocess. - - Args: - command: The command string to send. 
- """ - command = command.replace("\\", "/") - print(f"\nExecuting command: {command}") - subprocess.check_call(shlex.split(command)) # noqa: S603 - - -def main() -> None: - """Run the script to update the development dependencies.""" - script_location = Path(__file__) - python_executable = sys.executable - python_script_location = Path(python_executable).parent - repository_root_directory = script_location.parent.parent - - args = parse_arguments() - lock_only = args.no_install - - # Remove the dependencies from poetry to avoid issues if they are in multiple groups - for group, dependencies_list in DEPENDENCIES_TO_UPDATE.items(): - dependencies = " ".join(f'"{x.split("[", maxsplit=1)[0]}"' for x in dependencies_list) - _run_cmd_in_subprocess( - f'"{python_executable}" -m poetry remove --lock --group={group} {dependencies}' - ) - - # Get the latest versions for each of the dependencies to update - for group, dependencies_list in DEPENDENCIES_TO_UPDATE.items(): - latest_dependency_versions: List[str] = [] - for dependency in dependencies_list: - latest_dep_version = get_latest_version(dependency.split("[", maxsplit=1)[0], "pypi") - latest_dependency_versions.append(dependency + f"=={latest_dep_version}") - - # Update dependencies in pyproject.toml using poetry - dependencies = " ".join(f'"{x}"' for x in latest_dependency_versions) - poetry_add_cmd = f'"{python_executable}" -m poetry add --group={group} {dependencies}' - if lock_only: - poetry_add_cmd += " --lock" - _run_cmd_in_subprocess(poetry_add_cmd) - - # Run poetry update - poetry_update_cmd = f'"{python_executable}" -m poetry update' - if lock_only: - poetry_update_cmd += " --lock" - _run_cmd_in_subprocess(poetry_update_cmd) - - # Update pre-commit config file - _run_cmd_in_subprocess(f'"{python_executable}" -m pre_commit autoupdate --freeze') - - # Fix the formatting of the pre-commit config file - with warnings.catch_warnings(): - warnings.simplefilter("ignore", UserWarning) - fix_files([f"{repository_root_directory}/.pre-commit-config.yaml"]) - - # Fix the formatting of the pyproject.toml file - _run_cmd_in_subprocess( - f'"{python_script_location}/toml-sort" "{repository_root_directory}/pyproject.toml"' - ) - - # Update the docs and tests dependency files - for group in ("docs", "tests"): - _run_cmd_in_subprocess( - f'"{python_executable}" -m poetry export --only {group} ' - f"--without-hashes --output {group}/requirements.txt" - ) - # Sort the requirements files (ignore failures due to changed files - with contextlib.suppress(subprocess.CalledProcessError): - _run_cmd_in_subprocess( - f'"{python_executable}" -m pre_commit run --all requirements-txt-fixer' - ) - - -if __name__ == "__main__": - main()
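Every caller above pins the shared workflows to the `v1.0.0` tag of tektronix/python-package-ci-cd. Dependabot's `github-actions` ecosystem also covers `uses:` references to reusable workflows, so those pins can be bumped automatically. A minimal sketch of such a config, assuming it lives in the repository's existing `.github/dependabot.yml`:

# Sketch: Dependabot config that keeps pinned action and reusable-workflow
# references (e.g. ...@v1.0.0) up to date. Illustrative only; the actual
# dependabot.yml in this repository may already contain equivalent entries.
version: 2
updates:
  - package-ecosystem: github-actions
    directory: /
    schedule:
      interval: weekly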