# Workflow: Python Tests
# Source run: PR #513 — "Format ruff & fix secure blob access"
---
name: Python Tests

on:
  workflow_dispatch:
  push:
    branches: [main]
  pull_request:
    branches: [main]

permissions:
  contents: read
  checks: write
  pull-requests: write

jobs:
  test:
    runs-on: ubuntu-latest
    env:
      PYTHONPATH: ${{ github.workspace }}/shared/python:${{ github.workspace }}
    strategy:
      matrix:
        python-version: ['3.12', '3.13', '3.14']
    steps:
      - uses: actions/checkout@v4

      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python-version }}

      - name: Install uv
        uses: astral-sh/setup-uv@v4
        with:
          enable-cache: true
          cache-dependency-glob: "pyproject.toml"

      - name: Install dependencies
        run: |
          uv venv
          uv sync
          # Verify coverage HTML templates are present
          uv run python -c "import coverage.html; print(coverage.html.__file__)"

      # Lint the Python files & upload the result statistics.
      # continue-on-error: the later reporting steps render this step's
      # `outcome` (⚠️ on failure) — without it, a lint failure would abort
      # the job before any report/summary step ever runs.
      - name: Run ruff analysis
        id: ruff
        continue-on-error: true
        run: |
          mkdir -p tests/python/ruff/reports
          uv run ruff check infrastructure samples setup shared 2>&1 | tee tests/python/ruff/reports/latest.txt
          uv run ruff check --output-format json infrastructure samples setup shared > tests/python/ruff/reports/latest.json 2>/dev/null || true

      - name: Upload ruff reports
        uses: actions/upload-artifact@v4
        with:
          name: ruff-reports-${{ matrix.python-version }}
          path: tests/python/ruff/reports/

      # Static code analysis through simple compilation to ensure code is syntactically sound
      - name: Verify bytecode compilation
        run: |
          uv run python -m compileall infrastructure samples setup shared

      # Run tests and generate coverage reports.
      # continue-on-error: the PR comment and job summary report this step's
      # `outcome` (❌ on failure); the job must keep running so those steps
      # and the JUnit/coverage uploads still execute after test failures.
      - name: Run pytest with coverage and generate JUnit XML
        id: pytest
        continue-on-error: true
        run: |
          COVERAGE_FILE=tests/python/.coverage-${{ matrix.python-version }} uv run pytest --cov=shared/python --cov-config=tests/python/.coveragerc --cov-report=html:tests/python/htmlcov-${{ matrix.python-version }} --cov-report=term-missing --junitxml=tests/python/junit-${{ matrix.python-version }}.xml tests/python/

      - name: Upload coverage HTML report
        uses: actions/upload-artifact@v4
        with:
          name: coverage-html-${{ matrix.python-version }}
          path: tests/python/htmlcov-${{ matrix.python-version }}/

      - name: Upload JUnit test results
        uses: actions/upload-artifact@v4
        with:
          name: junit-results-${{ matrix.python-version }}
          path: tests/python/junit-${{ matrix.python-version }}.xml

      # Extract all linting and coverage results in preparation for publish
      - name: Extract and Summarize Metrics
        id: metrics
        run: |
          # Ruff Issue Count
          JSON_REPORT="tests/python/ruff/reports/latest.json"
          if [ -f "$JSON_REPORT" ] && command -v jq &> /dev/null; then
            RUFF_ISSUES=$(jq 'length' "$JSON_REPORT" 2>/dev/null || echo "N/A")
            echo "ruff_issues=$RUFF_ISSUES" >> "$GITHUB_OUTPUT"
          else
            echo "ruff_issues=N/A" >> "$GITHUB_OUTPUT"
          fi
          # Coverage Percentage
          if [ -f "tests/python/.coverage-${{ matrix.python-version }}" ]; then
            TOTAL_COV=$(COVERAGE_FILE=tests/python/.coverage-${{ matrix.python-version }} uv run python -m coverage report | grep TOTAL | awk '{print $NF}')
            echo "coverage=$TOTAL_COV" >> "$GITHUB_OUTPUT"
          else
            echo "coverage=N/A" >> "$GITHUB_OUTPUT"
          fi

      # Publish general statistics for linting, test success, and code coverage as well as detailed tests results
      - name: Publish Consolidated Results to PR
        if: github.event_name == 'pull_request'
        uses: marocchino/sticky-pull-request-comment@v2
        with:
          header: python-results-${{ matrix.python-version }}
          message: |
            ## 🐍 Python ${{ matrix.python-version }} Results
            | Metric | Status | Value |
            | :--- | :---: | :--- |
            | **Ruff** | ${{ steps.ruff.outcome == 'success' && '✅' || '⚠️' }} | `${{ steps.metrics.outputs.ruff_issues }} issue(s)` |
            | **Unit Tests** | ${{ steps.pytest.outcome == 'success' && '✅' || '❌' }} | `${{ steps.pytest.outcome }}` |
            | **Code Coverage** | 📊 | `${{ steps.metrics.outputs.coverage }}` |
            [Full Workflow Logs](${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }})

      - name: Generate Job Summary
        run: |
          RUFF_ISSUES="${{ steps.metrics.outputs.ruff_issues }}"
          PYTEST_OUTCOME="${{ steps.pytest.outcome }}"
          COVERAGE="${{ steps.metrics.outputs.coverage }}"
          echo "## 🐍 Python ${{ matrix.python-version }} Execution Summary" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          echo "| Category | Status | Detail |" >> $GITHUB_STEP_SUMMARY
          echo "| :--- | :---: | :--- |" >> $GITHUB_STEP_SUMMARY
          echo "| **Ruff** | ${{ steps.ruff.outcome == 'success' && '✅' || '⚠️' }} | Issues: \`${RUFF_ISSUES:-N/A}\` |" >> $GITHUB_STEP_SUMMARY
          echo "| **Pytest** | ${{ steps.pytest.outcome == 'success' && '✅' || '❌' }} | Outcome: \`${PYTEST_OUTCOME:-N/A}\` |" >> $GITHUB_STEP_SUMMARY
          echo "| **Coverage** | 📊 | Total: \`${COVERAGE:-N/A}\` |" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          echo "---" >> $GITHUB_STEP_SUMMARY

      - name: Publish Unit Test Results
        uses: EnricoMi/publish-unit-test-result-action@v2
        with:
          files: tests/python/junit-${{ matrix.python-version }}.xml
          comment_title: Python ${{ matrix.python-version }} Detailed Test Results