[Build] Upgrade the Spark version from 4.1.0-SNAPSHOT to 4.1.0 #15896
Workflow file for this run
| name: "Delta Kernel" | |
| on: [push, pull_request] | |
| # Cancel previous runs when new commits are pushed | |
| concurrency: | |
| group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} | |
| cancel-in-progress: true | |
| env: | |
| # Point SBT to our cache directories for consistency | |
| SBT_OPTS: "-Dsbt.coursier.home-dir=/home/runner/.cache/coursier -Dsbt.ivy.home=/home/runner/.ivy2" | |
| jobs: | |
| test: | |
| name: "DK: Shard ${{ matrix.shard }}" | |
| runs-on: ubuntu-24.04 | |
| strategy: | |
| fail-fast: false # Allow all shards to run even if one fails | |
| matrix: | |
| shard: [0, 1, 2, 3] | |
| env: | |
| SCALA_VERSION: 2.13.16 | |
| NUM_SHARDS: 4 | |
      DISABLE_UNIDOC: true  # Unidoc is covered by a separate workflow, so skip it here.
      TEST_PARALLELISM_COUNT: 4
    steps:
      - name: Show runner specs
        run: |
          echo "=== GitHub Runner Specs ==="
          echo "CPU cores: $(nproc)"
          echo "CPU info: $(lscpu | grep 'Model name' | cut -d':' -f2 | xargs)"
          echo "Total RAM: $(free -h | grep '^Mem:' | awk '{print $2}')"
          echo "Available RAM: $(free -h | grep '^Mem:' | awk '{print $7}')"
          echo "Disk space: $(df -h / | tail -1 | awk '{print $2 " total, " $4 " available"}')"
          echo "Runner OS: ${{ runner.os }}"
          echo "Runner arch: ${{ runner.arch }}"
      - name: Checkout code
        uses: actions/checkout@v4
      # Run unit tests with JDK 17. These unit tests depend on Spark, and Spark 4.0+ requires JDK 17.
      - name: Install Java
        uses: actions/setup-java@v4
        with:
          distribution: "zulu"
          java-version: "17"
      - name: Cache SBT and dependencies
        id: cache-sbt
        uses: actions/cache@v4
        with:
          path: |
            ~/.sbt
            ~/.ivy2/cache
            ~/.coursier/cache
            ~/.cache/coursier
          key: sbt-kernel-${{ runner.os }}-scala${{ env.SCALA_VERSION }}
      - name: Check cache status
        run: |
          if [ "${{ steps.cache-sbt.outputs.cache-hit }}" == "true" ]; then
            echo "✅ Cache HIT - using cached dependencies"
          else
            echo "❌ Cache MISS - will download dependencies"
          fi
      - name: Run unit tests
        run: |
          python run-tests.py --group kernel --coverage --shard ${{ matrix.shard }}
  integration-test:
    name: "DK: Integration"
    runs-on: ubuntu-24.04
    steps:
      - uses: actions/checkout@v4
      # Run integration tests with JDK 11, as they have no Spark dependency
      - name: Install Java
        uses: actions/setup-java@v4
        with:
          distribution: "zulu"
          java-version: "11"
      - name: Run integration tests
        run: |
          cd kernel/examples && python run-kernel-examples.py --use-local
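
A note on the shard matrix: the test job fans out into four parallel runners (shard: [0, 1, 2, 3] with NUM_SHARDS=4), each invoking run-tests.py with its own --shard index. The sketch below shows one way such a script could deterministically partition suites across shards; the function names, the suite names, and the hash-based scheme are illustrative assumptions, not the actual run-tests.py implementation.

# Illustrative sketch only: deterministic hash-based test sharding.
# run-tests.py may partition suites differently.
import hashlib

def shard_of(suite_name: str, num_shards: int) -> int:
    # md5 gives a stable assignment across processes and runs,
    # unlike Python's built-in hash(), which is salted per process.
    digest = hashlib.md5(suite_name.encode("utf-8")).hexdigest()
    return int(digest, 16) % num_shards

def suites_for_shard(all_suites, shard, num_shards):
    return [s for s in all_suites if shard_of(s, num_shards) == shard]

suites = ["DeltaTableSuite", "SnapshotManagerSuite", "CheckpointSuite", "ScanBuilderSuite"]
print(suites_for_shard(suites, shard=0, num_shards=4))  # shard 0's slice

Because the assignment depends only on the suite name and shard count, every runner computes the same partition independently, with no coordination between jobs.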
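Within a shard, TEST_PARALLELISM_COUNT: 4 suggests the runner executes up to four suites concurrently. A minimal sketch of that pattern, assuming the script shells out once per suite; the build/sbt invocation is a placeholder, not the project's actual test command.

# Illustrative sketch only: run one shard's suites with bounded parallelism.
import subprocess
from concurrent.futures import ThreadPoolExecutor

def run_suite(suite: str) -> int:
    # Placeholder command; the real runner's test invocation may differ.
    return subprocess.call(["build/sbt", f"kernel/testOnly {suite}"])

def run_shard(suites, parallelism: int = 4) -> list:
    # Threads suffice here: each worker just blocks on a child process.
    with ThreadPoolExecutor(max_workers=parallelism) as pool:
        return list(pool.map(run_suite, suites))

exit_codes = run_shard(["DeltaTableSuite", "ScanBuilderSuite"], parallelism=4)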
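To reproduce a single shard locally, exporting the job's env block before calling run-tests.py should mirror what the runner does. That the script reads these variables from the environment is an assumption based on the workflow, not something the file states explicitly.

# Hypothetical local reproduction of shard 0 with the job's environment.
import os
import subprocess

env = dict(os.environ,
           SCALA_VERSION="2.13.16",
           NUM_SHARDS="4",
           TEST_PARALLELISM_COUNT="4",
           DISABLE_UNIDOC="true")
subprocess.run(["python", "run-tests.py", "--group", "kernel",
                "--coverage", "--shard", "0"], env=env, check=True)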