diff --git a/.github/workflows/build_and_test.yml b/.github/workflows/build_and_test.yml
index 83db3ecb8102b..55791cedc6290 100644
--- a/.github/workflows/build_and_test.yml
+++ b/.github/workflows/build_and_test.yml
@@ -261,8 +261,8 @@ jobs:
   # Build: build Spark and run the tests for specified modules.
   build:
     name: "Build modules: ${{ matrix.modules }} ${{ matrix.comment }}"
-    needs: precondition
-    if: fromJson(needs.precondition.outputs.required).build == 'true'
+    needs: [precondition, precompile]
+    if: (!cancelled()) && fromJson(needs.precondition.outputs.required).build == 'true'
     runs-on: ubuntu-latest
     timeout-minutes: 150
     strategy:
@@ -399,6 +399,20 @@
       run: |
         python3.12 -m pip install 'numpy>=1.22' pyarrow 'pandas==2.3.3' pyyaml scipy unittest-xml-reporting 'lxml==4.9.4' 'grpcio==1.76.0' 'grpcio-status==1.76.0' 'protobuf==6.33.5' 'zstandard==0.25.0'
         python3.12 -m pip list
+    - name: Download precompiled artifact
+      id: download-precompiled
+      if: needs.precompile.result == 'success'
+      continue-on-error: true
+      uses: actions/download-artifact@v6
+      with:
+        name: spark-compile-${{ inputs.branch }}-${{ github.run_id }}
+    - name: Extract precompiled artifact
+      id: extract-precompiled
+      if: steps.download-precompiled.outcome == 'success'
+      continue-on-error: true
+      run: |
+        tar -xzf compile-artifact.tar.gz
+        rm compile-artifact.tar.gz
     # Run the tests.
     - name: Run tests
       env: ${{ fromJSON(inputs.envs) }}
@@ -409,9 +423,13 @@
         # Hive "other tests" test needs larger metaspace size based on experiment.
         if [[ "$MODULES_TO_TEST" == "hive" ]] && [[ "$EXCLUDED_TAGS" == "org.apache.spark.tags.SlowHiveTest" ]]; then export METASPACE_SIZE=2g; fi
         # SPARK-46283: should delete the following env replacement after SPARK 3.x EOL
-        if [[ "$MODULES_TO_TEST" == *"streaming-kinesis-asl"* ]] && [[ "${{ inputs.branch }}" =~ ^branch-3 ]]; then
+        if [[ "$MODULES_TO_TEST" == *"streaming-kinesis-asl"* ]] && [[ "${{ inputs.branch }}" =~ ^branch-3 ]]; then
           MODULES_TO_TEST=${MODULES_TO_TEST//streaming-kinesis-asl, /}
         fi
+        if [ "${{ steps.extract-precompiled.outcome }}" = "success" ]; then
+          export SKIP_SCALA_BUILD=true
+          echo "Reusing precompiled artifact, skipping local SBT build."
+        fi
         export SERIAL_SBT_TESTS=1
         ./dev/run-tests --parallelism 1 --modules "$MODULES_TO_TEST" --included-tags "$INCLUDED_TAGS" --excluded-tags "$EXCLUDED_TAGS"
     - name: Upload test results to report
@@ -541,6 +559,7 @@ jobs:
     needs: precondition
     if: >-
       (!cancelled()) && (
+      fromJson(needs.precondition.outputs.required).build == 'true' ||
       fromJson(needs.precondition.outputs.required).pyspark == 'true' ||
       fromJson(needs.precondition.outputs.required).pyspark-pandas == 'true' ||
       fromJson(needs.precondition.outputs.required).pyspark-install == 'true')