[Spark] Use the unitycatalog-client to implement the UCTokenBasedRestClient #13850
Workflow file for this run
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
name: "Delta Spark Publishing and Examples"
on: [push, pull_request]
jobs:
  # Generate Spark versions matrix from CrossSparkVersions.scala.
  # This workflow tests against released versions only (no snapshots).
  generate-matrix:
    name: "Generate Released Spark Versions Matrix"
    runs-on: ubuntu-24.04
    outputs:
      spark_versions: ${{ steps.generate.outputs.spark_versions }}
    steps:
      - uses: actions/checkout@v3
      - name: install java
        uses: actions/setup-java@v3
        with:
          distribution: "zulu"
          java-version: "17"
      - name: Generate released Spark versions matrix
        id: generate
        run: |
          # Get only released versions (exclude snapshots)
          SPARK_VERSIONS=$(python3 project/scripts/get_spark_version_info.py --released-spark-versions)
          echo "spark_versions=$SPARK_VERSIONS" >> $GITHUB_OUTPUT
          echo "Generated released Spark versions: $SPARK_VERSIONS"
  test:
    name: "DSP&E: Spark ${{ matrix.spark_version }}, Scala ${{ matrix.scala }}"
    runs-on: ubuntu-24.04
    needs: generate-matrix
    strategy:
      matrix:
        # Spark versions are dynamically generated - released versions only
        spark_version: ${{ fromJson(needs.generate-matrix.outputs.spark_versions) }}
        # These Scala versions must match those in the build.sbt.
        # Quoted so the version string is never re-typed by YAML tooling.
        scala: ["2.13.16"]
    env:
      SCALA_VERSION: ${{ matrix.scala }}
      SPARK_VERSION: ${{ matrix.spark_version }}
    steps:
      - uses: actions/checkout@v3
      - uses: technote-space/get-diff-action@v4
        id: git-diff
        with:
          PATTERNS: |
            **
            .github/workflows/**
            !kernel/**
            !connectors/**
      - name: Get Spark version details
        id: spark-details
        run: |
          # Get JVM version, package suffix, iceberg support for this Spark version
          JVM_VERSION=$(python3 project/scripts/get_spark_version_info.py --get-field "${{ matrix.spark_version }}" targetJvm | jq -r)
          SPARK_PACKAGE_SUFFIX=$(python3 project/scripts/get_spark_version_info.py --get-field "${{ matrix.spark_version }}" packageSuffix | jq -r)
          SUPPORT_ICEBERG=$(python3 project/scripts/get_spark_version_info.py --get-field "${{ matrix.spark_version }}" supportIceberg | jq -r)
          echo "jvm_version=$JVM_VERSION" >> $GITHUB_OUTPUT
          echo "spark_package_suffix=$SPARK_PACKAGE_SUFFIX" >> $GITHUB_OUTPUT
          echo "support_iceberg=$SUPPORT_ICEBERG" >> $GITHUB_OUTPUT
          echo "Using JVM $JVM_VERSION for Spark ${{ matrix.spark_version }}, package suffix: '$SPARK_PACKAGE_SUFFIX', support iceberg: '$SUPPORT_ICEBERG'"
      - name: install java
        uses: actions/setup-java@v3
        with:
          distribution: "zulu"
          java-version: ${{ steps.spark-details.outputs.jvm_version }}
      - name: Cache Scala, SBT
        uses: actions/cache@v3
        with:
          path: |
            ~/.sbt
            ~/.ivy2
            ~/.cache/coursier
          # Change the key if dependencies are changed. For each key, GitHub Actions will cache the
          # the above directories when we use the key for the first time. After that, each run will
          # just use the cache. The cache is immutable so we need to use a new key when trying to
          # cache new stuff.
          key: delta-sbt-cache-spark${{ matrix.spark_version }}-scala${{ matrix.scala }}
      - name: Install Job dependencies
        run: |
          sudo apt-get update
          sudo apt-get install -y make build-essential libssl-dev zlib1g-dev libbz2-dev libreadline-dev libsqlite3-dev wget curl llvm libncurses5-dev libncursesw5-dev xz-utils tk-dev libffi-dev liblzma-dev python3-openssl git
          # Use apt-get with -y (not bare `apt install`) so the step never
          # prompts in the non-interactive CI environment.
          sudo apt-get install -y libedit-dev
        if: steps.git-diff.outputs.diff
      - name: Run Delta Spark Local Publishing and Examples Compilation
        # examples/scala/build.sbt will compile against the local Delta release version (e.g. 3.2.0-SNAPSHOT).
        # Thus, we need to publishM2 first so those jars are locally accessible.
        # The SPARK_PACKAGE_SUFFIX env var tells examples/scala/build.sbt which artifact naming to use.
        env:
          SPARK_PACKAGE_SUFFIX: ${{ steps.spark-details.outputs.spark_package_suffix }}
          SUPPORT_ICEBERG: ${{ steps.spark-details.outputs.support_iceberg }}
        run: |
          build/sbt clean
          build/sbt -DsparkVersion=${{ matrix.spark_version }} publishM2
          cd examples/scala && build/sbt "++ $SCALA_VERSION compile"
        if: steps.git-diff.outputs.diff