[dependabot] Bump the actions group across 1 directory with 3 updates #1010
name: build

on:
  push:
    branches:
      - main
      - '[0-9]+.[0-9]+.x'
    tags:
      - '**'
  pull_request:

permissions: # added using https://github.com/step-security/secure-repo
  contents: read
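
# Maven flags shared by all jobs:
#   -Dhttp.keepAlive=false / -Dmaven.wagon.http.pool=false disable HTTP connection re-use for artifact
#     downloads, a common workaround for stale-connection failures in CI
#   --batch-mode keeps the logs non-interactive and -Dlicense.skip=true skips the license plugin
#   the compile args skip tests, sources and assemblies and build with 4 threads (-T4); the test args
#   pass -Dmaven.main.skip so the test steps reuse the classes installed by the preceding build step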
env:
  JAVA_VERSION: 17
  MAVEN_CLI_OPTS: -Dhttp.keepAlive=false -Dmaven.wagon.http.pool=false --batch-mode -Dlicense.skip=true
  MAVEN_COMPILE_ARGS: clean install -Dmaven.test.skip -Dmaven.assembly.skip=true -Dmaven.source.skip -Pskip-spark-runtimes -T4
  MAVEN_COMPILE_SPARK_ARGS: clean install -Dmaven.test.skip -Dmaven.assembly.skip=true -Dmaven.source.skip -T4
  MAVEN_TEST_ARGS: test -Dmaven.main.skip
  MAVEN_TEST_IT_ARGS: test failsafe:integration-test failsafe:verify -Dmaven.main.skip -DskipTests
  MAVEN_TEST_SPARK_ARGS: test -Dmaven.main.skip -Dtest.fork.count=2

jobs:
  build:
    name: test / ${{ matrix.projects.name }} / ${{ matrix.scala-version }}
    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
      matrix:
        scala-version: [ "2.12", "2.13" ]
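        # each entry names a subset of modules to build and test via -pl; the accumulo-datastore tests are
        # further split across three entries with -Dtest/-Dtest.excludes, presumably to keep job runtimes balanced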
        projects:
          - name: accumulo-ds-1
            list: geomesa-accumulo/geomesa-accumulo-datastore -Dtest="AccumuloDataStoreTest,AccumuloDataStoreQueryTest,AccumuloDataStoreStatsTest,AttributeIndexStrategyTest,AccumuloDataStoreAlterSchemaTest,ArrowBatchIteratorTest,MultiIteratorTest,AccumuloDataStoreTransformsTest,ArrowDictionaryTest,VisibilitiesTest"
          - name: accumulo-ds-2
            list: geomesa-accumulo/geomesa-accumulo-datastore -Dtest="AccumuloDataStoreAttributeVisibilityTest,AttributeIndexFilteringIteratorTest,AccumuloPartitioningTest,AccumuloFeatureReaderTest,AccumuloDataStoreAtomicWriteTest,AttributeIndexValuesTest,AccumuloDataStoreColumnGroupsTest,ZIntervalTest,S2IndexTest,AccumuloFeatureWriterTest,KryoLazyStatsIteratorTest,AccumuloDataStoreSortTest,AccumuloDataStoreUuidTest,AccumuloDataStoreAuthTest,DensityIteratorTest,S3IndexTest"
          - name: accumulo-ds-3
            list: geomesa-accumulo/geomesa-accumulo-datastore -Dtest.excludes="AccumuloDataStoreTest,AccumuloDataStoreQueryTest,AccumuloDataStoreStatsTest,AttributeIndexStrategyTest,AccumuloDataStoreAlterSchemaTest,ArrowBatchIteratorTest,MultiIteratorTest,AccumuloDataStoreTransformsTest,ArrowDictionaryTest,VisibilitiesTest,AccumuloDataStoreAttributeVisibilityTest,AttributeIndexFilteringIteratorTest,AccumuloPartitioningTest,AccumuloFeatureReaderTest,AccumuloDataStoreAtomicWriteTest,AttributeIndexValuesTest,AccumuloDataStoreColumnGroupsTest,ZIntervalTest,S2IndexTest,AccumuloFeatureWriterTest,KryoLazyStatsIteratorTest,AccumuloDataStoreSortTest,AccumuloDataStoreUuidTest,AccumuloDataStoreAuthTest,DensityIteratorTest,S3IndexTest"
          - name: accumulo-other
            # we exclude distributed runtime as it breaks the classpath loading, but it doesn't have any tests anyway
            list: geomesa-accumulo/geomesa-accumulo-dist,geomesa-accumulo/geomesa-accumulo-gs-plugin,geomesa-accumulo/geomesa-accumulo-indices,geomesa-accumulo/geomesa-accumulo-iterators,geomesa-accumulo/geomesa-accumulo-jobs,geomesa-accumulo/geomesa-accumulo-spark,geomesa-accumulo/geomesa-accumulo-tools
          - name: arrow
            list: geomesa-arrow/geomesa-arrow-datastore,geomesa-arrow/geomesa-arrow-dist,geomesa-arrow/geomesa-arrow-gt,geomesa-arrow/geomesa-arrow-jts,geomesa-arrow/geomesa-arrow-tools
          - name: cassandra
            list: geomesa-cassandra/geomesa-cassandra-datastore,geomesa-cassandra/geomesa-cassandra-dist,geomesa-cassandra/geomesa-cassandra-gs-plugin,geomesa-cassandra/geomesa-cassandra-tools
          - name: convert
            list: geomesa-convert/geomesa-convert-all,geomesa-convert/geomesa-convert-avro,geomesa-convert/geomesa-convert-avro-schema-registry,geomesa-convert/geomesa-convert-common,geomesa-convert/geomesa-convert-fixedwidth,geomesa-convert/geomesa-convert-jdbc,geomesa-convert/geomesa-convert-json,geomesa-convert/geomesa-convert-parquet,geomesa-convert/geomesa-convert-redis-cache,geomesa-convert/geomesa-convert-shp,geomesa-convert/geomesa-convert-simplefeature,geomesa-convert/geomesa-convert-text,geomesa-convert/geomesa-convert-xml
          - name: features
            list: geomesa-features/geomesa-feature-all,geomesa-features/geomesa-feature-avro,geomesa-features/geomesa-feature-common,geomesa-features/geomesa-feature-exporters,geomesa-features/geomesa-feature-kryo
          - name: fs
            list: geomesa-fs/geomesa-fs-datastore,geomesa-fs/geomesa-fs-dist,geomesa-fs/geomesa-fs-gs-plugin,geomesa-fs/geomesa-fs-spark,geomesa-fs/geomesa-fs-tools
          - name: fs-storage
            list: geomesa-fs/geomesa-fs-storage/geomesa-fs-storage-api,geomesa-fs/geomesa-fs-storage/geomesa-fs-storage-common,geomesa-fs/geomesa-fs-storage/geomesa-fs-storage-convert,geomesa-fs/geomesa-fs-storage/geomesa-fs-storage-orc,geomesa-fs/geomesa-fs-storage/geomesa-fs-storage-orc-io,geomesa-fs/geomesa-fs-storage/geomesa-fs-storage-parquet,geomesa-fs/geomesa-fs-storage/geomesa-fs-storage-parquet-io
          - name: gt-pg15
            list: geomesa-gt/geomesa-gt-dist,geomesa-gt/geomesa-gt-gs-plugin,geomesa-gt/geomesa-gt-partitioning,geomesa-gt/geomesa-gt-spark,geomesa-gt/geomesa-gt-tools -Dtest.postgis.docker.tag=15-3.4
          - name: gt-pg16
            list: geomesa-gt/geomesa-gt-dist,geomesa-gt/geomesa-gt-gs-plugin,geomesa-gt/geomesa-gt-partitioning,geomesa-gt/geomesa-gt-spark,geomesa-gt/geomesa-gt-tools -Dtest.postgis.docker.tag=16-3.5
          - name: gt-pg17
            list: geomesa-gt/geomesa-gt-dist,geomesa-gt/geomesa-gt-gs-plugin,geomesa-gt/geomesa-gt-partitioning,geomesa-gt/geomesa-gt-spark,geomesa-gt/geomesa-gt-tools -Dtest.postgis.docker.tag=17-3.5
          - name: hbase-ds
            list: geomesa-hbase/geomesa-hbase-datastore
          - name: hbase-other
            # we exclude distributed runtime as it breaks the classpath loading, but it doesn't have any tests anyway
            list: geomesa-hbase/geomesa-hbase-dist,geomesa-hbase/geomesa-hbase-gs-plugin,geomesa-hbase/geomesa-hbase-jobs,geomesa-hbase/geomesa-hbase-rpc,geomesa-hbase/geomesa-hbase-server,geomesa-hbase/geomesa-hbase-server-hbase2,geomesa-hbase/geomesa-hbase-spark,geomesa-hbase/geomesa-hbase-tools
          - name: kafka
            list: geomesa-kafka/geomesa-kafka-confluent,geomesa-kafka/geomesa-kafka-datastore,geomesa-kafka/geomesa-kafka-dist,geomesa-kafka/geomesa-kafka-gs-plugin,geomesa-kafka/geomesa-kafka-tools,geomesa-kafka/geomesa-kafka-utils
          - name: lambda
            list: geomesa-lambda/geomesa-lambda-datastore,geomesa-lambda/geomesa-lambda-dist,geomesa-lambda/geomesa-lambda-gs-plugin,geomesa-lambda/geomesa-lambda-tools
          - name: misc-1
            list: geomesa-memory/geomesa-cqengine,geomesa-memory/geomesa-cqengine-datastore,geomesa-metrics/geomesa-metrics-micrometer,geomesa-process/geomesa-process-vector
          - name: misc-2
            list: geomesa-filter,geomesa-index-api,geomesa-jobs,geomesa-security,geomesa-tools,geomesa-z3
          - name: redis
            list: geomesa-redis/geomesa-redis-datastore,geomesa-redis/geomesa-redis-dist,geomesa-redis/geomesa-redis-gs-plugin,geomesa-redis/geomesa-redis-tools
          - name: spark
            list: geomesa-spark/geomesa_pyspark,geomesa-spark/geomesa-spark-converter,geomesa-spark/geomesa-spark-core,geomesa-spark/geomesa-spark-jts,geomesa-spark/geomesa-spark-jupyter-leaflet,geomesa-spark/geomesa-spark-sql,geomesa-spark/geomesa-spark-test-with-sedona
          - name: utils
            list: geomesa-utils-parent/geomesa-bom,geomesa-utils-parent/geomesa-hadoop-utils,geomesa-utils-parent/geomesa-utils
    steps:
      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 # v5.0.0
        with:
          distribution: temurin
          java-version: "${{ env.JAVA_VERSION }}"
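      # sanitize the module list into MOD_SELECTOR (disallowed characters replaced with '-', truncated to
      # 256 chars) so each matrix entry gets its own ~/.m2 cache key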
      - name: Generate cache key
        run: echo "MOD_SELECTOR=$(echo '${{ matrix.projects.list }}' | sed 's/[^a-zA-Z0-9_.-]/-/g' | head -c 256)" >> "$GITHUB_ENV"
      - uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
        with:
          key: ${{ hashFiles('**/pom.xml') }}-build-${{ matrix.scala-version }}-${{ env.MOD_SELECTOR }}
          path: ~/.m2/repository/
      - name: Set Scala version
        run: ./build/scripts/change-scala-version.sh ${{ matrix.scala-version }}
      - name: Build with Maven
        run: mvn $MAVEN_COMPILE_ARGS $MAVEN_CLI_OPTS -am -pl ${{ matrix.projects.list }}
      - name: Unit tests
        id: test
        continue-on-error: true
        run: |
          set -o pipefail
          mvn $MAVEN_TEST_ARGS $MAVEN_CLI_OPTS -pl ${{ matrix.projects.list }} | tee -a test.log
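      # on failure, Maven logs a 'mvn <args> -rf :<module>' resume hint; the retry steps grep the last hint
      # out of test.log and resume from that module, retrying failed modules up to two more times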
      - name: Unit tests (retry)
        id: test-retry
        if: steps.test.outcome=='failure'
        continue-on-error: true
        run: |
          set -o pipefail
          RESUME_FROM="$({ grep --text 'mvn <args> -rf ' test.log || test $? = 1; } | tail -n1 | sed 's/.*-rf/-rf/')"
          mvn $MAVEN_TEST_ARGS $MAVEN_CLI_OPTS $RESUME_FROM -pl ${{ matrix.projects.list }} | tee -a test.log
      - name: Unit tests (retry)
        id: test-retry-retry
        if: steps.test-retry.outcome=='failure'
        run: |
          set -o pipefail
          RESUME_FROM="$({ grep --text 'mvn <args> -rf ' test.log || test $? = 1; } | tail -n1 | sed 's/.*-rf/-rf/')"
          mvn $MAVEN_TEST_ARGS $MAVEN_CLI_OPTS $RESUME_FROM -pl ${{ matrix.projects.list }} | tee -a test.log
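      # drop the locally-installed geomesa artifacts so they aren't persisted into the shared ~/.m2 cache
      # saved by actions/cache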
      - name: Remove geomesa artifacts
        if: success() || failure()
        run: rm -rf ~/.m2/repository/org/locationtech/geomesa
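
  # failsafe integration tests (*IT classes) run in their own job, building only the modules that contain them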
  integration-tests:
    name: integration-tests / ${{ matrix.scala-version }}
    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
      matrix:
        scala-version: [ "2.12", "2.13" ]
    steps:
      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 # v5.0.0
        with:
          distribution: temurin
          java-version: "${{ env.JAVA_VERSION }}"
      - uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
        with:
          key: ${{ hashFiles('**/pom.xml') }}-it-${{ matrix.scala-version }}
          path: ~/.m2/repository/
      - name: Set Scala version
        run: ./build/scripts/change-scala-version.sh ${{ matrix.scala-version }}
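      # collect the modules containing *IT.scala / *IT.java test sources into a comma-separated list for -pl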
      - name: Find tests
        run: |
          export IT_MODULES="$(find . -path '*/src/test/*' -name '*IT.scala' -o -name '*IT.java' | sed 's|\(.*\)src/test.*|\1|' | sort -u | tr '\n' ',')"
          echo "IT_MODULES=$IT_MODULES" >> "$GITHUB_ENV"
      - name: Build with Maven
        run: mvn $MAVEN_COMPILE_ARGS $MAVEN_CLI_OPTS -am -pl "$IT_MODULES"
      - name: Integration Tests
        run: mvn $MAVEN_TEST_IT_ARGS $MAVEN_CLI_OPTS -pl "$IT_MODULES"
      - name: Remove geomesa artifacts
        if: success() || failure()
        run: rm -rf ~/.m2/repository/org/locationtech/geomesa
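
  # end-to-end Spark tests run against geomesa-spark/geomesa-spark-test; only Scala 2.12 is exercised,
  # since there is no 2.13 Spark docker container (see the condition on the test step below)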
  spark-integration-tests:
    name: spark-integration-tests / ${{ matrix.scala-version }}
    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
      matrix:
        scala-version: [ "2.12", "2.13" ]
    steps:
      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 # v5.0.0
        with:
          distribution: temurin
          java-version: "${{ env.JAVA_VERSION }}"
      - uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
        with:
          key: ${{ hashFiles('**/pom.xml') }}-spark-integration-tests-${{ matrix.scala-version }}
          path: ~/.m2/repository/
      - name: Set Scala version
        run: ./build/scripts/change-scala-version.sh ${{ matrix.scala-version }}
      - name: Build with Maven
        # TODO we don't have any tests for hbase spark runtime - maybe split up the test modules since this job is already slow
        run: mvn $MAVEN_COMPILE_SPARK_ARGS $MAVEN_CLI_OPTS -am -pl geomesa-spark/geomesa-spark-test
      - name: Run Spark tests
        if: matrix.scala-version == '2.12' # there's no 2.13 spark docker container
        run: mvn $MAVEN_TEST_SPARK_ARGS $MAVEN_CLI_OPTS -pl geomesa-spark/geomesa-spark-test
      - name: Remove geomesa artifacts
        if: success() || failure()
        run: rm -rf ~/.m2/repository/org/locationtech/geomesa

  validate-ci:
    # validates that we haven't missed any projects in our build matrix
    name: validate-ci
    runs-on: ubuntu-latest
    if: github.event_name == 'pull_request'
    steps:
      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
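      # walk every pom.xml, skip aggregator poms (pom packaging), the distributed-runtime modules and
      # geomesa-spark-test, and use yq to verify each remaining module appears in a matrix 'list' entry above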
      - name: Validate project list
        run: |
          while IFS= read -r -d '' pom; do
            grep -q 'packaging>pom' "$pom" || {
              module="${pom%/pom.xml}"
              module="${module#./}"
              if [[ $module != *distributed-runtime* && $module != geomesa-spark/geomesa-spark-test ]]; then
                echo "Checking $module"
                yq -e '.jobs.build.strategy.matrix.projects[].list | select(test("(^|[, ])'"${module}"'($|[, ])"))' .github/workflows/build-and-test.yml >/dev/null 2>&1 || exit 1
              fi
            }
          done < <(find . -name pom.xml -print0)
          echo "done"