Nightly (beta) #1160

Workflow file for this run

# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This workflow is an extended CI run: it exercises features that need extra
# test coverage and JDK versions that are supported but not the default.
name: "Nightly (beta)"
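# No schedule is defined here; runs are started on demand through a
# workflow_dispatch event (e.g. from the GitHub UI, the REST API, or the gh CLI).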
on:
  workflow_dispatch:
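# Restrict the default GITHUB_TOKEN to read-only access for all jobs.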
permissions: read-all
jobs:
  pre-compile-checks:
    name: "Pre-compile Checks"
    uses: ./.github/workflows/template.pre-compile-checks.yml
  java11:
    name: "Java 11"
    uses: ./.github/workflows/template.flink-ci.yml
    with:
      workflow-caller-id: java11
      environment: 'PROFILE="-Dinclude_hadoop_aws -Djdk11 -Pjava11-target"'
      jdk_version: 11
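    # S3 credentials (named IT_CASE_*) are passed through to the reusable
    # workflow, evidently for the S3-backed integration test cases.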
    secrets:
      s3_bucket: ${{ secrets.IT_CASE_S3_BUCKET }}
      s3_access_key: ${{ secrets.IT_CASE_S3_ACCESS_KEY }}
      s3_secret_key: ${{ secrets.IT_CASE_S3_SECRET_KEY }}
  java17:
    name: "Java 17"
    uses: ./.github/workflows/template.flink-ci.yml
    with:
      workflow-caller-id: java17
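      # The -Djdk* flags are cumulative: each target JDK also carries the flags
      # of the lower supported JDKs.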
environment: 'PROFILE="-Dinclude_hadoop_aws -Djdk11 -Djdk17 -Pjava17-target"'
jdk_version: 17
secrets:
s3_bucket: ${{ secrets.IT_CASE_S3_BUCKET }}
s3_access_key: ${{ secrets.IT_CASE_S3_ACCESS_KEY }}
s3_secret_key: ${{ secrets.IT_CASE_S3_SECRET_KEY }}
java21:
name: "Java 21"
uses: ./.github/workflows/template.flink-ci.yml
with:
workflow-caller-id: java21
environment: 'PROFILE="-Dinclude_hadoop_aws -Djdk11 -Djdk17 -Djdk21 -Pjava21-target"'
jdk_version: 21
secrets:
s3_bucket: ${{ secrets.IT_CASE_S3_BUCKET }}
s3_access_key: ${{ secrets.IT_CASE_S3_ACCESS_KEY }}
s3_secret_key: ${{ secrets.IT_CASE_S3_SECRET_KEY }}
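  # Same CI template, but built and tested against a pinned Hadoop 3 release
  # with the Hive 3 test profile enabled.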
  hadoop313:
    name: "Hadoop 3.1.3"
    uses: ./.github/workflows/template.flink-ci.yml
    with:
      workflow-caller-id: hadoop313
      environment: 'PROFILE="-Dflink.hadoop.version=3.2.3 -Phadoop3-tests,hive3"'
      jdk_version: 11
    secrets:
      s3_bucket: ${{ secrets.IT_CASE_S3_BUCKET }}
      s3_access_key: ${{ secrets.IT_CASE_S3_ACCESS_KEY }}
      s3_secret_key: ${{ secrets.IT_CASE_S3_SECRET_KEY }}
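  # Runs the suite with Flink's AdaptiveScheduler enabled via its Maven profile.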
  adaptive-scheduler:
    name: "AdaptiveScheduler"
    uses: ./.github/workflows/template.flink-ci.yml
    with:
      workflow-caller-id: adaptive-scheduler
      environment: 'PROFILE="-Penable-adaptive-scheduler"'
      jdk_version: 11
    secrets:
      s3_bucket: ${{ secrets.IT_CASE_S3_BUCKET }}
      s3_access_key: ${{ secrets.IT_CASE_S3_ACCESS_KEY }}
      s3_secret_key: ${{ secrets.IT_CASE_S3_SECRET_KEY }}
  build_python_wheels:
    name: "Build Python Wheels on ${{ matrix.os_name }}"
    runs-on: ${{ matrix.os }}
    strategy:
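      # Let the remaining OS build finish even if the other one fails.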
      fail-fast: false
      matrix:
        include:
          - os: ubuntu-latest
            os_name: linux
          - os: macos-latest
            os_name: macos
    steps:
      - name: "Checkout the repository"
        uses: actions/checkout@v4
        with:
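          # Full clone; the token is not persisted into the local git config.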
          fetch-depth: 0
          persist-credentials: false
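      # Presumably normalizes the workflow name into a filename-safe string for
      # use in the artifact name below.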
- name: "Stringify workflow name"
uses: "./.github/actions/stringify"
id: stringify_workflow
with:
value: ${{ github.workflow }}
# Used to host cibuildwheel
- name: "Setup Python"
uses: actions/setup-python@v5
with:
python-version: '3.x'
- name: "Install cibuildwheel"
run: python -m pip install cibuildwheel==2.16.5
- name: "Build python wheels for ${{ matrix.os_name }}"
run: python -m cibuildwheel --output-dir flink-python/dist flink-python
env:
# Skip -musllinux on Linux builds
CIBW_SKIP: "*-musllinux*"
# Use manylinux2014 on Linux
CIBW_MANYLINUX_X86_64_IMAGE: manylinux2014
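          # patchelf is needed by auditwheel to rewrite shared-library paths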
          CIBW_BEFORE_ALL_LINUX: pip install patchelf
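          # Install PyFlink's build-time requirements inside the build container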
          CIBW_BEFORE_BUILD_LINUX: pip install -r flink-python/dev/dev-requirements.txt
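          # Compile against the bundled version-fix header, apparently to keep
          # the built extension compatible with older glibc symbol versions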
          CIBW_ENVIRONMENT_LINUX: CFLAGS="-I. -include ./dev/glibc_version_fix.h"
          # Run auditwheel repair on Linux
          CIBW_REPAIR_WHEEL_COMMAND_LINUX: "auditwheel repair -w {dest_dir} {wheel}"
          # Skip the repair step on macOS
          CIBW_REPAIR_WHEEL_COMMAND_MACOS: ""
- name: "Tar python artifact"
run: tar -czvf python-wheel-${{ matrix.os_name }}.tar.gz -C flink-python/dist .
- name: "Upload python artifact"
uses: actions/upload-artifact@v4
with:
name: wheel_${{ matrix.os_name }}_${{ steps.stringify_workflow.outputs.stringified_value }}-${{ github.run_number }}
path: python-wheel-${{ matrix.os_name }}.tar.gz
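
# Example (not part of the workflow itself): a run can be started manually with
# the GitHub CLI, assuming gh is installed and authenticated for this repository:
#
#   gh workflow run "Nightly (beta)"
#   gh run list --workflow="Nightly (beta)"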