
Commit 07b4493

WIP [skip-vdc][skip-rapids][skip-matx][skip-docs]
1 parent f83cb0c commit 07b4493


60 files changed: +1779 −132 lines
Lines changed: 56 additions & 0 deletions
@@ -0,0 +1,56 @@
+{
+  "shutdownAction": "stopContainer",
+  "image": "rapidsai/devcontainers:25.08-cpp-gcc13-cuda12.8",
+  "hostRequirements": {
+    "gpu": "optional"
+  },
+  "initializeCommand": [
+    "/bin/bash",
+    "-c",
+    "mkdir -m 0755 -p ${localWorkspaceFolder}/.{aws,cache,config}; mkdir -m 0755 -p ${localWorkspaceFolder}/{build,wheelhouse}; if test -z ${localEnv:WSLENV}; then docker volume create --driver local --opt type=none --opt device=${localWorkspaceFolder}/build --opt o=bind cccl-build; docker volume create --driver local --opt type=none --opt device=${localWorkspaceFolder}/wheelhouse --opt o=bind cccl-wheelhouse; else docker volume create cccl-build; docker volume create cccl-wheelhouse; fi;"
+  ],
+  "containerEnv": {
+    "SCCACHE_REGION": "us-east-2",
+    "SCCACHE_BUCKET": "rapids-sccache-devs",
+    "AWS_ROLE_ARN": "arn:aws:iam::279114543810:role/nv-gha-token-sccache-devs",
+    "HISTFILE": "${containerWorkspaceFolder}/.cache/._bash_history",
+    "DEVCONTAINER_NAME": "cuda12.8-gcc13",
+    "CCCL_CUDA_VERSION": "12.8",
+    "CCCL_HOST_COMPILER": "gcc",
+    "CCCL_HOST_COMPILER_VERSION": "13",
+    "CCCL_BUILD_INFIX": "cuda12.8-gcc13",
+    "CCCL_CUDA_EXTENDED": "false",
+    "HOST_WORKSPACE": "${localWorkspaceFolder}"
+  },
+  "workspaceFolder": "/home/coder/${localWorkspaceFolderBasename}",
+  "workspaceMount": "source=${localWorkspaceFolder},target=/home/coder/${localWorkspaceFolderBasename},type=bind,consistency=consistent",
+  "mounts": [
+    "source=${localWorkspaceFolder}/.aws,target=/home/coder/.aws,type=bind,consistency=consistent",
+    "source=${localWorkspaceFolder}/.cache,target=/home/coder/.cache,type=bind,consistency=consistent",
+    "source=${localWorkspaceFolder}/.config,target=/home/coder/.config,type=bind,consistency=consistent",
+    "source=cccl-build,target=/home/coder/cccl/build",
+    "source=cccl-wheelhouse,target=/home/coder/cccl/wheelhouse"
+  ],
+  "customizations": {
+    "vscode": {
+      "extensions": [
+        "llvm-vs-code-extensions.vscode-clangd",
+        "seaube.clangformat",
+        "nvidia.nsight-vscode-edition",
+        "ms-vscode.cmake-tools"
+      ],
+      "settings": {
+        "editor.defaultFormatter": "seaube.clangformat",
+        "editor.formatOnSave": true,
+        "clang-format.executable": "/usr/bin/clang-format",
+        "clangd.arguments": [
+          "--header-insertion=never",
+          "--compile-commands-dir=${workspaceFolder}"
+        ],
+        "files.eol": "\n",
+        "files.trimTrailingWhitespace": true
+      }
+    }
+  },
+  "name": "cuda12.8-gcc13"
+}
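Note: the one-line "initializeCommand" above pre-creates the host-side directories and the cccl-build / cccl-wheelhouse volumes. Outside WSL (WSLENV unset) the volumes are bind-backed by the host build/ and wheelhouse/ directories; under WSL, plain named volumes are created instead. A minimal Python sketch of the same branching, assuming the docker CLI is on PATH (the helper name is hypothetical, not part of the commit):

import os
import subprocess

def create_devcontainer_volumes(workspace: str) -> None:
    # Pre-create the host directories that the devcontainer bind-mounts.
    for sub in (".aws", ".cache", ".config", "build", "wheelhouse"):
        os.makedirs(os.path.join(workspace, sub), mode=0o755, exist_ok=True)

    for name, sub in (("cccl-build", "build"), ("cccl-wheelhouse", "wheelhouse")):
        if not os.environ.get("WSLENV"):
            # Native Linux: back the named volume with a bind mount to the host dir.
            subprocess.run(
                ["docker", "volume", "create", "--driver", "local",
                 "--opt", "type=none",
                 "--opt", f"device={os.path.join(workspace, sub)}",
                 "--opt", "o=bind", name],
                check=True,
            )
        else:
            # WSL: plain named volumes, no bind backing.
            subprocess.run(["docker", "volume", "create", name], check=True)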
Lines changed: 132 additions & 0 deletions
@@ -0,0 +1,132 @@
+name: "Upload Artifacts"
+description: "Upload a small, arbitrary number of artifacts."
+
+inputs:
+  artifacts:
+    description: "JSON array of objects produced via ci/util/artifacts/upload/print_matrix.sh. Each object must include 'name', 'path', 'retention_days', and 'compression_level'."
+    required: true
+
+runs:
+  using: "composite"
+  steps:
+    - name: Parse artifact array
+      id: parse
+      shell: bash --noprofile --norc -euo pipefail {0}
+      run: |
+        # Save artifacts json for parsing:
+        jq -r '.' <<< '${{ inputs.artifacts }}' > artifacts.json
+
+        echo "::group::List of artifacts to upload"
+        cat artifacts.json
+        echo "::endgroup::"
+
+        max_artifacts=10
+
+        count=$(jq '. | length' artifacts.json)
+        if [ "$count" -gt "$max_artifacts" ]; then
+          echo "Error: Too many artifacts to upload. Maximum is $max_artifacts, got $count."
+          echo "Update the action to support more artifacts if needed."
+          exit 1
+        fi
+
+        for i in $(seq 0 $((count - 1))); do
+          name=$(jq -r ".[$i].name" artifacts.json)
+          path=$(jq -r ".[$i].path" artifacts.json)
+          retention_days=$(jq -r ".[$i].retention_days" artifacts.json)
+          compression_level=$(jq -r ".[$i].compression_level" artifacts.json)
+
+          echo "enabled$i=true" >> "${GITHUB_OUTPUT}"
+          echo "name$i=$name" >> "${GITHUB_OUTPUT}"
+          echo "path$i=$path" >> "${GITHUB_OUTPUT}"
+          echo "retention_days$i=$retention_days" >> "${GITHUB_OUTPUT}"
+          echo "compression_level$i=$compression_level" >> "${GITHUB_OUTPUT}"
+        done
+
+        for i in $(seq $((count)) $((max_artifacts - 1))); do
+          echo "enabled$i=false" >> "${GITHUB_OUTPUT}"
+          echo "name$i=" >> "${GITHUB_OUTPUT}"
+          echo "path$i=" >> "${GITHUB_OUTPUT}"
+          echo "retention_days$i=" >> "${GITHUB_OUTPUT}"
+          echo "compression_level$i=" >> "${GITHUB_OUTPUT}"
+        done
+
+    - name: Upload artifact 0
+      if: ${{ steps.parse.outputs.enabled0 == 'true' }}
+      uses: actions/upload-artifact@v4
+      with:
+        name: ${{ steps.parse.outputs.name0 }}
+        path: ${{ steps.parse.outputs.path0 }}
+        retention-days: ${{ steps.parse.outputs.retention_days0 }}
+        compression-level: ${{ steps.parse.outputs.compression_level0 }}
+    - name: Upload artifact 1
+      if: ${{ steps.parse.outputs.enabled1 == 'true' }}
+      uses: actions/upload-artifact@v4
+      with:
+        name: ${{ steps.parse.outputs.name1 }}
+        path: ${{ steps.parse.outputs.path1 }}
+        retention-days: ${{ steps.parse.outputs.retention_days1 }}
+        compression-level: ${{ steps.parse.outputs.compression_level1 }}
+    - name: Upload artifact 2
+      if: ${{ steps.parse.outputs.enabled2 == 'true' }}
+      uses: actions/upload-artifact@v4
+      with:
+        name: ${{ steps.parse.outputs.name2 }}
+        path: ${{ steps.parse.outputs.path2 }}
+        retention-days: ${{ steps.parse.outputs.retention_days2 }}
+        compression-level: ${{ steps.parse.outputs.compression_level2 }}
+    - name: Upload artifact 3
+      if: ${{ steps.parse.outputs.enabled3 == 'true' }}
+      uses: actions/upload-artifact@v4
+      with:
+        name: ${{ steps.parse.outputs.name3 }}
+        path: ${{ steps.parse.outputs.path3 }}
+        retention-days: ${{ steps.parse.outputs.retention_days3 }}
+        compression-level: ${{ steps.parse.outputs.compression_level3 }}
+    - name: Upload artifact 4
+      if: ${{ steps.parse.outputs.enabled4 == 'true' }}
+      uses: actions/upload-artifact@v4
+      with:
+        name: ${{ steps.parse.outputs.name4 }}
+        path: ${{ steps.parse.outputs.path4 }}
+        retention-days: ${{ steps.parse.outputs.retention_days4 }}
+        compression-level: ${{ steps.parse.outputs.compression_level4 }}
+    - name: Upload artifact 5
+      if: ${{ steps.parse.outputs.enabled5 == 'true' }}
+      uses: actions/upload-artifact@v4
+      with:
+        name: ${{ steps.parse.outputs.name5 }}
+        path: ${{ steps.parse.outputs.path5 }}
+        retention-days: ${{ steps.parse.outputs.retention_days5 }}
+        compression-level: ${{ steps.parse.outputs.compression_level5 }}
+    - name: Upload artifact 6
+      if: ${{ steps.parse.outputs.enabled6 == 'true' }}
+      uses: actions/upload-artifact@v4
+      with:
+        name: ${{ steps.parse.outputs.name6 }}
+        path: ${{ steps.parse.outputs.path6 }}
+        retention-days: ${{ steps.parse.outputs.retention_days6 }}
+        compression-level: ${{ steps.parse.outputs.compression_level6 }}
+    - name: Upload artifact 7
+      if: ${{ steps.parse.outputs.enabled7 == 'true' }}
+      uses: actions/upload-artifact@v4
+      with:
+        name: ${{ steps.parse.outputs.name7 }}
+        path: ${{ steps.parse.outputs.path7 }}
+        retention-days: ${{ steps.parse.outputs.retention_days7 }}
+        compression-level: ${{ steps.parse.outputs.compression_level7 }}
+    - name: Upload artifact 8
+      if: ${{ steps.parse.outputs.enabled8 == 'true' }}
+      uses: actions/upload-artifact@v4
+      with:
+        name: ${{ steps.parse.outputs.name8 }}
+        path: ${{ steps.parse.outputs.path8 }}
+        retention-days: ${{ steps.parse.outputs.retention_days8 }}
+        compression-level: ${{ steps.parse.outputs.compression_level8 }}
+    - name: Upload artifact 9
+      if: ${{ steps.parse.outputs.enabled9 == 'true' }}
+      uses: actions/upload-artifact@v4
+      with:
+        name: ${{ steps.parse.outputs.name9 }}
+        path: ${{ steps.parse.outputs.path9 }}
+        retention-days: ${{ steps.parse.outputs.retention_days9 }}
+        compression-level: ${{ steps.parse.outputs.compression_level9 }}
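For reference, a hedged Python sketch of what a valid 'artifacts' input for this action could look like. The real array is produced by ci/util/artifacts/upload/print_matrix.sh; the names and paths below are illustrative only. The cap check mirrors the action's parse step:

import json

MAX_ARTIFACTS = 10  # mirrors max_artifacts in the parse step above

artifacts = [
    {"name": "zz_jobs-linux-build", "path": "results/job.json",
     "retention_days": 7, "compression_level": 0},
    {"name": "sccache-stats", "path": "build/sccache-stats.json",
     "retention_days": 14, "compression_level": 9},
]

if len(artifacts) > MAX_ARTIFACTS:
    raise SystemExit(f"Too many artifacts: max {MAX_ARTIFACTS}, got {len(artifacts)}")

# The JSON string is what gets passed as the action's 'artifacts' input.
print(json.dumps(artifacts))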

.github/actions/workflow-build/build-workflow.py

Lines changed: 40 additions & 7 deletions
@@ -284,6 +284,8 @@ def get_job_type_info(job):
         result["gpu"] = False
     if "cuda_ext" not in result:
         result["cuda_ext"] = False
+    if "force_producer_ctk" not in result:
+        result["force_producer_ctk"] = None
     if "needs" not in result:
         result["needs"] = None
     if "invoke" not in result:
@@ -378,7 +380,6 @@ def is_nvhpc(matrix_job):
 
 def generate_dispatch_group_name(matrix_job):
     project = get_project(matrix_job["project"])
-    ctk = matrix_job["ctk"]
     device_compiler = get_device_compiler(matrix_job)
     host_compiler = get_host_compiler(matrix_job["cxx"])
 
@@ -390,11 +391,12 @@ def generate_dispatch_group_name(matrix_job):
     else:
         compiler_info = f"{device_compiler['name']}-{device_compiler['version']} {host_compiler['name']}"
 
-    return f"{project['name']} CTK{ctk} {compiler_info}"
+    return f"{project['name']} {compiler_info}"
 
 
 def generate_dispatch_job_name(matrix_job, job_type):
     job_info = get_job_type_info(job_type)
+    ctk = matrix_job["ctk"]
     std_str = ("C++" + str(matrix_job["std"]) + " ") if "std" in matrix_job else ""
     cpu_str = matrix_job["cpu"]
     gpu_str = (", " + matrix_job["gpu"].upper()) if job_info["gpu"] else ""
@@ -410,7 +412,7 @@ def generate_dispatch_job_name(matrix_job, job_type):
 
     host_compiler = get_host_compiler(matrix_job["cxx"])
 
-    config_tag = f"{std_str}{host_compiler['name']}{host_compiler['version']}"
+    config_tag = f"CTK{ctk} {std_str}{host_compiler['name']}{host_compiler['version']}"
 
     extra_info = (
         f":{cuda_compile_arch}{cmake_options}"
@@ -549,7 +551,38 @@ def generate_dispatch_job_json(matrix_job, job_type):
 
 # Create a single build producer, and a separate consumer for each test_job_type:
 def generate_dispatch_two_stage_json(matrix_job, producer_job_type, consumer_job_types):
-    producer_json = generate_dispatch_job_json(matrix_job, producer_job_type)
+    # If any consumer job types have 'force_producer_ctk' set, use that CTK for the producer job.
+    producer_ctk = matrix_job["ctk"]
+    for consumer_job_type in consumer_job_types:
+        job_info = get_job_type_info(consumer_job_type)
+        if job_info["force_producer_ctk"]:
+            producer_ctk = job_info["force_producer_ctk"]
+
+    # Verify that all consumer jobs require the same producer ctk.
+    # If this is needed down the road, we'll need to detect this and split the two-stage job
+    # at an earlier level.
+    for consumer_job_type in consumer_job_types:
+        job_info = get_job_type_info(consumer_job_type)
+        if job_info["force_producer_ctk"] and job_info["force_producer_ctk"] != producer_ctk:
+            raise Exception(
+                f"'force_producer_ctk' value mismatch for consumer job '{consumer_job_type}': "
+                f"expected '{producer_ctk}', got '{job_info['force_producer_ctk']}'"
+                f" in matrix job: {matrix_job['origin']['original_matrix_job']}"
+            )
+
+    if producer_ctk != matrix_job["ctk"]:
+        print(
+            f"Producer job '{producer_job_type}' for matrix job '{matrix_job['origin']['workflow_name']}' "
+            + f"will use a forced CTK version '{producer_ctk}' instead of the matrix job version '{matrix_job['ctk']}'",
+            file=sys.stderr,
+        )
+
+        producer_matrix_job = copy.deepcopy(matrix_job)
+        producer_matrix_job["ctk"] = producer_ctk
+    else:
+        producer_matrix_job = matrix_job
+
+    producer_json = generate_dispatch_job_json(producer_matrix_job, producer_job_type)
 
     consumers_json = []
     for consumer_job_type in consumer_job_types:
@@ -1089,9 +1122,9 @@ def parse_workflow_matrix_jobs(args, workflow_name):
     matrix_jobs = preprocess_matrix_jobs(matrix_jobs, is_exclusion_matrix)
 
     if args and args.dirty_projects is not None and workflow_name != "override":
-        matrix_jobs = [
-            job for job in matrix_jobs if job["project"] in args.dirty_projects
-        ]
+        matrix_jobs = [
+            job for job in matrix_jobs if job["project"] in args.dirty_projects
+        ]
 
     # Don't remove excluded jobs if we're currently parsing them:
     if not is_exclusion_matrix:
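The new two-stage logic above boils down to: any consumer may request a specific producer CTK via 'force_producer_ctk', all such requests must agree, and the producer falls back to the matrix job's own CTK when nothing is forced. A standalone sketch of that resolution rule (hypothetical harness, not the script's API):

def resolve_producer_ctk(matrix_ctk, forced_ctks):
    # forced_ctks: the consumers' force_producer_ctk values (None = not forced).
    forced = {ctk for ctk in forced_ctks if ctk}
    if not forced:
        return matrix_ctk
    if len(forced) > 1:
        # build-workflow.py raises here; all consumers must agree.
        raise ValueError(f"'force_producer_ctk' value mismatch: {sorted(forced)}")
    return forced.pop()

assert resolve_producer_ctk("12.8", [None, None]) == "12.8"
assert resolve_producer_ctk("12.8", [None, "12.0"]) == "12.0"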

.github/actions/workflow-results/action.yml

Lines changed: 1 addition & 1 deletion
@@ -38,7 +38,7 @@ runs:
       uses: actions/download-artifact@v4
       with:
         path: jobs
-        pattern: jobs-*
+        pattern: zz_jobs-*
         merge-multiple: true
 
     - name: Clean up job artifacts
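The pattern rename above presumably tracks a zz_ prefix added to the per-job artifact names elsewhere in this 60-file commit. A tiny Python check of what the new glob selects (fnmatch approximates the action's minimatch semantics; the artifact names are illustrative):

from fnmatch import fnmatch

names = ["zz_jobs-linux", "zz_jobs-windows", "jobs-old-style", "sccache-stats"]
print([n for n in names if fnmatch(n, "zz_jobs-*")])
# -> ['zz_jobs-linux', 'zz_jobs-windows']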
