From 54f182492bcb0ca7ce98c16d57e36a32a38991bd Mon Sep 17 00:00:00 2001 From: Tanmay Garg Date: Tue, 9 Dec 2025 19:43:39 +0100 Subject: [PATCH 01/45] Updated README.md to include release info --- README.md | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/README.md b/README.md index 8e182c7ed5..c632e9b29f 100644 --- a/README.md +++ b/README.md @@ -95,3 +95,8 @@ this repository. The documents in that folder are not well maintained and may be outdated. We are currently working on an own `qlever-docs` repository that will provide extensive documentation and tutorials. However, for the RDF/SPARQL specialist, the self-documenting `qlever` CLI should be sufficient. + +# Release + +Testing manual Github release for QLever on this fork. The version number used +is v0.5.35 to match version number of Qlever-control. From 56541b04f733c4036ddfb350f104374c90d32f0e Mon Sep 17 00:00:00 2001 From: Tanmay Garg Date: Thu, 11 Dec 2025 15:45:59 +0100 Subject: [PATCH 02/45] Update CMakeLists.txt for debian --- CMakeLists.txt | 30 ++++++++++++++++++++++++++++++ 1 file changed, 30 insertions(+) diff --git a/CMakeLists.txt b/CMakeLists.txt index 4728fe867d..316f3604c7 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -153,6 +153,7 @@ FetchContent_Declare( spatialjoin GIT_REPOSITORY https://github.com/ad-freiburg/spatialjoin GIT_TAG 26fcfeb831bae912cfe2c4d8e14f5f7ce8a2fe30 + EXCLUDE_FROM_ALL ) # disable bzip2 and zlib support in spatialjoin, we don't need it add_compile_definitions(SPATIALJOIN_NO_BZIP2=True SPATIALJOIN_NO_ZLIB=True) @@ -485,3 +486,32 @@ qlever_target_link_libraries(VocabularyMergerMain index parser ${CMAKE_THREAD_LI add_executable(PrintIndexVersionMain src/PrintIndexVersionMain.cpp) qlever_target_link_libraries(PrintIndexVersionMain util) + +install(TARGETS + ServerMain + IndexBuilderMain + RUNTIME DESTINATION bin +) + +############################################################### +# CPack packaging +############################################################### + +set(CPACK_PACKAGE_NAME "qlever") +set(CPACK_PACKAGE_VERSION "0.5.35") +set(CPACK_PACKAGE_CONTACT "bast@cs.uni-freiburg.de") + +set(CPACK_OUTPUT_FILE_PREFIX "${CMAKE_BINARY_DIR}/packages") +set(CPACK_DEBIAN_PACKAGE_SHLIBDEPS ON) + +set(CPACK_DEBIAN_FILE_NAME DEB-DEFAULT) + +# 3. Handle FetchContent (Important!) +# Prevent CPack from packaging files installed by your dependencies (like GoogleTest or Abseil) +# unless you explicitly want them. 
+set(CPACK_VERBATIM_VARIABLES YES) + +set(CPACK_PACKAGE_DESCRIPTION "The QLever SPARQL engine") +set(CPACK_PACKAGE_DESCRIPTION_SUMMARY "QLever High-performance SPARQL Engine") + +include(CPack) \ No newline at end of file From fe0c95de760ca20ff5b3f00926efc8de6c674b67 Mon Sep 17 00:00:00 2001 From: Hannah Bast Date: Thu, 11 Dec 2025 15:54:23 +0100 Subject: [PATCH 03/45] Add space --- CMakeLists.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index 316f3604c7..7b87047b42 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -490,7 +490,7 @@ qlever_target_link_libraries(PrintIndexVersionMain util) install(TARGETS ServerMain IndexBuilderMain - RUNTIME DESTINATION bin + RUNTIME DESTINATION bin ) ############################################################### @@ -514,4 +514,4 @@ set(CPACK_VERBATIM_VARIABLES YES) set(CPACK_PACKAGE_DESCRIPTION "The QLever SPARQL engine") set(CPACK_PACKAGE_DESCRIPTION_SUMMARY "QLever High-performance SPARQL Engine") -include(CPack) \ No newline at end of file +include(CPack) From 64a5aea8551c700af95203bdd97fd2b01155cd02 Mon Sep 17 00:00:00 2001 From: Tanmay Garg Date: Sun, 21 Dec 2025 14:57:08 +0100 Subject: [PATCH 04/45] First attempt at adding version number to IndexBuilderMain and ServerMain binaries --- CMakeLists.txt | 12 +++++++++++- ProjectVersion.cmake | 32 ++++++++++++++++++++++++++++++++ src/ServerMain.cpp | 6 ++++++ src/index/IndexBuilderMain.cpp | 6 ++++++ 4 files changed, 55 insertions(+), 1 deletion(-) create mode 100644 ProjectVersion.cmake diff --git a/CMakeLists.txt b/CMakeLists.txt index 1554dad090..bb5b8ccc7f 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -417,6 +417,13 @@ else () COMMAND cmake -P ${CMAKE_CURRENT_SOURCE_DIR}/CompilationInfo.cmake) endif () +# Generate ProjectVersion.h from git describe +add_custom_target(project_version_header ALL + COMMAND cmake -P ${CMAKE_CURRENT_SOURCE_DIR}/ProjectVersion.cmake + BYPRODUCTS ${CMAKE_CURRENT_SOURCE_DIR}/ProjectVersion.h + COMMENT "Generating ProjectVersion.h" +) + set(LOG_LEVEL_FATAL FATAL) set(LOG_LEVEL_ERROR ERROR) set(LOG_LEVEL_WARN WARN) @@ -487,6 +494,9 @@ qlever_target_link_libraries(VocabularyMergerMain index parser ${CMAKE_THREAD_LI add_executable(PrintIndexVersionMain src/PrintIndexVersionMain.cpp) qlever_target_link_libraries(PrintIndexVersionMain util) +add_dependencies(ServerMain project_version_header) +add_dependencies(IndexBuilderMain project_version_header) + install(TARGETS ServerMain IndexBuilderMain @@ -498,7 +508,7 @@ install(TARGETS ############################################################### set(CPACK_PACKAGE_NAME "qlever") -set(CPACK_PACKAGE_VERSION "0.5.35") +# set(CPACK_PACKAGE_VERSION "0.5.35") set(CPACK_PACKAGE_CONTACT "bast@cs.uni-freiburg.de") set(CPACK_OUTPUT_FILE_PREFIX "${CMAKE_BINARY_DIR}/packages") diff --git a/ProjectVersion.cmake b/ProjectVersion.cmake new file mode 100644 index 0000000000..d004b5bc42 --- /dev/null +++ b/ProjectVersion.cmake @@ -0,0 +1,32 @@ +# A small CMake script that writes the current version (from git tags) to a +# header file ProjectVersion.h + +# Try to run `git describe` +execute_process( + COMMAND git describe --tags --dirty + OUTPUT_VARIABLE PROJECT_VERSION + OUTPUT_STRIP_TRAILING_WHITESPACE +) + +# Fallback if `git describe` failed or returned empty +if ((NOT DEFINED PROJECT_VERSION) OR (PROJECT_VERSION STREQUAL "")) + set(PROJECT_VERSION "0.0.0-unknown") +endif() + +message(STATUS "QLever PROJECT_VERSION is ${PROJECT_VERSION}") + +set(CPACK_PACKAGE_VERSION 
"${PROJECT_VERSION}") + +# Escape the version string into a quoted literal +set(QLEVER_PROJECTVERSION "\"${PROJECT_VERSION}\"") + +# Create a header file with include guards and a define +set(CONTENTS "#ifndef QLEVER_SRC_PROJECTVERSION_H +#define QLEVER_SRC_PROJECTVERSION_H + +#define QLEVER_SRC_PROJECTVERSION ${QLEVER_PROJECTVERSION} + +#endif // QLEVER_SRC_PROJECTVERSION_H +") + +file(WRITE ${CMAKE_CURRENT_SOURCE_DIR}/ProjectVersion.h "${CONTENTS}") diff --git a/src/ServerMain.cpp b/src/ServerMain.cpp index 2b63d5330b..fce5d8f683 100644 --- a/src/ServerMain.cpp +++ b/src/ServerMain.cpp @@ -12,6 +12,7 @@ #include #include "CompilationInfo.h" +#include "ProjectVersion.h" #include "engine/Server.h" #include "global/Constants.h" #include "global/RuntimeParameters.h" @@ -64,6 +65,7 @@ int main(int argc, char** argv) { options.add_options()(AD_FWD(args)...); }; add("help,h", "Produce this help message."); + add("version,v", "Print version information."); // TODO Can we output the "required" automatically? add("index-basename,i", po::value(&indexBasename)->required(), "The basename of the index files (required)."); @@ -178,6 +180,10 @@ int main(int argc, char** argv) { std::cout << options << '\n'; return EXIT_SUCCESS; } + if (optionsMap.count("version")) { + std::cout << QLEVER_SRC_PROJECTVERSION << '\n'; + return EXIT_SUCCESS; + } po::notify(optionsMap); } catch (const std::exception& e) { std::cerr << "Error in command-line argument: " << e.what() << '\n'; diff --git a/src/index/IndexBuilderMain.cpp b/src/index/IndexBuilderMain.cpp index bb9fee967c..86cb6f529b 100644 --- a/src/index/IndexBuilderMain.cpp +++ b/src/index/IndexBuilderMain.cpp @@ -15,6 +15,7 @@ #include #include "CompilationInfo.h" +#include "ProjectVersion.h" #include "global/Constants.h" #include "index/ConstantsIndexBuilding.h" #include "libqlever/Qlever.h" @@ -166,6 +167,7 @@ int main(int argc, char** argv) { boostOptions.add_options()(AD_FWD(args)...); }; add("help,h", "Produce this help message."); + add("version,v", "Print version information."); add("index-basename,i", po::value(&config.baseName_)->required(), "The basename of the output files (required)."); add("kg-input-file,f", po::value(&inputFile), @@ -260,6 +262,10 @@ int main(int argc, char** argv) { std::cout << boostOptions << '\n'; return EXIT_SUCCESS; } + if (optionsMap.count("version")) { + std::cout << QLEVER_SRC_PROJECTVERSION << '\n'; + return EXIT_SUCCESS; + } po::notify(optionsMap); } catch (const std::exception& e) { std::cerr << "Error in command-line argument: " << e.what() << '\n'; From b48fbdcdeafeb7b5cb7f2900ea7c6e72d87bbcdf Mon Sep 17 00:00:00 2001 From: Tanmay Garg Date: Sun, 21 Dec 2025 21:30:42 +0100 Subject: [PATCH 05/45] Add ProjectVersion.h.in and move git describe logic to CMakeLists.txt for Cpack version --- CMakeLists.txt | 37 ++++++++++++++++++++++++++++--------- src/ProjectVersion.h.in | 6 ++++++ 2 files changed, 34 insertions(+), 9 deletions(-) create mode 100644 src/ProjectVersion.h.in diff --git a/CMakeLists.txt b/CMakeLists.txt index bb5b8ccc7f..e5df93d23e 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -417,11 +417,27 @@ else () COMMAND cmake -P ${CMAKE_CURRENT_SOURCE_DIR}/CompilationInfo.cmake) endif () -# Generate ProjectVersion.h from git describe -add_custom_target(project_version_header ALL - COMMAND cmake -P ${CMAKE_CURRENT_SOURCE_DIR}/ProjectVersion.cmake - BYPRODUCTS ${CMAKE_CURRENT_SOURCE_DIR}/ProjectVersion.h - COMMENT "Generating ProjectVersion.h" +# # Generate ProjectVersion.h from git describe +# 
add_custom_target(project_version_header ALL +# COMMAND cmake -P ${CMAKE_CURRENT_SOURCE_DIR}/ProjectVersion.cmake +# # BYPRODUCTS ${CMAKE_CURRENT_SOURCE_DIR}/ProjectVersion.h +# COMMENT "Generating ProjectVersion.h" +# ) + +execute_process( + COMMAND git describe --tags --dirty + OUTPUT_VARIABLE PROJECT_VERSION + OUTPUT_STRIP_TRAILING_WHITESPACE +) + +if(PROJECT_VERSION STREQUAL "") + set(PROJECT_VERSION "0.0.0-unknown") +endif() + +configure_file( + ${CMAKE_SOURCE_DIR}/src/ProjectVersion.h.in + ${CMAKE_CURRENT_BINARY_DIR}/ProjectVersion.h + @ONLY ) set(LOG_LEVEL_FATAL FATAL) @@ -494,8 +510,11 @@ qlever_target_link_libraries(VocabularyMergerMain index parser ${CMAKE_THREAD_LI add_executable(PrintIndexVersionMain src/PrintIndexVersionMain.cpp) qlever_target_link_libraries(PrintIndexVersionMain util) -add_dependencies(ServerMain project_version_header) -add_dependencies(IndexBuilderMain project_version_header) +target_include_directories(IndexBuilderMain PRIVATE ${CMAKE_CURRENT_BINARY_DIR}) +target_include_directories(ServerMain PRIVATE ${CMAKE_CURRENT_BINARY_DIR}) + +# add_dependencies(ServerMain project_version_header) +# add_dependencies(IndexBuilderMain project_version_header) install(TARGETS ServerMain @@ -508,10 +527,10 @@ install(TARGETS ############################################################### set(CPACK_PACKAGE_NAME "qlever") -# set(CPACK_PACKAGE_VERSION "0.5.35") +set(CPACK_PACKAGE_VERSION "${PROJECT_VERSION}") set(CPACK_PACKAGE_CONTACT "bast@cs.uni-freiburg.de") -set(CPACK_OUTPUT_FILE_PREFIX "${CMAKE_BINARY_DIR}/packages") +set(CPACK_OUTPUT_FILE_PREFIX "${CMAKE_CURRENT_BINARY_DIR}/packages") set(CPACK_DEBIAN_PACKAGE_SHLIBDEPS ON) set(CPACK_DEBIAN_FILE_NAME DEB-DEFAULT) diff --git a/src/ProjectVersion.h.in b/src/ProjectVersion.h.in new file mode 100644 index 0000000000..a42136bb7f --- /dev/null +++ b/src/ProjectVersion.h.in @@ -0,0 +1,6 @@ +#ifndef QLEVER_SRC_PROJECTVERSION_H +#define QLEVER_SRC_PROJECTVERSION_H + +#define QLEVER_SRC_PROJECTVERSION "@PROJECT_VERSION@" + +#endif \ No newline at end of file From 644cc9c09d7a3f4191835ef6cbd823fe5c1190b6 Mon Sep 17 00:00:00 2001 From: Tanmay Garg Date: Sun, 21 Dec 2025 22:22:09 +0100 Subject: [PATCH 06/45] Change CPACK_PROJECT_VERSION to use Debian packaging naming scheme --- CMakeLists.txt | 32 ++++++++++++++++++++++++++++++-- src/ServerMain.cpp | 2 +- src/index/IndexBuilderMain.cpp | 2 +- 3 files changed, 32 insertions(+), 4 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index e5df93d23e..c20ca2ed9f 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -425,7 +425,7 @@ endif () # ) execute_process( - COMMAND git describe --tags --dirty + COMMAND git describe --tags --always OUTPUT_VARIABLE PROJECT_VERSION OUTPUT_STRIP_TRAILING_WHITESPACE ) @@ -527,7 +527,35 @@ install(TARGETS ############################################################### set(CPACK_PACKAGE_NAME "qlever") -set(CPACK_PACKAGE_VERSION "${PROJECT_VERSION}") + +# Set CPACK_PACKAGE_VERSION according to Debian packaging naming rules +# If git describe output begins with v + semver: parse it. 
+if (PROJECT_VERSION MATCHES "^v([0-9]+\\.[0-9]+\\.[0-9]+)(-[0-9]+)?(-g[0-9a-f]+)?$") + # Extract `major.minor.patch` + string(REGEX REPLACE "^v([0-9]+\\.[0-9]+\\.[0-9]+).*$" "\\1" TAG_VERSION "${PROJECT_VERSION}") + + # Extract commit ahead count if any + string(REGEX MATCH "^v[0-9.]+-([0-9]+)" _ "${PROJECT_VERSION}") + + if (CMAKE_MATCH_1) + # tag + commits: revision exists + set(CPACK_PACKAGE_VERSION "${TAG_VERSION}-${CMAKE_MATCH_1}") + else() + # exact tag only + set(CPACK_PACKAGE_VERSION "${TAG_VERSION}") + endif() + +else() + # All other tags OR no tag: 0.0.0- + if (PROJECT_VERSION MATCHES "-g([0-9a-f]+)$") + set(HASH "${CMAKE_MATCH_1}") + else() + set(HASH "${PROJECT_VERSION}") + endif() + + set(CPACK_PACKAGE_VERSION "0.0.0-${HASH}") +endif() + set(CPACK_PACKAGE_CONTACT "bast@cs.uni-freiburg.de") set(CPACK_OUTPUT_FILE_PREFIX "${CMAKE_CURRENT_BINARY_DIR}/packages") diff --git a/src/ServerMain.cpp b/src/ServerMain.cpp index fce5d8f683..d572f1eae9 100644 --- a/src/ServerMain.cpp +++ b/src/ServerMain.cpp @@ -181,7 +181,7 @@ int main(int argc, char** argv) { return EXIT_SUCCESS; } if (optionsMap.count("version")) { - std::cout << QLEVER_SRC_PROJECTVERSION << '\n'; + std::cout << "QLever ServerMain " << QLEVER_SRC_PROJECTVERSION << '\n'; return EXIT_SUCCESS; } po::notify(optionsMap); diff --git a/src/index/IndexBuilderMain.cpp b/src/index/IndexBuilderMain.cpp index 86cb6f529b..d75b641cae 100644 --- a/src/index/IndexBuilderMain.cpp +++ b/src/index/IndexBuilderMain.cpp @@ -263,7 +263,7 @@ int main(int argc, char** argv) { return EXIT_SUCCESS; } if (optionsMap.count("version")) { - std::cout << QLEVER_SRC_PROJECTVERSION << '\n'; + std::cout << "QLever IndexBuilderMain " << QLEVER_SRC_PROJECTVERSION << '\n'; return EXIT_SUCCESS; } po::notify(optionsMap); From be4b568e159bbd7eef6b8570165952662ea65934 Mon Sep 17 00:00:00 2001 From: Tanmay Garg Date: Mon, 22 Dec 2025 11:36:49 +0100 Subject: [PATCH 07/45] Remove old code and improve debian cpack version number logic --- CMakeLists.txt | 61 +++++++++++++++++--------------------------- ProjectVersion.cmake | 32 ----------------------- 2 files changed, 24 insertions(+), 69 deletions(-) delete mode 100644 ProjectVersion.cmake diff --git a/CMakeLists.txt b/CMakeLists.txt index c20ca2ed9f..0fcd08e7ec 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -417,13 +417,7 @@ else () COMMAND cmake -P ${CMAKE_CURRENT_SOURCE_DIR}/CompilationInfo.cmake) endif () -# # Generate ProjectVersion.h from git describe -# add_custom_target(project_version_header ALL -# COMMAND cmake -P ${CMAKE_CURRENT_SOURCE_DIR}/ProjectVersion.cmake -# # BYPRODUCTS ${CMAKE_CURRENT_SOURCE_DIR}/ProjectVersion.h -# COMMENT "Generating ProjectVersion.h" -# ) - +# Use git describe to get the github tag-based project version execute_process( COMMAND git describe --tags --always OUTPUT_VARIABLE PROJECT_VERSION @@ -513,9 +507,6 @@ qlever_target_link_libraries(PrintIndexVersionMain util) target_include_directories(IndexBuilderMain PRIVATE ${CMAKE_CURRENT_BINARY_DIR}) target_include_directories(ServerMain PRIVATE ${CMAKE_CURRENT_BINARY_DIR}) -# add_dependencies(ServerMain project_version_header) -# add_dependencies(IndexBuilderMain project_version_header) - install(TARGETS ServerMain IndexBuilderMain @@ -529,31 +520,28 @@ install(TARGETS set(CPACK_PACKAGE_NAME "qlever") # Set CPACK_PACKAGE_VERSION according to Debian packaging naming rules -# If git describe output begins with v + semver: parse it. 
+# If git describe output begins with v + semver: if (PROJECT_VERSION MATCHES "^v([0-9]+\\.[0-9]+\\.[0-9]+)(-[0-9]+)?(-g[0-9a-f]+)?$") - # Extract `major.minor.patch` - string(REGEX REPLACE "^v([0-9]+\\.[0-9]+\\.[0-9]+).*$" "\\1" TAG_VERSION "${PROJECT_VERSION}") - - # Extract commit ahead count if any - string(REGEX MATCH "^v[0-9.]+-([0-9]+)" _ "${PROJECT_VERSION}") - - if (CMAKE_MATCH_1) - # tag + commits: revision exists - set(CPACK_PACKAGE_VERSION "${TAG_VERSION}-${CMAKE_MATCH_1}") - else() - # exact tag only - set(CPACK_PACKAGE_VERSION "${TAG_VERSION}") - endif() + + # CMAKE_MATCH_1 is "major.minor.patch" + set(TAG_VERSION "${CMAKE_MATCH_1}") + + # CMAKE_MATCH_2 is "-commits_ahead" (or empty) + if (CMAKE_MATCH_2) + string(SUBSTRING "${CMAKE_MATCH_2}" 1 -1 COMMITS_AHEAD) + set(CPACK_PACKAGE_VERSION "${TAG_VERSION}-${COMMITS_AHEAD}") + else() + set(CPACK_PACKAGE_VERSION "${TAG_VERSION}") + endif() else() - # All other tags OR no tag: 0.0.0- - if (PROJECT_VERSION MATCHES "-g([0-9a-f]+)$") - set(HASH "${CMAKE_MATCH_1}") - else() - set(HASH "${PROJECT_VERSION}") - endif() - - set(CPACK_PACKAGE_VERSION "0.0.0-${HASH}") + # Fallback for non-tagged builds: Use commit hash + if (PROJECT_VERSION MATCHES "-g([0-9a-f]+)$") + set(HASH "${CMAKE_MATCH_1}") + else() + set(HASH "${PROJECT_VERSION}") + endif() + set(CPACK_PACKAGE_VERSION "0.0.0-${HASH}") endif() set(CPACK_PACKAGE_CONTACT "bast@cs.uni-freiburg.de") @@ -563,12 +551,11 @@ set(CPACK_DEBIAN_PACKAGE_SHLIBDEPS ON) set(CPACK_DEBIAN_FILE_NAME DEB-DEFAULT) -# 3. Handle FetchContent (Important!) -# Prevent CPack from packaging files installed by your dependencies (like GoogleTest or Abseil) -# unless you explicitly want them. +# Handle FetchContent (Important!) +# Prevent CPack from packaging files installed by dependencies (like GoogleTest or Abseil) set(CPACK_VERBATIM_VARIABLES YES) -set(CPACK_PACKAGE_DESCRIPTION "The QLever SPARQL engine") -set(CPACK_PACKAGE_DESCRIPTION_SUMMARY "QLever High-performance SPARQL Engine") +set(CPACK_PACKAGE_DESCRIPTION "QLever Graph Database") +set(CPACK_PACKAGE_DESCRIPTION_SUMMARY "QLever Graph Database implementing the RDF and SPARQL standards.") include(CPack) diff --git a/ProjectVersion.cmake b/ProjectVersion.cmake deleted file mode 100644 index d004b5bc42..0000000000 --- a/ProjectVersion.cmake +++ /dev/null @@ -1,32 +0,0 @@ -# A small CMake script that writes the current version (from git tags) to a -# header file ProjectVersion.h - -# Try to run `git describe` -execute_process( - COMMAND git describe --tags --dirty - OUTPUT_VARIABLE PROJECT_VERSION - OUTPUT_STRIP_TRAILING_WHITESPACE -) - -# Fallback if `git describe` failed or returned empty -if ((NOT DEFINED PROJECT_VERSION) OR (PROJECT_VERSION STREQUAL "")) - set(PROJECT_VERSION "0.0.0-unknown") -endif() - -message(STATUS "QLever PROJECT_VERSION is ${PROJECT_VERSION}") - -set(CPACK_PACKAGE_VERSION "${PROJECT_VERSION}") - -# Escape the version string into a quoted literal -set(QLEVER_PROJECTVERSION "\"${PROJECT_VERSION}\"") - -# Create a header file with include guards and a define -set(CONTENTS "#ifndef QLEVER_SRC_PROJECTVERSION_H -#define QLEVER_SRC_PROJECTVERSION_H - -#define QLEVER_SRC_PROJECTVERSION ${QLEVER_PROJECTVERSION} - -#endif // QLEVER_SRC_PROJECTVERSION_H -") - -file(WRITE ${CMAKE_CURRENT_SOURCE_DIR}/ProjectVersion.h "${CONTENTS}") From bd45e841bd552207ef05fc89d63f49262ead2d92 Mon Sep 17 00:00:00 2001 From: Tanmay Garg Date: Mon, 22 Dec 2025 15:33:19 +0100 Subject: [PATCH 08/45] Remove README changes --- README.md | 5 ----- 1 file changed, 5 
deletions(-) diff --git a/README.md b/README.md index c632e9b29f..8e182c7ed5 100644 --- a/README.md +++ b/README.md @@ -95,8 +95,3 @@ this repository. The documents in that folder are not well maintained and may be outdated. We are currently working on an own `qlever-docs` repository that will provide extensive documentation and tutorials. However, for the RDF/SPARQL specialist, the self-documenting `qlever` CLI should be sufficient. - -# Release - -Testing manual Github release for QLever on this fork. The version number used -is v0.5.35 to match version number of Qlever-control. From 2c1dc65d96e0ba631b5f2e2461eefe75d275c8e3 Mon Sep 17 00:00:00 2001 From: Hannah Bast Date: Mon, 22 Dec 2025 16:46:48 +0100 Subject: [PATCH 09/45] Make clang-format happy --- src/ServerMain.cpp | 4 ++-- src/index/IndexBuilderMain.cpp | 3 ++- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/src/ServerMain.cpp b/src/ServerMain.cpp index d572f1eae9..6b2d22d8a6 100644 --- a/src/ServerMain.cpp +++ b/src/ServerMain.cpp @@ -181,8 +181,8 @@ int main(int argc, char** argv) { return EXIT_SUCCESS; } if (optionsMap.count("version")) { - std::cout << "QLever ServerMain " << QLEVER_SRC_PROJECTVERSION << '\n'; - return EXIT_SUCCESS; + std::cout << "QLever ServerMain " << QLEVER_SRC_PROJECTVERSION << '\n'; + return EXIT_SUCCESS; } po::notify(optionsMap); } catch (const std::exception& e) { diff --git a/src/index/IndexBuilderMain.cpp b/src/index/IndexBuilderMain.cpp index d75b641cae..55b0e0d75f 100644 --- a/src/index/IndexBuilderMain.cpp +++ b/src/index/IndexBuilderMain.cpp @@ -263,7 +263,8 @@ int main(int argc, char** argv) { return EXIT_SUCCESS; } if (optionsMap.count("version")) { - std::cout << "QLever IndexBuilderMain " << QLEVER_SRC_PROJECTVERSION << '\n'; + std::cout << "QLever IndexBuilderMain " << QLEVER_SRC_PROJECTVERSION + << '\n'; return EXIT_SUCCESS; } po::notify(optionsMap); From b58f63bb0265f63c2e024a3fead6df1a65df62a4 Mon Sep 17 00:00:00 2001 From: Tanmay Garg Date: Tue, 6 Jan 2026 18:18:32 +0100 Subject: [PATCH 10/45] Install only necessary components to avoid EXCLUDE_FROM_ALL hack --- CMakeLists.txt | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index 0fcd08e7ec..d30d81cd60 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -153,7 +153,6 @@ FetchContent_Declare( spatialjoin GIT_REPOSITORY https://github.com/ad-freiburg/spatialjoin GIT_TAG c358e479ebb5f40df99522e69a0b52d73416020b - EXCLUDE_FROM_ALL ) # disable bzip2 and zlib support in spatialjoin, we don't need it add_compile_definitions(SPATIALJOIN_NO_BZIP2=True SPATIALJOIN_NO_ZLIB=True) @@ -417,7 +416,7 @@ else () COMMAND cmake -P ${CMAKE_CURRENT_SOURCE_DIR}/CompilationInfo.cmake) endif () -# Use git describe to get the github tag-based project version +# For the most recent tag return `{tag}-{how many commits back the tag is}-{hash of last commit}` execute_process( COMMAND git describe --tags --always OUTPUT_VARIABLE PROJECT_VERSION @@ -511,6 +510,7 @@ install(TARGETS ServerMain IndexBuilderMain RUNTIME DESTINATION bin + COMPONENT QLeverPackaging ) ############################################################### @@ -518,6 +518,7 @@ install(TARGETS ############################################################### set(CPACK_PACKAGE_NAME "qlever") +set(CPACK_COMPONENTS_ALL QLeverPackaging) # Set CPACK_PACKAGE_VERSION according to Debian packaging naming rules # If git describe output begins with v + semver: @@ -545,14 +546,13 @@ else() endif() set(CPACK_PACKAGE_CONTACT 
"bast@cs.uni-freiburg.de") +set(CPACK_PACKAGE_HOMEPAGE_URL "https://github.com/ad-freiburg/qlever") set(CPACK_OUTPUT_FILE_PREFIX "${CMAKE_CURRENT_BINARY_DIR}/packages") set(CPACK_DEBIAN_PACKAGE_SHLIBDEPS ON) set(CPACK_DEBIAN_FILE_NAME DEB-DEFAULT) -# Handle FetchContent (Important!) -# Prevent CPack from packaging files installed by dependencies (like GoogleTest or Abseil) set(CPACK_VERBATIM_VARIABLES YES) set(CPACK_PACKAGE_DESCRIPTION "QLever Graph Database") From 56bfdd30aaf7cb6b22303d3847c0aff9a4168e92 Mon Sep 17 00:00:00 2001 From: Tanmay Garg Date: Tue, 6 Jan 2026 22:35:28 +0100 Subject: [PATCH 11/45] Use EXCLUDE_FROM_ALL in FetchContent_Declare for all external libraries to exclude their install rules from being called when running cpack --- CMakeLists.txt | 18 +++++++++++++++--- 1 file changed, 15 insertions(+), 3 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index d30d81cd60..416c856a9b 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -108,6 +108,7 @@ FetchContent_Declare( googletest GIT_REPOSITORY https://github.com/google/googletest.git GIT_TAG 7917641ff965959afae189afb5f052524395525c # main branch on 2025/09/11 + EXCLUDE_FROM_ALL ) ################################ @@ -119,6 +120,7 @@ FetchContent_Declare( nlohmann-json URL https://github.com/nlohmann/json/releases/download/v3.12.0/json.tar.xz URL_HASH SHA3_224=0ad805c53f1769489a9c6bdeefd0a36c19f74239785cdb9c4344e8c1 + EXCLUDE_FROM_ALL ) ############################### @@ -131,6 +133,7 @@ FetchContent_Declare( GIT_REPOSITORY https://github.com/antlr/antlr4.git GIT_TAG cc82115a4e7f53d71d9d905caa2c2dfa4da58899 # 4.13.12 SOURCE_SUBDIR runtime/Cpp + EXCLUDE_FROM_ALL ) ################################# @@ -144,6 +147,7 @@ FetchContent_Declare( # This branch removes some differences in the interface between `range-v3` and `std::ranges` s.t. # the former can be used as an (almost) drop-in replacement for the latter. 
GIT_TAG 42340ef354f7b4e4660268b788e37008d9cc85aa # branch fork-for-qlever + EXCLUDE_FROM_ALL ) ################################# @@ -153,6 +157,7 @@ FetchContent_Declare( spatialjoin GIT_REPOSITORY https://github.com/ad-freiburg/spatialjoin GIT_TAG c358e479ebb5f40df99522e69a0b52d73416020b + EXCLUDE_FROM_ALL ) # disable bzip2 and zlib support in spatialjoin, we don't need it add_compile_definitions(SPATIALJOIN_NO_BZIP2=True SPATIALJOIN_NO_ZLIB=True) @@ -280,6 +285,7 @@ FetchContent_Declare( ctre GIT_REPOSITORY https://github.com/hanickadot/compile-time-regular-expressions.git GIT_TAG e34c26ba149b9fd9c34aa0f678e39739641a0d1e # v3.10.0 + EXCLUDE_FROM_ALL ) ################################ @@ -292,6 +298,7 @@ FetchContent_Declare( abseil GIT_REPOSITORY https://github.com/abseil/abseil-cpp.git GIT_TAG 93ac3a4f9ee7792af399cebd873ee99ce15aed08 # 2024-05-16 + EXCLUDE_FROM_ALL ) ################################ @@ -303,6 +310,7 @@ FetchContent_Declare( GIT_REPOSITORY https://github.com/google/s2geometry.git GIT_TAG 5b5eccd54a08ae03b4467e79ffbb076d0b5f221e #version 0.11.1 SYSTEM + EXCLUDE_FROM_ALL ) @@ -351,6 +359,7 @@ FetchContent_Declare( fsst GIT_REPOSITORY https://github.com/cwida/fsst.git GIT_TAG b228af6356196095eaf9f8f5654b0635f969661e # main branch from 27th May 2025 + EXCLUDE_FROM_ALL ) @@ -364,6 +373,7 @@ FetchContent_Declare( GIT_TAG bc0faab533e2b27b85b8ad312abf061e33ed6b5d # v.2023-11-01 GIT_SHALLOW TRUE OVERRIDE_FIND_PACKAGE + EXCLUDE_FROM_ALL ) ################################ @@ -416,7 +426,7 @@ else () COMMAND cmake -P ${CMAKE_CURRENT_SOURCE_DIR}/CompilationInfo.cmake) endif () -# For the most recent tag return `{tag}-{how many commits back the tag is}-{hash of last commit}` +# For the latest commit return `{most recent tag}-{how many commits back the tag is}-{hash of latest commit}` execute_process( COMMAND git describe --tags --always OUTPUT_VARIABLE PROJECT_VERSION @@ -503,14 +513,17 @@ qlever_target_link_libraries(VocabularyMergerMain index parser ${CMAKE_THREAD_LI add_executable(PrintIndexVersionMain src/PrintIndexVersionMain.cpp) qlever_target_link_libraries(PrintIndexVersionMain util) +# For IndexBuilderMain and ServerMain to find generated ProjectVersion.h target_include_directories(IndexBuilderMain PRIVATE ${CMAKE_CURRENT_BINARY_DIR}) target_include_directories(ServerMain PRIVATE ${CMAKE_CURRENT_BINARY_DIR}) +# set_target_properties(IndexBuilderMain PROPERTIES OUTPUT_NAME "qlever-index-builder") +# set_target_properties(ServerMain PROPERTIES OUTPUT_NAME "qlever-server") + install(TARGETS ServerMain IndexBuilderMain RUNTIME DESTINATION bin - COMPONENT QLeverPackaging ) ############################################################### @@ -518,7 +531,6 @@ install(TARGETS ############################################################### set(CPACK_PACKAGE_NAME "qlever") -set(CPACK_COMPONENTS_ALL QLeverPackaging) # Set CPACK_PACKAGE_VERSION according to Debian packaging naming rules # If git describe output begins with v + semver: From 0d7e3549ff951194a504dc5c41b1b39be7d47d42 Mon Sep 17 00:00:00 2001 From: Tanmay Garg Date: Wed, 7 Jan 2026 11:24:51 +0100 Subject: [PATCH 12/45] Try AppleClang support --- CMakeLists.txt | 16 +++++++++++++++- 1 file changed, 15 insertions(+), 1 deletion(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index 416c856a9b..eaca935fd1 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -4,6 +4,13 @@ if (POLICY CMP0167) endif () project(QLever C CXX) +if (APPLE) + # Add Homebrew paths for M-series + list(APPEND CMAKE_PREFIX_PATH "/opt/homebrew") + 
link_directories(/opt/homebrew/lib) + include_directories(/opt/homebrew/include) +endif () + # C/C++ Versions set(CMAKE_C_STANDARD 11) set(CMAKE_C_STANDARD_REQUIRED ON) @@ -51,7 +58,7 @@ if (CMAKE_CXX_COMPILER_ID STREQUAL "GNU") add_compile_options(-fcoroutines) endif () -elseif (CMAKE_CXX_COMPILER_ID STREQUAL "Clang") +elseif (CMAKE_CXX_COMPILER_ID STREQUAL "Clang" OR CMAKE_CXX_COMPILER_ID STREQUAL "AppleClang" ) if (CMAKE_CXX_COMPILER_VERSION VERSION_LESS "16.0.0" AND NOT COMPILER_VERSION_CHECK_DEACTIVATED) MESSAGE(FATAL_ERROR "Clang++ versions older than 16.0 are not supported by QLever") endif () @@ -570,4 +577,11 @@ set(CPACK_VERBATIM_VARIABLES YES) set(CPACK_PACKAGE_DESCRIPTION "QLever Graph Database") set(CPACK_PACKAGE_DESCRIPTION_SUMMARY "QLever Graph Database implementing the RDF and SPARQL standards.") +if(APPLE) + set(CPACK_GENERATOR "DragNDrop") + set(CPACK_DMG_VOLUME_NAME "QLever") + set(CPACK_PACKAGE_FILE_NAME + "${CPACK_PACKAGE_NAME}-${CPACK_PACKAGE_VERSION}-macos") +endif() + include(CPack) From 656ea615bd05510702f6601d593707e1d5932f8f Mon Sep 17 00:00:00 2001 From: Tanmay Garg Date: Wed, 7 Jan 2026 14:16:07 +0100 Subject: [PATCH 13/45] More apple-clang changes and O2 optimization --- CMakeLists.txt | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index eaca935fd1..02639678d2 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -87,7 +87,7 @@ set(CMAKE_CXX_FLAGS_ASAN if ("${CMAKE_GENERATOR}" STREQUAL "Ninja") if ("${CMAKE_CXX_COMPILER_ID}" STREQUAL "GNU") add_compile_options(-fdiagnostics-color=always) - elseif ("${CMAKE_CXX_COMPILER_ID}" STREQUAL "Clang") + elseif ("${CMAKE_CXX_COMPILER_ID}" STREQUAL "Clang" OR "${CMAKE_CXX_COMPILER_ID}" STREQUAL "AppleClang") add_compile_options(-fcolor-diagnostics) endif () endif () @@ -278,9 +278,11 @@ if (${ALLOW_SHUTDOWN}) message(STATUS "Adding -DALLOW_SHUTDOWN") endif () - -set(CMAKE_CXX_FLAGS_RELEASE "${CMAKE_CXX_FLAGS_RELEASE} -O3") - +if (APPLE) + set(CMAKE_CXX_FLAGS_RELEASE "${CMAKE_CXX_FLAGS_RELEASE} -O2") +else () + set(CMAKE_CXX_FLAGS_RELEASE "${CMAKE_CXX_FLAGS_RELEASE} -O3") +endif () set(CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG}") set(CMAKE_C_FLAGS_DEBUG "${CMAKE_C_FLAGS_DEBUG}") From 4fa172f1d79fa6d92ff1ccfbb6deecbc7a6503df Mon Sep 17 00:00:00 2001 From: Tanmay Garg Date: Fri, 9 Jan 2026 16:12:19 +0100 Subject: [PATCH 14/45] Fix some compiler issues to make the build work on AppleClang --- src/engine/GroupByImpl.cpp | 20 +++++++++++++------ src/rdfTypes/GeometryInfoHelpersImpl.h | 6 +++++- src/util/JoinAlgorithms/IndexNestedLoopJoin.h | 11 ++++++++-- 3 files changed, 28 insertions(+), 9 deletions(-) diff --git a/src/engine/GroupByImpl.cpp b/src/engine/GroupByImpl.cpp index bbe9a46c9b..ff1f118226 100644 --- a/src/engine/GroupByImpl.cpp +++ b/src/engine/GroupByImpl.cpp @@ -123,13 +123,21 @@ class LazyGroupByRange currentGroupBlock_); groupSplitAcrossTables_ = false; } else { - // This processes the whole block in batches if possible - IdTableStatic table = - std::move(resultTable_).toStatic(); - parent_->processBlock(table, aggregates_, evaluationContext, - blockStart, blockEnd, - ¤tLocalVocab_, groupByCols_); + // This processes the whole block in batches if possible. + // Note: Use `template` keyword for dependent template name. 
+ IdTableStatic table{ + std::move(resultTable_).template toStatic()}; + parent_->template processBlock( + table, aggregates_, evaluationContext, blockStart, blockEnd, + ¤tLocalVocab_, groupByCols_); resultTable_ = std::move(table).toDynamic(); + // // This processes the whole block in batches if possible + // IdTableStatic table = + // std::move(resultTable_).toStatic(); + // parent_->processBlock(table, aggregates_, evaluationContext, + // blockStart, blockEnd, + // ¤tLocalVocab_, groupByCols_); + // resultTable_ = std::move(table).toDynamic(); } } diff --git a/src/rdfTypes/GeometryInfoHelpersImpl.h b/src/rdfTypes/GeometryInfoHelpersImpl.h index df07dfaf87..b370b61e53 100644 --- a/src/rdfTypes/GeometryInfoHelpersImpl.h +++ b/src/rdfTypes/GeometryInfoHelpersImpl.h @@ -26,6 +26,7 @@ #include "global/Constants.h" #include "rdfTypes/GeoPoint.h" +#include "util/Views.h" #include "rdfTypes/GeometryInfo.h" #include "rdfTypes/Literal.h" #include "util/Exception.h" @@ -341,7 +342,10 @@ struct MetricLengthVisitor { static_assert(ad_utility::similarToInstantiation); return ::ranges::accumulate( - ::ranges::transform_view(multiGeom, MetricLengthVisitor{}), 0); + // ::ranges::transform_view(multiGeom, MetricLengthVisitor{}), 0); + ::ranges::transform_view(ad_utility::allView(multiGeom), + MetricLengthVisitor{}), + 0); } // Compute the length for the custom container type `AnyGeometry` from diff --git a/src/util/JoinAlgorithms/IndexNestedLoopJoin.h b/src/util/JoinAlgorithms/IndexNestedLoopJoin.h index 3e5b68155d..bf2ceacb86 100644 --- a/src/util/JoinAlgorithms/IndexNestedLoopJoin.h +++ b/src/util/JoinAlgorithms/IndexNestedLoopJoin.h @@ -238,8 +238,15 @@ class IndexNestedLoopJoin { for (const auto& rightRow : rightTable) { size_t leftOffset = 0; size_t leftSize = leftTable.size(); - for (const auto& [rightId, leftCol] : - ::ranges::zip_view(rightRow, leftColumns)) { + // for (const auto& [rightId, leftCol] : + // ::ranges::zip_view(rightRow, leftColumns)) { + // Use index-based iteration instead of ranges::zip_view to avoid + // copying RowReference (which has a deleted copy constructor) on + // AppleClang. 
+ AD_CORRECTNESS_CHECK(rightRow.size() == leftColumns.size()); + for (size_t colIdx = 0; colIdx < rightRow.size(); ++colIdx) { + const auto& rightId = rightRow[colIdx]; + const auto& leftCol = leftColumns[colIdx]; AD_EXPENSIVE_CHECK(!rightId.isUndefined()); auto currentStart = leftCol.begin() + leftOffset; auto subrange = ql::ranges::equal_range( From e60648f53b1590430e8493100003c8b1863b1c97 Mon Sep 17 00:00:00 2001 From: Tanmay Garg Date: Mon, 12 Jan 2026 19:13:19 +0100 Subject: [PATCH 15/45] Use std::promise instead of std::packaged_task to work around AppleClang/LLVM Clang compiler crash with packaged_task + Boost.Asio --- CMakeLists.txt | 6 +----- test/util/AsyncTestHelpers.h | 33 ++++++++++++++++++++++++--------- 2 files changed, 25 insertions(+), 14 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index 02639678d2..2a99a30bd3 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -278,11 +278,7 @@ if (${ALLOW_SHUTDOWN}) message(STATUS "Adding -DALLOW_SHUTDOWN") endif () -if (APPLE) - set(CMAKE_CXX_FLAGS_RELEASE "${CMAKE_CXX_FLAGS_RELEASE} -O2") -else () - set(CMAKE_CXX_FLAGS_RELEASE "${CMAKE_CXX_FLAGS_RELEASE} -O3") -endif () +set(CMAKE_CXX_FLAGS_RELEASE "${CMAKE_CXX_FLAGS_RELEASE} -O3") set(CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG}") set(CMAKE_C_FLAGS_DEBUG "${CMAKE_C_FLAGS_DEBUG}") diff --git a/test/util/AsyncTestHelpers.h b/test/util/AsyncTestHelpers.h index b94daef0e9..8239ca0f4f 100644 --- a/test/util/AsyncTestHelpers.h +++ b/test/util/AsyncTestHelpers.h @@ -28,15 +28,30 @@ concept TestableFunction = std::is_invocable_r_v; template requires(TestableCoroutine || TestableFunction) void runAsyncTest(Func innerRun, size_t numThreads) { - auto ioContext = std::make_shared(); - auto future = [&]() { - if constexpr (TestableCoroutine) { - return net::co_spawn(*ioContext, innerRun(*ioContext), net::use_future); - } else { - return net::post(*ioContext, std::packaged_task{ - [&] { innerRun(*ioContext); }}); - } - }(); + auto ioContext = std::make_shared(); + + auto future = [&]() { + if constexpr (TestableCoroutine) { + return net::co_spawn(*ioContext, innerRun(*ioContext), net::use_future); + } else { + // Use std::promise instead of std::packaged_task to work around + // AppleClang/LLVM Clang compiler crash with packaged_task + Boost.Asio + auto promise = std::make_shared>(); + auto fut = promise->get_future(); + net::post(*ioContext, [&innerRun, ioContext, promise]() { + try { + innerRun(*ioContext); + promise->set_value(); + } catch (...) 
{ + promise->set_exception(std::current_exception()); + } + }); + return fut; + // Original code that causes AppleClang segfault: + // return net::post(*ioContext, std::packaged_task{ + // [&] { innerRun(*ioContext); }}); + } + }(); std::vector workers{}; From 751abb08c4b1745ecbc9bbc250613b762dd3fb7b Mon Sep 17 00:00:00 2001 From: Tanmay Garg Date: Tue, 13 Jan 2026 00:32:04 +0100 Subject: [PATCH 16/45] Update qlever binary names and remove old apple .dmg cpack block --- CMakeLists.txt | 11 ++--------- 1 file changed, 2 insertions(+), 9 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index 2a99a30bd3..c79fff7a12 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -522,8 +522,8 @@ qlever_target_link_libraries(PrintIndexVersionMain util) target_include_directories(IndexBuilderMain PRIVATE ${CMAKE_CURRENT_BINARY_DIR}) target_include_directories(ServerMain PRIVATE ${CMAKE_CURRENT_BINARY_DIR}) -# set_target_properties(IndexBuilderMain PROPERTIES OUTPUT_NAME "qlever-index-builder") -# set_target_properties(ServerMain PROPERTIES OUTPUT_NAME "qlever-server") +set_target_properties(IndexBuilderMain PROPERTIES OUTPUT_NAME "qlever-loader") +set_target_properties(ServerMain PROPERTIES OUTPUT_NAME "qlever-server") install(TARGETS ServerMain @@ -575,11 +575,4 @@ set(CPACK_VERBATIM_VARIABLES YES) set(CPACK_PACKAGE_DESCRIPTION "QLever Graph Database") set(CPACK_PACKAGE_DESCRIPTION_SUMMARY "QLever Graph Database implementing the RDF and SPARQL standards.") -if(APPLE) - set(CPACK_GENERATOR "DragNDrop") - set(CPACK_DMG_VOLUME_NAME "QLever") - set(CPACK_PACKAGE_FILE_NAME - "${CPACK_PACKAGE_NAME}-${CPACK_PACKAGE_VERSION}-macos") -endif() - include(CPack) From 10d37d21083e78166db9a5389ecb45b7531b7be2 Mon Sep 17 00:00:00 2001 From: Tanmay Garg Date: Tue, 13 Jan 2026 23:38:11 +0100 Subject: [PATCH 17/45] Add dynamic homebrew prefix detection and fix formatting --- CMakeLists.txt | 22 +++++++++--- src/rdfTypes/GeometryInfoHelpersImpl.h | 2 +- test/util/AsyncTestHelpers.h | 46 +++++++++++++------------- 3 files changed, 41 insertions(+), 29 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index c79fff7a12..c8cd4efb48 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -5,10 +5,22 @@ endif () project(QLever C CXX) if (APPLE) - # Add Homebrew paths for M-series - list(APPEND CMAKE_PREFIX_PATH "/opt/homebrew") - link_directories(/opt/homebrew/lib) - include_directories(/opt/homebrew/include) + # Dynamically detect Homebrew prefix + execute_process( + COMMAND brew --prefix + OUTPUT_VARIABLE HOMEBREW_PREFIX + OUTPUT_STRIP_TRAILING_WHITESPACE + RESULT_VARIABLE BREW_RESULT + ERROR_QUIET + ) + if (BREW_RESULT EQUAL 0 AND HOMEBREW_PREFIX) + message(STATUS "Found Homebrew at: ${HOMEBREW_PREFIX}") + list(APPEND CMAKE_PREFIX_PATH "${HOMEBREW_PREFIX}") + link_directories(${HOMEBREW_PREFIX}/lib) + include_directories(${HOMEBREW_PREFIX}/include) + else() + message(WARNING "Homebrew not found. You may need to manually set CMAKE_PREFIX_PATH to locate dependencies (Boost, ICU, OpenSSL, etc.)") + endif() endif () # C/C++ Versions @@ -69,7 +81,7 @@ elseif (CMAKE_CXX_COMPILER_ID STREQUAL "Clang" OR CMAKE_CXX_COMPILER_ID STREQUAL set(RANGE_V3_REQUIRED_BY_COMPILER ON) endif () else () - MESSAGE(FATAL_ERROR "QLever currently only supports the G++ or LLVM-Clang++ compilers. Found ${CMAKE_CXX_COMPILER_ID}") + MESSAGE(FATAL_ERROR "QLever currently only supports the G++, LLVM-Clang++ or AppleClang compilers. 
Found ${CMAKE_CXX_COMPILER_ID}") endif () ## Build targets for address sanitizer diff --git a/src/rdfTypes/GeometryInfoHelpersImpl.h b/src/rdfTypes/GeometryInfoHelpersImpl.h index cbf6c4af35..09a24b47c3 100644 --- a/src/rdfTypes/GeometryInfoHelpersImpl.h +++ b/src/rdfTypes/GeometryInfoHelpersImpl.h @@ -26,13 +26,13 @@ #include "global/Constants.h" #include "rdfTypes/GeoPoint.h" -#include "util/Views.h" #include "rdfTypes/GeometryInfo.h" #include "rdfTypes/Literal.h" #include "util/Exception.h" #include "util/GeoConverters.h" #include "util/Log.h" #include "util/TypeTraits.h" +#include "util/Views.h" // This file contains functions used for parsing and processing WKT geometries // using `pb_util`. To avoid unnecessarily compiling expensive modules, this diff --git a/test/util/AsyncTestHelpers.h b/test/util/AsyncTestHelpers.h index 8239ca0f4f..b816678081 100644 --- a/test/util/AsyncTestHelpers.h +++ b/test/util/AsyncTestHelpers.h @@ -28,30 +28,30 @@ concept TestableFunction = std::is_invocable_r_v; template requires(TestableCoroutine || TestableFunction) void runAsyncTest(Func innerRun, size_t numThreads) { - auto ioContext = std::make_shared(); - - auto future = [&]() { - if constexpr (TestableCoroutine) { - return net::co_spawn(*ioContext, innerRun(*ioContext), net::use_future); - } else { - // Use std::promise instead of std::packaged_task to work around - // AppleClang/LLVM Clang compiler crash with packaged_task + Boost.Asio - auto promise = std::make_shared>(); - auto fut = promise->get_future(); - net::post(*ioContext, [&innerRun, ioContext, promise]() { - try { - innerRun(*ioContext); - promise->set_value(); - } catch (...) { - promise->set_exception(std::current_exception()); - } - }); - return fut; - // Original code that causes AppleClang segfault: - // return net::post(*ioContext, std::packaged_task{ - // [&] { innerRun(*ioContext); }}); + auto ioContext = std::make_shared(); + + auto future = [&]() { + if constexpr (TestableCoroutine) { + return net::co_spawn(*ioContext, innerRun(*ioContext), net::use_future); + } else { + // Use std::promise instead of std::packaged_task to work around + // AppleClang/LLVM Clang compiler crash with packaged_task + Boost.Asio + auto promise = std::make_shared>(); + auto fut = promise->get_future(); + net::post(*ioContext, [&innerRun, ioContext, promise]() { + try { + innerRun(*ioContext); + promise->set_value(); + } catch (...) 
{ + promise->set_exception(std::current_exception()); } - }(); + }); + return fut; + // Original code that causes AppleClang segfault: + // return net::post(*ioContext, std::packaged_task{ + // [&] { innerRun(*ioContext); }}); + } + }(); std::vector workers{}; From 7470c5f5dc462e749eb30cf4707aab19ec5705e6 Mon Sep 17 00:00:00 2001 From: Tanmay Garg Date: Wed, 14 Jan 2026 11:33:42 +0100 Subject: [PATCH 18/45] Update e2e.sh with new binary names --- e2e/e2e.sh | 18 ++++++++++-------- 1 file changed, 10 insertions(+), 8 deletions(-) diff --git a/e2e/e2e.sh b/e2e/e2e.sh index 197e55ac21..3e1a39c2d5 100755 --- a/e2e/e2e.sh +++ b/e2e/e2e.sh @@ -21,13 +21,15 @@ function print_usage { echo "Options:" echo " -i Use index from the given directory (which must be the root directory of the working copy of QLever, not the e2e_data subdirectory)" echo " -d Directory of the QLever binaries (relative to the main directory), default: 'build'" - echo " -t Build the text index with a separate explicit call to `IndexBuilderMain`" + echo " -t Build the text index with a separate explicit call to $INDEX_BUILDER_BINARY" } REBUILD_THE_INDEX="YES" INDEX_DIRECTORY="." #if not set, we will build the index ourselves. BINARY_DIRECTORY="build" BUILD_TEXT_INDEX_SEPARATELY="NO" +INDEX_BUILDER_BINARY="qlever-loader" +SERVER_BINARY="qlever-server" while getopts ":i:d:t" arg; do case ${arg} in @@ -104,7 +106,7 @@ if [ ${REBUILD_THE_INDEX} == "YES" ] || ! [ -f "${INDEX}.index.pso" ]; then if [ ${BUILD_TEXT_INDEX_SEPARATELY} == "NO" ]; then echo "Building index $INDEX" - ./IndexBuilderMain -i "$INDEX" \ + ./"$INDEX_BUILDER_BINARY" -i "$INDEX" \ -F ttl \ -f "$INPUT.nt" \ -s "$PROJECT_DIR/e2e/e2e-build-settings.json" \ @@ -113,13 +115,13 @@ if [ ${REBUILD_THE_INDEX} == "YES" ] || ! [ -f "${INDEX}.index.pso" ]; then -d "$INPUT.docsfile.tsv" || bail "Building Index failed" else echo "Building index $INDEX without text index" - ./IndexBuilderMain -i "$INDEX" \ + ./"$INDEX_BUILDER_BINARY" -i "$INDEX" \ -F ttl \ -f "$INPUT.nt" \ -s "$PROJECT_DIR/e2e/e2e-build-settings.json" \ || bail "Building Index failed" echo "Adding text index" - ./IndexBuilderMain -A -i "$INDEX" \ + ./"$INDEX_BUILDER_BINARY" -A -i "$INDEX" \ -s "$PROJECT_DIR/e2e/e2e-build-settings.json" \ -w "$INPUT.wordsfile.tsv" \ -W \ @@ -133,13 +135,13 @@ fi # here because then we can't easily get the SERVER_PID out of that subshell pushd "$BINARY_DIR" echo "Launching server from path $(pwd)" -./ServerMain -i "$INDEX" -p 9099 -m 1GB -t --default-query-timeout 30s &> server_log.txt & +./"$SERVER_BINARY" -i "$INDEX" -p 9099 -m 1GB -t --default-query-timeout 30s &> server_log.txt & SERVER_PID=$! popd # Setup the kill switch so it gets called whatever way we exit trap cleanup_server EXIT -echo "Waiting for ServerMain to launch and open port" +echo "Waiting for $SERVER_BINARY to launch and open port" i=0 until [ $i -eq 60 ] || curl --max-time 1 --output /dev/null --silent http://localhost:9099/; do sleep 1; @@ -147,10 +149,10 @@ until [ $i -eq 60 ] || curl --max-time 1 --output /dev/null --silent http://loca done if [ $i -ge 60 ]; then - echo "ServerMain could not be reached after waiting for 60 seconds, exiting"; + echo "$SERVER_BINARY could not be reached after waiting for 60 seconds, exiting"; exit 1 fi -echo "ServerMain was successfully started, running queries ..." +echo "$SERVER_BINARY was successfully started, running queries ..." 
$PYTHON_BINARY "$PROJECT_DIR/e2e/queryit.py" "$PROJECT_DIR/e2e/scientists_queries.yaml" "http://localhost:9099" | tee "$BINARY_DIR/query_log.txt" || bail "Querying Server failed" popd From 8782de22e02dff4c0ec52eecb406c3006dede127 Mon Sep 17 00:00:00 2001 From: Tanmay Garg Date: Wed, 14 Jan 2026 12:43:08 +0100 Subject: [PATCH 19/45] Add MacOS AppleClang native github workflow --- .github/workflows/macos-appleclang-native.yml | 83 +++++++++++++++++++ 1 file changed, 83 insertions(+) create mode 100644 .github/workflows/macos-appleclang-native.yml diff --git a/.github/workflows/macos-appleclang-native.yml b/.github/workflows/macos-appleclang-native.yml new file mode 100644 index 0000000000..3c6967e543 --- /dev/null +++ b/.github/workflows/macos-appleclang-native.yml @@ -0,0 +1,83 @@ +name: Native build with AppleClang on MacOS + +on: + push: + branches: [master] + pull_request: + branches: [master] + merge_group: + +concurrency: + group: "${{ github.workflow }} @ ${{ github.event.pull_request.head.label || github.head_ref || github.ref }}" + cancel-in-progress: true + +jobs: + build: + # The CMake configure and build commands are platform-agnostic and should work equally + # well on Windows or Mac. You can convert this to a matrix build if you need + # cross-platform coverage. + # See: https://docs.github.com/en/free-pro-team@latest/actions/learn-github-actions/managing-complex-workflows#using-a-build-matrix + strategy: + fail-fast: false + matrix: + build-type: [Release] + runs-on: macos-15 + steps: + - uses: actions/checkout@v4 + + - name: Install dependencies via Homebrew + run: | + brew update + brew install boost icu4c openssl@3 zstd jemalloc pkg-config + + - name: Install python dependencies for E2E tests + run: | + pip3 install --break-system-packages pyaml --no-binary=:pyicu: pyicu + + - name: Print clang version + run: clang++ --version + + - name: Create build directory + run: mkdir ${{github.workspace}}/build + + - name: Configure CMake + working-directory: ${{ github.workspace }}/build + run: | + cmake -DCMAKE_BUILD_TYPE=${{ matrix.build-type }} \ + -DCMAKE_OSX_DEPLOYMENT_TARGET=11.0 \ + -DLOGLEVEL=INFO \ + -DUSE_PARALLEL=false \ + -D_NO_TIMING_TESTS=ON \ + -DCOMPILER_SUPPORTS_MARCH_NATIVE=FALSE \ + -GNinja \ + .. + + - name: Build + # Build your program with the given configuration + run: | + cmake --build ${{github.workspace}}/build --config ${{matrix.build-type}} -- -j $(sysctl -n hw.ncpu) + id: build + + - name: Test + id: complete_tests + working-directory: ${{github.workspace}}/build/test + run: env CTEST_OUTPUT_ON_FAILURE=1 ctest -C ${{matrix.build-type}} . + + - name: Running and printing the benchmark examples. + run: ${{github.workspace}}/build/benchmark/BenchmarkExamples -p + + - name: Test + working-directory: ${{github.workspace}}/build/test + # Execute tests defined by the CMake configuration. + # See https://cmake.org/cmake/help/latest/manual/ctest.1.html for more detail + run: | + df -h + source ${{github.workspace}}/build/conanrun.sh + env CTEST_OUTPUT_ON_FAILURE=1 ctest -C ${{matrix.build-type}} . + + - name: Running and printing the benchmark examples. 
+ working-directory: ${{github.workspace}}/build + run: benchmark/BenchmarkExamples -p + + - name: E2E + run: ${{github.workspace}}/e2e/e2e.sh From 12a674f7ea8e28f8cd2d019a43efecbdc340a4b2 Mon Sep 17 00:00:00 2001 From: Tanmay Garg Date: Wed, 14 Jan 2026 12:47:35 +0100 Subject: [PATCH 20/45] Add icu and pkg-config to path for macos native workflow --- .github/workflows/macos-appleclang-native.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/macos-appleclang-native.yml b/.github/workflows/macos-appleclang-native.yml index 3c6967e543..18f2b62029 100644 --- a/.github/workflows/macos-appleclang-native.yml +++ b/.github/workflows/macos-appleclang-native.yml @@ -29,6 +29,8 @@ jobs: run: | brew update brew install boost icu4c openssl@3 zstd jemalloc pkg-config + echo PATH="$(brew --prefix icu4c)/bin:$(brew --prefix icu4c)/sbin:$PATH" >> $GITHUB_ENV + echo PKG_CONFIG_PATH="$PKG_CONFIG_PATH:$(brew --prefix icu4c)/lib/pkgconfig" >> $GITHUB_ENV - name: Install python dependencies for E2E tests run: | From 3b749082a9643064df17fbee36054721bcdb4fcd Mon Sep 17 00:00:00 2001 From: Tanmay Garg Date: Wed, 14 Jan 2026 12:52:20 +0100 Subject: [PATCH 21/45] Add build directory explicitly to cmake build in macos native runner --- .github/workflows/macos-appleclang-native.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/macos-appleclang-native.yml b/.github/workflows/macos-appleclang-native.yml index 18f2b62029..032eb43bb6 100644 --- a/.github/workflows/macos-appleclang-native.yml +++ b/.github/workflows/macos-appleclang-native.yml @@ -45,7 +45,8 @@ jobs: - name: Configure CMake working-directory: ${{ github.workspace }}/build run: | - cmake -DCMAKE_BUILD_TYPE=${{ matrix.build-type }} \ + cmake -B ${{github.workspace}}/build \ + -DCMAKE_BUILD_TYPE=${{ matrix.build-type }} \ -DCMAKE_OSX_DEPLOYMENT_TARGET=11.0 \ -DLOGLEVEL=INFO \ -DUSE_PARALLEL=false \ From b12639cac7ec4dd7f49db7def63bdf3942a08684 Mon Sep 17 00:00:00 2001 From: Tanmay Garg Date: Wed, 14 Jan 2026 13:01:08 +0100 Subject: [PATCH 22/45] Fix configure cmake step in macos native runner --- .github/workflows/macos-appleclang-native.yml | 11 +---------- 1 file changed, 1 insertion(+), 10 deletions(-) diff --git a/.github/workflows/macos-appleclang-native.yml b/.github/workflows/macos-appleclang-native.yml index 032eb43bb6..4da3e01a54 100644 --- a/.github/workflows/macos-appleclang-native.yml +++ b/.github/workflows/macos-appleclang-native.yml @@ -44,16 +44,7 @@ jobs: - name: Configure CMake working-directory: ${{ github.workspace }}/build - run: | - cmake -B ${{github.workspace}}/build \ - -DCMAKE_BUILD_TYPE=${{ matrix.build-type }} \ - -DCMAKE_OSX_DEPLOYMENT_TARGET=11.0 \ - -DLOGLEVEL=INFO \ - -DUSE_PARALLEL=false \ - -D_NO_TIMING_TESTS=ON \ - -DCOMPILER_SUPPORTS_MARCH_NATIVE=FALSE \ - -GNinja \ - .. + run: cmake -DCMAKE_BUILD_TYPE=${{ matrix.build-type }} -DCMAKE_OSX_DEPLOYMENT_TARGET=11.0 -DLOGLEVEL=INFO -DUSE_PARALLEL=false -D_NO_TIMING_TESTS=ON -DCOMPILER_SUPPORTS_MARCH_NATIVE=FALSE -GNinja .. 
- name: Build # Build your program with the given configuration From 2d7fa61727adbd6308821f548ae2d2f8d66d831e Mon Sep 17 00:00:00 2001 From: Tanmay Garg Date: Wed, 14 Jan 2026 13:44:13 +0100 Subject: [PATCH 23/45] Delete github workflows temporarily --- .github/workflows/check_index_version.yml | 96 ----------- .github/workflows/code-coverage.yml | 117 ------------- .github/workflows/codespell.yml | 25 --- .github/workflows/cpp-17-libqlever.yml | 84 ---------- .github/workflows/docker-publish.yml | 155 ------------------ .github/workflows/format-check.yml | 32 ---- .github/workflows/macos.yml | 96 ----------- .github/workflows/native-build-conan.yml | 75 --------- .github/workflows/native-build.yml | 151 ----------------- .github/workflows/sonarcloud.yml | 34 ---- .../workflows/sparql-conformance-uploader.yml | 65 -------- .github/workflows/sparql-conformance.yml | 86 ---------- .github/workflows/upload-coverage.yml | 95 ----------- .github/workflows/upload-sonarcloud.yml | 144 ---------------- 14 files changed, 1255 deletions(-) delete mode 100644 .github/workflows/check_index_version.yml delete mode 100644 .github/workflows/code-coverage.yml delete mode 100644 .github/workflows/codespell.yml delete mode 100644 .github/workflows/cpp-17-libqlever.yml delete mode 100644 .github/workflows/docker-publish.yml delete mode 100644 .github/workflows/format-check.yml delete mode 100644 .github/workflows/macos.yml delete mode 100644 .github/workflows/native-build-conan.yml delete mode 100644 .github/workflows/native-build.yml delete mode 100644 .github/workflows/sonarcloud.yml delete mode 100644 .github/workflows/sparql-conformance-uploader.yml delete mode 100644 .github/workflows/sparql-conformance.yml delete mode 100644 .github/workflows/upload-coverage.yml delete mode 100644 .github/workflows/upload-sonarcloud.yml diff --git a/.github/workflows/check_index_version.yml b/.github/workflows/check_index_version.yml deleted file mode 100644 index ecba7a725b..0000000000 --- a/.github/workflows/check_index_version.yml +++ /dev/null @@ -1,96 +0,0 @@ -name: Check index version - -on: - push: - branches: [ master ] - pull_request: - branches: [ master ] - merge_group: - -concurrency: - group: '${{ github.workflow }} @ ${{ github.event.pull_request.head.label || github.head_ref || github.ref }}' - cancel-in-progress: true - -jobs: - check-index-version: - strategy: - fail-fast: false - matrix: - compiler: [gcc] - compiler-version: [11] - # We deliberately set `Release` and then `O1`, s.t. we get faster compile times (O1, less optimizations), but - # smaller binaries (no debug info), because otherwise we run out of memory on GitHub actions. 
- warnings: [ "-Wall -Wextra -O1 " ] - build-type: [Release] - - runs-on: ubuntu-22.04 - - - steps: - - uses: actions/checkout@v4 - with: - submodules: 'recursive' - path: 'pr' - - uses: actions/checkout@v4 - with: - submodules: 'recursive' - path: 'master' - ref: 'master' - - - name: Install dependencies - uses: ./pr/.github/workflows/install-dependencies-ubuntu - - name: Install compiler - uses: ./pr/.github/workflows/install-compiler-ubuntu - with: - compiler: ${{matrix.compiler}} - compiler-version: ${{matrix.compiler-version}} - - name: Configure CMake Master - working-directory: ${{github.workspace}}/master - run: cmake -B build -DCMAKE_BUILD_TYPE=${{matrix.build-type}} -DCMAKE_TOOLCHAIN_FILE="$(pwd)/toolchains/${{matrix.compiler}}${{matrix.compiler-version}}.cmake" -DADDITIONAL_COMPILER_FLAGS="${{matrix.warnings}} ${{matrix.asan-flags}} ${{matrix.ubsan-flags}}" -DUSE_PARALLEL=true -DRUN_EXPENSIVE_TESTS=true -DENABLE_EXPENSIVE_CHECKS=true - - name: Configure CMake PR - working-directory: ${{github.workspace}}/pr - run: cmake -B build -DCMAKE_BUILD_TYPE=${{matrix.build-type}} -DCMAKE_TOOLCHAIN_FILE="$(pwd)/toolchains/${{matrix.compiler}}${{matrix.compiler-version}}.cmake" -DADDITIONAL_COMPILER_FLAGS="${{matrix.warnings}} ${{matrix.asan-flags}} ${{matrix.ubsan-flags}}" -DUSE_PARALLEL=true -DRUN_EXPENSIVE_TESTS=true -DENABLE_EXPENSIVE_CHECKS=true - - - name: Build master - # Build your program with the given configuration - # Only build the actually required executables to save space on the device. - run: | - cmake --build ${{github.workspace}}/master/build --config ${{matrix.build-type}} --target IndexBuilderMain -- -j $(nproc) - cmake --build ${{github.workspace}}/master/build --config ${{matrix.build-type}} --target ServerMain -- -j $(nproc) - cmake --build ${{github.workspace}}/master/build --config ${{matrix.build-type}} --target PrintIndexVersionMain -- -j $(nproc) - - - name: E2E on Master - working-directory: ${{github.workspace}}/master - run: e2e/e2e.sh - - name: Get index version master - working-directory: ${{github.workspace}}/master/build - run: | - if test -f "./PrintIndexVersionMain"; then - echo 'index_version_master<> $GITHUB_ENV - ./PrintIndexVersionMain >> $GITHUB_ENV - echo 'EOF' >> $GITHUB_ENV - else - echo 'index_version_master={}' >> $GITHUB_ENV - fi - - name: Clean master - run: cmake --build ${{github.workspace}}/master/build --target clean - - name: BuildPr - # also only build the required executables here. 
- run: | - cmake --build ${{github.workspace}}/pr/build --config ${{matrix.build-type}} --target IndexBuilderMain -- -j $(nproc) - cmake --build ${{github.workspace}}/pr/build --config ${{matrix.build-type}} --target ServerMain -- -j $(nproc) - cmake --build ${{github.workspace}}/pr/build --config ${{matrix.build-type}} --target PrintIndexVersionMain -- -j $(nproc) - - name: Get index version PR - working-directory: ${{github.workspace}}/pr/build - run: | - echo 'index_version_pr<> $GITHUB_ENV - ./PrintIndexVersionMain >> $GITHUB_ENV - echo 'EOF' >> $GITHUB_ENV - - name: Print index versions - run : | - echo '${{ fromJson(env.index_version_master)}}' - echo '${{ fromJson(env.index_version_pr)}}' - - name: E2E on PR using index from Master - if: env.index_version_master == env.index_version_pr - working-directory: ${{github.workspace}}/pr - run: e2e/e2e.sh -i ../master diff --git a/.github/workflows/code-coverage.yml b/.github/workflows/code-coverage.yml deleted file mode 100644 index c9f46032dd..0000000000 --- a/.github/workflows/code-coverage.yml +++ /dev/null @@ -1,117 +0,0 @@ -# Measure the code coverage of QLever's unit tests using LLVMs source -# instrumentation. When this workflow runs for the master branch, the -# coverage report is directly uploaded to Codecov.io. When this workflow -# runs for a pull request, the coverage report is stored as a GitHub artifact -# and a separate workflow from `upload-coverage.yml` is triggered that uploads -# the coverage report. That way the uploader has access to the Codecov token -# in all cases and the upload should never fail. - -# The following name has to be the same as in the `workflows:` key in -# `upload-coverage.yml`, otherwise the uploader won't pick up the result -# of this workflow. -name: measure-code-coverage - -on: - push: - branches: [ master ] - pull_request: - branches: [ master ] - merge_group: - -concurrency: - group: '${{ github.workflow }} @ ${{ github.event.pull_request.head.label || github.head_ref || github.ref }}' - cancel-in-progress: true - -jobs: - build: - env: - compiler: clang - compiler-version: 16 - build-type: Debug - warnings: "-Wall -Wextra " - # we disable the `assert()` macro as it messes with the coverage reports - asan-flags: "-DNDEBUG" - ubsan-flags: "" - coverage-flags: "-fprofile-instr-generate -fcoverage-mapping" - cmake-flags: "-DCMAKE_C_COMPILER=clang-16 -DCMAKE_CXX_COMPILER=clang++-16" - - runs-on: ubuntu-22.04 - steps: - - uses: actions/checkout@v4 - with: - submodules: "recursive" - - - name: Install dependencies - uses: ./.github/workflows/install-dependencies-ubuntu - - name: Install compiler - uses: ./.github/workflows/install-compiler-ubuntu - with: - compiler: "clang" - compiler-version: "16" - - name: Install coverage tools - run: | - sudo apt install -y llvm-16 - sudo apt install mold - - name: Show path - run: | - which llvm-profdata-16 - which llvm-cov-16 - - name: Create build directory - run: mkdir ${{github.workspace}}/build - - name: Configure CMake - # Configure CMake in a 'build' subdirectory. `CMAKE_BUILD_TYPE` is only required if you are using a single-configuration generator such as make. 
- # See https://cmake.org/cmake/help/latest/variable/CMAKE_BUILD_TYPE.html?highlight=cmake_build_type - run: cmake -B ${{github.workspace}}/build ${{env.cmake-flags}} -DCMAKE_BUILD_TYPE=${{env.build-type}} -DLOGLEVEL=TIMING -DADDITIONAL_COMPILER_FLAGS="${{env.warnings}} ${{env.asan-flags}} ${{env.ubsan-flags}} ${{env.coverage-flags}}" -DADDITIONAL_LINKER_FLAGS="${{env.coverage-flags}}" -DUSE_PARALLEL=false -DRUN_EXPENSIVE_TESTS=false -DSINGLE_TEST_BINARY=ON -DENABLE_EXPENSIVE_CHECKS=true -DADDITIONAL_LINKER_FLAGS="-fuse-ld=mold" - - - name: Build - # Build your program with the given configuration - run: cmake --build ${{github.workspace}}/build --config ${{env.build-type}} -- -j $(nproc) - - name: Run unit tests - working-directory: ${{github.workspace}}/build/test - env: - LLVM_PROFILE_FILE: "default%p.profraw" - # We have to manually run the test executable to only get a single `.profraw` file. - # Otherwise, the GitHub runner goes out of memory. - run: env ASAN_OPTIONS="alloc_dealloc_mismatch=0" ./QLeverAllUnitTestsMain - - - name: Process coverage info - working-directory: ${{github.workspace}}/build/test - run: > - llvm-profdata-16 merge -sparse *.profraw -o default.profdata; - llvm-cov-16 export ./QLeverAllUnitTestsMain --dump --format=lcov --instr-profile ./default.profdata --ignore-filename-regex="/third_party/" --ignore-filename-regex="/generated/" --ignore-filename-regex="/nlohmann/" --ignore-filename-regex="/ctre/" --ignore-filename-regex="/test/" --ignore-filename-regex="/benchmark/" > ./coverage.lcov - -# Only upload the coverage directly if this is not a pull request. In this -# case we are on the master branch and have access to the Codecov token. - - name: "Submit coverage data to codecov.io" - if: github.event_name != 'pull_request' - uses: codecov/codecov-action@v4 - with: - file: ${{github.workspace}}/build/test/coverage.lcov - # Note: technically, a `token` is not required for codecov.io when - # uploading from a public repository, but specifying it avoids the - # nasty spurious failures due to GitHub's rate limit for codecov's - # public default token. - token: ${{ secrets.CODECOV_TOKEN }} - fail_ci_if_error: true - -# For a pull request we store the coverage file as well as some information -# about this PR (number, how to check it out, etc.) and upload it as an artifact. -# This is all the data that is required for running the Codecov uploader manually -# from `upload-coverage.yml`. 
- - name: Save PR number and coverage file in same directory - if: github.event_name == 'pull_request' - # Note: If you change any of the filenames here, you also have to change them in `upload-coverage.yml` - run : | - mkdir -p coverage-report - echo ${{ github.event.number }} > ./coverage-report/pr - echo ${{ github.repository }} > ./coverage-report/github_repository - echo ${GITHUB_REF} > ./coverage-report/github_ref - mv ${{ github.workspace}}/build/test/coverage.lcov coverage-report - - name: Upload coverage artifact - if: github.event_name == 'pull_request' - uses: actions/upload-artifact@v4 - with: - name: coverage-report - path: coverage-report/ - # Note: for now we do not run the e2e tests for the coverage check - diff --git a/.github/workflows/codespell.yml b/.github/workflows/codespell.yml deleted file mode 100644 index b026c855dd..0000000000 --- a/.github/workflows/codespell.yml +++ /dev/null @@ -1,25 +0,0 @@ -# Codespell configuration is within .codespellrc ---- -name: Codespell - -on: - push: - branches: [master] - pull_request: - branches: [master] - -permissions: - contents: read - -jobs: - codespell: - name: Check for spelling errors - runs-on: ubuntu-latest - - steps: - - name: Checkout - uses: actions/checkout@v4 - - name: Annotate locations with typos - uses: codespell-project/codespell-problem-matcher@v1 - - name: Codespell - uses: codespell-project/actions-codespell@v2 diff --git a/.github/workflows/cpp-17-libqlever.yml b/.github/workflows/cpp-17-libqlever.yml deleted file mode 100644 index 7afffd917c..0000000000 --- a/.github/workflows/cpp-17-libqlever.yml +++ /dev/null @@ -1,84 +0,0 @@ -name: CPP17 libQLever - -on: - push: - branches: [ master ] - pull_request: - branches: [ master ] - merge_group: - -concurrency: - group: '${{ github.workflow }} @ ${{ github.event.pull_request.head.label || github.head_ref || github.ref }}' - cancel-in-progress: true - -jobs: - build: - runs-on: ubuntu-22.04 - # The CMake configure and build commands are platform-agnostic and should work equally - # well on Windows or Mac. You can convert this to a matrix build if you need - # cross-platform coverage. - # See: https://docs.github.com/en/free-pro-team@latest/actions/learn-github-actions/managing-complex-workflows#using-a-build-matrix - strategy: - fail-fast: false - matrix: - include: - - compiler-version: 11 - use-keep-going: false - use-ignore-errors: false - additional-cmake-options: "-DREDUCED_FEATURE_SET_FOR_CPP17=ON" - - compiler-version: 8 - expensive-tests: true - use-keep-going: true - use-ignore-errors: true - additional-cmake-options: "-DREDUCED_FEATURE_SET_FOR_CPP17=ON -DUSE_CPP_17_BACKPORTS=ON -DCMAKE_CXX_STANDARD=17 -DCOMPILER_VERSION_CHECK_DEACTIVATED=ON" - - - env: - warnings: "" - build-type: Release - expensive-tests: true - compiler: gcc - - steps: - - uses: actions/checkout@v4 - with: - submodules: 'recursive' - - name: Install dependencies - uses: ./.github/workflows/install-dependencies-ubuntu - - name: Install compiler - uses: ./.github/workflows/install-compiler-ubuntu - with: - compiler: ${{env.compiler}} - compiler-version: ${{matrix.compiler-version}} - - name: Configure CMake - # Configure CMake in a 'build' subdirectory. `CMAKE_BUILD_TYPE` is only required if you are using a single-configuration generator such as make. 
- # See https://cmake.org/cmake/help/latest/variable/CMAKE_BUILD_TYPE.html?highlight=cmake_build_type - run: cmake -B ${{github.workspace}}/build -DCMAKE_BUILD_TYPE=${{env.build-type}} -DCMAKE_TOOLCHAIN_FILE="$(pwd)/toolchains/${{env.compiler}}${{matrix.compiler-version}}.cmake" -DADDITIONAL_COMPILER_FLAGS="${{env.warnings}}" -DUSE_PARALLEL=true -DRUN_EXPENSIVE_TESTS=${{env.expensive-tests}} -DENABLE_EXPENSIVE_CHECKS=true ${{matrix.additional-cmake-options}} -DADDITIONAL_LINKER_FLAGS="-B /usr/bin/mold" - - - name: Build - # Build your program with the given configuration - run: | - KEEP_GOING="" - if [[ "${{ matrix.use-keep-going }}" == "true" ]]; then - KEEP_GOING="-k" - fi - IGNORE_ERRORS="" - if [[ "${{ matrix.use-ignore-errors }}" == "true" ]]; then - IGNORE_ERRORS="-i" - fi - set -o pipefail # the `tee` will never fail, but the `build` command might. We want to fail if the build fails. - cmake --build ${{github.workspace}}/build --target QleverTest --config ${{env.build-type}} -- $IGNORE_ERRORS $KEEP_GOING -j $(nproc) 2>&1 | tee /tmp/build.log - id: build - - - name: Run gcc8 log analyzer on gcc8 builds - if: matrix.compiler-version == 8 - run: | - python ${{ github.workspace }}/misc/gcc8_logs_analyzer.py /tmp/build.log --on-github - id: gcc8_log_analyzer - - name: Test - id: runTest - if: (matrix.compiler-version == 11) - working-directory: ${{github.workspace}}/build/test - # Execute tests defined by the CMake configuration. - # See https://cmake.org/cmake/help/latest/manual/ctest.1.html for more detail - run: env CTEST_OUTPUT_ON_FAILURE=1 ctest -C ${{matrix.build-type}} . -L QleverTest diff --git a/.github/workflows/docker-publish.yml b/.github/workflows/docker-publish.yml deleted file mode 100644 index 297546c5b2..0000000000 --- a/.github/workflows/docker-publish.yml +++ /dev/null @@ -1,155 +0,0 @@ -name: Docker build and publish - -on: - push: - branches: [ master ] - pull_request: - branches: [ master ] - -env: - IMAGE: adfreiburg/qlever - -concurrency: - # When this is not a pull request, then we want all the docker containers to be pushed, we therefore - # directly fall back to the commit hash which will be distinct for each push to master. - group: '${{ github.workflow }} @ ${{ github.event.pull_request.head.label || github.sha}}' - cancel-in-progress: true - -# This workflow is heavily based on https://docs.docker.com/build/ci/github-actions/multi-platform/#distribute-build-across-multiple-runners . -jobs: - build: - strategy: - matrix: - include: - - platform: linux/amd64 - runner: ubuntu-24.04 - - platform: linux/arm64 - runner: ubuntu-24.04-arm - runs-on: ${{ matrix.runner }} - steps: - - name: Checkout - uses: actions/checkout@v4 - # Generate metadata for the docker image based on the GH Actions environment. - - name: Generate image metadata - id: meta - uses: docker/metadata-action@v5 - with: - images: ${{ env.IMAGE }} - - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v3 - - name: Login to Docker Hub - if: github.event_name != 'pull_request' - uses: docker/login-action@v3 - with: - username: ${{ secrets.DOCKERHUB_USERNAME }} - password: ${{ secrets.DOCKERHUB_TOKEN }} - - name: Build ${{ matrix.platform }} - uses: docker/build-push-action@v6 - id: build - with: - context: . - platforms: ${{ matrix.platform }} - tags: ${{ env.IMAGE }} - # push-by-digest means that the built image is not associated with a tag. Instead, the only way to refer to it - # is by using its digest (which is basically a unique hash). 
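-          # For example, an image pushed this way is later pulled or referenced as
-          # `docker pull adfreiburg/qlever@sha256:<digest>` rather than via a tag
-          # (the `<digest>` here is a placeholder, not a real value).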
- outputs: type=image,push-by-digest=true,name-canonical=true,push=${{ github.event_name != 'pull_request' }} - labels: ${{ steps.meta.outputs.labels }} - annotations: ${{ steps.meta.outputs.annotations }} - # Export the digest outside of this job, so that the merge job can pick it up. - - name: Export digest - if: github.event_name != 'pull_request' - run: | - # Strip forward slash from matrix.platform - platform=${{ matrix.platform }} - echo "ARTIFACT_NAME=${platform//\//-}" >> $GITHUB_ENV - mkdir -p ${{ runner.temp }}/digests - digest="${{ steps.build.outputs.digest }}" - touch "${{ runner.temp }}/digests/${digest#sha256:}" - - name: Upload digest - if: github.event_name != 'pull_request' - uses: actions/upload-artifact@v4 - with: - name: digests-${{ env.ARTIFACT_NAME }} - path: ${{ runner.temp }}/digests/* - if-no-files-found: error - retention-days: 1 - # Build image for local use. Since we pushed by digest it has not been loaded into the local docker instance. - # It might be possible to directly push by using a unique tag and make this step redundant, but pushing by digest - # seems to be the recommended way to do it. - - name: Setup E2E test image - if: matrix.platform == 'linux/amd64' - uses: docker/build-push-action@v6 - with: - # The cache should already provide this. So no rebuild should occur. - context: . - load: true - push: false - tags: ${{ env.IMAGE }}:tmp-${{ github.sha }} - - name: E2E in Docker - if: matrix.platform == 'linux/amd64' - run: | - sudo mkdir ${{ github.workspace }}/e2e_data - sudo chmod a+rwx ${{ github.workspace }}/e2e_data - sudo docker run -i --rm -v "${{ github.workspace }}/e2e_data:/app/e2e_data/" --entrypoint e2e/e2e.sh ${{ env.IMAGE }}:tmp-${{ github.sha }} - docker-merge: - if: github.event_name != 'pull_request' - needs: [ build ] - runs-on: ubuntu-24.04 - steps: - - name: Checkout - uses: actions/checkout@v4 - - name: Generate image metadata - id: meta - uses: docker/metadata-action@v5 - env: - # We build multiplatform images which have an image index above the - # image manifests. Attach the annotations directly to the image index. - DOCKER_METADATA_ANNOTATIONS_LEVELS: "index" - with: - images: ${{ env.IMAGE }} - - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v3 - - name: Login to Docker Hub - uses: docker/login-action@v3 - with: - username: ${{ secrets.DOCKERHUB_USERNAME }} - password: ${{ secrets.DOCKERHUB_TOKEN }} - - name: Get short sha - id: sha - run: echo "sha_short=$(git rev-parse --short HEAD)" >> $GITHUB_OUTPUT - - name: Get PR number - id: pr - run: echo "pr_num=$(git log --format=%s -n 1 | sed -nr 's/.*\(\#([0-9]+)\)/\1/p')" >> $GITHUB_OUTPUT - - name: Download digests - uses: actions/download-artifact@v4 - with: - path: ${{ runner.temp }}/digests - pattern: digests-* - merge-multiple: true - - name: Merge amd64 + arm64 images into multi-arch manifest - # Changing the working directory to this folder is important, so that - # the '*' in printf down below is expanded to the simple filenames without - # a leading path in the way. - working-directory: ${{ runner.temp }}/digests - # steps.meta.outputs.annotations contains a line for every annotation. - # To properly handle the expansion of this multi-line value in bash, - # we need to properly transform it into the `EXPANDED_ANNOTATIONS` - # variable, which then no longer contains newlines and properly - # handles other spaces in the annotations. 
- run: | - ANNOTATIONS=$(cat <<'EOF' - ${{ steps.meta.outputs.annotations }} - EOF - ) - EXPANDED_ANNOTATIONS=() - while IFS= read -r line; do - # Skip empty lines - [[ -n "$line" ]] || continue - EXPANDED_ANNOTATIONS+=( --annotation "$line" ) - done <<< "$ANNOTATIONS" - docker buildx imagetools create \ - -t ${{ env.IMAGE }}:latest \ - -t ${{ env.IMAGE }}:${{ github.ref_name == 'master' && format('pr-{0}', steps.pr.outputs.pr_num) || github.ref_name }} \ - -t ${{ env.IMAGE }}:commit-${{ steps.sha.outputs.sha_short }} \ - "${EXPANDED_ANNOTATIONS[@]}" \ - $(printf '${{ env.IMAGE }}@sha256:%s ' *) diff --git a/.github/workflows/format-check.yml b/.github/workflows/format-check.yml deleted file mode 100644 index 07412916bb..0000000000 --- a/.github/workflows/format-check.yml +++ /dev/null @@ -1,32 +0,0 @@ -name: Format check - -on: - push: - branches: [ master ] - pull_request: - branches: [ master ] - merge_group: - -jobs: - build: - # The CMake configure and build commands are platform agnostic and should work equally - # well on Windows or Mac. You can convert this to a matrix build if you need - # cross-platform coverage. - # See: https://docs.github.com/en/free-pro-team@latest/actions/learn-github-actions/managing-complex-workflows#using-a-build-matrix - runs-on: ubuntu-22.04 - - steps: - - uses: actions/checkout@v4 - - name: Install dependencies - run: | - # The following line currently seems to be necessary to work around a bug in the installation. - sudo apt remove python3-lldb-* - wget https://apt.llvm.org/llvm.sh - sudo chmod +x llvm.sh - sed 's/apt-key del/echo/' llvm.sh -iy - sudo ./llvm.sh 16 - sudo apt install -y clang-format-16 - - - name: Run the format checker - run: ${{github.workspace}}/misc/format-check.sh - diff --git a/.github/workflows/macos.yml b/.github/workflows/macos.yml deleted file mode 100644 index b64924e7d9..0000000000 --- a/.github/workflows/macos.yml +++ /dev/null @@ -1,96 +0,0 @@ -name: Native build with conan on MacOS - -on: - push: - branches: [ master ] - pull_request: - branches: [ master ] - merge_group: - -concurrency: - group: '${{ github.workflow }} @ ${{ github.event.pull_request.head.label || github.head_ref || github.ref }}' - cancel-in-progress: true - -jobs: - build: - # The CMake configure and build commands are platform-agnostic and should work equally - # well on Windows or Mac. You can convert this to a matrix build if you need - # cross-platform coverage. 
- # See: https://docs.github.com/en/free-pro-team@latest/actions/learn-github-actions/managing-complex-workflows#using-a-build-matrix - strategy: - fail-fast: false - matrix: - build-type: [Release] - runs-on: macos-15 - env: - LLVM_VERSION: 17 - steps: - - uses: actions/checkout@v4 - - - name: Install ICU dependency for python (only needed for E2E test) - run: | - df -h - brew install pkg-config icu4c - echo PATH="$(brew --prefix icu4c)/bin:$(brew --prefix icu4c)/sbin:$PATH" >> $GITHUB_ENV - echo PKG_CONFIG_PATH="$PKG_CONFIG_PATH:$(brew --prefix icu4c)/lib/pkgconfig" >> $GITHUB_ENV - - - name: Install python dependencies for E2E tests - run: | - pip3 install --break-system-packages pyaml --no-binary=:pyicu: pyicu - - - name: Install dependencies - run: | - brew install llvm@${{env.LLVM_VERSION}} conan@2 - export LLVM_PREFIX=$(brew --prefix llvm@${{env.LLVM_VERSION}}) - echo PATH="$LLVM_PREFIX/bin:$PATH" >> $GITHUB_ENV - echo CC="$LLVM_PREFIX/bin/clang" >> $GITHUB_ENV - echo CXX="$LLVM_PREFIX/bin/clang++" >> $GITHUB_ENV - - - name: Print clang version - run: clang++ --version - - - name: Cache for conan - uses: actions/cache@v3 - env: - cache-name: cache-conan-modules-macos-15 - with: - path: ~/.conan2 - key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ hashFiles('conanfile.txt', 'conanprofiles/clang-17-macos')}} - - - name: Create build directory - run: mkdir ${{github.workspace}}/build - - - name: Install and run conan - working-directory: ${{github.workspace}}/build - run: conan install .. -pr:b=../conanprofiles/clang-17-macos -pr:h=../conanprofiles/clang-17-macos -of=. --build=missing - - - name: Configure CMake - # For std::ranges::join_view we need the -fexperimental-library flag on libc++17. - # We currently cannot use the parallel algorithms, as the parallel sort requires a GNU-extension, and we build with `libc++`. - run: cmake -B ${{github.workspace}}/build -DCMAKE_BUILD_TYPE=${{matrix.build-type}} -DCMAKE_TOOLCHAIN_FILE="$(pwd)/build/conan_toolchain.cmake" -DUSE_PARALLEL=false -DRUN_EXPENSIVE_TESTS=false -DENABLE_EXPENSIVE_CHECKS=true -DADDITIONAL_COMPILER_FLAGS="-fexperimental-library" -D_NO_TIMING_TESTS=ON - - - name: Build - # Build your program with the given configuration - # Sourcing the conanrun.sh even for building is required to make gtest_discover_tests pass reliably. - run: | - df -h - source ${{github.workspace}}/build/conanrun.sh - cmake --build ${{github.workspace}}/build --config ${{matrix.build-type}} -- -j 2 - - - name: Test - working-directory: ${{github.workspace}}/build/test - # Execute tests defined by the CMake configuration. - # See https://cmake.org/cmake/help/latest/manual/ctest.1.html for more detail - run: | - df -h - source ${{github.workspace}}/build/conanrun.sh - env CTEST_OUTPUT_ON_FAILURE=1 ctest -C ${{matrix.build-type}} . - - - name: Running and printing the benchmark examples. 
- working-directory: ${{github.workspace}}/build - run: benchmark/BenchmarkExamples -p - - - name: E2E - run: | - source ${{github.workspace}}/build/conanrun.sh - ${{github.workspace}}/e2e/e2e.sh diff --git a/.github/workflows/native-build-conan.yml b/.github/workflows/native-build-conan.yml deleted file mode 100644 index ef6ad67f50..0000000000 --- a/.github/workflows/native-build-conan.yml +++ /dev/null @@ -1,75 +0,0 @@ -name: Native build with conan - -on: - push: - branches: [ master ] - pull_request: - branches: [ master ] - merge_group: - -concurrency: - group: '${{ github.workflow }} @ ${{ github.event.pull_request.head.label || github.head_ref || github.ref }}' - cancel-in-progress: true - -jobs: - build: - # The CMake configure and build commands are platform-agnostic and should work equally - # well on Windows or Mac. You can convert this to a matrix build if you need - # cross-platform coverage. - # See: https://docs.github.com/en/free-pro-team@latest/actions/learn-github-actions/managing-complex-workflows#using-a-build-matrix - strategy: - fail-fast: false - matrix: - warnings: [ "-Wall -Wextra" ] - build-type: [Release] - runs-on: ubuntu-22.04 - - - steps: - - uses: actions/checkout@v4 - with: - submodules: 'recursive' - - - name: Install dependencies - uses: ./.github/workflows/install-dependencies-ubuntu - with: - install-third-party-libraries: "false" - - - name: Create build directory - run: mkdir ${{github.workspace}}/build - - name: Install and run conan - working-directory: ${{github.workspace}}/build - run: > - sudo pip3 install conan; - conan profile detect; - conan install .. -pr:b=default -of=. --build=missing ; - - name: Configure CMake - # Configure CMake in a 'build' subdirectory. `CMAKE_BUILD_TYPE` is only required if you are using a single-configuration generator such as make. - # See https://cmake.org/cmake/help/latest/variable/CMAKE_BUILD_TYPE.html?highlight=cmake_build_type - run: cmake -B ${{github.workspace}}/build -DCMAKE_BUILD_TYPE=${{matrix.build-type}} -DLOGLEVEL=DEBUG -DCMAKE_TOOLCHAIN_FILE="$(pwd)/build/conan_toolchain.cmake" -DADDITIONAL_COMPILER_FLAGS="${{matrix.warnings}}" -DUSE_PARALLEL=true -DRUN_EXPENSIVE_TESTS=false -DENABLE_EXPENSIVE_CHECKS=true - - - name: Build - # Build your program with the given configuration - run: cmake --build ${{github.workspace}}/build --config ${{matrix.build-type}} -- -j $(nproc) - - - name: Test - working-directory: ${{github.workspace}}/build/test - # Execute tests defined by the CMake configuration. - # See https://cmake.org/cmake/help/latest/manual/ctest.1.html for more detail - run: > - source ../conanrun.sh; - env CTEST_OUTPUT_ON_FAILURE=1 ctest -C ${{matrix.build-type}} .; - - - name: Running and printing the benchmark examples. - working-directory: ${{github.workspace}}/build - run: > - source ./conanrun.sh; - benchmark/BenchmarkExamples -p; - - # explicitly specify the binary directory for the E2E script via the `-d` option to also - # test that it works. 
- - name: E2E - run: > - source ${{github.workspace}}/build/conanrun.sh; - ${{github.workspace}}/e2e/e2e.sh -d build - diff --git a/.github/workflows/native-build.yml b/.github/workflows/native-build.yml deleted file mode 100644 index cec789a10d..0000000000 --- a/.github/workflows/native-build.yml +++ /dev/null @@ -1,151 +0,0 @@ -name: Native build - -on: - push: - branches: [ master ] - pull_request: - branches: [ master ] - merge_group: - -concurrency: - group: '${{ github.workflow }} @ ${{ github.event.pull_request.head.label || github.head_ref || github.ref }}' - cancel-in-progress: true - -jobs: - build: - - # The CMake configure and build commands are platform-agnostic and should work equally - # well on Windows or Mac. You can convert this to a matrix build if you need - # cross-platform coverage. - # See: https://docs.github.com/en/free-pro-team@latest/actions/learn-github-actions/managing-complex-workflows#using-a-build-matrix - strategy: - fail-fast: false - matrix: - compiler: [gcc, clang] - compiler-version: [8, 11, 12, 13, 15, 16, 18, 21] - warnings: [ "-Wall -Wextra -Werror " ] - build-type: [Release] - expensive-tests: [true] - additional-cmake-options: [""] - isPr: - - ${{github.event_name == 'pull_request'}} # some checks are only run in the master. - skipIfPr: [false] - exclude: - - compiler: gcc - compiler-version: 8 - - compiler: gcc - compiler-version: 12 - isPr: true - - compiler: gcc - compiler-version: 15 - - compiler: gcc - compiler-version: 16 - - compiler: gcc - compiler-version: 18 - - compiler: gcc - compiler-version: 21 - - compiler: clang - compiler-version: 8 - - compiler: clang - compiler-version: 11 - - compiler: clang - compiler-version: 12 - - compiler: clang - compiler-version: 13 - - compiler: clang - compiler-version: 15 - - compiler: clang - compiler-version: 16 - isPr: true - - compiler: clang - compiler-version: 18 - isPr: true - include: - - compiler: gcc - compiler-version: 11 - additional-cmake-options: "-DUSE_CPP_17_BACKPORTS=ON -DEXPRESSION_GENERATOR_BACKPORTS_FOR_CPP17=ON" - build-type: Release -# TODO Figure out a way to enable this build only for master pushes and not for the PR. 
-# - compiler: clang -# compiler-version: 15 -# additional-cmake-options: "-DUSE_CPP_17_BACKPORTS=ON -DCMAKE_CXX_STANDARD=17 -DCMAKE_CXX_FLAGS='-ferror-limit=0' -DCOMPILER_VERSION_CHECK_DEACTIVATED=ON" -# build-type: Debug -# skipIfPr: true -# expensive-tests: false -# continue-on-error: true -# use-keep-going: true -# use-ignore-errors: true - - compiler: clang - compiler-version: 16 - asan-flags: "-fsanitize=address -fno-omit-frame-pointer" - build-type: RelWithDebInfo - expensive-tests: false - - compiler: clang - compiler-version: 16 - ubsan-flags: " -fsanitize=undefined" - build-type: RelWithDebInfo - expensive-tests: false - skipIfPr: false - - compiler: clang - compiler-version: 17 - build-type: Debug - expensive-tests: false - ubsan-flags: " -fsanitize=thread -O1 -g" - additional-cmake-options: "-D_NO_TIMING_TESTS=ON" - skipIfPr: false - - runs-on: ubuntu-22.04 - - - steps: - - name: Skip early if conditions are not met - if: (github.event_name == 'pull_request' && matrix.skipIfPr) - run: exit 0; - - uses: actions/checkout@v4 - with: - submodules: 'recursive' - - name: Install dependencies - uses: ./.github/workflows/install-dependencies-ubuntu - - name: Install compiler - uses: ./.github/workflows/install-compiler-ubuntu - with: - compiler: ${{matrix.compiler}} - compiler-version: ${{matrix.compiler-version}} - - name: Reduce address randomization to make sanitizers work - # For details see for example `https://stackoverflow.com/questions/77850769/fatal-threadsanitizer-unexpected-memory-mapping-when-running-on-linux-kernels` - run: | - sudo sysctl vm.mmap_rnd_bits - sudo sysctl vm.mmap_rnd_bits=28 - - - - name: Configure CMake - env: - USE_PCH: ${{ matrix.compiler-version == 21 && 'ON' || 'OFF' }} - # Configure CMake in a 'build' subdirectory. `CMAKE_BUILD_TYPE` is only required if you are using a single-configuration generator such as make. - # See https://cmake.org/cmake/help/latest/variable/CMAKE_BUILD_TYPE.html?highlight=cmake_build_type - run: cmake -B ${{github.workspace}}/build -DCMAKE_BUILD_TYPE=${{matrix.build-type}} -DCMAKE_TOOLCHAIN_FILE="$(pwd)/toolchains/${{matrix.compiler}}${{matrix.compiler-version}}.cmake" -DADDITIONAL_COMPILER_FLAGS="${{matrix.warnings}} ${{matrix.asan-flags}} ${{matrix.ubsan-flags}}" -DUSE_PARALLEL=true -DRUN_EXPENSIVE_TESTS=${{matrix.expensive-tests}} -DENABLE_EXPENSIVE_CHECKS=true ${{matrix.additional-cmake-options}} -DADDITIONAL_LINKER_FLAGS="-B /usr/bin/mold" -DSINGLE_TEST_BINARY=ON -DUSE_PRECOMPILED_HEADERS=${{env.USE_PCH}} - - - name: Build - # Build your program with the given configuration - run: | - cmake --build ${{github.workspace}}/build --config ${{matrix.build-type}} -- -j $(nproc) - id: build - - - name: Test - id: complete_tests - working-directory: ${{github.workspace}}/build/test - run: env CTEST_OUTPUT_ON_FAILURE=1 ctest -C ${{matrix.build-type}} . - - - name: Running and printing the benchmark examples. - run: ${{github.workspace}}/build/benchmark/BenchmarkExamples -p - - # Run the E2E test only once, but for GCC add the text index separately. - # Note that the dispatch by the compiler is arbitrary, we just want - # to have both cases tested. 
- - name: E2E with index in one go - if: (matrix.compiler == 'clang') && (steps.complete_tests.outcome == 'success') - run: ${{github.workspace}}/e2e/e2e.sh - - - name: E2E with separate text index build - if: (matrix.compiler != 'clang') && (steps.complete_tests.outcome == 'success') - run: ${{github.workspace}}/e2e/e2e.sh -t diff --git a/.github/workflows/sonarcloud.yml b/.github/workflows/sonarcloud.yml deleted file mode 100644 index 0580a783b7..0000000000 --- a/.github/workflows/sonarcloud.yml +++ /dev/null @@ -1,34 +0,0 @@ -name: sonarcloud-analysis - -on: - push: - branches: [master] - pull_request: - branches: [master] - -jobs: - build: - strategy: - fail-fast: false - runs-on: ubuntu-22.04 - steps: - - uses: actions/checkout@v4 - with: - submodules: 'recursive' - - - name: Save PR number and sonarcloud data in same directory - if: github.event_name == 'pull_request' - # Note: If you change any of the filenames here, you also have to change them in `upload-sonarcloud.yml` - run : | - mkdir -p sonarcloud-report - echo ${{ github.event.number }} > ./sonarcloud-report/pr - echo ${{ github.repository }} > ./sonarcloud-report/github_repository - echo ${GITHUB_REF} > ./sonarcloud-report/github_ref - - name: Upload sonarcloud artifact - if: github.event_name == 'pull_request' - uses: actions/upload-artifact@v4 - with: - name: sonarcloud-report - path: sonarcloud-report/ - # Note: for now we do not run the e2e tests for the coverage check - diff --git a/.github/workflows/sparql-conformance-uploader.yml b/.github/workflows/sparql-conformance-uploader.yml deleted file mode 100644 index 5532e3b061..0000000000 --- a/.github/workflows/sparql-conformance-uploader.yml +++ /dev/null @@ -1,65 +0,0 @@ -name: Upload conformance tests result - -on: - workflow_run: - workflows: [sparql-conformance] - types: - - completed - -jobs: - upload: - env: - SERVER_URL: https://qlever.dev/sparql-conformance-uploader - API_KEY: ${{ secrets.SPARQL_CONFORMANCE_TOKEN }} - runs-on: ubuntu-latest - if: github.event.workflow_run.conclusion == 'success' - steps: - - name: 'Download artifact' - uses: actions/github-script@v6 - with: - script: | - var artifacts = await github.rest.actions.listWorkflowRunArtifacts({ - owner: context.repo.owner, - repo: context.repo.repo, - run_id: ${{github.event.workflow_run.id }}, - }); - var matchArtifact = artifacts.data.artifacts.filter((artifact) => { - return artifact.name == "conformance-report" - })[0]; - var download = await github.rest.actions.downloadArtifact({ - owner: context.repo.owner, - repo: context.repo.repo, - artifact_id: matchArtifact.id, - archive_format: 'zip', - }); - var fs = require('fs'); - fs.writeFileSync('${{github.workspace}}/conformance-report.zip', Buffer.from(download.data)); - - run: unzip conformance-report.zip - # Read the metadata into environment variables. 
- - name: "Read github event" - run: echo "github_event=`cat event`" >> $GITHUB_ENV - - name: "Read PR number" - run: echo "pr_number=`cat pr`" >> $GITHUB_ENV - - name: "Read Github Ref" - run: echo "original_github_ref=`cat github_ref`" >> $GITHUB_ENV; - - name: "Read Github SHA" - run: echo "commit_sha=`cat sha`" >> $GITHUB_ENV; - - name: "Read Github Repository" - run: echo "original_github_repository=`cat github_repository`" >> $GITHUB_ENV; - - name: "Submit data to server" - run: | - response=$(curl -L -s -o temp_response.txt -w "%{http_code}" \ - -H "x-api-key: $API_KEY" \ - -H "event: ${{ env.github_event }}" \ - -H "sha: ${{ env.commit_sha }}" \ - -H "pr-number: ${{ env.pr_number }}" \ - -F "file=@${{env.commit_sha}}.json.bz2" \ - $SERVER_URL/upload) - - echo "Server response:" - cat temp_response.txt - echo "HTTP Status: $response" - if [ "$response" -gt 200 ]; then - echo "Server did not respond with status 200. Failing the workflow." - exit 1 - fi diff --git a/.github/workflows/sparql-conformance.yml b/.github/workflows/sparql-conformance.yml deleted file mode 100644 index 9103de62bd..0000000000 --- a/.github/workflows/sparql-conformance.yml +++ /dev/null @@ -1,86 +0,0 @@ -name: sparql-conformance - -on: - push: - branches: [ master ] - pull_request: - branches: [ master ] - merge_group: - -jobs: - build: - env: - compiler: clang - compiler-version: 16 - build-type: Release - cmake-flags: "-DCMAKE_C_COMPILER=clang-16 -DCMAKE_CXX_COMPILER=clang++-16" - runs-on: ubuntu-22.04 - steps: - - uses: actions/checkout@v3 - with: - submodules: "recursive" - path: qlever-code - - name: Checkout sparql-test-suite-files - uses: actions/checkout@v3 - with: - repository: "w3c/rdf-tests" - path: sparql-test-suite - - name: Checkout qlever-test-suite - uses: actions/checkout@v3 - with: - repository: "ad-freiburg/sparql-conformance" - path: qlever-test-suite - - name: Set up Python - uses: actions/setup-python@v4 - with: - python-version: "3.10" - - name: Install python dependencies - run: | - python -m pip install --upgrade pip - pip install requests - pip install rdflib - - name: Install dependencies - uses: ./qlever-code/.github/workflows/install-dependencies-ubuntu - - name: Install compiler - uses: ./qlever-code/.github/workflows/install-compiler-ubuntu - with: - compiler: "clang" - compiler-version: "16" - - name: Create build directory - run: mkdir ${{github.workspace}}/qlever-code/build - - name: Configure CMake - run: cmake -S ${{github.workspace}}/qlever-code/ -B ${{github.workspace}}/qlever-code/build ${{env.cmake-flags}} -DCMAKE_BUILD_TYPE=${{env.build-type}} -DLOGLEVEL=INFO -DUSE_PARALLEL=false - - name: Build IndexBuilderMain - run: cmake --build ${{github.workspace}}/qlever-code/build --target IndexBuilderMain --config ${{env.build-type}} -- -j $(nproc) - - name: Build ServerMain - run: cmake --build ${{github.workspace}}/qlever-code/build --target ServerMain --config ${{env.build-type}} -- -j $(nproc) - - name: Execute test suite - run: | - cd qlever-test-suite - python testsuite.py config http://0.0.0.0 7001 ${{github.workspace}}/sparql-test-suite/sparql/sparql11/ ${{github.workspace}}/qlever-code/build/ localhost http-graph-store sparql - python testsuite.py extract - python testsuite.py ${{ github.sha }} - cd .. 
- - name: Save workflow information - # Note: If you change any of the filenames here, you also have to change them in `upload-conformance.yml` - run : | - mkdir -p conformance-report - echo ${{ github.event_name }} > ./conformance-report/event - echo ${{ github.event.number }} > ./conformance-report/pr - echo ${{ github.repository }} > ./conformance-report/github_repository - echo ${GITHUB_REF} > ./conformance-report/github_ref - - name: Save SHA and conformance report if it is a master commit. - if: github.event_name == 'push' - run : | - echo ${{github.sha}} > ./conformance-report/sha - mv ${{ github.workspace}}/qlever-test-suite/results/${{ github.sha }}.json.bz2 conformance-report/${{ github.sha }}.json.bz2 - - name: Save SHA and conformance report if it is a PR. - if: github.event_name == 'pull_request' - run : | - echo ${{github.event.pull_request.head.sha}} > ./conformance-report/sha - mv ${{ github.workspace}}/qlever-test-suite/results/${{ github.sha }}.json.bz2 conformance-report/${{ github.event.pull_request.head.sha }}.json.bz2 - - name: Upload coverage artifact - uses: actions/upload-artifact@v4 - with: - name: conformance-report - path: conformance-report/ \ No newline at end of file diff --git a/.github/workflows/upload-coverage.yml b/.github/workflows/upload-coverage.yml deleted file mode 100644 index 77eacab868..0000000000 --- a/.github/workflows/upload-coverage.yml +++ /dev/null @@ -1,95 +0,0 @@ -# This workflow runs as soon as the workflow from `code-coverage.yml` has -# successfully finished. It downloads the created artifact and runs the -# Codecov uploader. This workflow uses the `workflow_run` trigger. This -# means that it will always be run from the master branch, meaning that -# the contents of this file will always be taken from the master branch, -# even if a PR changes it. Since this approach disallows several attacks -# from malicious PR authors, such workflows have access to the secrets -# stored on GitHub. For details on the `workflow_run` trigger and this -# security measures, see -# https://securitylab.github.com/research/github-actions-preventing-pwn-requests/ -name: Upload code coverage - -on: - workflow_run: - # This has to be the `name:` of the workflow in `code_coverage.yml`. - # Start when this workflow has finished successfully. - workflows: [measure-code-coverage] - types: - - completed - -jobs: - upload: - runs-on: ubuntu-latest - # Only run on successful pull requests. Merge commits to master upload - # their coverage reports directly inside `code-coverage.yml` - if: > - github.event.workflow_run.event == 'pull_request' && - github.event.workflow_run.conclusion == 'success' - steps: - - name: 'Download artifact' - uses: actions/github-script@v7 - # The following script is taken from the link stated at the - # beginning of this file. It manually downloads an artifact - # from another workflow. 
- with: - script: | - var artifacts = await github.rest.actions.listWorkflowRunArtifacts({ - owner: context.repo.owner, - repo: context.repo.repo, - run_id: ${{github.event.workflow_run.id }}, - }); - var matchArtifact = artifacts.data.artifacts.filter((artifact) => { - return artifact.name == "coverage-report" - })[0]; - var download = await github.rest.actions.downloadArtifact({ - owner: context.repo.owner, - repo: context.repo.repo, - artifact_id: matchArtifact.id, - archive_format: 'zip', - }); - var fs = require('fs'); - fs.writeFileSync('${{github.workspace}}/coverage-report.zip', Buffer.from(download.data)); - - run: unzip coverage-report.zip - # Read the metadata into environment variables. - - name: "Read PR number" - run: echo "pr_number=`cat pr`" >> $GITHUB_ENV - - name: "Read Github Ref" - run: echo "original_github_ref=`cat github_ref`" >> $GITHUB_ENV; - - name: "Read Github Repository" - run: echo "original_github_repository=`cat github_repository`" >> $GITHUB_ENV; - # We have to check out the source code from the PR, otherwise Codecov - # won't process the upload properly. We first check it out into a - # subdirectory `qlever-source`, otherwise the coverage report will - # be overwritten. We then move all the files back into the working - # directory such that Codecov will pick them up properly. - - name: "Checkout" - uses: actions/checkout@v4 - with: - repository: ${{env.original_github_repository}} - submodules: "recursive" - ref: ${{env.original_github_ref}} - path: qlever-source - - name: "Move qlever sources up" - run: shopt -s dotglob && mv qlever-source/* . - # For the new version of the codecov action we have to move the coverage file back to its original location, - # else several things don't work... - - name: "Move coverage file to original location" - run: mkdir build && mkdir build/test && mv coverage.lcov build/test - - name: "Upload coverage report" - uses: codecov/codecov-action@v4 - with: - file: ${{github.workspace}}/build/test/coverage.lcov - # Note: technically, a `token` is not required for codecov.io when - # uploading from a public repository, but specifying it avoids the - # nasty spurious failures due to Github's rate limit for codecov's - # public default token. - token: ${{ secrets.CODECOV_TOKEN }} - fail_ci_if_error: true - # Since this workflow runs on the master branch and not in a PR - # we have to specify the following settings manually to make Codecov - # aware of the "actual" origin of the coverage report. - override_branch: ${{github.event.workflow_run.head_branch}} - override_build: ${{github.event.workflow_run.workflow_id}} - override_commit: ${{github.event.workflow_run.head_commit.id}} - override_pr: ${{env.pr_number}} diff --git a/.github/workflows/upload-sonarcloud.yml b/.github/workflows/upload-sonarcloud.yml deleted file mode 100644 index fe365b7384..0000000000 --- a/.github/workflows/upload-sonarcloud.yml +++ /dev/null @@ -1,144 +0,0 @@ -# This workflow runs as soon as the workflow from `sonarcloud.yml` has -# successfully finished. It downloads the created artifact and runs the -# SonarCloud analysis and uploader. This workflow uses the `workflow_run` trigger. This -# means that it will always be run from the master branch, meaning that -# the contents of this file will always be taken from the master branch, -# even if a PR changes it. Since this approach disallows several attacks -# from malicious PR authors, such workflows have access to the secrets -# stored on GitHub. 
For details on the `workflow_run` trigger and this -# security measures, see -# https://securitylab.github.com/research/github-actions-preventing-pwn-requests/ - -# Credit goes to the `1c-syntax` project where I found a code snippet to make -# SonarCloud work with the `workflow_run` trigger: -# https://github.com/1c-syntax/bsl-language-server/blob/21a6bb5172cbc4591a05414d5d0ac221689e45ce/.github/workflows/qa.yml#L16 -name: Upload and run SonarCloud Analysis - -on: - workflow_run: - # This has to be the `name:` of the workflow in `code_coverage.yml`. - # Start when this workflow has finished successfully. - workflows: [sonarcloud-analysis] - types: - - completed - -concurrency: - group: '${{ github.workflow }} @ ${{ github.event.workflow_run.head_repository.full_name || github.ref}} @ ${{ github.event.workflow_run.head_branch || github.event.workflow_run.pull_requests[0].url || github.head_ref || github.ref }}' - cancel-in-progress: true - -jobs: - upload: - runs-on: ubuntu-22.04 - if: github.event.workflow_run.conclusion == 'success' - env: - compiler: clang - compiler-version: 16 - warnings: "-Wall -Wextra " - build-type: Release - BUILD_WRAPPER_OUT_DIR: build_wrapper_output_directory # Directory where build-wrapper output will be placed - steps: - - name: Print concurrency key - run: echo "${{ github.workflow }} @ ${{ github.event.workflow_run.head_repository.full_name || github.ref}} @ ${{ github.event.workflow_run.head_branch || github.event.workflow_run.pull_requests[0].url || github.head_ref || github.ref }}" - - name: 'Download artifact' - uses: actions/github-script@v7 - if: github.event.workflow_run.event == 'pull_request' - # The following script is taken from the link stated at the - # beginning of this file. It manually downloads an artifact - # from another workflow. - with: - script: | - var artifacts = await github.rest.actions.listWorkflowRunArtifacts({ - owner: context.repo.owner, - repo: context.repo.repo, - run_id: ${{github.event.workflow_run.id }}, - }); - var matchArtifact = artifacts.data.artifacts.filter((artifact) => { - return artifact.name == "sonarcloud-report" - })[0]; - var download = await github.rest.actions.downloadArtifact({ - owner: context.repo.owner, - repo: context.repo.repo, - artifact_id: matchArtifact.id, - archive_format: 'zip', - }); - var fs = require('fs'); - fs.writeFileSync('${{github.workspace}}/sonarcloud-report.zip', Buffer.from(download.data)); - - run: unzip sonarcloud-report.zip - if: github.event.workflow_run.event == 'pull_request' - # Read the metadata into environment variables. - - name: "Read PR number" - run: echo "pr_number=`cat pr`" >> $GITHUB_ENV - if: github.event.workflow_run.event == 'pull_request' - - name: "Read Github Ref" - run: echo "original_github_ref=`cat github_ref`" >> $GITHUB_ENV; - if: github.event.workflow_run.event == 'pull_request' - - name: "Read Github Repository" - run: echo "original_github_repository=`cat github_repository`" >> $GITHUB_ENV; - if: github.event.workflow_run.event == 'pull_request' - # We have to check out the source code from the PR, otherwise Codecov - # won't process the upload properly. We first check it out into a - # subdirectory `qlever-source`, otherwise the coverage report will - # be overwritten. We then move all the files back into the working - # directory such that Codecov will pick them up properly. 
- - name: Request GitHub API for PR data - uses: octokit/request-action@v2.x - id: get_pr_data - if: github.event.workflow_run.event == 'pull_request' - with: - route: GET /repos/{full_name}/pulls/{number} - number: ${{ env.pr_number }} - full_name: ${{ github.event.repository.full_name }} - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - - name: "Checkout" - uses: actions/checkout@v4 - with: - repository: ${{ github.event.workflow_run.head_repository.full_name }} - ref: ${{ github.event.workflow_run.head_branch }} - fetch-depth: 0 - submodules: "recursive" - path: qlever-source - - name: Checkout base branch - working-directory: qlever-source - if: github.event.workflow_run.event == 'pull_request' - run: | - git remote add upstream ${{ github.event.repository.clone_url }} - git fetch upstream --no-recurse-submodules - git checkout -B ${{ fromJson(steps.get_pr_data.outputs.data).base.ref }} upstream/${{ fromJson(steps.get_pr_data.outputs.data).base.ref }} - git checkout ${{ github.event.workflow_run.head_branch }} - git clean -ffdx && git reset --hard HEAD - - name: "Move qlever sources up" - run: shopt -s dotglob && mv qlever-source/* . - - name: Install Build Wrapper - uses: SonarSource/sonarqube-scan-action/install-build-wrapper@v6 - - name: Install dependencies - uses: ./.github/workflows/install-dependencies-ubuntu - - name: Install compiler - uses: ./.github/workflows/install-compiler-ubuntu - with: - compiler: ${{env.compiler}} - compiler-version: ${{env.compiler-version}} - - - name: Configure CMake - # Configure CMake in a 'build' subdirectory. `CMAKE_BUILD_TYPE` is only required if you are using a single-configuration generator such as make. - # See https://cmake.org/cmake/help/latest/variable/CMAKE_BUILD_TYPE.html?highlight=cmake_build_type - run: cmake -B ${{github.workspace}}/build -DCMAKE_BUILD_TYPE=${{env.build-type}} -DCMAKE_TOOLCHAIN_FILE="$(pwd)/toolchains/${{env.compiler}}${{env.compiler-version}}.cmake" -DADDITIONAL_COMPILER_FLAGS="${{env.warnings}} ${{env.asan-flags}} ${{env.ubsan-flags}}" -DUSE_PARALLEL=false -DRUN_EXPENSIVE_TESTS=true -DENABLE_EXPENSIVE_CHECKS=true -DLOGLEVEL=TRACE - - name: Build - # Build your program with the given configuration - run: build-wrapper-linux-x86-64 --out-dir ${{ env.BUILD_WRAPPER_OUT_DIR }} cmake --build ${{github.workspace}}/build --config ${{env.build-type}} -- -j $(nproc) - - name: Run sonar-scanner on PR - if: github.event.workflow_run.event == 'pull_request' - uses: SonarSource/sonarqube-scan-action@v6 - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }} - with: - args: --define sonar.cfamily.compile-commands=${{ env.BUILD_WRAPPER_OUT_DIR }}/compile_commands.json -Dsonar.scm.revision=${{ github.event.workflow_run.head_sha }} -Dsonar.pullrequest.key=${{ fromJson(steps.get_pr_data.outputs.data).number }} -Dsonar.pullrequest.branch=${{ fromJson(steps.get_pr_data.outputs.data).head.ref }} -Dsonar.pullrequest.base=${{ fromJson(steps.get_pr_data.outputs.data).base.ref }} - - name: SonarCloud Scan on push - if: github.event.workflow_run.event == 'push' && github.event.workflow_run.head_repository.full_name == github.event.repository.full_name - uses: SonarSource/sonarqube-scan-action@v6 - with: - args: --define sonar.cfamily.compile-commands=${{ env.BUILD_WRAPPER_OUT_DIR }}/compile_commands.json -Dsonar.scm.revision=${{ github.event.workflow_run.head_sha }} -Dsonar.branch.name=${{ github.event.workflow_run.head_branch }} - env: - SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }} - GITHUB_TOKEN: ${{ 
secrets.GITHUB_TOKEN }} From 1456cea47e26296d5b8f7c70fa82949750932e21 Mon Sep 17 00:00:00 2001 From: Tanmay Garg Date: Wed, 14 Jan 2026 13:57:53 +0100 Subject: [PATCH 24/45] Add ICU_ROOT for cmake to find icu in macos native runner --- .github/workflows/macos-appleclang-native.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/macos-appleclang-native.yml b/.github/workflows/macos-appleclang-native.yml index 4da3e01a54..6eafa939cd 100644 --- a/.github/workflows/macos-appleclang-native.yml +++ b/.github/workflows/macos-appleclang-native.yml @@ -29,6 +29,7 @@ jobs: run: | brew update brew install boost icu4c openssl@3 zstd jemalloc pkg-config + echo "ICU_ROOT=$(brew --prefix icu4c)" >> $GITHUB_ENV echo PATH="$(brew --prefix icu4c)/bin:$(brew --prefix icu4c)/sbin:$PATH" >> $GITHUB_ENV echo PKG_CONFIG_PATH="$PKG_CONFIG_PATH:$(brew --prefix icu4c)/lib/pkgconfig" >> $GITHUB_ENV From b4c9dd90a0288be531b4196bfcbcd8a2f2e4f853 Mon Sep 17 00:00:00 2001 From: Tanmay Garg Date: Wed, 14 Jan 2026 14:54:40 +0100 Subject: [PATCH 25/45] Remove repeated wrong sections --- .github/workflows/macos-appleclang-native.yml | 12 ------------ 1 file changed, 12 deletions(-) diff --git a/.github/workflows/macos-appleclang-native.yml b/.github/workflows/macos-appleclang-native.yml index 6eafa939cd..fb124ed576 100644 --- a/.github/workflows/macos-appleclang-native.yml +++ b/.github/workflows/macos-appleclang-native.yml @@ -58,18 +58,6 @@ jobs: working-directory: ${{github.workspace}}/build/test run: env CTEST_OUTPUT_ON_FAILURE=1 ctest -C ${{matrix.build-type}} . - - name: Running and printing the benchmark examples. - run: ${{github.workspace}}/build/benchmark/BenchmarkExamples -p - - - name: Test - working-directory: ${{github.workspace}}/build/test - # Execute tests defined by the CMake configuration. - # See https://cmake.org/cmake/help/latest/manual/ctest.1.html for more detail - run: | - df -h - source ${{github.workspace}}/build/conanrun.sh - env CTEST_OUTPUT_ON_FAILURE=1 ctest -C ${{matrix.build-type}} . - - name: Running and printing the benchmark examples. 
working-directory: ${{github.workspace}}/build run: benchmark/BenchmarkExamples -p From 31955072d1cd9e89000576c7f668eb7cc2736640 Mon Sep 17 00:00:00 2001 From: Tanmay Garg Date: Thu, 15 Jan 2026 23:30:53 +0100 Subject: [PATCH 26/45] Move project version logic to CompilationInfo.cmake --- CMakeLists.txt | 20 -------------------- CompilationInfo.cmake | 17 +++++++++++++++-- src/CompilationInfo.h | 14 +++++++++----- src/ProjectVersion.h.in | 6 ------ src/ServerMain.cpp | 4 ++-- src/index/IndexBuilderMain.cpp | 3 +-- 6 files changed, 27 insertions(+), 37 deletions(-) delete mode 100644 src/ProjectVersion.h.in diff --git a/CMakeLists.txt b/CMakeLists.txt index 416c856a9b..f99b6ecf86 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -426,23 +426,6 @@ else () COMMAND cmake -P ${CMAKE_CURRENT_SOURCE_DIR}/CompilationInfo.cmake) endif () -# For the latest commit return `{most recent tag}-{how many commits back the tag is}-{hash of latest commit}` -execute_process( - COMMAND git describe --tags --always - OUTPUT_VARIABLE PROJECT_VERSION - OUTPUT_STRIP_TRAILING_WHITESPACE -) - -if(PROJECT_VERSION STREQUAL "") - set(PROJECT_VERSION "0.0.0-unknown") -endif() - -configure_file( - ${CMAKE_SOURCE_DIR}/src/ProjectVersion.h.in - ${CMAKE_CURRENT_BINARY_DIR}/ProjectVersion.h - @ONLY -) - set(LOG_LEVEL_FATAL FATAL) set(LOG_LEVEL_ERROR ERROR) set(LOG_LEVEL_WARN WARN) @@ -513,9 +496,6 @@ qlever_target_link_libraries(VocabularyMergerMain index parser ${CMAKE_THREAD_LI add_executable(PrintIndexVersionMain src/PrintIndexVersionMain.cpp) qlever_target_link_libraries(PrintIndexVersionMain util) -# For IndexBuilderMain and ServerMain to find generated ProjectVersion.h -target_include_directories(IndexBuilderMain PRIVATE ${CMAKE_CURRENT_BINARY_DIR}) -target_include_directories(ServerMain PRIVATE ${CMAKE_CURRENT_BINARY_DIR}) # set_target_properties(IndexBuilderMain PROPERTIES OUTPUT_NAME "qlever-index-builder") # set_target_properties(ServerMain PROPERTIES OUTPUT_NAME "qlever-server") diff --git a/CompilationInfo.cmake b/CompilationInfo.cmake index d54428cb23..ee665f2c4c 100644 --- a/CompilationInfo.cmake +++ b/CompilationInfo.cmake @@ -1,5 +1,5 @@ -# A small cmake script that writes the current git hash and time to a -# .cpp file +# A small cmake script that writes the current git hash, project version, +# and time to a .cpp file # Get the current time, remove the trailing newline and add quotes. execute_process(COMMAND date OUTPUT_VARIABLE DATETIME_OF_COMPILATION) @@ -14,16 +14,29 @@ if ((NOT DEFINED GIT_HASH) OR (GIT_HASH STREQUAL "")) endif() message(STATUS "GIT_HASH is ${GIT_HASH}") +# Get the project version from git describe. +# Format: {most recent tag}-{commits since tag}-g{short hash} or just the short hash if no tags. +execute_process(COMMAND git describe --tags --always + OUTPUT_VARIABLE PROJECT_VERSION + OUTPUT_STRIP_TRAILING_WHITESPACE) +if ((NOT DEFINED PROJECT_VERSION) OR (PROJECT_VERSION STREQUAL "")) + set(PROJECT_VERSION "0.0.0-unknown") +endif() +set(PROJECT_VERSION "\"${PROJECT_VERSION}\"") +message(STATUS "PROJECT_VERSION is ${PROJECT_VERSION}") + # Write the .cpp file. 
set(CONSTANTS "#include \"CompilationInfo.h\" namespace qlever::version { constexpr std::string_view GitHash = ${GIT_HASH}; constexpr std::string_view GitShortHash = GitHash.substr(0, 6); constexpr std::string_view DatetimeOfCompilation = ${DATETIME_OF_COMPILATION}; +constexpr std::string_view ProjectVersion = ${PROJECT_VERSION}; void copyVersionInfo() { *gitShortHashWithoutLinking.wlock() = GitShortHash; *datetimeOfCompilationWithoutLinking.wlock() = DatetimeOfCompilation; + *projectVersionWithoutLinking.wlock() = ProjectVersion; } }") diff --git a/src/CompilationInfo.h b/src/CompilationInfo.h index 3dcb72b59b..7321f1ce0a 100644 --- a/src/CompilationInfo.h +++ b/src/CompilationInfo.h @@ -14,15 +14,17 @@ #include "util/Synchronized.h" namespace qlever::version { -// The following two constants require linking against the `compilationInfo` -// library which is recreated on every compilation. Short version of the hash of +// The following constants require linking against the `compilationInfo` +// library which is recreated on every compilation. -// The commit that was used to compile QLever. +// Short version of the hash of the commit that was used to compile QLever. extern const std::string_view GitShortHash; // The date and time at which QLever was compiled. extern const std::string_view DatetimeOfCompilation; +// The project version from `git describe --tags --always`. +extern const std::string_view ProjectVersion; -// The following two versions of the above constants do NOT require linking +// The following versions of the above constants do NOT require linking // against the `compilationInfo` library, but only the inclusion of this header. // They only have meaningful values once the `copyVersionInfo` function (below) // was called. This is currently done in the `main` functions of @@ -31,7 +33,9 @@ inline ad_utility::Synchronized gitShortHashWithoutLinking{ std::string_view{"git short hash not set"}}; inline ad_utility::Synchronized datetimeOfCompilationWithoutLinking{ - std::string_view{"git short hash not set"}}; + std::string_view{"datetime of compilation not set"}}; +inline ad_utility::Synchronized projectVersionWithoutLinking{ + std::string_view{"project version not set"}}; // Copy the values from the constants that require linking to the `inline` // variables that don't require linking. For details see above. 
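A minimal illustration (not part of this patch; file and function names hypothetical) of how another translation unit can read the new version info through the header-only path, assuming only the `Synchronized` accessor pattern shown above, i.e. that dereferencing the proxy returned by `wlock()` yields the stored `std::string_view`:

#include <iostream>

#include "CompilationInfo.h"

// Prints a version banner. Only meaningful after
// qlever::version::copyVersionInfo() has been called, e.g. at the start of
// main() as ServerMain.cpp and IndexBuilderMain.cpp do.
void printVersionBanner() {
  // Copy the string_views out; each wlock() proxy is released at the end of
  // its full expression.
  auto version = *qlever::version::projectVersionWithoutLinking.wlock();
  auto shortHash = *qlever::version::gitShortHashWithoutLinking.wlock();
  std::cout << "QLever " << version << " (commit " << shortHash << ")\n";
}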
diff --git a/src/ProjectVersion.h.in b/src/ProjectVersion.h.in deleted file mode 100644 index a42136bb7f..0000000000 --- a/src/ProjectVersion.h.in +++ /dev/null @@ -1,6 +0,0 @@ -#ifndef QLEVER_SRC_PROJECTVERSION_H -#define QLEVER_SRC_PROJECTVERSION_H - -#define QLEVER_SRC_PROJECTVERSION "@PROJECT_VERSION@" - -#endif \ No newline at end of file diff --git a/src/ServerMain.cpp b/src/ServerMain.cpp index 57d9153d2f..30d73e44ef 100644 --- a/src/ServerMain.cpp +++ b/src/ServerMain.cpp @@ -12,7 +12,6 @@ #include #include "CompilationInfo.h" -#include "ProjectVersion.h" #include "engine/Server.h" #include "global/Constants.h" #include "global/RuntimeParameters.h" @@ -186,7 +185,8 @@ int main(int argc, char** argv) { return EXIT_SUCCESS; } if (optionsMap.count("version")) { - std::cout << "QLever ServerMain " << QLEVER_SRC_PROJECTVERSION << '\n'; + std::cout << "QLever ServerMain " << qlever::version::ProjectVersion + << '\n'; return EXIT_SUCCESS; } po::notify(optionsMap); diff --git a/src/index/IndexBuilderMain.cpp b/src/index/IndexBuilderMain.cpp index 55b0e0d75f..c29e4ba482 100644 --- a/src/index/IndexBuilderMain.cpp +++ b/src/index/IndexBuilderMain.cpp @@ -15,7 +15,6 @@ #include #include "CompilationInfo.h" -#include "ProjectVersion.h" #include "global/Constants.h" #include "index/ConstantsIndexBuilding.h" #include "libqlever/Qlever.h" @@ -263,7 +262,7 @@ int main(int argc, char** argv) { return EXIT_SUCCESS; } if (optionsMap.count("version")) { - std::cout << "QLever IndexBuilderMain " << QLEVER_SRC_PROJECTVERSION + std::cout << "QLever IndexBuilderMain " << qlever::version::ProjectVersion << '\n'; return EXIT_SUCCESS; } From a20c28e5c9439364d65468896efc013ab0bb12d9 Mon Sep 17 00:00:00 2001 From: Tanmay Garg Date: Fri, 16 Jan 2026 16:44:05 +0100 Subject: [PATCH 27/45] Clean up cpack code and change maintainer to Julian --- CMakeLists.txt | 9 +-------- 1 file changed, 1 insertion(+), 8 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index f99b6ecf86..b2ce36bba3 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -496,10 +496,6 @@ qlever_target_link_libraries(VocabularyMergerMain index parser ${CMAKE_THREAD_LI add_executable(PrintIndexVersionMain src/PrintIndexVersionMain.cpp) qlever_target_link_libraries(PrintIndexVersionMain util) - -# set_target_properties(IndexBuilderMain PROPERTIES OUTPUT_NAME "qlever-index-builder") -# set_target_properties(ServerMain PROPERTIES OUTPUT_NAME "qlever-server") - install(TARGETS ServerMain IndexBuilderMain @@ -537,7 +533,7 @@ else() set(CPACK_PACKAGE_VERSION "0.0.0-${HASH}") endif() -set(CPACK_PACKAGE_CONTACT "bast@cs.uni-freiburg.de") +set(CPACK_PACKAGE_CONTACT "mundhahj@tf.uni-freiburg.de") set(CPACK_PACKAGE_HOMEPAGE_URL "https://github.com/ad-freiburg/qlever") set(CPACK_OUTPUT_FILE_PREFIX "${CMAKE_CURRENT_BINARY_DIR}/packages") @@ -547,7 +543,4 @@ set(CPACK_DEBIAN_FILE_NAME DEB-DEFAULT) set(CPACK_VERBATIM_VARIABLES YES) -set(CPACK_PACKAGE_DESCRIPTION "QLever Graph Database") -set(CPACK_PACKAGE_DESCRIPTION_SUMMARY "QLever Graph Database implementing the RDF and SPARQL standards.") - include(CPack) From 0de5c518b3171372fd0074d1bbf8f0128232008c Mon Sep 17 00:00:00 2001 From: Tanmay Garg Date: Fri, 16 Jan 2026 16:48:54 +0100 Subject: [PATCH 28/45] Take in the latest changes from debian cpack branch --- CMakeLists.txt | 29 ++--------------------------- CompilationInfo.cmake | 17 +++++++++++++++-- e2e/e2e.sh | 2 +- src/CompilationInfo.h | 14 +++++++++----- src/ServerMain.cpp | 4 ++-- src/index/IndexBuilderMain.cpp | 3 +-- 6 files changed, 30 
insertions(+), 39 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index c8cd4efb48..760bc9aad7 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -443,23 +443,6 @@ else () COMMAND cmake -P ${CMAKE_CURRENT_SOURCE_DIR}/CompilationInfo.cmake) endif () -# For the latest commit return `{most recent tag}-{how many commits back the tag is}-{hash of latest commit}` -execute_process( - COMMAND git describe --tags --always - OUTPUT_VARIABLE PROJECT_VERSION - OUTPUT_STRIP_TRAILING_WHITESPACE -) - -if(PROJECT_VERSION STREQUAL "") - set(PROJECT_VERSION "0.0.0-unknown") -endif() - -configure_file( - ${CMAKE_SOURCE_DIR}/src/ProjectVersion.h.in - ${CMAKE_CURRENT_BINARY_DIR}/ProjectVersion.h - @ONLY -) - set(LOG_LEVEL_FATAL FATAL) set(LOG_LEVEL_ERROR ERROR) set(LOG_LEVEL_WARN WARN) @@ -468,7 +451,6 @@ set(LOG_LEVEL_DEBUG DEBUG) set(LOG_LEVEL_TIMING TIMING) set(LOG_LEVEL_TRACE TRACE) - if (CMAKE_BUILD_TYPE MATCHES DEBUG) set(LOGLEVEL DEBUG CACHE STRING "The loglevel") else () @@ -530,11 +512,7 @@ qlever_target_link_libraries(VocabularyMergerMain index parser ${CMAKE_THREAD_LI add_executable(PrintIndexVersionMain src/PrintIndexVersionMain.cpp) qlever_target_link_libraries(PrintIndexVersionMain util) -# For IndexBuilderMain and ServerMain to find generated ProjectVersion.h -target_include_directories(IndexBuilderMain PRIVATE ${CMAKE_CURRENT_BINARY_DIR}) -target_include_directories(ServerMain PRIVATE ${CMAKE_CURRENT_BINARY_DIR}) - -set_target_properties(IndexBuilderMain PROPERTIES OUTPUT_NAME "qlever-loader") +set_target_properties(IndexBuilderMain PROPERTIES OUTPUT_NAME "qlever-index") set_target_properties(ServerMain PROPERTIES OUTPUT_NAME "qlever-server") install(TARGETS @@ -574,7 +552,7 @@ else() set(CPACK_PACKAGE_VERSION "0.0.0-${HASH}") endif() -set(CPACK_PACKAGE_CONTACT "bast@cs.uni-freiburg.de") +set(CPACK_PACKAGE_CONTACT "mundhahj@tf.uni-freiburg.de") set(CPACK_PACKAGE_HOMEPAGE_URL "https://github.com/ad-freiburg/qlever") set(CPACK_OUTPUT_FILE_PREFIX "${CMAKE_CURRENT_BINARY_DIR}/packages") @@ -584,7 +562,4 @@ set(CPACK_DEBIAN_FILE_NAME DEB-DEFAULT) set(CPACK_VERBATIM_VARIABLES YES) -set(CPACK_PACKAGE_DESCRIPTION "QLever Graph Database") -set(CPACK_PACKAGE_DESCRIPTION_SUMMARY "QLever Graph Database implementing the RDF and SPARQL standards.") - include(CPack) diff --git a/CompilationInfo.cmake b/CompilationInfo.cmake index d54428cb23..ee665f2c4c 100644 --- a/CompilationInfo.cmake +++ b/CompilationInfo.cmake @@ -1,5 +1,5 @@ -# A small cmake script that writes the current git hash and time to a -# .cpp file +# A small cmake script that writes the current git hash, project version, +# and time to a .cpp file # Get the current time, remove the trailing newline and add quotes. execute_process(COMMAND date OUTPUT_VARIABLE DATETIME_OF_COMPILATION) @@ -14,16 +14,29 @@ if ((NOT DEFINED GIT_HASH) OR (GIT_HASH STREQUAL "")) endif() message(STATUS "GIT_HASH is ${GIT_HASH}") +# Get the project version from git describe. +# Format: {most recent tag}-{commits since tag}-g{short hash} or just the short hash if no tags. +execute_process(COMMAND git describe --tags --always + OUTPUT_VARIABLE PROJECT_VERSION + OUTPUT_STRIP_TRAILING_WHITESPACE) +if ((NOT DEFINED PROJECT_VERSION) OR (PROJECT_VERSION STREQUAL "")) + set(PROJECT_VERSION "0.0.0-unknown") +endif() +set(PROJECT_VERSION "\"${PROJECT_VERSION}\"") +message(STATUS "PROJECT_VERSION is ${PROJECT_VERSION}") + # Write the .cpp file. 
set(CONSTANTS "#include \"CompilationInfo.h\" namespace qlever::version { constexpr std::string_view GitHash = ${GIT_HASH}; constexpr std::string_view GitShortHash = GitHash.substr(0, 6); constexpr std::string_view DatetimeOfCompilation = ${DATETIME_OF_COMPILATION}; +constexpr std::string_view ProjectVersion = ${PROJECT_VERSION}; void copyVersionInfo() { *gitShortHashWithoutLinking.wlock() = GitShortHash; *datetimeOfCompilationWithoutLinking.wlock() = DatetimeOfCompilation; + *projectVersionWithoutLinking.wlock() = ProjectVersion; } }") diff --git a/e2e/e2e.sh b/e2e/e2e.sh index 3e1a39c2d5..b75b78e3c1 100755 --- a/e2e/e2e.sh +++ b/e2e/e2e.sh @@ -28,7 +28,7 @@ REBUILD_THE_INDEX="YES" INDEX_DIRECTORY="." #if not set, we will build the index ourselves. BINARY_DIRECTORY="build" BUILD_TEXT_INDEX_SEPARATELY="NO" -INDEX_BUILDER_BINARY="qlever-loader" +INDEX_BUILDER_BINARY="qlever-index" SERVER_BINARY="qlever-server" while getopts ":i:d:t" arg; do diff --git a/src/CompilationInfo.h b/src/CompilationInfo.h index 3dcb72b59b..7321f1ce0a 100644 --- a/src/CompilationInfo.h +++ b/src/CompilationInfo.h @@ -14,15 +14,17 @@ #include "util/Synchronized.h" namespace qlever::version { -// The following two constants require linking against the `compilationInfo` -// library which is recreated on every compilation. Short version of the hash of +// The following constants require linking against the `compilationInfo` +// library which is recreated on every compilation. -// The commit that was used to compile QLever. +// Short version of the hash of the commit that was used to compile QLever. extern const std::string_view GitShortHash; // The date and time at which QLever was compiled. extern const std::string_view DatetimeOfCompilation; +// The project version from `git describe --tags --always`. +extern const std::string_view ProjectVersion; -// The following two versions of the above constants do NOT require linking +// The following versions of the above constants do NOT require linking // against the `compilationInfo` library, but only the inclusion of this header. // They only have meaningful values once the `copyVersionInfo` function (below) // was called. This is currently done in the `main` functions of @@ -31,7 +33,9 @@ inline ad_utility::Synchronized gitShortHashWithoutLinking{ std::string_view{"git short hash not set"}}; inline ad_utility::Synchronized datetimeOfCompilationWithoutLinking{ - std::string_view{"git short hash not set"}}; + std::string_view{"datetime of compilation not set"}}; +inline ad_utility::Synchronized projectVersionWithoutLinking{ + std::string_view{"project version not set"}}; // Copy the values from the constants that require linking to the `inline` // variables that don't require linking. For details see above. 
diff --git a/src/ServerMain.cpp b/src/ServerMain.cpp index 57d9153d2f..30d73e44ef 100644 --- a/src/ServerMain.cpp +++ b/src/ServerMain.cpp @@ -12,7 +12,6 @@ #include #include "CompilationInfo.h" -#include "ProjectVersion.h" #include "engine/Server.h" #include "global/Constants.h" #include "global/RuntimeParameters.h" @@ -186,7 +185,8 @@ int main(int argc, char** argv) { return EXIT_SUCCESS; } if (optionsMap.count("version")) { - std::cout << "QLever ServerMain " << QLEVER_SRC_PROJECTVERSION << '\n'; + std::cout << "QLever ServerMain " << qlever::version::ProjectVersion + << '\n'; return EXIT_SUCCESS; } po::notify(optionsMap); diff --git a/src/index/IndexBuilderMain.cpp b/src/index/IndexBuilderMain.cpp index 55b0e0d75f..c29e4ba482 100644 --- a/src/index/IndexBuilderMain.cpp +++ b/src/index/IndexBuilderMain.cpp @@ -15,7 +15,6 @@ #include #include "CompilationInfo.h" -#include "ProjectVersion.h" #include "global/Constants.h" #include "index/ConstantsIndexBuilding.h" #include "libqlever/Qlever.h" @@ -263,7 +262,7 @@ int main(int argc, char** argv) { return EXIT_SUCCESS; } if (optionsMap.count("version")) { - std::cout << "QLever IndexBuilderMain " << QLEVER_SRC_PROJECTVERSION + std::cout << "QLever IndexBuilderMain " << qlever::version::ProjectVersion << '\n'; return EXIT_SUCCESS; } From 03b44c51ffe1cb7320cb70391b2d873126446ea1 Mon Sep 17 00:00:00 2001 From: Tanmay Garg Date: Mon, 19 Jan 2026 00:13:46 +0100 Subject: [PATCH 29/45] Fix cmake generated package version number --- CMakeLists.txt | 4 ++++ CompilationInfo.cmake | 10 ++-------- GitVersion.cmake | 14 ++++++++++++++ 3 files changed, 20 insertions(+), 8 deletions(-) create mode 100644 GitVersion.cmake diff --git a/CMakeLists.txt b/CMakeLists.txt index b2ce36bba3..88e28b47eb 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -506,6 +506,10 @@ install(TARGETS # CPack packaging ############################################################### +# Get version from git describe for CPack +include(GitVersion.cmake) +set(PROJECT_VERSION ${GIT_VERSION}) + set(CPACK_PACKAGE_NAME "qlever") # Set CPACK_PACKAGE_VERSION according to Debian packaging naming rules diff --git a/CompilationInfo.cmake b/CompilationInfo.cmake index ee665f2c4c..793a58ae03 100644 --- a/CompilationInfo.cmake +++ b/CompilationInfo.cmake @@ -15,14 +15,8 @@ endif() message(STATUS "GIT_HASH is ${GIT_HASH}") # Get the project version from git describe. -# Format: {most recent tag}-{commits since tag}-g{short hash} or just the short hash if no tags. -execute_process(COMMAND git describe --tags --always - OUTPUT_VARIABLE PROJECT_VERSION - OUTPUT_STRIP_TRAILING_WHITESPACE) -if ((NOT DEFINED PROJECT_VERSION) OR (PROJECT_VERSION STREQUAL "")) - set(PROJECT_VERSION "0.0.0-unknown") -endif() -set(PROJECT_VERSION "\"${PROJECT_VERSION}\"") +include(${CMAKE_CURRENT_LIST_DIR}/GitVersion.cmake) +set(PROJECT_VERSION "\"${GIT_VERSION}\"") message(STATUS "PROJECT_VERSION is ${PROJECT_VERSION}") # Write the .cpp file. diff --git a/GitVersion.cmake b/GitVersion.cmake new file mode 100644 index 0000000000..230f5a470d --- /dev/null +++ b/GitVersion.cmake @@ -0,0 +1,14 @@ +# Get the project version from git describe. +# Format : {most recent tag } - {commits since tag } - g{short hash } +# or just the short hash if no tags. 
+execute_process( + COMMAND git describe --tags --always + WORKING_DIRECTORY ${CMAKE_SOURCE_DIR} + OUTPUT_VARIABLE PROJECT_VERSION + OUTPUT_STRIP_TRAILING_WHITESPACE + ERROR_QUIET +) + +if ((NOT DEFINED PROJECT_VERSION) OR (PROJECT_VERSION STREQUAL "")) + set(PROJECT_VERSION "0.0.0-unknown") +endif() From bda94aeb03bc78b2248af769e1fa2ff3f7fb8a07 Mon Sep 17 00:00:00 2001 From: Tanmay Garg Date: Mon, 19 Jan 2026 00:20:51 +0100 Subject: [PATCH 30/45] Fix PROJECT_VERSION bug --- CMakeLists.txt | 1 - CompilationInfo.cmake | 2 +- 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index 88e28b47eb..32ebe9abe1 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -508,7 +508,6 @@ install(TARGETS # Get version from git describe for CPack include(GitVersion.cmake) -set(PROJECT_VERSION ${GIT_VERSION}) set(CPACK_PACKAGE_NAME "qlever") diff --git a/CompilationInfo.cmake b/CompilationInfo.cmake index 793a58ae03..b4f7a6991c 100644 --- a/CompilationInfo.cmake +++ b/CompilationInfo.cmake @@ -16,7 +16,7 @@ message(STATUS "GIT_HASH is ${GIT_HASH}") # Get the project version from git describe. include(${CMAKE_CURRENT_LIST_DIR}/GitVersion.cmake) -set(PROJECT_VERSION "\"${GIT_VERSION}\"") +set(PROJECT_VERSION "\"${PROJECT_VERSION}\"") message(STATUS "PROJECT_VERSION is ${PROJECT_VERSION}") # Write the .cpp file. From 6f524dfad260ef9ddd57ec9fe331d4b102fa3240 Mon Sep 17 00:00:00 2001 From: Tanmay Garg Date: Tue, 20 Jan 2026 23:47:43 +0100 Subject: [PATCH 31/45] Move ICU_ROOT to cmake instruction in workflow --- .github/workflows/macos-appleclang-native.yml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/.github/workflows/macos-appleclang-native.yml b/.github/workflows/macos-appleclang-native.yml index fb124ed576..03a6b1b1c9 100644 --- a/.github/workflows/macos-appleclang-native.yml +++ b/.github/workflows/macos-appleclang-native.yml @@ -29,7 +29,6 @@ jobs: run: | brew update brew install boost icu4c openssl@3 zstd jemalloc pkg-config - echo "ICU_ROOT=$(brew --prefix icu4c)" >> $GITHUB_ENV echo PATH="$(brew --prefix icu4c)/bin:$(brew --prefix icu4c)/sbin:$PATH" >> $GITHUB_ENV echo PKG_CONFIG_PATH="$PKG_CONFIG_PATH:$(brew --prefix icu4c)/lib/pkgconfig" >> $GITHUB_ENV @@ -45,7 +44,7 @@ jobs: - name: Configure CMake working-directory: ${{ github.workspace }}/build - run: cmake -DCMAKE_BUILD_TYPE=${{ matrix.build-type }} -DCMAKE_OSX_DEPLOYMENT_TARGET=11.0 -DLOGLEVEL=INFO -DUSE_PARALLEL=false -D_NO_TIMING_TESTS=ON -DCOMPILER_SUPPORTS_MARCH_NATIVE=FALSE -GNinja .. + run: cmake -DCMAKE_BUILD_TYPE=${{ matrix.build-type }} -DCMAKE_OSX_DEPLOYMENT_TARGET=11.0 -DLOGLEVEL=INFO -DUSE_PARALLEL=false -D_NO_TIMING_TESTS=ON -DCOMPILER_SUPPORTS_MARCH_NATIVE=FALSE -DICU_ROOT=$(brew --prefix icu4c) -GNinja .. 
- name: Build # Build your program with the given configuration From 081b2894db5bdd1091316e927424f3cab36a6443 Mon Sep 17 00:00:00 2001 From: Tanmay Garg Date: Wed, 21 Jan 2026 00:22:51 +0100 Subject: [PATCH 32/45] Add VERSION_SUFFIX and PACKAGE_REVISION variables for cpack Debian package --- CMakeLists.txt | 44 ++++++++++++++++++++++++++++++++++++++++++-- 1 file changed, 42 insertions(+), 2 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index 32ebe9abe1..7ddb586c55 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -514,10 +514,10 @@ set(CPACK_PACKAGE_NAME "qlever") # Set CPACK_PACKAGE_VERSION according to Debian packaging naming rules # If git describe output begins with v + semver: if (PROJECT_VERSION MATCHES "^v([0-9]+\\.[0-9]+\\.[0-9]+)(-[0-9]+)?(-g[0-9a-f]+)?$") - + # CMAKE_MATCH_1 is "major.minor.patch" set(TAG_VERSION "${CMAKE_MATCH_1}") - + # CMAKE_MATCH_2 is "-commits_ahead" (or empty) if (CMAKE_MATCH_2) string(SUBSTRING "${CMAKE_MATCH_2}" 1 -1 COMMITS_AHEAD) @@ -536,6 +536,46 @@ else() set(CPACK_PACKAGE_VERSION "0.0.0-${HASH}") endif() +# Debian package revision number (e.g., the "1" in "0.5.42-1~plucky~25.04"). +# Override with: cmake -DPACKAGE_REVISION=2 +set(PACKAGE_REVISION "1" CACHE STRING "Debian package revision number") + +# Version suffix for distribution-specific packages (e.g., "~plucky~25.04"). +# Auto-detected from the system, override with: cmake -DVERSION_SUFFIX="~noble~24.04" +# Map Ubuntu codenames to version numbers +set(UBUNTU_jammy "22.04") +set(UBUNTU_noble "24.04") +set(UBUNTU_oracular "24.10") +set(UBUNTU_plucky "25.04") +set(UBUNTU_questing "25.10") +set(UBUNTU_resolute "26.04") + +set(DEFAULT_VERSION_SUFFIX "") +if (EXISTS "/etc/os-release") + # Read os-release to get distribution info + file(STRINGS "/etc/os-release" OS_RELEASE_CONTENTS) + + # Prefer UBUNTU_CODENAME (set on Ubuntu derivatives like Linux Mint) + # Fall back to VERSION_CODENAME + set(DISTRO_CODENAME "") + foreach(line ${OS_RELEASE_CONTENTS}) + if (line MATCHES "^UBUNTU_CODENAME=(.*)$") + set(DISTRO_CODENAME "${CMAKE_MATCH_1}") + break() + elseif (line MATCHES "^VERSION_CODENAME=(.*)$" AND NOT DISTRO_CODENAME) + set(DISTRO_CODENAME "${CMAKE_MATCH_1}") + endif() + endforeach() + + if (DEFINED UBUNTU_${DISTRO_CODENAME}) + set(DEFAULT_VERSION_SUFFIX "~${DISTRO_CODENAME}~${UBUNTU_${DISTRO_CODENAME}}") + endif() +endif() + +set(VERSION_SUFFIX "${DEFAULT_VERSION_SUFFIX}" CACHE STRING "Distribution-specific version suffix") + +set(CPACK_PACKAGE_VERSION "${CPACK_PACKAGE_VERSION}-${PACKAGE_REVISION}${VERSION_SUFFIX}") + set(CPACK_PACKAGE_CONTACT "mundhahj@tf.uni-freiburg.de") set(CPACK_PACKAGE_HOMEPAGE_URL "https://github.com/ad-freiburg/qlever") From efa82b5976b46ba1ecffdf7c8b7634051960feb6 Mon Sep 17 00:00:00 2001 From: Tanmay Garg Date: Wed, 21 Jan 2026 00:37:27 +0100 Subject: [PATCH 33/45] In debian package name, change commits ahead separator from - to + --- CMakeLists.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index 7ddb586c55..8e4f80523c 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -521,7 +521,7 @@ if (PROJECT_VERSION MATCHES "^v([0-9]+\\.[0-9]+\\.[0-9]+)(-[0-9]+)?(-g[0-9a-f]+) # CMAKE_MATCH_2 is "-commits_ahead" (or empty) if (CMAKE_MATCH_2) string(SUBSTRING "${CMAKE_MATCH_2}" 1 -1 COMMITS_AHEAD) - set(CPACK_PACKAGE_VERSION "${TAG_VERSION}-${COMMITS_AHEAD}") + set(CPACK_PACKAGE_VERSION "${TAG_VERSION}+${COMMITS_AHEAD}") else() set(CPACK_PACKAGE_VERSION "${TAG_VERSION}") endif() @@ -533,7 +533,7 @@ else() else() 
set(HASH "${PROJECT_VERSION}") endif() - set(CPACK_PACKAGE_VERSION "0.0.0-${HASH}") + set(CPACK_PACKAGE_VERSION "0.0.0+${HASH}") endif() # Debian package revision number (e.g., the "1" in "0.5.42-1~plucky~25.04"). From e7315bdfaef9f2fe3d963f258a2e87a3d71c3e02 Mon Sep 17 00:00:00 2001 From: Tanmay Garg Date: Wed, 21 Jan 2026 00:44:58 +0100 Subject: [PATCH 34/45] Change package name from qlever to qlever-bin --- CMakeLists.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index 8e4f80523c..f223b69b59 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -509,7 +509,7 @@ install(TARGETS # Get version from git describe for CPack include(GitVersion.cmake) -set(CPACK_PACKAGE_NAME "qlever") +set(CPACK_PACKAGE_NAME "qlever-bin") # Set CPACK_PACKAGE_VERSION according to Debian packaging naming rules # If git describe output begins with v + semver: From 76c39c9504ff3a8e141a4e4fd2a0998eacbf85d1 Mon Sep 17 00:00:00 2001 From: Julian Mundhahs Date: Wed, 21 Jan 2026 11:22:52 +0100 Subject: [PATCH 35/45] complement distribution detection remove unsupported (EOL) ubuntu releases and add debian releases --- CMakeLists.txt | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index f223b69b59..35373f419b 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -543,12 +543,12 @@ set(PACKAGE_REVISION "1" CACHE STRING "Debian package revision number") # Version suffix for distribution-specific packages (e.g., "~plucky~25.04"). # Auto-detected from the system, override with: cmake -DVERSION_SUFFIX="~noble~24.04" # Map Ubuntu codenames to version numbers -set(UBUNTU_jammy "22.04") set(UBUNTU_noble "24.04") -set(UBUNTU_oracular "24.10") set(UBUNTU_plucky "25.04") set(UBUNTU_questing "25.10") set(UBUNTU_resolute "26.04") +set(DEBIAN_trixie "deb13") +set(DEBIAN_bookworm "deb12") set(DEFAULT_VERSION_SUFFIX "") if (EXISTS "/etc/os-release") @@ -569,6 +569,8 @@ if (EXISTS "/etc/os-release") if (DEFINED UBUNTU_${DISTRO_CODENAME}) set(DEFAULT_VERSION_SUFFIX "~${DISTRO_CODENAME}~${UBUNTU_${DISTRO_CODENAME}}") + elseif (DEFINED DEBIAN_${DISTRO_CODENAME}) + set(DEFAULT_VERSION_SUFFIX "~${DISTRO_CODENAME}~${DEBIAN_${DISTRO_CODENAME}}") endif() endif() From 0861eea3b973a257cadbb99374e9b57498172b12 Mon Sep 17 00:00:00 2001 From: Tanmay Garg Date: Wed, 21 Jan 2026 14:12:13 +0100 Subject: [PATCH 36/45] Remove UBUNTU_CODENAME based logic and have a fully automated way of constructing VERSION_SUFFIX --- CMakeLists.txt | 40 ++++++++++++++++++---------------------- 1 file changed, 18 insertions(+), 22 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index 35373f419b..e6589c2c3a 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -541,36 +541,32 @@ endif() set(PACKAGE_REVISION "1" CACHE STRING "Debian package revision number") # Version suffix for distribution-specific packages (e.g., "~plucky~25.04"). -# Auto-detected from the system, override with: cmake -DVERSION_SUFFIX="~noble~24.04" -# Map Ubuntu codenames to version numbers -set(UBUNTU_noble "24.04") -set(UBUNTU_plucky "25.04") -set(UBUNTU_questing "25.10") -set(UBUNTU_resolute "26.04") -set(DEBIAN_trixie "deb13") -set(DEBIAN_bookworm "deb12") - +# Auto-detected from /etc/os-release for Ubuntu and Debian. 
+# For derived distros, override with: cmake -DVERSION_SUFFIX="~noble~24.04" set(DEFAULT_VERSION_SUFFIX "") if (EXISTS "/etc/os-release") - # Read os-release to get distribution info file(STRINGS "/etc/os-release" OS_RELEASE_CONTENTS) - # Prefer UBUNTU_CODENAME (set on Ubuntu derivatives like Linux Mint) - # Fall back to VERSION_CODENAME - set(DISTRO_CODENAME "") + set(_OS_ID "") + set(_OS_VERSION_ID "") + set(_OS_VERSION_CODENAME "") + foreach(line ${OS_RELEASE_CONTENTS}) - if (line MATCHES "^UBUNTU_CODENAME=(.*)$") - set(DISTRO_CODENAME "${CMAKE_MATCH_1}") - break() - elseif (line MATCHES "^VERSION_CODENAME=(.*)$" AND NOT DISTRO_CODENAME) - set(DISTRO_CODENAME "${CMAKE_MATCH_1}") + if (line MATCHES "^ID=\"?([^\"]*)\"?$") + set(_OS_ID "${CMAKE_MATCH_1}") + elseif (line MATCHES "^VERSION_ID=\"?([^\"]*)\"?$") + set(_OS_VERSION_ID "${CMAKE_MATCH_1}") + elseif (line MATCHES "^VERSION_CODENAME=\"?([^\"]*)\"?$") + set(_OS_VERSION_CODENAME "${CMAKE_MATCH_1}") endif() endforeach() - if (DEFINED UBUNTU_${DISTRO_CODENAME}) - set(DEFAULT_VERSION_SUFFIX "~${DISTRO_CODENAME}~${UBUNTU_${DISTRO_CODENAME}}") - elseif (DEFINED DEBIAN_${DISTRO_CODENAME}) - set(DEFAULT_VERSION_SUFFIX "~${DISTRO_CODENAME}~${DEBIAN_${DISTRO_CODENAME}}") + if (_OS_VERSION_CODENAME AND _OS_VERSION_ID) + if (_OS_ID STREQUAL "ubuntu") + set(DEFAULT_VERSION_SUFFIX "~${_OS_VERSION_CODENAME}~${_OS_VERSION_ID}") + elseif (_OS_ID STREQUAL "debian") + set(DEFAULT_VERSION_SUFFIX "~${_OS_VERSION_CODENAME}~deb${_OS_VERSION_ID}") + endif() endif() endif() From 8bcae9fee7d9931d299cc995148ccdff3cbeb3b5 Mon Sep 17 00:00:00 2001 From: Tanmay Garg Date: Wed, 21 Jan 2026 14:12:40 +0100 Subject: [PATCH 37/45] Add GitVersion.cmake copy step to Dockerfile --- Dockerfile | 1 + 1 file changed, 1 insertion(+) diff --git a/Dockerfile b/Dockerfile index df573b4110..1989694c63 100644 --- a/Dockerfile +++ b/Dockerfile @@ -28,6 +28,7 @@ COPY benchmark /qlever/benchmark/ COPY .git /qlever/.git/ COPY CMakeLists.txt /qlever/ COPY CompilationInfo.cmake /qlever/ +COPY GitVersion.cmake /qlever/ # Build and compile. By default, also compile and run all tests. In order not # to, build the image with `--build-arg RUN_TESTS=false`. 
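Putting the version pieces together: with `git describe` reporting `v0.5.42-12-g1a2b3c4` on Ubuntu 25.04, the Debian package version becomes `0.5.42+12-1~plucky~25.04` (upstream version, commits ahead, package revision, distribution suffix). A minimal sketch of producing the package locally, with the DEB generator requested explicitly via `-G DEB`; the override values below are illustrative and optional:

    cmake -B build -S . -DCMAKE_BUILD_TYPE=Release \
          -DPACKAGE_REVISION=2 -DVERSION_SUFFIX="~noble~24.04"
    cmake --build build --target ServerMain IndexBuilderMain -j $(nproc)
    (cd build && cpack -G DEB)
    dpkg-deb --info build/packages/*.deb   # packages/ comes from CPACK_OUTPUT_FILE_PREFIX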
From 547d42218878a1bf11e7b8e78230268a8b7f907e Mon Sep 17 00:00:00 2001 From: Tanmay Garg Date: Wed, 21 Jan 2026 14:25:17 +0100 Subject: [PATCH 38/45] Add GitVersion.cmake to dockerignore file as well --- .dockerignore | 1 + 1 file changed, 1 insertion(+) diff --git a/.dockerignore b/.dockerignore index bc19307271..6c3b2d8a11 100644 --- a/.dockerignore +++ b/.dockerignore @@ -6,4 +6,5 @@ !.git !CMakeLists.txt !CompilationInfo.cmake +!GitVersion.cmake !docker-entrypoint.sh From 76f3001b2c72f994d61970a74f3d7b8d41020d9f Mon Sep 17 00:00:00 2001 From: Tanmay Garg Date: Thu, 29 Jan 2026 18:09:41 +0100 Subject: [PATCH 39/45] Add binary version to qlever-index and qlever-server first log line --- src/ServerMain.cpp | 3 ++- src/index/IndexBuilderMain.cpp | 3 ++- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/src/ServerMain.cpp b/src/ServerMain.cpp index 36b404831d..f5e5d7f259 100644 --- a/src/ServerMain.cpp +++ b/src/ServerMain.cpp @@ -195,7 +195,8 @@ int main(int argc, char** argv) { return EXIT_FAILURE; } - AD_LOG_INFO << EMPH_ON << "QLever server, compiled on " + AD_LOG_INFO << EMPH_ON << "QLever server " + << qlever::version::ProjectVersion << ", compiled on " << qlever::version::DatetimeOfCompilation << " using git hash " << qlever::version::GitShortHash << EMPH_OFF << std::endl; diff --git a/src/index/IndexBuilderMain.cpp b/src/index/IndexBuilderMain.cpp index 1d080de5a8..e9b39fa27e 100644 --- a/src/index/IndexBuilderMain.cpp +++ b/src/index/IndexBuilderMain.cpp @@ -278,7 +278,8 @@ int main(int argc, char** argv) { return EXIT_FAILURE; } - AD_LOG_INFO << EMPH_ON << "QLever index builder, compiled on " + AD_LOG_INFO << EMPH_ON << "QLever index builder " + << qlever::version::ProjectVersion << ", compiled on " << qlever::version::DatetimeOfCompilation << " using git hash " << qlever::version::GitShortHash << EMPH_OFF << std::endl; From 5b764332ebce53a68863cdf1ffaf7437b1e9b0fb Mon Sep 17 00:00:00 2001 From: Tanmay Garg Date: Thu, 29 Jan 2026 22:15:34 +0100 Subject: [PATCH 40/45] Revert "Delete github workflows temporarily" This reverts commit 2d7fa61727adbd6308821f548ae2d2f8d66d831e. 
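The `.dockerignore` here is effectively an allow-list (`!` entries), so without the new exception the `COPY GitVersion.cmake /qlever/` step would find nothing in the build context and the image build would fail. A minimal sanity check, with an illustrative image tag:

    docker build -t qlever-local .

Since `.git` is copied into the image as well, the configure step inside the container resolves the same `git describe` version as a native build, and the resulting binaries report it in `--version` and in their first log line.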
--- .github/workflows/check_index_version.yml | 96 +++++++++++ .github/workflows/code-coverage.yml | 117 +++++++++++++ .github/workflows/codespell.yml | 25 +++ .github/workflows/cpp-17-libqlever.yml | 84 ++++++++++ .github/workflows/docker-publish.yml | 155 ++++++++++++++++++ .github/workflows/format-check.yml | 32 ++++ .github/workflows/macos.yml | 96 +++++++++++ .github/workflows/native-build-conan.yml | 75 +++++++++ .github/workflows/native-build.yml | 151 +++++++++++++++++ .github/workflows/sonarcloud.yml | 34 ++++ .../workflows/sparql-conformance-uploader.yml | 65 ++++++++ .github/workflows/sparql-conformance.yml | 86 ++++++++++ .github/workflows/upload-coverage.yml | 95 +++++++++++ .github/workflows/upload-sonarcloud.yml | 144 ++++++++++++++++ 14 files changed, 1255 insertions(+) create mode 100644 .github/workflows/check_index_version.yml create mode 100644 .github/workflows/code-coverage.yml create mode 100644 .github/workflows/codespell.yml create mode 100644 .github/workflows/cpp-17-libqlever.yml create mode 100644 .github/workflows/docker-publish.yml create mode 100644 .github/workflows/format-check.yml create mode 100644 .github/workflows/macos.yml create mode 100644 .github/workflows/native-build-conan.yml create mode 100644 .github/workflows/native-build.yml create mode 100644 .github/workflows/sonarcloud.yml create mode 100644 .github/workflows/sparql-conformance-uploader.yml create mode 100644 .github/workflows/sparql-conformance.yml create mode 100644 .github/workflows/upload-coverage.yml create mode 100644 .github/workflows/upload-sonarcloud.yml diff --git a/.github/workflows/check_index_version.yml b/.github/workflows/check_index_version.yml new file mode 100644 index 0000000000..ecba7a725b --- /dev/null +++ b/.github/workflows/check_index_version.yml @@ -0,0 +1,96 @@ +name: Check index version + +on: + push: + branches: [ master ] + pull_request: + branches: [ master ] + merge_group: + +concurrency: + group: '${{ github.workflow }} @ ${{ github.event.pull_request.head.label || github.head_ref || github.ref }}' + cancel-in-progress: true + +jobs: + check-index-version: + strategy: + fail-fast: false + matrix: + compiler: [gcc] + compiler-version: [11] + # We deliberately set `Release` and then `O1`, s.t. we get faster compile times (O1, less optimizations), but + # smaller binaries (no debug info), because otherwise we run out of memory on GitHub actions. 
+ warnings: [ "-Wall -Wextra -O1 " ] + build-type: [Release] + + runs-on: ubuntu-22.04 + + + steps: + - uses: actions/checkout@v4 + with: + submodules: 'recursive' + path: 'pr' + - uses: actions/checkout@v4 + with: + submodules: 'recursive' + path: 'master' + ref: 'master' + + - name: Install dependencies + uses: ./pr/.github/workflows/install-dependencies-ubuntu + - name: Install compiler + uses: ./pr/.github/workflows/install-compiler-ubuntu + with: + compiler: ${{matrix.compiler}} + compiler-version: ${{matrix.compiler-version}} + - name: Configure CMake Master + working-directory: ${{github.workspace}}/master + run: cmake -B build -DCMAKE_BUILD_TYPE=${{matrix.build-type}} -DCMAKE_TOOLCHAIN_FILE="$(pwd)/toolchains/${{matrix.compiler}}${{matrix.compiler-version}}.cmake" -DADDITIONAL_COMPILER_FLAGS="${{matrix.warnings}} ${{matrix.asan-flags}} ${{matrix.ubsan-flags}}" -DUSE_PARALLEL=true -DRUN_EXPENSIVE_TESTS=true -DENABLE_EXPENSIVE_CHECKS=true + - name: Configure CMake PR + working-directory: ${{github.workspace}}/pr + run: cmake -B build -DCMAKE_BUILD_TYPE=${{matrix.build-type}} -DCMAKE_TOOLCHAIN_FILE="$(pwd)/toolchains/${{matrix.compiler}}${{matrix.compiler-version}}.cmake" -DADDITIONAL_COMPILER_FLAGS="${{matrix.warnings}} ${{matrix.asan-flags}} ${{matrix.ubsan-flags}}" -DUSE_PARALLEL=true -DRUN_EXPENSIVE_TESTS=true -DENABLE_EXPENSIVE_CHECKS=true + + - name: Build master + # Build your program with the given configuration + # Only build the actually required executables to save space on the device. + run: | + cmake --build ${{github.workspace}}/master/build --config ${{matrix.build-type}} --target IndexBuilderMain -- -j $(nproc) + cmake --build ${{github.workspace}}/master/build --config ${{matrix.build-type}} --target ServerMain -- -j $(nproc) + cmake --build ${{github.workspace}}/master/build --config ${{matrix.build-type}} --target PrintIndexVersionMain -- -j $(nproc) + + - name: E2E on Master + working-directory: ${{github.workspace}}/master + run: e2e/e2e.sh + - name: Get index version master + working-directory: ${{github.workspace}}/master/build + run: | + if test -f "./PrintIndexVersionMain"; then + echo 'index_version_master<> $GITHUB_ENV + ./PrintIndexVersionMain >> $GITHUB_ENV + echo 'EOF' >> $GITHUB_ENV + else + echo 'index_version_master={}' >> $GITHUB_ENV + fi + - name: Clean master + run: cmake --build ${{github.workspace}}/master/build --target clean + - name: BuildPr + # also only build the required executables here. 
+ run: | + cmake --build ${{github.workspace}}/pr/build --config ${{matrix.build-type}} --target IndexBuilderMain -- -j $(nproc) + cmake --build ${{github.workspace}}/pr/build --config ${{matrix.build-type}} --target ServerMain -- -j $(nproc) + cmake --build ${{github.workspace}}/pr/build --config ${{matrix.build-type}} --target PrintIndexVersionMain -- -j $(nproc) + - name: Get index version PR + working-directory: ${{github.workspace}}/pr/build + run: | + echo 'index_version_pr<> $GITHUB_ENV + ./PrintIndexVersionMain >> $GITHUB_ENV + echo 'EOF' >> $GITHUB_ENV + - name: Print index versions + run : | + echo '${{ fromJson(env.index_version_master)}}' + echo '${{ fromJson(env.index_version_pr)}}' + - name: E2E on PR using index from Master + if: env.index_version_master == env.index_version_pr + working-directory: ${{github.workspace}}/pr + run: e2e/e2e.sh -i ../master diff --git a/.github/workflows/code-coverage.yml b/.github/workflows/code-coverage.yml new file mode 100644 index 0000000000..c9f46032dd --- /dev/null +++ b/.github/workflows/code-coverage.yml @@ -0,0 +1,117 @@ +# Measure the code coverage of QLever's unit tests using LLVMs source +# instrumentation. When this workflow runs for the master branch, the +# coverage report is directly uploaded to Codecov.io. When this workflow +# runs for a pull request, the coverage report is stored as a GitHub artifact +# and a separate workflow from `upload-coverage.yml` is triggered that uploads +# the coverage report. That way the uploader has access to the Codecov token +# in all cases and the upload should never fail. + +# The following name has to be the same as in the `workflows:` key in +# `upload-coverage.yml`, otherwise the uploader won't pick up the result +# of this workflow. +name: measure-code-coverage + +on: + push: + branches: [ master ] + pull_request: + branches: [ master ] + merge_group: + +concurrency: + group: '${{ github.workflow }} @ ${{ github.event.pull_request.head.label || github.head_ref || github.ref }}' + cancel-in-progress: true + +jobs: + build: + env: + compiler: clang + compiler-version: 16 + build-type: Debug + warnings: "-Wall -Wextra " + # we disable the `assert()` macro as it messes with the coverage reports + asan-flags: "-DNDEBUG" + ubsan-flags: "" + coverage-flags: "-fprofile-instr-generate -fcoverage-mapping" + cmake-flags: "-DCMAKE_C_COMPILER=clang-16 -DCMAKE_CXX_COMPILER=clang++-16" + + runs-on: ubuntu-22.04 + steps: + - uses: actions/checkout@v4 + with: + submodules: "recursive" + + - name: Install dependencies + uses: ./.github/workflows/install-dependencies-ubuntu + - name: Install compiler + uses: ./.github/workflows/install-compiler-ubuntu + with: + compiler: "clang" + compiler-version: "16" + - name: Install coverage tools + run: | + sudo apt install -y llvm-16 + sudo apt install mold + - name: Show path + run: | + which llvm-profdata-16 + which llvm-cov-16 + - name: Create build directory + run: mkdir ${{github.workspace}}/build + - name: Configure CMake + # Configure CMake in a 'build' subdirectory. `CMAKE_BUILD_TYPE` is only required if you are using a single-configuration generator such as make. 
+ # See https://cmake.org/cmake/help/latest/variable/CMAKE_BUILD_TYPE.html?highlight=cmake_build_type + run: cmake -B ${{github.workspace}}/build ${{env.cmake-flags}} -DCMAKE_BUILD_TYPE=${{env.build-type}} -DLOGLEVEL=TIMING -DADDITIONAL_COMPILER_FLAGS="${{env.warnings}} ${{env.asan-flags}} ${{env.ubsan-flags}} ${{env.coverage-flags}}" -DADDITIONAL_LINKER_FLAGS="${{env.coverage-flags}}" -DUSE_PARALLEL=false -DRUN_EXPENSIVE_TESTS=false -DSINGLE_TEST_BINARY=ON -DENABLE_EXPENSIVE_CHECKS=true -DADDITIONAL_LINKER_FLAGS="-fuse-ld=mold" + + - name: Build + # Build your program with the given configuration + run: cmake --build ${{github.workspace}}/build --config ${{env.build-type}} -- -j $(nproc) + - name: Run unit tests + working-directory: ${{github.workspace}}/build/test + env: + LLVM_PROFILE_FILE: "default%p.profraw" + # We have to manually run the test executable to only get a single `.profraw` file. + # Otherwise, the GitHub runner goes out of memory. + run: env ASAN_OPTIONS="alloc_dealloc_mismatch=0" ./QLeverAllUnitTestsMain + + - name: Process coverage info + working-directory: ${{github.workspace}}/build/test + run: > + llvm-profdata-16 merge -sparse *.profraw -o default.profdata; + llvm-cov-16 export ./QLeverAllUnitTestsMain --dump --format=lcov --instr-profile ./default.profdata --ignore-filename-regex="/third_party/" --ignore-filename-regex="/generated/" --ignore-filename-regex="/nlohmann/" --ignore-filename-regex="/ctre/" --ignore-filename-regex="/test/" --ignore-filename-regex="/benchmark/" > ./coverage.lcov + +# Only upload the coverage directly if this is not a pull request. In this +# case we are on the master branch and have access to the Codecov token. + - name: "Submit coverage data to codecov.io" + if: github.event_name != 'pull_request' + uses: codecov/codecov-action@v4 + with: + file: ${{github.workspace}}/build/test/coverage.lcov + # Note: technically, a `token` is not required for codecov.io when + # uploading from a public repository, but specifying it avoids the + # nasty spurious failures due to GitHub's rate limit for codecov's + # public default token. + token: ${{ secrets.CODECOV_TOKEN }} + fail_ci_if_error: true + +# For a pull request we store the coverage file as well as some information +# about this PR (number, how to check it out, etc.) and upload it as an artifact. +# This is all the data that is required for running the Codecov uploader manually +# from `upload-coverage.yml`. 
+ - name: Save PR number and coverage file in same directory + if: github.event_name == 'pull_request' + # Note: If you change any of the filenames here, you also have to change them in `upload-coverage.yml` + run : | + mkdir -p coverage-report + echo ${{ github.event.number }} > ./coverage-report/pr + echo ${{ github.repository }} > ./coverage-report/github_repository + echo ${GITHUB_REF} > ./coverage-report/github_ref + mv ${{ github.workspace}}/build/test/coverage.lcov coverage-report + - name: Upload coverage artifact + if: github.event_name == 'pull_request' + uses: actions/upload-artifact@v4 + with: + name: coverage-report + path: coverage-report/ + # Note: for now we do not run the e2e tests for the coverage check + diff --git a/.github/workflows/codespell.yml b/.github/workflows/codespell.yml new file mode 100644 index 0000000000..b026c855dd --- /dev/null +++ b/.github/workflows/codespell.yml @@ -0,0 +1,25 @@ +# Codespell configuration is within .codespellrc +--- +name: Codespell + +on: + push: + branches: [master] + pull_request: + branches: [master] + +permissions: + contents: read + +jobs: + codespell: + name: Check for spelling errors + runs-on: ubuntu-latest + + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Annotate locations with typos + uses: codespell-project/codespell-problem-matcher@v1 + - name: Codespell + uses: codespell-project/actions-codespell@v2 diff --git a/.github/workflows/cpp-17-libqlever.yml b/.github/workflows/cpp-17-libqlever.yml new file mode 100644 index 0000000000..7afffd917c --- /dev/null +++ b/.github/workflows/cpp-17-libqlever.yml @@ -0,0 +1,84 @@ +name: CPP17 libQLever + +on: + push: + branches: [ master ] + pull_request: + branches: [ master ] + merge_group: + +concurrency: + group: '${{ github.workflow }} @ ${{ github.event.pull_request.head.label || github.head_ref || github.ref }}' + cancel-in-progress: true + +jobs: + build: + runs-on: ubuntu-22.04 + # The CMake configure and build commands are platform-agnostic and should work equally + # well on Windows or Mac. You can convert this to a matrix build if you need + # cross-platform coverage. + # See: https://docs.github.com/en/free-pro-team@latest/actions/learn-github-actions/managing-complex-workflows#using-a-build-matrix + strategy: + fail-fast: false + matrix: + include: + - compiler-version: 11 + use-keep-going: false + use-ignore-errors: false + additional-cmake-options: "-DREDUCED_FEATURE_SET_FOR_CPP17=ON" + - compiler-version: 8 + expensive-tests: true + use-keep-going: true + use-ignore-errors: true + additional-cmake-options: "-DREDUCED_FEATURE_SET_FOR_CPP17=ON -DUSE_CPP_17_BACKPORTS=ON -DCMAKE_CXX_STANDARD=17 -DCOMPILER_VERSION_CHECK_DEACTIVATED=ON" + + + env: + warnings: "" + build-type: Release + expensive-tests: true + compiler: gcc + + steps: + - uses: actions/checkout@v4 + with: + submodules: 'recursive' + - name: Install dependencies + uses: ./.github/workflows/install-dependencies-ubuntu + - name: Install compiler + uses: ./.github/workflows/install-compiler-ubuntu + with: + compiler: ${{env.compiler}} + compiler-version: ${{matrix.compiler-version}} + - name: Configure CMake + # Configure CMake in a 'build' subdirectory. `CMAKE_BUILD_TYPE` is only required if you are using a single-configuration generator such as make. 
+ # See https://cmake.org/cmake/help/latest/variable/CMAKE_BUILD_TYPE.html?highlight=cmake_build_type + run: cmake -B ${{github.workspace}}/build -DCMAKE_BUILD_TYPE=${{env.build-type}} -DCMAKE_TOOLCHAIN_FILE="$(pwd)/toolchains/${{env.compiler}}${{matrix.compiler-version}}.cmake" -DADDITIONAL_COMPILER_FLAGS="${{env.warnings}}" -DUSE_PARALLEL=true -DRUN_EXPENSIVE_TESTS=${{env.expensive-tests}} -DENABLE_EXPENSIVE_CHECKS=true ${{matrix.additional-cmake-options}} -DADDITIONAL_LINKER_FLAGS="-B /usr/bin/mold" + + - name: Build + # Build your program with the given configuration + run: | + KEEP_GOING="" + if [[ "${{ matrix.use-keep-going }}" == "true" ]]; then + KEEP_GOING="-k" + fi + IGNORE_ERRORS="" + if [[ "${{ matrix.use-ignore-errors }}" == "true" ]]; then + IGNORE_ERRORS="-i" + fi + set -o pipefail # the `tee` will never fail, but the `build` command might. We want to fail if the build fails. + cmake --build ${{github.workspace}}/build --target QleverTest --config ${{env.build-type}} -- $IGNORE_ERRORS $KEEP_GOING -j $(nproc) 2>&1 | tee /tmp/build.log + id: build + + - name: Run gcc8 log analyzer on gcc8 builds + if: matrix.compiler-version == 8 + run: | + python ${{ github.workspace }}/misc/gcc8_logs_analyzer.py /tmp/build.log --on-github + id: gcc8_log_analyzer + - name: Test + id: runTest + if: (matrix.compiler-version == 11) + working-directory: ${{github.workspace}}/build/test + # Execute tests defined by the CMake configuration. + # See https://cmake.org/cmake/help/latest/manual/ctest.1.html for more detail + run: env CTEST_OUTPUT_ON_FAILURE=1 ctest -C ${{matrix.build-type}} . -L QleverTest diff --git a/.github/workflows/docker-publish.yml b/.github/workflows/docker-publish.yml new file mode 100644 index 0000000000..297546c5b2 --- /dev/null +++ b/.github/workflows/docker-publish.yml @@ -0,0 +1,155 @@ +name: Docker build and publish + +on: + push: + branches: [ master ] + pull_request: + branches: [ master ] + +env: + IMAGE: adfreiburg/qlever + +concurrency: + # When this is not a pull request, then we want all the docker containers to be pushed, we therefore + # directly fall back to the commit hash which will be distinct for each push to master. + group: '${{ github.workflow }} @ ${{ github.event.pull_request.head.label || github.sha}}' + cancel-in-progress: true + +# This workflow is heavily based on https://docs.docker.com/build/ci/github-actions/multi-platform/#distribute-build-across-multiple-runners . +jobs: + build: + strategy: + matrix: + include: + - platform: linux/amd64 + runner: ubuntu-24.04 + - platform: linux/arm64 + runner: ubuntu-24.04-arm + runs-on: ${{ matrix.runner }} + steps: + - name: Checkout + uses: actions/checkout@v4 + # Generate metadata for the docker image based on the GH Actions environment. + - name: Generate image metadata + id: meta + uses: docker/metadata-action@v5 + with: + images: ${{ env.IMAGE }} + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + - name: Login to Docker Hub + if: github.event_name != 'pull_request' + uses: docker/login-action@v3 + with: + username: ${{ secrets.DOCKERHUB_USERNAME }} + password: ${{ secrets.DOCKERHUB_TOKEN }} + - name: Build ${{ matrix.platform }} + uses: docker/build-push-action@v6 + id: build + with: + context: . + platforms: ${{ matrix.platform }} + tags: ${{ env.IMAGE }} + # push-by-digest means that the built image is not associated with a tag. Instead, the only way to refer to it + # is by using its digest (which is basically a unique hash). 
+ outputs: type=image,push-by-digest=true,name-canonical=true,push=${{ github.event_name != 'pull_request' }} + labels: ${{ steps.meta.outputs.labels }} + annotations: ${{ steps.meta.outputs.annotations }} + # Export the digest outside of this job, so that the merge job can pick it up. + - name: Export digest + if: github.event_name != 'pull_request' + run: | + # Strip forward slash from matrix.platform + platform=${{ matrix.platform }} + echo "ARTIFACT_NAME=${platform//\//-}" >> $GITHUB_ENV + mkdir -p ${{ runner.temp }}/digests + digest="${{ steps.build.outputs.digest }}" + touch "${{ runner.temp }}/digests/${digest#sha256:}" + - name: Upload digest + if: github.event_name != 'pull_request' + uses: actions/upload-artifact@v4 + with: + name: digests-${{ env.ARTIFACT_NAME }} + path: ${{ runner.temp }}/digests/* + if-no-files-found: error + retention-days: 1 + # Build image for local use. Since we pushed by digest it has not been loaded into the local docker instance. + # It might be possible to directly push by using a unique tag and make this step redundant, but pushing by digest + # seems to be the recommended way to do it. + - name: Setup E2E test image + if: matrix.platform == 'linux/amd64' + uses: docker/build-push-action@v6 + with: + # The cache should already provide this. So no rebuild should occur. + context: . + load: true + push: false + tags: ${{ env.IMAGE }}:tmp-${{ github.sha }} + - name: E2E in Docker + if: matrix.platform == 'linux/amd64' + run: | + sudo mkdir ${{ github.workspace }}/e2e_data + sudo chmod a+rwx ${{ github.workspace }}/e2e_data + sudo docker run -i --rm -v "${{ github.workspace }}/e2e_data:/app/e2e_data/" --entrypoint e2e/e2e.sh ${{ env.IMAGE }}:tmp-${{ github.sha }} + docker-merge: + if: github.event_name != 'pull_request' + needs: [ build ] + runs-on: ubuntu-24.04 + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Generate image metadata + id: meta + uses: docker/metadata-action@v5 + env: + # We build multiplatform images which have an image index above the + # image manifests. Attach the annotations directly to the image index. + DOCKER_METADATA_ANNOTATIONS_LEVELS: "index" + with: + images: ${{ env.IMAGE }} + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + - name: Login to Docker Hub + uses: docker/login-action@v3 + with: + username: ${{ secrets.DOCKERHUB_USERNAME }} + password: ${{ secrets.DOCKERHUB_TOKEN }} + - name: Get short sha + id: sha + run: echo "sha_short=$(git rev-parse --short HEAD)" >> $GITHUB_OUTPUT + - name: Get PR number + id: pr + run: echo "pr_num=$(git log --format=%s -n 1 | sed -nr 's/.*\(\#([0-9]+)\)/\1/p')" >> $GITHUB_OUTPUT + - name: Download digests + uses: actions/download-artifact@v4 + with: + path: ${{ runner.temp }}/digests + pattern: digests-* + merge-multiple: true + - name: Merge amd64 + arm64 images into multi-arch manifest + # Changing the working directory to this folder is important, so that + # the '*' in printf down below is expanded to the simple filenames without + # a leading path in the way. + working-directory: ${{ runner.temp }}/digests + # steps.meta.outputs.annotations contains a line for every annotation. + # To properly handle the expansion of this multi-line value in bash, + # we need to properly transform it into the `EXPANDED_ANNOTATIONS` + # variable, which then no longer contains newlines and properly + # handles other spaces in the annotations. 
+ run: | + ANNOTATIONS=$(cat <<'EOF' + ${{ steps.meta.outputs.annotations }} + EOF + ) + EXPANDED_ANNOTATIONS=() + while IFS= read -r line; do + # Skip empty lines + [[ -n "$line" ]] || continue + EXPANDED_ANNOTATIONS+=( --annotation "$line" ) + done <<< "$ANNOTATIONS" + docker buildx imagetools create \ + -t ${{ env.IMAGE }}:latest \ + -t ${{ env.IMAGE }}:${{ github.ref_name == 'master' && format('pr-{0}', steps.pr.outputs.pr_num) || github.ref_name }} \ + -t ${{ env.IMAGE }}:commit-${{ steps.sha.outputs.sha_short }} \ + "${EXPANDED_ANNOTATIONS[@]}" \ + $(printf '${{ env.IMAGE }}@sha256:%s ' *) diff --git a/.github/workflows/format-check.yml b/.github/workflows/format-check.yml new file mode 100644 index 0000000000..07412916bb --- /dev/null +++ b/.github/workflows/format-check.yml @@ -0,0 +1,32 @@ +name: Format check + +on: + push: + branches: [ master ] + pull_request: + branches: [ master ] + merge_group: + +jobs: + build: + # The CMake configure and build commands are platform agnostic and should work equally + # well on Windows or Mac. You can convert this to a matrix build if you need + # cross-platform coverage. + # See: https://docs.github.com/en/free-pro-team@latest/actions/learn-github-actions/managing-complex-workflows#using-a-build-matrix + runs-on: ubuntu-22.04 + + steps: + - uses: actions/checkout@v4 + - name: Install dependencies + run: | + # The following line currently seems to be necessary to work around a bug in the installation. + sudo apt remove python3-lldb-* + wget https://apt.llvm.org/llvm.sh + sudo chmod +x llvm.sh + sed 's/apt-key del/echo/' llvm.sh -iy + sudo ./llvm.sh 16 + sudo apt install -y clang-format-16 + + - name: Run the format checker + run: ${{github.workspace}}/misc/format-check.sh + diff --git a/.github/workflows/macos.yml b/.github/workflows/macos.yml new file mode 100644 index 0000000000..b64924e7d9 --- /dev/null +++ b/.github/workflows/macos.yml @@ -0,0 +1,96 @@ +name: Native build with conan on MacOS + +on: + push: + branches: [ master ] + pull_request: + branches: [ master ] + merge_group: + +concurrency: + group: '${{ github.workflow }} @ ${{ github.event.pull_request.head.label || github.head_ref || github.ref }}' + cancel-in-progress: true + +jobs: + build: + # The CMake configure and build commands are platform-agnostic and should work equally + # well on Windows or Mac. You can convert this to a matrix build if you need + # cross-platform coverage. 
+ # See: https://docs.github.com/en/free-pro-team@latest/actions/learn-github-actions/managing-complex-workflows#using-a-build-matrix + strategy: + fail-fast: false + matrix: + build-type: [Release] + runs-on: macos-15 + env: + LLVM_VERSION: 17 + steps: + - uses: actions/checkout@v4 + + - name: Install ICU dependency for python (only needed for E2E test) + run: | + df -h + brew install pkg-config icu4c + echo PATH="$(brew --prefix icu4c)/bin:$(brew --prefix icu4c)/sbin:$PATH" >> $GITHUB_ENV + echo PKG_CONFIG_PATH="$PKG_CONFIG_PATH:$(brew --prefix icu4c)/lib/pkgconfig" >> $GITHUB_ENV + + - name: Install python dependencies for E2E tests + run: | + pip3 install --break-system-packages pyaml --no-binary=:pyicu: pyicu + + - name: Install dependencies + run: | + brew install llvm@${{env.LLVM_VERSION}} conan@2 + export LLVM_PREFIX=$(brew --prefix llvm@${{env.LLVM_VERSION}}) + echo PATH="$LLVM_PREFIX/bin:$PATH" >> $GITHUB_ENV + echo CC="$LLVM_PREFIX/bin/clang" >> $GITHUB_ENV + echo CXX="$LLVM_PREFIX/bin/clang++" >> $GITHUB_ENV + + - name: Print clang version + run: clang++ --version + + - name: Cache for conan + uses: actions/cache@v3 + env: + cache-name: cache-conan-modules-macos-15 + with: + path: ~/.conan2 + key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ hashFiles('conanfile.txt', 'conanprofiles/clang-17-macos')}} + + - name: Create build directory + run: mkdir ${{github.workspace}}/build + + - name: Install and run conan + working-directory: ${{github.workspace}}/build + run: conan install .. -pr:b=../conanprofiles/clang-17-macos -pr:h=../conanprofiles/clang-17-macos -of=. --build=missing + + - name: Configure CMake + # For std::ranges::join_view we need the -fexperimental-library flag on libc++17. + # We currently cannot use the parallel algorithms, as the parallel sort requires a GNU-extension, and we build with `libc++`. + run: cmake -B ${{github.workspace}}/build -DCMAKE_BUILD_TYPE=${{matrix.build-type}} -DCMAKE_TOOLCHAIN_FILE="$(pwd)/build/conan_toolchain.cmake" -DUSE_PARALLEL=false -DRUN_EXPENSIVE_TESTS=false -DENABLE_EXPENSIVE_CHECKS=true -DADDITIONAL_COMPILER_FLAGS="-fexperimental-library" -D_NO_TIMING_TESTS=ON + + - name: Build + # Build your program with the given configuration + # Sourcing the conanrun.sh even for building is required to make gtest_discover_tests pass reliably. + run: | + df -h + source ${{github.workspace}}/build/conanrun.sh + cmake --build ${{github.workspace}}/build --config ${{matrix.build-type}} -- -j 2 + + - name: Test + working-directory: ${{github.workspace}}/build/test + # Execute tests defined by the CMake configuration. + # See https://cmake.org/cmake/help/latest/manual/ctest.1.html for more detail + run: | + df -h + source ${{github.workspace}}/build/conanrun.sh + env CTEST_OUTPUT_ON_FAILURE=1 ctest -C ${{matrix.build-type}} . + + - name: Running and printing the benchmark examples. 
+ working-directory: ${{github.workspace}}/build + run: benchmark/BenchmarkExamples -p + + - name: E2E + run: | + source ${{github.workspace}}/build/conanrun.sh + ${{github.workspace}}/e2e/e2e.sh diff --git a/.github/workflows/native-build-conan.yml b/.github/workflows/native-build-conan.yml new file mode 100644 index 0000000000..ef6ad67f50 --- /dev/null +++ b/.github/workflows/native-build-conan.yml @@ -0,0 +1,75 @@ +name: Native build with conan + +on: + push: + branches: [ master ] + pull_request: + branches: [ master ] + merge_group: + +concurrency: + group: '${{ github.workflow }} @ ${{ github.event.pull_request.head.label || github.head_ref || github.ref }}' + cancel-in-progress: true + +jobs: + build: + # The CMake configure and build commands are platform-agnostic and should work equally + # well on Windows or Mac. You can convert this to a matrix build if you need + # cross-platform coverage. + # See: https://docs.github.com/en/free-pro-team@latest/actions/learn-github-actions/managing-complex-workflows#using-a-build-matrix + strategy: + fail-fast: false + matrix: + warnings: [ "-Wall -Wextra" ] + build-type: [Release] + runs-on: ubuntu-22.04 + + + steps: + - uses: actions/checkout@v4 + with: + submodules: 'recursive' + + - name: Install dependencies + uses: ./.github/workflows/install-dependencies-ubuntu + with: + install-third-party-libraries: "false" + + - name: Create build directory + run: mkdir ${{github.workspace}}/build + - name: Install and run conan + working-directory: ${{github.workspace}}/build + run: > + sudo pip3 install conan; + conan profile detect; + conan install .. -pr:b=default -of=. --build=missing ; + - name: Configure CMake + # Configure CMake in a 'build' subdirectory. `CMAKE_BUILD_TYPE` is only required if you are using a single-configuration generator such as make. + # See https://cmake.org/cmake/help/latest/variable/CMAKE_BUILD_TYPE.html?highlight=cmake_build_type + run: cmake -B ${{github.workspace}}/build -DCMAKE_BUILD_TYPE=${{matrix.build-type}} -DLOGLEVEL=DEBUG -DCMAKE_TOOLCHAIN_FILE="$(pwd)/build/conan_toolchain.cmake" -DADDITIONAL_COMPILER_FLAGS="${{matrix.warnings}}" -DUSE_PARALLEL=true -DRUN_EXPENSIVE_TESTS=false -DENABLE_EXPENSIVE_CHECKS=true + + - name: Build + # Build your program with the given configuration + run: cmake --build ${{github.workspace}}/build --config ${{matrix.build-type}} -- -j $(nproc) + + - name: Test + working-directory: ${{github.workspace}}/build/test + # Execute tests defined by the CMake configuration. + # See https://cmake.org/cmake/help/latest/manual/ctest.1.html for more detail + run: > + source ../conanrun.sh; + env CTEST_OUTPUT_ON_FAILURE=1 ctest -C ${{matrix.build-type}} .; + + - name: Running and printing the benchmark examples. + working-directory: ${{github.workspace}}/build + run: > + source ./conanrun.sh; + benchmark/BenchmarkExamples -p; + + # explicitly specify the binary directory for the E2E script via the `-d` option to also + # test that it works. 
+ - name: E2E + run: > + source ${{github.workspace}}/build/conanrun.sh; + ${{github.workspace}}/e2e/e2e.sh -d build + diff --git a/.github/workflows/native-build.yml b/.github/workflows/native-build.yml new file mode 100644 index 0000000000..cec789a10d --- /dev/null +++ b/.github/workflows/native-build.yml @@ -0,0 +1,151 @@ +name: Native build + +on: + push: + branches: [ master ] + pull_request: + branches: [ master ] + merge_group: + +concurrency: + group: '${{ github.workflow }} @ ${{ github.event.pull_request.head.label || github.head_ref || github.ref }}' + cancel-in-progress: true + +jobs: + build: + + # The CMake configure and build commands are platform-agnostic and should work equally + # well on Windows or Mac. You can convert this to a matrix build if you need + # cross-platform coverage. + # See: https://docs.github.com/en/free-pro-team@latest/actions/learn-github-actions/managing-complex-workflows#using-a-build-matrix + strategy: + fail-fast: false + matrix: + compiler: [gcc, clang] + compiler-version: [8, 11, 12, 13, 15, 16, 18, 21] + warnings: [ "-Wall -Wextra -Werror " ] + build-type: [Release] + expensive-tests: [true] + additional-cmake-options: [""] + isPr: + - ${{github.event_name == 'pull_request'}} # some checks are only run in the master. + skipIfPr: [false] + exclude: + - compiler: gcc + compiler-version: 8 + - compiler: gcc + compiler-version: 12 + isPr: true + - compiler: gcc + compiler-version: 15 + - compiler: gcc + compiler-version: 16 + - compiler: gcc + compiler-version: 18 + - compiler: gcc + compiler-version: 21 + - compiler: clang + compiler-version: 8 + - compiler: clang + compiler-version: 11 + - compiler: clang + compiler-version: 12 + - compiler: clang + compiler-version: 13 + - compiler: clang + compiler-version: 15 + - compiler: clang + compiler-version: 16 + isPr: true + - compiler: clang + compiler-version: 18 + isPr: true + include: + - compiler: gcc + compiler-version: 11 + additional-cmake-options: "-DUSE_CPP_17_BACKPORTS=ON -DEXPRESSION_GENERATOR_BACKPORTS_FOR_CPP17=ON" + build-type: Release +# TODO Figure out a way to enable this build only for master pushes and not for the PR. 
+# - compiler: clang +# compiler-version: 15 +# additional-cmake-options: "-DUSE_CPP_17_BACKPORTS=ON -DCMAKE_CXX_STANDARD=17 -DCMAKE_CXX_FLAGS='-ferror-limit=0' -DCOMPILER_VERSION_CHECK_DEACTIVATED=ON" +# build-type: Debug +# skipIfPr: true +# expensive-tests: false +# continue-on-error: true +# use-keep-going: true +# use-ignore-errors: true + - compiler: clang + compiler-version: 16 + asan-flags: "-fsanitize=address -fno-omit-frame-pointer" + build-type: RelWithDebInfo + expensive-tests: false + - compiler: clang + compiler-version: 16 + ubsan-flags: " -fsanitize=undefined" + build-type: RelWithDebInfo + expensive-tests: false + skipIfPr: false + - compiler: clang + compiler-version: 17 + build-type: Debug + expensive-tests: false + ubsan-flags: " -fsanitize=thread -O1 -g" + additional-cmake-options: "-D_NO_TIMING_TESTS=ON" + skipIfPr: false + + runs-on: ubuntu-22.04 + + + steps: + - name: Skip early if conditions are not met + if: (github.event_name == 'pull_request' && matrix.skipIfPr) + run: exit 0; + - uses: actions/checkout@v4 + with: + submodules: 'recursive' + - name: Install dependencies + uses: ./.github/workflows/install-dependencies-ubuntu + - name: Install compiler + uses: ./.github/workflows/install-compiler-ubuntu + with: + compiler: ${{matrix.compiler}} + compiler-version: ${{matrix.compiler-version}} + - name: Reduce address randomization to make sanitizers work + # For details see for example `https://stackoverflow.com/questions/77850769/fatal-threadsanitizer-unexpected-memory-mapping-when-running-on-linux-kernels` + run: | + sudo sysctl vm.mmap_rnd_bits + sudo sysctl vm.mmap_rnd_bits=28 + + + - name: Configure CMake + env: + USE_PCH: ${{ matrix.compiler-version == 21 && 'ON' || 'OFF' }} + # Configure CMake in a 'build' subdirectory. `CMAKE_BUILD_TYPE` is only required if you are using a single-configuration generator such as make. + # See https://cmake.org/cmake/help/latest/variable/CMAKE_BUILD_TYPE.html?highlight=cmake_build_type + run: cmake -B ${{github.workspace}}/build -DCMAKE_BUILD_TYPE=${{matrix.build-type}} -DCMAKE_TOOLCHAIN_FILE="$(pwd)/toolchains/${{matrix.compiler}}${{matrix.compiler-version}}.cmake" -DADDITIONAL_COMPILER_FLAGS="${{matrix.warnings}} ${{matrix.asan-flags}} ${{matrix.ubsan-flags}}" -DUSE_PARALLEL=true -DRUN_EXPENSIVE_TESTS=${{matrix.expensive-tests}} -DENABLE_EXPENSIVE_CHECKS=true ${{matrix.additional-cmake-options}} -DADDITIONAL_LINKER_FLAGS="-B /usr/bin/mold" -DSINGLE_TEST_BINARY=ON -DUSE_PRECOMPILED_HEADERS=${{env.USE_PCH}} + + - name: Build + # Build your program with the given configuration + run: | + cmake --build ${{github.workspace}}/build --config ${{matrix.build-type}} -- -j $(nproc) + id: build + + - name: Test + id: complete_tests + working-directory: ${{github.workspace}}/build/test + run: env CTEST_OUTPUT_ON_FAILURE=1 ctest -C ${{matrix.build-type}} . + + - name: Running and printing the benchmark examples. + run: ${{github.workspace}}/build/benchmark/BenchmarkExamples -p + + # Run the E2E test only once, but for GCC add the text index separately. + # Note that the dispatch by the compiler is arbitrary, we just want + # to have both cases tested. 
+ - name: E2E with index in one go + if: (matrix.compiler == 'clang') && (steps.complete_tests.outcome == 'success') + run: ${{github.workspace}}/e2e/e2e.sh + + - name: E2E with separate text index build + if: (matrix.compiler != 'clang') && (steps.complete_tests.outcome == 'success') + run: ${{github.workspace}}/e2e/e2e.sh -t diff --git a/.github/workflows/sonarcloud.yml b/.github/workflows/sonarcloud.yml new file mode 100644 index 0000000000..0580a783b7 --- /dev/null +++ b/.github/workflows/sonarcloud.yml @@ -0,0 +1,34 @@ +name: sonarcloud-analysis + +on: + push: + branches: [master] + pull_request: + branches: [master] + +jobs: + build: + strategy: + fail-fast: false + runs-on: ubuntu-22.04 + steps: + - uses: actions/checkout@v4 + with: + submodules: 'recursive' + + - name: Save PR number and sonarcloud data in same directory + if: github.event_name == 'pull_request' + # Note: If you change any of the filenames here, you also have to change them in `upload-sonarcloud.yml` + run : | + mkdir -p sonarcloud-report + echo ${{ github.event.number }} > ./sonarcloud-report/pr + echo ${{ github.repository }} > ./sonarcloud-report/github_repository + echo ${GITHUB_REF} > ./sonarcloud-report/github_ref + - name: Upload sonarcloud artifact + if: github.event_name == 'pull_request' + uses: actions/upload-artifact@v4 + with: + name: sonarcloud-report + path: sonarcloud-report/ + # Note: for now we do not run the e2e tests for the coverage check + diff --git a/.github/workflows/sparql-conformance-uploader.yml b/.github/workflows/sparql-conformance-uploader.yml new file mode 100644 index 0000000000..5532e3b061 --- /dev/null +++ b/.github/workflows/sparql-conformance-uploader.yml @@ -0,0 +1,65 @@ +name: Upload conformance tests result + +on: + workflow_run: + workflows: [sparql-conformance] + types: + - completed + +jobs: + upload: + env: + SERVER_URL: https://qlever.dev/sparql-conformance-uploader + API_KEY: ${{ secrets.SPARQL_CONFORMANCE_TOKEN }} + runs-on: ubuntu-latest + if: github.event.workflow_run.conclusion == 'success' + steps: + - name: 'Download artifact' + uses: actions/github-script@v6 + with: + script: | + var artifacts = await github.rest.actions.listWorkflowRunArtifacts({ + owner: context.repo.owner, + repo: context.repo.repo, + run_id: ${{github.event.workflow_run.id }}, + }); + var matchArtifact = artifacts.data.artifacts.filter((artifact) => { + return artifact.name == "conformance-report" + })[0]; + var download = await github.rest.actions.downloadArtifact({ + owner: context.repo.owner, + repo: context.repo.repo, + artifact_id: matchArtifact.id, + archive_format: 'zip', + }); + var fs = require('fs'); + fs.writeFileSync('${{github.workspace}}/conformance-report.zip', Buffer.from(download.data)); + - run: unzip conformance-report.zip + # Read the metadata into environment variables. 
+ - name: "Read github event" + run: echo "github_event=`cat event`" >> $GITHUB_ENV + - name: "Read PR number" + run: echo "pr_number=`cat pr`" >> $GITHUB_ENV + - name: "Read Github Ref" + run: echo "original_github_ref=`cat github_ref`" >> $GITHUB_ENV; + - name: "Read Github SHA" + run: echo "commit_sha=`cat sha`" >> $GITHUB_ENV; + - name: "Read Github Repository" + run: echo "original_github_repository=`cat github_repository`" >> $GITHUB_ENV; + - name: "Submit data to server" + run: | + response=$(curl -L -s -o temp_response.txt -w "%{http_code}" \ + -H "x-api-key: $API_KEY" \ + -H "event: ${{ env.github_event }}" \ + -H "sha: ${{ env.commit_sha }}" \ + -H "pr-number: ${{ env.pr_number }}" \ + -F "file=@${{env.commit_sha}}.json.bz2" \ + $SERVER_URL/upload) + + echo "Server response:" + cat temp_response.txt + echo "HTTP Status: $response" + if [ "$response" -gt 200 ]; then + echo "Server did not respond with status 200. Failing the workflow." + exit 1 + fi diff --git a/.github/workflows/sparql-conformance.yml b/.github/workflows/sparql-conformance.yml new file mode 100644 index 0000000000..9103de62bd --- /dev/null +++ b/.github/workflows/sparql-conformance.yml @@ -0,0 +1,86 @@ +name: sparql-conformance + +on: + push: + branches: [ master ] + pull_request: + branches: [ master ] + merge_group: + +jobs: + build: + env: + compiler: clang + compiler-version: 16 + build-type: Release + cmake-flags: "-DCMAKE_C_COMPILER=clang-16 -DCMAKE_CXX_COMPILER=clang++-16" + runs-on: ubuntu-22.04 + steps: + - uses: actions/checkout@v3 + with: + submodules: "recursive" + path: qlever-code + - name: Checkout sparql-test-suite-files + uses: actions/checkout@v3 + with: + repository: "w3c/rdf-tests" + path: sparql-test-suite + - name: Checkout qlever-test-suite + uses: actions/checkout@v3 + with: + repository: "ad-freiburg/sparql-conformance" + path: qlever-test-suite + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: "3.10" + - name: Install python dependencies + run: | + python -m pip install --upgrade pip + pip install requests + pip install rdflib + - name: Install dependencies + uses: ./qlever-code/.github/workflows/install-dependencies-ubuntu + - name: Install compiler + uses: ./qlever-code/.github/workflows/install-compiler-ubuntu + with: + compiler: "clang" + compiler-version: "16" + - name: Create build directory + run: mkdir ${{github.workspace}}/qlever-code/build + - name: Configure CMake + run: cmake -S ${{github.workspace}}/qlever-code/ -B ${{github.workspace}}/qlever-code/build ${{env.cmake-flags}} -DCMAKE_BUILD_TYPE=${{env.build-type}} -DLOGLEVEL=INFO -DUSE_PARALLEL=false + - name: Build IndexBuilderMain + run: cmake --build ${{github.workspace}}/qlever-code/build --target IndexBuilderMain --config ${{env.build-type}} -- -j $(nproc) + - name: Build ServerMain + run: cmake --build ${{github.workspace}}/qlever-code/build --target ServerMain --config ${{env.build-type}} -- -j $(nproc) + - name: Execute test suite + run: | + cd qlever-test-suite + python testsuite.py config http://0.0.0.0 7001 ${{github.workspace}}/sparql-test-suite/sparql/sparql11/ ${{github.workspace}}/qlever-code/build/ localhost http-graph-store sparql + python testsuite.py extract + python testsuite.py ${{ github.sha }} + cd .. 
+ - name: Save workflow information + # Note: If you change any of the filenames here, you also have to change them in `upload-conformance.yml` + run : | + mkdir -p conformance-report + echo ${{ github.event_name }} > ./conformance-report/event + echo ${{ github.event.number }} > ./conformance-report/pr + echo ${{ github.repository }} > ./conformance-report/github_repository + echo ${GITHUB_REF} > ./conformance-report/github_ref + - name: Save SHA and conformance report if it is a master commit. + if: github.event_name == 'push' + run : | + echo ${{github.sha}} > ./conformance-report/sha + mv ${{ github.workspace}}/qlever-test-suite/results/${{ github.sha }}.json.bz2 conformance-report/${{ github.sha }}.json.bz2 + - name: Save SHA and conformance report if it is a PR. + if: github.event_name == 'pull_request' + run : | + echo ${{github.event.pull_request.head.sha}} > ./conformance-report/sha + mv ${{ github.workspace}}/qlever-test-suite/results/${{ github.sha }}.json.bz2 conformance-report/${{ github.event.pull_request.head.sha }}.json.bz2 + - name: Upload coverage artifact + uses: actions/upload-artifact@v4 + with: + name: conformance-report + path: conformance-report/ \ No newline at end of file diff --git a/.github/workflows/upload-coverage.yml b/.github/workflows/upload-coverage.yml new file mode 100644 index 0000000000..77eacab868 --- /dev/null +++ b/.github/workflows/upload-coverage.yml @@ -0,0 +1,95 @@ +# This workflow runs as soon as the workflow from `code-coverage.yml` has +# successfully finished. It downloads the created artifact and runs the +# Codecov uploader. This workflow uses the `workflow_run` trigger. This +# means that it will always be run from the master branch, meaning that +# the contents of this file will always be taken from the master branch, +# even if a PR changes it. Since this approach disallows several attacks +# from malicious PR authors, such workflows have access to the secrets +# stored on GitHub. For details on the `workflow_run` trigger and this +# security measures, see +# https://securitylab.github.com/research/github-actions-preventing-pwn-requests/ +name: Upload code coverage + +on: + workflow_run: + # This has to be the `name:` of the workflow in `code_coverage.yml`. + # Start when this workflow has finished successfully. + workflows: [measure-code-coverage] + types: + - completed + +jobs: + upload: + runs-on: ubuntu-latest + # Only run on successful pull requests. Merge commits to master upload + # their coverage reports directly inside `code-coverage.yml` + if: > + github.event.workflow_run.event == 'pull_request' && + github.event.workflow_run.conclusion == 'success' + steps: + - name: 'Download artifact' + uses: actions/github-script@v7 + # The following script is taken from the link stated at the + # beginning of this file. It manually downloads an artifact + # from another workflow. 
+ with: + script: | + var artifacts = await github.rest.actions.listWorkflowRunArtifacts({ + owner: context.repo.owner, + repo: context.repo.repo, + run_id: ${{github.event.workflow_run.id }}, + }); + var matchArtifact = artifacts.data.artifacts.filter((artifact) => { + return artifact.name == "coverage-report" + })[0]; + var download = await github.rest.actions.downloadArtifact({ + owner: context.repo.owner, + repo: context.repo.repo, + artifact_id: matchArtifact.id, + archive_format: 'zip', + }); + var fs = require('fs'); + fs.writeFileSync('${{github.workspace}}/coverage-report.zip', Buffer.from(download.data)); + - run: unzip coverage-report.zip + # Read the metadata into environment variables. + - name: "Read PR number" + run: echo "pr_number=`cat pr`" >> $GITHUB_ENV + - name: "Read Github Ref" + run: echo "original_github_ref=`cat github_ref`" >> $GITHUB_ENV; + - name: "Read Github Repository" + run: echo "original_github_repository=`cat github_repository`" >> $GITHUB_ENV; + # We have to check out the source code from the PR, otherwise Codecov + # won't process the upload properly. We first check it out into a + # subdirectory `qlever-source`, otherwise the coverage report will + # be overwritten. We then move all the files back into the working + # directory such that Codecov will pick them up properly. + - name: "Checkout" + uses: actions/checkout@v4 + with: + repository: ${{env.original_github_repository}} + submodules: "recursive" + ref: ${{env.original_github_ref}} + path: qlever-source + - name: "Move qlever sources up" + run: shopt -s dotglob && mv qlever-source/* . + # For the new version of the codecov action we have to move the coverage file back to its original location, + # else several things don't work... + - name: "Move coverage file to original location" + run: mkdir build && mkdir build/test && mv coverage.lcov build/test + - name: "Upload coverage report" + uses: codecov/codecov-action@v4 + with: + file: ${{github.workspace}}/build/test/coverage.lcov + # Note: technically, a `token` is not required for codecov.io when + # uploading from a public repository, but specifying it avoids the + # nasty spurious failures due to Github's rate limit for codecov's + # public default token. + token: ${{ secrets.CODECOV_TOKEN }} + fail_ci_if_error: true + # Since this workflow runs on the master branch and not in a PR + # we have to specify the following settings manually to make Codecov + # aware of the "actual" origin of the coverage report. + override_branch: ${{github.event.workflow_run.head_branch}} + override_build: ${{github.event.workflow_run.workflow_id}} + override_commit: ${{github.event.workflow_run.head_commit.id}} + override_pr: ${{env.pr_number}} diff --git a/.github/workflows/upload-sonarcloud.yml b/.github/workflows/upload-sonarcloud.yml new file mode 100644 index 0000000000..fe365b7384 --- /dev/null +++ b/.github/workflows/upload-sonarcloud.yml @@ -0,0 +1,144 @@ +# This workflow runs as soon as the workflow from `sonarcloud.yml` has +# successfully finished. It downloads the created artifact and runs the +# SonarCloud analysis and uploader. This workflow uses the `workflow_run` trigger. This +# means that it will always be run from the master branch, meaning that +# the contents of this file will always be taken from the master branch, +# even if a PR changes it. Since this approach disallows several attacks +# from malicious PR authors, such workflows have access to the secrets +# stored on GitHub. 
For details on the `workflow_run` trigger and this +# security measures, see +# https://securitylab.github.com/research/github-actions-preventing-pwn-requests/ + +# Credit goes to the `1c-syntax` project where I found a code snippet to make +# SonarCloud work with the `workflow_run` trigger: +# https://github.com/1c-syntax/bsl-language-server/blob/21a6bb5172cbc4591a05414d5d0ac221689e45ce/.github/workflows/qa.yml#L16 +name: Upload and run SonarCloud Analysis + +on: + workflow_run: + # This has to be the `name:` of the workflow in `code_coverage.yml`. + # Start when this workflow has finished successfully. + workflows: [sonarcloud-analysis] + types: + - completed + +concurrency: + group: '${{ github.workflow }} @ ${{ github.event.workflow_run.head_repository.full_name || github.ref}} @ ${{ github.event.workflow_run.head_branch || github.event.workflow_run.pull_requests[0].url || github.head_ref || github.ref }}' + cancel-in-progress: true + +jobs: + upload: + runs-on: ubuntu-22.04 + if: github.event.workflow_run.conclusion == 'success' + env: + compiler: clang + compiler-version: 16 + warnings: "-Wall -Wextra " + build-type: Release + BUILD_WRAPPER_OUT_DIR: build_wrapper_output_directory # Directory where build-wrapper output will be placed + steps: + - name: Print concurrency key + run: echo "${{ github.workflow }} @ ${{ github.event.workflow_run.head_repository.full_name || github.ref}} @ ${{ github.event.workflow_run.head_branch || github.event.workflow_run.pull_requests[0].url || github.head_ref || github.ref }}" + - name: 'Download artifact' + uses: actions/github-script@v7 + if: github.event.workflow_run.event == 'pull_request' + # The following script is taken from the link stated at the + # beginning of this file. It manually downloads an artifact + # from another workflow. + with: + script: | + var artifacts = await github.rest.actions.listWorkflowRunArtifacts({ + owner: context.repo.owner, + repo: context.repo.repo, + run_id: ${{github.event.workflow_run.id }}, + }); + var matchArtifact = artifacts.data.artifacts.filter((artifact) => { + return artifact.name == "sonarcloud-report" + })[0]; + var download = await github.rest.actions.downloadArtifact({ + owner: context.repo.owner, + repo: context.repo.repo, + artifact_id: matchArtifact.id, + archive_format: 'zip', + }); + var fs = require('fs'); + fs.writeFileSync('${{github.workspace}}/sonarcloud-report.zip', Buffer.from(download.data)); + - run: unzip sonarcloud-report.zip + if: github.event.workflow_run.event == 'pull_request' + # Read the metadata into environment variables. + - name: "Read PR number" + run: echo "pr_number=`cat pr`" >> $GITHUB_ENV + if: github.event.workflow_run.event == 'pull_request' + - name: "Read Github Ref" + run: echo "original_github_ref=`cat github_ref`" >> $GITHUB_ENV; + if: github.event.workflow_run.event == 'pull_request' + - name: "Read Github Repository" + run: echo "original_github_repository=`cat github_repository`" >> $GITHUB_ENV; + if: github.event.workflow_run.event == 'pull_request' + # We have to check out the source code from the PR, otherwise Codecov + # won't process the upload properly. We first check it out into a + # subdirectory `qlever-source`, otherwise the coverage report will + # be overwritten. We then move all the files back into the working + # directory such that Codecov will pick them up properly. 
+ - name: Request GitHub API for PR data + uses: octokit/request-action@v2.x + id: get_pr_data + if: github.event.workflow_run.event == 'pull_request' + with: + route: GET /repos/{full_name}/pulls/{number} + number: ${{ env.pr_number }} + full_name: ${{ github.event.repository.full_name }} + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + - name: "Checkout" + uses: actions/checkout@v4 + with: + repository: ${{ github.event.workflow_run.head_repository.full_name }} + ref: ${{ github.event.workflow_run.head_branch }} + fetch-depth: 0 + submodules: "recursive" + path: qlever-source + - name: Checkout base branch + working-directory: qlever-source + if: github.event.workflow_run.event == 'pull_request' + run: | + git remote add upstream ${{ github.event.repository.clone_url }} + git fetch upstream --no-recurse-submodules + git checkout -B ${{ fromJson(steps.get_pr_data.outputs.data).base.ref }} upstream/${{ fromJson(steps.get_pr_data.outputs.data).base.ref }} + git checkout ${{ github.event.workflow_run.head_branch }} + git clean -ffdx && git reset --hard HEAD + - name: "Move qlever sources up" + run: shopt -s dotglob && mv qlever-source/* . + - name: Install Build Wrapper + uses: SonarSource/sonarqube-scan-action/install-build-wrapper@v6 + - name: Install dependencies + uses: ./.github/workflows/install-dependencies-ubuntu + - name: Install compiler + uses: ./.github/workflows/install-compiler-ubuntu + with: + compiler: ${{env.compiler}} + compiler-version: ${{env.compiler-version}} + + - name: Configure CMake + # Configure CMake in a 'build' subdirectory. `CMAKE_BUILD_TYPE` is only required if you are using a single-configuration generator such as make. + # See https://cmake.org/cmake/help/latest/variable/CMAKE_BUILD_TYPE.html?highlight=cmake_build_type + run: cmake -B ${{github.workspace}}/build -DCMAKE_BUILD_TYPE=${{env.build-type}} -DCMAKE_TOOLCHAIN_FILE="$(pwd)/toolchains/${{env.compiler}}${{env.compiler-version}}.cmake" -DADDITIONAL_COMPILER_FLAGS="${{env.warnings}} ${{env.asan-flags}} ${{env.ubsan-flags}}" -DUSE_PARALLEL=false -DRUN_EXPENSIVE_TESTS=true -DENABLE_EXPENSIVE_CHECKS=true -DLOGLEVEL=TRACE + - name: Build + # Build your program with the given configuration + run: build-wrapper-linux-x86-64 --out-dir ${{ env.BUILD_WRAPPER_OUT_DIR }} cmake --build ${{github.workspace}}/build --config ${{env.build-type}} -- -j $(nproc) + - name: Run sonar-scanner on PR + if: github.event.workflow_run.event == 'pull_request' + uses: SonarSource/sonarqube-scan-action@v6 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }} + with: + args: --define sonar.cfamily.compile-commands=${{ env.BUILD_WRAPPER_OUT_DIR }}/compile_commands.json -Dsonar.scm.revision=${{ github.event.workflow_run.head_sha }} -Dsonar.pullrequest.key=${{ fromJson(steps.get_pr_data.outputs.data).number }} -Dsonar.pullrequest.branch=${{ fromJson(steps.get_pr_data.outputs.data).head.ref }} -Dsonar.pullrequest.base=${{ fromJson(steps.get_pr_data.outputs.data).base.ref }} + - name: SonarCloud Scan on push + if: github.event.workflow_run.event == 'push' && github.event.workflow_run.head_repository.full_name == github.event.repository.full_name + uses: SonarSource/sonarqube-scan-action@v6 + with: + args: --define sonar.cfamily.compile-commands=${{ env.BUILD_WRAPPER_OUT_DIR }}/compile_commands.json -Dsonar.scm.revision=${{ github.event.workflow_run.head_sha }} -Dsonar.branch.name=${{ github.event.workflow_run.head_branch }} + env: + SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }} + GITHUB_TOKEN: ${{ 
secrets.GITHUB_TOKEN }} From 485a0528a9984df1637115e39b2c445ad9a6eaec Mon Sep 17 00:00:00 2001 From: Tanmay Garg Date: Thu, 29 Jan 2026 22:17:11 +0100 Subject: [PATCH 41/45] Integrate back the lost workflow changes --- .github/workflows/check_index_version.yml | 10 +++++----- .github/workflows/sparql-conformance.yml | 10 +++++----- 2 files changed, 10 insertions(+), 10 deletions(-) diff --git a/.github/workflows/check_index_version.yml b/.github/workflows/check_index_version.yml index ecba7a725b..180edc38e2 100644 --- a/.github/workflows/check_index_version.yml +++ b/.github/workflows/check_index_version.yml @@ -55,8 +55,8 @@ jobs: # Build your program with the given configuration # Only build the actually required executables to save space on the device. run: | - cmake --build ${{github.workspace}}/master/build --config ${{matrix.build-type}} --target IndexBuilderMain -- -j $(nproc) - cmake --build ${{github.workspace}}/master/build --config ${{matrix.build-type}} --target ServerMain -- -j $(nproc) + cmake --build ${{github.workspace}}/master/build --config ${{matrix.build-type}} --target qlever-index -- -j $(nproc) + cmake --build ${{github.workspace}}/master/build --config ${{matrix.build-type}} --target qlever-server -- -j $(nproc) cmake --build ${{github.workspace}}/master/build --config ${{matrix.build-type}} --target PrintIndexVersionMain -- -j $(nproc) - name: E2E on Master @@ -76,9 +76,9 @@ jobs: run: cmake --build ${{github.workspace}}/master/build --target clean - name: BuildPr # also only build the required executables here. - run: | - cmake --build ${{github.workspace}}/pr/build --config ${{matrix.build-type}} --target IndexBuilderMain -- -j $(nproc) - cmake --build ${{github.workspace}}/pr/build --config ${{matrix.build-type}} --target ServerMain -- -j $(nproc) + run: | + cmake --build ${{github.workspace}}/pr/build --config ${{matrix.build-type}} --target qlever-index -- -j $(nproc) + cmake --build ${{github.workspace}}/pr/build --config ${{matrix.build-type}} --target qlever-server -- -j $(nproc) cmake --build ${{github.workspace}}/pr/build --config ${{matrix.build-type}} --target PrintIndexVersionMain -- -j $(nproc) - name: Get index version PR working-directory: ${{github.workspace}}/pr/build diff --git a/.github/workflows/sparql-conformance.yml b/.github/workflows/sparql-conformance.yml index 9103de62bd..1989c69217 100644 --- a/.github/workflows/sparql-conformance.yml +++ b/.github/workflows/sparql-conformance.yml @@ -50,14 +50,14 @@ jobs: run: mkdir ${{github.workspace}}/qlever-code/build - name: Configure CMake run: cmake -S ${{github.workspace}}/qlever-code/ -B ${{github.workspace}}/qlever-code/build ${{env.cmake-flags}} -DCMAKE_BUILD_TYPE=${{env.build-type}} -DLOGLEVEL=INFO -DUSE_PARALLEL=false - - name: Build IndexBuilderMain - run: cmake --build ${{github.workspace}}/qlever-code/build --target IndexBuilderMain --config ${{env.build-type}} -- -j $(nproc) - - name: Build ServerMain - run: cmake --build ${{github.workspace}}/qlever-code/build --target ServerMain --config ${{env.build-type}} -- -j $(nproc) + - name: Build qlever-index + run: cmake --build ${{github.workspace}}/qlever-code/build --target qlever-index --config ${{env.build-type}} -- -j $(nproc) + - name: Build qlever-server + run: cmake --build ${{github.workspace}}/qlever-code/build --target qlever-server --config ${{env.build-type}} -- -j $(nproc) - name: Execute test suite run: | cd qlever-test-suite - python testsuite.py config http://0.0.0.0 7001 
${{github.workspace}}/sparql-test-suite/sparql/sparql11/ ${{github.workspace}}/qlever-code/build/ localhost http-graph-store sparql + python testsuite.py config http://0.0.0.0 7001 ${{github.workspace}}/sparql-test-suite/sparql/sparql11/ ${{github.workspace}}/qlever-code/build/ localhost http-graph-store sparql qlever-server qlever-index python testsuite.py extract python testsuite.py ${{ github.sha }} cd .. From d305ab3b959bb2b41275fdaeffc9af00d285203e Mon Sep 17 00:00:00 2001 From: Tanmay Garg Date: Fri, 30 Jan 2026 11:45:14 +0100 Subject: [PATCH 42/45] Remove unnecessary comments and fix formatting --- src/ServerMain.cpp | 8 ++++---- src/engine/GroupByImpl.cpp | 7 ------- src/index/IndexBuilderMain.cpp | 5 ----- src/rdfTypes/GeometryInfoHelpersImpl.h | 1 - src/util/JoinAlgorithms/IndexNestedLoopJoin.h | 2 -- test/util/AsyncTestHelpers.h | 3 --- 6 files changed, 4 insertions(+), 22 deletions(-) diff --git a/src/ServerMain.cpp b/src/ServerMain.cpp index f5e5d7f259..21695e5fcf 100644 --- a/src/ServerMain.cpp +++ b/src/ServerMain.cpp @@ -195,10 +195,10 @@ int main(int argc, char** argv) { return EXIT_FAILURE; } - AD_LOG_INFO << EMPH_ON << "QLever server " - << qlever::version::ProjectVersion << ", compiled on " - << qlever::version::DatetimeOfCompilation << " using git hash " - << qlever::version::GitShortHash << EMPH_OFF << std::endl; + AD_LOG_INFO << EMPH_ON << "QLever server " << qlever::version::ProjectVersion + << ", compiled on " << qlever::version::DatetimeOfCompilation + << " using git hash " << qlever::version::GitShortHash << EMPH_OFF + << std::endl; try { Server server(port, numSimultaneousQueries, memoryMaxSize, diff --git a/src/engine/GroupByImpl.cpp b/src/engine/GroupByImpl.cpp index 356aa101c0..24d6082e02 100644 --- a/src/engine/GroupByImpl.cpp +++ b/src/engine/GroupByImpl.cpp @@ -131,13 +131,6 @@ class LazyGroupByRange table, aggregates_, evaluationContext, blockStart, blockEnd, ¤tLocalVocab_, groupByCols_); resultTable_ = std::move(table).toDynamic(); - // // This processes the whole block in batches if possible - // IdTableStatic table = - // std::move(resultTable_).toStatic(); - // parent_->processBlock(table, aggregates_, evaluationContext, - // blockStart, blockEnd, - // ¤tLocalVocab_, groupByCols_); - // resultTable_ = std::move(table).toDynamic(); } } diff --git a/src/index/IndexBuilderMain.cpp b/src/index/IndexBuilderMain.cpp index e9b39fa27e..533620ddc6 100644 --- a/src/index/IndexBuilderMain.cpp +++ b/src/index/IndexBuilderMain.cpp @@ -266,11 +266,6 @@ int main(int argc, char** argv) { << std::endl; return EXIT_SUCCESS; } - if (optionsMap.count("version")) { - std::cout << "QLever IndexBuilderMain " << qlever::version::ProjectVersion - << '\n'; - return EXIT_SUCCESS; - } po::notify(optionsMap); } catch (const std::exception& e) { std::cerr << "Error in command-line argument: " << e.what() << std::endl; diff --git a/src/rdfTypes/GeometryInfoHelpersImpl.h b/src/rdfTypes/GeometryInfoHelpersImpl.h index 09a24b47c3..236813979e 100644 --- a/src/rdfTypes/GeometryInfoHelpersImpl.h +++ b/src/rdfTypes/GeometryInfoHelpersImpl.h @@ -342,7 +342,6 @@ struct MetricLengthVisitor { static_assert(ad_utility::similarToInstantiation); return ::ranges::accumulate( - // ::ranges::transform_view(multiGeom, MetricLengthVisitor{}), 0); ::ranges::transform_view(ad_utility::allView(multiGeom), MetricLengthVisitor{}), 0); diff --git a/src/util/JoinAlgorithms/IndexNestedLoopJoin.h b/src/util/JoinAlgorithms/IndexNestedLoopJoin.h index bf2ceacb86..9af53a75bc 100644 --- 
a/src/util/JoinAlgorithms/IndexNestedLoopJoin.h +++ b/src/util/JoinAlgorithms/IndexNestedLoopJoin.h @@ -238,8 +238,6 @@ class IndexNestedLoopJoin { for (const auto& rightRow : rightTable) { size_t leftOffset = 0; size_t leftSize = leftTable.size(); - // for (const auto& [rightId, leftCol] : - // ::ranges::zip_view(rightRow, leftColumns)) { // Use index-based iteration instead of ranges::zip_view to avoid // copying RowReference (which has a deleted copy constructor) on // AppleClang. diff --git a/test/util/AsyncTestHelpers.h b/test/util/AsyncTestHelpers.h index b816678081..d74a15fd3c 100644 --- a/test/util/AsyncTestHelpers.h +++ b/test/util/AsyncTestHelpers.h @@ -47,9 +47,6 @@ void runAsyncTest(Func innerRun, size_t numThreads) { } }); return fut; - // Original code that causes AppleClang segfault: - // return net::post(*ioContext, std::packaged_task{ - // [&] { innerRun(*ioContext); }}); } }(); From aba0bb81079c2c1b926d6d2956167e08a6492194 Mon Sep 17 00:00:00 2001 From: Tanmay Garg Date: Fri, 30 Jan 2026 13:18:03 +0100 Subject: [PATCH 43/45] Use index-based iteration to avoid copying CompactVectorOfStrings (which has a deleted copy constructor) on AppleClang. --- test/CompactStringVectorTest.cpp | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/test/CompactStringVectorTest.cpp b/test/CompactStringVectorTest.cpp index fd635ab3ea..59d3fc0e57 100644 --- a/test/CompactStringVectorTest.cpp +++ b/test/CompactStringVectorTest.cpp @@ -310,7 +310,12 @@ TYPED_TEST(CompactVectorOfStringsFixture, cloneAndRemap) { auto copy2 = original.cloneAndRemap(mappingFunction); ASSERT_EQ(original.size(), copy2.size()); - for (auto [reference, element] : ::ranges::zip_view(original, copy2)) { + // Use index-based iteration instead of ranges::zip_view to avoid + // copying CompactVectorOfStrings (which has a deleted copy constructor) + // on AppleClang. + for (size_t i = 0; i < original.size(); ++i) { + const auto& reference = original[i]; + const auto& element = copy2[i]; ASSERT_EQ(reference.size(), element.size()); auto modifiedReference = ::ranges::to( reference | ql::views::transform(mappingFunction)); From 01dce2ba4171645629eed367ce96ca4e9b368cc3 Mon Sep 17 00:00:00 2001 From: Tanmay Garg Date: Tue, 3 Feb 2026 10:35:31 +0100 Subject: [PATCH 44/45] Implement pr review suggestions --- src/util/JoinAlgorithms/IndexNestedLoopJoin.h | 12 +++++------- test/CompactStringVectorTest.cpp | 10 ++++------ test/util/AsyncTestHelpers.h | 17 +++++------------ 3 files changed, 14 insertions(+), 25 deletions(-) diff --git a/src/util/JoinAlgorithms/IndexNestedLoopJoin.h b/src/util/JoinAlgorithms/IndexNestedLoopJoin.h index 9af53a75bc..2a8a2d1120 100644 --- a/src/util/JoinAlgorithms/IndexNestedLoopJoin.h +++ b/src/util/JoinAlgorithms/IndexNestedLoopJoin.h @@ -238,13 +238,11 @@ class IndexNestedLoopJoin { for (const auto& rightRow : rightTable) { size_t leftOffset = 0; size_t leftSize = leftTable.size(); - // Use index-based iteration instead of ranges::zip_view to avoid - // copying RowReference (which has a deleted copy constructor) on - // AppleClang. - AD_CORRECTNESS_CHECK(rightRow.size() == leftColumns.size()); - for (size_t colIdx = 0; colIdx < rightRow.size(); ++colIdx) { - const auto& rightId = rightRow[colIdx]; - const auto& leftCol = leftColumns[colIdx]; + // Use `ql::ranges::ref_view` to avoid copying RowReference (which has a + // deleted copy constructor) on AppleClang. 
+ for (const auto& [rightId, leftCol] : + ::ranges::views::zip(ql::ranges::ref_view{rightRow}, + ql::ranges::ref_view{leftColumns})) { AD_EXPENSIVE_CHECK(!rightId.isUndefined()); auto currentStart = leftCol.begin() + leftOffset; auto subrange = ql::ranges::equal_range( diff --git a/test/CompactStringVectorTest.cpp b/test/CompactStringVectorTest.cpp index 59d3fc0e57..5cf3029177 100644 --- a/test/CompactStringVectorTest.cpp +++ b/test/CompactStringVectorTest.cpp @@ -310,12 +310,10 @@ TYPED_TEST(CompactVectorOfStringsFixture, cloneAndRemap) { auto copy2 = original.cloneAndRemap(mappingFunction); ASSERT_EQ(original.size(), copy2.size()); - // Use index-based iteration instead of ranges::zip_view to avoid - // copying CompactVectorOfStrings (which has a deleted copy constructor) - // on AppleClang. - for (size_t i = 0; i < original.size(); ++i) { - const auto& reference = original[i]; - const auto& element = copy2[i]; + // Use `ql::ranges::ref_view` to avoid copying CompactVectorOfStrings (which + // has a deleted copy constructor) on AppleClang. + for (const auto& [reference, element] : ::ranges::views::zip( + ql::ranges::ref_view{original}, ql::ranges::ref_view{copy2})) { ASSERT_EQ(reference.size(), element.size()); auto modifiedReference = ::ranges::to( reference | ql::views::transform(mappingFunction)); diff --git a/test/util/AsyncTestHelpers.h b/test/util/AsyncTestHelpers.h index d74a15fd3c..87924b918b 100644 --- a/test/util/AsyncTestHelpers.h +++ b/test/util/AsyncTestHelpers.h @@ -34,18 +34,11 @@ void runAsyncTest(Func innerRun, size_t numThreads) { if constexpr (TestableCoroutine) { return net::co_spawn(*ioContext, innerRun(*ioContext), net::use_future); } else { - // Use std::promise instead of std::packaged_task to work around - // AppleClang/LLVM Clang compiler crash with packaged_task + Boost.Asio - auto promise = std::make_shared>(); - auto fut = promise->get_future(); - net::post(*ioContext, [&innerRun, ioContext, promise]() { - try { - innerRun(*ioContext); - promise->set_value(); - } catch (...) { - promise->set_exception(std::current_exception()); - } - }); + // Use a named variable to work around AppleClang compiler crash when + // passing a temporary packaged_task directly to net::post. + std::packaged_task task{[&] { innerRun(*ioContext); }}; + auto fut = task.get_future(); + net::post(*ioContext, std::move(task)); return fut; } }(); From 5601763c0729498a1280c576a69b4d75c9655437 Mon Sep 17 00:00:00 2001 From: Tanmay Garg Date: Tue, 3 Feb 2026 11:58:46 +0100 Subject: [PATCH 45/45] Implement more pr review suggestions --- src/engine/GroupByImpl.cpp | 1 - src/util/JoinAlgorithms/IndexNestedLoopJoin.h | 5 ++--- test/CompactStringVectorTest.cpp | 2 +- test/util/AsyncTestHelpers.h | 6 ++---- 4 files changed, 5 insertions(+), 9 deletions(-) diff --git a/src/engine/GroupByImpl.cpp b/src/engine/GroupByImpl.cpp index 24d6082e02..5c626c3579 100644 --- a/src/engine/GroupByImpl.cpp +++ b/src/engine/GroupByImpl.cpp @@ -124,7 +124,6 @@ class LazyGroupByRange groupSplitAcrossTables_ = false; } else { // This processes the whole block in batches if possible. - // Note: Use `template` keyword for dependent template name. 
IdTableStatic table{ std::move(resultTable_).template toStatic()}; parent_->template processBlock( diff --git a/src/util/JoinAlgorithms/IndexNestedLoopJoin.h b/src/util/JoinAlgorithms/IndexNestedLoopJoin.h index 2a8a2d1120..f8b2a55617 100644 --- a/src/util/JoinAlgorithms/IndexNestedLoopJoin.h +++ b/src/util/JoinAlgorithms/IndexNestedLoopJoin.h @@ -238,11 +238,10 @@ class IndexNestedLoopJoin { for (const auto& rightRow : rightTable) { size_t leftOffset = 0; size_t leftSize = leftTable.size(); - // Use `ql::ranges::ref_view` to avoid copying RowReference (which has a + // Use `ql::ranges::ref_view` to avoid copying `RowReference` (which has a // deleted copy constructor) on AppleClang. for (const auto& [rightId, leftCol] : - ::ranges::views::zip(ql::ranges::ref_view{rightRow}, - ql::ranges::ref_view{leftColumns})) { + ::ranges::views::zip(ql::ranges::ref_view{rightRow}, leftColumns)) { AD_EXPENSIVE_CHECK(!rightId.isUndefined()); auto currentStart = leftCol.begin() + leftOffset; auto subrange = ql::ranges::equal_range( diff --git a/test/CompactStringVectorTest.cpp b/test/CompactStringVectorTest.cpp index 5cf3029177..dd6afae5e9 100644 --- a/test/CompactStringVectorTest.cpp +++ b/test/CompactStringVectorTest.cpp @@ -310,7 +310,7 @@ TYPED_TEST(CompactVectorOfStringsFixture, cloneAndRemap) { auto copy2 = original.cloneAndRemap(mappingFunction); ASSERT_EQ(original.size(), copy2.size()); - // Use `ql::ranges::ref_view` to avoid copying CompactVectorOfStrings (which + // Use `ql::ranges::ref_view` to avoid copying `CompactVectorOfStrings` (which // has a deleted copy constructor) on AppleClang. for (const auto& [reference, element] : ::ranges::views::zip( ql::ranges::ref_view{original}, ql::ranges::ref_view{copy2})) { diff --git a/test/util/AsyncTestHelpers.h b/test/util/AsyncTestHelpers.h index 87924b918b..dea9eaa4a8 100644 --- a/test/util/AsyncTestHelpers.h +++ b/test/util/AsyncTestHelpers.h @@ -35,11 +35,9 @@ void runAsyncTest(Func innerRun, size_t numThreads) { return net::co_spawn(*ioContext, innerRun(*ioContext), net::use_future); } else { // Use a named variable to work around AppleClang compiler crash when - // passing a temporary packaged_task directly to net::post. + // passing a temporary `packaged_task` directly to `net::post`. std::packaged_task task{[&] { innerRun(*ioContext); }}; - auto fut = task.get_future(); - net::post(*ioContext, std::move(task)); - return fut; + return net::post(*ioContext, std::move(task)); } }();
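
The AppleClang workaround in the last two patches above (give the std::packaged_task a name instead of handing a temporary straight to net::post) can be tried out in isolation with the minimal sketch below. It is not part of the patch series: the io_context, the dummy task body and the worker thread are all illustrative, and the sketch deliberately side-steps Asio's packaged_task completion-token overload (which the patched runAsyncTest relies on to obtain the future from net::post) by taking the future explicitly and moving the named task into a plain handler.

// Illustrative, self-contained sketch only -- not QLever code. The task body
// stands in for the `innerRun(*ioContext)` call in `runAsyncTest`.
#include <boost/asio/io_context.hpp>
#include <boost/asio/post.hpp>
#include <future>
#include <iostream>
#include <thread>

int main() {
  boost::asio::io_context ctx;

  // Named, move-only task instead of a temporary passed directly to post():
  // this is the shape of the AppleClang workaround from the patches above.
  std::packaged_task<int()> task{[] { return 42; }};
  std::future<int> result = task.get_future();

  // Move the *named* task into the handler. Asio only requires handlers to be
  // move-constructible, so the move-only packaged_task is fine here.
  boost::asio::post(ctx, [t = std::move(task)]() mutable { t(); });

  // Run the io_context on a worker thread and wait for the task's result.
  std::thread worker{[&ctx] { ctx.run(); }};
  std::cout << "task returned " << result.get() << '\n';
  worker.join();
}

Because the task is moved into an ordinary lambda handler, this variant works even without Asio's async_result specialization for packaged_task; the named-variable form in the patch keeps the original one-liner shape while avoiding the compiler crash triggered by the temporary.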