diff --git a/.github/workflows/build.yaml b/.github/workflows/build.yaml index b9d9d9a96a..531946fcda 100644 --- a/.github/workflows/build.yaml +++ b/.github/workflows/build.yaml @@ -56,6 +56,23 @@ jobs: - name: '[CI Only] Perform CodeQL Analysis' if: inputs.codeql && matrix.preset != 'linux-arm64-ci' uses: github/codeql-action/analyze@v3 + - name: Validate vcpkg Linux binary dependencies + if: matrix.preset == 'linux-ci' || matrix.preset == 'linux-arm64-ci' + run: | + VCPKG_BIN="out/build/${{ matrix.preset }}/vcpkg" + if [ ! -x "$VCPKG_BIN" ]; then + echo "vcpkg binary not found at $VCPKG_BIN" + exit 1 + fi + + echo "Running ldd on $VCPKG_BIN" + ldd "$VCPKG_BIN" + + # Fail if vcpkg links against system curl or OpenSSL + if ldd "$VCPKG_BIN" | grep -E 'libcurl\.so|libssl\.so|libcrypto\.so'; then + echo "ERROR: vcpkg is linked against forbidden libraries (libcurl/libssl/libcrypto)" + exit 1 + fi - name: Run vcpkg and vcpkg-artifacts unit tests run: ctest --preset ${{ matrix.preset }} --output-on-failure 2>&1 - name: Get microsoft/vcpkg pinned sha into VCPKG_SHA diff --git a/CMakeLists.txt b/CMakeLists.txt index 8d52ddf01a..11c2501111 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -195,6 +195,7 @@ set(TEST_SCRIPT_ASSET_CACHE_SOURCES "${CMAKE_CURRENT_SOURCE_DIR}/src/test-script find_package(fmt REQUIRED) find_package(CMakeRC REQUIRED) +find_package(LibCURL REQUIRED) # === Target: locale-resources === @@ -229,6 +230,8 @@ target_compile_definitions(vcpkglib PUBLIC _FILE_OFFSET_BITS=64 ) +target_link_libraries(vcpkglib PUBLIC CURL::libcurl) + if(VCPKG_STANDALONE_BUNDLE_SHA) target_compile_definitions(vcpkglib PUBLIC "VCPKG_STANDALONE_BUNDLE_SHA=${VCPKG_STANDALONE_BUNDLE_SHA}" diff --git a/NOTICE.txt b/NOTICE.txt index 7b5c798535..3d26b1b4b5 100644 --- a/NOTICE.txt +++ b/NOTICE.txt @@ -71,6 +71,35 @@ SOFTWARE. 
========================================= END OF CMakeRC NOTICES, INFORMATION, AND LICENSE +curl + +%% curl NOTICES, INFORMATION, AND LICENSE BEGIN HERE +========================================= +COPYRIGHT AND PERMISSION NOTICE + +Copyright (C) Daniel Stenberg, <daniel@haxx.se>, and many +contributors, see the THANKS file. + +All rights reserved. + +Permission to use, copy, modify, and distribute this software for any purpose +with or without fee is hereby granted, provided that the above copyright +notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT OF THIRD PARTY RIGHTS. IN +NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE +OR OTHER DEALINGS IN THE SOFTWARE. + +Except as contained in this notice, the name of a copyright holder shall not +be used in advertising or otherwise to promote the sale, use or other dealings +in this Software without prior written authorization of the copyright holder. 
+========================================= +END OF curl NOTICES, INFORMATION, AND LICENSE + The following third party software is incorporated into vcpkg-artifacts: --------------------------------------------------------- diff --git a/azure-pipelines/end-to-end-tests-dir/asset-caching.ps1 b/azure-pipelines/end-to-end-tests-dir/asset-caching.ps1 index cc6e1956b1..bf035e9914 100644 --- a/azure-pipelines/end-to-end-tests-dir/asset-caching.ps1 +++ b/azure-pipelines/end-to-end-tests-dir/asset-caching.ps1 @@ -59,7 +59,8 @@ Throw-IfNotFailed $expected = @( "A suitable version of cmake was not found \(required v[0-9.]+\)\.", "Trying to download cmake-[0-9.]+-[^.]+\.(zip|tar\.gz) using asset cache file://$assetCacheRegex/[0-9a-z]+", -"error: curl: \(37\) Couldn't open file [^\n]+", +"error: curl operation failed with error code 37 \((Couldn't|Could not) read a file:\/\/ file\)\.", +"error: Not a transient network error, won't retry download from file://$assetCacheRegex/[0-9a-z]+" "error: there were no asset cache hits, and x-block-origin blocks trying the authoritative source https://github\.com/Kitware/CMake/releases/download/[^ ]+", "note: If you are using a proxy, please ensure your proxy settings are correct\.", "Possible causes are:", @@ -111,7 +112,8 @@ if (-not ($actual -match $expected)) { Refresh-TestRoot $expected = @( "^Downloading https://localhost:1234/foobar\.html -> example3\.html", -"error: curl: \(7\) Failed to connect to localhost port 1234( after \d+ ms)?: ((Could not|Couldn't) connect to server|Connection refused)", +"error: curl operation failed with error code 7 \((Couldn't|Could not) connect to server\)\.", +"error: Not a transient network error, won't retry download from https://localhost:1234/foobar\.html", "note: If you are using a proxy, please ensure your proxy settings are correct\.", "Possible causes are:", "1\. 
You are actually using an HTTP proxy, but setting HTTPS_PROXY variable to ``https://address:port``\.", @@ -134,9 +136,11 @@ if (-not ($actual -match $expected)) { Refresh-TestRoot $expected = @( "^Downloading example3\.html, trying https://localhost:1234/foobar\.html", +"error: curl operation failed with error code 7 \((Couldn't|Could not) connect to server\)\.", +"error: Not a transient network error, won't retry download from https://localhost:1234/foobar\.html", "Trying https://localhost:1235/baz\.html", -"error: curl: \(7\) Failed to connect to localhost port 1234( after \d+ ms)?: ((Could not|Couldn't) connect to server|Connection refused)", -"error: curl: \(7\) Failed to connect to localhost port 1235( after \d+ ms)?: ((Could not|Couldn't) connect to server|Connection refused)", +"error: curl operation failed with error code 7 \((Couldn't|Could not) connect to server\)\.", +"error: Not a transient network error, won't retry download from https://localhost:1235/baz\.html", "note: If you are using a proxy, please ensure your proxy settings are correct\.", "Possible causes are:", "1\. You are actually using an HTTP proxy, but setting HTTPS_PROXY variable to ``https://address:port``\.", @@ -188,34 +192,12 @@ if (-not ($actual -match $expected)) { } # ... 
also with multiple authoritative URLs -if ($IsWindows) { - # WinHTTP - Refresh-TestRoot - $expected = @( - "^Downloading example3\.html, trying https://nonexistent\.example\.com", - "warning: Download https://nonexistent\.example\.com failed -- retrying after 1000ms", - "warning: Download https://nonexistent\.example\.com failed -- retrying after 2000ms", - "warning: Download https://nonexistent\.example\.com failed -- retrying after 4000ms", - "Trying https://raw\.githubusercontent\.com/microsoft/vcpkg-tool/1767aaee7b229c609f7ad5cf2f57b6a6cc309fb8/LICENSE\.txt", - "Successfully downloaded example3\.html", - "$" - ) -join "`n" - - $actual = Run-VcpkgAndCaptureOutput @commonArgs x-download "$TestDownloadsRoot/example3.html" --sha512 65077997890f66f6041bb3284bb7b88e27631411ccbc253201ca4e00c4bcc58c0d77edffda4975498797cc10772c7fd68fbeb13cc4ac493a3471a9d49e5b6f24 --url https://nonexistent.example.com --url https://raw.githubusercontent.com/microsoft/vcpkg-tool/1767aaee7b229c609f7ad5cf2f57b6a6cc309fb8/LICENSE.txt - Throw-IfFailed - if (-not ($actual -match $expected)) { - throw "Failure: azurl (no), x-block-origin (no), asset-cache (n/a), download (succeed)" - } -} - -# Force curl with --header Refresh-TestRoot $expected = @( "^Downloading example3\.html, trying https://nonexistent\.example\.com", -"warning: (Problem : timeout\.|Transient problem: timeout) Will retry in 1 seconds?\. 3 retries left\.", -"warning: (Problem : timeout\.|Transient problem: timeout) Will retry in \d+ seconds?\. 2 retries left\.", -"warning: (Problem : timeout\.|Transient problem: timeout) Will retry in \d+ seconds?\. 
1 (retries|retry) left\.", -"Trying https://raw\.githubusercontent\.com/microsoft/vcpkg-tool/1767aaee7b229c609f7ad5cf2f57b6a6cc309fb8/LICENSE\.txt", +"error: curl operation failed with error code 6 \((Couldn't|Could not) resolve (hostname|host name)\)\.", +"error: Not a transient network error, won't retry download from https://nonexistent\.example\.com", +"Trying https://raw\.githubusercontent\.com/microsoft/vcpkg-tool/1767aaee7b229c609f7ad5cf2f57b6a6cc309fb8/LICENSE\.txt" "Successfully downloaded example3\.html", "$" ) -join "`n" @@ -223,7 +205,7 @@ $expected = @( $actual = Run-VcpkgAndCaptureOutput @commonArgs x-download "$TestDownloadsRoot/example3.html" --sha512 65077997890f66f6041bb3284bb7b88e27631411ccbc253201ca4e00c4bcc58c0d77edffda4975498797cc10772c7fd68fbeb13cc4ac493a3471a9d49e5b6f24 --url https://nonexistent.example.com --url https://raw.githubusercontent.com/microsoft/vcpkg-tool/1767aaee7b229c609f7ad5cf2f57b6a6cc309fb8/LICENSE.txt --header "Cache-Control: no-cache" Throw-IfFailed if (-not ($actual -match $expected)) { - throw "Failure: azurl (no), x-block-origin (no), asset-cache (n/a), download (succeed)" + throw "Failure: azurl (no), x-block-origin (no), asset-cache (n/a), download (succeed), headers (cache-control)" } # azurl (no), x-block-origin (yes), asset-cache (n/a), download (n/a) @@ -247,8 +229,10 @@ Refresh-TestRoot $expected = @( "^Trying to download example3\.html using asset cache file://$assetCacheRegex/[0-9a-z]+", "Asset cache miss; trying authoritative source https://localhost:1234/foobar\.html", -"error: curl: \(37\) Couldn't open file [^\n]+", -"error: curl: \(7\) Failed to connect to localhost port 1234( after \d+ ms)?: ((Could not|Couldn't) connect to server|Connection refused)", +"error: curl operation failed with error code 37 \((Couldn't|Could not) read a file:// file\)\.", +"error: Not a transient network error, won't retry download from file://$assetCacheRegex/[0-9a-z]+", +"error: curl operation failed with error code 7 
\((Couldn't|Could not) connect to server\)\.", +"error: Not a transient network error, won't retry download from https://localhost:1234/foobar\.html", "note: If you are using a proxy, please ensure your proxy settings are correct\.", "Possible causes are:", "1\. You are actually using an HTTP proxy, but setting HTTPS_PROXY variable to ``https://address:port``\.", @@ -307,7 +291,11 @@ if (-not ($actual -match $expected)) { $expected = @( "^Trying to download example3\.html using asset cache file://$assetCacheRegex/[0-9a-z]+", "Asset cache miss; trying authoritative source https://raw\.githubusercontent\.com/microsoft/vcpkg-tool/1767aaee7b229c609f7ad5cf2f57b6a6cc309fb8/LICENSE\.txt", -"error: curl: \(37\) Couldn't open file [^\n]+", +"error: curl operation failed with error code 37 \((Couldn't|Could not) read a file:// file\)\.", +"error: Not a transient network error, won't retry download from file://$assetCacheRegex/[0-9a-z]+", +"[^\n]+example3\.html\.\d+\.part: error: download from https://raw\.githubusercontent\.com/microsoft/vcpkg-tool/1767aaee7b229c609f7ad5cf2f57b6a6cc309fb8/LICENSE\.txt had an unexpected hash", +"note: Expected: d06b93c883f8126a04589937a884032df031b05518eed9d433efb6447834df2596aebd500d69b8283e5702d988ed49655ae654c1683c7a4ae58bfa6b92f2b73b", +"note: Actual : 65077997890f66f6041bb3284bb7b88e27631411ccbc253201ca4e00c4bcc58c0d77edffda4975498797cc10772c7fd68fbeb13cc4ac493a3471a9d49e5b6f24", "note: If you are using a proxy, please ensure your proxy settings are correct\.", "Possible causes are:", "1\. You are actually using an HTTP proxy, but setting HTTPS_PROXY variable to ``https://address:port``\.", @@ -317,9 +305,6 @@ $expected = @( "The value set by your proxy might be wrong, or have same ``https://`` prefix issue\.", "3\. 
Your proxy's remote server is out of service\.", "If you believe this is not a temporary download server failure and vcpkg needs to be changed to download this file from a different location, please submit an issue to https://github\.com/Microsoft/vcpkg/issues", -"[^\n]+example3\.html\.\d+\.part: error: download from https://raw\.githubusercontent\.com/microsoft/vcpkg-tool/1767aaee7b229c609f7ad5cf2f57b6a6cc309fb8/LICENSE\.txt had an unexpected hash", -"note: Expected: d06b93c883f8126a04589937a884032df031b05518eed9d433efb6447834df2596aebd500d69b8283e5702d988ed49655ae654c1683c7a4ae58bfa6b92f2b73b", -"note: Actual : 65077997890f66f6041bb3284bb7b88e27631411ccbc253201ca4e00c4bcc58c0d77edffda4975498797cc10772c7fd68fbeb13cc4ac493a3471a9d49e5b6f24", "$" ) -join "`n" $actual = Run-VcpkgAndCaptureOutput @commonArgs x-download "$TestDownloadsRoot/example3.html" --sha512 d06b93c883f8126a04589937a884032df031b05518eed9d433efb6447834df2596aebd500d69b8283e5702d988ed49655ae654c1683c7a4ae58bfa6b92f2b73b --url https://raw.githubusercontent.com/microsoft/vcpkg-tool/1767aaee7b229c609f7ad5cf2f57b6a6cc309fb8/LICENSE.txt "--x-asset-sources=x-azurl,file://$AssetCache,,readwrite" @@ -369,7 +354,8 @@ if (-not ($actual -match $expected)) { Refresh-TestRoot $expected = @( "^Trying to download example3\.html using asset cache file://$assetCacheRegex/[0-9a-z]+", -"error: curl: \(37\) Couldn't open file [^\n]+", +"error: curl operation failed with error code 37 \((Couldn't|Could not) read a file:// file\)\.", +"error: Not a transient network error, won't retry download from file://$assetCacheRegex/[0-9a-z]+", "error: there were no asset cache hits, and x-block-origin blocks trying the authoritative source https://raw\.githubusercontent\.com/microsoft/vcpkg-tool/1767aaee7b229c609f7ad5cf2f57b6a6cc309fb8/LICENSE\.txt", "note: or https://alternate\.example\.com", "note: If you are using a proxy, please ensure your proxy settings are correct\.", diff --git a/azure-pipelines/signing.yml 
b/azure-pipelines/signing.yml index 17c080709d..0b7bf12519 100644 --- a/azure-pipelines/signing.yml +++ b/azure-pipelines/signing.yml @@ -188,7 +188,7 @@ extends: inputs: failOnStderr: true script: | - cmake -DCMAKE_CXX_COMPILER=clang++ -DCMAKE_BUILD_TYPE=Release -DBUILD_TESTING=OFF -DVCPKG_DEVELOPMENT_WARNINGS=ON -DVCPKG_WARNINGS_AS_ERRORS=ON -DVCPKG_BUILD_FUZZING=OFF -DVCPKG_EMBED_GIT_SHA=ON -DVCPKG_OFFICIAL_BUILD=ON -DCMAKE_OSX_DEPLOYMENT_TARGET=10.13 -DCMAKE_OSX_ARCHITECTURES="arm64;x86_64" "-DVCPKG_FMT_URL=$(fmt-tarball-url)" "-DVCPKG_CMAKERC_URL=$(cmakerc-tarball-url)" "-DVCPKG_BASE_VERSION=$VCPKG_BASE_VERSION" "-DVCPKG_VERSION=$(Build.SourceVersion)" "-DVCPKG_STANDALONE_BUNDLE_SHA=$VCPKG_STANDALONE_BUNDLE_SHA" "-DVCPKG_ARTIFACTS_SHA=$VCPKG_ARTIFACTS_SHA" -B "$(Build.BinariesDirectory)/build" 2>&1 + cmake -DCMAKE_CXX_COMPILER=clang++ -DCMAKE_BUILD_TYPE=Release -DBUILD_TESTING=OFF -DVCPKG_DEVELOPMENT_WARNINGS=ON -DVCPKG_WARNINGS_AS_ERRORS=ON -DVCPKG_BUILD_FUZZING=OFF -DVCPKG_EMBED_GIT_SHA=ON -DVCPKG_OFFICIAL_BUILD=ON -DCMAKE_OSX_DEPLOYMENT_TARGET=10.13 -DCMAKE_OSX_ARCHITECTURES="arm64;x86_64" "-DVCPKG_FMT_URL=$(fmt-tarball-url)" "-DVCPKG_CMAKERC_URL=$(cmakerc-tarball-url)" "-DVCPKG_CURL_URL=$(curl-tarball-url)" "-DVCPKG_BASE_VERSION=$VCPKG_BASE_VERSION" "-DVCPKG_VERSION=$(Build.SourceVersion)" "-DVCPKG_STANDALONE_BUNDLE_SHA=$VCPKG_STANDALONE_BUNDLE_SHA" "-DVCPKG_ARTIFACTS_SHA=$VCPKG_ARTIFACTS_SHA" -B "$(Build.BinariesDirectory)/build" 2>&1 make -j 8 -C "$(Build.BinariesDirectory)/build" zip -j "$(Build.ArtifactStagingDirectory)/vcpkg-macos.zip" "$(Build.BinariesDirectory)/build/vcpkg" - job: glibc_build @@ -226,7 +226,7 @@ extends: inlineScript: | az acr login --name vcpkgpmeofficialbuilders --resource-group vcpkg-tool-official-builds --subscription c0f11a1f-38f5-4908-8698-1aa5df75baf3 mkdir -p "$(Agent.TempDirectory)/build" - docker run --rm --mount "type=bind,source=$(Build.Repository.LocalPath),target=/source,readonly" --mount 
"type=bind,source=$(Agent.TempDirectory)/build,target=/build" vcpkgpmeofficialbuilders-c7ajd0chdtfugffn.azurecr.io/vcpkg/vcpkg-build-linux-amd64:2025-07-28 sh -c "cmake -G Ninja -DCMAKE_TOOLCHAIN_FILE=/source/azure-pipelines/vcpkg-linux/toolchain.cmake -DCMAKE_BUILD_TYPE=Release -DBUILD_TESTING=OFF -DVCPKG_DEVELOPMENT_WARNINGS=ON -DVCPKG_WARNINGS_AS_ERRORS=ON -DVCPKG_BUILD_FUZZING=OFF -DVCPKG_EMBED_GIT_SHA=ON -DVCPKG_OFFICIAL_BUILD=ON -DVCPKG_CMAKERC_URL=$(cmakerc-tarball-url) -DVCPKG_FMT_URL=$(fmt-tarball-url) -DVCPKG_STANDALONE_BUNDLE_SHA=$(VCPKG_STANDALONE_BUNDLE_SHA) -DVCPKG_ARTIFACTS_SHA=$(VCPKG_ARTIFACTS_SHA) -DVCPKG_BASE_VERSION=$(VCPKG_BASE_VERSION) -DVCPKG_VERSION=$(Build.SourceVersion) -S /source -B /build 2>&1 && ninja -C /build" + docker run --rm --mount "type=bind,source=$(Build.Repository.LocalPath),target=/source,readonly" --mount "type=bind,source=$(Agent.TempDirectory)/build,target=/build" vcpkgpmeofficialbuilders-c7ajd0chdtfugffn.azurecr.io/vcpkg/vcpkg-build-linux-amd64:2025-11-17 sh -c "cmake -G Ninja -DCMAKE_TOOLCHAIN_FILE=/source/azure-pipelines/vcpkg-linux/toolchain.cmake -DCMAKE_BUILD_TYPE=Release -DBUILD_TESTING=OFF -DVCPKG_DEVELOPMENT_WARNINGS=ON -DVCPKG_WARNINGS_AS_ERRORS=ON -DVCPKG_BUILD_FUZZING=OFF -DVCPKG_EMBED_GIT_SHA=ON -DVCPKG_OFFICIAL_BUILD=ON -DVCPKG_CMAKERC_URL=$(cmakerc-tarball-url) -DVCPKG_FMT_URL=$(fmt-tarball-url) -DVCPKG_CURL_URL=$(curl-tarball-url) -DVCPKG_STANDALONE_BUNDLE_SHA=$(VCPKG_STANDALONE_BUNDLE_SHA) -DVCPKG_ARTIFACTS_SHA=$(VCPKG_ARTIFACTS_SHA) -DVCPKG_BASE_VERSION=$(VCPKG_BASE_VERSION) -DVCPKG_VERSION=$(Build.SourceVersion) -S /source -B /build 2>&1 && ninja -C /build" mv "$(Agent.TempDirectory)/build/vcpkg" "$(Build.ArtifactStagingDirectory)/vcpkg-glibc" - job: muslc_build displayName: 'muslc (Alpine) Build' @@ -263,7 +263,7 @@ extends: inlineScript: | az acr login --name vcpkgpmeofficialbuilders --resource-group vcpkg-tool-official-builds --subscription c0f11a1f-38f5-4908-8698-1aa5df75baf3 mkdir -p 
"$(Agent.TempDirectory)/build" - docker run --rm --mount "type=bind,source=$(Build.Repository.LocalPath),target=/source,readonly" --mount "type=bind,source=$(Agent.TempDirectory)/build,target=/build" vcpkgpmeofficialbuilders-c7ajd0chdtfugffn.azurecr.io/vcpkg/vcpkg-build-alpine:3.16 sh -c "cmake -G Ninja -DCMAKE_BUILD_TYPE=Release -DBUILD_TESTING=OFF -DVCPKG_DEVELOPMENT_WARNINGS=ON -DVCPKG_WARNINGS_AS_ERRORS=ON -DVCPKG_BUILD_FUZZING=OFF -DVCPKG_EMBED_GIT_SHA=ON -DVCPKG_OFFICIAL_BUILD=ON -DCMAKE_CXX_FLAGS=\"-static -s -static-libgcc -static-libstdc++\" -DVCPKG_CMAKERC_URL=$(cmakerc-tarball-url) -DVCPKG_FMT_URL=$(fmt-tarball-url) -DVCPKG_STANDALONE_BUNDLE_SHA=$(VCPKG_STANDALONE_BUNDLE_SHA) -DVCPKG_ARTIFACTS_SHA=$(VCPKG_ARTIFACTS_SHA) -DVCPKG_BASE_VERSION=$(VCPKG_BASE_VERSION) -DVCPKG_VERSION=$(Build.SourceVersion) -S /source -B /build 2>&1 && ninja -C /build" + docker run --rm --mount "type=bind,source=$(Build.Repository.LocalPath),target=/source,readonly" --mount "type=bind,source=$(Agent.TempDirectory)/build,target=/build" vcpkgpmeofficialbuilders-c7ajd0chdtfugffn.azurecr.io/vcpkg/vcpkg-build-alpine:3.16.1 sh -c "cmake -G Ninja -DCMAKE_BUILD_TYPE=Release -DBUILD_TESTING=OFF -DVCPKG_DEVELOPMENT_WARNINGS=ON -DVCPKG_WARNINGS_AS_ERRORS=ON -DVCPKG_BUILD_FUZZING=OFF -DVCPKG_EMBED_GIT_SHA=ON -DVCPKG_OFFICIAL_BUILD=ON -DCMAKE_CXX_FLAGS=\"-s -static-libgcc -static-libstdc++\" -DVCPKG_CMAKERC_URL=$(cmakerc-tarball-url) -DVCPKG_FMT_URL=$(fmt-tarball-url) -DVCPKG_CURL_URL=$(curl-tarball-url) -DVCPKG_STANDALONE_BUNDLE_SHA=$(VCPKG_STANDALONE_BUNDLE_SHA) -DVCPKG_ARTIFACTS_SHA=$(VCPKG_ARTIFACTS_SHA) -DVCPKG_BASE_VERSION=$(VCPKG_BASE_VERSION) -DVCPKG_VERSION=$(Build.SourceVersion) -S /source -B /build 2>&1 && ninja -C /build" mv "$(Agent.TempDirectory)/build/vcpkg" "$(Build.ArtifactStagingDirectory)/vcpkg-muslc" - job: glibc_arm64_build displayName: 'glibc Arm64 Build' @@ -301,7 +301,7 @@ extends: inlineScript: | az acr login --name vcpkgpmeofficialbuilders --resource-group 
vcpkg-tool-official-builds --subscription c0f11a1f-38f5-4908-8698-1aa5df75baf3 mkdir -p "$(Agent.TempDirectory)/build" - docker run --rm --mount "type=bind,source=$(Build.Repository.LocalPath),target=/source,readonly" --mount "type=bind,source=$(Agent.TempDirectory)/build,target=/build" vcpkgpmeofficialbuilders-c7ajd0chdtfugffn.azurecr.io/vcpkg/vcpkg-build-linux-arm64:2025-07-28 sh -c "cmake -G Ninja -DCMAKE_TOOLCHAIN_FILE=/source/azure-pipelines/vcpkg-arm64/toolchain.cmake -DCMAKE_BUILD_TYPE=Release -DBUILD_TESTING=OFF -DVCPKG_DEVELOPMENT_WARNINGS=ON -DVCPKG_WARNINGS_AS_ERRORS=ON -DVCPKG_BUILD_FUZZING=OFF -DVCPKG_EMBED_GIT_SHA=ON -DVCPKG_OFFICIAL_BUILD=ON -DVCPKG_CMAKERC_URL=$(cmakerc-tarball-url) -DVCPKG_FMT_URL=$(fmt-tarball-url) -DVCPKG_STANDALONE_BUNDLE_SHA=$(VCPKG_STANDALONE_BUNDLE_SHA) -DVCPKG_ARTIFACTS_SHA=$(VCPKG_ARTIFACTS_SHA) -DVCPKG_BASE_VERSION=$(VCPKG_BASE_VERSION) -DVCPKG_VERSION=$(Build.SourceVersion) -S /source -B /build 2>&1 && ninja -C /build" + docker run --rm --mount "type=bind,source=$(Build.Repository.LocalPath),target=/source,readonly" --mount "type=bind,source=$(Agent.TempDirectory)/build,target=/build" vcpkgpmeofficialbuilders-c7ajd0chdtfugffn.azurecr.io/vcpkg/vcpkg-build-linux-arm64:2025-11-17 sh -c "cmake -G Ninja -DCMAKE_TOOLCHAIN_FILE=/source/azure-pipelines/vcpkg-arm64/toolchain.cmake -DCMAKE_BUILD_TYPE=Release -DBUILD_TESTING=OFF -DVCPKG_DEVELOPMENT_WARNINGS=ON -DVCPKG_WARNINGS_AS_ERRORS=ON -DVCPKG_BUILD_FUZZING=OFF -DVCPKG_EMBED_GIT_SHA=ON -DVCPKG_OFFICIAL_BUILD=ON -DVCPKG_CMAKERC_URL=$(cmakerc-tarball-url) -DVCPKG_FMT_URL=$(fmt-tarball-url) -DVCPKG_CURL_URL=$(curl-tarball-url) -DVCPKG_STANDALONE_BUNDLE_SHA=$(VCPKG_STANDALONE_BUNDLE_SHA) -DVCPKG_ARTIFACTS_SHA=$(VCPKG_ARTIFACTS_SHA) -DVCPKG_BASE_VERSION=$(VCPKG_BASE_VERSION) -DVCPKG_VERSION=$(Build.SourceVersion) -S /source -B /build 2>&1 && ninja -C /build" mv "$(Agent.TempDirectory)/build/vcpkg" "$(Build.ArtifactStagingDirectory)/vcpkg-glibc-arm64" - job: windows_and_sign 
displayName: 'Build Windows binaries and Sign' @@ -371,7 +371,7 @@ extends: script: | call "C:\Program Files\Microsoft Visual Studio\2022\Enterprise\Common7\Tools\VsDevCmd.bat" -arch=amd64 -host_arch=amd64 cmake.exe --version - cmake.exe -G Ninja -DCMAKE_BUILD_TYPE=Release -DBUILD_TESTING=OFF -DVCPKG_DEVELOPMENT_WARNINGS=ON -DVCPKG_WARNINGS_AS_ERRORS=ON -DVCPKG_BUILD_FUZZING=OFF -DVCPKG_BUILD_TLS12_DOWNLOADER=ON -DVCPKG_EMBED_GIT_SHA=ON -DVCPKG_OFFICIAL_BUILD=ON "-DVCPKG_FMT_URL=$(fmt-tarball-url)" "-DVCPKG_CMAKERC_URL=$(cmakerc-tarball-url)" "-DVCPKG_BASE_VERSION=$(VCPKG_BASE_VERSION)" "-DVCPKG_VERSION=$(Build.SourceVersion)" "-DVCPKG_STANDALONE_BUNDLE_SHA=$(VCPKG_STANDALONE_BUNDLE_SHA)" "-DVCPKG_ARTIFACTS_SHA=$(VCPKG_ARTIFACTS_SHA)" -B "$(Build.BinariesDirectory)\amd64" 2>&1 + cmake.exe -G Ninja -DCMAKE_BUILD_TYPE=Release -DBUILD_TESTING=OFF -DVCPKG_DEVELOPMENT_WARNINGS=ON -DVCPKG_WARNINGS_AS_ERRORS=ON -DVCPKG_BUILD_FUZZING=OFF -DVCPKG_BUILD_TLS12_DOWNLOADER=ON -DVCPKG_EMBED_GIT_SHA=ON -DVCPKG_OFFICIAL_BUILD=ON "-DVCPKG_FMT_URL=$(fmt-tarball-url)" "-DVCPKG_CMAKERC_URL=$(cmakerc-tarball-url)" "-DVCPKG_CURL_URL=$(curl-tarball-url)" "-DVCPKG_BASE_VERSION=$(VCPKG_BASE_VERSION)" "-DVCPKG_VERSION=$(Build.SourceVersion)" "-DVCPKG_STANDALONE_BUNDLE_SHA=$(VCPKG_STANDALONE_BUNDLE_SHA)" "-DVCPKG_ARTIFACTS_SHA=$(VCPKG_ARTIFACTS_SHA)" -B "$(Build.BinariesDirectory)\amd64" 2>&1 ninja.exe -C "$(Build.BinariesDirectory)\amd64" - task: CmdLine@2 displayName: "Build vcpkg arm64 with CMake" @@ -380,7 +380,7 @@ extends: script: | call "C:\Program Files\Microsoft Visual Studio\2022\Enterprise\Common7\Tools\VsDevCmd.bat" -arch=arm64 -host_arch=amd64 cmake.exe --version - cmake.exe -G Ninja -DCMAKE_BUILD_TYPE=Release -DBUILD_TESTING=OFF -DVCPKG_DEVELOPMENT_WARNINGS=ON -DVCPKG_WARNINGS_AS_ERRORS=ON -DVCPKG_BUILD_FUZZING=OFF -DVCPKG_BUILD_TLS12_DOWNLOADER=ON -DVCPKG_EMBED_GIT_SHA=ON -DVCPKG_OFFICIAL_BUILD=ON -DVCPKG_PDB_SUFFIX="-arm64" "-DVCPKG_FMT_URL=$(fmt-tarball-url)" 
"-DVCPKG_CMAKERC_URL=$(cmakerc-tarball-url)" "-DVCPKG_BASE_VERSION=$(VCPKG_BASE_VERSION)" "-DVCPKG_VERSION=$(Build.SourceVersion)" "-DVCPKG_STANDALONE_BUNDLE_SHA=$(VCPKG_STANDALONE_BUNDLE_SHA)" "-DVCPKG_ARTIFACTS_SHA=$(VCPKG_ARTIFACTS_SHA)" -B "$(Build.BinariesDirectory)\arm64" 2>&1 + cmake.exe -G Ninja -DCMAKE_BUILD_TYPE=Release -DBUILD_TESTING=OFF -DVCPKG_DEVELOPMENT_WARNINGS=ON -DVCPKG_WARNINGS_AS_ERRORS=ON -DVCPKG_BUILD_FUZZING=OFF -DVCPKG_BUILD_TLS12_DOWNLOADER=ON -DVCPKG_EMBED_GIT_SHA=ON -DVCPKG_OFFICIAL_BUILD=ON -DVCPKG_PDB_SUFFIX="-arm64" "-DVCPKG_FMT_URL=$(fmt-tarball-url)" "-DVCPKG_CMAKERC_URL=$(cmakerc-tarball-url)" "-DVCPKG_CURL_URL=$(curl-tarball-url)" "-DVCPKG_BASE_VERSION=$(VCPKG_BASE_VERSION)" "-DVCPKG_VERSION=$(Build.SourceVersion)" "-DVCPKG_STANDALONE_BUNDLE_SHA=$(VCPKG_STANDALONE_BUNDLE_SHA)" "-DVCPKG_ARTIFACTS_SHA=$(VCPKG_ARTIFACTS_SHA)" -B "$(Build.BinariesDirectory)\arm64" 2>&1 ninja.exe -C "$(Build.BinariesDirectory)\arm64" - task: NuGetToolInstaller@1 inputs: diff --git a/azure-pipelines/vcpkg-alpine/Dockerfile b/azure-pipelines/vcpkg-alpine/Dockerfile index e7e85974ee..07bdffc627 100644 --- a/azure-pipelines/vcpkg-alpine/Dockerfile +++ b/azure-pipelines/vcpkg-alpine/Dockerfile @@ -1,4 +1,4 @@ # The authoritative version of this file is in https://devdiv.visualstudio.com/DevDiv/_git/vcpkg-pme-utils FROM alpine:3.16 -RUN apk add alpine-sdk cmake ninja git curl tar gzip zip && apk upgrade +RUN apk add alpine-sdk cmake ninja git curl tar gzip zip curl-dev && apk upgrade diff --git a/cgmanifest.json b/cgmanifest.json index 19dcc509fe..1244e27517 100644 --- a/cgmanifest.json +++ b/cgmanifest.json @@ -37,6 +37,19 @@ }, "DevelopmentDependency": false, "DependencyRoots": [] + }, + { + "Component": { + "Type": "other", + "other": { + "name": "curl", + "version": "8.17.0", + "downloadUrl": "https://github.com/curl/curl/releases/download/curl-8_17_0/curl-8.17.0.tar.gz", + "hash": 
"88ab4b7aac12b26a6ad32fb0e1a9675288a45894438cb031102ef5d4ab6b33c2bc99cae0c70b71bdfa12eb49762827e2490555114c5eb4a6876b95e1f2a4eb74" + } + }, + "DevelopmentDependency": false, + "DependencyRoots": [] } ] } diff --git a/cmake/FindBoringSSL.cmake b/cmake/FindBoringSSL.cmake new file mode 100644 index 0000000000..62f56b4bc9 --- /dev/null +++ b/cmake/FindBoringSSL.cmake @@ -0,0 +1,38 @@ +if(POLICY CMP0135) + cmake_policy(SET CMP0135 NEW) +endif() + +# This option exists to allow the URI to be replaced with a Microsoft-internal URI in official +# builds which have restricted internet access; see azure-pipelines/signing.yml +# Note that the SHA512 is the same, so vcpkg-tool contributors need not be concerned that we built +# with different content. +if(NOT VCPKG_BORINGSSL_URL) + set(VCPKG_BORINGSSL_URL "https://github.com/google/boringssl/releases/download/0.20251110.0/boringssl-0.20251110.0.tar.gz") +endif() + +include(FetchContent) +find_package(Git REQUIRED) +FetchContent_Declare( + BoringSSL + URL "${VCPKG_BORINGSSL_URL}" + URL_HASH "SHA512=b017d3ae05a7491374c6f6b249220c8dfa6955748084e9701afdfc58c1dc9c9ff25f735510c3ca37f8ce61c9f72f22cf94a7a525ca9f6e1ad4ab3b93652f525b" + PATCH_COMMAND "${GIT_EXECUTABLE}" "--work-tree=." apply "${CMAKE_CURRENT_LIST_DIR}/boringssl_warnings.patch" +) + +if(NOT BoringSSL_FIND_REQUIRED) + message(FATAL_ERROR "BoringSSL must be REQUIRED") +endif() + +FetchContent_MakeAvailable(BoringSSL) + +if(NOT TARGET BoringSSL::ssl) + if(TARGET ssl) + add_library(BoringSSL::ssl ALIAS ssl) + endif() +endif() + +if(NOT TARGET BoringSSL::crypto) + if(TARGET crypto) + add_library(BoringSSL::crypto ALIAS crypto) + endif() +endif() diff --git a/cmake/FindLibCURL.cmake b/cmake/FindLibCURL.cmake new file mode 100644 index 0000000000..0c1b158270 --- /dev/null +++ b/cmake/FindLibCURL.cmake @@ -0,0 +1,133 @@ +# This option exists to allow contributors to use an external libcurl installation, +# but it should not be used in official builds. 
+option(VCPKG_DEPENDENCY_EXTERNAL_LIBCURL "Use an external version of the libcurl library" OFF) + +if(POLICY CMP0135) + cmake_policy(SET CMP0135 NEW) +endif() + +if (VCPKG_DEPENDENCY_EXTERNAL_LIBCURL) + find_package(CURL REQUIRED) + return() +endif() + +# This option exists to allow the URI to be replaced with a Microsoft-internal URI in official +# builds which have restricted internet access; see azure-pipelines/signing.yml +# Note that the SHA512 is the same, so vcpkg-tool contributors need not be concerned that we built +# with different content. +if(NOT VCPKG_LIBCURL_URL) + set(VCPKG_LIBCURL_URL "https://github.com/curl/curl/releases/download/curl-8_17_0/curl-8.17.0.tar.gz") +endif() + +include(FetchContent) +FetchContent_Declare( + LibCURL + URL "${VCPKG_LIBCURL_URL}" + URL_HASH "SHA512=88ab4b7aac12b26a6ad32fb0e1a9675288a45894438cb031102ef5d4ab6b33c2bc99cae0c70b71bdfa12eb49762827e2490555114c5eb4a6876b95e1f2a4eb74" +) + +if(NOT LibCURL_FIND_REQUIRED) + message(FATAL_ERROR "LibCURL must be REQUIRED") +endif() + +# This is in function() so no need to backup the variables +function(get_libcurl) + set(BUILD_CURL_EXE OFF) + set(BUILD_EXAMPLES OFF) + set(BUILD_LIBCURL_DOCS OFF) + set(BUILD_MISC_DOCS OFF) + set(BUILD_SHARED_LIBS OFF) + set(BUILD_TESTING OFF) + set(CURL_ENABLE_EXPORT_TARGET OFF) + set(CURL_USE_LIBSSH2 OFF) + set(CURL_USE_LIBPSL OFF) + if (WIN32) + set(CURL_USE_SCHANNEL ON) + set(CURL_USE_OPENSSL OFF) + elseif(APPLE) + set(CURL_USE_SECTRANSP ON) + set(CURL_USE_OPENSSL OFF) + elseif(UNIX) + set(CURL_USE_OPENSSL ON) + set(CURL_USE_SCHANNEL OFF) + set(CURL_USE_SECTRANSP OFF) + set(CURL_ENABLE_SSL ON) + endif() + + # vcpkg tool only needs HTTP(S) downloads and uploads (including FTP). + # Disable other protocols and high-surface features in the embedded libcurl + # to reduce the attack surface. + # Keep FILE support enabled for file:// URLs if ever needed. 
+ set(CURL_DISABLE_GOPHER ON) + set(CURL_DISABLE_IMAP ON) + set(CURL_DISABLE_IPFS ON) + set(CURL_DISABLE_LDAP ON) + set(CURL_DISABLE_LDAPS ON) + set(CURL_DISABLE_MQTT ON) + set(CURL_DISABLE_POP3 ON) + set(CURL_DISABLE_RTSP ON) + set(CURL_DISABLE_SMB ON) + set(CURL_DISABLE_SMTP ON) + set(CURL_DISABLE_TELNET ON) + set(CURL_DISABLE_TFTP ON) + set(CURL_DISABLE_WEBSOCKETS ON) + + # Extra HTTP-related features that vcpkg does not rely on. + set(CURL_DISABLE_ALTSVC ON) + set(CURL_DISABLE_HSTS ON) + set(CURL_DISABLE_DOH ON) + set(CURL_DISABLE_AWS ON) + set(CURL_DISABLE_HEADERS_API ON) + set(CURL_DISABLE_GETOPTIONS ON) + set(CURL_DISABLE_LIBCURL_OPTION ON) + set(CURL_DISABLE_NETRC ON) + set(CURL_DISABLE_PROGRESS_METER ON) + set(CURL_DISABLE_SHUFFLE_DNS ON) + set(CURL_DISABLE_SOCKETPAIR ON) + set(CURL_DISABLE_VERBOSE_STRINGS ON) + + set(ENABLE_CURL_MANUAL OFF) + set(ENABLE_UNICODE ON) + set(PICKY_COMPILER OFF) + set(USE_NGHTTP2 OFF) + set(USE_LIBIDN2 OFF) + set(CMAKE_DISABLE_FIND_PACKAGE_Perl ON) + set(CMAKE_DISABLE_FIND_PACKAGE_ZLIB ON) + set(CMAKE_DISABLE_FIND_PACKAGE_LibPSL ON) + set(CMAKE_DISABLE_FIND_PACKAGE_LibSSH2 ON) + set(CMAKE_DISABLE_FIND_PACKAGE_Brotli ON) + set(CMAKE_DISABLE_FIND_PACKAGE_Zstd ON) + set(CMAKE_DISABLE_FIND_PACKAGE_NGHTTP2 ON) + set(CMAKE_DISABLE_FIND_PACKAGE_Libidn2 ON) + if(MSVC) + string(APPEND CMAKE_C_FLAGS " /wd6101") + string(APPEND CMAKE_C_FLAGS " /wd6011") + string(APPEND CMAKE_C_FLAGS " /wd6054") + string(APPEND CMAKE_C_FLAGS " /wd6287") + string(APPEND CMAKE_C_FLAGS " /wd6323") + string(APPEND CMAKE_C_FLAGS " /wd6385") + string(APPEND CMAKE_C_FLAGS " /wd6387") + string(APPEND CMAKE_C_FLAGS " /wd28182") + string(APPEND CMAKE_C_FLAGS " /wd28251") + string(APPEND CMAKE_C_FLAGS " /wd28301") + else() + string(APPEND CMAKE_C_FLAGS " -Wno-error") + endif() + FetchContent_MakeAvailable(LibCURL) +endfunction() + +get_libcurl() + +if(NOT TARGET CURL::libcurl) + if(TARGET libcurl_static) + add_library(CURL::libcurl ALIAS libcurl_static) + 
target_compile_definitions(libcurl_static INTERFACE CURL_STATICLIB) + elseif(TARGET libcurl) + add_library(CURL::libcurl ALIAS libcurl) + if(NOT BUILD_SHARED_LIBS) + target_compile_definitions(libcurl INTERFACE CURL_STATICLIB) + endif() + else() + message(FATAL_ERROR "After FetchContent_MakeAvailable(LibCURL) no suitable curl target (libcurl or libcurl_static) was found.") + endif() +endif() diff --git a/cmake/FindOpenSSL.cmake b/cmake/FindOpenSSL.cmake new file mode 100644 index 0000000000..6e24420c6b --- /dev/null +++ b/cmake/FindOpenSSL.cmake @@ -0,0 +1,53 @@ +if(UNIX AND NOT APPLE) + find_package(BoringSSL REQUIRED) + + if(NOT TARGET OpenSSL::SSL) + if(TARGET ssl) + add_library(OpenSSL::SSL ALIAS ssl) + else() + message(FATAL_ERROR "Target 'ssl' not found from BoringSSL") + endif() + endif() + if(NOT TARGET OpenSSL::Crypto) + if(TARGET crypto) + add_library(OpenSSL::Crypto ALIAS crypto) + else() + message(FATAL_ERROR "Target 'crypto' not found from BoringSSL") + endif() + endif() + + FetchContent_GetProperties(BoringSSL) + set(OPENSSL_INCLUDE_DIR "${boringssl_SOURCE_DIR}/include") + set(OPENSSL_LIBRARIES BoringSSL::ssl BoringSSL::crypto) + set(OPENSSL_FOUND TRUE) + set(OPENSSL_VERSION "1.1.1") + + # Pre-fill Curl's checks to avoid linking errors during configuration + # because BoringSSL is being built in the same project and not yet + # available as traditional OpenSSL imported targets inside + # try_compile() projects. + set(HAVE_BORINGSSL TRUE CACHE INTERNAL "") + set(HAVE_AWSLC FALSE CACHE INTERNAL "") + set(HAVE_LIBRESSL FALSE CACHE INTERNAL "") + + # Assume BoringSSL has standard functions but maybe not + # deprecated/obscure ones. Predefining these avoids curl running + # its own feature-detection checks that rely on try_compile with + # imported targets, which does not work reliably in this + # superbuild-style configuration. 
+ set(HAVE_SSL_SET0_WBIO TRUE CACHE INTERNAL "") + set(HAVE_OPENSSL_SRP FALSE CACHE INTERNAL "") + set(HAVE_DES_ECB_ENCRYPT FALSE CACHE INTERNAL "") # BoringSSL might not export this + + # QUIC related + set(HAVE_SSL_SET_QUIC_TLS_CBS FALSE CACHE INTERNAL "") + set(HAVE_SSL_SET_QUIC_USE_LEGACY_CODEPOINT FALSE CACHE INTERNAL "") + + return() +endif() + +if(EXISTS "${CMAKE_ROOT}/Modules/FindOpenSSL.cmake") + include("${CMAKE_ROOT}/Modules/FindOpenSSL.cmake") +else() + message(FATAL_ERROR "Could not find standard FindOpenSSL.cmake") +endif() diff --git a/cmake/boringssl_warnings.patch b/cmake/boringssl_warnings.patch new file mode 100644 index 0000000000..5d4f11eccb --- /dev/null +++ b/cmake/boringssl_warnings.patch @@ -0,0 +1,13 @@ +diff --git a/CMakeLists.txt b/CMakeLists.txt +index 5ee0a37cb..b94703b94 100644 +--- a/CMakeLists.txt ++++ b/CMakeLists.txt +@@ -119,7 +119,7 @@ set(CMAKE_C_STANDARD_REQUIRED ON) + if(CMAKE_COMPILER_IS_GNUCXX OR CLANG) + # Note clang-cl is odd and sets both CLANG and MSVC. We base our configuration + # primarily on our normal Clang one. +- set(C_CXX_FLAGS "-Werror -Wformat=2 -Wmissing-field-initializers -Wshadow -Wsign-compare -Wtype-limits -Wvla -Wwrite-strings -fno-strict-aliasing") ++ set(C_CXX_FLAGS "-Wformat=2 -Wmissing-field-initializers -Wshadow -Wsign-compare -Wtype-limits -Wvla -Wwrite-strings -Wno-unused-parameter -fno-strict-aliasing") + if(MSVC) + # clang-cl sets different default warnings than clang. It also treats -Wall + # as -Weverything, to match MSVC. Instead -W3 is the alias for -Wall. 
diff --git a/include/vcpkg/base/contractual-constants.h b/include/vcpkg/base/contractual-constants.h index abbe26abfc..0ddca6cbb4 100644 --- a/include/vcpkg/base/contractual-constants.h +++ b/include/vcpkg/base/contractual-constants.h @@ -591,4 +591,9 @@ namespace vcpkg inline constexpr StringLiteral StatusInstalled = "installed"; inline constexpr StringLiteral StatusNotInstalled = "not-installed"; inline constexpr StringLiteral StatusPurge = "purge"; + + // App Insights JSON response fields + inline constexpr StringLiteral AppInsightsResponseItemsReceived = "itemsReceived"; + inline constexpr StringLiteral AppInsightsResponseItemsAccepted = "itemsAccepted"; + inline constexpr StringLiteral AppInsightsResponseErrors = "errors"; } diff --git a/include/vcpkg/base/curl.h b/include/vcpkg/base/curl.h new file mode 100644 index 0000000000..c93955e3c6 --- /dev/null +++ b/include/vcpkg/base/curl.h @@ -0,0 +1,70 @@ +#pragma once + +#include + +#include + +#include + +VCPKG_MSVC_WARNING(push) +// note: disable warning triggered by curl headers +// ws2tcpip.h(968): warning C6101: Returning uninitialized memory '*Mtu': A successful path through the function does +// not set the named _Out_ parameter. +VCPKG_MSVC_WARNING(disable : 6101) +#include +#include +VCPKG_MSVC_WARNING(pop) + +namespace vcpkg +{ + CURLcode get_curl_global_init_status() noexcept; + void curl_set_system_ssl_root_certs(CURL* curl); + + struct CurlEasyHandle + { + CurlEasyHandle(); + CurlEasyHandle(CurlEasyHandle&& other) noexcept; + CurlEasyHandle& operator=(CurlEasyHandle&& other) noexcept; + ~CurlEasyHandle(); + + CURL* get(); + + private: + CURL* m_ptr = nullptr; + }; + + struct CurlMultiHandle + { + CurlMultiHandle(); + CurlMultiHandle(CurlMultiHandle&& other) noexcept; + CurlMultiHandle& operator=(CurlMultiHandle&& other) noexcept; + ~CurlMultiHandle(); + + // Adds an easy handle to the multi handle but doesn't take ownership of it. 
+ // Makes sure that the easy handle is removed from the multi handle on cleanup. + void add_easy_handle(CurlEasyHandle& easy_handle); + + CURLM* get(); + + private: + CURLM* m_ptr = nullptr; + std::vector m_easy_handles; + }; + + struct CurlHeaders + { + CurlHeaders() = default; + CurlHeaders(View headers); + CurlHeaders(CurlHeaders&& other) noexcept; + CurlHeaders& operator=(CurlHeaders&& other) noexcept; + ~CurlHeaders(); + + curl_slist* get() const; + + private: + curl_slist* m_headers = nullptr; + }; + + constexpr char vcpkg_curl_user_agent[] = + "vcpkg/" VCPKG_BASE_VERSION_AS_STRING "-" VCPKG_VERSION_AS_STRING " (curl)"; +} diff --git a/include/vcpkg/base/downloads.h b/include/vcpkg/base/downloads.h index 068ab463b3..50300cfd04 100644 --- a/include/vcpkg/base/downloads.h +++ b/include/vcpkg/base/downloads.h @@ -37,20 +37,9 @@ namespace vcpkg View azure_blob_headers(); - // Parses a curl output line for curl invoked with - // -w "PREFIX%{http_code} %{exitcode} %{errormsg}" - // with specific handling for curl version < 7.75.0 which does not understand %{exitcode} %{errormsg} - // If the line is malformed for any reason, no entry to http_codes is added. - // Returns: true if the new version of curl's output with exitcode and errormsg was parsed; otherwise, false. 
- bool parse_curl_status_line(DiagnosticContext& context, - std::vector& http_codes, - StringLiteral prefix, - StringView this_line); - std::vector download_files_no_cache(DiagnosticContext& context, View> url_pairs, - View headers, - View secrets); + View headers); bool submit_github_dependency_graph_snapshot(DiagnosticContext& context, const Optional& maybe_github_server_url, @@ -58,19 +47,9 @@ namespace vcpkg const std::string& github_repository, const Json::Object& snapshot); - Optional invoke_http_request(DiagnosticContext& context, - StringLiteral method, - View headers, - StringView url, - View secrets, - StringView data = {}); - std::string format_url_query(StringView base_url, View query_params); - std::vector url_heads(DiagnosticContext& context, - View urls, - View headers, - View secrets); + std::vector url_heads(DiagnosticContext& context, View urls, View headers); struct AssetCachingSettings { @@ -107,7 +86,6 @@ namespace vcpkg bool store_to_asset_cache(DiagnosticContext& context, StringView raw_url, const SanitizedUrl& sanitized_url, - StringLiteral method, View headers, const Path& file); diff --git a/include/vcpkg/base/files.h b/include/vcpkg/base/files.h index 2c4ee81e3f..32da65659b 100644 --- a/include/vcpkg/base/files.h +++ b/include/vcpkg/base/files.h @@ -103,6 +103,8 @@ namespace vcpkg // reads any remaining chunks of the file; used to implement read_to_end void read_to_end_suffix( std::string& output, std::error_code& ec, char* buffer, size_t buffer_size, size_t last_read); + uint64_t size(LineInfo li) const; + uint64_t size(std::error_code& ec) const; }; struct WriteFilePointer : FilePointer diff --git a/include/vcpkg/base/message-data.inc.h b/include/vcpkg/base/message-data.inc.h index cf2d292fb8..d1e6a781b5 100644 --- a/include/vcpkg/base/message-data.inc.h +++ b/include/vcpkg/base/message-data.inc.h @@ -989,22 +989,16 @@ DECLARE_MESSAGE(CreationFailed, (msg::path), "", "Creating {path} failed.") DECLARE_MESSAGE(CurlFailedGeneric, 
(msg::exit_code), "curl is the name of a program, see curl.se.", - "curl operation failed with error code {exit_code}.") + "curl operation failed with error code {exit_code}") +DECLARE_MESSAGE(CurlDownloadTimeout, (), "", "Download timed out.") +DECLARE_MESSAGE(CurlFailedHttpResponse, + (msg::exit_code), + "curl is the name of a program, see curl.se.", + "curl operation failed with HTTP response code {exit_code}.") DECLARE_MESSAGE(CurlFailedToPut, - (msg::exit_code, msg::url), - "curl is the name of a program, see curl.se", - "curl failed to put file to {url} with exit code {exit_code}.") -DECLARE_MESSAGE(CurlFailedToPutHttp, - (msg::exit_code, msg::url, msg::value), + (msg::url, msg::value), "curl is the name of a program, see curl.se. {value} is an HTTP status code", - "curl failed to put file to {url} with exit code {exit_code} and http code {value}.") -DECLARE_MESSAGE( - CurlFailedToReturnExpectedNumberOfExitCodes, - (msg::exit_code, msg::command_line), - "", - "curl failed to return the expected number of exit codes; this can happen if something terminates curl " - "before it has finished. curl exited with {exit_code} which is normally the result code for the last operation, " - "but may be the result of a crash. 
The command line was {command_line}, and all output is below:") + "curl failed to PUT file to {url} with response code {value}.") DECLARE_MESSAGE(CurrentCommitBaseline, (msg::commit_sha), "", @@ -1086,10 +1080,6 @@ DECLARE_MESSAGE( (msg::sha), "", "failing download because the expected SHA512 was all zeros, please change the expected SHA512 to: {sha}") -DECLARE_MESSAGE(DownloadFailedRetrying, - (msg::value, msg::url), - "{value} is a number of milliseconds", - "Download {url} failed -- retrying after {value}ms") DECLARE_MESSAGE(DownloadFailedStatusCode, (msg::url, msg::value), "{value} is an HTTP status code", @@ -1126,6 +1116,18 @@ DECLARE_MESSAGE(DownloadingVcpkgStandaloneBundle, (msg::version), "", "Downloadi DECLARE_MESSAGE(DownloadingVcpkgStandaloneBundleLatest, (), "", "Downloading latest standalone bundle.") DECLARE_MESSAGE(DownloadingTools, (msg::count), "", "Downloading {count} tools") DECLARE_MESSAGE(DownloadOrUrl, (msg::url), "", "or {url}") +DECLARE_MESSAGE(DownloadTransientErrorRetry, + (msg::count, msg::value), + "{value} is the maximum number of attempts to download a file", + "Attempt {count} of {value}, retrying download.") +DECLARE_MESSAGE(DownloadTransientErrorRetriesExhausted, + (msg::url), + "", + "Reached maximum number of attempts, won't retry download from {url}.") +DECLARE_MESSAGE(DownloadNotTransientErrorWontRetry, + (msg::url), + "", + "Not a transient network error, won't retry download from {url}") DECLARE_MESSAGE(DownloadTryingAuthoritativeSource, (msg::url), "", "Trying {url}") DECLARE_MESSAGE(DownloadRootsDir, (msg::env_var), "", "Downloads directory (default: {env_var})") DECLARE_MESSAGE(DownloadSuccesful, (msg::path), "", "Successfully downloaded {path}") @@ -1133,10 +1135,6 @@ DECLARE_MESSAGE(DownloadSuccesfulUploading, (msg::path, msg::url), "", "Successfully downloaded {path}, storing to {url}") -DECLARE_MESSAGE(DownloadWinHttpError, - (msg::system_api, msg::exit_code, msg::url), - "", - "{url}: {system_api} failed with exit 
code {exit_code}.") DECLARE_MESSAGE(DuplicateDependencyOverride, (msg::package_name), "", "{package_name} already has an override") DECLARE_MESSAGE(DuplicatedKeyInObj, (msg::value), @@ -2618,7 +2616,7 @@ DECLARE_MESSAGE( "the license is not installed to ${{CURRENT_PACKAGES_DIR}}/share/${{PORT}}/copyright . This can be fixed by adding " "a call to vcpkg_install_copyright. To suppress this message, add set(VCPKG_POLICY_SKIP_COPYRIGHT_CHECK enabled)") DECLARE_MESSAGE(PortBugMissingLicenseFixIt, - (msg ::value), + (msg::value), "{value} is a CMake function call for the user to paste into their file, for example: " "vcpkg_install_copyright(FILE_LIST ${{SOURCE_PATH}}/COPYING ${{SOURCE_PATH}}/LICENSE.txt)", "Consider adding: {value}") @@ -3170,7 +3168,6 @@ DECLARE_MESSAGE(VcpkgUsage, "[]s, or --s should be preserved. @response_file should be localized to be consistent with the message " "named 'ResponseFileCode'.", "usage: vcpkg [--switches] [--options=values] [arguments] @response_file") -DECLARE_MESSAGE(InvalidUri, (msg::value), "{value} is the URI we attempted to parse.", "unable to parse uri: {value}") DECLARE_MESSAGE(VcpkgInVsPrompt, (msg::value, msg::triplet), "'{value}' is a VS prompt", diff --git a/include/vcpkg/metrics.h b/include/vcpkg/metrics.h index 07d750439d..e81ea0c95b 100644 --- a/include/vcpkg/metrics.h +++ b/include/vcpkg/metrics.h @@ -198,7 +198,6 @@ namespace vcpkg extern std::atomic g_should_send_metrics; void flush_global_metrics(const Filesystem&); -#if defined(_WIN32) - void winhttp_upload_metrics(StringView payload); -#endif // ^^^ _WIN32 + bool curl_upload_metrics(const std::string& payload); + bool parse_metrics_response(StringView response_body); } diff --git a/locales/messages.json b/locales/messages.json index 7598020abd..5fd1135e22 100644 --- a/locales/messages.json +++ b/locales/messages.json @@ -568,14 +568,13 @@ "CreatingZipArchive": "Creating zip archive...", "CreationFailed": "Creating {path} failed.", "_CreationFailed.comment": "An 
example of {path} is /foo/bar.", - "CurlFailedGeneric": "curl operation failed with error code {exit_code}.", + "CurlDownloadTimeout": "Download timed out.", + "CurlFailedGeneric": "curl operation failed with error code {exit_code}", "_CurlFailedGeneric.comment": "curl is the name of a program, see curl.se. An example of {exit_code} is 127.", - "CurlFailedToPut": "curl failed to put file to {url} with exit code {exit_code}.", - "_CurlFailedToPut.comment": "curl is the name of a program, see curl.se An example of {exit_code} is 127. An example of {url} is https://github.com/microsoft/vcpkg.", - "CurlFailedToPutHttp": "curl failed to put file to {url} with exit code {exit_code} and http code {value}.", - "_CurlFailedToPutHttp.comment": "curl is the name of a program, see curl.se. {value} is an HTTP status code An example of {exit_code} is 127. An example of {url} is https://github.com/microsoft/vcpkg.", - "CurlFailedToReturnExpectedNumberOfExitCodes": "curl failed to return the expected number of exit codes; this can happen if something terminates curl before it has finished. curl exited with {exit_code} which is normally the result code for the last operation, but may be the result of a crash. The command line was {command_line}, and all output is below:", - "_CurlFailedToReturnExpectedNumberOfExitCodes.comment": "An example of {exit_code} is 127. An example of {command_line} is vcpkg install zlib.", + "CurlFailedHttpResponse": "curl operation failed with HTTP response code {exit_code}.", + "_CurlFailedHttpResponse.comment": "curl is the name of a program, see curl.se. An example of {exit_code} is 127.", + "CurlFailedToPut": "curl failed to PUT file to {url} with response code {value}.", + "_CurlFailedToPut.comment": "curl is the name of a program, see curl.se. 
{value} is an HTTP status code An example of {url} is https://github.com/microsoft/vcpkg.", "CurrentCommitBaseline": "You can use the current commit as a baseline, which is:\n\t\"builtin-baseline\": \"{commit_sha}\"", "_CurrentCommitBaseline.comment": "An example of {commit_sha} is 7cfad47ae9f68b183983090afd6337cd60fd4949.", "CycleDetectedDuring": "cycle detected during {spec}:", @@ -623,10 +622,10 @@ "DownloadFailedHashMismatchZero": "failing download because the expected SHA512 was all zeros, please change the expected SHA512 to: {sha}", "_DownloadFailedHashMismatchZero.comment": "An example of {sha} is eb32643dd2164c72b8a660ef52f1e701bb368324ae461e12d70d6a9aefc0c9573387ee2ed3828037ed62bb3e8f566416a2d3b3827a3928f0bff7c29f7662293e.", "DownloadFailedProxySettings": "If you are using a proxy, please ensure your proxy settings are correct.\nPossible causes are:\n1. You are actually using an HTTP proxy, but setting HTTPS_PROXY variable to `https://address:port`.\nThis is not correct, because `https://` prefix claims the proxy is an HTTPS proxy, while your proxy (v2ray, shadowsocksr, etc...) is an HTTP proxy.\nTry setting `http://address:port` to both HTTP_PROXY and HTTPS_PROXY instead.\n2. If you are using Windows, vcpkg will automatically use your Windows IE Proxy Settings set by your proxy software. See: https://github.com/microsoft/vcpkg-tool/pull/77\nThe value set by your proxy might be wrong, or have same `https://` prefix issue.\n3. 
Your proxy's remote server is out of service.\nIf you believe this is not a temporary download server failure and vcpkg needs to be changed to download this file from a different location, please submit an issue to https://github.com/Microsoft/vcpkg/issues", - "DownloadFailedRetrying": "Download {url} failed -- retrying after {value}ms", - "_DownloadFailedRetrying.comment": "{value} is a number of milliseconds An example of {url} is https://github.com/microsoft/vcpkg.", "DownloadFailedStatusCode": "{url}: failed: status code {value}", "_DownloadFailedStatusCode.comment": "{value} is an HTTP status code An example of {url} is https://github.com/microsoft/vcpkg.", + "DownloadNotTransientErrorWontRetry": "Not a transient network error, won't retry download from {url}", + "_DownloadNotTransientErrorWontRetry.comment": "An example of {url} is https://github.com/microsoft/vcpkg.", "DownloadOrUrl": "or {url}", "_DownloadOrUrl.comment": "An example of {url} is https://github.com/microsoft/vcpkg.", "DownloadRootsDir": "Downloads directory (default: {env_var})", @@ -635,10 +634,12 @@ "_DownloadSuccesful.comment": "An example of {path} is /foo/bar.", "DownloadSuccesfulUploading": "Successfully downloaded {path}, storing to {url}", "_DownloadSuccesfulUploading.comment": "An example of {path} is /foo/bar. 
An example of {url} is https://github.com/microsoft/vcpkg.", + "DownloadTransientErrorRetriesExhausted": "Reached maximum number of attempts, won't retry download from {url}.", + "_DownloadTransientErrorRetriesExhausted.comment": "An example of {url} is https://github.com/microsoft/vcpkg.", + "DownloadTransientErrorRetry": "Attempt {count} of {value}, retrying download.", + "_DownloadTransientErrorRetry.comment": "{value} is the maximum number of attempts to download a file An example of {count} is 42.", "DownloadTryingAuthoritativeSource": "Trying {url}", "_DownloadTryingAuthoritativeSource.comment": "An example of {url} is https://github.com/microsoft/vcpkg.", - "DownloadWinHttpError": "{url}: {system_api} failed with exit code {exit_code}.", - "_DownloadWinHttpError.comment": "An example of {system_api} is CreateProcessW. An example of {exit_code} is 127. An example of {url} is https://github.com/microsoft/vcpkg.", "DownloadedSources": "Downloaded sources for {spec}", "_DownloadedSources.comment": "An example of {spec} is zlib:x64-windows.", "DownloadingAssetShaToFile": "Downloading asset cache entry {sha} -> {path}", @@ -1114,8 +1115,6 @@ "InvalidToolVersion": "Invalid tool version; expected a string containing a substring of between 1 and 3 numbers separated by dots.", "InvalidTriplet": "Invalid triplet: {triplet}", "_InvalidTriplet.comment": "An example of {triplet} is x64-windows.", - "InvalidUri": "unable to parse uri: {value}", - "_InvalidUri.comment": "{value} is the URI we attempted to parse.", "InvalidValueHashAdditionalFiles": "Variable VCPKG_HASH_ADDITIONAL_FILES contains invalid file path: '{path}'. The value must be an absolute path to an existent file.", "_InvalidValueHashAdditionalFiles.comment": "An example of {path} is /foo/bar.", "InvalidValuePostPortfileIncludes": "Variable VCPKG_POST_PORTFILE_INCLUDES contains invalid file path: '{path}'. 
The value must be an absolute path to an existent cmake file.", diff --git a/src/vcpkg-test/downloads.cpp b/src/vcpkg-test/downloads.cpp index 3df1949a8e..1f601505b2 100644 --- a/src/vcpkg-test/downloads.cpp +++ b/src/vcpkg-test/downloads.cpp @@ -120,92 +120,25 @@ TEST_CASE ("parse_split_url_view", "[downloads]") } } -TEST_CASE ("parse_curl_status_line", "[downloads]") -{ - std::vector http_codes; - StringLiteral malformed_examples[] = { - "asdfasdf", // wrong prefix - "curl: unknown --write-out variable: 'exitcode'", // wrong prefixes, and also what old curl does - "curl: unknown --write-out variable: 'errormsg'", - "prefix", // missing spaces - "prefix42", // missing spaces - "prefix42 2", // missing space - "prefix42 2a", // non numeric exitcode - }; - - FullyBufferedDiagnosticContext bdc; - for (auto&& malformed : malformed_examples) - { - REQUIRE(!parse_curl_status_line(bdc, http_codes, "prefix", malformed)); - REQUIRE(http_codes.empty()); - REQUIRE(bdc.empty()); - } - - // old curl output - REQUIRE(!parse_curl_status_line(bdc, http_codes, "prefix", "prefix200 ")); - REQUIRE(http_codes == std::vector{200}); - REQUIRE(bdc.empty()); - http_codes.clear(); - - REQUIRE(!parse_curl_status_line(bdc, http_codes, "prefix", "prefix404 ")); - REQUIRE(http_codes == std::vector{404}); - REQUIRE(bdc.empty()); - http_codes.clear(); - - REQUIRE(!parse_curl_status_line(bdc, http_codes, "prefix", "prefix0 ")); // a failure, but we don't know that yet - REQUIRE(http_codes == std::vector{0}); - REQUIRE(bdc.empty()); - http_codes.clear(); - - // current curl output - REQUIRE(parse_curl_status_line(bdc, http_codes, "prefix", "prefix200 0 ")); - REQUIRE(http_codes == std::vector{200}); - REQUIRE(bdc.empty()); - http_codes.clear(); - - REQUIRE(parse_curl_status_line( - bdc, - http_codes, - "prefix", - "prefix0 60 schannel: SNI or certificate check failed: SEC_E_WRONG_PRINCIPAL (0x80090322) " - "- The target principal name is incorrect.")); - REQUIRE(http_codes == std::vector{0}); - 
REQUIRE(bdc.to_string() == - "error: curl operation failed with error code 60. schannel: SNI or certificate check failed: " - "SEC_E_WRONG_PRINCIPAL (0x80090322) - The target principal name is incorrect."); -} - TEST_CASE ("download_files", "[downloads]") { auto const dst = Test::base_temporary_directory() / "download_files"; - auto const url = [&](std::string l) -> auto { return std::pair(l, dst); }; + real_filesystem.create_directories(dst, VCPKG_LINE_INFO); + + static const std::vector> test_downloads{ + {"unknown://localhost:9/secret", dst / "test1"}, + {"http://localhost:9/not-exists/secret", dst / "test2"}, + }; FullyBufferedDiagnosticContext bdc; std::vector headers; - std::vector secrets; - auto results = download_files_no_cache( - bdc, - std::vector{url("unknown://localhost:9/secret"), url("http://localhost:9/not-exists/secret")}, - headers, - secrets); - REQUIRE(results == std::vector{0, 0}); - auto all_errors = bdc.to_string(); - if (all_errors == "error: curl operation failed with error code 7.") - { - // old curl, this is OK! - } - else - { - // new curl - REQUIRE_THAT( - all_errors, - Catch::Matches("error: curl operation failed with error code 1\\. Protocol \"unknown\" not supported( or " - "disabled in libcurl)?\n" - "error: curl operation failed with error code 7\\. ((Failed to connect to localhost port 9 " - "after [0-9]+ ms: ((Could not|Couldn't) connect to server|Connection refused))" - "|(getsockname\\(\\) failed with errno 22: Invalid argument))", - Catch::CaseSensitive::Yes)); - } + auto results = download_files_no_cache(bdc, test_downloads, headers); + REQUIRE(results == std::vector{-1, -1}); + auto all_errors = Strings::split(bdc.to_string(), '\n'); + REQUIRE(all_errors[0] == "error: curl operation failed with error code 1 (Unsupported protocol)."); + // Old versions of libcurl use "Couldn't" on error messages + REQUIRE((all_errors[1] == "error: curl operation failed with error code 7 (Could not connect to server)." 
|| + all_errors[1] == "error: curl operation failed with error code 7 (Couldn't connect to server).")); } TEST_CASE ("try_parse_curl_max5_size", "[downloads]") @@ -374,7 +307,7 @@ TEST_CASE ("azblob", "[.][azblob]") FullyBufferedDiagnosticContext diagnostics{}; auto plain_put_success = store_to_asset_cache( - diagnostics, plain_put_url, SanitizedUrl{url, {}}, "PUT", azure_blob_headers(), data_filepath); + diagnostics, plain_put_url, SanitizedUrl{url, {}}, azure_blob_headers(), data_filepath); INFO(diagnostics.to_string()); CHECK(plain_put_success); } @@ -393,7 +326,7 @@ TEST_CASE ("azblob", "[.][azblob]") { FullyBufferedDiagnosticContext diagnostics{}; - auto results = download_files_no_cache(diagnostics, url_pairs, azure_blob_headers(), {}); + auto results = download_files_no_cache(diagnostics, url_pairs, azure_blob_headers()); INFO(diagnostics.to_string()); CHECK(results == std::vector{200, 200}); } diff --git a/src/vcpkg-test/metrics.cpp b/src/vcpkg-test/metrics.cpp index dbd24d08a2..e069ae4826 100644 --- a/src/vcpkg-test/metrics.cpp +++ b/src/vcpkg-test/metrics.cpp @@ -256,3 +256,39 @@ TEST_CASE ("payload smoke test", "[metrics]") )json"; REQUIRE(expected == actual); } + +TEST_CASE ("parse metrics response", "[metrics]") +{ + const std::string response = R"json( +{ + "itemsReceived": 1, + "itemsAccepted": 1, + "errors": [] +} +)json"; + auto parsed = parse_metrics_response(response); + CHECK(parsed); + + const std::string response_with_errors = R"json( +{ + "itemsReceived": 2, + "itemsAccepted": 1, + "errors": [ + { + "message": "Invalid payload" + } + ] +} +)json"; + auto parsed_with_errors = parse_metrics_response(response_with_errors); + CHECK(!parsed_with_errors); + + const std::string response_with_errors2 = R"json( +{ + "itemsReceived": 2, + "errors": [] +} +)json"; + auto parsed_with_errors2 = parse_metrics_response(response_with_errors2); + CHECK(!parsed_with_errors2); +} diff --git a/src/vcpkg.cpp b/src/vcpkg.cpp index 0dc64e15ea..3b3b18dda8 100644 --- 
a/src/vcpkg.cpp +++ b/src/vcpkg.cpp @@ -2,6 +2,7 @@ #include #include +#include #include #include #include diff --git a/src/vcpkg/base/curl.cpp b/src/vcpkg/base/curl.cpp new file mode 100644 index 0000000000..5d6d5a6407 --- /dev/null +++ b/src/vcpkg/base/curl.cpp @@ -0,0 +1,247 @@ +#include +#include + +#include +#include + +#if defined(__linux__) +#include +#include +#endif + +#include + +namespace +{ + struct CurlGlobalInit + { + CurlGlobalInit() : init_status(curl_global_init(CURL_GLOBAL_DEFAULT)) { } + ~CurlGlobalInit() { curl_global_cleanup(); } + + CurlGlobalInit(const CurlGlobalInit&) = delete; + CurlGlobalInit(CurlGlobalInit&&) = delete; + CurlGlobalInit& operator=(const CurlGlobalInit&) = delete; + CurlGlobalInit& operator=(CurlGlobalInit&&) = delete; + + CURLcode get_init_status() const { return init_status; } + + private: + CURLcode init_status; + }; + +#if defined(__linux__) + struct CurlCaBundle + { + std::string ca_file; + std::string ca_path; + bool initialized = false; + }; + + bool path_exists(const char* path, bool require_directory) + { + struct stat st; + if (stat(path, &st) != 0) + { + return false; + } + + if (!require_directory) + { + return S_ISREG(st.st_mode) || S_ISLNK(st.st_mode); + } + + return S_ISDIR(st.st_mode); + } + + CurlCaBundle& get_global_curl_ca_bundle() + { + static CurlCaBundle bundle; + if (bundle.initialized) + { + return bundle; + } + + bundle.initialized = true; + + const char* ssl_ca_file = std::getenv("SSL_CERT_FILE"); + const char* ssl_ca_dir = std::getenv("SSL_CERT_DIR"); + + if (ssl_ca_file && *ssl_ca_file) + { + bundle.ca_file = ssl_ca_file; + } + + if (ssl_ca_dir && *ssl_ca_dir) + { + bundle.ca_path = ssl_ca_dir; + } + + // If env vars didn't provide values, probe common Linux locations, + // largely based on Go's crypto/x509 package. + if (bundle.ca_file.empty()) + { + constexpr std::array cert_files = { + "/etc/ssl/certs/ca-certificates.crt", // Debian/Ubuntu/Gentoo etc. 
+ "/etc/pki/tls/certs/ca-bundle.crt", // Fedora/RHEL 6 + "/etc/ssl/ca-bundle.pem", // OpenSUSE + "/etc/pki/tls/cacert.pem", // OpenELEC + "/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem", // CentOS/RHEL 7 + "/etc/ssl/cert.pem", // Alpine Linux + }; + + for (const auto* f : cert_files) + { + if (path_exists(f, false)) + { + bundle.ca_file = f; + break; + } + } + } + + if (bundle.ca_path.empty()) + { + constexpr std::array cert_dirs = { + "/etc/ssl/certs", // SLES10/SLES11 + "/etc/pki/tls/certs", // Fedora/RHEL + }; + + for (const auto* d : cert_dirs) + { + if (path_exists(d, true)) + { + bundle.ca_path = d; + break; + } + } + } + + return bundle; + } +#endif +} + +namespace vcpkg +{ + CURLcode get_curl_global_init_status() noexcept + { + static CurlGlobalInit g_curl_global_init; + return g_curl_global_init.get_init_status(); + } + + void curl_set_system_ssl_root_certs(CURL* curl) + { +#if defined(__linux__) + if (!curl) + { + return; + } + + CurlCaBundle& bundle = get_global_curl_ca_bundle(); + if (!bundle.ca_file.empty()) + { + curl_easy_setopt(curl, CURLOPT_CAINFO, bundle.ca_file.c_str()); + } + + if (!bundle.ca_path.empty()) + { + curl_easy_setopt(curl, CURLOPT_CAPATH, bundle.ca_path.c_str()); + } +#else + (void)curl; +#endif + } + + CurlEasyHandle::CurlEasyHandle() { get_curl_global_init_status(); } + CurlEasyHandle::CurlEasyHandle(CurlEasyHandle&& other) noexcept : m_ptr(std::exchange(other.m_ptr, nullptr)) { } + CurlEasyHandle& CurlEasyHandle::operator=(CurlEasyHandle&& other) noexcept + { + m_ptr = std::exchange(other.m_ptr, nullptr); + return *this; + } + CurlEasyHandle::~CurlEasyHandle() + { + if (m_ptr) + { + curl_easy_cleanup(m_ptr); + } + } + CURL* CurlEasyHandle::get() + { + if (!m_ptr) + { + m_ptr = curl_easy_init(); + if (!m_ptr) + { + Checks::unreachable(VCPKG_LINE_INFO); + } + } + return m_ptr; + } + + CurlMultiHandle::CurlMultiHandle() { get_curl_global_init_status(); } + CurlMultiHandle::CurlMultiHandle(CurlMultiHandle&& other) noexcept + : 
m_ptr(std::exchange(other.m_ptr, nullptr)), m_easy_handles(std::move(other.m_easy_handles)) + { + } + CurlMultiHandle& CurlMultiHandle::operator=(CurlMultiHandle&& other) noexcept + { + m_ptr = std::exchange(other.m_ptr, nullptr); + m_easy_handles = std::move(other.m_easy_handles); + return *this; + } + CurlMultiHandle::~CurlMultiHandle() + { + for (auto* easy_handle : m_easy_handles) + { + curl_multi_remove_handle(m_ptr, easy_handle); + } + + if (m_ptr) + { + curl_multi_cleanup(m_ptr); + } + } + void CurlMultiHandle::add_easy_handle(CurlEasyHandle& easy_handle) + { + auto* handle = easy_handle.get(); + if (curl_multi_add_handle(this->get(), handle) == CURLM_OK) + { + m_easy_handles.push_back(handle); + } + } + CURLM* CurlMultiHandle::get() + { + if (!m_ptr) + { + m_ptr = curl_multi_init(); + if (!m_ptr) + { + Checks::unreachable(VCPKG_LINE_INFO); + } + } + return m_ptr; + } + + CurlHeaders::CurlHeaders(View headers) + { + for (const auto& header : headers) + { + m_headers = curl_slist_append(m_headers, header.c_str()); + } + } + CurlHeaders::CurlHeaders(CurlHeaders&& other) noexcept : m_headers(std::exchange(other.m_headers, nullptr)) { } + CurlHeaders& CurlHeaders::operator=(CurlHeaders&& other) noexcept + { + m_headers = std::exchange(other.m_headers, nullptr); + return *this; + } + CurlHeaders::~CurlHeaders() + { + if (m_headers) + { + curl_slist_free_all(m_headers); + } + } + curl_slist* CurlHeaders::get() const { return m_headers; } +} diff --git a/src/vcpkg/base/downloads.cpp b/src/vcpkg/base/downloads.cpp index 4019e87cae..9cda975e25 100644 --- a/src/vcpkg/base/downloads.cpp +++ b/src/vcpkg/base/downloads.cpp @@ -1,5 +1,6 @@ #include #include +#include #include #include #include @@ -15,24 +16,24 @@ #include #include -#include - #include using namespace vcpkg; namespace { - constexpr StringLiteral vcpkg_curl_user_agent_header = - "User-Agent: vcpkg/" VCPKG_BASE_VERSION_AS_STRING "-" VCPKG_VERSION_AS_STRING " (curl)"; - - void add_curl_headers(Command& cmd, 
View headers) + void set_common_curl_easy_options(CurlEasyHandle& easy_handle, StringView url, const CurlHeaders& request_headers) { - cmd.string_arg("-H").string_arg(vcpkg_curl_user_agent_header); - for (auto&& header : headers) - { - cmd.string_arg("-H").string_arg(header); - } + auto* curl = easy_handle.get(); + curl_easy_setopt(curl, CURLOPT_USERAGENT, vcpkg_curl_user_agent); + curl_easy_setopt(curl, CURLOPT_URL, url_encode_spaces(url).c_str()); + curl_easy_setopt(curl, + CURLOPT_FOLLOWLOCATION, + 2L); // Follow redirects, change request method based on HTTP response code. + // https://curl.se/libcurl/c/CURLOPT_FOLLOWLOCATION.html#CURLFOLLOWOBEYCODE + curl_easy_setopt(curl, CURLOPT_HTTPHEADER, request_headers.get()); + curl_easy_setopt(curl, CURLOPT_HEADEROPT, CURLHEADER_SEPARATE); // don't send headers to proxy CONNECT + curl_set_system_ssl_root_certs(curl); } } @@ -44,547 +45,6 @@ namespace vcpkg replace_secrets(m_sanitized_url, secrets); } -#if defined(_WIN32) - struct FormatMessageHLocalAlloc - { - LPWSTR buffer = nullptr; - - ~FormatMessageHLocalAlloc() - { - if (buffer) - { - LocalFree(buffer); - } - } - }; - - static LocalizedString format_winhttp_last_error_message(StringLiteral api_name, - const SanitizedUrl& sanitized_url, - DWORD last_error) - { - const HMODULE winhttp_module = GetModuleHandleW(L"winhttp.dll"); - FormatMessageHLocalAlloc alloc; - DWORD tchars_excluding_terminating_null = 0; - if (winhttp_module) - { - tchars_excluding_terminating_null = - FormatMessageW(FORMAT_MESSAGE_ALLOCATE_BUFFER | FORMAT_MESSAGE_FROM_HMODULE, - winhttp_module, - last_error, - 0, - reinterpret_cast(&alloc.buffer), - 0, - nullptr); - } - - auto result = msg::format( - msgDownloadWinHttpError, msg::system_api = api_name, msg::exit_code = last_error, msg::url = sanitized_url); - if (tchars_excluding_terminating_null && alloc.buffer) - { - while (tchars_excluding_terminating_null != 0 && - (alloc.buffer[tchars_excluding_terminating_null - 1] == L'\r' || - 
alloc.buffer[tchars_excluding_terminating_null - 1] == L'\n')) - { - --tchars_excluding_terminating_null; - } - - tchars_excluding_terminating_null = static_cast( - std::remove(alloc.buffer, alloc.buffer + tchars_excluding_terminating_null, L'\r') - alloc.buffer); - result.append_raw(' ').append_raw(Strings::to_utf8(alloc.buffer, tchars_excluding_terminating_null)); - } - - return result; - } - - static LocalizedString format_winhttp_last_error_message(StringLiteral api_name, const SanitizedUrl& sanitized_url) - { - return format_winhttp_last_error_message(api_name, sanitized_url, GetLastError()); - } - - static void maybe_emit_winhttp_progress(MessageSink& machine_readable_progress, - const Optional& maybe_content_length, - std::chrono::steady_clock::time_point& last_write, - unsigned long long total_downloaded_size) - { - if (const auto content_length = maybe_content_length.get()) - { - const auto now = std::chrono::steady_clock::now(); - if ((now - last_write) >= std::chrono::milliseconds(100)) - { - const double percent = - (static_cast(total_downloaded_size) / static_cast(*content_length)) * 100; - machine_readable_progress.println(LocalizedString::from_raw(fmt::format("{:.2f}%", percent))); - last_write = now; - } - } - } - - struct WinHttpHandle - { - WinHttpHandle() = default; - WinHttpHandle(const WinHttpHandle&) = delete; - WinHttpHandle& operator=(const WinHttpHandle&) = delete; - - void require_null_handle() const - { - if (h) - { - Checks::unreachable(VCPKG_LINE_INFO, "WinHTTP handle type confusion"); - } - } - - void require_created_handle() const - { - if (!h) - { - Checks::unreachable(VCPKG_LINE_INFO, "WinHTTP handle not created"); - } - } - - bool Connect(DiagnosticContext& context, - const WinHttpHandle& session, - StringView hostname, - INTERNET_PORT port, - const SanitizedUrl& sanitized_url) - { - require_null_handle(); - session.require_created_handle(); - h = WinHttpConnect(session.h, Strings::to_utf16(hostname).c_str(), port, 0); - if (h) - { 
- return true; - } - - context.report_error(format_winhttp_last_error_message("WinHttpConnect", sanitized_url)); - return false; - } - - bool Open(DiagnosticContext& context, - const SanitizedUrl& sanitized_url, - _In_opt_z_ LPCWSTR pszAgentW, - _In_ DWORD dwAccessType, - _In_opt_z_ LPCWSTR pszProxyW, - _In_opt_z_ LPCWSTR pszProxyBypassW, - _In_ DWORD dwFlags) - { - require_null_handle(); - h = WinHttpOpen(pszAgentW, dwAccessType, pszProxyW, pszProxyBypassW, dwFlags); - if (h) - { - return true; - } - - context.report_error(format_winhttp_last_error_message("WinHttpOpen", sanitized_url)); - return false; - } - - bool OpenRequest(DiagnosticContext& context, - const WinHttpHandle& hConnect, - const SanitizedUrl& sanitized_url, - IN LPCWSTR pwszVerb, - StringView path_query_fragment, - IN LPCWSTR pwszVersion, - IN LPCWSTR pwszReferrer OPTIONAL, - IN LPCWSTR FAR* ppwszAcceptTypes OPTIONAL, - IN DWORD dwFlags) - { - require_null_handle(); - h = WinHttpOpenRequest(hConnect.h, - pwszVerb, - Strings::to_utf16(path_query_fragment).c_str(), - pwszVersion, - pwszReferrer, - ppwszAcceptTypes, - dwFlags); - if (h) - { - return true; - } - - context.report_error(format_winhttp_last_error_message("WinHttpOpenRequest", sanitized_url)); - return false; - } - - bool SendRequest(DiagnosticContext& context, - const SanitizedUrl& sanitized_url, - _In_reads_opt_(dwHeadersLength) LPCWSTR lpszHeaders, - IN DWORD dwHeadersLength, - _In_reads_bytes_opt_(dwOptionalLength) LPVOID lpOptional, - IN DWORD dwOptionalLength, - IN DWORD dwTotalLength, - IN DWORD_PTR dwContext) const - { - require_created_handle(); - if (WinHttpSendRequest( - h, lpszHeaders, dwHeadersLength, lpOptional, dwOptionalLength, dwTotalLength, dwContext)) - { - return true; - } - - context.report_error(format_winhttp_last_error_message("WinHttpSendRequest", sanitized_url)); - return false; - } - - bool ReceiveResponse(DiagnosticContext& context, const SanitizedUrl& url) - { - require_created_handle(); - if 
(WinHttpReceiveResponse(h, NULL)) - { - return true; - } - - context.report_error(format_winhttp_last_error_message("WinHttpReceiveResponse", url)); - return false; - } - - bool SetTimeouts(DiagnosticContext& context, - const SanitizedUrl& sanitized_url, - int nResolveTimeout, - int nConnectTimeout, - int nSendTimeout, - int nReceiveTimeout) const - { - require_created_handle(); - if (WinHttpSetTimeouts(h, nResolveTimeout, nConnectTimeout, nSendTimeout, nReceiveTimeout)) - { - return true; - } - - context.report_error(format_winhttp_last_error_message("WinHttpSetTimeouts", sanitized_url)); - return false; - } - - bool SetOption(DiagnosticContext& context, - const SanitizedUrl& sanitized_url, - DWORD dwOption, - LPVOID lpBuffer, - DWORD dwBufferLength) const - { - require_created_handle(); - if (WinHttpSetOption(h, dwOption, lpBuffer, dwBufferLength)) - { - return true; - } - - context.report_error(format_winhttp_last_error_message("WinHttpSetOption", sanitized_url)); - return false; - } - - DWORD QueryHeaders(DiagnosticContext& context, - const SanitizedUrl& sanitized_url, - DWORD dwInfoLevel, - LPWSTR pwszName, - LPVOID lpBuffer, - LPDWORD lpdwBufferLength, - LPDWORD lpdwIndex) const - { - require_created_handle(); - if (WinHttpQueryHeaders(h, dwInfoLevel, pwszName, lpBuffer, lpdwBufferLength, lpdwIndex)) - { - return 0; - } - - DWORD last_error = GetLastError(); - context.report_error(format_winhttp_last_error_message("WinHttpQueryHeaders", sanitized_url, last_error)); - return last_error; - } - - bool ReadData(DiagnosticContext& context, - const SanitizedUrl& sanitized_url, - LPVOID buffer, - DWORD dwNumberOfBytesToRead, - DWORD* numberOfBytesRead) - { - require_created_handle(); - if (WinHttpReadData(h, buffer, dwNumberOfBytesToRead, numberOfBytesRead)) - { - return true; - } - - context.report_error(format_winhttp_last_error_message("WinHttpReadData", sanitized_url)); - return false; - } - - ~WinHttpHandle() - { - if (h) - { - // intentionally ignore failures 
- (void)WinHttpCloseHandle(h); - } - } - - private: - HINTERNET h{}; - }; - - enum class WinHttpTrialResult - { - failed, - succeeded, - retry - }; - - struct WinHttpSession - { - bool open(DiagnosticContext& context, const SanitizedUrl& sanitized_url) - { - if (!m_hSession.Open(context, - sanitized_url, - L"vcpkg/1.0", - WINHTTP_ACCESS_TYPE_NO_PROXY, - WINHTTP_NO_PROXY_NAME, - WINHTTP_NO_PROXY_BYPASS, - 0)) - { - return false; - } - - // Increase default timeouts to help connections behind proxies - // WinHttpSetTimeouts(HINTERNET hInternet, int nResolveTimeout, int nConnectTimeout, int nSendTimeout, int - // nReceiveTimeout); - if (!m_hSession.SetTimeouts(context, sanitized_url, 0, 120000, 120000, 120000)) - { - return false; - } - - // If the environment variable HTTPS_PROXY is set - // use that variable as proxy. This situation might exist when user is in a company network - // with restricted network/proxy settings - auto maybe_https_proxy_env = get_environment_variable(EnvironmentVariableHttpsProxy); - if (auto p_https_proxy = maybe_https_proxy_env.get()) - { - StringView p_https_proxy_view = *p_https_proxy; - if (p_https_proxy_view.size() != 0 && p_https_proxy_view.back() == '/') - { - // remove trailing slash - p_https_proxy_view = p_https_proxy_view.substr(0, p_https_proxy_view.size() - 1); - } - - std::wstring env_proxy_settings = Strings::to_utf16(p_https_proxy_view); - WINHTTP_PROXY_INFO proxy; - proxy.dwAccessType = WINHTTP_ACCESS_TYPE_NAMED_PROXY; - proxy.lpszProxy = env_proxy_settings.data(); - - // Try to get bypass list from environment variable - auto maybe_no_proxy_env = get_environment_variable(EnvironmentVariableNoProxy); - std::wstring env_noproxy_settings; - if (auto p_no_proxy = maybe_no_proxy_env.get()) - { - env_noproxy_settings = Strings::to_utf16(*p_no_proxy); - proxy.lpszProxyBypass = env_noproxy_settings.data(); - } - else - { - proxy.lpszProxyBypass = nullptr; - } - - if (!m_hSession.SetOption(context, sanitized_url, 
WINHTTP_OPTION_PROXY, &proxy, sizeof(proxy))) - { - return false; - } - } - // IE Proxy fallback, this works on Windows 10 - else - { - // We do not use WPAD anymore - // Directly read IE Proxy setting - auto ieProxy = get_windows_ie_proxy_server(); - if (ieProxy.has_value()) - { - WINHTTP_PROXY_INFO proxy; - proxy.dwAccessType = WINHTTP_ACCESS_TYPE_NAMED_PROXY; - proxy.lpszProxy = ieProxy.get()->server.data(); - proxy.lpszProxyBypass = ieProxy.get()->bypass.data(); - if (!m_hSession.SetOption(context, sanitized_url, WINHTTP_OPTION_PROXY, &proxy, sizeof(proxy))) - { - return false; - } - } - } - - // Use Windows 10 defaults on Windows 7 - DWORD secure_protocols(WINHTTP_FLAG_SECURE_PROTOCOL_TLS1 | WINHTTP_FLAG_SECURE_PROTOCOL_TLS1_1 | - WINHTTP_FLAG_SECURE_PROTOCOL_TLS1_2); - if (!m_hSession.SetOption(context, - sanitized_url, - WINHTTP_OPTION_SECURE_PROTOCOLS, - &secure_protocols, - sizeof(secure_protocols))) - { - return false; - } - - // Many open source mirrors such as https://download.gnome.org/ will redirect to http mirrors. - // `curl.exe -L` does follow https -> http redirection. - // Additionally, vcpkg hash checks the resulting archive. - DWORD redirect_policy(WINHTTP_OPTION_REDIRECT_POLICY_ALWAYS); - if (!m_hSession.SetOption( - context, sanitized_url, WINHTTP_OPTION_REDIRECT_POLICY, &redirect_policy, sizeof(redirect_policy))) - { - return false; - } - - return true; - } - - WinHttpHandle m_hSession; - }; - - struct WinHttpConnection - { - bool connect(DiagnosticContext& context, - const WinHttpSession& hSession, - StringView hostname, - INTERNET_PORT port, - const SanitizedUrl& sanitized_url) - { - // Specify an HTTP server. 
- return m_hConnect.Connect(context, hSession.m_hSession, hostname, port, sanitized_url); - } - - WinHttpHandle m_hConnect; - }; - - struct WinHttpRequest - { - bool open(DiagnosticContext& context, - const WinHttpConnection& hConnect, - StringView path_query_fragment, - const SanitizedUrl& sanitized_url, - bool https, - const wchar_t* method = L"GET") - { - if (!m_hRequest.OpenRequest(context, - hConnect.m_hConnect, - sanitized_url, - method, - path_query_fragment, - nullptr, - WINHTTP_NO_REFERER, - WINHTTP_DEFAULT_ACCEPT_TYPES, - https ? WINHTTP_FLAG_SECURE : 0)) - { - return false; - } - - // Send a request. - if (!m_hRequest.SendRequest( - context, sanitized_url, WINHTTP_NO_ADDITIONAL_HEADERS, 0, WINHTTP_NO_REQUEST_DATA, 0, 0, 0)) - { - return false; - } - - // End the request. - if (!m_hRequest.ReceiveResponse(context, sanitized_url)) - { - return false; - } - - return true; - } - - Optional query_status(DiagnosticContext& context, const SanitizedUrl& sanitized_url) const - { - DWORD status_code; - DWORD size = sizeof(status_code); - DWORD last_error = m_hRequest.QueryHeaders(context, - sanitized_url, - WINHTTP_QUERY_STATUS_CODE | WINHTTP_QUERY_FLAG_NUMBER, - WINHTTP_HEADER_NAME_BY_INDEX, - &status_code, - &size, - WINHTTP_NO_HEADER_INDEX); - if (last_error) - { - return nullopt; - } - - return status_code; - } - - bool query_content_length(DiagnosticContext& context, - const SanitizedUrl& sanitized_url, - Optional& result) const - { - static constexpr DWORD buff_characters = 21; // 18446744073709551615 - wchar_t buff[buff_characters]; - DWORD size = sizeof(buff); - AttemptDiagnosticContext adc{context}; - DWORD last_error = m_hRequest.QueryHeaders(adc, - sanitized_url, - WINHTTP_QUERY_CONTENT_LENGTH, - WINHTTP_HEADER_NAME_BY_INDEX, - buff, - &size, - WINHTTP_NO_HEADER_INDEX); - if (!last_error) - { - adc.commit(); - result = Strings::strto(Strings::to_utf8(buff, size >> 1)); - return true; - } - - if (last_error == ERROR_WINHTTP_HEADER_NOT_FOUND) - { - 
adc.handle(); - return true; - } - - adc.commit(); - return false; - } - - WinHttpTrialResult write_response_body(DiagnosticContext& context, - MessageSink& machine_readable_progress, - const SanitizedUrl& sanitized_url, - const WriteFilePointer& file) - { - static constexpr DWORD buff_size = 65535; - std::unique_ptr buff{new char[buff_size]}; - Optional maybe_content_length; - auto last_write = std::chrono::steady_clock::now(); - if (!query_content_length(context, sanitized_url, maybe_content_length)) - { - return WinHttpTrialResult::retry; - } - - unsigned long long total_downloaded_size = 0; - for (;;) - { - DWORD this_read; - if (!m_hRequest.ReadData(context, sanitized_url, buff.get(), buff_size, &this_read)) - { - return WinHttpTrialResult::retry; - } - - if (this_read == 0) - { - return WinHttpTrialResult::succeeded; - } - - do - { - const auto this_write = static_cast(file.write(buff.get(), 1, this_read)); - if (this_write == 0) - { - context.report_error(format_filesystem_call_error( - std::error_code{errno, std::generic_category()}, "fwrite", {file.path()})); - return WinHttpTrialResult::failed; - } - - maybe_emit_winhttp_progress( - machine_readable_progress, maybe_content_length, last_write, total_downloaded_size); - this_read -= this_write; - total_downloaded_size += this_write; - } while (this_read > 0); - } - } - - WinHttpHandle m_hRequest; - }; -#endif - Optional parse_split_url_view(StringView raw_url) { auto sep = std::find(raw_url.begin(), raw_url.end(), ':'); @@ -672,107 +132,164 @@ namespace vcpkg return true; } - static std::vector curl_bulk_operation(DiagnosticContext& context, - View operation_args, - StringLiteral prefixArgs, - View headers, - View secrets) + static size_t write_file_callback(void* contents, size_t size, size_t nmemb, void* param) + { + if (!param) return 0; + return static_cast(param)->write(contents, size, nmemb); + } + + static size_t progress_callback( + void* clientp, curl_off_t dltotal, curl_off_t dlnow, curl_off_t 
ultotal, curl_off_t ulnow) { -#define GUID_MARKER "5ec47b8e-6776-4d70-b9b3-ac2a57bc0a1c" - static constexpr StringLiteral guid_marker = GUID_MARKER; - Command prefix_cmd{"curl"}; - if (!prefixArgs.empty()) + (void)ultotal; + (void)ulnow; + auto machine_readable_progress = static_cast(clientp); + if (dltotal && machine_readable_progress) { - prefix_cmd.raw_arg(prefixArgs); + double percentage = static_cast(dlnow) / static_cast(dltotal) * 100.0; + machine_readable_progress->println(LocalizedString::from_raw(fmt::format("{:.2f}%", percentage))); } + return 0; + } - prefix_cmd.string_arg("--retry").string_arg("3").string_arg("-L").string_arg("-sS").string_arg("-w").string_arg( - GUID_MARKER "%{http_code} %{exitcode} %{errormsg}\\n"); -#undef GUID_MARKER + static std::vector libcurl_bulk_operation(DiagnosticContext& context, + View urls, + View outputs, + View headers) + { + if (!outputs.empty() && outputs.size() != urls.size()) + { + Checks::unreachable(VCPKG_LINE_INFO); + } + + std::vector return_codes(urls.size(), -1); + + CurlHeaders request_headers(headers); - std::vector ret; - ret.reserve(operation_args.size()); - add_curl_headers(prefix_cmd, headers); - while (ret.size() != operation_args.size()) + std::vector write_pointers; + write_pointers.reserve(urls.size()); + + std::vector easy_handles; + easy_handles.resize(urls.size()); + + CurlMultiHandle multi_handle; + for (size_t request_index = 0; request_index < urls.size(); ++request_index) { - // there's an edge case that we aren't handling here where not even one operation fits with the configured - // headers but this seems unlikely + const auto& url = urls[request_index]; + auto& easy_handle = easy_handles[request_index]; + auto* curl = easy_handle.get(); + + set_common_curl_easy_options(easy_handle, url, request_headers); + if (outputs.empty()) + { + curl_easy_setopt(curl, CURLOPT_PRIVATE, reinterpret_cast(static_cast(request_index))); + } + else + { + const auto& output = outputs[request_index]; + 
std::error_code ec; + auto& request_write_pointer = write_pointers.emplace_back(output, Append::NO, ec); + if (ec) + { + context.report_error(format_filesystem_call_error(ec, "fopen", {output})); + Checks::unreachable(VCPKG_LINE_INFO); + } + + curl_easy_setopt(curl, CURLOPT_PRIVATE, static_cast(&request_write_pointer)); + // note explicit cast to void* necessary to go through ... + curl_easy_setopt(curl, CURLOPT_WRITEDATA, static_cast(&request_write_pointer)); + curl_easy_setopt(curl, CURLOPT_WRITEFUNCTION, &write_file_callback); + multi_handle.add_easy_handle(easy_handle); + } + } - // form a maximum length command line of operations: - auto batch_cmd = prefix_cmd; - size_t last_try_op = ret.size(); - while (last_try_op != operation_args.size() && batch_cmd.try_append(operation_args[last_try_op])) + int still_running = 0; + do + { + CURLMcode mc = curl_multi_perform(multi_handle.get(), &still_running); + if (mc != CURLM_OK) { - ++last_try_op; + Debug::println("curl_multi_perform failed:"); + Debug::println(msg::format(msgCurlFailedGeneric, msg::exit_code = static_cast(mc)) + .append_raw(fmt::format(" ({}).", curl_multi_strerror(mc)))); + Checks::unreachable(VCPKG_LINE_INFO); } - // actually run curl - bool new_curl_seen = false; - std::vector debug_lines; - auto maybe_this_batch_exit_code = cmd_execute_and_stream_lines(context, batch_cmd, [&](StringView line) { - debug_lines.emplace_back(line.data(), line.size()); - new_curl_seen |= parse_curl_status_line(context, ret, guid_marker, line); - }); + // we use curl_multi_wait rather than curl_multi_poll for wider compatibility + mc = curl_multi_wait(multi_handle.get(), nullptr, 0, 1000, nullptr); + if (mc != CURLM_OK) + { + Debug::println("curl_multi_wait failed:"); + Debug::println(msg::format(msgCurlFailedGeneric, msg::exit_code = static_cast(mc)) + .append_raw(fmt::format(" ({}).", curl_multi_strerror(mc)))); + Checks::unreachable(VCPKG_LINE_INFO); + } + } while (still_running); - if (auto this_batch_exit_code = 
maybe_this_batch_exit_code.get()) + // drain all messages + int messages_in_queue = 0; + while (auto* msg = curl_multi_info_read(multi_handle.get(), &messages_in_queue)) + { + if (msg->msg == CURLMSG_DONE) { - if (!new_curl_seen) + CURL* handle = msg->easy_handle; + if (msg->data.result == CURLE_OK) { - // old version of curl, we only have the result code for the last operation - context.report_error(msgCurlFailedGeneric, msg::exit_code = *this_batch_exit_code); - } + size_t idx; + void* curlinfo_private; + curl_easy_getinfo(handle, CURLINFO_PRIVATE, &curlinfo_private); + if (outputs.empty()) + { + idx = reinterpret_cast(curlinfo_private); + } + else + { + if (!curlinfo_private) + { + Checks::unreachable(VCPKG_LINE_INFO); + } + auto request_write_handle = static_cast(curlinfo_private); + idx = request_write_handle - write_pointers.data(); + } - if (ret.size() != last_try_op) + long response_code; + curl_easy_getinfo(handle, CURLINFO_RESPONSE_CODE, &response_code); + return_codes[idx] = static_cast(response_code); + } + else { - // curl didn't process everything we asked of it; this usually means curl crashed - auto command_line = std::move(batch_cmd).extract(); - replace_secrets(command_line, secrets); - context.report_error_with_log(Strings::join("\n", debug_lines), - msgCurlFailedToReturnExpectedNumberOfExitCodes, - msg::exit_code = *this_batch_exit_code, - msg::command_line = command_line); - return ret; + context.report_error( + msg::format(msgCurlFailedGeneric, msg::exit_code = static_cast(msg->data.result)) + .append_raw(fmt::format(" ({}).", curl_easy_strerror(msg->data.result)))); } } - else - { - // couldn't even launch curl, record this as the last fatal error and give up - return ret; - } } + return return_codes; + } - return ret; + static std::vector libcurl_bulk_check(DiagnosticContext& context, + View urls, + View headers) + { + return libcurl_bulk_operation(context, + urls, + {}, // no output + headers); } - std::vector url_heads(DiagnosticContext& 
context, - View urls, - View headers, - View secrets) + std::vector url_heads(DiagnosticContext& context, View urls, View headers) { - return curl_bulk_operation( - context, - Util::fmap(urls, [](const std::string& url) { return Command{}.string_arg(url_encode_spaces(url)); }), - "--head", - headers, - secrets); + return libcurl_bulk_check(context, urls, headers); } std::vector download_files_no_cache(DiagnosticContext& context, View> url_pairs, - View headers, - View secrets) + View headers) { - return curl_bulk_operation(context, - Util::fmap(url_pairs, - [](const std::pair& url_pair) { - return Command{} - .string_arg(url_encode_spaces(url_pair.first)) - .string_arg("-o") - .string_arg(url_pair.second); - }), - "--create-dirs", - headers, - secrets); + return libcurl_bulk_operation(context, + Util::fmap(url_pairs, [](auto&& kv) -> std::string { return kv.first; }), + Util::fmap(url_pairs, [](auto&& kv) -> Path { return kv.second; }), + headers); } bool submit_github_dependency_graph_snapshot(DiagnosticContext& context, @@ -781,8 +298,6 @@ namespace vcpkg const std::string& github_repository, const Json::Object& snapshot) { - static constexpr StringLiteral guid_marker = "fcfad8a3-bb68-4a54-ad00-dab1ff671ed2"; - std::string uri; if (auto github_server_url = maybe_github_server_url.get()) { @@ -797,93 +312,93 @@ namespace vcpkg fmt::format_to( std::back_inserter(uri), "/repos/{}/dependency-graph/snapshots", url_encode_spaces(github_repository)); - auto cmd = Command{"curl"}; - cmd.string_arg("-w").string_arg("\\n" + guid_marker.to_string() + "%{http_code}"); - cmd.string_arg("-X").string_arg("POST"); - { - std::string headers[] = { - "Accept: application/vnd.github+json", - "Authorization: Bearer " + github_token, - "X-GitHub-Api-Version: 2022-11-28", - }; - add_curl_headers(cmd, headers); - } + CurlEasyHandle handle; + CURL* curl = handle.get(); - cmd.string_arg(uri); - cmd.string_arg("-d").string_arg("@-"); + std::string post_data = Json::stringify(snapshot); - 
RedirectedProcessLaunchSettings settings; - settings.stdin_content = Json::stringify(snapshot); - int code = 0; - auto result = cmd_execute_and_stream_lines(context, cmd, settings, [&code](StringView line) { - if (line.starts_with(guid_marker)) - { - code = std::strtol(line.data() + guid_marker.size(), nullptr, 10); - } - }); + std::string headers[]{ + "Accept: application/vnd.github+json", + ("Authorization: Bearer " + github_token), + "X-GitHub-Api-Version: 2022-11-28", + "Content-Type: application/json", + }; + + CurlHeaders request_headers(headers); + set_common_curl_easy_options(handle, uri, request_headers); + curl_easy_setopt(curl, CURLOPT_USERAGENT, vcpkg_curl_user_agent); + curl_easy_setopt(curl, CURLOPT_POST, 1L); + curl_easy_setopt(curl, CURLOPT_POSTFIELDS, post_data.c_str()); + curl_easy_setopt(curl, CURLOPT_POSTFIELDSIZE, post_data.length()); + + CURLcode result = curl_easy_perform(curl); + long response_code = 0; + curl_easy_getinfo(curl, CURLINFO_RESPONSE_CODE, &response_code); - auto r = result.get(); - if (r && *r == 0 && code >= 200 && code < 300) + if (result != CURLE_OK) { - return true; + context.report_error(msg::format(msgCurlFailedGeneric, msg::exit_code = static_cast(result)) + .append_raw(fmt::format(" ({}).", curl_easy_strerror(result)))); + return false; } - return false; + + return response_code >= 200 && response_code < 300; + } + + static size_t read_file_callback(char* buffer, size_t size, size_t nitems, void* param) + { + auto* file = static_cast(param); + return file->read(buffer, size, nitems); } bool store_to_asset_cache(DiagnosticContext& context, StringView raw_url, const SanitizedUrl& sanitized_url, - StringLiteral method, View headers, const Path& file) { - static constexpr StringLiteral guid_marker = "9a1db05f-a65d-419b-aa72-037fb4d0672e"; - - if (raw_url.starts_with("ftp://")) + std::error_code ec; + ReadFilePointer fileptr(file, ec); + if (ec) { - // HTTP headers are ignored for FTP clients - auto ftp_cmd = 
Command{"curl"}; - ftp_cmd.string_arg(url_encode_spaces(raw_url)); - ftp_cmd.string_arg("-T").string_arg(file); - auto maybe_res = cmd_execute_and_capture_output(context, ftp_cmd); - if (auto res = maybe_res.get()) - { - if (res->exit_code == 0) - { - return true; - } - - context.report_error_with_log( - res->output, msgCurlFailedToPut, msg::exit_code = res->exit_code, msg::url = sanitized_url); - return false; - } - + context.report_error(format_filesystem_call_error(ec, "fopen", {file})); + return false; + } + auto file_size = fileptr.size(ec); + if (ec) + { + context.report_error(format_filesystem_call_error(ec, "fstat", {file})); return false; } - auto http_cmd = Command{"curl"}.string_arg("-X").string_arg(method); - add_curl_headers(http_cmd, headers); - http_cmd.string_arg("-w").string_arg("\\n" + guid_marker.to_string() + "%{http_code}"); - http_cmd.string_arg(raw_url); - http_cmd.string_arg("-T").string_arg(file); - int code = 0; - auto res = cmd_execute_and_stream_lines(context, http_cmd, [&code](StringView line) { - if (line.starts_with(guid_marker)) - { - code = std::strtol(line.data() + guid_marker.size(), nullptr, 10); - } - }); + CurlEasyHandle handle; + CURL* curl = handle.get(); - auto pres = res.get(); - if (!pres) + auto request_headers = raw_url.starts_with("ftp://") ? 
CurlHeaders() : CurlHeaders(headers); + auto upload_url = url_encode_spaces(raw_url); + curl_set_system_ssl_root_certs(curl); + curl_easy_setopt(curl, CURLOPT_USERAGENT, vcpkg_curl_user_agent); + curl_easy_setopt(curl, CURLOPT_HTTPHEADER, request_headers.get()); + curl_easy_setopt(curl, CURLOPT_URL, upload_url.c_str()); + curl_easy_setopt(curl, CURLOPT_UPLOAD, 1L); + curl_easy_setopt(curl, CURLOPT_READDATA, static_cast(&fileptr)); + curl_easy_setopt(curl, CURLOPT_READFUNCTION, &read_file_callback); + curl_easy_setopt(curl, CURLOPT_INFILESIZE_LARGE, static_cast(file_size)); + + auto result = curl_easy_perform(curl); + if (result != CURLE_OK) { + context.report_error(msg::format(msgCurlFailedGeneric, msg::exit_code = static_cast(result)) + .append_raw(fmt::format(" ({}).", curl_easy_strerror(result)))); return false; } - if (*pres != 0 || (code >= 100 && code < 200) || code >= 300) + long response_code = 0; + curl_easy_getinfo(curl, CURLINFO_RESPONSE_CODE, &response_code); + + if ((response_code >= 100 && response_code < 200) || response_code >= 300) { - context.report_error(msg::format( - msgCurlFailedToPutHttp, msg::exit_code = *pres, msg::url = sanitized_url, msg::value = code)); + context.report_error(msg::format(msgCurlFailedToPut, msg::url = sanitized_url, msg::value = response_code)); return false; } @@ -937,169 +452,14 @@ namespace vcpkg return fmt::format(FMT_COMPILE("{}?{}"), base_url, fmt::join(query_params, "&")); } - Optional invoke_http_request(DiagnosticContext& context, - StringLiteral method, - View headers, - StringView raw_url, - View secrets, - StringView data) - { - auto cmd = Command{"curl"}.string_arg("-s").string_arg("-L"); - add_curl_headers(cmd, headers); - - cmd.string_arg("-X").string_arg(method); - - if (!data.empty()) - { - cmd.string_arg("--data-raw").string_arg(data); - } - - cmd.string_arg(url_encode_spaces(raw_url)); - - auto maybe_output = cmd_execute_and_capture_output(context, cmd); - if (auto output = check_zero_exit_code( - 
context, cmd, maybe_output, RedirectedProcessLaunchSettings{}.echo_in_debug, secrets)) - { - return *output; - } - - return nullopt; - } - -#if defined(_WIN32) - static WinHttpTrialResult download_winhttp_trial(DiagnosticContext& context, - MessageSink& machine_readable_progress, - const Filesystem& fs, - const WinHttpSession& s, - const Path& download_path_part_path, - SplitUrlView split_uri_view, - StringView hostname, - INTERNET_PORT port, - const SanitizedUrl& sanitized_url) - { - WinHttpConnection conn; - if (!conn.connect(context, s, hostname, port, sanitized_url)) - { - return WinHttpTrialResult::retry; - } - - WinHttpRequest req; - if (!req.open( - context, conn, split_uri_view.path_query_fragment, sanitized_url, split_uri_view.scheme == "https")) - { - return WinHttpTrialResult::retry; - } - - auto maybe_status = req.query_status(context, sanitized_url); - const auto status = maybe_status.get(); - if (!status) - { - return WinHttpTrialResult::retry; - } - - if (*status < 200 || *status >= 300) - { - context.report_error(msgDownloadFailedStatusCode, msg::url = sanitized_url, msg::value = *status); - return WinHttpTrialResult::failed; - } - - return req.write_response_body(context, - machine_readable_progress, - sanitized_url, - fs.open_for_write(download_path_part_path, VCPKG_LINE_INFO)); - } - - /// - /// Download a file using WinHTTP -- only supports HTTP and HTTPS - /// - static bool download_winhttp(DiagnosticContext& context, - MessageSink& machine_readable_progress, - const Filesystem& fs, - const Path& download_path_part_path, - SplitUrlView split_url_view, - const SanitizedUrl& sanitized_url) - { - // `download_winhttp` does not support user or port syntax in authorities - auto hostname = split_url_view.authority.value_or_exit(VCPKG_LINE_INFO).substr(2); - INTERNET_PORT port; - if (split_url_view.scheme == "https") - { - port = INTERNET_DEFAULT_HTTPS_PORT; - } - else if (split_url_view.scheme == "http") - { - port = INTERNET_DEFAULT_HTTP_PORT; - } - 
else - { - Checks::unreachable(VCPKG_LINE_INFO); - } - - // Make sure the directories are present, otherwise fopen_s fails - const auto dir = download_path_part_path.parent_path(); - if (!dir.empty()) - { - fs.create_directories(dir, VCPKG_LINE_INFO); - } - - WinHttpSession s; - if (!s.open(context, sanitized_url)) - { - return false; - } - - AttemptDiagnosticContext adc{context}; - switch (download_winhttp_trial(adc, - machine_readable_progress, - fs, - s, - download_path_part_path, - split_url_view, - hostname, - port, - sanitized_url)) - { - case WinHttpTrialResult::succeeded: adc.commit(); return true; - case WinHttpTrialResult::failed: adc.commit(); return false; - case WinHttpTrialResult::retry: break; - } - - for (size_t trials = 1; trials < 4; ++trials) - { - // 1s, 2s, 4s - const auto trialMs = 500 << trials; - adc.handle(); - context.statusln( - DiagnosticLine(DiagKind::Warning, - msg::format(msgDownloadFailedRetrying, msg::value = trialMs, msg::url = sanitized_url)) - .to_message_line()); - std::this_thread::sleep_for(std::chrono::milliseconds(trialMs)); - switch (download_winhttp_trial(adc, - machine_readable_progress, - fs, - s, - download_path_part_path, - split_url_view, - hostname, - port, - sanitized_url)) - { - case WinHttpTrialResult::succeeded: adc.commit(); return true; - case WinHttpTrialResult::failed: adc.commit(); return false; - case WinHttpTrialResult::retry: break; - } - } - - adc.commit(); - return false; - } -#endif - enum class DownloadPrognosis { Success, OtherError, - NetworkErrorProxyMightHelp + NetworkErrorProxyMightHelp, + // Transient error means either: a timeout, an FTP 4xx response code or an HTTP 408, 429, 500, 502, 503 or + // 504 response code. 
https://everything.curl.dev/usingcurl/downloads/retry.html#retry + TransientNetworkError }; static bool check_combine_download_prognosis(DownloadPrognosis& target, DownloadPrognosis individual_call) @@ -1133,6 +493,71 @@ namespace vcpkg } } + static DownloadPrognosis perform_download(DiagnosticContext& context, + MessageSink& machine_readable_progress, + StringView raw_url, + const Path& download_path, + View headers) + { + std::error_code ec; + WriteFilePointer fileptr(download_path, Append::NO, ec); + if (ec) + { + context.report_error(format_filesystem_call_error(ec, "fopen", {download_path})); + return DownloadPrognosis::OtherError; + } + + CurlHeaders request_headers(headers); + + CurlEasyHandle handle; + CURL* curl = handle.get(); + set_common_curl_easy_options(handle, raw_url, request_headers); + curl_easy_setopt(curl, CURLOPT_WRITEFUNCTION, &write_file_callback); + curl_easy_setopt(curl, CURLOPT_WRITEDATA, static_cast(&fileptr)); + curl_easy_setopt(curl, CURLOPT_NOPROGRESS, 0L); // change from default to enable progress + curl_easy_setopt(curl, CURLOPT_XFERINFOFUNCTION, &progress_callback); + curl_easy_setopt(curl, CURLOPT_XFERINFODATA, static_cast(&machine_readable_progress)); + auto curl_code = curl_easy_perform(curl); + + if (curl_code == CURLE_OPERATION_TIMEDOUT) + { + context.report_error(msgCurlDownloadTimeout); + return DownloadPrognosis::TransientNetworkError; + } + + if (curl_code != CURLE_OK) + { + context.report_error(msg::format(msgCurlFailedGeneric, msg::exit_code = static_cast(curl_code)) + .append_raw(fmt::format(" ({}).", curl_easy_strerror(curl_code)))); + return DownloadPrognosis::NetworkErrorProxyMightHelp; + } + + long response_code = -1; + auto get_info_code = curl_easy_getinfo(curl, CURLINFO_RESPONSE_CODE, &response_code); + if (get_info_code != CURLE_OK) + { + context.report_error(msg::format(msgCurlFailedGeneric, msg::exit_code = static_cast(get_info_code)) + .append_raw(fmt::format(" ({}).", curl_easy_strerror(get_info_code)))); + 
return DownloadPrognosis::NetworkErrorProxyMightHelp; + } + + if ((response_code >= 200 && response_code < 300) || (raw_url.starts_with("file://") && response_code == 0)) + { + return DownloadPrognosis::Success; + } + + if (response_code == 429 || response_code == 408 || response_code == 500 || response_code == 502 || + response_code == 503 || response_code == 504) + { + context.report_error( + msg::format(msgCurlFailedHttpResponse, msg::exit_code = static_cast<int>(response_code))); + return DownloadPrognosis::TransientNetworkError; + } + + context.report_error(msg::format(msgCurlFailedHttpResponse, msg::exit_code = static_cast<int>(response_code))); + return DownloadPrognosis::NetworkErrorProxyMightHelp; + } + static DownloadPrognosis try_download_file(DiagnosticContext& context, MessageSink& machine_readable_progress, const Filesystem& fs, @@ -1152,59 +577,6 @@ namespace vcpkg #endif download_path_part_path += ".part"; -#if defined(_WIN32) - auto maybe_https_proxy_env = get_environment_variable(EnvironmentVariableHttpsProxy); - bool needs_proxy_auth = false; - if (auto proxy_url = maybe_https_proxy_env.get()) - { - needs_proxy_auth = proxy_url->find('@') != std::string::npos; - } - if (headers.size() == 0 && !needs_proxy_auth) - { - auto maybe_split_uri_view = parse_split_url_view(raw_url); - auto split_uri_view = maybe_split_uri_view.get(); - if (!split_uri_view) - { - context.report_error(msgInvalidUri, msg::value = sanitized_url); - return DownloadPrognosis::OtherError; - } - - if (split_uri_view->scheme == "https" || split_uri_view->scheme == "http") - { - auto maybe_authority = split_uri_view->authority.get(); - if (!maybe_authority) - { - context.report_error(msg::format(msgInvalidUri, msg::value = sanitized_url)); - return DownloadPrognosis::OtherError; - } - - auto authority = StringView{*maybe_authority}.substr(2); - // This check causes complex URLs (non-default port, embedded basic auth) to be passed down to - // curl.exe - if (Strings::find_first_of(authority, 
":@") == authority.end()) - { - if (!download_winhttp(context, - machine_readable_progress, - fs, - download_path_part_path, - *split_uri_view, - sanitized_url)) - { - return DownloadPrognosis::NetworkErrorProxyMightHelp; - } - - if (!check_downloaded_file_hash( - context, fs, sanitized_url, download_path_part_path, maybe_sha512, out_sha512)) - { - return DownloadPrognosis::OtherError; - } - - fs.rename(download_path_part_path, download_path, VCPKG_LINE_INFO); - return DownloadPrognosis::Success; - } - } - } -#endif // Create directory in advance, otherwise curl will create it in 750 mode on unix style file systems. const auto dir = download_path_part_path.parent_path(); if (!dir.empty()) @@ -1212,85 +584,37 @@ namespace vcpkg fs.create_directories(dir, VCPKG_LINE_INFO); } - auto cmd = Command{"curl"} - .string_arg("--fail") - .string_arg("--retry") - .string_arg("3") - .string_arg("-L") - .string_arg(url_encode_spaces(raw_url)) - .string_arg("--create-dirs") - .string_arg("--output") - .string_arg(download_path_part_path); - add_curl_headers(cmd, headers); - bool seen_any_curl_errors = false; - // if seen_any_curl_errors, contains the curl error lines starting with "curl:" - // otherwise, contains all curl's output unless it is the machine readable output - std::vector likely_curl_errors; - auto maybe_exit_code = cmd_execute_and_stream_lines(context, cmd, [&](StringView line) { - const auto maybe_parsed = try_parse_curl_progress_data(line); - if (const auto parsed = maybe_parsed.get()) - { - machine_readable_progress.println(Color::none, - LocalizedString::from_raw(fmt::format("{}%", parsed->total_percent))); - return; - } + // Retry on transient errors: + // Transient error means either: a timeout, an FTP 4xx response code or an HTTP 408, 429, 500, 502, 503 or + // 504 response code. 
https://everything.curl.dev/usingcurl/downloads/retry.html#retry + using namespace std::chrono_literals; + static constexpr std::array attempt_delays = {0s, 1s, 2s}; + DownloadPrognosis prognosis = DownloadPrognosis::NetworkErrorProxyMightHelp; + for (size_t attempt_count = 0; attempt_count < attempt_delays.size(); attempt_count++) + { + std::this_thread::sleep_for(attempt_delays[attempt_count]); - static constexpr StringLiteral WarningColon = "warning: "; - if (Strings::case_insensitive_ascii_starts_with(line, WarningColon)) - { - context.statusln( - DiagnosticLine{DiagKind::Warning, LocalizedString::from_raw(line.substr(WarningColon.size()))} - .to_message_line()); - return; - } + prognosis = perform_download(context, machine_readable_progress, raw_url, download_path_part_path, headers); - // clang-format off - // example: - // 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0curl: (6) Could not resolve host: nonexistent.example.com - // clang-format on - static constexpr StringLiteral CurlColon = "curl:"; - auto curl_start = std::search(line.begin(), line.end(), CurlColon.begin(), CurlColon.end()); - if (curl_start == line.end()) + if (DownloadPrognosis::Success == prognosis) { - if (seen_any_curl_errors) - { - return; - } - - curl_start = line.begin(); + break; } - else + + if (DownloadPrognosis::TransientNetworkError != prognosis) { - if (!seen_any_curl_errors) - { - seen_any_curl_errors = true; - likely_curl_errors.clear(); - } + context.report_error(msg::format(msgDownloadNotTransientErrorWontRetry, msg::url = sanitized_url)); + return prognosis; } - likely_curl_errors.emplace_back(curl_start, line.end()); - }); - - const auto exit_code = maybe_exit_code.get(); - if (!exit_code) - { - return DownloadPrognosis::OtherError; + context.report_error(msg::format( + msgDownloadTransientErrorRetry, msg::count = attempt_count + 1, msg::value = attempt_delays.size())); } - if (*exit_code != 0) + if (DownloadPrognosis::Success != prognosis) { - std::set seen_errors; - for 
(StringView likely_curl_error : likely_curl_errors) - { - auto seen_position = seen_errors.lower_bound(likely_curl_error); - if (seen_position == seen_errors.end() || *seen_position != likely_curl_error) - { - seen_errors.emplace_hint(seen_position, likely_curl_error); - context.report(DiagnosticLine{DiagKind::Error, LocalizedString::from_raw(likely_curl_error)}); - } - } - - return DownloadPrognosis::NetworkErrorProxyMightHelp; + context.report_error(msg::format(msgDownloadTransientErrorRetriesExhausted, msg::url = sanitized_url)); + return prognosis; } if (!check_downloaded_file_hash(context, fs, sanitized_url, download_path_part_path, maybe_sha512, out_sha512)) @@ -1308,89 +632,6 @@ namespace vcpkg return s_headers; } - bool parse_curl_status_line(DiagnosticContext& context, - std::vector& http_codes, - StringLiteral prefix, - StringView this_line) - { - if (!this_line.starts_with(prefix)) - { - return false; - } - - auto first = this_line.begin(); - const auto last = this_line.end(); - first += prefix.size(); - const auto first_http_code = first; - - int http_code; - for (;; ++first) - { - if (first == last) - { - // this output is broken, even if we don't know %{exit_code} or ${errormsg}, the spaces in front - // of them should still be printed. 
- return false; - } - - if (!ParserBase::is_ascii_digit(*first)) - { - http_code = Strings::strto(StringView{first_http_code, first}).value_or_exit(VCPKG_LINE_INFO); - break; - } - } - - if (*first != ' ' || ++first == last) - { - // didn't see the space after the http_code - return false; - } - - if (*first == ' ') - { - // old curl that doesn't understand %{exit_code}, this is the space after it - http_codes.emplace_back(http_code); - return false; - } - - if (!ParserBase::is_ascii_digit(*first)) - { - // not exit_code - return false; - } - - const auto first_exit_code = first; - for (;;) - { - if (++first == last) - { - // didn't see the space after %{exit_code} - return false; - } - - if (*first == ' ') - { - // the space after exit_code, everything after this space is the error message if any - http_codes.emplace_back(http_code); - auto exit_code = Strings::strto(StringView{first_exit_code, first}).value_or_exit(VCPKG_LINE_INFO); - // note that this gets the space out of the output :) - if (exit_code != 0) - { - context.report_error(msg::format(msgCurlFailedGeneric, msg::exit_code = exit_code) - .append_raw(StringView{first, last})); - } - - return true; - } - - if (!ParserBase::is_ascii_digit(*first)) - { - // non numeric exit_code? 
- return false; - } - } - } - static DownloadPrognosis download_file_azurl_asset_cache(DiagnosticContext& context, MessageSink& machine_readable_progress, const AssetCachingSettings& asset_cache_settings, @@ -1659,12 +900,8 @@ namespace vcpkg context.statusln( msg::format(msgDownloadSuccesfulUploading, msg::path = display_path, msg::url = sanitized_upload_url)); WarningDiagnosticContext wdc{context}; - if (!store_to_asset_cache(wdc, - raw_upload_url, - sanitized_upload_url, - "PUT", - asset_cache_settings.m_write_headers, - download_path)) + if (!store_to_asset_cache( + wdc, raw_upload_url, sanitized_upload_url, asset_cache_settings.m_write_headers, download_path)) { context.report(DiagnosticLine{DiagKind::Warning, msg::format(msgFailedToStoreBackToMirror, @@ -1869,6 +1106,11 @@ namespace vcpkg context, download_path, display_path, asset_cache_settings, maybe_sha512); return true; } + else + { + asset_cache_attempt_context.commit(); + authoritative_attempt_context.commit(); + } while (++first_sanitized_url, ++first_raw_url != last_raw_url) { @@ -1984,12 +1226,8 @@ namespace vcpkg auto raw_upload_url = Strings::replace_all(*url_template, "", sha512); SanitizedUrl sanitized_upload_url{raw_upload_url, asset_cache_settings.m_secrets}; - return store_to_asset_cache(context, - raw_upload_url, - sanitized_upload_url, - "PUT", - asset_cache_settings.m_write_headers, - file_to_put); + return store_to_asset_cache( + context, raw_upload_url, sanitized_upload_url, asset_cache_settings.m_write_headers, file_to_put); } return true; diff --git a/src/vcpkg/base/files.cpp b/src/vcpkg/base/files.cpp index 452710c855..2dbad9a507 100644 --- a/src/vcpkg/base/files.cpp +++ b/src/vcpkg/base/files.cpp @@ -1562,6 +1562,35 @@ namespace vcpkg ec.clear(); } + uint64_t ReadFilePointer::size(LineInfo li) const + { + std::error_code ec; + auto result = this->size(ec); + if (ec) + { + exit_filesystem_call_error(li, ec, __func__, {m_path}); + } + + return result; + } + + uint64_t 
ReadFilePointer::size(std::error_code& ec) const + { + ec.clear(); +#if _WIN32 + return stdfs::file_size(to_stdfs_path(m_path), ec); +#else + struct stat st; + if (::fstat(::fileno(m_fs), &st) != 0) + { + ec.assign(errno, std::generic_category()); + return 0; + } + + return st.st_size; +#endif + } + WriteFilePointer::WriteFilePointer() noexcept = default; WriteFilePointer::WriteFilePointer(WriteFilePointer&&) noexcept = default; diff --git a/src/vcpkg/binarycaching.cpp b/src/vcpkg/binarycaching.cpp index d70b3f00ee..edf7a18790 100644 --- a/src/vcpkg/binarycaching.cpp +++ b/src/vcpkg/binarycaching.cpp @@ -482,7 +482,7 @@ namespace PrintingDiagnosticContext pdc{msg_sink}; WarningDiagnosticContext wdc{pdc}; auto maybe_success = - store_to_asset_cache(wdc, url, SanitizedUrl{url, m_secrets}, "PUT", templ.headers, zip_path); + store_to_asset_cache(wdc, url, SanitizedUrl{url, m_secrets}, templ.headers, zip_path); if (maybe_success) { count_stored++; @@ -526,7 +526,7 @@ namespace } WarningDiagnosticContext wdc{console_diagnostic_context}; - auto codes = download_files_no_cache(wdc, url_paths, m_url_template.headers, m_secrets); + auto codes = download_files_no_cache(wdc, url_paths, m_url_template.headers); for (size_t i = 0; i < codes.size(); ++i) { if (codes[i] == 200) @@ -545,7 +545,7 @@ namespace } WarningDiagnosticContext wdc{console_diagnostic_context}; - auto codes = url_heads(wdc, urls, {}, m_secrets); + auto codes = url_heads(wdc, urls, {}); for (size_t i = 0; i < codes.size(); ++i) { out_status[i] = codes[i] == 200 ? CacheAvailability::available : CacheAvailability::unavailable; @@ -589,7 +589,7 @@ namespace // cf. 
// https://learn.microsoft.com/en-us/rest/api/storageservices/understanding-block-blobs--append-blobs--and-page-blobs?toc=%2Fazure%2Fstorage%2Fblobs%2Ftoc.json - constexpr size_t max_single_write = 5000000000; + constexpr size_t max_single_write = 5000000000u; bool use_azcopy = file_size > max_single_write; PrintingDiagnosticContext pdc{msg_sink}; @@ -599,9 +599,8 @@ namespace { auto url = templ.instantiate_variables(request); auto maybe_success = - use_azcopy - ? azcopy_to_asset_cache(wdc, url, SanitizedUrl{url, m_secrets}, zip_path) - : store_to_asset_cache(wdc, url, SanitizedUrl{url, m_secrets}, "PUT", templ.headers, zip_path); + use_azcopy ? azcopy_to_asset_cache(wdc, url, SanitizedUrl{url, m_secrets}, zip_path) + : store_to_asset_cache(wdc, url, SanitizedUrl{url, m_secrets}, templ.headers, zip_path); if (maybe_success) { count_stored++; diff --git a/src/vcpkg/commands.cpp b/src/vcpkg/commands.cpp index 29430906b1..a90862b4fb 100644 --- a/src/vcpkg/commands.cpp +++ b/src/vcpkg/commands.cpp @@ -70,8 +70,8 @@ namespace vcpkg {CommandCheckToolsShaMetadata, command_check_tools_sha_and_exit}, {CommandInitRegistryMetadata, command_init_registry_and_exit}, {CommandVersionMetadata, command_version_and_exit}, -#if defined(_WIN32) {CommandZUploadMetricsMetadata, command_z_upload_metrics_and_exit}, +#if defined(_WIN32) {CommandZApplocalMetadata, command_z_applocal_and_exit}, #endif // defined(_WIN32) {CommandZGenerateDefaultMessageMapMetadata, command_z_generate_default_message_map_and_exit}, diff --git a/src/vcpkg/commands.z-check-tools-sha.cpp b/src/vcpkg/commands.z-check-tools-sha.cpp index 0356b3615a..eaa47f5c3e 100644 --- a/src/vcpkg/commands.z-check-tools-sha.cpp +++ b/src/vcpkg/commands.z-check-tools-sha.cpp @@ -1,4 +1,5 @@ #include +#include #include #include #include @@ -75,7 +76,7 @@ namespace vcpkg } msg::println(msgDownloadingTools, msg::count = urlAndPaths.size()); - auto result = download_files_no_cache(console_diagnostic_context, urlAndPaths, {}, {}); + 
auto result = download_files_no_cache(console_diagnostic_context, urlAndPaths, {}); // no headers std::unordered_map url_to_fixed_sha; auto http_codes_iter = result.begin(); @@ -106,7 +107,7 @@ namespace vcpkg ++http_codes_iter; } - if (!has_sha_error) + if (!has_http_error && !has_sha_error) { msg::println(msgAllShasValid); } diff --git a/src/vcpkg/commands.z-upload-metrics.cpp b/src/vcpkg/commands.z-upload-metrics.cpp index 21dcbd1ab5..7cb5523503 100644 --- a/src/vcpkg/commands.z-upload-metrics.cpp +++ b/src/vcpkg/commands.z-upload-metrics.cpp @@ -1,9 +1,9 @@ -#include - -#if defined(_WIN32) #include +#include #include +#include +#include #include #include @@ -26,8 +26,18 @@ namespace vcpkg const auto parsed = args.parse_arguments(CommandZUploadMetricsMetadata); const auto& payload_path = parsed.command_arguments[0]; auto payload = fs.read_contents(payload_path, VCPKG_LINE_INFO); - winhttp_upload_metrics(payload); + if (!curl_upload_metrics(payload)) + { + Debug::println("Failed to upload metrics"); + Checks::exit_fail(VCPKG_LINE_INFO); + } + + std::error_code ec; + fs.remove(payload_path, ec); + if (ec) + { + Debug::println("Failed to remove file after upload: {}", ec.message()); + } Checks::exit_success(VCPKG_LINE_INFO); } } -#endif // defined(_WIN32) diff --git a/src/vcpkg/metrics.cpp b/src/vcpkg/metrics.cpp index a1d088e5a6..9e86f97540 100644 --- a/src/vcpkg/metrics.cpp +++ b/src/vcpkg/metrics.cpp @@ -1,4 +1,6 @@ #include +#include +#include #include #include #include @@ -17,6 +19,7 @@ #include #include +#include #include #include @@ -477,99 +480,6 @@ namespace vcpkg std::atomic g_should_print_metrics = false; std::atomic g_metrics_enabled = false; -#if defined(_WIN32) - void winhttp_upload_metrics(StringView payload) - { - HINTERNET connect = nullptr, request = nullptr; - BOOL results = FALSE; - - const HINTERNET session = WinHttpOpen( - L"vcpkg/1.0", WINHTTP_ACCESS_TYPE_DEFAULT_PROXY, WINHTTP_NO_PROXY_NAME, WINHTTP_NO_PROXY_BYPASS, 0); - - unsigned long 
secure_protocols = WINHTTP_FLAG_SECURE_PROTOCOL_TLS1_2; - if (session && WinHttpSetOption(session, WINHTTP_OPTION_SECURE_PROTOCOLS, &secure_protocols, sizeof(DWORD))) - { - connect = WinHttpConnect(session, L"dc.services.visualstudio.com", INTERNET_DEFAULT_HTTPS_PORT, 0); - } - - if (connect) - { - request = WinHttpOpenRequest(connect, - L"POST", - L"/v2/track", - nullptr, - WINHTTP_NO_REFERER, - WINHTTP_DEFAULT_ACCEPT_TYPES, - WINHTTP_FLAG_SECURE); - } - - if (request) - { - auto mutable_payload = payload.to_string(); - if (MAXDWORD <= mutable_payload.size()) abort(); - std::wstring hdrs = L"Content-Type: application/json\r\n"; - results = WinHttpSendRequest(request, - hdrs.c_str(), - static_cast(hdrs.size()), - static_cast(mutable_payload.data()), - static_cast(mutable_payload.size()), - static_cast(mutable_payload.size()), - 0); - } - - if (results) - { - results = WinHttpReceiveResponse(request, nullptr); - } - - DWORD http_code = 0, junk = sizeof(DWORD); - - if (results) - { - results = WinHttpQueryHeaders(request, - WINHTTP_QUERY_STATUS_CODE | WINHTTP_QUERY_FLAG_NUMBER, - nullptr, - &http_code, - &junk, - WINHTTP_NO_HEADER_INDEX); - } - - std::vector response_buffer; - if (results) - { - DWORD available_data = 0, read_data = 0, total_data = 0; - while ((results = WinHttpQueryDataAvailable(request, &available_data)) == TRUE && available_data > 0) - { - response_buffer.resize(response_buffer.size() + available_data); - - results = WinHttpReadData(request, &response_buffer[total_data], available_data, &read_data); - - if (!results) - { - break; - } - - total_data += read_data; - - response_buffer.resize(total_data); - } - } - - if (!results) - { -#ifndef NDEBUG - __debugbreak(); - auto err = GetLastError(); - fprintf(stderr, "[DEBUG] failed to connect to server: %08lu\n", err); -#endif // NDEBUG - } - - if (request) WinHttpCloseHandle(request); - if (connect) WinHttpCloseHandle(connect); - if (session) WinHttpCloseHandle(session); - } -#endif // ^^^ _WIN32 - 
void flush_global_metrics(const Filesystem& fs) { if (!g_metrics_enabled.load()) @@ -608,32 +518,99 @@ namespace vcpkg fs.write_contents(vcpkg_metrics_txt_path, payload, ec); if (ec) return; -#if defined(_WIN32) - const Path temp_folder_path_exe = temp_folder_path / "vcpkg-" VCPKG_BASE_VERSION_AS_STRING ".exe"; + const Path temp_folder_path_exe = temp_folder_path / "vcpkg-" VCPKG_BASE_VERSION_AS_STRING +#if defined(_WIN32) + ".exe" +#endif + ; fs.copy_file(get_exe_path_of_current_process(), temp_folder_path_exe, CopyOptions::skip_existing, ec); if (ec) return; + Command builder; builder.string_arg(temp_folder_path_exe); builder.string_arg("z-upload-metrics"); builder.string_arg(vcpkg_metrics_txt_path); cmd_execute_background(builder); -#else - cmd_execute_background(Command("curl") - .string_arg("https://dc.services.visualstudio.com/v2/track") - .string_arg("--max-time") - .string_arg("60") - .string_arg("-H") - .string_arg("Content-Type: application/json") - .string_arg("-X") - .string_arg("POST") - .string_arg("--tlsv1.2") - .string_arg("--data") - .string_arg(Strings::concat("@", vcpkg_metrics_txt_path)) - .raw_arg(">/dev/null") - .raw_arg("2>&1") - .raw_arg(";") - .string_arg("rm") - .string_arg(vcpkg_metrics_txt_path)); -#endif + } + + static size_t string_append_cb(void* buff, size_t size, size_t nmemb, void* param) + { + auto* str = reinterpret_cast<std::string*>(param); + if (!str || !buff) return 0; + if (size != 1) return 0; + str->append(reinterpret_cast<const char*>(buff), nmemb); + return size * nmemb; + } + + bool parse_metrics_response(StringView response_body) + { + auto maybe_json = Json::parse_object(response_body, "metrics_response"); + auto json = maybe_json.get(); + if (!json) return false; + + auto maybe_received = json->get(AppInsightsResponseItemsReceived); + auto maybe_accepted = json->get(AppInsightsResponseItemsAccepted); + auto maybe_errors = json->get(AppInsightsResponseErrors); + + if (maybe_received && maybe_accepted && maybe_errors && 
maybe_received->is_integer() && + maybe_accepted->is_integer() && maybe_errors->is_array()) + { + auto item_received = maybe_received->integer(VCPKG_LINE_INFO); + auto item_accepted = maybe_accepted->integer(VCPKG_LINE_INFO); + auto errors = maybe_errors->array(VCPKG_LINE_INFO); + return (errors.size() == 0) && (item_received == item_accepted); + } + Debug::println("Metrics response has unexpected format"); + return false; + } + + bool curl_upload_metrics(const std::string& payload) + { + if (payload.length() > static_cast<size_t>(std::numeric_limits<long>::max())) + { + Debug::println("Metrics payload too large to upload"); + return false; + } + + CurlEasyHandle handle; + CURL* curl = handle.get(); + + std::string headers[] = { + "Content-Type: application/json", + }; + CurlHeaders request_headers(headers); + + curl_easy_setopt(curl, CURLOPT_URL, "https://dc.services.visualstudio.com/v2/track"); + curl_easy_setopt(curl, CURLOPT_POSTFIELDS, payload.c_str()); + curl_easy_setopt(curl, CURLOPT_POSTFIELDSIZE, static_cast<long>(payload.length())); + curl_easy_setopt(curl, CURLOPT_HTTPHEADER, request_headers.get()); + curl_easy_setopt(curl, CURLOPT_TIMEOUT, 60L); + curl_easy_setopt(curl, CURLOPT_SSLVERSION, CURL_SSLVERSION_TLSv1_2); + curl_easy_setopt(curl, CURLOPT_FOLLOWLOCATION, 1L); // follow redirects + curl_easy_setopt(curl, CURLOPT_USERAGENT, vcpkg_curl_user_agent); + curl_set_system_ssl_root_certs(curl); + + std::string buff; + curl_easy_setopt(curl, CURLOPT_WRITEDATA, static_cast<void*>(&buff)); + curl_easy_setopt(curl, CURLOPT_WRITEFUNCTION, &string_append_cb); + + long response_code = 0; + CURLcode res = curl_easy_perform(curl); + bool is_success = false; + if (res == CURLE_OK) + { + curl_easy_getinfo(curl, CURLINFO_RESPONSE_CODE, &response_code); + Debug::println(fmt::format("Metrics upload response code: {}", response_code)); + Debug::println("Metrics upload response body: ", buff); + if (response_code == 200) + { + is_success = parse_metrics_response(buff); + } + } + else + { + 
Debug::println("Metrics upload failed: ", curl_easy_strerror(res)); + } + return is_success; } }