diff --git a/.devcontainer/scripts/prep-container-build b/.devcontainer/scripts/prep-container-build
index 80d0a9f84c3..f6bbb01248e 100755
--- a/.devcontainer/scripts/prep-container-build
+++ b/.devcontainer/scripts/prep-container-build
@@ -1,4 +1,4 @@
-#!/bin/sh
+#!/usr/bin/env bash
 # Copyright (c) Microsoft Corporation.
 # Licensed under the MIT License.
 
@@ -18,9 +18,14 @@ if [ ! -f .env ]; then
     echo "Creating empty .env file for devcontainer."
     touch .env
 fi
+# Add some info about the host OS to the .env file.
+egrep -v '^HOST_OSTYPE=' .env > .env.tmp || true
+echo "HOST_OSTYPE=$OSTYPE" >> .env.tmp
+mv .env.tmp .env
+
 # Also prep the random NGINX_PORT for the docker-compose command.
 if ! [ -e .devcontainer/.env ] || ! egrep -q "^NGINX_PORT=[0-9]+$" .devcontainer/.env; then
-    RANDOM=$$
+    RANDOM=${RANDOM:-$$}
     NGINX_PORT=$((($RANDOM % 30000) + 1 + 80))
     echo "NGINX_PORT=$NGINX_PORT" > .devcontainer/.env
 fi
diff --git a/.devcontainer/scripts/run-devcontainer.ps1 b/.devcontainer/scripts/run-devcontainer.ps1
new file mode 100644
index 00000000000..2e8c490e835
--- /dev/null
+++ b/.devcontainer/scripts/run-devcontainer.ps1
@@ -0,0 +1,44 @@
+##
+## Copyright (c) Microsoft Corporation.
+## Licensed under the MIT License.
+##
+
+# Quick hacky script to start a devcontainer in a non-vscode shell for testing.
+# See Also:
+# - ../build/build-devcontainer
+# - "devcontainer open" subcommand from
+
+#Set-PSDebug -Trace 2
+$ErrorActionPreference = 'Stop'
+
+# Move to repo root.
+Set-Location "$PSScriptRoot/../.."
+$repo_root = (Get-Item . | Select-Object -ExpandProperty FullName)
+$repo_name = (Get-Item . | Select-Object -ExpandProperty Name)
+$repo_root_id = $repo_root.GetHashCode()
+$container_name = "$repo_name.$repo_root_id"
+
+# Be sure to use the host workspace folder if available.
+$workspace_root = $repo_root
+
+$docker_gid = 0
+
+New-Item -Type Directory -ErrorAction Ignore "${env:TMP}/$container_name/dc/shellhistory"
+
+docker run -it --rm `
+    --name "$container_name" `
+    --user vscode `
+    --env USER=vscode `
+    --group-add $docker_gid `
+    -v "${env:USERPROFILE}/.azure:/dc/azure" `
+    -v "${env:TMP}/$container_name/dc/shellhistory:/dc/shellhistory" `
+    -v "/var/run/docker.sock:/var/run/docker.sock" `
+    -v "${workspace_root}:/workspaces/$repo_name" `
+    --workdir "/workspaces/$repo_name" `
+    --env CONTAINER_WORKSPACE_FOLDER="/workspaces/$repo_name" `
+    --env LOCAL_WORKSPACE_FOLDER="$workspace_root" `
+    --env http_proxy="${env:http_proxy}" `
+    --env https_proxy="${env:https_proxy}" `
+    --env no_proxy="${env:no_proxy}" `
+    mlos-devcontainer `
+    $args
diff --git a/.github/workflows/devcontainer.yml b/.github/workflows/devcontainer.yml
index 1db506ce66a..c0d41a3d72c 100644
--- a/.github/workflows/devcontainer.yml
+++ b/.github/workflows/devcontainer.yml
@@ -26,7 +26,9 @@ concurrency:
   cancel-in-progress: true
 
 jobs:
-  DevContainer:
+  DevContainerLintBuildTestPublish:
+    name: DevContainer Lint/Build/Test/Publish
+
     runs-on: ubuntu-latest
 
     permissions:
@@ -259,9 +261,12 @@ jobs:
           docker tag mlos-devcontainer:latest ${{ secrets.ACR_LOGINURL }}/mlos-devcontainer:$image_tag
           docker push ${{ secrets.ACR_LOGINURL }}/mlos-devcontainer:$image_tag
 
-  DeployDocs:
+
+  PublishDocs:
+    name: Publish Documentation
+
     if: github.ref == 'refs/heads/main'
-    needs: DevContainer
+    needs: DevContainerLintBuildTestPublish
     runs-on: ubuntu-latest
     # Required for github-pages-deploy-action to push to the gh-pages branch.
diff --git a/.github/workflows/linux.yml b/.github/workflows/linux.yml
index 7e280901875..c9ba2bdcaa0 100644
--- a/.github/workflows/linux.yml
+++ b/.github/workflows/linux.yml
@@ -19,7 +19,9 @@ concurrency:
   cancel-in-progress: true
 
 jobs:
-  Linux:
+  LinuxCondaBuildTest:
+    name: Linux Build/Test with Conda
+
     runs-on: ubuntu-latest
 
     permissions:
diff --git a/.github/workflows/macos.yml b/.github/workflows/macos.yml
new file mode 100644
index 00000000000..37bd8e39e40
--- /dev/null
+++ b/.github/workflows/macos.yml
@@ -0,0 +1,218 @@
+# Note: this file is based on the linux.yml workflow.
+
+name: MLOS MacOS
+
+on:
+  workflow_dispatch:
+    inputs:
+      tags:
+        description: Manual MLOS MacOS run
+  push:
+    branches: [ main ]
+  pull_request:
+    branches: [ main ]
+  merge_group:
+    types: [checks_requested]
+  schedule:
+    - cron: "1 0 * * *"
+
+concurrency:
+  group: ${{ github.workflow }}-${{ github.ref }}-${{ github.event_name }}
+  cancel-in-progress: true
+
+jobs:
+  MacOSCondaBuildTest:
+    name: MacOS Build/Test with Conda
+
+    runs-on: macos-latest
+
+    permissions:
+      contents: read
+
+    # Test multiple versions of python.
+    strategy:
+      fail-fast: false
+      matrix:
+        python_version:
+          # Empty string is the floating most recent version of python
+          # (useful to catch new compatibility issues in nightly builds)
+          - ""
+          # For now we only test the latest version of python on MacOS.
+          #- "3.8"
+          #- "3.9"
+          #- "3.10"
+          #- "3.11"
+          #- "3.12"
+          #- "3.13"
+
+    env:
+      cache_cur_date: unset
+      cache_cur_hour: unset
+      cache_prev_hour: unset
+      CONDA_ENV_NAME: unset
+      # See notes about $CONDA below.
+      CONDA_DIR: unset
+      # When parallel jobs are used, group the output to make debugging easier.
+      MAKEFLAGS: -Oline
+
+    steps:
+      - uses: actions/checkout@v4
+
+      - uses: conda-incubator/setup-miniconda@v3
+
+      - name: Set cache timestamp variables
+        id: set_cache_vars
+        run: |
+          set -x
+          if [ -z "${{ matrix.python_version }}" ]; then
+            CONDA_ENV_NAME=mlos
+          else
+            CONDA_ENV_NAME="mlos-${{ matrix.python_version }}"
+          fi
+          echo "CONDA_ENV_NAME=$CONDA_ENV_NAME" >> $GITHUB_ENV
+          echo "cache_cur_date=$(date -u +%Y-%m-%d)" >> $GITHUB_ENV
+          echo "cache_cur_hour=$(date -u +%H)" >> $GITHUB_ENV
+          echo "cache_prev_hour=$(date -u -v-1H +%H)" >> $GITHUB_ENV  # BSD date: -v-1H instead of GNU -d'1 hour ago'
+          # $CONDA should be set by the setup-miniconda action.
+          # We set a separate environment variable to allow the dependabot tool
+          # to parse this file since it expects all env vars to be declared above.
+ echo "CONDA_DIR=$CONDA" >> $GITHUB_ENV + echo "PIP_CACHE_DIR=$(conda run -n base pip cache dir)" >> $GITHUB_ENV + + #- name: Restore cached conda environment + - name: Restore cached conda packages + id: restore-conda-cache + if: ${{ github.event_name != 'schedule' }} + uses: actions/cache@v4 + with: + #path: ${{ env.CONDA_DIR }}/envs/${{ env.CONDA_ENV_NAME }} + path: ${{ env.CONDA_DIR }}/pkgs + key: conda-${{ runner.os }}-${{ env.CONDA_ENV_NAME }}-${{ hashFiles('conda-envs/${{ env.CONDA_ENV_NAME }}.yml') }}-${{ hashFiles('mlos_*/pyproject.toml') }}-${{ hashFiles('mlos_*/setup.py') }}-${{ env.cache_cur_date }}-${{ env.cache_cur_hour }} + restore-keys: | + conda-${{ runner.os }}-${{ env.CONDA_ENV_NAME }}-${{ hashFiles('conda-envs/${{ env.CONDA_ENV_NAME }}.yml') }}-${{ hashFiles('mlos_*/pyproject.toml') }}-${{ hashFiles('mlos_*/setup.py') }}-${{ env.cache_cur_date }}-${{ env.cache_prev_hour }} + conda-${{ runner.os }}-${{ env.CONDA_ENV_NAME }}-${{ hashFiles('conda-envs/${{ env.CONDA_ENV_NAME }}.yml') }}-${{ hashFiles('mlos_*/pyproject.toml') }}-${{ hashFiles('mlos_*/setup.py') }}-${{ env.cache_cur_date }} + + - name: Restore cached pip packages + id: restore-pip-cache + if: ${{ github.event_name != 'schedule' }} + uses: actions/cache@v4 + with: + path: ${{ env.PIP_CACHE_DIR }} + key: conda-${{ runner.os }}-${{ env.CONDA_ENV_NAME }}-${{ hashFiles('conda-envs/${{ env.CONDA_ENV_NAME }}.yml') }}-${{ hashFiles('mlos_*/pyproject.toml') }}-${{ hashFiles('mlos_*/setup.py') }}-${{ env.cache_cur_date }}-${{ env.cache_cur_hour }} + restore-keys: | + conda-${{ runner.os }}-${{ env.CONDA_ENV_NAME }}-${{ hashFiles('conda-envs/${{ env.CONDA_ENV_NAME }}.yml') }}-${{ hashFiles('mlos_*/pyproject.toml') }}-${{ hashFiles('mlos_*/setup.py') }}-${{ env.cache_cur_date }}-${{ env.cache_prev_hour }} + conda-${{ runner.os }}-${{ env.CONDA_ENV_NAME }}-${{ hashFiles('conda-envs/${{ env.CONDA_ENV_NAME }}.yml') }}-${{ hashFiles('mlos_*/pyproject.toml') }}-${{ hashFiles('mlos_*/setup.py') }}-${{ env.cache_cur_date }} + + - name: Log some environment variables for debugging + run: | + set -x + printenv + echo "cache_cur_date: $cache_cur_date" + echo "cache_cur_hour: $cache_cur_hour" + echo "cache_prev_hour: $cache_prev_hour" + echo "cache-hit: ${{ steps.restore-conda-cache.outputs.cache-hit }}" + + - name: Update and configure conda + run: | + set -x + conda config --set channel_priority strict + conda update -v -y -n base -c defaults --all + + # Try and speed up the pipeline by using a faster solver: + - name: Install and default to mamba solver + run: | + set -x + conda install -v -y -n base conda-libmamba-solver + # Try to set either of the configs for the solver. 
+          conda config --set experimental_solver libmamba || true
+          conda config --set solver libmamba || true
+          echo "CONDA_EXPERIMENTAL_SOLVER=libmamba" >> $GITHUB_ENV
+          echo "EXPERIMENTAL_SOLVER=libmamba" >> $GITHUB_ENV
+
+      - name: Create/update mlos conda environment
+        run: make CONDA_ENV_NAME=$CONDA_ENV_NAME CONDA_INFO_LEVEL=-v conda-env
+
+      - name: Log conda info
+        run: |
+          conda info
+          conda config --show
+          conda config --show-sources
+          conda list -n $CONDA_ENV_NAME
+          ls -l $CONDA_DIR/envs/$CONDA_ENV_NAME/lib/python*/site-packages/
+          conda run -n $CONDA_ENV_NAME pip cache dir
+          conda run -n $CONDA_ENV_NAME pip cache info
+
+      - name: Verify expected version of python in conda env
+        if: ${{ matrix.python_version == '' }}
+        timeout-minutes: 2
+        run: |
+          set -x
+          conda run -n mlos python -c \
+            'from sys import version_info as vers; assert (vers.major, vers.minor) == (3, 13), f"Unexpected python version: {vers}"'
+
+      # This is more about code cleanliness, which is a dev concern, not a
+      # functionality one, and the rules for that change between python versions,
+      # so only do this for the default version used in the devcontainer.
+      #- name: Run lint checks
+      #  run: make CONDA_ENV_NAME=$CONDA_ENV_NAME check
+
+      # Only run the coverage checks on the devcontainer job.
+      - name: Run tests
+        run: make CONDA_ENV_NAME=$CONDA_ENV_NAME SKIP_COVERAGE=true test
+
+      - name: Generate and test binary distribution files
+        run: make CONDA_ENV_NAME=$CONDA_ENV_NAME CONDA_INFO_LEVEL=-v dist dist-test
+
+
+  MacOSDevContainerBuildTest:
+    name: MacOS DevContainer Build/Test
+    runs-on: macos-latest
+
+    # Skip this for now.
+    # Note: no linux platform build support due to lack of nested virtualization on M series chips.
+    # https://github.com/orgs/community/discussions/69211#discussioncomment-7242133
+    if: false
+
+    permissions:
+      contents: read
+
+    steps:
+      - uses: actions/checkout@v4
+
+      - name: Install docker
+        timeout-minutes: 15
+        run: |
+          # Install the docker desktop app.
+          brew install --cask docker
+          brew install docker-buildx
+          brew install jq
+          # Make sure the cli knows where to find the buildx plugin.
+          mkdir -p ~/.docker
+          (cat ~/.docker/config.json 2>/dev/null || echo "{}") \
+            | jq '.cliPluginsExtraDirs = ((.cliPluginsExtraDirs // []) + ["/opt/homebrew/lib/docker-cli-plugins"])' \
+            | tee ~/.docker/config.json.new
+          mv ~/.docker/config.json.new ~/.docker/config.json
+          cat ~/.docker/config.json
+          # Restart docker service.
+          ps auxwww | grep -i docker || true
+          osascript -e 'quit app "Docker"' || true; open -a Docker; while [ -z "$(docker info 2> /dev/null )" ]; do printf "."; sleep 1; done; echo ""
+
+      - name: Check docker
+        run: |
+          # Check and see if it's running.
+          ps auxwww | grep -i docker || true
+          ls -l /var/run/docker.sock
+          # Dump some debug info.
+          docker --version
+          docker info
+          docker system info || true
+          docker ps
+          DOCKER_BUILDKIT=1 docker builder ls
+
+      - name: Build the devcontainer
+        run: |
+          .devcontainer/build/build-devcontainer.sh
+
+      - name: Basic test of the devcontainer
+        run: |
+          .devcontainer/scripts/run-devcontainer.sh conda run -n mlos python --version | grep "Python 3.13"
diff --git a/.github/workflows/markdown-link-check.yml b/.github/workflows/markdown-link-check.yml
index 5edfe706bb0..ebf9ed24454 100644
--- a/.github/workflows/markdown-link-check.yml
+++ b/.github/workflows/markdown-link-check.yml
@@ -19,6 +19,7 @@ concurrency:
 jobs:
   # Check in-repo markdown links
   markdown-link-check:
+    name: Check Markdown links
     runs-on: ubuntu-latest
     permissions:
       contents: read
diff --git a/.github/workflows/windows.yml b/.github/workflows/windows.yml
index 88f6bb15ddf..41adb19cfa9 100644
--- a/.github/workflows/windows.yml
+++ b/.github/workflows/windows.yml
@@ -20,7 +20,9 @@ concurrency:
   cancel-in-progress: true
 
 jobs:
-  Windows:
+  WindowsCondaBuildTest:
+    name: Windows Build/Test with Conda
+
     runs-on: windows-latest
 
     permissions:
@@ -123,3 +125,35 @@ jobs:
       - name: Generate and test binary distribution files
         run: |
           .github/workflows/build-dist-test.ps1
+
+
+  WindowsDevContainerBuildTest:
+    name: Windows DevContainer Build/Test
+    # Skipped for now since building Linux containers on Windows GitHub Actions runners is not yet supported.
+    if: false
+
+    runs-on: windows-latest
+
+    defaults:
+      run:
+        shell: pwsh
+
+    permissions:
+      contents: read
+
+    steps:
+      - uses: actions/checkout@v4
+
+      - name: Check docker
+        run: |
+          docker info
+          docker builder ls | Select-String linux  # currently returns '' (not yet supported)
+          docker builder inspect
+
+      - name: Build the devcontainer
+        run: |
+          .devcontainer/build/build-devcontainer.ps1
+
+      - name: Basic test of the devcontainer
+        run: |
+          .devcontainer/scripts/run-devcontainer.ps1 conda run -n mlos python --version
diff --git a/Makefile b/Makefile
index 672a1ce7c22..93bd4fd0be1 100644
--- a/Makefile
+++ b/Makefile
@@ -549,7 +549,7 @@ dist-test-env: dist build/dist-test-env.$(PYTHON_VERSION).build-stamp
 
 build/dist-test-env.$(PYTHON_VERSION).build-stamp: build/conda-env.${CONDA_ENV_NAME}.build-stamp
 # Use the same version of python as the one we used to build the wheels.
-build/dist-test-env.$(PYTHON_VERSION).build-stamp: PYTHON_VERS_REQ=$(shell conda list -n ${CONDA_ENV_NAME} | egrep '^python\s+' | sed -r -e 's/^python\s+//' | cut -d' ' -f1 | cut -d. -f1-2)
+build/dist-test-env.$(PYTHON_VERSION).build-stamp: PYTHON_VERS_REQ=$(shell conda list -n ${CONDA_ENV_NAME} | egrep '^python\s+' | sed -r -e 's/^python[ \t]+//' | cut -d' ' -f1 | cut -d. -f1-2)
 build/dist-test-env.$(PYTHON_VERSION).build-stamp: mlos_core/dist/tmp/mlos_core-latest-py3-none-any.whl
 build/dist-test-env.$(PYTHON_VERSION).build-stamp: mlos_bench/dist/tmp/mlos_bench-latest-py3-none-any.whl
 build/dist-test-env.$(PYTHON_VERSION).build-stamp: mlos_viz/dist/tmp/mlos_viz-latest-py3-none-any.whl
diff --git a/README.md b/README.md
index 31cc6da0213..2c1160dc938 100644
--- a/README.md
+++ b/README.md
@@ -2,6 +2,7 @@
 
 [![MLOS DevContainer](https://github.com/microsoft/MLOS/actions/workflows/devcontainer.yml/badge.svg)](https://github.com/microsoft/MLOS/actions/workflows/devcontainer.yml)
 [![MLOS Linux](https://github.com/microsoft/MLOS/actions/workflows/linux.yml/badge.svg)](https://github.com/microsoft/MLOS/actions/workflows/linux.yml)
+[![MLOS MacOS](https://github.com/microsoft/MLOS/actions/workflows/macos.yml/badge.svg)](https://github.com/microsoft/MLOS/actions/workflows/macos.yml)
 [![MLOS Windows](https://github.com/microsoft/MLOS/actions/workflows/windows.yml/badge.svg)](https://github.com/microsoft/MLOS/actions/workflows/windows.yml)
 [![Code Coverage Status](https://microsoft.github.io/MLOS/_images/coverage.svg)](https://microsoft.github.io/MLOS/htmlcov/index.html)
 
diff --git a/mlos_bench/mlos_bench/environments/local/local_env.py b/mlos_bench/mlos_bench/environments/local/local_env.py
index 989ae960398..754cdd34065 100644
--- a/mlos_bench/mlos_bench/environments/local/local_env.py
+++ b/mlos_bench/mlos_bench/environments/local/local_env.py
@@ -209,7 +209,7 @@ def run(self) -> Tuple[Status, datetime, Optional[Dict[str, TunableValue]]]:
             )
             data = pandas.DataFrame([data.value.to_list()], columns=data.metric.to_list())
             # Try to convert string metrics to numbers.
-            data = data.apply(  # type: ignore[assignment]  # (false positive)
+            data = data.apply(
                 pandas.to_numeric,
                 errors="coerce",
             ).fillna(data)
diff --git a/mlos_bench/mlos_bench/services/remote/azure/azure_fileshare.py b/mlos_bench/mlos_bench/services/remote/azure/azure_fileshare.py
index 6fa447da225..29a3829a136 100644
--- a/mlos_bench/mlos_bench/services/remote/azure/azure_fileshare.py
+++ b/mlos_bench/mlos_bench/services/remote/azure/azure_fileshare.py
@@ -110,7 +110,7 @@ def download(
             data = file_client.download_file()
             with open(local_path, "wb") as output_file:
                 _LOG.debug("Download file: %s -> %s", remote_path, local_path)
-                data.readinto(output_file)  # type: ignore[no-untyped-call]
+                data.readinto(output_file)
         except ResourceNotFoundError as ex:
             # Translate into non-Azure exception:
             raise FileNotFoundError(f"Cannot download: {remote_path}") from ex
diff --git a/mlos_bench/mlos_bench/storage/sql/common.py b/mlos_bench/mlos_bench/storage/sql/common.py
index 3b0c6c31fb0..918ed54ff2a 100644
--- a/mlos_bench/mlos_bench/storage/sql/common.py
+++ b/mlos_bench/mlos_bench/storage/sql/common.py
@@ -191,7 +191,7 @@ def get_results_df(
         columns="param",
         values="value",
     )
-    configs_df = configs_df.apply(  # type: ignore[assignment]  # (fp)
+    configs_df = configs_df.apply(
         pandas.to_numeric,
         errors="coerce",
     ).fillna(configs_df)
@@ -237,7 +237,7 @@ def get_results_df(
         columns="metric",
         values="value",
     )
-    results_df = results_df.apply(  # type: ignore[assignment]  # (fp)
+    results_df = results_df.apply(
         pandas.to_numeric,
         errors="coerce",
     ).fillna(results_df)
diff --git a/mlos_bench/mlos_bench/tests/conftest.py b/mlos_bench/mlos_bench/tests/conftest.py
index bc0a8aa1897..2aa2138dabf 100644
--- a/mlos_bench/mlos_bench/tests/conftest.py
+++ b/mlos_bench/mlos_bench/tests/conftest.py
@@ -5,7 +5,8 @@
 """Common fixtures for mock TunableGroups and Environment objects."""
 
 import os
-from typing import Any, Generator, List
+import sys
+from typing import Any, Generator, List, Union
 
 import pytest
 from fasteners import InterProcessLock, InterProcessReaderWriterLock
@@ -58,6 +59,17 @@ def mock_env_no_noise(tunable_groups: TunableGroups) -> MockEnv:
 
 # Fixtures to configure the pytest-docker plugin.
 
+@pytest.fixture(scope="session")
+def docker_setup() -> Union[List[str], str]:
+    """Setup for docker services."""
+    if sys.platform == "darwin" or os.environ.get("HOST_OSTYPE", "").lower().startswith("darwin"):
+        # Workaround an oddity on macOS where the "docker-compose up"
+        # command always recreates the containers.
+        # That leads to races when multiple workers are trying to
+        # start and use the same services.
+        return ["up --build -d --no-recreate"]
+    else:
+        return ["up --build -d"]
 
 
 @pytest.fixture(scope="session")
diff --git a/mlos_bench/mlos_bench/tests/optimizers/mlos_core_opt_smac_test.py b/mlos_bench/mlos_bench/tests/optimizers/mlos_core_opt_smac_test.py
index 23aa56e48cb..5f96b552e5b 100644
--- a/mlos_bench/mlos_bench/tests/optimizers/mlos_core_opt_smac_test.py
+++ b/mlos_bench/mlos_bench/tests/optimizers/mlos_core_opt_smac_test.py
@@ -70,9 +70,11 @@ def test_init_mlos_core_smac_relative_output_directory(tunable_groups: TunableGr
     """Test relative path output directory initialization of mlos_core SMAC
     optimizer.
     """
+    uid = os.environ.get("PYTEST_XDIST_WORKER", "")
+    output_dir = _OUTPUT_DIR + "." + uid
     test_opt_config = {
         "optimizer_type": "SMAC",
-        "output_directory": _OUTPUT_DIR,
+        "output_directory": output_dir,
         "seed": SEED,
     }
     opt = MlosCoreOptimizer(tunable_groups, test_opt_config)
@@ -82,7 +84,7 @@ def test_init_mlos_core_smac_relative_output_directory(tunable_groups: TunableGr
     assert path_join(str(opt._opt.base_optimizer.scenario.output_directory)).startswith(
         path_join(os.getcwd(), str(test_opt_config["output_directory"]))
     )
-    shutil.rmtree(_OUTPUT_DIR)
+    shutil.rmtree(output_dir)
 
 
 def test_init_mlos_core_smac_relative_output_directory_with_run_name(
@@ -91,9 +93,11 @@ def test_init_mlos_core_smac_relative_output_directory_with_run_name(
     """Test relative path output directory initialization of mlos_core SMAC
     optimizer.
     """
+    uid = os.environ.get("PYTEST_XDIST_WORKER", "")
+    output_dir = _OUTPUT_DIR + "." + uid
     test_opt_config = {
         "optimizer_type": "SMAC",
-        "output_directory": _OUTPUT_DIR,
+        "output_directory": output_dir,
         "run_name": "test_run",
         "seed": SEED,
     }
@@ -106,7 +110,7 @@ def test_init_mlos_core_smac_relative_output_directory_with_run_name(
             os.getcwd(), str(test_opt_config["output_directory"]), str(test_opt_config["run_name"])
         )
     )
-    shutil.rmtree(_OUTPUT_DIR)
+    shutil.rmtree(output_dir)
 
 
 def test_init_mlos_core_smac_relative_output_directory_with_experiment_id(
diff --git a/mlos_bench/mlos_bench/tests/services/remote/ssh/test_ssh_service.py b/mlos_bench/mlos_bench/tests/services/remote/ssh/test_ssh_service.py
index 5b335477a98..d06516b3fe3 100644
--- a/mlos_bench/mlos_bench/tests/services/remote/ssh/test_ssh_service.py
+++ b/mlos_bench/mlos_bench/tests/services/remote/ssh/test_ssh_service.py
@@ -132,4 +132,4 @@ def test_ssh_service_context_handler() -> None:
 
 if __name__ == "__main__":
     # For debugging in Windows which has issues with pytest detection in vscode.
- pytest.main(["-n1", "--dist=no", "-k", "test_ssh_service_background_thread"]) + pytest.main(["-n0", "--dist=no", "-k", "test_ssh_service_context_handler"]) diff --git a/mlos_bench/mlos_bench/tests/services/remote/ssh/up.sh b/mlos_bench/mlos_bench/tests/services/remote/ssh/up.sh index 42bc984e6e5..f0f152975dc 100755 --- a/mlos_bench/mlos_bench/tests/services/remote/ssh/up.sh +++ b/mlos_bench/mlos_bench/tests/services/remote/ssh/up.sh @@ -28,3 +28,5 @@ echo "OK: private key available at '$scriptdir/id_rsa'. Connect to the ssh-serve docker compose -p "$PROJECT_NAME" port ssh-server ${PORT:-2254} | cut -d: -f2 echo "INFO: And this port for the alt-server container:" docker compose -p "$PROJECT_NAME" port alt-server ${PORT:-2254} | cut -d: -f2 +echo "INFO: And this port for the reboot-server container:" +docker compose -p "$PROJECT_NAME" port reboot-server ${PORT:-2254} | cut -d: -f2