Skip to content

Commit 90bbb0e

Browse files
committed
chore: Add MinIO support for S3 snapshot tests
1 parent 484fefc commit 90bbb0e

File tree

7 files changed

+168
-0
lines changed

7 files changed

+168
-0
lines changed

.github/actions/regression-tests/action.yml

Lines changed: 31 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -37,6 +37,11 @@ inputs:
3737
default: ""
3838
required: false
3939
type: string
40+
with-s3:
41+
required: false
42+
type: string
43+
default: 'false'
44+
description: "Whether the build has S3 (AWS) support"
4045

4146
runs:
4247
using: "composite"
@@ -74,6 +79,32 @@ runs:
7479
echo "===================After freeing up space ============================================"
7580
df -h
7681
82+
- name: Run S3 snapshot tests with MinIO
83+
if: inputs.with-s3 == 'true'
84+
shell: bash
85+
run: |
86+
cd ${GITHUB_WORKSPACE}/tests
87+
pip3 install -r dragonfly/requirements.txt
88+
89+
export DRAGONFLY_PATH="${GITHUB_WORKSPACE}/${{inputs.build-folder-name}}/${{inputs.dfly-executable}}"
90+
91+
# Download MinIO binary (atomic: download to .tmp, then rename)
92+
ARCH=$(uname -m)
93+
case "$ARCH" in
94+
x86_64) ARCH="amd64" ;;
95+
aarch64) ARCH="arm64" ;;
96+
*) echo "Unsupported MinIO architecture: $ARCH"; exit 1 ;;
97+
esac
98+
MINIO_DIR="$HOME/.cache/dragonfly-tests"
99+
mkdir -p "$MINIO_DIR"
100+
if [ ! -f "$MINIO_DIR/minio" ]; then
101+
curl -fsSL "https://dl.min.io/server/minio/release/linux-${ARCH}/minio" -o "$MINIO_DIR/minio.tmp"
102+
chmod +x "$MINIO_DIR/minio.tmp"
103+
mv "$MINIO_DIR/minio.tmp" "$MINIO_DIR/minio"
104+
fi
105+
106+
S3_ENDPOINT=http://localhost:9000 timeout 10m pytest -k "s3" --timeout=300 --color=yes dragonfly/snapshot_test.py --log-cli-level=INFO -v
107+
77108
- name: Run PyTests
78109
id: main
79110
shell: bash

.github/workflows/epoll-regression-tests.yml

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -64,6 +64,7 @@ jobs:
6464
build-folder-name: build
6565
filter: ${{ matrix.build-type == 'Release' && 'not empty' || 'not opt_only' }}
6666
s3-bucket: ${{ secrets.S3_REGTEST_BUCKET }}
67+
with-s3: 'true'
6768
# Chain ternary operator of the form (which can be nested)
6869
# (expression == condition && <true expression> || <false expression>)
6970
epoll: ${{ matrix.proactor == 'Epoll' && 'epoll' || 'iouring' }}

.github/workflows/ioloop-v2-regtests.yml

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -56,6 +56,7 @@ jobs:
5656
aws-access-key-id: ${{ secrets.AWS_S3_ACCESS_KEY }}
5757
aws-secret-access-key: ${{ secrets.AWS_S3_ACCESS_SECRET }}
5858
s3-bucket: ${{ secrets.S3_REGTEST_BUCKET }}
59+
with-s3: 'true'
5960
df-arg: "experimental_io_loop_v2"
6061

6162
- name: Upload logs on failure

.github/workflows/regression-tests.yml

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -64,6 +64,7 @@ jobs:
6464
build-folder-name: build
6565
filter: ${{ matrix.build-type == 'Release' && 'not debug_only' || 'not opt_only' }}
6666
s3-bucket: ${{ secrets.S3_REGTEST_BUCKET }}
67+
with-s3: 'true'
6768

6869
- name: Upload logs on failure
6970
if: failure()

.github/workflows/release.yml

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -111,6 +111,7 @@ jobs:
111111
gspace-secret: ${{ secrets.GSPACES_BOT_DF_BUILD }}
112112
build-folder-name: ${{ env.RELEASE_DIR }}
113113
filter: 'not debug_only'
114+
with-s3: 'true'
114115
- name: Save artifacts
115116
run: |
116117
# place all artifacts at the same location

tests/dragonfly/conftest.py

Lines changed: 117 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -35,14 +35,131 @@
3535
LAST_LOGS = "/tmp/last_test_log_dir.txt"
3636

3737

38+
def _download_minio_binary(dest: Path):
    """Download the MinIO server binary to *dest* if not already cached.

    The file is written to a temporary sibling first and then renamed, so an
    interrupted download never leaves a corrupt binary at *dest* (rename is
    atomic on POSIX filesystems).

    Args:
        dest: target path for the ``minio`` executable.

    Raises:
        urllib.error.URLError: if the download fails or times out.
    """
    import platform
    import shutil
    import urllib.request

    system = platform.system().lower()
    machine = platform.machine()
    # MinIO publishes builds under Go-style architecture names; unknown
    # architectures are passed through unchanged (the download then 404s).
    arch = {"x86_64": "amd64", "aarch64": "arm64", "arm64": "arm64"}.get(machine, machine)
    url = f"https://dl.min.io/server/minio/release/{system}-{arch}/minio"
    logging.info("Downloading MinIO binary from %s", url)
    tmp_dest = dest.with_suffix(".tmp")
    try:
        # urlretrieve has no timeout, so a stalled connection would hang the
        # whole test session; stream through urlopen with a timeout instead.
        with urllib.request.urlopen(url, timeout=60) as resp, open(tmp_dest, "wb") as out:
            shutil.copyfileobj(resp, out)
        tmp_dest.chmod(0o755)
        tmp_dest.rename(dest)
    except Exception:
        # Never leave a partial/corrupt download behind.
        tmp_dest.unlink(missing_ok=True)
        raise
61+
62+
63+
def _start_minio_server(endpoint):
    """Start a local MinIO server and point the S3 test env vars at it.

    Downloads the MinIO binary on first use (cached under
    ``~/.cache/dragonfly-tests``), starts it as a subprocess serving a fresh
    temporary data directory, waits until the server accepts S3 requests by
    creating the test bucket, and finally exports the credentials/bucket/
    endpoint environment variables that the S3 snapshot tests read.

    Args:
        endpoint: S3 endpoint URL, e.g. ``http://localhost:9000``; only the
            port is used to pick the listen address (defaults to 9000).

    Returns:
        Tuple ``(proc, data_dir)`` — the MinIO ``subprocess.Popen`` handle and
        the temporary data directory ``Path``; the caller owns cleanup of both.

    Raises:
        RuntimeError: if the MinIO process exits early or does not become
            ready within ~30 seconds (its log output is included).
    """
    import boto3
    from urllib.parse import urlparse

    cache_dir = Path.home() / ".cache" / "dragonfly-tests"
    cache_dir.mkdir(parents=True, exist_ok=True)
    minio_bin = cache_dir / "minio"

    if not minio_bin.exists():
        _download_minio_binary(minio_bin)

    # Listen on all interfaces at the endpoint's port (MinIO ":PORT" syntax).
    parsed = urlparse(endpoint)
    address = f":{parsed.port or 9000}"

    data_dir = Path(mkdtemp(prefix="minio_data_"))
    minio_log = data_dir / "minio.log"
    # MinIO's stdout+stderr are captured so they can be attached to error
    # messages below; the handle is closed on every exit path.
    log_file = open(minio_log, "w")
    proc = subprocess.Popen(
        [str(minio_bin), "server", str(data_dir), "--address", address],
        env={**os.environ, "MINIO_ROOT_USER": "minioadmin", "MINIO_ROOT_PASSWORD": "minioadmin"},
        stdout=log_file,
        stderr=subprocess.STDOUT,
    )

    bucket = "dragonfly-test"
    s3 = boto3.client(
        "s3",
        endpoint_url=endpoint,
        aws_access_key_id="minioadmin",
        aws_secret_access_key="minioadmin",
        region_name="us-east-1",
    )

    # Readiness probe: creating the bucket doubles as the health check.
    # Retry for up to ~30s; any exception before then is treated as
    # "server not up yet" unless the process has already died.
    for attempt in range(30):
        try:
            s3.create_bucket(Bucket=bucket)
            break
        except Exception:
            if proc.poll() is not None:
                # Process exited — surface its log output instead of retrying.
                log_file.close()
                logs = minio_log.read_text()
                shutil.rmtree(data_dir, ignore_errors=True)
                raise RuntimeError(
                    f"MinIO process exited with code {proc.returncode}.\nLogs:\n{logs}"
                )
            time.sleep(1)
    else:
        # for-else: all attempts exhausted without a successful create_bucket.
        proc.terminate()
        log_file.close()
        logs = minio_log.read_text()
        shutil.rmtree(data_dir, ignore_errors=True)
        raise RuntimeError(f"MinIO did not become ready in time.\nLogs:\n{logs}")

    log_file.close()
    # Exported so Dragonfly instances and the snapshot tests pick up the
    # local MinIO instead of real AWS.
    os.environ["DRAGONFLY_S3_BUCKET"] = bucket
    os.environ["AWS_ACCESS_KEY_ID"] = "minioadmin"
    os.environ["AWS_SECRET_ACCESS_KEY"] = "minioadmin"
    os.environ["AWS_ENDPOINT_URL"] = endpoint

    return proc, data_dir
124+
125+
126+
# Handle to the MinIO subprocess started in pytest_configure (None when the
# S3_ENDPOINT env var is unset); torn down in pytest_unconfigure.
_minio_proc = None
# Temporary MinIO data directory, removed in pytest_unconfigure.
_minio_data_dir = None
128+
129+
38130
# runs on pytest start
def pytest_configure(config):
    """Pytest startup hook: wipe stale test artifacts, then boot MinIO when
    an S3_ENDPOINT is configured."""
    global _minio_proc, _minio_data_dir

    # clean everything left over from a previous run
    for stale in (FAILED_PATH, BASE_LOG_DIR):
        if os.path.exists(stale):
            shutil.rmtree(stale)

    # Start MinIO if S3_ENDPOINT is set (must happen before test collection
    # so that @pytest.mark.skipif checking DRAGONFLY_S3_BUCKET sees it)
    endpoint = os.environ.get("S3_ENDPOINT")
    if endpoint:
        _minio_proc, _minio_data_dir = _start_minio_server(endpoint)
145+
146+
147+
def pytest_unconfigure(config):
    """Pytest shutdown hook: terminate the MinIO server started in
    pytest_configure and delete its temporary data directory."""
    global _minio_proc, _minio_data_dir

    if _minio_proc is not None:
        proc, _minio_proc = _minio_proc, None
        proc.terminate()
        try:
            proc.wait(timeout=10)
        except subprocess.TimeoutExpired:
            # Graceful shutdown failed — force-kill and reap the process.
            proc.kill()
            proc.wait()

    if _minio_data_dir is not None:
        data_dir, _minio_data_dir = _minio_data_dir, None
        shutil.rmtree(data_dir, ignore_errors=True)
162+
46163

47164
@pytest.fixture(scope="class")
48165
def df_log_dir(request):

tests/dragonfly/instance.py

Lines changed: 16 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -439,6 +439,22 @@ def create(self, existing_port=None, path=None, version=100, **kwargs) -> DflyIn
439439
if version >= 1.26:
440440
args.setdefault("fiber_safety_margin=4096")
441441

442+
# When a custom S3 endpoint is configured (e.g. MinIO), pass it to Dragonfly
443+
s3_endpoint = os.environ.get("S3_ENDPOINT")
444+
if s3_endpoint:
445+
from urllib.parse import urlparse
446+
447+
# Normalize scheme-less values (e.g. "localhost:9000") so urlparse
448+
# correctly populates hostname/port instead of treating it as a path.
449+
to_parse = s3_endpoint if "://" in s3_endpoint else "http://" + s3_endpoint
450+
parsed = urlparse(to_parse)
451+
endpoint_host = parsed.hostname or ""
452+
if parsed.port:
453+
endpoint_host = f"{endpoint_host}:{parsed.port}"
454+
if endpoint_host:
455+
args.setdefault("s3_endpoint", endpoint_host)
456+
args.setdefault("s3_use_https", "false" if parsed.scheme == "http" else "true")
457+
442458
for k, v in args.items():
443459
args[k] = v.format(**self.params.env) if isinstance(v, str) else v
444460

0 commit comments

Comments
 (0)