Skip to content

Commit 1b78428

Browse files
committed
test coverage: DataImportCron source PVC UpToDate
1 parent 429c1d7 commit 1b78428

File tree

6 files changed

+168
-39
lines changed

6 files changed

+168
-39
lines changed

tests/conftest.py

+8
Original file line numberDiff line numberDiff line change
@@ -2902,3 +2902,11 @@ def nmstate_namespace(admin_client):
29022902
@pytest.fixture()
29032903
def ipv6_single_stack_cluster(ipv4_supported_cluster, ipv6_supported_cluster):
29042904
return ipv6_supported_cluster and not ipv4_supported_cluster
2905+
2906+
2907+
@pytest.fixture()
def data_import_cron_namespace(unprivileged_client):
    # Dedicated namespace for DataImportCron tests that use the default
    # storage class. `yield from` delegates to the create_ns generator so
    # both its setup and its teardown run under pytest's fixture lifecycle.
    yield from create_ns(
        unprivileged_client=unprivileged_client,
        name="data-import-cron-using-default-sc",
    )

tests/infrastructure/golden_images/update_boot_source/test_ssp_data_import_crons.py

+6-7
Original file line numberDiff line numberDiff line change
@@ -21,7 +21,6 @@
2121
)
2222
from utilities.constants import BIND_IMMEDIATE_ANNOTATION, TIMEOUT_1MIN, TIMEOUT_2MIN, TIMEOUT_5MIN, TIMEOUT_10MIN
2323
from utilities.hco import ResourceEditorValidateHCOReconcile
24-
from utilities.infra import create_ns
2524
from utilities.ssp import (
2625
get_data_import_crons,
2726
matrix_auto_boot_data_import_cron_prefixes,
@@ -168,12 +167,12 @@ def vm_from_custom_data_import_cron(custom_data_source_scope_function, namespace
168167
yield vm
169168

170169

171-
@pytest.fixture()
172-
def data_import_cron_namespace(unprivileged_client):
173-
yield from create_ns(
174-
unprivileged_client=unprivileged_client,
175-
name="data-import-cron-using-default-sc",
176-
)
170+
# NOTE(review): dead code — this fixture was moved to tests/conftest.py
# (as data_import_cron_namespace). Delete this commented-out copy instead
# of keeping it in the tree; version control preserves the history.
# @pytest.fixture()
# def data_import_cron_namespace(unprivileged_client):
#     yield from create_ns(
#         unprivileged_client=unprivileged_client,
#         name="data-import-cron-using-default-sc",
#     )
177176

178177

179178
@pytest.fixture()

tests/storage/cdi_upload/test_upload_virtctl.py

+24-26
Original file line numberDiff line numberDiff line change
@@ -24,10 +24,8 @@
2424
ErrorMsg,
2525
check_disk_count_in_vm,
2626
check_upload_virtctl_result,
27-
create_dummy_first_consumer_pod,
2827
create_dv,
2928
get_downloaded_artifact,
30-
sc_is_hpp_with_immediate_volume_binding,
3129
sc_volume_binding_mode_is_wffc,
3230
virtctl_upload_dv,
3331
)
@@ -277,30 +275,30 @@ def test_virtctl_image_upload_with_exist_dv(download_image, namespace, storage_c
277275
check_disk_count_in_vm(vm=vm)
278276

279277

280-
@pytest.fixture()
281-
def empty_pvc(
282-
namespace,
283-
storage_class_matrix__module__,
284-
storage_class_name_scope_module,
285-
worker_node1,
286-
):
287-
with PersistentVolumeClaim(
288-
name="empty-pvc",
289-
namespace=namespace.name,
290-
storage_class=storage_class_name_scope_module,
291-
volume_mode=storage_class_matrix__module__[storage_class_name_scope_module]["volume_mode"],
292-
accessmodes=storage_class_matrix__module__[storage_class_name_scope_module]["access_mode"],
293-
size="1Gi",
294-
hostpath_node=worker_node1.name
295-
if sc_is_hpp_with_immediate_volume_binding(sc=storage_class_name_scope_module)
296-
else None,
297-
) as pvc:
298-
if sc_volume_binding_mode_is_wffc(sc=storage_class_name_scope_module):
299-
# For PVC to bind on WFFC, it must be consumed
300-
# (this was previously solved by hard coding hostpath_node at all times)
301-
create_dummy_first_consumer_pod(pvc=pvc)
302-
pvc.wait_for_status(status=PersistentVolumeClaim.Status.BOUND, timeout=60)
303-
yield pvc
278+
# NOTE(review): dead code — this fixture was moved to tests/storage/conftest.py
# (as empty_pvc). Delete this commented-out copy instead of keeping it;
# version control preserves the history.
# @pytest.fixture()
# def empty_pvc(
#     namespace,
#     storage_class_matrix__module__,
#     storage_class_name_scope_module,
#     worker_node1,
# ):
#     with PersistentVolumeClaim(
#         name="empty-pvc",
#         namespace=namespace.name,
#         storage_class=storage_class_name_scope_module,
#         volume_mode=storage_class_matrix__module__[storage_class_name_scope_module]["volume_mode"],
#         accessmodes=storage_class_matrix__module__[storage_class_name_scope_module]["access_mode"],
#         size="1Gi",
#         hostpath_node=worker_node1.name
#         if sc_is_hpp_with_immediate_volume_binding(sc=storage_class_name_scope_module)
#         else None,
#     ) as pvc:
#         if sc_volume_binding_mode_is_wffc(sc=storage_class_name_scope_module):
#             # For PVC to bind on WFFC, it must be consumed
#             # (this was previously solved by hard coding hostpath_node at all times)
#             create_dummy_first_consumer_pod(pvc=pvc)
#         pvc.wait_for_status(status=PersistentVolumeClaim.Status.BOUND, timeout=60)
#         yield pvc
304302

305303

306304
@pytest.mark.sno

tests/storage/conftest.py

+29
Original file line numberDiff line numberDiff line change
@@ -15,6 +15,7 @@
1515
from ocp_resources.config_map import ConfigMap
1616
from ocp_resources.csi_driver import CSIDriver
1717
from ocp_resources.deployment import Deployment
18+
from ocp_resources.persistent_volume_claim import PersistentVolumeClaim
1819
from ocp_resources.resource import ResourceEditor
1920
from ocp_resources.route import Route
2021
from ocp_resources.secret import Secret
@@ -59,8 +60,10 @@
5960
)
6061
from utilities.storage import (
6162
create_cirros_dv_for_snapshot_dict,
63+
create_dummy_first_consumer_pod,
6264
data_volume,
6365
get_downloaded_artifact,
66+
sc_is_hpp_with_immediate_volume_binding,
6467
sc_volume_binding_mode_is_wffc,
6568
write_file,
6669
)
@@ -591,3 +594,29 @@ def storage_class_name_scope_module(storage_class_matrix__module__):
591594
@pytest.fixture(scope="session")
592595
def cluster_csi_drivers_names():
593596
yield [csi_driver.name for csi_driver in list(CSIDriver.get())]
597+
598+
599+
@pytest.fixture()
def empty_pvc(
    namespace,
    storage_class_matrix__module__,
    storage_class_name_scope_module,
    worker_node1,
):
    """Yield a bound, empty 1Gi PVC on the module-scoped storage class.

    The PVC is created with the volume/access modes declared in the storage
    class matrix; teardown (PVC deletion) is handled by the context manager.
    """
    # Hoist the repeated matrix lookup for this storage class (was duplicated
    # for volume_mode and access_mode).
    sc_config = storage_class_matrix__module__[storage_class_name_scope_module]
    with PersistentVolumeClaim(
        name="empty-pvc",
        namespace=namespace.name,
        storage_class=storage_class_name_scope_module,
        volume_mode=sc_config["volume_mode"],
        accessmodes=sc_config["access_mode"],
        size="1Gi",
        # HPP with immediate binding needs an explicit node for the local volume.
        hostpath_node=worker_node1.name
        if sc_is_hpp_with_immediate_volume_binding(sc=storage_class_name_scope_module)
        else None,
    ) as pvc:
        if sc_volume_binding_mode_is_wffc(sc=storage_class_name_scope_module):
            # For PVC to bind on WFFC, it must be consumed
            # (this was previously solved by hard coding hostpath_node at all times)
            create_dummy_first_consumer_pod(pvc=pvc)
        pvc.wait_for_status(status=PersistentVolumeClaim.Status.BOUND, timeout=60)
        yield pvc

tests/storage/test_data_import_cron.py

+73
Original file line numberDiff line numberDiff line change
@@ -15,6 +15,7 @@
1515
from ocp_resources.volume_snapshot import VolumeSnapshot
1616
from timeout_sampler import TimeoutExpiredError, TimeoutSampler
1717

18+
from tests.storage.utils import wait_for_data_import_cron_ready_and_updated
1819
from utilities.constants import (
1920
BIND_IMMEDIATE_ANNOTATION,
2021
OUTDATED,
@@ -25,6 +26,8 @@
2526
Images,
2627
)
2728
from utilities.storage import (
29+
create_dummy_first_consumer_pod,
30+
sc_volume_binding_mode_is_wffc,
2831
wait_for_succeeded_dv,
2932
wait_for_volume_snapshot_ready_to_use,
3033
)
@@ -205,6 +208,63 @@ def second_object_cleanup(
205208
resource_class(namespace=namespace.name, name=second_object_name).clean_up()
206209

207210

211+
@pytest.fixture()
def data_import_cron_pvc_source(
    unprivileged_client,
    data_import_cron_namespace,
    storage_class_with_filesystem_volume_mode,
    pvc_for_source_data_import_cron,
):
    """Yield a DataImportCron that imports from an existing PVC source.

    The cron is scheduled every minute, imports from
    pvc_for_source_data_import_cron and targets the Filesystem-volume-mode
    storage class. The fixture blocks until the cron reports UpToDate
    before yielding (wait_for_data_import_cron_ready_and_updated).
    """
    with DataImportCron(
        name="pvc-import-cron",
        namespace=data_import_cron_namespace.name,
        schedule="*/1 * * * *",
        managed_data_source="pvc-import-datasource",
        # Immediate-bind annotation so target DVs bind without a consumer.
        annotations=BIND_IMMEDIATE_ANNOTATION,
        template={
            "spec": {
                "source": {
                    "pvc": {
                        "name": pvc_for_source_data_import_cron.name,
                        "namespace": pvc_for_source_data_import_cron.namespace,
                    }
                },
                "sourceFormat": "pvc",
                "volumeMode": "Filesystem",
                "storage": {
                    "storageClassName": storage_class_with_filesystem_volume_mode,
                    "accessModes": [PersistentVolumeClaim.AccessMode.RWO],
                    "resources": {"requests": {"storage": "10Gi"}},
                },
            }
        },
    ) as data_import_cron:
        wait_for_data_import_cron_ready_and_updated(
            namespace=data_import_cron_namespace.name, name=data_import_cron.name
        )
        yield data_import_cron
247+
248+
249+
@pytest.fixture()
def pvc_for_source_data_import_cron(
    namespace, storage_class_with_filesystem_volume_mode, storage_class_name_scope_module
):
    """Yield a bound 1Gi RWO PVC to serve as a DataImportCron source.

    Bug fix: the WFFC binding-mode check previously queried
    storage_class_name_scope_module, but the PVC is created with
    storage_class_with_filesystem_volume_mode — if only the latter is
    WaitForFirstConsumer, no consumer pod was created and the BOUND wait
    timed out. Check the storage class the PVC actually uses.
    """
    with PersistentVolumeClaim(
        name="source-pvc-test",
        namespace=namespace.name,
        accessmodes=PersistentVolumeClaim.AccessMode.RWO,
        size="1Gi",
        storage_class=storage_class_with_filesystem_volume_mode,
    ) as pvc:
        if sc_volume_binding_mode_is_wffc(sc=storage_class_with_filesystem_volume_mode):
            # For PVC to bind on WFFC, it must be consumed
            # (this was previously solved by hard coding hostpath_node at all times)
            create_dummy_first_consumer_pod(pvc=pvc)
        pvc.wait_for_status(status=PersistentVolumeClaim.Status.BOUND, timeout=60)
        yield pvc
266+
267+
208268
@pytest.mark.gating
209269
@pytest.mark.polarion("CNV-7602")
210270
def test_data_import_cron_garbage_collection(
@@ -224,3 +284,16 @@ def test_data_import_cron_garbage_collection(
224284
assert resource_class(namespace=namespace.name, name=second_object_name).exists, (
225285
f"Second {resource_class.kind} '{second_object_name}' does not exist"
226286
)
287+
288+
289+
class TestDataImportCronPvcSource:
    """Tests for DataImportCron with a PVC source."""

    @pytest.mark.polarion("CNV-11842")
    def test_data_import_cron_with_pvc_source_ready(
        self, namespace, pvc_for_source_data_import_cron, data_import_cron_pvc_source
    ):
        """The cron's reported import digest must reference the source PVC's UID.

        Fix: use the captured pvc_uid in the assertion instead of re-reading
        .instance.metadata.uid — the original did two API reads, so the
        asserted value and the failure message could diverge.
        """
        pvc_uid = pvc_for_source_data_import_cron.instance.metadata.uid
        # NOTE(review): CDI's currentImports entries use a capitalized
        # `Digest` field — confirm against the installed CDI API version.
        digest_full = data_import_cron_pvc_source.instance.status.currentImports[0].Digest
        digest_uid = digest_full.split("uid:")[1]  # Extract just the UUID part
        assert pvc_uid == digest_uid, (
            f"PVC UID {pvc_uid} does not match DataImportCron Digest {digest_uid}"
        )

tests/storage/utils.py

+28-6
Original file line numberDiff line numberDiff line change
@@ -8,6 +8,7 @@
88
from ocp_resources.cluster_role import ClusterRole
99
from ocp_resources.config_map import ConfigMap
1010
from ocp_resources.daemonset import DaemonSet
11+
from ocp_resources.data_import_cron import DataImportCron
1112
from ocp_resources.datavolume import DataVolume
1213
from ocp_resources.hostpath_provisioner import HostPathProvisioner
1314
from ocp_resources.pod import Pod
@@ -23,12 +24,7 @@
2324
from pytest_testconfig import config as py_config
2425
from timeout_sampler import TimeoutExpiredError, TimeoutSampler
2526

26-
from utilities.constants import (
27-
CDI_UPLOADPROXY,
28-
TIMEOUT_2MIN,
29-
TIMEOUT_30MIN,
30-
Images,
31-
)
27+
from utilities.constants import CDI_UPLOADPROXY, TIMEOUT_2MIN, TIMEOUT_5SEC, TIMEOUT_30MIN, Images
3228
from utilities.hco import ResourceEditorValidateHCOReconcile
3329
from utilities.infra import (
3430
cleanup_artifactory_secret_and_config_map,
@@ -487,3 +483,29 @@ def clean_up_multiprocess(processes, object_list):
487483
print(f"Error killing process {process}, associated with {object_name}: {e}")
488484
finally:
489485
process.close()
486+
487+
488+
def wait_for_data_import_cron_ready_and_updated(namespace, name):
    """Wait until the DataImportCron's 'UpToDate' condition status is "True".

    Args:
        namespace (str): namespace of the DataImportCron.
        name (str): name of the DataImportCron.

    Returns:
        bool: True once the condition status is "True".

    Raises:
        TimeoutExpiredError: if the condition is not "True" within TIMEOUT_2MIN.
    """
    up_to_date_status = "UpToDate"
    LOGGER.info(f"Wait for dataimportcron '{name}' in '{namespace}' to be '{up_to_date_status}'")
    try:
        for sample in TimeoutSampler(
            wait_timeout=TIMEOUT_2MIN,
            sleep=TIMEOUT_5SEC,
            func=get_data_import_cron_conditions,
            namespace=namespace,
            name=name,
        ):
            # Bug fix: the condition status is the *string* "True"/"False"/
            # "Unknown". A bare truthiness check (`if sample:`) accepted any
            # non-empty string — including "False" — so the wait succeeded as
            # soon as the condition existed, regardless of its status.
            if sample == "True":
                return True
    except TimeoutExpiredError:
        fail_msg = f"failed to reach {up_to_date_status} status True"
        LOGGER.error(f"The dataimportcron {name} {fail_msg}")
        raise
505+
506+
507+
def get_data_import_cron_conditions(namespace, name):
    """Return the status string of the DataImportCron's 'UpToDate' condition.

    Args:
        namespace (str): namespace of the DataImportCron.
        name (str): name of the DataImportCron.

    Returns:
        str | None: the condition's status ("True"/"False"/"Unknown"), or
        None when the 'UpToDate' condition is not present yet.
    """
    data_import_cron = DataImportCron(namespace=namespace, name=name)
    # Bug fix: status.conditions may be absent (or explicitly null) right
    # after creation; the original `.get("conditions")` then returned None
    # and the for-loop raised TypeError.
    for condition in data_import_cron.instance.status.get("conditions") or []:
        if condition["type"] == "UpToDate":
            return condition["status"]

0 commit comments

Comments
 (0)