Skip to content

Commit fef7892

Browse files
committed
remove the Flip augmentation, which does not exist in the registry
1 parent 9e5de10 commit fef7892

File tree

3 files changed

+8
-8
lines changed

3 files changed

+8
-8
lines changed

luxonis_ml/data/augmentations/albumentations_engine.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -243,13 +243,12 @@ def apply_to_array(
243243

244244
def _check_augmentation_warnings(
245245
self, config_item: dict[str, Any], available_target_types: set
246-
) -> bool:
246+
) -> None:
247247
augmentation_name = config_item["name"]
248248

249249
if "keypoints" in available_target_types and augmentation_name in [
250250
"HorizontalFlip",
251251
"VerticalFlip",
252-
"Flip",
253252
]:
254253
logger.warning(
255254
f"Using '{augmentation_name}' with keypoints."
@@ -359,10 +358,11 @@ def __init__(
359358
available_target_types = set(self.targets.values())
360359

361360
for config_item in config:
361+
cfg = AlbumentationConfigItem(**config_item)
362+
362363
self._check_augmentation_warnings(
363364
config_item, available_target_types
364365
)
365-
cfg = AlbumentationConfigItem(**config_item)
366366

367367
transform = self.create_transformation(cfg)
368368

tests/conftest.py

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -125,7 +125,6 @@ def augmentation_config() -> list[Params]:
125125
{"name": "MixUp", "params": {"p": 1.0}},
126126
{"name": "Defocus", "params": {"p": 1.0}},
127127
{"name": "Sharpen", "params": {"p": 1.0}},
128-
{"name": "Flip", "params": {"p": 1.0}},
129128
{"name": "RandomRotate90", "params": {"p": 1.0}},
130129
]
131130

tests/test_data/test_augmentations/test_special.py

Lines changed: 5 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -39,9 +39,6 @@ def test_skip_augmentations():
3939
{
4040
"name": "Perspective",
4141
},
42-
{
43-
"name": "Flip",
44-
},
4542
{
4643
"name": "HorizontalFlip",
4744
},
@@ -87,10 +84,14 @@ def test_skip_augmentations():
8784
batched_transform_names = [
8885
t.__class__.__name__ for t in augmentations.batch_transform.transforms
8986
]
90-
87+
print(spatial_transform_names)
9188
assert spatial_transform_names == [
9289
"Perspective",
9390
"Lambda",
91+
"HorizontalFlip",
92+
"Lambda",
93+
"VerticalFlip",
94+
"Lambda",
9495
"Rotate",
9596
"Lambda",
9697
]

0 commit comments

Comments (0)