Commit 04d23fb

Remove import guard in bionemo-llm (#804)
We don't need a backup apex import in bionemo-llm's datamodule, since we can be sure that import exists in our megatron version.

Signed-off-by: Peter St. John <[email protected]>
1 parent b1fbe57 commit 04d23fb

1 file changed (+1, -8 lines)

sub-packages/bionemo-llm/src/bionemo/llm/data/datamodule.py

Lines changed: 1 addition & 8 deletions
@@ -18,9 +18,9 @@
 from typing import Any, Dict, Literal

 import lightning.pytorch as pl
+from megatron.core.num_microbatches_calculator import update_num_microbatches
 from nemo.lightning.data import WrappedDataLoader
 from nemo.lightning.pytorch.plugins import MegatronDataSampler
-from nemo.utils import logging
 from torch.utils.data import DataLoader, Dataset

 from bionemo.llm.data import collate
@@ -58,13 +58,6 @@ def load_state_dict(self, state_dict: Dict[str, Any]) -> None:
             state_dict: the datamodule state returned by ``state_dict``.

         """
-        try:
-            from megatron.core.num_microbatches_calculator import update_num_microbatches
-
-        except (ImportError, ModuleNotFoundError):
-            logging.warning("Megatron num_microbatches_calculator not found, using Apex version.")
-            from apex.transformer.pipeline_parallel.utils import update_num_microbatches
-
         consumed_samples = state_dict["consumed_samples"]
         self.data_sampler.init_consumed_samples = consumed_samples
         self.data_sampler.prev_consumed_samples = consumed_samples
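For reference, the removed block implemented a common optional-dependency fallback: try the Megatron-Core import first, and fall back to the Apex implementation (with a warning) if it is missing. Because bionemo-llm's pinned Megatron version is known to provide the calculator, the guard collapses to a single module-level import. A minimal sketch of the before and after, mirroring the diff above:

# Before: resolve the import lazily inside load_state_dict, with an Apex fallback
# for Megatron versions that predate num_microbatches_calculator.
try:
    from megatron.core.num_microbatches_calculator import update_num_microbatches
except (ImportError, ModuleNotFoundError):
    logging.warning("Megatron num_microbatches_calculator not found, using Apex version.")
    from apex.transformer.pipeline_parallel.utils import update_num_microbatches

# After: import once at module level; the pinned Megatron-Core version ships
# the calculator, so the fallback (and the nemo.utils logging import that
# existed only to emit the warning) can both be dropped.
from megatron.core.num_microbatches_calculator import update_num_microbatches

A side benefit of the module-level import is that a missing or too-old Megatron installation now fails loudly at import time, rather than mid-run when a checkpoint is first restored.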
