4 changes: 2 additions & 2 deletions torch_xla/distributed/xla_backend.py
@@ -46,7 +46,7 @@ def __init__(self, prefix_store, rank, size, timeout):
def getBackendName(self):
return 'xla'

- # pytorch's process group is unable to retrive the group size from python level. It should
+ # pytorch's process group is unable to retrieve the group size from python level. It should
# already been support in C++ level: https://github.com/pytorch/pytorch/blob/7b1988f9222f3dec5cc2012afce84218199748ae/torch/csrc/distributed/c10d/ProcessGroup.cpp#L148-L152
# For now we manually set the group name property as a temporary solution.
def _set_group_name(self, name: str) -> None:
@@ -391,7 +391,7 @@ def new_xla_process_group(ranks=None,
else:
pg._mesh = [ranks]
else:
- logging.warn(
+ logging.warning(
f'Can\'t infer process group mesh from given ranks "{str(ranks)}". '
'The process group will use the entire world as its collective comm group.'
)
2 changes: 1 addition & 1 deletion torchax/torchax/tensor.py
@@ -162,7 +162,7 @@ def jax_device(self):

@property
def data(self):
- logger.warn("In-place to .data modifications still results a copy on TPU")
+ logger.warning("In-place to .data modifications still results a copy on TPU")
return self

@data.setter
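
Both hunks make the same substitution: logging.warn() and Logger.warn() are soft-deprecated aliases of warning() in the Python standard library, and Logger.warn() may emit a DeprecationWarning on recent interpreters. A minimal, self-contained sketch of the preferred spelling (the logger name below is illustrative, not taken from the patch):

import logging

logger = logging.getLogger(__name__)

# warning() is the supported API; warn() is a deprecated alias kept only
# for backwards compatibility.
logging.warning('root-logger warning: %s', 'details')
logger.warning('module-logger warning: %s', 'details')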