Commit

remove duplicate entries in pyproject and reformat distributed file

jarlsondre committed Nov 28, 2024
1 parent 018cc47 commit 6895472
Showing 2 changed files with 12 additions and 18 deletions.
6 changes: 0 additions & 6 deletions pyproject.toml
@@ -60,12 +60,6 @@ dev = [
     "ipython",
     "isort>=5.13.2",
 ]
-macos = [
-    "prov4ml[apple]@git+https://github.com/matbun/ProvML"
-]
-linux = [
-    "prov4ml[linux]@git+https://github.com/matbun/ProvML"
-]
 docs = [
     "sphinx-rtd-theme==2.0.0",
     "nbsphinx==0.9.4",
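Per the commit message, the removed macos and linux extras were duplicates of entries kept elsewhere in pyproject.toml (the location of the surviving entries is assumed here, not shown in this diff), so platform-specific installs such as pip install "itwinai[linux]" should keep resolving prov4ml from the same Git source.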
24 changes: 12 additions & 12 deletions src/itwinai/torch/distributed.py
@@ -318,7 +318,9 @@ def create_dataloader(
                 shuffle=shuffle,
             )
         elif not isinstance(sampler, DistributedSampler):
-            raise RuntimeError("User-provided sampler must implement DistributedSampler.")
+            raise RuntimeError(
+                "User-provided sampler must implement DistributedSampler."
+            )
         # shuffle and batch_sampler must be unset
         return DataLoader(
             dataset=dataset,
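For context, a minimal sketch of the rule this check enforces: any user-provided sampler must be a DistributedSampler so that each rank draws a disjoint shard of the dataset. The dataset and the num_replicas/rank values below are illustrative, not taken from the repository.

import torch
from torch.utils.data import DataLoader, TensorDataset
from torch.utils.data.distributed import DistributedSampler

dataset = TensorDataset(torch.arange(100, dtype=torch.float32))

# In a real run, num_replicas/rank come from the initialized process group.
sampler = DistributedSampler(dataset, num_replicas=2, rank=0, shuffle=True)

# shuffle and batch_sampler stay unset: the sampler owns ordering and sharding.
loader = DataLoader(dataset, batch_size=8, sampler=sampler)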
@@ -409,9 +411,7 @@ def init(self) -> None:
         which is already initialized.
         """
         if not distributed_resources_available():
-            raise RuntimeError(
-                "Trying to run distributed on insufficient resources."
-            )
+            raise RuntimeError("Trying to run distributed on insufficient resources.")
         if self.is_initialized:
             raise DistributedStrategyError("Strategy was already initialized")
         dist.init_process_group(backend=self.backend)
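The same guard-then-initialize pattern appears in each init() method touched in this diff. A rough standalone sketch of the torch.distributed variant, with an illustrative single-process env:// rendezvous (the real code reads its backend and resource checks from the strategy; distributed_resources_available is the project's own helper and is only mirrored loosely here):

import os
import torch.distributed as dist

# Illustrative rendezvous values for a single-process demo.
os.environ.setdefault("MASTER_ADDR", "localhost")
os.environ.setdefault("MASTER_PORT", "29500")

def init(backend: str = "gloo") -> None:
    # itwinai raises first on insufficient resources, then on re-initialization.
    if dist.is_initialized():
        raise RuntimeError("Strategy was already initialized")
    dist.init_process_group(backend=backend, rank=0, world_size=1)

init()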
@@ -578,9 +578,7 @@ def init(self) -> None:
 
         self.deepspeed = deepspeed
         if not distributed_resources_available():
-            raise RuntimeError(
-                "Trying to run distributed on insufficient resources."
-            )
+            raise RuntimeError("Trying to run distributed on insufficient resources.")
 
         if self.is_initialized:
             raise DistributedStrategyError("Strategy was already initialized")
@@ -699,7 +697,9 @@ def gather_obj(self, obj: Any, dst_rank: int = 0) -> Optional[List[Any]]:
             dist.gather_object(obj, dst=dst_rank)
 
     @check_initialized
-    def gather(self, tensor: torch.Tensor, dst_rank: int = 0) -> Optional[List[torch.Tensor]]:
+    def gather(
+        self, tensor: torch.Tensor, dst_rank: int = 0
+    ) -> Optional[List[torch.Tensor]]:
         """Gathers a tensor from the whole group in a list
         (to all workers).
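As a point of reference, a hedged sketch of what a gather with this signature typically wraps: torch.distributed.gather returns the collected list only on dst_rank and None elsewhere. The function body below is illustrative, not the repository's implementation.

from typing import List, Optional

import torch
import torch.distributed as dist

def gather(tensor: torch.Tensor, dst_rank: int = 0) -> Optional[List[torch.Tensor]]:
    # Assumes an initialized process group and same-shaped tensors on all ranks.
    if dist.get_rank() == dst_rank:
        result = [torch.zeros_like(tensor) for _ in range(dist.get_world_size())]
        dist.gather(tensor, gather_list=result, dst=dst_rank)
        return result
    dist.gather(tensor, dst=dst_rank)
    return None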
@@ -747,9 +747,7 @@ def init(self) -> None:
         already initialized.
         """
         if not distributed_resources_available():
-            raise RuntimeError(
-                "Trying to run distributed on insufficient resources."
-            )
+            raise RuntimeError("Trying to run distributed on insufficient resources.")
         if self.is_initialized:
             raise DistributedStrategyError("Strategy was already initialized")
 
@@ -880,7 +878,9 @@ def gather_obj(self, obj: Any, dst_rank: int = 0) -> Optional[list[Any]]:
             return result
 
     @check_initialized
-    def gather(self, tensor: torch.Tensor, dst_rank: int = 0) -> Optional[List[torch.Tensor]]:
+    def gather(
+        self, tensor: torch.Tensor, dst_rank: int = 0
+    ) -> Optional[List[torch.Tensor]]:
         """Gathers a tensor from the whole group in a list
         (to all workers). Under the hood it relies on allgather as gather is
         not supported by Horovod.
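The docstring notes that the Horovod strategy emulates gather with allgather. One way that emulation can look, sketched under the assumption that every rank contributes a same-shaped tensor (hvd.allgather concatenates along dim 0); this is an illustration of the technique, not the file's actual body:

from typing import List, Optional

import horovod.torch as hvd
import torch

def gather(tensor: torch.Tensor, dst_rank: int = 0) -> Optional[List[torch.Tensor]]:
    # allgather runs on every rank; unsqueezing first yields (world_size, *shape).
    gathered = hvd.allgather(tensor.unsqueeze(0))
    if hvd.rank() != dst_rank:
        return None
    return [gathered[i] for i in range(hvd.size())]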
