apply Black 2024 style in fbcode (4/16)
Differential Revision: D54447727

Pull Request resolved: #842
yikaiMeta committed Mar 4, 2024
1 parent b1e4db5 commit 433718c
Showing 7 changed files with 19 additions and 28 deletions.
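Note for orientation (illustration only, not part of the commit; the Greeter example is invented, while the dict keys echo the docker_scheduler hunk below): the two most common Black 2024-style patterns in this diff are collapsing a body that consists only of an ellipsis onto the signature line, and wrapping an over-long assignment by parenthesizing the right-hand side instead of splitting a subscript on the left. A minimal sketch:

from typing import Protocol


class Greeter(Protocol):
    # Pre-2024 style kept the ellipsis body on its own line:
    #     def greet(self, name: str) -> str:
    #         ...
    # The 2024 style collapses such stub definitions:
    def greet(self, name: str) -> str: ...


# Pre-2024 style split the subscript on the left when the line was too long:
#     settings["mem_limit"] = settings[
#         "shm_size"
#     ] = "a-long-value-that-does-not-fit-on-one-line"
# The 2024 style keeps the targets intact and parenthesizes the value:
settings: dict = {}
settings["mem_limit"] = settings["shm_size"] = (
    "a-long-value-that-does-not-fit-on-one-line"
)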
3 changes: 1 addition & 2 deletions torchx/pipelines/kfp/adapter.py
@@ -74,8 +74,7 @@ class ContainerFactory(Protocol):
     kfp.dsl.ContainerOp.
     """

-    def __call__(self, *args: object, **kwargs: object) -> dsl.ContainerOp:
-        ...
+    def __call__(self, *args: object, **kwargs: object) -> dsl.ContainerOp: ...


 class KFPContainerFactory(ContainerFactory, Protocol):
2 changes: 1 addition & 1 deletion torchx/runner/api.py
@@ -654,7 +654,7 @@ def _scheduler(self, scheduler: str) -> Scheduler:
     def _scheduler_app_id(
         self,
         app_handle: AppHandle,
-        check_session: bool = True
+        check_session: bool = True,
         # pyre-fixme[24]: SchedulerOpts is a generic, and we don't have access to the corresponding type
     ) -> Tuple[Scheduler, str, str]:
         """
3 changes: 1 addition & 2 deletions torchx/schedulers/__init__.py
@@ -27,8 +27,7 @@

 class SchedulerFactory(Protocol):
     # pyre-fixme: Scheduler opts
-    def __call__(self, session_name: str, **kwargs: object) -> Scheduler:
-        ...
+    def __call__(self, session_name: str, **kwargs: object) -> Scheduler: ...


 def _defer_load_scheduler(path: str) -> SchedulerFactory:
6 changes: 3 additions & 3 deletions torchx/schedulers/docker_scheduler.py
@@ -292,9 +292,9 @@ def _submit_dryrun(self, app: AppDef, cfg: DockerOpts) -> AppDryRunInfo[DockerJo
                 if resource.memMB >= 0:
                     # To support PyTorch dataloaders we need to set /dev/shm to
                     # larger than the 64M default.
-                    c.kwargs["mem_limit"] = c.kwargs[
-                        "shm_size"
-                    ] = f"{int(resource.memMB)}m"
+                    c.kwargs["mem_limit"] = c.kwargs["shm_size"] = (
+                        f"{int(resource.memMB)}m"
+                    )
                 if resource.cpu >= 0:
                     c.kwargs["nano_cpus"] = int(resource.cpu * 1e9)
                 if resource.gpu > 0:
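Context for the mem_limit / shm_size kwargs in the hunk above: the Docker scheduler eventually passes these kwargs to docker-py, whose containers.run() accepts mem_limit, shm_size, and nano_cpus. A rough sketch of such a call, with an arbitrary image and values chosen purely for illustration (not code from this repo):

import docker  # requires the docker-py package and a reachable Docker daemon

client = docker.from_env()

mem_mb = 16_000
kwargs = {}
# Same chained assignment as in the hunk above: both limits get the same value,
# because PyTorch dataloaders need /dev/shm larger than Docker's 64M default.
kwargs["mem_limit"] = kwargs["shm_size"] = f"{int(mem_mb)}m"
kwargs["nano_cpus"] = int(4 * 1e9)  # 4 CPUs

container = client.containers.run(
    "python:3.10",
    ["python", "-c", "print('hello')"],
    detach=True,
    **kwargs,
)
print(container.id)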
15 changes: 5 additions & 10 deletions torchx/schedulers/local_scheduler.py
@@ -271,21 +271,16 @@ class PopenProtocol(Protocol):
     """

     @property
-    def pid(self) -> int:
-        ...
+    def pid(self) -> int: ...

     @property
-    def returncode(self) -> int:
-        ...
+    def returncode(self) -> int: ...

-    def wait(self, timeout: Optional[float] = None) -> int:
-        ...
+    def wait(self, timeout: Optional[float] = None) -> int: ...

-    def poll(self) -> Optional[int]:
-        ...
+    def poll(self) -> Optional[int]: ...

-    def kill(self) -> None:
-        ...
+    def kill(self) -> None: ...


 @dataclass
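The stubs collapsed above define a structural Protocol, so any Popen-like object satisfies it by shape alone. As a small aside (not code from this repo), a sketch showing that subprocess.Popen already matches such a protocol:

import subprocess
import sys
from typing import Optional, Protocol


class PopenLike(Protocol):
    # Only names and signatures matter for structural typing;
    # the "..." bodies carry no behavior.
    @property
    def pid(self) -> int: ...
    def wait(self, timeout: Optional[float] = None) -> int: ...
    def poll(self) -> Optional[int]: ...
    def kill(self) -> None: ...


def describe(proc: PopenLike) -> str:
    return f"pid={proc.pid} returncode={proc.poll()}"


p = subprocess.Popen([sys.executable, "-c", "print('hi')"])
p.wait(timeout=10)
print(describe(p))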
12 changes: 6 additions & 6 deletions torchx/schedulers/ray/ray_driver.py
@@ -148,12 +148,12 @@ def __init__(self, replicas: List[RayActor]) -> None:
         else:
             self.min_replicas = replicas[0].min_replicas  # pyre-ignore[8]

-        self.placement_groups: List[
-            PlacementGroup
-        ] = []  # all the placement groups, shall never change
-        self.actor_info_of_id: Dict[
-            str, ActorInfo
-        ] = {}  # store the info used to recover an actor
+        self.placement_groups: List[PlacementGroup] = (
+            []
+        )  # all the placement groups, shall never change
+        self.actor_info_of_id: Dict[str, ActorInfo] = (
+            {}
+        )  # store the info used to recover an actor
         self.active_tasks: List["ray.ObjectRef"] = []  # list of active tasks

         self.terminating: bool = False  # if the job has finished and being terminated
6 changes: 2 additions & 4 deletions torchx/tracker/api.py
@@ -67,8 +67,7 @@ class AppRunTrackableSource:
     artifact_name: Optional[str]


-class Lineage:
-    ...
+class Lineage: ...


 class TrackerBase(ABC):
@@ -332,5 +331,4 @@ def sources(self) -> Iterable[AppRunTrackableSource]:

         return model_run_sources

-    def children(self) -> Iterable[AppRun]:
-        ...
+    def children(self) -> Iterable[AppRun]: ...
