From ae049c4c70087897ae8b9171e75f9b4c15db0978 Mon Sep 17 00:00:00 2001
From: Kunal Bhalla
Date: Mon, 24 Jul 2023 20:50:11 -0700
Subject: [PATCH] Spellings (#742)

Summary:
Pull Request resolved: https://github.com/pytorch/torchx/pull/742

TSIA

Reviewed By: manav-a

Differential Revision: D47722348

fbshipit-source-id: d98991229a3677276726448ab86e980f33e84bbc
---
 torchx/schedulers/api.py             | 4 ++--
 torchx/schedulers/local_scheduler.py | 6 +++---
 torchx/specs/api.py                  | 2 +-
 3 files changed, 6 insertions(+), 6 deletions(-)

diff --git a/torchx/schedulers/api.py b/torchx/schedulers/api.py
index 450ee7c68..52f2a6f16 100644
--- a/torchx/schedulers/api.py
+++ b/torchx/schedulers/api.py
@@ -97,7 +97,7 @@ def __hash__(self) -> int:
 class Scheduler(abc.ABC, Generic[T]):
     """
     An interface abstracting functionalities of a scheduler.
-    Implementors need only implement those methods annotated with
+    Implementers need only implement those methods annotated with
     ``@abc.abstractmethod``.
     """
 
@@ -148,7 +148,7 @@ def submit(
     def schedule(self, dryrun_info: AppDryRunInfo) -> str:
         """
         Same as ``submit`` except that it takes an ``AppDryRunInfo``.
-        Implementors are encouraged to implement this method rather than
+        Implementers are encouraged to implement this method rather than
         directly implementing ``submit`` since ``submit`` can be trivially
         implemented by:
 
diff --git a/torchx/schedulers/local_scheduler.py b/torchx/schedulers/local_scheduler.py
index fcc60a508..dab781116 100644
--- a/torchx/schedulers/local_scheduler.py
+++ b/torchx/schedulers/local_scheduler.py
@@ -99,7 +99,7 @@ class ReplicaParam:
 
 class ImageProvider(abc.ABC):
     """
-    Manages downloading and setting up an on localhost. This is only needed for
+    Manages downloading and setting up an image on localhost. This is only needed for
     ``LocalhostScheduler`` since typically real schedulers will do this on-behalf
     of the user.
     """
@@ -766,7 +766,7 @@ def _submit_dryrun(
     def _cuda_device_count(self) -> int:
         # this method deliberately does not use ``torch.cuda.device_count()``
         # to avoid taking a dependency on pytorch
-        # this make sit possible to avoid a BUCK dependency (internally at Meta)
+        # this makes it possible to avoid a BUCK dependency (internally at Meta)
         # on //caffe2:torch which slows down builds of //torchx:* rules
         gpu_cmd = "nvidia-smi -L"
         try:
@@ -832,7 +832,7 @@ def auto_set_CUDA_VISIBLE_DEVICES(
             """\n
 ======================================================================
 Running multiple role replicas that require GPUs without
-setting `CUDA_VISIBLE_DEVICES` may result in multiple 
+setting `CUDA_VISIBLE_DEVICES` may result in multiple
 processes using the same GPU device with undesired consequences
 such as CUDA OutOfMemory errors.
 
diff --git a/torchx/specs/api.py b/torchx/specs/api.py
index 247084251..779e1b8de 100644
--- a/torchx/specs/api.py
+++ b/torchx/specs/api.py
@@ -366,7 +366,7 @@ class AppDef:
     Args:
         name: Name of application
         roles: List of roles
-        metadata: metadata to the app (treament of metadata is scheduler dependent)
+        metadata: metadata to the app (treatment of metadata is scheduler dependent)
     """
 
     name: str
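
Note (not part of the patch): the second hunk's docstring says ``submit`` can be trivially implemented in terms of ``schedule``. A minimal sketch of what such an implementation might look like, assuming the base ``Scheduler`` exposes ``submit_dryrun`` as in torchx; the exact body here is illustrative, not taken from this patch::

    def submit(self, app: AppDef, cfg: T) -> str:
        # build the dryrun request first, then hand it to schedule(),
        # which returns the scheduler-assigned app id
        dryrun_info = self.submit_dryrun(app, cfg)
        return self.schedule(dryrun_info)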