
Source code for pytorch_lightning.plugins.environments.torchelastic_environment

# Copyright The PyTorch Lightning team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import logging
import os

from pytorch_lightning.plugins.environments.cluster_environment import ClusterEnvironment
from pytorch_lightning.utilities import rank_zero_deprecation, rank_zero_warn

log = logging.getLogger(__name__)


class TorchElasticEnvironment(ClusterEnvironment):
    """Environment for fault-tolerant and elastic training with `torchelastic <https://pytorch.org/elastic/>`_"""

    def __init__(self) -> None:
        super().__init__()
        # TODO: remove in 1.7
        if hasattr(self, "is_using_torchelastic") and callable(self.is_using_torchelastic):
            rank_zero_deprecation(
                f"`{self.__class__.__name__}.is_using_torchelastic` has been deprecated in v1.6 and will be removed in"
                " v1.7. Implement the static method `detect()` instead (do not forget to add the `@staticmethod`"
                " decorator)."
            )

    @property
    def creates_processes_externally(self) -> bool:
        return True

    @property
    def main_address(self) -> str:
        if "MASTER_ADDR" not in os.environ:
            rank_zero_warn("MASTER_ADDR environment variable is not defined. Set as localhost")
            os.environ["MASTER_ADDR"] = "127.0.0.1"
        log.debug(f"MASTER_ADDR: {os.environ['MASTER_ADDR']}")
        return os.environ["MASTER_ADDR"]

    @property
    def main_port(self) -> int:
        if "MASTER_PORT" not in os.environ:
            rank_zero_warn("MASTER_PORT environment variable is not defined. Set as 12910")
            os.environ["MASTER_PORT"] = "12910"
        log.debug(f"MASTER_PORT: {os.environ['MASTER_PORT']}")
        return int(os.environ["MASTER_PORT"])

    @staticmethod
    def detect() -> bool:
        """Returns ``True`` if the current process was launched using the torchelastic command."""
        required_env_vars = {"RANK", "GROUP_RANK", "LOCAL_RANK", "LOCAL_WORLD_SIZE"}
        return required_env_vars.issubset(os.environ.keys())

    def world_size(self) -> int:
        return int(os.environ["WORLD_SIZE"])

    def set_world_size(self, size: int) -> None:
        log.debug("TorchElasticEnvironment.set_world_size was called, but setting world size is not allowed. Ignored.")

    def global_rank(self) -> int:
        return int(os.environ["RANK"])

    def set_global_rank(self, rank: int) -> None:
        log.debug(
            "TorchElasticEnvironment.set_global_rank was called, but setting global rank is not allowed. Ignored."
        )

    def local_rank(self) -> int:
        return int(os.environ["LOCAL_RANK"])

    def node_rank(self) -> int:
        return int(os.environ.get("GROUP_RANK", 0))
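
For context, ``detect()`` above keys off the environment variables that the torchelastic launcher (``torchrun`` / ``python -m torch.distributed.run``) exports to every worker process, and the accessors simply read those variables back. The following standalone sketch is not part of this module: it populates the variables with made-up values for one worker of a hypothetical 2-node, 2-GPU-per-node job and exercises the class.

import os

from pytorch_lightning.plugins.environments import TorchElasticEnvironment

# Hypothetical values mimicking what torchelastic would export for
# global worker 3 of a 2-node x 2-process job; a real launch sets these.
os.environ.update(
    {
        "RANK": "3",
        "GROUP_RANK": "1",
        "LOCAL_RANK": "1",
        "LOCAL_WORLD_SIZE": "2",
        "WORLD_SIZE": "4",
        "MASTER_ADDR": "10.0.0.1",
        "MASTER_PORT": "29500",
    }
)

env = TorchElasticEnvironment()
assert TorchElasticEnvironment.detect()  # all required variables are present
print(env.global_rank())  # 3
print(env.node_rank())    # 1 (taken from GROUP_RANK)
print(env.local_rank())   # 1
print(env.world_size())   # 4
print(env.main_address)   # "10.0.0.1"
print(env.main_port)      # 29500

In a real job the launcher sets these variables itself; Lightning selects ``TorchElasticEnvironment`` automatically when ``detect()`` returns ``True``, and the environment can also be passed explicitly through the ``plugins`` argument of the ``Trainer``.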
