Source code for pytorch_lightning.accelerators.accelerator

# Copyright The PyTorch Lightning team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from abc import abstractmethod
from typing import Any, Dict, Optional, Union

import torch
from torch.nn import Module

import pytorch_lightning as pl
from pytorch_lightning.plugins.precision import PrecisionPlugin
from pytorch_lightning.plugins.training_type import TrainingTypePlugin


class Accelerator:
    """The Accelerator Base Class.

    An Accelerator is meant to deal with one type of Hardware.

    Currently there are accelerators for:

    - CPU
    - GPU
    - TPU
    - IPU

    Each Accelerator gets two plugins upon initialization:
    One to handle differences from the training routine and one to handle different precisions.
    """

    def __init__(self, precision_plugin: Optional[PrecisionPlugin], training_type_plugin: TrainingTypePlugin) -> None:
        """
        Args:
            precision_plugin: the plugin to handle precision-specific parts

                .. deprecated::
                    The ``precision_plugin`` parameter has been deprecated and will be removed soon.
                    Pass the precision plugin as a parameter to the ``TrainingTypePlugin`` instead.

            training_type_plugin: the plugin to handle different training routines
        """
        self.training_type_plugin = training_type_plugin

        if precision_plugin is not None:
            self.training_type_plugin._precision_plugin = precision_plugin

    def setup_environment(self) -> None:
        """Setup any processes or distributed connections.

        This is called before the LightningModule/DataModule setup hook which allows the user to access the
        accelerator environment before setup is complete.
        """
        self.training_type_plugin.setup_environment()

    def setup(self, trainer: "pl.Trainer") -> None:
        """Setup plugins for the trainer fit and creates optimizers.

        Args:
            trainer: the trainer instance
        """
        self.training_type_plugin.setup(trainer)

    @property
    def model(self) -> Module:
        """Returns the model.

        This can also be a wrapped LightningModule. For retrieving the pure LightningModule use
        :attr:`Accelerator.lightning_module`.
        """
        return self.training_type_plugin.model

    @model.setter
    def model(self, new_model: Module) -> None:
        self.training_type_plugin.model = new_model

    @property
    def lightning_module(self) -> "pl.LightningModule":
        """Returns the pure LightningModule.

        To get the potentially wrapped model use :attr:`Accelerator.model`.
        """
        return self.training_type_plugin.lightning_module

    @property
    def root_device(self) -> torch.device:
        """Returns the root device."""
        return self.training_type_plugin.root_device

    def teardown(self) -> None:
        """This method is called to teardown the training process.

        It is the right place to release memory and free other resources.
        """
        self.training_type_plugin.teardown()

    def get_device_stats(self, device: Union[str, torch.device]) -> Dict[str, Any]:
        """Gets stats for a given device.

        Args:
            device: device for which to get stats

        Returns:
            Dictionary of device stats
        """
        raise NotImplementedError

    def on_train_start(self) -> None:
        """Called when train begins."""
        return self.training_type_plugin.on_train_start()

    @staticmethod
    @abstractmethod
    def auto_device_count() -> int:
        """Get the devices when set to auto."""
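As a usage illustration (not part of the listed module), a concrete accelerator would typically supply only the device-specific pieces that the base class leaves unimplemented, namely get_device_stats and auto_device_count, while delegating everything else to the TrainingTypePlugin passed at construction. The minimal sketch below assumes a hypothetical MyCUDAAccelerator subclass running on CUDA hardware; the name and the choice of reported statistics are illustrative, not part of the PyTorch Lightning API.

# Hypothetical sketch of a custom accelerator built on the base class above.
# Only the methods left unimplemented by ``Accelerator`` are filled in.
from typing import Any, Dict, Union

import torch

from pytorch_lightning.accelerators.accelerator import Accelerator


class MyCUDAAccelerator(Accelerator):
    def get_device_stats(self, device: Union[str, torch.device]) -> Dict[str, Any]:
        # Report a couple of CUDA memory counters for the given device.
        return {
            "allocated_bytes": torch.cuda.memory_allocated(device),
            "reserved_bytes": torch.cuda.memory_reserved(device),
        }

    @staticmethod
    def auto_device_count() -> int:
        # Number of devices to use when the device count is set to "auto".
        return torch.cuda.device_count()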
