
Source code for pytorch_lightning.plugins.precision.hpu

# Copyright The PyTorch Lightning team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Optional, Union

from lightning_lite.utilities.enums import PrecisionType
from pytorch_lightning.plugins.precision.precision_plugin import PrecisionPlugin
from pytorch_lightning.utilities.exceptions import MisconfigurationException
from pytorch_lightning.utilities.imports import _HPU_AVAILABLE

if _HPU_AVAILABLE:
    from habana_frameworks.torch.hpex import hmp


class HPUPrecisionPlugin(PrecisionPlugin):
    """Plugin that enables bfloat/half support on HPUs.

    Args:
        precision: The precision to use.
        opt_level: Choose optimization level for hmp.
        bf16_file_path: Path to bf16 ops list in hmp O1 mode.
        fp32_file_path: Path to fp32 ops list in hmp O1 mode.
        verbose: Enable verbose mode for hmp.
    """

    def __init__(
        self,
        precision: Union[str, int],
        opt_level: str = "O2",
        bf16_file_path: Optional[str] = None,
        fp32_file_path: Optional[str] = None,
        verbose: bool = False,
    ) -> None:
        if not _HPU_AVAILABLE:
            raise MisconfigurationException("HPU precision plugin requires HPU devices.")
        supported_precision_values = (16, 32, "bf16")
        if precision not in supported_precision_values:
            raise ValueError(
                f"`Trainer(accelerator='hpu', precision={precision!r})` is not supported."
                f" `precision` must be one of: {supported_precision_values}."
            )
        super().__init__()
        self.precision = precision
        if precision in (PrecisionType.HALF, PrecisionType.BFLOAT):
            hmp.convert(
                opt_level=opt_level, bf16_file_path=bf16_file_path, fp32_file_path=fp32_file_path, isVerbose=verbose
            )
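
As a minimal usage sketch (assuming an HPU-enabled environment where `habana_frameworks` is installed and a Trainer configured for the `hpu` accelerator; this example is not part of the module above):

from pytorch_lightning import Trainer
from pytorch_lightning.plugins.precision.hpu import HPUPrecisionPlugin

# Construct the plugin with bfloat16 precision; opt_level defaults to "O2",
# so no ops-list files are needed in this mode.
precision_plugin = HPUPrecisionPlugin(precision="bf16")

# Hand the plugin to the Trainer together with the HPU accelerator.
trainer = Trainer(accelerator="hpu", devices=1, plugins=[precision_plugin])

Per the docstring, the `bf16_file_path` and `fp32_file_path` arguments only apply when `opt_level="O1"`, where hmp reads explicit lists of ops to run in bf16 and fp32 respectively.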
