Shortcuts

Source code for pytorch_lightning.callbacks.model_summary

# Copyright The PyTorch Lightning team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Model Summary
=============

Generates a summary of all layers in a :class:`~pytorch_lightning.core.module.LightningModule`.

The string representation of this summary prints a table with columns containing
the name, type and number of parameters for each layer.

"""
import logging
from typing import List, Tuple, Union

import pytorch_lightning as pl
from pytorch_lightning.callbacks.callback import Callback
from pytorch_lightning.utilities.model_summary import DeepSpeedSummary
from pytorch_lightning.utilities.model_summary import ModelSummary as Summary
from pytorch_lightning.utilities.model_summary import summarize
from pytorch_lightning.utilities.model_summary.model_summary import _format_summary_table

log = logging.getLogger(__name__)


class ModelSummary(Callback):
    r"""Generates a summary of all layers in a :class:`~pytorch_lightning.core.module.LightningModule`.

    Args:
        max_depth: The maximum depth of layer nesting that the summary will include. A value of 0 turns the
            layer summary off.

    Example::

        >>> from pytorch_lightning import Trainer
        >>> from pytorch_lightning.callbacks import ModelSummary
        >>> trainer = Trainer(callbacks=[ModelSummary(max_depth=1)])
    """

    def __init__(self, max_depth: int = 1) -> None:
        # A depth of 0 disables the summary entirely (see ``on_fit_start``).
        self._max_depth: int = max_depth

    def on_fit_start(self, trainer: "pl.Trainer", pl_module: "pl.LightningModule") -> None:
        """Collect the layer summary when fitting starts and log it on the global-zero rank."""
        if not self._max_depth:
            return None

        summary = self._summary(trainer, pl_module)
        # NOTE(review): the summary data and parameter counts are gathered on every
        # rank before the rank check — presumably because the DeepSpeed stage-3
        # summary involves collective ops; only rank zero logs the result.
        rows = summary._get_summary_data()
        total = summary.total_parameters
        trainable = summary.trainable_parameters
        size_mb = summary.model_size

        if trainer.is_global_zero:
            self.summarize(rows, total, trainable, size_mb)

    def _summary(self, trainer: "pl.Trainer", pl_module: "pl.LightningModule") -> Union[DeepSpeedSummary, Summary]:
        """Return the appropriate summary object for the active training strategy."""
        # Imported lazily so that importing this callback does not require deepspeed.
        from pytorch_lightning.strategies.deepspeed import DeepSpeedStrategy

        strategy = trainer.strategy
        needs_deepspeed_summary = isinstance(strategy, DeepSpeedStrategy) and strategy.zero_stage_3
        if needs_deepspeed_summary:
            # ZeRO stage 3 shards parameters, so a dedicated summary is required.
            return DeepSpeedSummary(pl_module, max_depth=self._max_depth)
        return summarize(pl_module, max_depth=self._max_depth)

    @staticmethod
    def summarize(
        summary_data: List[Tuple[str, List[str]]],
        total_parameters: int,
        trainable_parameters: int,
        model_size: float,
    ) -> None:
        """Format the collected summary rows into a table and emit it via the module logger."""
        table = _format_summary_table(total_parameters, trainable_parameters, model_size, *summary_data)
        log.info("\n" + table)

© Copyright (c) 2018-2022, Lightning AI et al. Revision 92fe1887.

Built with Sphinx using a theme provided by Read the Docs.
Read the Docs v: stable
Versions
latest
stable
1.8.3post1
1.8.3.post0
1.8.3
1.8.2
1.8.1
1.8.0.post1
1.8.0
1.7.7
1.7.6
1.7.5
1.7.4
1.7.3
1.7.2
1.7.1
1.7.0
1.6.5
1.6.4
1.6.3
1.6.2
1.6.1
1.6.0
1.5.10
1.5.9
1.5.8
1.5.7
1.5.6
1.5.5
1.5.4
1.5.3
1.5.2
1.5.1
1.5.0
1.4.9
1.4.8
1.4.7
1.4.6
1.4.5
1.4.4
1.4.3
1.4.2
1.4.1
1.4.0
1.3.8
1.3.7
1.3.6
1.3.5
1.3.4
1.3.3
1.3.2
1.3.1
1.3.0
1.2.10
1.2.8
1.2.7
1.2.6
1.2.5
1.2.4
1.2.3
1.2.2
1.2.1
1.2.0
1.1.8
1.1.7
1.1.6
1.1.5
1.1.4
1.1.3
1.1.2
1.1.1
1.1.0
1.0.8
1.0.7
1.0.6
1.0.5
1.0.4
1.0.3
1.0.2
1.0.1
1.0.0
0.10.0
0.9.0
0.8.5
0.8.4
0.8.3
0.8.2
0.8.1
0.8.0
0.7.6
0.7.5
0.7.4
0.7.3
0.7.2
0.7.1
0.7.0
0.6.0
0.5.3
0.4.9
Downloads
html
On Read the Docs
Project Home
Builds

Free document hosting provided by Read the Docs.