Shortcuts

Source code for pytorch_lightning.callbacks.model_summary

# Copyright The PyTorch Lightning team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Model Summary
=============

Generates a summary of all layers in a :class:`~pytorch_lightning.core.lightning.LightningModule`.

The string representation of this summary prints a table with columns containing
the name, type and number of parameters for each layer.

"""
import logging
from typing import List, Tuple

import pytorch_lightning as pl
from pytorch_lightning.callbacks.base import Callback
from pytorch_lightning.utilities.model_summary import _format_summary_table, summarize

log = logging.getLogger(__name__)


class ModelSummary(Callback):
    r"""Generates a summary of all layers in a
    :class:`~pytorch_lightning.core.lightning.LightningModule`.

    Args:
        max_depth: The maximum depth of layer nesting that the summary will include.
            A value of 0 turns the layer summary off.

    Example::

        >>> from pytorch_lightning import Trainer
        >>> from pytorch_lightning.callbacks import ModelSummary
        >>> trainer = Trainer(callbacks=[ModelSummary(max_depth=1)])
    """

    def __init__(self, max_depth: int = 1) -> None:
        # A depth of 0 disables the summary entirely (checked in the hook below).
        self._max_depth: int = max_depth

    def on_pretrain_routine_start(self, trainer: "pl.Trainer", pl_module: "pl.LightningModule") -> None:
        """Collect the layer summary of ``pl_module`` and log it on the global-zero rank."""
        if not self._max_depth:
            return None

        # The summary is built on every rank; only the global-zero rank logs it.
        summary = summarize(pl_module, max_depth=self._max_depth)
        rows = summary._get_summary_data()
        total = summary.total_parameters
        trainable = summary.trainable_parameters
        size_mb = summary.model_size

        if trainer.is_global_zero:
            self.summarize(rows, total, trainable, size_mb)

    @staticmethod
    def summarize(
        summary_data: List[Tuple[str, List[str]]],
        total_parameters: int,
        trainable_parameters: int,
        model_size: float,
    ) -> None:
        """Format the collected summary data into a table and emit it via the module logger."""
        table = _format_summary_table(total_parameters, trainable_parameters, model_size, *summary_data)
        log.info("\n" + table)

© Copyright (c) 2018-2021, William Falcon et al. Revision 46f718d2.

Built with Sphinx using a theme provided by Read the Docs.
Read the Docs v: latest
Versions
latest
stable
1.5.4
1.5.3
1.5.2
1.5.1
1.5.0
1.4.9
1.4.8
1.4.7
1.4.6
1.4.5
1.4.4
1.4.3
1.4.2
1.4.1
1.4.0
1.3.8
1.3.7
1.3.6
1.3.5
1.3.4
1.3.3
1.3.2
1.3.1
1.3.0
1.2.10
1.2.8
1.2.7
1.2.6
1.2.5
1.2.4
1.2.3
1.2.2
1.2.1
1.2.0
1.1.8
1.1.7
1.1.6
1.1.5
1.1.4
1.1.3
1.1.2
1.1.1
1.1.0
1.0.8
1.0.7
1.0.6
1.0.5
1.0.4
1.0.3
1.0.2
1.0.1
1.0.0
0.10.0
0.9.0
0.8.5
0.8.4
0.8.3
0.8.2
0.8.1
0.8.0
0.7.6
0.7.5
0.7.4
0.7.3
0.7.2
0.7.1
0.7.0
0.6.0
0.5.3
0.4.9
ipynb-update
docs-search
Downloads
html
On Read the Docs
Project Home
Builds

Free document hosting provided by Read the Docs.