Module livelossplot.outputs.tensorboard_logger

from datetime import datetime
from os import path
from typing import Optional

from livelossplot.main_logger import MainLogger
from livelossplot.outputs.base_output import BaseOutput


class TensorboardLogger(BaseOutput):
    """
    Class write logs to TensorBoard (using pure TensorBoard, not one from TensorFlow).
    """
    def __init__(self, logdir: str = "./tensorboard_logs/", run_id: Optional[str] = None):
        """
        Args:
            logdir: dir where TensorBoard events will be written
            run_id: name for log id, otherwise it usses datetime
        """
        # Imported lazily so that tensorboard is only required when this output is actually used.
        from tensorboard import summary
        self.summary = summary
        # Default run_id: current datetime without microseconds, with "T" and ":" replaced
        # to keep the directory name filesystem-friendly, e.g. "2024-01-31 12_34_56".
        run_id = datetime.now().isoformat()[:-7].replace("T", " ").replace(":", "_") if run_id is None else run_id
        self._path = path.join(logdir, run_id)
        self.writer = summary.create_file_writer(self._path)

    def close(self):
        """Close tensorboard writer"""
        self.writer.close()

    def log_scalar(self, name: str, value: float, global_step: float):
        """
        Args:
            name: name of metric
            value: float value of metric
            global_step: current step of the training loop
        """
        with self.writer.as_default():
            self.summary.scalar(name, value, step=global_step)
        self.writer.flush()

    def send(self, logger: MainLogger):
        """Take log history from logger and store it in tensorboard event"""
        for name, log_items in logger.log_history.items():
            last_log_item = log_items[-1]
            self.log_scalar(name, last_log_item.value, last_log_item.step)
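
In typical use the logger is not driven directly but passed to PlotLosses as one of its outputs. A minimal sketch, assuming the livelossplot 0.5 API (PlotLosses(outputs=...), update(), send()) and made-up loss values:

from livelossplot import PlotLosses
from livelossplot.outputs import TensorboardLogger

plotlosses = PlotLosses(outputs=[TensorboardLogger(logdir="./tensorboard_logs/")])

for epoch in range(10):
    # Replace these illustrative numbers with real metrics from your training step.
    plotlosses.update({'loss': 1.0 / (epoch + 1), 'val_loss': 1.2 / (epoch + 1)})
    plotlosses.send()  # forwards the latest values to every configured output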

Classes

class TensorboardLogger (logdir: str = './tensorboard_logs/', run_id: Optional[str] = None)

Writes logs to TensorBoard (using the standalone tensorboard package, not the one bundled with TensorFlow).

Args

logdir
directory where TensorBoard event files will be written
run_id
name of the run; if not given, a timestamp based on the current datetime is used
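
A short sketch of constructing the logger directly; the run name below is a hypothetical example, and event files land under logdir/run_id, which can be inspected with tensorboard --logdir:

from livelossplot.outputs import TensorboardLogger

# Explicit run_id; omitting it produces a timestamp such as "2024-01-31 12_34_56".
tb = TensorboardLogger(logdir="./tensorboard_logs/", run_id="baseline_run")  # hypothetical run name
# Event files are written under ./tensorboard_logs/baseline_run;
# browse them with: tensorboard --logdir ./tensorboard_logs/
tb.close()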

Ancestors

livelossplot.outputs.base_output.BaseOutput

Methods

def close(self)

Close the TensorBoard writer

def log_scalar(self, name: str, value: float, global_step: float)

Args

name
name of metric
value
float value of metric
global_step
current step of the training loop
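
A small sketch of writing scalars by hand with log_scalar, assuming the tensorboard package is installed; the metric name, values, and run name are illustrative:

from livelossplot.outputs import TensorboardLogger

tb = TensorboardLogger(run_id="manual_scalars")  # hypothetical run name
for step in range(5):
    tb.log_scalar('loss', 1.0 / (step + 1), global_step=step)
tb.close()
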
def send(self, logger: MainLogger)

Take the log history from the logger and write the latest value of each metric to the TensorBoard event file

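
A sketch of driving send() with a MainLogger, assuming MainLogger can be constructed with its defaults and updated with a dict of metrics (as in livelossplot 0.5); each call writes only the most recent value of every metric:

from livelossplot.main_logger import MainLogger
from livelossplot.outputs import TensorboardLogger

logger = MainLogger()
tb = TensorboardLogger(run_id="send_demo")  # hypothetical run name

for epoch in range(3):
    logger.update({'loss': 1.0 / (epoch + 1)})  # illustrative values
    tb.send(logger)  # writes the latest 'loss' value at its recorded step
tb.close()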

Inherited members