Logger tests and fixes #1009

Merged · 11 commits · Mar 3, 2020
Changes from 10 commits
2 changes: 2 additions & 0 deletions CHANGELOG.md
@@ -47,6 +47,8 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
- Fixed a bug where the model checkpointer didn't write to the same directory as the logger ([#771](https://github.com/PyTorchLightning/pytorch-lightning/pull/771))
- Fixed a bug where the `TensorBoardLogger` class would create an additional empty log file during fitting ([#777](https://github.com/PyTorchLightning/pytorch-lightning/pull/777))
- Fixed a bug where `global_step` was advanced incorrectly when using `accumulate_grad_batches > 1` ([#832](https://github.com/PyTorchLightning/pytorch-lightning/pull/832))
+ - Fixed a bug when calling `self.logger.experiment` with multiple loggers ([#1009](https://github.com/PyTorchLightning/pytorch-lightning/pull/1009))
+ - Fixed a bug when calling `logger.append_tags` on a `NeptuneLogger` with a single tag ([#1009](https://github.com/PyTorchLightning/pytorch-lightning/pull/1009))

## [0.6.0] - 2020-01-21

8 changes: 2 additions & 6 deletions pytorch_lightning/loggers/base.py
@@ -105,7 +105,7 @@ def __getitem__(self, index: int) -> LightningLoggerBase:

@property
def experiment(self) -> List[Any]:
-         return [logger.experiment() for logger in self._logger_iterable]
+         return [logger.experiment for logger in self._logger_iterable]

def log_metrics(self, metrics: Dict[str, float], step: Optional[int] = None):
[logger.log_metrics(metrics, step) for logger in self._logger_iterable]
@@ -122,11 +122,7 @@ def finalize(self, status: str):
def close(self):
[logger.close() for logger in self._logger_iterable]

-     @property
-     def rank(self) -> int:
-         return self._rank
-
-     @rank.setter
+     @LightningLoggerBase.rank.setter
def rank(self, value: int):
self._rank = value
for logger in self._logger_iterable:
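Note: the `experiment` fix above works because `experiment` is a property, so the old `logger.experiment()` tried to call the *returned* experiment object. A minimal self-contained sketch of the failure mode, using hypothetical stand-in classes rather than the library code:

from typing import Any, List

class FakeLogger:
    @property
    def experiment(self) -> Any:
        # returns an experiment handle; a property, not a method
        return object()

class FakeLoggerCollection:
    def __init__(self, loggers: List[FakeLogger]):
        self._logger_iterable = loggers

    @property
    def experiment(self) -> List[Any]:
        # the old `logger.experiment()` called the handle itself and raised
        # TypeError: 'object' object is not callable
        return [logger.experiment for logger in self._logger_iterable]

print(FakeLoggerCollection([FakeLogger(), FakeLogger()]).experiment)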
18 changes: 10 additions & 8 deletions pytorch_lightning/loggers/comet.py
@@ -7,7 +7,7 @@
"""
import argparse
from logging import getLogger
- from typing import Optional, Union, Dict
+ from typing import Optional, Dict, Union

try:
from comet_ml import Experiment as CometExperiment
@@ -20,8 +20,10 @@
# For more information, see: https://www.comet.ml/docs/python-sdk/releases/#release-300
from comet_ml.papi import API
except ImportError:
-     raise ImportError('Missing comet_ml package.')
+     raise ImportError('You want to use `comet_ml` logger which is not installed yet,'
+                       ' install it with `pip install comet-ml`.')

Review comment (Member): strange, I am pretty sure I was fixing this already...

+ import torch
from torch import is_tensor

from pytorch_lightning.utilities.debugging import MisconfigurationException
@@ -87,11 +89,7 @@ def __init__(self, api_key: Optional[str] = None, save_dir: Optional[str] = None
self._experiment = None

# Determine online or offline mode based on which arguments were passed to CometLogger
-         if save_dir is not None and api_key is not None:
-             # If arguments are passed for both save_dir and api_key, preference is given to online mode
-             self.mode = "online"
-             self.api_key = api_key
-         elif api_key is not None:
+         if api_key is not None:
self.mode = "online"
self.api_key = api_key
elif save_dir is not None:
@@ -168,7 +166,11 @@ def log_hyperparams(self, params: argparse.Namespace):
self.experiment.log_parameters(vars(params))

@rank_zero_only
-     def log_metrics(self, metrics: Dict[str, float], step: Optional[int] = None):
+     def log_metrics(
+         self,
+         metrics: Dict[str, Union[torch.Tensor, float]],
+         step: Optional[int] = None
+     ):
# Comet.ml expects metrics to be a dictionary of detached tensors on CPU
for key, val in metrics.items():
if is_tensor(val):
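Note: with the simplified branching above, `api_key` still takes precedence when both arguments are given, so online mode wins. A usage sketch (argument values are illustrative; assumes `CometLogger` is exported from `pytorch_lightning.loggers` as in this PR's package layout):

from pytorch_lightning.loggers import CometLogger

online = CometLogger(api_key='YOUR_COMET_KEY')                         # mode == 'online'
offline = CometLogger(save_dir='./comet_logs')                         # mode == 'offline'
both = CometLogger(api_key='YOUR_COMET_KEY', save_dir='./comet_logs')  # online still wins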
9 changes: 5 additions & 4 deletions pytorch_lightning/loggers/mlflow.py
@@ -31,7 +31,8 @@ def any_lightning_module_function_or_hook(...):
try:
import mlflow
except ImportError:
-     raise ImportError('Missing mlflow package.')
+     raise ImportError('You want to use `mlflow` logger which is not installed yet,'
+                       ' install it with `pip install mlflow`.')

from .base import LightningLoggerBase, rank_zero_only

@@ -79,7 +80,7 @@ def run_id(self):
if expt:
self._expt_id = expt.experiment_id
else:
logger.warning(f"Experiment with name {self.experiment_name} not found. Creating it.")
logger.warning(f'Experiment with name {self.experiment_name} not found. Creating it.')
self._expt_id = self._mlflow_client.create_experiment(name=self.experiment_name)

run = self._mlflow_client.create_run(experiment_id=self._expt_id, tags=self.tags)
@@ -97,7 +98,7 @@ def log_metrics(self, metrics: Dict[str, float], step: Optional[int] = None):
for k, v in metrics.items():
if isinstance(v, str):
logger.warning(
f"Discarding metric with string value {k}={v}"
f'Discarding metric with string value {k}={v}'
)
continue
self.experiment.log_metric(self.run_id, k, v, timestamp_ms, step)
@@ -106,7 +107,7 @@ def save(self):
pass

@rank_zero_only
def finalize(self, status: str = "FINISHED"):
def finalize(self, status: str = 'FINISHED'):
if status == 'success':
status = 'FINISHED'
self.experiment.set_terminated(self.run_id, status)
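Note: a short usage sketch of the string-metric guard in `log_metrics` above (the experiment name and values are hypothetical): numeric entries are forwarded to MLflow, string-valued ones are warned about and dropped.

from pytorch_lightning.loggers import MLFlowLogger

mlf_logger = MLFlowLogger(experiment_name='demo')  # hypothetical experiment name
mlf_logger.log_metrics({'loss': 0.25, 'note': 'not-a-number'}, step=0)
# -> 'loss' is logged; 'note' warns "Discarding metric with string value note=not-a-number"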
52 changes: 27 additions & 25 deletions pytorch_lightning/loggers/neptune.py
@@ -15,11 +15,11 @@
from neptune.experiments import Experiment
except ImportError:
raise ImportError('You want to use `neptune` logger which is not installed yet,'
-                       ' please install it e.g. `pip install neptune-client`.')
+                       ' install it with `pip install neptune-client`.')

+ import torch
from torch import is_tensor

- # from .base import LightningLoggerBase, rank_zero_only
from pytorch_lightning.loggers.base import LightningLoggerBase, rank_zero_only

logger = getLogger(__name__)
@@ -130,15 +130,15 @@ def any_lightning_module_function_or_hook(...):
self._kwargs = kwargs

if offline_mode:
self.mode = "offline"
self.mode = 'offline'
neptune.init(project_qualified_name='dry-run/project',
backend=neptune.OfflineBackend())
else:
self.mode = "online"
self.mode = 'online'
neptune.init(api_token=self.api_key,
project_qualified_name=self.project_name)

logger.info(f"NeptuneLogger was initialized in {self.mode} mode")
logger.info(f'NeptuneLogger was initialized in {self.mode} mode')

@property
def experiment(self) -> Experiment:
@@ -166,53 +166,58 @@ def experiment(self) -> Experiment:
@rank_zero_only
def log_hyperparams(self, params: argparse.Namespace):
for key, val in vars(params).items():
self.experiment.set_property(f"param__{key}", val)
self.experiment.set_property(f'param__{key}', val)

@rank_zero_only
-     def log_metrics(self, metrics: Dict[str, float], step: Optional[int] = None):
+     def log_metrics(
+         self,
+         metrics: Dict[str, Union[torch.Tensor, float]],
+         step: Optional[int] = None
+     ):
"""Log metrics (numeric values) in Neptune experiments

Args:
metrics: Dictionary with metric names as keys and measured quantities as values
step: Step number at which the metrics should be recorded, must be strictly increasing
"""

for key, val in metrics.items():
-             if is_tensor(val):
-                 val = val.cpu().detach()
-
-             if step is None:
-                 self.experiment.log_metric(key, val)
-             else:
-                 self.experiment.log_metric(key, x=step, y=val)
+             self.log_metric(key, val, step=step)

@rank_zero_only
def finalize(self, status: str):
self.experiment.stop()

@property
def name(self) -> str:
if self.mode == "offline":
return "offline-name"
if self.mode == 'offline':
return 'offline-name'
else:
return self.experiment.name

@property
def version(self) -> str:
if self.mode == "offline":
return "offline-id-1234"
if self.mode == 'offline':
return 'offline-id-1234'
else:
return self.experiment.id

@rank_zero_only
-     def log_metric(self, metric_name: str, metric_value: float, step: Optional[int] = None):
+     def log_metric(
+         self,
+         metric_name: str,
+         metric_value: Union[torch.Tensor, float, str],
+         step: Optional[int] = None
+     ):
"""Log metrics (numeric values) in Neptune experiments

Args:
metric_name: The name of log, i.e. mse, loss, accuracy.
metric_value: The value of the log (data-point).
step: Step number at which the metrics should be recorded, must be strictly increasing
"""
+         if is_tensor(metric_value):
+             metric_value = metric_value.cpu().detach()

if step is None:
self.experiment.log_metric(metric_name, metric_value)
else:
@@ -227,10 +232,7 @@ def log_text(self, log_name: str, text: str, step: Optional[int] = None):
text: The value of the log (data-point).
step: Step number at which the metrics should be recorded, must be strictly increasing
"""
-         if step is None:
-             self.experiment.log_metric(log_name, text)
-         else:
-             self.experiment.log_metric(log_name, x=step, y=text)
+         self.log_metric(log_name, text, step=step)

@rank_zero_only
def log_image(self, log_name: str, image: Union[str, Any], step: Optional[int] = None):
@@ -277,6 +279,6 @@ def append_tags(self, tags: Union[str, Iterable[str]]):
If multiple - comma separated - str are passed, all of them are added as tags.
If list of str is passed, all elements of the list are added as tags.
"""
-         if not isinstance(tags, Iterable):
+         if str(tags) == tags:
            tags = [tags]  # make it an iterable if it is not yet
self.experiment.append_tags(*tags)
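Note: the `str(tags) == tags` check is needed because a Python `str` is itself an `Iterable`, so the old `isinstance` test never wrapped a single tag and `append_tags(*tags)` splatted it into one tag per character. A standalone sketch of both checks:

from collections.abc import Iterable

tags = 'pytorch'
print(isinstance(tags, Iterable))  # True: the old check let plain strings through
print(list(tags))                  # ['p', 'y', 't', ...]: what *tags used to splat into
if str(tags) == tags:              # the new check is True only for plain strings
    tags = [tags]
print(list(tags))                  # ['pytorch']: appended as a single tag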
5 changes: 4 additions & 1 deletion pytorch_lightning/loggers/tensorboard.py
@@ -44,7 +44,10 @@ class TensorBoardLogger(LightningLoggerBase):
"""
NAME_CSV_TAGS = 'meta_tags.csv'

-     def __init__(self, save_dir: str, name: str = "default", version: Optional[Union[int, str]] = None, **kwargs):
+     def __init__(
+         self, save_dir: str, name: Optional[str] = "default",
+         version: Optional[Union[int, str]] = None, **kwargs
+     ):
super().__init__()
self.save_dir = save_dir
self._name = name
3 changes: 2 additions & 1 deletion pytorch_lightning/loggers/test_tube.py
@@ -4,7 +4,8 @@
try:
from test_tube import Experiment
except ImportError:
-     raise ImportError('Missing test-tube package.')
+     raise ImportError('You want to use `test_tube` logger which is not installed yet,'
+                       ' install it with `pip install test-tube`.')

from .base import LightningLoggerBase, rank_zero_only

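Note: the same optional-dependency guard now appears in every logger module. A generic sketch of the idiom, with a hypothetical package name:

try:
    import some_backend  # hypothetical optional dependency
except ImportError:
    # fail at import time with an actionable message naming the pip package
    raise ImportError('You want to use `some_backend` logger which is not installed yet,'
                      ' install it with `pip install some-backend`.')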
20 changes: 10 additions & 10 deletions pytorch_lightning/loggers/wandb.py
@@ -9,12 +9,14 @@
import os
from typing import Optional, List, Dict

+ import torch.nn as nn

try:
import wandb
from wandb.wandb_run import Run
except ImportError:
raise ImportError('You want to use `wandb` logger which is not installed yet,'
-                       ' please install it e.g. `pip install wandb`.')
+                       ' install it with `pip install wandb`.')

from .base import LightningLoggerBase, rank_zero_only

@@ -50,7 +52,7 @@ def __init__(self, name: Optional[str] = None, save_dir: Optional[str] = None,
super().__init__()
self._name = name
self._save_dir = save_dir
self._anonymous = "allow" if anonymous else None
self._anonymous = 'allow' if anonymous else None
self._id = version or id
self._tags = tags
self._project = project
@@ -79,27 +81,25 @@ def experiment(self) -> Run:
"""
if self._experiment is None:
if self._offline:
os.environ["WANDB_MODE"] = "dryrun"
os.environ['WANDB_MODE'] = 'dryrun'
self._experiment = wandb.init(
name=self._name, dir=self._save_dir, project=self._project, anonymous=self._anonymous,
id=self._id, resume="allow", tags=self._tags, entity=self._entity)
id=self._id, resume='allow', tags=self._tags, entity=self._entity)
return self._experiment

def watch(self, model, log="gradients", log_freq=100):
wandb.watch(model, log, log_freq)
def watch(self, model: nn.Module, log: str = 'gradients', log_freq: int = 100):
wandb.watch(model, log=log, log_freq=log_freq)

@rank_zero_only
def log_hyperparams(self, params: argparse.Namespace):
self.experiment.config.update(params)

@rank_zero_only
def log_metrics(self, metrics: Dict[str, float], step: Optional[int] = None):
metrics["global_step"] = step
if step is not None:
metrics['global_step'] = step
self.experiment.log(metrics)

def save(self):
pass

@rank_zero_only
def finalize(self, status: str = 'success'):
try:
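Note: a minimal sketch of the `global_step` guard above; without the `if step is not None` check, every call from `log_metrics` injected a spurious `global_step: None` entry into the wandb payload. The helper below is a stand-alone illustration, not library code:

def merge_step(metrics: dict, step=None) -> dict:
    # mirror the guarded behaviour: only record global_step when one is given
    if step is not None:
        metrics['global_step'] = step
    return metrics

print(merge_step({'loss': 0.5}))          # {'loss': 0.5}
print(merge_step({'loss': 0.5}, step=3))  # {'loss': 0.5, 'global_step': 3}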
Empty file added tests/loggers/__init__.py