Skip to content

Commit

Permalink
Merge branch 'master' into optimizer_logging_2
Browse files Browse the repository at this point in the history
  • Loading branch information
tchaton authored Dec 6, 2020
2 parents 8820e52 + 9b1afa8 commit 2bc49bf
Show file tree
Hide file tree
Showing 5 changed files with 84 additions and 1 deletion.
63 changes: 63 additions & 0 deletions .drone.jsonnet
Original file line number Diff line number Diff line change
@@ -0,0 +1,63 @@
/*
Copyright The PyTorch Lightning team.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

// https://github.com/drone/drone-jsonnet-config/blob/master/.drone.jsonnet

// Template for a Drone CI docker pipeline that runs the GPU test suite
// inside the given docker image. `name` becomes the pipeline name and
// `image` the container used for the single "testing" step.
local pipeline(name, image) = {
kind: "pipeline",
type: "docker",
name: name,
steps: [
{
name: "testing",
image: image,
environment: {
// Codecov upload token is injected from the Drone secret store,
// never committed to the repository.
"CODECOV_TOKEN": {
from_secret: "codecov_token"
},
// Work around MKL/OpenMP threading-layer conflicts in the CUDA images.
"MKL_THREADING_LAYER": "GNU",
},
commands: [
// Environment sanity checks before installing anything.
"python --version",
"pip --version",
"nvidia-smi",
"pip install -r ./requirements/devel.txt --upgrade-strategy only-if-needed -v --no-cache-dir",
"pip list",
// Main test run under coverage, then benchmarks/examples without it.
"coverage run --source pytorch_lightning -m pytest pytorch_lightning tests -v -ra --color=yes --durations=25",
"python -m pytest benchmarks pl_examples -v -ra --color=yes --maxfail=2 --durations=0",
"coverage report",
"codecov --token $CODECOV_TOKEN --flags=gpu,pytest --name='GPU-coverage' --env=linux --build $DRONE_BUILD_NUMBER --commit $DRONE_COMMIT",
"python tests/collect_env_details.py"
],
},
],
// Run on pushes to master / release branches and on pull requests.
trigger: {
branch: [
"master",
"release/*"
],
event: [
"push",
"pull_request"
]
},
// jsonnet `if` without `else` evaluates to null, so only the nightly
// pipeline declares a dependency: it waits for the stable torch-GPU run.
depends_on: if name == "torch-GPU-nightly" then ["torch-GPU"]
};

// Instantiate the two GPU pipelines: stable torch 1.6 first, then the
// torch 1.7 "nightly" pipeline, which depends on the stable one (see the
// `depends_on` field in `pipeline`).
[
pipeline("torch-GPU", "pytorchlightning/pytorch_lightning:base-cuda-py3.7-torch1.6"),
pipeline("torch-GPU-nightly", "pytorchlightning/pytorch_lightning:base-cuda-py3.7-torch1.7"),
]
1 change: 1 addition & 0 deletions MANIFEST.in
Original file line number Diff line number Diff line change
Expand Up @@ -54,6 +54,7 @@ include pyproject.toml
# Exclude build configs
exclude *.yml
exclude *.yaml
exclude *.jsonnet

# Exclude pyright config
exclude .pyrightconfig.json
Expand Down
8 changes: 8 additions & 0 deletions pytorch_lightning/loggers/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -409,6 +409,11 @@ def nop(*args, **kw):
def __getattr__(self, _):
return self.nop

def __getitem__(self, idx):
    """Return the experiment itself for any index.

    Makes the dummy experiment interchangeable with a collection of
    experiments, so both ``self.logger[0].experiment.add_image(...)``
    and ``self.logger.experiment[0].add_image(...)`` work; the index
    value is ignored.
    """
    return self


class DummyLogger(LightningLoggerBase):
""" Dummy logger for internal use. Is usefull if we want to disable users
Expand Down Expand Up @@ -437,6 +442,9 @@ def name(self):
def version(self):
pass

def __getitem__(self, idx):
    """Return the logger itself for any index, ignoring ``idx``.

    Lets a :class:`DummyLogger` stand in for a logger collection, so
    subscript access such as ``trainer.logger[0]`` keeps working when
    logging is disabled.
    """
    return self


def merge_dicts(
dicts: Sequence[Mapping],
Expand Down
2 changes: 1 addition & 1 deletion pytorch_lightning/plugins/plugin_connector.py
Original file line number Diff line number Diff line change
Expand Up @@ -120,7 +120,7 @@ def _convert_str_to_plugin(self, plugin):
f" {plugin} is not a supported lightning custom plugin."
" If you're trying to pass a custom plugin, please pass this as an object to"
" Trainer(plugins=[MyPlugin()]."
f" Supported plugins as string input: {(e.name for e in LightningCustomPlugins)}."
f" Supported plugins as string input: {[e.name for e in LightningCustomPlugins]}."
)
plugin_cls = LightningCustomPlugins[plugin].value
return plugin_cls(trainer=self.trainer)
Expand Down
11 changes: 11 additions & 0 deletions tests/loggers/test_base.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@

from pytorch_lightning import Trainer
from pytorch_lightning.loggers import LightningLoggerBase, LoggerCollection
from pytorch_lightning.loggers.base import DummyLogger, DummyExperiment
from pytorch_lightning.utilities import rank_zero_only
from tests.base import EvalModelTemplate

Expand Down Expand Up @@ -215,6 +216,16 @@ def log_metrics(self, metrics, step):
assert logger.history == {0: {'loss': 0.5623850983416314}, 1: {'loss': 0.4778883735637184}}


def test_dummyexperiment_support_indexing():
    """Indexing a DummyExperiment must hand back the experiment itself."""
    exp = DummyExperiment()
    assert exp[0] == exp


def test_dummylogger_support_indexing():
    """Indexing a DummyLogger must hand back the logger itself."""
    dummy = DummyLogger()
    assert dummy[0] == dummy


def test_np_sanitization():
class CustomParamsLogger(CustomLogger):
def __init__(self):
Expand Down

0 comments on commit 2bc49bf

Please sign in to comment.