
Commit 751e1ef

sdesrozis authored and Desroziers committed
doctest for accumulation metrics (pytorch#2332)
Co-authored-by: Desroziers <[email protected]>
1 parent 7211e33 commit 751e1ef

2 files changed: +97, -8 lines

docs/source/conf.py (+1, -2)

@@ -347,8 +347,7 @@ def run(self):
 # create default evaluator for doctests
 
 def eval_step(engine, batch):
-    y_pred, y = batch
-    return y_pred, y
+    return batch
 
 default_evaluator = Engine(eval_step)
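With this change the doctest fixture becomes an identity engine. A minimal self-contained sketch of the resulting setup, assuming only ignite's public `Engine` API:

    from ignite.engine import Engine

    # Identity evaluator used by the metric doctests: the process
    # function returns each batch unchanged, so an attached metric
    # accumulates the raw batches emitted by the data source.
    def eval_step(engine, batch):
        return batch

    default_evaluator = Engine(eval_step)

Returning the batch unchanged lets the metric doctests feed tensors or lists of tensors straight into `default_evaluator.run(...)` without constructing `(y_pred, y)` pairs.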

ignite/metrics/accumulation.py (+96, -6)
@@ -99,6 +99,8 @@ class Average(VariableAccumulation):
     For input `x` being an ND `torch.Tensor` with N > 1, the first dimension is seen as the number of samples and
     is summed up and added to the accumulator: `accumulator += x.sum(dim=0)`
 
+    ``output_transform`` can be added to the metric to transform the output into the form expected by the metric.
+
     Args:
         output_transform: a callable that is used to transform the
             :class:`~ignite.engine.engine.Engine`'s ``process_function``'s output into the
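For illustration, the newly documented ``output_transform`` hook could be wired up as below when the engine's output is a mapping. This is a sketch, not part of the diff; the dict-returning `eval_step` and the `'y_pred'` key are assumptions:

    import torch
    from ignite.engine import Engine
    from ignite.metrics import Average

    # Hypothetical process function whose output is a dict.
    def eval_step(engine, batch):
        return {'y_pred': batch}

    evaluator = Engine(eval_step)

    # output_transform extracts the value the metric should accumulate.
    metric = Average(output_transform=lambda output: output['y_pred'])
    metric.attach(evaluator, 'avg')

    state = evaluator.run(torch.tensor([0.0, 1.0, 2.0, 3.0, 4.0]))
    print(state.metrics['avg'])  # 2.0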
@@ -109,15 +111,53 @@ class Average(VariableAccumulation):
         default, CPU.
 
     Examples:
-        .. code-block:: python
 
-            evaluator = ...
+        .. testcode::
+
+            metric = Average()
+            metric.attach(default_evaluator, 'avg')
+            # Case 1. input is a number
+            data = torch.Tensor([0, 1, 2, 3, 4])
+            state = default_evaluator.run(data)
+            print(state.metrics['avg'])
+
+        .. testoutput::
+
+            2.0
+
+        .. testcode::
+
+            metric = Average()
+            metric.attach(default_evaluator, 'avg')
+            # Case 2. input is a 1D torch.Tensor
+            data = torch.Tensor([
+                [0, 0, 0],
+                [1, 1, 1],
+                [2, 2, 2],
+                [3, 3, 3]
+            ])
+            state = default_evaluator.run(data)
+            print(state.metrics['avg'])
 
-            custom_var_mean = Average(output_transform=lambda output: output['custom_var'])
-            custom_var_mean.attach(evaluator, 'mean_custom_var')
+        .. testoutput::
 
-            state = evaluator.run(dataset)
-            # state.metrics['mean_custom_var'] -> average of output['custom_var']
+            tensor([1.5000, 1.5000, 1.5000], dtype=torch.float64)
+
+        .. testcode::
+
+            metric = Average()
+            metric.attach(default_evaluator, 'avg')
+            # Case 3. input is an ND torch.Tensor
+            data = [
+                torch.Tensor([[0, 0, 0], [1, 1, 1]]),
+                torch.Tensor([[2, 2, 2], [3, 3, 3]])
+            ]
+            state = default_evaluator.run(data)
+            print(state.metrics['avg'])
+
+        .. testoutput::
+
+            tensor([1.5000, 1.5000, 1.5000], dtype=torch.float64)
     """
 
     def __init__(
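The expected outputs in the three Average cases can be sanity-checked with plain torch reductions; a quick sketch independent of ignite:

    import torch

    # Case 1: five scalar batches -> overall mean of 0..4.
    print(torch.tensor([0.0, 1.0, 2.0, 3.0, 4.0]).mean())  # tensor(2.)

    # Cases 2 and 3: samples stack along dim 0 -> per-element mean.
    samples = torch.tensor([[0, 0, 0], [1, 1, 1], [2, 2, 2], [3, 3, 3]],
                           dtype=torch.float64)
    print(samples.mean(dim=0))  # tensor([1.5000, 1.5000, 1.5000], dtype=torch.float64)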
@@ -166,6 +206,56 @@ class GeometricAverage(VariableAccumulation):
     For input `x` being an ND `torch.Tensor` with N > 1, the first dimension is seen as the number of samples and
     is aggregated and added to the accumulator: `accumulator *= prod(x, dim=0)`
 
+    ``output_transform`` can be added to the metric to transform the output into the form expected by the metric.
+
+    Examples:
+
+        .. testcode::
+
+            metric = GeometricAverage()
+            metric.attach(default_evaluator, 'avg')
+            # Case 1. input is a number
+            data = torch.Tensor([1, 2, 3])
+            state = default_evaluator.run(data)
+            print(state.metrics['avg'])
+
+        .. testoutput::
+
+            1.8171...
+
+        .. testcode::
+
+            metric = GeometricAverage()
+            metric.attach(default_evaluator, 'avg')
+            # Case 2. input is a 1D torch.Tensor
+            data = torch.Tensor([
+                [1, 1, 1],
+                [2, 2, 2],
+                [3, 3, 3],
+                [4, 4, 4],
+            ])
+            state = default_evaluator.run(data)
+            print(state.metrics['avg'])
+
+        .. testoutput::
+
+            tensor([2.2134, 2.2134, 2.2134], dtype=torch.float64)
+
+        .. testcode::
+
+            metric = GeometricAverage()
+            metric.attach(default_evaluator, 'avg')
+            # Case 3. input is an ND torch.Tensor
+            data = [
+                torch.Tensor([[1, 1, 1], [2, 2, 2]]),
+                torch.Tensor([[3, 3, 3], [4, 4, 4]])
+            ]
+            state = default_evaluator.run(data)
+            print(state.metrics['avg'])
+
+        .. testoutput::
+
+            tensor([2.2134, 2.2134, 2.2134], dtype=torch.float64)
     """
 
     def __init__(
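Likewise, GeometricAverage's documented values follow from the definition of a geometric mean, exp(mean(log x)); a sketch to verify them:

    import torch

    # Case 1: geometric mean of 1, 2, 3 is 6 ** (1/3) ~= 1.8171.
    x = torch.tensor([1.0, 2.0, 3.0])
    print(torch.exp(torch.log(x).mean()))  # tensor(1.8171)

    # Cases 2 and 3: per-element geometric mean over the sample dimension.
    samples = torch.tensor([[1, 1, 1], [2, 2, 2], [3, 3, 3], [4, 4, 4]],
                           dtype=torch.float64)
    print(torch.exp(torch.log(samples).mean(dim=0)))
    # tensor([2.2134, 2.2134, 2.2134], dtype=torch.float64)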
