Skip to content

Commit

Permalink
[Dy2St] Support calling backward() without params in dy2st (PaddlePaddle#49812)
Browse files Browse the repository at this point in the history

* Support calling backward() without params in dy2st
  • Loading branch information
0x45f committed Feb 1, 2023
1 parent dddc5d9 commit d057bb9
Show file tree
Hide file tree
Showing 5 changed files with 52 additions and 14 deletions.
3 changes: 2 additions & 1 deletion paddle/fluid/operators/select_output_op.cc
Original file line number Diff line number Diff line change
Expand Up @@ -93,7 +93,8 @@ class SelectOutputInferShape : public framework::InferShapeBase {
void operator()(framework::InferShapeContext *context) const override {
OP_INOUT_CHECK(context->HasInput("X"), "Input", "X", "SelectOutput");
OP_INOUT_CHECK(context->HasInput("Mask"), "Input", "Mask", "SelectOutput");
OP_INOUT_CHECK(context->HasOutputs("Out"), "Output", "Out", "SelectOutput");
OP_INOUT_CHECK(
context->HasOutputs("Out", true), "Output", "Out", "SelectOutput");
}
};

Expand Down
4 changes: 2 additions & 2 deletions paddle/fluid/pybind/eager_legacy_custom_python_api.h
Original file line number Diff line number Diff line change
Expand Up @@ -26,9 +26,9 @@ static PyObject *eager_api_run_program(PyObject *self,
PyObject *kwargs) {
PyThreadState *tstate = nullptr;
try {
auto X = GetTensorListFromArgs("run_program", "X", args, 0, false);
auto X = GetTensorListFromArgs("run_program", "X", args, 0, true);
auto Params = GetTensorListFromArgs("run_program", "Params", args, 1, true);
auto Out = GetTensorPtrListFromArgs("run_program", "Out", args, 2, false);
auto Out = GetTensorPtrListFromArgs("run_program", "Out", args, 2, true);
auto OutScope =
GetScopePtrListFromArgs("run_program", "OutScope", args, 3, false);
auto DOut = GetTensorPtrListFromArgs("run_program", "DOut", args, 4, true);
Expand Down
13 changes: 2 additions & 11 deletions python/paddle/fluid/dygraph/dygraph_to_static/partial_program.py
Original file line number Diff line number Diff line change
Expand Up @@ -188,10 +188,6 @@ def _get_scope(self, program_id=None, use_scope_cache=False):
else:
return core.Scope()

@LazyInitialized
def __fake_vars(self):
return _create_fake_var()

@LazyInitialized
def _double_grads(self):
return self._get_double_grads(self._origin_main_program)
Expand Down Expand Up @@ -489,7 +485,7 @@ def _append_backward_desc(self, main_program):
if isinstance(out, framework.Variable):
targets.append(program.global_block().var(out.name))

if targets and self._params:
if targets:
backward.gradients(targets=targets, inputs=[])

start_idx = len(
Expand Down Expand Up @@ -889,12 +885,7 @@ def _check_params_all_inited(self, main_program):
% name)

def _valid_vars(self, vars):
"""
Note: run_program_op.InferShape requires `X`/'Out' not be null.
But it's common in dy2static, fake varBase is created to handle the
problem.
"""
return vars if vars else self.__fake_vars
return vars if vars else None


def _create_fake_var():
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,45 @@
# Copyright (c) 2023 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import unittest

import numpy as np

import paddle


class Net(paddle.nn.Layer):
    """Minimal dy2st network with no trainable parameters.

    Its forward is converted by ``@paddle.jit.to_static`` and performs only
    a parameter-free elementwise op, so the resulting static program has an
    empty parameter list — the case this test targets.
    """

    def __init__(self):
        # Python 3 zero-argument form; equivalent to super(Net, self).__init__().
        super().__init__()

    @paddle.jit.to_static
    def forward(self, x):
        # Elementwise add with a constant: no weights are created or used.
        out = x + 1
        return out


class TestBackwardWithoutParams(unittest.TestCase):
    """Verify backward() succeeds on a to_static net that has no params."""

    def test_run(self):
        layer = Net()

        inp = paddle.ones([2, 2])
        inp.stop_gradient = False

        result = layer(inp)
        loss = paddle.mean(result)
        loss.backward()

        # loss = mean(inp + 1) over 4 elements, so d(loss)/d(inp) is 0.25
        # at every position.
        np.testing.assert_equal(inp.grad.numpy(), np.full(inp.shape, 0.25))


# Run the test suite when this file is executed directly.
if __name__ == '__main__':
    unittest.main()
Original file line number Diff line number Diff line change
Expand Up @@ -380,6 +380,7 @@ def transformed_result_compare(self):
if not isinstance(dy_outs, (tuple, list)):
dy_outs = (dy_outs, )

self.dygraph_func.eval()
st_outs = self.get_static_output()
if not isinstance(st_outs, (tuple, list)):
st_outs = (st_outs, )
Expand Down

0 comments on commit d057bb9

Please sign in to comment.