From a6f98654bbfd7d3f3cfc7cf4071b06dfe957859c Mon Sep 17 00:00:00 2001
From: qipengh
Date: Wed, 16 Mar 2022 15:10:41 +0800
Subject: [PATCH] fix cross_entropy in static graph mode on MLU and NPU

---
 python/paddle/nn/functional/loss.py | 8 ++++++--
 1 file changed, 6 insertions(+), 2 deletions(-)

diff --git a/python/paddle/nn/functional/loss.py b/python/paddle/nn/functional/loss.py
index e6efde836284a..8fa0747200527 100755
--- a/python/paddle/nn/functional/loss.py
+++ b/python/paddle/nn/functional/loss.py
@@ -1792,12 +1792,16 @@ def cross_entropy(input,
     helper = LayerHelper('softmax_with_cross_entropy', **locals())
     softmax = helper.create_variable_for_type_inference(dtype=input.dtype)
     out = helper.create_variable_for_type_inference(dtype=input.dtype)
+
+    outputs = {'Softmax': softmax, 'Loss': out}
+    if core.is_compiled_with_npu() or core.is_compiled_with_mlu():
+        backprop = helper.create_variable_for_type_inference(dtype=input.dtype)
+        outputs['Backprop'] = backprop
     helper.append_op(
         type='softmax_with_cross_entropy',
         inputs={'Logits': input,
                 'Label': label},
-        outputs={'Softmax': softmax,
-                 'Loss': out},
+        outputs=outputs,
         attrs=attrs)
 
     if weight is not None:
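
The patch builds the op's output map conditionally: on NPU/MLU builds it attaches an extra 'Backprop' variable to the softmax_with_cross_entropy op, presumably because those device kernels emit that tensor, while other backends keep the original two outputs. The sketch below is a minimal illustration (not part of the patch) of exercising the touched static-graph code path through the public API; CPUPlace is used as a stand-in, since an actual NPU/MLU build is assumed here.

    # Minimal sketch: run cross_entropy under static graph mode, the path
    # where LayerHelper appends the softmax_with_cross_entropy op. On an
    # NPU/MLU build the op would now also carry a 'Backprop' output.
    # CPUPlace below is illustrative only.
    import numpy as np
    import paddle
    import paddle.nn.functional as F

    paddle.enable_static()

    main_prog = paddle.static.Program()
    startup_prog = paddle.static.Program()
    with paddle.static.program_guard(main_prog, startup_prog):
        logits = paddle.static.data(name='logits', shape=[4, 10], dtype='float32')
        label = paddle.static.data(name='label', shape=[4, 1], dtype='int64')
        # This call reaches the patched LayerHelper branch in static mode.
        loss = F.cross_entropy(logits, label, reduction='mean')

    exe = paddle.static.Executor(paddle.CPUPlace())
    exe.run(startup_prog)
    loss_val, = exe.run(
        main_prog,
        feed={'logits': np.random.rand(4, 10).astype('float32'),
              'label': np.random.randint(0, 10, (4, 1)).astype('int64')},
        fetch_list=[loss])
    print(loss_val)

Creating the backprop variable only when core.is_compiled_with_npu() or core.is_compiled_with_mlu() is true keeps the op definition unchanged on GPU/CPU builds, so only the device-specific kernels see the extra output.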