From edc198862bfd48335447f244fe29dc136ec4d283 Mon Sep 17 00:00:00 2001
From: haohongxiang
Date: Tue, 5 Oct 2021 10:33:09 +0000
Subject: [PATCH] update

---
 .../dygraph_optimizer/hybrid_parallel_optimizer.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/python/paddle/distributed/fleet/meta_optimizers/dygraph_optimizer/hybrid_parallel_optimizer.py b/python/paddle/distributed/fleet/meta_optimizers/dygraph_optimizer/hybrid_parallel_optimizer.py
index 0dbf890128a5c..b00ef2cdcb0e1 100755
--- a/python/paddle/distributed/fleet/meta_optimizers/dygraph_optimizer/hybrid_parallel_optimizer.py
+++ b/python/paddle/distributed/fleet/meta_optimizers/dygraph_optimizer/hybrid_parallel_optimizer.py
@@ -74,12 +74,12 @@ def _dygraph_clip(self, params_grads):
             return params_grads
 
         global_norm_var_dist = layers.concat(sum_square_list_dist) if len(
-            sum_square_list_dist) == 0 else layers.concat(
+            sum_square_list_dist) != 0 else layers.concat(
                 [paddle.to_tensor([0.])])
         global_norm_var_dist = layers.reduce_sum(global_norm_var_dist)
         global_norm_var_not_dist = layers.concat(
             sum_square_list_not_dist) if len(
-                sum_square_list_not_dist) == 0 else layers.concat(
+                sum_square_list_not_dist) != 0 else layers.concat(
                     [paddle.to_tensor([0.])])
         global_norm_var_not_dist = layers.reduce_sum(global_norm_var_not_dist)
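
Note (not part of the patch): the change inverts the guard on the conditional expression so that the per-parameter squared norms are concatenated only when the list is non-empty, falling back to a single zero tensor otherwise; with the old `== 0` condition the empty branch attempted `concat` on an empty list and the non-empty branch discarded the real sums. Below is a minimal, hedged sketch of the corrected pattern. The helper name `global_norm_sq` is hypothetical and introduced only for illustration; it uses the public `paddle.concat`/`paddle.sum`/`paddle.to_tensor` APIs rather than the `layers.*` calls in the patched file.

```python
# Illustrative sketch only; mirrors the corrected conditional from the patch.
import paddle


def global_norm_sq(sum_square_list):
    # Concatenate the partial squared norms if any exist; otherwise use a
    # single zero tensor so the reduction yields 0 instead of failing on an
    # empty list.
    stacked = paddle.concat(sum_square_list) if len(
        sum_square_list) != 0 else paddle.to_tensor([0.])
    return paddle.sum(stacked)


# Example: a group holding two squared norms vs. a group holding none.
print(float(global_norm_sq([paddle.to_tensor([4.]), paddle.to_tensor([9.])])))  # 13.0
print(float(global_norm_sq([])))  # 0.0
```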