4 changes: 2 additions & 2 deletions python/paddle/nn/functional/input.py
@@ -17,7 +17,7 @@

import paddle
from paddle import _C_ops
from paddle.utils.decorator_utils import ParamAliasDecorator
from paddle.utils.decorator_utils import param_one_alias

from ...base.data_feeder import check_variable_and_dtype
from ...base.layer_helper import LayerHelper
@@ -162,7 +162,7 @@ def embedding_renorm_(
return weight


@ParamAliasDecorator({"x": ["input"]})
@param_one_alias(["x", "input"])
def embedding(
x: Tensor,
weight: Tensor,
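With param_one_alias(["x", "input"]) in place of the dict-based ParamAliasDecorator, a torch-style keyword is simply rewritten to the canonical one before the call. A minimal usage sketch, assuming a Paddle build that includes this patch:

import paddle
import paddle.nn.functional as F

weight = paddle.rand([10, 4])
ids = paddle.to_tensor([[1, 2], [3, 4]], dtype="int64")

out_a = F.embedding(x=ids, weight=weight)      # canonical keyword
out_b = F.embedding(input=ids, weight=weight)  # torch-style alias, rewritten to x=
assert out_a.shape == out_b.shape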
2 changes: 1 addition & 1 deletion python/paddle/sparse/unary.py
@@ -882,7 +882,7 @@ def expm1(x: Tensor, name: str | None = None) -> Tensor:
return _C_ops.sparse_expm1(x)


@param_one_alias({"x": "input"})
@param_one_alias(["x", "input"])
def reshape(x: Tensor, shape: ShapeLike, name: str | None = None) -> Tensor:
"""
Changes the shape of ``x`` without changing its value, requiring x to be a SparseCooTensor or SparseCsrTensor.
4 changes: 2 additions & 2 deletions python/paddle/tensor/logic.py
@@ -22,7 +22,7 @@
from paddle import _C_ops
from paddle.tensor.creation import full
from paddle.tensor.math import broadcast_shape
from paddle.utils.decorator_utils import ParamAliasDecorator
from paddle.utils.decorator_utils import ParamAliasDecorator, param_two_alias
from paddle.utils.inplace_utils import inplace_apis_in_dygraph_only

from ..base.data_feeder import check_type, check_variable_and_dtype
@@ -1330,7 +1330,7 @@ def bitwise_and_(x: Tensor, y: Tensor, name: str | None = None) -> Tensor:
return _C_ops.bitwise_and_(x, y)


@ParamAliasDecorator({"x": ["input"], "y": ["other"]})
@param_two_alias(["x", "input"], ["y", "other"])
def bitwise_or(
x: Tensor, y: Tensor, out: Tensor | None = None, name: str | None = None
) -> Tensor:
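param_two_alias handles two keyword pairs at once, so bitwise_or accepts either naming convention for both operands. A small sketch under the assumption that this patch is applied:

import paddle

a = paddle.to_tensor([5, 3], dtype="int32")
b = paddle.to_tensor([1, 6], dtype="int32")

r1 = paddle.bitwise_or(x=a, y=b)          # canonical keywords
r2 = paddle.bitwise_or(input=a, other=b)  # torch-style aliases
assert (r1.numpy() == r2.numpy()).all()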
5 changes: 3 additions & 2 deletions python/paddle/tensor/manipulation.py
@@ -27,6 +27,7 @@
from paddle.utils.decorator_utils import (
ParamAliasDecorator,
param_one_alias,
param_two_alias,
view_decorator,
)
from paddle.utils.inplace_utils import inplace_apis_in_dygraph_only
@@ -3471,7 +3472,7 @@ def squeeze_(
return _C_ops.squeeze_(input, axes)


@ParamAliasDecorator({"x": ["input"], "axis": ["dim"]})
@param_two_alias(["x", "input"], ["axis", "dim"])
def unique_consecutive(
x: Tensor,
return_inverse: bool = False,
@@ -4988,7 +4989,7 @@ def get_attr_expand_shape(list_expand_shape):
return out


@param_one_alias({"x": "input"})
@param_one_alias(["x", "input"])
def reshape(x: Tensor, shape: ShapeLike, name: str | None = None) -> Tensor:
"""
Changes the shape of ``x`` without changing its data.
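The manipulation ops follow the same pattern: unique_consecutive maps both the tensor and the axis keyword, while reshape only needs the single x/input alias. A sketch, again assuming this patch is applied:

import paddle

t = paddle.to_tensor([1.0, 1.0, 2.0, 2.0, 3.0])

r1 = paddle.reshape(x=t, shape=[5, 1])      # canonical keyword
r2 = paddle.reshape(input=t, shape=[5, 1])  # torch-style alias
u1 = paddle.unique_consecutive(x=t, axis=0)
u2 = paddle.unique_consecutive(input=t, dim=0)  # both aliases rewritten
assert (u1.numpy() == u2.numpy()).all()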
5 changes: 3 additions & 2 deletions python/paddle/tensor/math.py
@@ -25,7 +25,7 @@
from paddle.base.libpaddle import DataType
from paddle.common_ops_import import VarDesc, dygraph_utils
from paddle.pir import Value
from paddle.utils.decorator_utils import ParamAliasDecorator
from paddle.utils.decorator_utils import ParamAliasDecorator, param_two_alias
from paddle.utils.inplace_utils import inplace_apis_in_dygraph_only

from ..base.data_feeder import (
@@ -4963,7 +4963,7 @@ def isnan(x: Tensor, name: str | None = None) -> Tensor:
return out


@ParamAliasDecorator({"x": ["input"], "axis": ["dim"]})
@param_two_alias(["x", "input"], ["axis", "dim"])
def prod(
x: Tensor,
axis: int | Sequence[int] | None = None,
@@ -6628,6 +6628,7 @@ def lcm_(x: Tensor, y: Tensor, name: str | None = None) -> Tensor:
return out


@ParamAliasDecorator({"x": ["input"], "axis": ["dim"]})
def diff(
x: Tensor,
n: int = 1,
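prod switches to the list-based helper, while diff keeps the dict-based ParamAliasDecorator; in both cases a torch-style call is rewritten to the canonical keywords. A sketch for prod, assuming this patch is applied:

import paddle

t = paddle.to_tensor([[1.0, 2.0], [3.0, 4.0]])

p1 = paddle.prod(x=t, axis=1)     # canonical keywords
p2 = paddle.prod(input=t, dim=1)  # torch-style aliases
assert (p1.numpy() == p2.numpy()).all()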
3 changes: 3 additions & 0 deletions python/paddle/tensor/random.py
@@ -29,6 +29,7 @@
in_pir_mode,
use_pir_api,
)
from paddle.utils.decorator_utils import param_one_alias

from ..base.data_feeder import (
check_dtype,
@@ -442,6 +443,7 @@ def log_normal_(
return normal_(x, mean=mean, std=std).exp_()


@param_one_alias(["x", "input"])
def multinomial(
x: Tensor,
num_samples: int = 1,
@@ -1949,6 +1951,7 @@ def rand(
return uniform(shape, dtype, min=0.0, max=1.0, name=name)


@param_one_alias(["lam", "lambd"])
def exponential_(
x: Tensor, lam: float = 1.0, name: str | None = None
) -> Tensor:
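In random.py only single aliases are needed: multinomial maps x/input, and the in-place exponential_ maps the rate parameter lam/lambd. A brief sketch, assuming a build with this patch:

import paddle

probs = paddle.to_tensor([0.2, 0.3, 0.5])
s = paddle.multinomial(input=probs, num_samples=2)  # alias for x=

x = paddle.empty([2, 3], dtype="float32")
x.exponential_(lambd=2.0)  # torch-style keyword, rewritten to lam=2.0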
8 changes: 7 additions & 1 deletion python/paddle/tensor/stat.py
@@ -25,7 +25,10 @@
in_dynamic_mode,
in_dynamic_or_pir_mode,
)
from paddle.utils.decorator_utils import ParamAliasDecorator
from paddle.utils.decorator_utils import (
ParamAliasDecorator,
param_two_alias_one_default,
)

from ..base.data_feeder import check_type, check_variable_and_dtype
from ..common_ops_import import Variable
@@ -473,6 +476,7 @@ def nanmedian(


@overload
@param_two_alias_one_default(["x", "input"], ["axis", "dim"], ["mode", 'min'])
def median(
x: Tensor,
axis: int = ...,
@@ -483,6 +487,7 @@ def median(


@overload
@param_two_alias_one_default(["x", "input"], ["axis", "dim"], ["mode", 'min'])
def median(
x: Tensor,
axis: int | None = ...,
@@ -492,6 +497,7 @@ def median(
) -> Tensor: ...


@param_two_alias_one_default(["x", "input"], ["axis", "dim"], ["mode", 'min'])
def median(
x,
axis=None,
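param_two_alias_one_default does the same renaming but, when an alias is actually used, it also injects a default (here mode='min', matching torch.median's tie-breaking) and emits a warning. A sketch of the two call styles, assuming this patch is applied:

import paddle

t = paddle.to_tensor([[1.0, 2.0, 3.0, 4.0]])

m_avg = paddle.median(x=t, axis=1)     # canonical call, mode keeps its 'avg' default
m_min = paddle.median(input=t, dim=1)  # torch-style call, mode is forced to 'min'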
114 changes: 111 additions & 3 deletions python/paddle/utils/decorator_utils.py
@@ -14,6 +14,7 @@

import functools
import inspect
import warnings
from collections.abc import Iterable
from typing import Any, Callable, TypeVar, cast

@@ -92,13 +93,120 @@ def process(
return args, processed_kwargs


def param_one_alias(alias_mapping):
class SetDefaultParaAliasDecorator(DecoratorBase):
"""Support default parameter settings, implementation of parameter alias processing decorator"""

def __init__(
self,
alias_mapping: dict[str, Iterable[str]],
default_params: dict[str, Any],
) -> None:
super().__init__()
# Check alias_mapping types
if not isinstance(alias_mapping, dict):
raise TypeError("alias_mapping must be a dictionary")
for k, v in alias_mapping.items():
if not isinstance(v, (list, tuple, set)):
raise TypeError(f"Aliases for '{k}' must be iterable")

# Build a reverse alias map for faster lookup
self.alias_mapping = {}
for original, aliases in alias_mapping.items():
for alias in aliases:
self.alias_mapping[alias] = original

self.default_params = default_params
warnings.simplefilter("always", category=Warning)

def process(
self, args: tuple[Any, ...], kwargs: dict[str, Any]
) -> tuple[tuple[Any, ...], dict[str, Any]]:
"""Process parameters to handle alias mapping"""
if not kwargs:
return args, kwargs

is_torch_call = False

# Directly modify kwargs based on alias mapping (only modify if necessary)
for alias, original in self.alias_mapping.items():
if alias in kwargs:
if original not in kwargs:
kwargs[original] = kwargs.pop(alias)
is_torch_call = True
else:
raise ValueError(
f"Cannot specify both '{original}' and its alias '{alias}'"
)

if is_torch_call:
warnings.warn(
"Set default parameters " + str(self.default_params),
category=Warning,
)
for key, value in self.default_params.items():
if key not in kwargs:
kwargs[key] = value

return args, kwargs


def param_one_alias(alias_list):
def decorator(func):
@functools.wraps(func)
def wrapper(*args, **kwargs):
if not kwargs:
return func(*args, **kwargs)
if ("input" in kwargs) and ("x" not in kwargs):
kwargs["x"] = kwargs.pop("input")
if (alias_list[0] not in kwargs) and (alias_list[1] in kwargs):
kwargs[alias_list[0]] = kwargs.pop(alias_list[1])
return func(*args, **kwargs)

wrapper.__signature__ = inspect.signature(func)
return wrapper

return decorator


def param_two_alias(alias_list1, alias_list2):
def decorator(func):
@functools.wraps(func)
def wrapper(*args, **kwargs):
if not kwargs:
return func(*args, **kwargs)
if (alias_list1[0] not in kwargs) and (alias_list1[1] in kwargs):
kwargs[alias_list1[0]] = kwargs.pop(alias_list1[1])
if (alias_list2[0] not in kwargs) and (alias_list2[1] in kwargs):
kwargs[alias_list2[0]] = kwargs.pop(alias_list2[1])
return func(*args, **kwargs)

wrapper.__signature__ = inspect.signature(func)
return wrapper

return decorator


def param_two_alias_one_default(alias_list1, alias_list2, default_param):
def decorator(func):
@functools.wraps(func)
def wrapper(*args, **kwargs):
if not kwargs:
return func(*args, **kwargs)

is_torch_call = False

if (alias_list1[0] not in kwargs) and (alias_list1[1] in kwargs):
kwargs[alias_list1[0]] = kwargs.pop(alias_list1[1])
is_torch_call = True
if (alias_list2[0] not in kwargs) and (alias_list2[1] in kwargs):
kwargs[alias_list2[0]] = kwargs.pop(alias_list2[1])
is_torch_call = True

if is_torch_call:
warnings.warn(
"Set default parameters " + str(default_param),
category=Warning,
)
if default_param[0] not in kwargs:
kwargs[default_param[0]] = default_param[1]
return func(*args, **kwargs)

wrapper.__signature__ = inspect.signature(func)
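A self-contained sketch of how the new helpers behave on an ordinary function; it assumes this patch is applied so the decorators are importable, and fake_median is a made-up stand-in rather than a Paddle API:

from paddle.utils.decorator_utils import param_two_alias_one_default


@param_two_alias_one_default(["x", "input"], ["axis", "dim"], ["mode", "min"])
def fake_median(x, axis=None, keepdim=False, mode="avg"):
    return x, axis, mode


# Canonical call: kwargs pass through untouched, mode keeps its Python default.
print(fake_median(x=[1.0, 2.0], axis=0))     # ([1.0, 2.0], 0, 'avg')

# Torch-style call: input/dim are rewritten to x/axis and the extra default
# kicks in (a Warning about the injected default is also emitted).
print(fake_median(input=[1.0, 2.0], dim=0))  # ([1.0, 2.0], 0, 'min')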
29 changes: 29 additions & 0 deletions test/legacy_test/test_diff_op.py
@@ -344,6 +344,35 @@ def set_args(self):
self.append = None


class TestDiffOpFp16_TorchAlias(TestDiffOp):
def test_fp16_with_gpu(self):
paddle.enable_static()
if paddle.base.core.is_compiled_with_cuda():
place = paddle.CUDAPlace(0)
with paddle.static.program_guard(
paddle.static.Program(), paddle.static.Program()
):
input = np.random.random([4, 4]).astype("float16")
x = paddle.static.data(
name="input", shape=[4, 4], dtype="float16"
)
exe = paddle.static.Executor(place)
out = paddle.diff(
x,
n=self.n,
dim=self.axis,
prepend=self.prepend,
append=self.append,
)
fetches = exe.run(
feed={
"input": input,
},
fetch_list=[out],
)
paddle.disable_static()


if __name__ == '__main__':
paddle.enable_static()
unittest.main()
66 changes: 66 additions & 0 deletions test/legacy_test/test_exponential_op.py
@@ -344,6 +344,72 @@ def test_fixed_random_number(self):

paddle.enable_static()

def test_fixed_random_number_torch_alias(self):
# Test GPU Fixed random number, which is generated by 'curandStatePhilox4_32_10_t'
if not paddle.is_compiled_with_cuda():
return

        # Different GPUs generate different random values. Only test on V100 here.
if "V100" not in paddle.device.cuda.get_device_name():
return

paddle.disable_static()
paddle.set_device('gpu')
paddle.seed(2021)

x = paddle.empty([64, 3, 1024, 1024], dtype="float32")
x.exponential_(lambd=1.0)
x_np = x.numpy()
expect = [
0.80073667,
0.2249291,
0.07734892,
1.25392,
0.14013891,
0.45736602,
1.9735607,
0.30490234,
0.57100505,
0.8115938,
]
np.testing.assert_allclose(x_np[0, 0, 0, 0:10], expect, rtol=1e-05)

x = paddle.empty([10, 10], dtype="float32")
x.exponential_(lambd=3.0)
x_np = x.numpy()
expect = [
0.02831675,
0.1691551,
0.6798956,
0.69347525,
0.0243443,
0.22180498,
0.30574575,
0.9839696,
0.2834912,
0.59420055,
]
np.testing.assert_allclose(x_np[5, 0:10], expect, rtol=1e-05)

x = paddle.empty([16, 2, 1024, 768], dtype="float64")
x.exponential_(lambd=0.25)
x_np = x.numpy()
expect = [
10.0541229,
12.67860643,
1.09850734,
7.35289643,
2.65471225,
3.86217432,
2.97902086,
2.92744479,
2.67927152,
0.19667352,
]
np.testing.assert_allclose(x_np[0, 0, 0, 100:110], expect, rtol=1e-05)

paddle.enable_static()


class TestExponentialFP16Op(OpTest):
def setUp(self):