Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

【Error Message No. 21】paddle/cinn/dialect/* #63273

Merged
3 commits were merged into the base branch on Apr 10, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -181,10 +181,23 @@ class DynamicToStaticConverter {
CHECK(shape_analysis_->HasShapeOrDataForValue(value));
const auto& origin_shape = GetOriginValueShape(value);
const auto& target_shape = GetTargetValueShape(value);
CHECK_EQ(origin_shape.size(), target_shape.size());
PADDLE_ENFORCE_EQ(
origin_shape.size(),
target_shape.size(),
phi::errors::InvalidArgument(
"The size of origin shape and target shape is not equal,"
"where the size of origin shape:%d but the size of target "
"shape:%d.",
origin_shape.size(),
target_shape.size()));
for (std::size_t i = 0; i < origin_shape.size(); ++i) {
if (origin_shape.at(i) == -1) {
CHECK_GT(target_shape.at(i), 0);
PADDLE_ENFORCE_GT(target_shape.at(i),
0,
phi::errors::InvalidArgument(
"The size of target shape is incorrect."
"Expected size is larger than 0, but receive %d.",
target_shape.at(i)));
update = true;
} else {
CHECK(origin_shape.at(i) == target_shape.at(i));
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -154,7 +154,15 @@ struct StaticDimToDynamicConverter {
const auto& origin_shape = GetOriginValueShape(value);
const auto& target_shape = GetTargetValueShape(
shape_analysis->GetShapeOrDataForValue(value).shape());
CHECK_EQ(origin_shape.size(), target_shape.size());
PADDLE_ENFORCE_EQ(
origin_shape.size(),
target_shape.size(),
phi::errors::InvalidArgument(
"The size of origin shape and target shape is not equal,"
"where the size of origin shape:%d but the size of target "
"shape:%d.",
origin_shape.size(),
target_shape.size()));
const auto& origin_type = value.type().dyn_cast<::pir::DenseTensorType>();
pir::DenseTensorType target_type =
pir::DenseTensorType::get(pir::IrContext::Instance(),
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -1941,7 +1941,14 @@ class GeneralFusionMergePassHelper {
}
}

CHECK_GE(producer->consumer_groups().size(), candidates.size());
PADDLE_ENFORCE_GE(
producer->consumer_groups().size(),
candidates.size(),
phi::errors::InvalidArgument(
"The size of producer consumer groups is incorrect."
"Expected size is greater than or equal to %d, but receive %d.",
candidates.size(),
producer->consumer_groups().size()));
if (producer->consumer_groups().size() == 0 && candidates.size() == 0 &&
output_ops_set_.count(producer->CollectOps()[0]) == 0) {
producer->belong_groups.insert(*fusionable_consumers->begin());
Expand Down Expand Up @@ -2204,8 +2211,24 @@ class GeneralFusionMergePassHelper {
CHECK(consumer->belong_groups.size());
consumers.insert(*consumer->belong_groups.begin());
}
CHECK_EQ(group->producer_groups().size(), producers.size());
CHECK_EQ(group->consumer_groups().size(), consumers.size());
PADDLE_ENFORCE_EQ(
group->producer_groups().size(),
producers.size(),
phi::errors::InvalidArgument(
"The size of group's producer groups and producers is not equal,"
"where the size of group's producer groups:%d but the size of "
"producers:%d.",
group->producer_groups().size(),
producers.size()));
PADDLE_ENFORCE_EQ(
group->consumer_groups().size(),
consumers.size(),
phi::errors::InvalidArgument(
"The size of group's consumer groups and consumers is not equal,"
"where the size of group's consumer groups:%d but the size of "
"consumers:%d.",
group->consumer_groups().size(),
consumers.size()));
(*group->mut_producer_groups()) = producers;
(*group->mut_consumer_groups()) = consumers;
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -134,7 +134,9 @@ bool EraseOneExpand(
if (!SameInputOutputShape(expand, ShapeOrDataDimExprs4Value)) continue;
auto generate_shape_op =
expand.shape().defining_op<cinn::dialect::GenerateShapeOp>();
CHECK_NOTNULL(generate_shape_op);
PADDLE_ENFORCE_NOT_NULL(generate_shape_op,
phi::errors::PreconditionNotMet(
"The generate shape op must not be null."));
rewriter.ReplaceAllUsesWith(expand.out(), expand.x());
rewriter.EraseOp(expand);
if (generate_shape_op->use_empty()) {
Expand Down Expand Up @@ -280,7 +282,15 @@ void SetLeafBlockByGroupView(
}

auto new_group = CloneGroup(origin_group, block, &ir_mapping);
CHECK_EQ(origin_group->ops().size(), new_group->ops().size());
PADDLE_ENFORCE_EQ(
origin_group->ops().size(),
new_group->ops().size(),
phi::errors::InvalidArgument(
"The size of origin group ops and new group ops is not equal,"
"where the size of origin group ops:%d but the size of new group "
"ops:%d.",
origin_group->ops().size(),
new_group->ops().size()));
UpdateGroupShapeExprs(new_group,
origin_group,
ir_mapping,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -143,7 +143,12 @@ struct CachedDimExprToValueConverter {

pir::Value ConvertToValueImpl(const symbol::Add<symbol::DimExpr>& dim_expr) {
const auto& [operands] = dim_expr;
CHECK_GT(operands->size(), 0);
PADDLE_ENFORCE_GT(operands->size(),
0,
phi::errors::InvalidArgument(
"The size of operands is incorrect."
"Expected size is larger than 0, but receive %d.",
operands->size()));
pir::Value acc = ConvertToValue(operands->at(0));
for (int i = 1; i < operands->size(); ++i) {
if (operands->at(i).isa<symbol::Negative<symbol::DimExpr>>()) {
Expand All @@ -162,7 +167,12 @@ struct CachedDimExprToValueConverter {

pir::Value ConvertToValueImpl(const symbol::Mul<symbol::DimExpr>& dim_expr) {
const auto& [operands] = dim_expr;
CHECK_GT(operands->size(), 0);
PADDLE_ENFORCE_GT(operands->size(),
0,
phi::errors::InvalidArgument(
"The size of operands is incorrect."
"Expected size is larger than 0, but receive %d.",
operands->size()));
pir::Value prod = ConvertToValue(operands->at(0));
for (int i = 1; i < operands->size(); ++i) {
if (operands->at(i).isa<symbol::Reciprocal<symbol::DimExpr>>()) {
Expand All @@ -182,7 +192,12 @@ struct CachedDimExprToValueConverter {

pir::Value ConvertToValueImpl(const symbol::Max<symbol::DimExpr>& dim_expr) {
const auto& [operands] = dim_expr;
CHECK_GT(operands->size(), 0);
PADDLE_ENFORCE_GT(operands->size(),
0,
phi::errors::InvalidArgument(
"The size of operands is incorrect."
"Expected size is larger than 0, but receive %d.",
operands->size()));
pir::Value max = ConvertToValue(operands->at(0));
for (int i = 1; i < operands->size(); ++i) {
pir::Value operand_value = ConvertToValue(operands->at(i));
Expand All @@ -193,7 +208,12 @@ struct CachedDimExprToValueConverter {

pir::Value ConvertToValueImpl(const symbol::Min<symbol::DimExpr>& dim_expr) {
const auto& [operands] = dim_expr;
CHECK_GT(operands->size(), 0);
PADDLE_ENFORCE_GT(operands->size(),
0,
phi::errors::InvalidArgument(
"The size of operands is incorrect."
"Expected size is larger than 0, but receive %d.",
operands->size()));
pir::Value min = ConvertToValue(operands->at(0));
for (int i = 1; i < operands->size(); ++i) {
pir::Value operand_value = ConvertToValue(operands->at(i));
Expand All @@ -205,7 +225,12 @@ struct CachedDimExprToValueConverter {
pir::Value ConvertToValueImpl(
const symbol::Broadcast<symbol::DimExpr>& dim_expr) {
const auto& [operands] = dim_expr;
CHECK_GT(operands->size(), 0);
PADDLE_ENFORCE_GT(operands->size(),
0,
phi::errors::InvalidArgument(
"The size of operands is incorrect."
"Expected size is larger than 0, but receive %d.",
operands->size()));
pir::Value broadcasted = ConvertToValue(operands->at(0));
for (int i = 1; i < operands->size(); ++i) {
pir::Value operand_value = ConvertToValue(operands->at(i));
Expand Down