[AutoBump] Merge with fixes of 37263b6 (Sep 04)
mgehre-amd committed Sep 25, 2024
2 parents 2fff529 + 37263b6 commit 9b20f53
Showing 2 changed files with 34 additions and 27 deletions.
36 changes: 9 additions & 27 deletions mlir/lib/Dialect/Tosa/IR/TosaOps.cpp
@@ -856,35 +856,17 @@ LogicalResult tosa::PadOp::inferReturnTypeComponents(
   return success();
 }
 
-LogicalResult PadOp::verify() {
-  ShapedType inputType = llvm::cast<ShapedType>(getInput1().getType());
-  if (inputType.hasRank() && inputType.getRank() == 0) {
-    return emitOpError() << "input tensor rank must not be 0";
-  }
+LogicalResult tosa::PadOp::verify() {
+  RankedTensorType inputType = getInput1().getType();
+  RankedTensorType outputType = getOutput().getType();
+  TensorType paddingType = getPadding().getType();
 
-  ShapedType paddingType = llvm::cast<ShapedType>(getPadding().getType());
-  if (paddingType.hasRank()) {
-    if (paddingType.getRank() != 2) {
-      return emitOpError() << "paddings must be a tensor of rank 2";
-    }
-    if (inputType.hasRank() && !paddingType.isDynamicDim(0) &&
-        inputType.getRank() != paddingType.getDimSize(0)) {
-      return emitOpError() << "paddings must be a tensor of shape ["
-                           << inputType.getRank() << ", 2]";
-    }
-    if (!paddingType.isDynamicDim(1) && paddingType.getDimSize(1) != 2) {
-      return emitOpError() << "paddings must be a tensor of shape ["
-                           << inputType.getRank() << ", 2]";
-    }
+  if (inputType.getRank() != outputType.getRank())
+    return emitOpError() << "expect same input and output tensor rank.";
+
+  if (paddingType.hasRank() && paddingType.getRank() != 2)
+    return emitOpError() << "expect 'padding' tensor rank equal to 2.";
 
-    DenseIntElementsAttr paddings;
-    if (matchPattern(getPadding(), m_Constant(&paddings))) {
-      if (llvm::any_of(paddings,
-                       [](auto val) { return val.getSExtValue() < 0; })) {
-        return emitOpError() << "number of pad elements must be positive";
-      }
-    }
-  }
   return success();
 }

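For illustration, IR that satisfies the rewritten verifier might look like the following sketch; the function name, shapes, and the assumption of one element of padding per side of each dimension are hypothetical, not taken from the commit:

func.func @pad_example(%arg0: tensor<13x21xf32>, %arg1: tensor<2x2xi32>) -> tensor<15x23xf32> {
  // Input and output are both rank 2, and the padding operand is a
  // ranked rank-2 tensor, so both of the new verifier checks pass.
  %0 = tosa.pad %arg0, %arg1 : (tensor<13x21xf32>, tensor<2x2xi32>) -> tensor<15x23xf32>
  return %0 : tensor<15x23xf32>
}
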
25 changes: 25 additions & 0 deletions mlir/test/Dialect/Tosa/invalid.mlir
@@ -186,6 +186,31 @@ func.func @test_pad_zero_rank_input(%arg0: tensor<f32>, %arg1: tensor<i32>) -> t

 // -----
 
+func.func @test_pad_io_rank_mismatch(%arg0: tensor<13x21xf32>, %arg1: tensor<2x2xi32>) {
+  // expected-error@+1 {{'tosa.pad' op expect same input and output tensor rank.}}
+  %1 = tosa.pad %arg0, %arg1 : (tensor<13x21xf32>, tensor<2x2xi32>) -> tensor<13x21x3xf32>
+  return
+}
+
+// -----
+
+func.func @test_pad_invalid_padding_rank(%arg0: tensor<13x21xf32>, %arg1: tensor<2xi32>) {
+  // expected-error@+1 {{'tosa.pad' op expect 'padding' tensor rank equal to 2.}}
+  %1 = tosa.pad %arg0, %arg1 : (tensor<13x21xf32>, tensor<2xi32>) -> tensor<13x21xf32>
+  return
+}
+
+// -----
+
+func.func @test_pad_invalid_padConst_rank(%arg0: tensor<13x21xf32>, %arg1: tensor<2x2xi32>) {
+  %0 = "tosa.const"() {value = dense<3.14> : tensor<1xf32>} : () -> tensor<1xf32>
+  // expected-error@+1 {{'tosa.pad' op operand #2 must be 0D tensor of number values, but got 'tensor<1xf32>'}}
+  %1 = tosa.pad %arg0, %arg1, %0 : (tensor<13x21xf32>, tensor<2x2xi32>, tensor<1xf32>) -> tensor<13x21xf32>
+  return
+}
+
+// -----
+
 func.func @test_transpose_non_const(%arg0: tensor<13x21x3xf32>, %arg1: tensor<3xi32>) -> tensor<3x13x21xf32> {
   // expected-error@+1 {{'tosa.transpose' op perms of transpose is not constant}}
   %0 = tosa.transpose %arg0, %arg1 : (tensor<13x21x3xf32>, tensor<3xi32>) -> tensor<3x13x21xf32>
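
By contrast with test_pad_invalid_padConst_rank above, the optional pad value operand is accepted when it is a 0D tensor. A hypothetical well-formed counterpart, not part of the commit's tests, might be:

func.func @pad_with_scalar_const(%arg0: tensor<13x21xf32>, %arg1: tensor<2x2xi32>) -> tensor<13x21xf32> {
  // A 0D (scalar) constant satisfies the 0D-tensor constraint on operand #2.
  %0 = "tosa.const"() {value = dense<3.14> : tensor<f32>} : () -> tensor<f32>
  %1 = tosa.pad %arg0, %arg1, %0 : (tensor<13x21xf32>, tensor<2x2xi32>, tensor<f32>) -> tensor<13x21xf32>
  return %1 : tensor<13x21xf32>
}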
