Skip to content

Commit

Permalink
[AutoBump] Merge with fixes of 37263b6 (Sep 04)
Browse files — browse the repository at this point in the history
  • Loading branch information
mgehre-amd committed Sep 25, 2024
2 parents: 2fff529 + 37263b6 · commit 48b573b
Show file tree
Hide file tree
Showing 2 changed files with 25 additions and 90 deletions.
36 changes: 9 additions & 27 deletions mlir/lib/Dialect/Tosa/IR/TosaOps.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -856,35 +856,17 @@ LogicalResult tosa::PadOp::inferReturnTypeComponents(
return success();
}

LogicalResult PadOp::verify() {
ShapedType inputType = llvm::cast<ShapedType>(getInput1().getType());
if (inputType.hasRank() && inputType.getRank() == 0) {
return emitOpError() << "input tensor rank must not be 0";
}
LogicalResult tosa::PadOp::verify() {
RankedTensorType inputType = getInput1().getType();
RankedTensorType outputType = getOutput().getType();
TensorType paddingType = getPadding().getType();

ShapedType paddingType = llvm::cast<ShapedType>(getPadding().getType());
if (paddingType.hasRank()) {
if (paddingType.getRank() != 2) {
return emitOpError() << "paddings must be a tensor of rank 2";
}
if (inputType.hasRank() && !paddingType.isDynamicDim(0) &&
inputType.getRank() != paddingType.getDimSize(0)) {
return emitOpError() << "paddings must be a tensor of shape ["
<< inputType.getRank() << ", 2]";
}
if (!paddingType.isDynamicDim(1) && paddingType.getDimSize(1) != 2) {
return emitOpError() << "paddings must be a tensor of shape ["
<< inputType.getRank() << ", 2]";
}
if (inputType.getRank() != outputType.getRank())
return emitOpError() << "expect same input and output tensor rank.";

if (paddingType.hasRank() && paddingType.getRank() != 2)
return emitOpError() << "expect 'padding' tensor rank equal to 2.";

DenseIntElementsAttr paddings;
if (matchPattern(getPadding(), m_Constant(&paddings))) {
if (llvm::any_of(paddings,
[](auto val) { return val.getSExtValue() < 0; })) {
return emitOpError() << "number of pad elements must be positive";
}
}
}
return success();
}

Expand Down
79 changes: 16 additions & 63 deletions mlir/test/Dialect/Tosa/invalid.mlir
Original file line number Diff line number Diff line change
Expand Up @@ -105,83 +105,36 @@ func.func @test_pad_non_const(%arg0: tensor<13x21x3xf32>, %arg1: tensor<3x2xi32>

// -----

func.func @test_pad_non_const(%arg0: tensor<13x21x3xi8>, %arg1: tensor<i8>) -> tensor<?x?x?xi8> {
func.func @test_pad_non_const(%arg0: tensor<13x21x3xi8>, %arg1: tensor<i8>) -> tensor<13x21x3xi8> {
%0 = "tosa.const"() {value = dense<[[0, 0], [0, 1], [0, 1]]> : tensor<3x2xi32>} : () -> tensor<3x2xi32>
// expected-error@+1 {{'tosa.pad' op pad_const of pad is not constant}}
%1 = tosa.pad %arg0, %0, %arg1 : (tensor<13x21x3xi8>, tensor<3x2xi32>, tensor<i8>) -> tensor<?x?x?xi8>
return %1 : tensor<?x?x?xi8>
%1 = tosa.pad %arg0, %0, %arg1 : (tensor<13x21x3xi8>, tensor<3x2xi32>, tensor<i8>) -> tensor<13x21x3xi8>
return %1 : tensor<13x21x3xi8>
}

// -----

func.func @test_pad_output_shape_mismatch(%arg0: tensor<13x21x3xf32>) -> tensor<13x21x3xf32> {
%0 = "tosa.const"() {value = dense<[[1, 1], [1, 1], [1, 1]]> : tensor<3x2xi32>} : () -> tensor<3x2xi32>
// expected-error@+2 {{'tosa.pad' op failed to infer returned types}}
// expected-error@+1 {{'tosa.pad' op inferred type(s) 'tensor<15x23x5xf32>' are incompatible with return type(s) of operation 'tensor<13x21x3xf32>}}
%1 = tosa.pad %arg0, %0 : (tensor<13x21x3xf32>, tensor<3x2xi32>) -> tensor<13x21x3xf32>
return %1 : tensor<13x21x3xf32>
}

// -----

func.func @test_pad_type_mismatch(%arg0: tensor<13x21x3xf32>) -> tensor<15x23x5xi32> {
%0 = "tosa.const"() {value = dense<[[1, 1], [1, 1], [1, 1]]> : tensor<3x2xi32>} : () -> tensor<3x2xi32>
// expected-error@+2 {{'tosa.pad' op failed to infer returned types}}
// expected-error@+1 {{'tosa.pad' op inferred type(s) 'tensor<15x23x5xf32>' are incompatible with return type(s) of operation 'tensor<15x23x5xi32>}}
%1 = tosa.pad %arg0, %0 : (tensor<13x21x3xf32>, tensor<3x2xi32>) -> tensor<15x23x5xi32>
return %1 : tensor<15x23x5xi32>
}

// -----

func.func @test_pad_incorret_padding_rank(%arg0: tensor<13x21xf32>) -> tensor<13x21xf32> {
%0 = "tosa.const"() {value = dense<[0, 1]> : tensor<2xi32>} : () -> tensor<2xi32>
// expected-error@+1 {{'tosa.pad' op paddings must be a tensor of rank 2}}
%1 = tosa.pad %arg0, %0 : (tensor<13x21xf32>, tensor<2xi32>) -> tensor<13x21xf32>
return %1 : tensor<13x21xf32>
}

// -----

func.func @test_pad_incorret_padding_shape(%arg0: tensor<13x21xf32>) -> tensor<13x21xf32> {
%0 = "tosa.const"() {value = dense<[[0, 0], [0, 1], [0, 1], [1, 1]]> : tensor<4x2xi32>} : () -> tensor<4x2xi32>
// expected-error@+1 {{'tosa.pad' op paddings must be a tensor of shape [2, 2]}}
%1 = tosa.pad %arg0, %0 : (tensor<13x21xf32>, tensor<4x2xi32>) -> tensor<13x21xf32>
return %1 : tensor<13x21xf32>
}

// -----

func.func @test_pad_incorret_padding_shape(%arg0: tensor<13x21xf32>) -> tensor<13x21xf32> {
%0 = "tosa.const"() {value = dense<[[0, 0, 0, 1], [0, 1, 1, 1]]> : tensor<2x4xi32>} : () -> tensor<2x4xi32>
// expected-error@+1 {{'tosa.pad' op paddings must be a tensor of shape [2, 2]}}
%1 = tosa.pad %arg0, %0 : (tensor<13x21xf32>, tensor<2x4xi32>) -> tensor<13x21xf32>
return %1 : tensor<13x21xf32>
}

// -----

func.func @test_pad_negative_padding(%arg0: tensor<13x21xf32>) -> tensor<?x?xf32> {
%0 = "tosa.const"() {value = dense<[[0, 0], [0, -1]]> : tensor<2x2xi32>} : () -> tensor<2x2xi32>
// expected-error@+1 {{'tosa.pad' op number of pad elements must be positive}}
%1 = tosa.pad %arg0, %0 : (tensor<13x21xf32>, tensor<2x2xi32>) -> tensor<?x?xf32>
return %1 : tensor<?x?xf32>
func.func @test_pad_io_rank_mismatch(%arg0: tensor<13x21xf32>, %arg1: tensor<2x2xi32>) {
// expected-error@+1 {{'tosa.pad' op expect same input and output tensor rank.}}
%1 = tosa.pad %arg0, %arg1 : (tensor<13x21xf32>, tensor<2x2xi32>) -> tensor<13x21x3xf32>
return
}

// -----

func.func @test_pad_incorrect_input(%arg0: f32, %arg1: i32) -> f32 {
// expected-error@+1 {{'tosa.pad' op operand #0 must be ranked tensor of number values, but got 'f32'}}
%1 = tosa.pad %arg0, %arg1 : (f32, i32) -> f32
return %1 : f32
func.func @test_pad_invalid_padding_rank(%arg0: tensor<13x21xf32>, %arg1: tensor<2xi32>) {
// expected-error@+1 {{'tosa.pad' op expect 'padding' tensor rank equal to 2.}}
%1 = tosa.pad %arg0, %arg1 : (tensor<13x21xf32>, tensor<2xi32>) -> tensor<13x21xf32>
return
}

// -----

func.func @test_pad_zero_rank_input(%arg0: tensor<f32>, %arg1: tensor<i32>) -> tensor<f32> {
// expected-error@+1 {{'tosa.pad' op input tensor rank must not be 0}}
%1 = tosa.pad %arg0, %arg1 : (tensor<f32>, tensor<i32>) -> tensor<f32>
return %1 : tensor<f32>
func.func @test_pad_invalid_padConst_rank(%arg0: tensor<13x21xf32>, %arg1: tensor<2x2xi32>) {
%0 = "tosa.const"() {value = dense<3.14> : tensor<1xf32>} : () -> tensor<1xf32>
// expected-error@+1 {{'tosa.pad' op operand #2 must be 0D tensor of number values, but got 'tensor<1xf32>'}}
%1 = tosa.pad %arg0, %arg1, %0 : (tensor<13x21xf32>, tensor<2x2xi32>, tensor<1xf32>) -> tensor<13x21xf32>
return
}

// -----
Expand Down

0 comments on commit 48b573b

Please sign in to comment.