Integrate LLVM at llvm/llvm-project@07d135e16a63
Updates LLVM usage to match
[07d135e16a63](llvm/llvm-project@07d135e16a63)

PiperOrigin-RevId: 557549146
tensorflower-gardener authored and TensorFlow MLIR Team committed Aug 16, 2023
1 parent 3e7dc58 commit e53cb50
Showing 8 changed files with 45 additions and 46 deletions.
4 changes: 2 additions & 2 deletions WORKSPACE
@@ -26,9 +26,9 @@ http_archive(
],
)

LLVM_COMMIT = "cad3130a23daa023d5bbf42b8ec8c06aa0532abf"
LLVM_COMMIT = "07d135e16a63dd75f86a26f0ff2be6f102ddb01c"

LLVM_SHA256 = "1737a2f44c2ad949ee5ce275d7495b752fca56e2fe4f81f8e4586d69265f7adb"
LLVM_SHA256 = "a325959f210b75ea6453f1c993ac1011ba7522a4f9c97a17fdda6b366840c672"

http_archive(
name = "llvm-raw",
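For context: the two pinned constants above feed the `http_archive` rule that fetches LLVM. A minimal sketch of how such a pin is typically consumed follows; the URL template and `strip_prefix` are assumptions for illustration, not copied from this WORKSPACE.

```starlark
# Sketch only: a typical way LLVM_COMMIT / LLVM_SHA256 are consumed by the
# llvm-raw archive rule. URL template and strip_prefix are assumptions.
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")

LLVM_COMMIT = "07d135e16a63dd75f86a26f0ff2be6f102ddb01c"

LLVM_SHA256 = "a325959f210b75ea6453f1c993ac1011ba7522a4f9c97a17fdda6b366840c672"

http_archive(
    name = "llvm-raw",
    sha256 = LLVM_SHA256,  # verified against the downloaded tarball
    strip_prefix = "llvm-project-" + LLVM_COMMIT,
    urls = ["https://github.com/llvm/llvm-project/archive/" + LLVM_COMMIT + ".tar.gz"],
)
```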
2 changes: 1 addition & 1 deletion build_tools/llvm_version.txt
@@ -1,2 +1,2 @@
-cad3130a23daa023d5bbf42b8ec8c06aa0532abf
+07d135e16a63dd75f86a26f0ff2be6f102ddb01c

2 changes: 1 addition & 1 deletion stablehlo/build_tools/llvm_version.txt
@@ -1 +1 @@
-cad3130a23daa023d5bbf42b8ec8c06aa0532abf
+07d135e16a63dd75f86a26f0ff2be6f102ddb01c
38 changes: 19 additions & 19 deletions stablehlo/stablehlo/conversions/tosa/tests/binary.mlir
@@ -30,15 +30,15 @@ func.func @compare_lt(%arg0 : tensor<10xf32>, %arg1 : tensor<10xf32>) -> tensor<

// CHECK-LABEL: @compare_ne
func.func @compare_ne(%arg0 : tensor<10xi32>, %arg1 : tensor<10xi32>) -> tensor<10xi1> {
// CHECK-DAG: %[[VAR0:.*]] = "tosa.equal"(%arg0, %arg1)
// CHECK-DAG: %[[VAR1:.*]] = "tosa.logical_not"(%[[VAR0]])
// CHECK-DAG: %[[VAR0:.*]] = tosa.equal %arg0, %arg1
// CHECK-DAG: %[[VAR1:.*]] = tosa.logical_not %[[VAR0]]
%0 = "stablehlo.compare"(%arg0, %arg1) {comparison_direction = #stablehlo<comparison_direction NE>} : (tensor<10xi32>, tensor<10xi32>) -> tensor<10xi1>
return %0 : tensor<10xi1>
}

// CHECK-LABEL: @concatenate
func.func @concatenate(%arg0 : tensor<3x3xf32>, %arg1 : tensor<3x3xf32>) -> tensor<6x3xf32> {
// CHECK: "tosa.concat"(%arg0, %arg1) <{axis = 0 : i64}> : (tensor<3x3xf32>, tensor<3x3xf32>) -> tensor<6x3xf32>
// CHECK: tosa.concat %arg0, %arg1 {axis = 0 : i32} : (tensor<3x3xf32>, tensor<3x3xf32>) -> tensor<6x3xf32>
%0 = "stablehlo.concatenate"(%arg0, %arg1) {dimension = 0 : i64} : (tensor<3x3xf32>, tensor<3x3xf32>) -> tensor<6x3xf32>
return %0 : tensor<6x3xf32>
}
@@ -60,40 +60,40 @@ func.func @divide_f32(%arg0 : tensor<10xf32>, %arg1 : tensor<10xf32>) -> tensor<

// CHECK-LABEL: @dot_vector_vector
func.func @dot_vector_vector(%arg0 : tensor<3xf32>, %arg1 : tensor<3xf32>) -> tensor<f32> {
// CHECK-DAG: %[[VAR0:.*]] = "tosa.reshape"(%arg0) <{new_shape = array<i64: 1, 1, 3>}>
// CHECK-DAG: %[[VAR1:.*]] = "tosa.reshape"(%arg1) <{new_shape = array<i64: 1, 3, 1>}>
// CHECK-DAG: %[[VAR2:.*]] = "tosa.matmul"(%[[VAR0]], %[[VAR1]])
// CHECK-DAG: %[[VAR3:.*]] = "tosa.reshape"(%[[VAR2]])
// CHECK-DAG: %[[VAR0:.*]] = tosa.reshape %arg0 {new_shape = array<i64: 1, 1, 3>}
// CHECK-DAG: %[[VAR1:.*]] = tosa.reshape %arg1 {new_shape = array<i64: 1, 3, 1>}
// CHECK-DAG: %[[VAR2:.*]] = tosa.matmul %[[VAR0]], %[[VAR1]]
// CHECK-DAG: %[[VAR3:.*]] = tosa.reshape %[[VAR2]]
%0 = "stablehlo.dot"(%arg0, %arg1) : (tensor<3xf32>, tensor<3xf32>) -> tensor<f32>
return %0 : tensor<f32>
}

// CHECK-LABEL: @dot_vector_matrix
func.func @dot_vector_matrix(%arg0 : tensor<2xf32>, %arg1 : tensor<2x3xf32>) -> tensor<3xf32> {
-// CHECK-DAG: %[[VAR0:.*]] = "tosa.reshape"(%arg0) <{new_shape = array<i64: 1, 1, 2>}>
-// CHECK-DAG: %[[VAR1:.*]] = "tosa.reshape"(%arg1) <{new_shape = array<i64: 1, 2, 3>}>
-// CHECK-DAG: %[[VAR2:.*]] = "tosa.matmul"(%[[VAR0]], %[[VAR1]])
-// CHECK-DAG: %[[VAR3:.*]] = "tosa.reshape"(%[[VAR2]])
+// CHECK-DAG: %[[VAR0:.*]] = tosa.reshape %arg0 {new_shape = array<i64: 1, 1, 2>}
+// CHECK-DAG: %[[VAR1:.*]] = tosa.reshape %arg1 {new_shape = array<i64: 1, 2, 3>}
+// CHECK-DAG: %[[VAR2:.*]] = tosa.matmul %[[VAR0]], %[[VAR1]]
+// CHECK-DAG: %[[VAR3:.*]] = tosa.reshape %[[VAR2]]
%0 = "stablehlo.dot"(%arg0, %arg1) : (tensor<2xf32>, tensor<2x3xf32>) -> tensor<3xf32>
return %0 : tensor<3xf32>
}

// CHECK-LABEL: @dot_matrix_vector
func.func @dot_matrix_vector(%arg0 : tensor<2x3xf32>, %arg1 : tensor<3xf32>) -> tensor<2xf32> {
-// CHECK-DAG: %[[VAR0:.*]] = "tosa.reshape"(%arg0) <{new_shape = array<i64: 1, 2, 3>}>
-// CHECK-DAG: %[[VAR1:.*]] = "tosa.reshape"(%arg1) <{new_shape = array<i64: 1, 3, 1>}>
-// CHECK-DAG: %[[VAR2:.*]] = "tosa.matmul"(%[[VAR0]], %[[VAR1]])
-// CHECK-DAG: %[[VAR3:.*]] = "tosa.reshape"(%[[VAR2]])
+// CHECK-DAG: %[[VAR0:.*]] = tosa.reshape %arg0 {new_shape = array<i64: 1, 2, 3>}
+// CHECK-DAG: %[[VAR1:.*]] = tosa.reshape %arg1 {new_shape = array<i64: 1, 3, 1>}
+// CHECK-DAG: %[[VAR2:.*]] = tosa.matmul %[[VAR0]], %[[VAR1]]
+// CHECK-DAG: %[[VAR3:.*]] = tosa.reshape %[[VAR2]]
%0 = "stablehlo.dot"(%arg0, %arg1) : (tensor<2x3xf32>, tensor<3xf32>) -> tensor<2xf32>
return %0 : tensor<2xf32>
}

// CHECK-LABEL: @dot_matrix_matrix
func.func @dot_matrix_matrix(%arg0 : tensor<2x3xf32>, %arg1 : tensor<3x4xf32>) -> tensor<2x4xf32> {
-// CHECK-DAG: %[[VAR0:.*]] = "tosa.reshape"(%arg0) <{new_shape = array<i64: 1, 2, 3>}>
-// CHECK-DAG: %[[VAR1:.*]] = "tosa.reshape"(%arg1) <{new_shape = array<i64: 1, 3, 4>}>
-// CHECK-DAG: %[[VAR2:.*]] = "tosa.matmul"(%[[VAR0]], %[[VAR1]])
-// CHECK-DAG: %[[VAR3:.*]] = "tosa.reshape"(%[[VAR2]])
+// CHECK-DAG: %[[VAR0:.*]] = tosa.reshape %arg0 {new_shape = array<i64: 1, 2, 3>}
+// CHECK-DAG: %[[VAR1:.*]] = tosa.reshape %arg1 {new_shape = array<i64: 1, 3, 4>}
+// CHECK-DAG: %[[VAR2:.*]] = tosa.matmul %[[VAR0]], %[[VAR1]]
+// CHECK-DAG: %[[VAR3:.*]] = tosa.reshape %[[VAR2]]
%0 = "stablehlo.dot"(%arg0, %arg1) : (tensor<2x3xf32>, tensor<3x4xf32>) -> tensor<2x4xf32>
return %0 : tensor<2x4xf32>
}
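All of the CHECK-line churn in this file follows one pattern: with the new LLVM revision, TOSA ops print in their custom assembly format instead of the generic `"tosa.op"(...) <{...}>` form, and `tosa.concat` now takes its `axis` as an `i32` attribute rather than `i64`. A small before/after sketch, written here for clarity rather than taken from the test file:

```mlir
// Sketch: the same op under the old and new printed forms.
func.func @concat_forms(%arg0: tensor<3x3xf32>, %arg1: tensor<3x3xf32>) -> tensor<6x3xf32> {
  // Old (generic form, i64 axis):
  //   %0 = "tosa.concat"(%arg0, %arg1) <{axis = 0 : i64}> : (tensor<3x3xf32>, tensor<3x3xf32>) -> tensor<6x3xf32>
  // New (custom assembly, i32 axis):
  %0 = tosa.concat %arg0, %arg1 {axis = 0 : i32} : (tensor<3x3xf32>, tensor<3x3xf32>) -> tensor<6x3xf32>
  return %0 : tensor<6x3xf32>
}
```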
4 changes: 2 additions & 2 deletions stablehlo/stablehlo/conversions/tosa/tests/nullary.mlir
@@ -18,15 +18,15 @@ func.func @constant_f64() -> tensor<10xf64> {
// CHECK-LABEL: @iota_dimension_0
func.func @iota_dimension_0() -> tensor<4x8xf32> {
// CHECK-DAG: %[[VAR0:.*]] = "tosa.const"() <{value = dense<[0.000000e+00, 1.000000e+00, 2.000000e+00, 3.000000e+00]> : tensor<4xf32>}>
// CHECK-DAG: %[[VAR1:.*]] = "tosa.tile"(%[[VAR0]]) <{multiples = array<i64: 1, 8>}>
// CHECK-DAG: %[[VAR1:.*]] = tosa.tile %[[VAR0]] {multiples = array<i64: 1, 8>}
%0 = "stablehlo.iota"() {iota_dimension = 0 : i64} : () -> (tensor<4x8xf32>)
return %0 : tensor<4x8xf32>
}

// CHECK-LABEL: @iota_dimension_1
func.func @iota_dimension_1() -> tensor<4x8xi32> {
// CHECK-DAG: %[[VAR0:.*]] = "tosa.const"() <{value = dense<[0, 1, 2, 3, 4, 5, 6, 7]> : tensor<8xi32>}>
// CHECK-DAG: %[[VAR1:.*]] = "tosa.tile"(%[[VAR0]]) <{multiples = array<i64: 4, 1>}>
// CHECK-DAG: %[[VAR1:.*]] = tosa.tile %[[VAR0]] {multiples = array<i64: 4, 1>}
%0 = "stablehlo.iota"() {iota_dimension = 1 : i64} : () -> (tensor<4x8xi32>)
return %0 : tensor<4x8xi32>
}
2 changes: 1 addition & 1 deletion stablehlo/stablehlo/conversions/tosa/tests/ternary.mlir
@@ -2,7 +2,7 @@

// CHECK-LABEL: @concatenate
func.func @concatenate(%arg0 : tensor<5x2xf32>, %arg1 : tensor<5x5xf32>, %arg2 : tensor<5x7xf32>) -> tensor<5x14xf32> {
// CHECK: "tosa.concat"(%arg0, %arg1, %arg2) <{axis = 1 : i64}> : (tensor<5x2xf32>, tensor<5x5xf32>, tensor<5x7xf32>) -> tensor<5x14xf32>
// CHECK: tosa.concat %arg0, %arg1, %arg2 {axis = 1 : i32} : (tensor<5x2xf32>, tensor<5x5xf32>, tensor<5x7xf32>) -> tensor<5x14xf32>
%0 = "stablehlo.concatenate"(%arg0, %arg1, %arg2) {dimension = 1 : i64} : (tensor<5x2xf32>, tensor<5x5xf32>, tensor<5x7xf32>) -> tensor<5x14xf32>
return %0 : tensor<5x14xf32>
}
35 changes: 17 additions & 18 deletions stablehlo/stablehlo/conversions/tosa/tests/unary.mlir
@@ -31,8 +31,8 @@ func.func @exponential(%arg : tensor<10xf32>) -> tensor<10xf32> {
// CHECK-LABEL: @exponential_minus_one
func.func @exponential_minus_one(%arg : tensor<10xf32>) -> tensor<10xf32> {
// CHECK-DAG: %[[VAR0:.*]] = "tosa.const"() <{value = dense<1.000000e+00>
// CHECK-DAG: %[[VAR1:.*]] = "tosa.exp"(%arg0)
// CHECK-DAG: %[[VAR2:.*]] = "tosa.sub"(%[[VAR1]], %[[VAR0]])
// CHECK-DAG: %[[VAR1:.*]] = tosa.exp %arg0
// CHECK-DAG: %[[VAR2:.*]] = tosa.sub %[[VAR1]], %[[VAR0]]
%0 = "stablehlo.exponential_minus_one"(%arg) : (tensor<10xf32>) -> tensor<10xf32>
return %0 : tensor<10xf32>
}
@@ -47,9 +47,9 @@ func.func @floor(%arg : tensor<10xf32>) -> tensor<10xf32> {
// CHECK-LABEL: @is_finite
func.func @is_finite(%arg : tensor<10xf32>) -> tensor<10xi1> {
// CHECK-DAG: %[[VAR0:.*]] = "tosa.const"() <{value = dense<0x7F800000>
// CHECK-DAG: %[[VAR1:.*]] = "tosa.abs"(%arg0)
// CHECK-DAG: %[[VAR2:.*]] = "tosa.equal"(%[[VAR1]], %[[VAR0]])
// CHECK-DAG: %[[VAR3:.*]] = "tosa.logical_not"(%[[VAR2]])
// CHECK-DAG: %[[VAR1:.*]] = tosa.abs %arg0
// CHECK-DAG: %[[VAR2:.*]] = tosa.equal %[[VAR1]], %[[VAR0]]
// CHECK-DAG: %[[VAR3:.*]] = tosa.logical_not %[[VAR2]]
%0 = "stablehlo.is_finite"(%arg) : (tensor<10xf32>) -> tensor<10xi1>
return %0 : tensor<10xi1>
}
@@ -64,8 +64,8 @@ func.func @log(%arg : tensor<10xf32>) -> tensor<10xf32> {
// CHECK-LABEL: @log_plus_one
func.func @log_plus_one(%arg : tensor<10xf16>) -> tensor<10xf16> {
// CHECK-DAG: %[[VAR0:.*]] = "tosa.const"() <{value = dense<1.000000e+00>
// CHECK-DAG: %[[VAR1:.*]] = "tosa.add"(%arg0, %[[VAR0]])
// CHECK-DAG: %[[VAR2:.*]] = "tosa.log"(%[[VAR1]])
// CHECK-DAG: %[[VAR1:.*]] = tosa.add %arg0, %[[VAR0]]
// CHECK-DAG: %[[VAR2:.*]] = tosa.log %[[VAR1]]
%0 = "stablehlo.log_plus_one"(%arg) : (tensor<10xf16>) -> tensor<10xf16>
return %0 : tensor<10xf16>
}
@@ -79,7 +79,7 @@ func.func @negate(%arg : tensor<10xf32>) -> tensor<10xf32> {

// CHECK-LABEL: @slice
func.func @slice(%arg : tensor<4x3xf32>) -> tensor<2x2xf32> {
// CHECK: "tosa.slice"(%arg0) <{size = array<i64: 2, 2>, start = array<i64: 2, 1>}>
// CHECK: tosa.slice %arg0 {size = array<i64: 2, 2>, start = array<i64: 2, 1>}
%0 = "stablehlo.slice"(%arg) {
start_indices = dense<[2, 1]> : tensor<2xi64>,
limit_indices = dense<[4, 3]> : tensor<2xi64>,
@@ -122,7 +122,7 @@ func.func @tanh(%arg : tensor<10xf32>) -> tensor<10xf32> {
// CHECK-LABEL: @transpose
func.func @transpose(%arg0: tensor<1x2x3xf32>) -> tensor<3x2x1xf32> {
// CHECK-DAG: %[[VAR0:.*]] = "tosa.const"() <{value = dense<[2, 1, 0]> : tensor<3xi64>}> : () -> tensor<3xi64>
// CHECK-DAG: %[[VAR1:.*]] = "tosa.transpose"(%arg0, %[[VAR0]])
// CHECK-DAG: %[[VAR1:.*]] = tosa.transpose %arg0, %[[VAR0]]
%0 = "stablehlo.transpose"(%arg0) {permutation = dense<[2, 1, 0]> : tensor<3xi64>} : (tensor<1x2x3xf32>) -> tensor<3x2x1xf32>
return %0 : tensor<3x2x1xf32>
}
@@ -131,15 +131,14 @@ func.func @transpose(%arg0: tensor<1x2x3xf32>) -> tensor<3x2x1xf32> {
func.func @while(%arg0: tensor<i32>) -> tensor<i32> {
// CHECK-DAG: %[[VAR0:.*]] = "tosa.const"() <{value = dense<3> : tensor<i32>}
// CHECK-DAG: %[[VAR1:.*]] = "tosa.const"() <{value = dense<1> : tensor<i32>}
// CHECK: %[[VAR2:.*]] = "tosa.while_loop"(%arg0) ({
// CHECK: ^bb0(%[[ARG0:.+]]: tensor<i32>):
// CHECK: %[[VAR3:.*]] = "tosa.equal"(%[[ARG0]], %[[VAR0]])
// CHECK: "tosa.yield"(%[[VAR3]])
// CHECK: }, {
// CHECK: ^bb0(%[[ARG0:.+]]: tensor<i32>):
// CHECK: %[[VAR4:.*]] = "tosa.add"(%[[ARG0]], %[[VAR1]])
// CHECK: "tosa.yield"(%[[VAR4]])
// CHECK: }) : (tensor<i32>) -> tensor<i32>
// CHECK: %[[VAR2:.*]] = tosa.while_loop (%[[ARG1:.+]] = %arg0) : (tensor<i32>) -> tensor<i32> {
// CHECK: %[[VAR3:.*]] = tosa.equal %[[ARG1]], %[[VAR0]]
// CHECK: tosa.yield %[[VAR3]]
// CHECK: } do {
// CHECK: ^bb0(%[[ARG1:.+]]: tensor<i32>):
// CHECK: %[[VAR4:.*]] = tosa.add %[[ARG1]], %[[VAR1]]
// CHECK: tosa.yield %[[VAR4]]
// CHECK: }
// CHECK: return %[[VAR2]] : tensor<i32>
// CHECK: }
%0 = "stablehlo.while"(%arg0) ( {
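Beyond the one-line syntax swaps, `tosa.while_loop` gained a structured assembly format: loop-carried values are bound in the header, and the condition and body regions are separated by `do`. A sketch of a complete loop in the new form, reconstructed from the CHECK lines above (details such as the exact type annotations are assumptions):

```mlir
// Sketch reconstructed from the CHECK lines above; the loop runs while
// %iter equals the constant, mirroring the test's condition.
func.func @while_example(%arg0: tensor<i32>) -> tensor<i32> {
  %limit = "tosa.const"() <{value = dense<3> : tensor<i32>}> : () -> tensor<i32>
  %step = "tosa.const"() <{value = dense<1> : tensor<i32>}> : () -> tensor<i32>
  %0 = tosa.while_loop (%iter = %arg0) : (tensor<i32>) -> tensor<i32> {
    // Condition region: yield an i1 deciding whether to run the body.
    %cond = tosa.equal %iter, %limit : (tensor<i32>, tensor<i32>) -> tensor<i1>
    tosa.yield %cond : tensor<i1>
  } do {
  ^bb0(%iter: tensor<i32>):
    // Body region: yield the next value of the loop-carried tensor.
    %next = tosa.add %iter, %step : (tensor<i32>, tensor<i32>) -> tensor<i32>
    tosa.yield %next : tensor<i32>
  }
  return %0 : tensor<i32>
}
```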
Expand Up @@ -335,13 +335,13 @@ struct ConvertStablehloReduceOp : public OpRewritePattern<stablehlo::ReduceOp> {
reduceOpResult =
rewriter
.create<tosa::ReduceSumOp>(op->getLoc(), innerTy, operand,
-                                   rewriter.getI64IntegerAttr(dimension))
+                                   rewriter.getI32IntegerAttr(dimension))
.getResult();
} else if (isa<stablehlo::MaxOp>(innerOp)) {
reduceOpResult =
rewriter
.create<tosa::ReduceMaxOp>(op->getLoc(), innerTy, operand,
-                                   rewriter.getI64IntegerAttr(dimension))
+                                   rewriter.getI32IntegerAttr(dimension))
.getResult();
} else {
return rewriter.notifyMatchFailure(
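This C++ change mirrors the attribute-width change in the tests: the `axis` attribute of the TOSA reductions is now 32-bit, so the pattern builds it with `getI32IntegerAttr` instead of `getI64IntegerAttr`. The expected effect on the emitted IR looks roughly like this (illustrative only, assuming the reduce ops print in the same custom assembly as the ops in the tests above):

```mlir
// Illustrative: a reduction emitted with the new i32 axis attribute.
// TOSA reductions keep the reduced dimension with extent 1.
func.func @reduce_example(%arg0: tensor<4x8xf32>) -> tensor<1x8xf32> {
  %0 = tosa.reduce_sum %arg0 {axis = 0 : i32} : (tensor<4x8xf32>) -> tensor<1x8xf32>
  return %0 : tensor<1x8xf32>
}
```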
