// RUN: %target-sil-opt -sil-print-all %s | %FileCheck %s
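// Verifies that `graph_op` instructions round-trip through the SIL parser and
// printer: each function below is parsed, printed back by sil-opt, and the
// output is matched against the CHECK lines.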
sil_stage raw
import Builtin
import Swift
// Dummy struct mimicking the real `Tensor` type.
struct Tensor<Scalar> {}
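
// Constant attributes of every supported kind (integers of various widths,
// hex and decimal float literals, strings, metatypes, and nested arrays)
// should print back unchanged, with float values canonicalized to their hex
// form followed by a readable comment.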
sil @attribute_test : $@convention(thin) () -> Tensor<Float> {
bb0:
%0 = graph_op "tf.Dummy"() {int1: i32 -3, int2: i8 4, int3: i64 42} : $Tensor<Float>
%1 = graph_op "tf.Dummy"() {hex1: f64 0x40091EB851EB851F, hex2: f32 0x4048F5C3} : $Tensor<Float>
%2 = graph_op "tf.Dummy"() {float1: f64 3.14, float2: f32 -3.14} : $Tensor<Float>
%3 = graph_op "tf.Dummy"() {string1: "hello", string2: "world"} : $Tensor<Float>
%4 = graph_op "tf.Dummy"() {metatype1: $Float, metatype2: $Tensor<Float>} : $Tensor<Float>
%5 = graph_op "tf.Dummy"() {array: [[i8 1, i32 -2], [f32 -1.0, $Float]]} : $Tensor<Float>
return %0 : $Tensor<Float>
}
// CHECK-LABEL: sil @attribute_test : $@convention(thin) () -> Tensor<Float> {
// CHECK: bb0:
// CHECK-NEXT: %0 = graph_op "tf.Dummy"() {int1: i32 -3, int2: i8 4, int3: i64 42} : $Tensor<Float>
// CHECK-NEXT: %1 = graph_op "tf.Dummy"() {hex1: f64 0x40091EB851EB851F /* 3.1400000000000001 */, hex2: f32 0x4048F5C3 /* 3.1400001 */} : $Tensor<Float>
// CHECK-NEXT: %2 = graph_op "tf.Dummy"() {float1: f64 0x40091EB851EB851F /* 3.1400000000000001 */, float2: f32 0xC048F5C3 /* -3.1400001 */} : $Tensor<Float>
// CHECK-NEXT: %3 = graph_op "tf.Dummy"() {string1: "hello", string2: "world"} : $Tensor<Float>
// CHECK-NEXT: %4 = graph_op "tf.Dummy"() {metatype1: $Float, metatype2: $Tensor<Float>} : $Tensor<Float>
// CHECK-NEXT: %5 = graph_op "tf.Dummy"() {array: {{\[\[}}i8 1, i32 -2], [f32 0xBF800000 /* -1 */, $Float]]} : $Tensor<Float>
// CHECK-NEXT: return %0 : $Tensor<Float>
// CHECK-NEXT: }
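
// A `graph_op` result can feed directly into the operand list of another
// `graph_op`.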
sil @chained_op_test : $@convention(thin) (Tensor<Float>, Tensor<Float>) -> Tensor<Float> {
bb0(%0 : $Tensor<Float>, %1 : $Tensor<Float>):
%2 = graph_op "tf.Add"(%0 : $Tensor<Float>, %1 : $Tensor<Float>) {T: $Float} : $Tensor<Float>
%3 = graph_op "tf.Mul"(%2 : $Tensor<Float>, %2 : $Tensor<Float>) {T: $Float} : $Tensor<Float>
return %3 : $Tensor<Float>
}
// CHECK-LABEL: sil @chained_op_test : $@convention(thin) (Tensor<Float>, Tensor<Float>) -> Tensor<Float> {
// CHECK: bb0(%0 : $Tensor<Float>, %1 : $Tensor<Float>):
// CHECK-NEXT: %2 = graph_op "tf.Add"(%0 : $Tensor<Float>, %1 : $Tensor<Float>) {T: $Float} : $Tensor<Float>
// CHECK-NEXT: %3 = graph_op "tf.Mul"(%2 : $Tensor<Float>, %2 : $Tensor<Float>) {T: $Float} : $Tensor<Float>
// CHECK-NEXT: return %3 : $Tensor<Float>
// CHECK-NEXT: }
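
// Single-result form: the one result type follows the colon.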
sil @single_result_op_test : $@convention(thin) (Tensor<Float>, Tensor<Float>) -> Tensor<Float> {
bb0(%0 : $Tensor<Float>, %1 : $Tensor<Float>):
%2 = graph_op "tf.Add"(%0 : $Tensor<Float>, %1 : $Tensor<Float>) {T: $Float} : $Tensor<Float>
return %2 : $Tensor<Float>
}
// CHECK-LABEL: sil @single_result_op_test : $@convention(thin) (Tensor<Float>, Tensor<Float>) -> Tensor<Float> {
// CHECK: bb0(%0 : $Tensor<Float>, %1 : $Tensor<Float>):
// CHECK-NEXT: %2 = graph_op "tf.Add"(%0 : $Tensor<Float>, %1 : $Tensor<Float>) {T: $Float} : $Tensor<Float>
// CHECK-NEXT: return %2 : $Tensor<Float>
// CHECK-NEXT: }
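
// Multiple-result form: the results are bound as a parenthesized list and the
// result types are comma-separated after the colon.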
sil @multiple_result_op_test : $@convention(thin) (Tensor<Float>, Tensor<Float>) -> (Tensor<Float>, Tensor<Float>) {
bb0(%0 : $Tensor<Float>, %1 : $Tensor<Float>):
(%2, %3) = graph_op "tf.Dummy"(%0 : $Tensor<Float>, %1 : $Tensor<Float>) {T: $Float} : $Tensor<Float>, $Tensor<Float>
%4 = tuple (%2 : $Tensor<Float>, %3 : $Tensor<Float>)
return %4 : $(Tensor<Float>, Tensor<Float>)
}
// CHECK-LABEL: sil @multiple_result_op_test : $@convention(thin) (Tensor<Float>, Tensor<Float>) -> (Tensor<Float>, Tensor<Float>) {
// CHECK: bb0(%0 : $Tensor<Float>, %1 : $Tensor<Float>):
// CHECK-NEXT: (%2, %3) = graph_op "tf.Dummy"(%0 : $Tensor<Float>, %1 : $Tensor<Float>) {T: $Float} : $Tensor<Float>, $Tensor<Float>
// CHECK-NEXT: %4 = tuple (%2 : $Tensor<Float>, %3 : $Tensor<Float>)
// CHECK-NEXT: return %4 : $(Tensor<Float>, Tensor<Float>)
// CHECK-NEXT: }