| // RUN: %target-sil-opt -assume-parsing-unqualified-ownership-sil -enable-sil-verify-all -inline -generic-specializer -inline %s > %t.sil |
| // RUN: %target-sil-opt -assume-parsing-unqualified-ownership-sil -sil-full-demangle -enable-sil-verify-all -specdevirt %t.sil | %FileCheck %s |
| |
| // Check that the @_optimize(none) annotation prevents |
| // any optimizations of the annotated function: |
| // - No functions are inlined into the annotated function. |
| // - No calls are devirtualized in the body of the annotated function. |
| // - No generic specializations of the annotated function are generated. |
| // - The annotated function is not inlined into any other function. |
| |
| sil_stage canonical |
| |
| import Builtin |
| import Swift |
| import SwiftShims |
| |
| // C/D pair: both foo overrides are @inline(never), so calls through |
| // #C.foo!1 can only disappear via devirtualization, not inlining. |
| public class C { |
| @inline(never) func foo() -> Int32 |
| deinit |
| init() |
| } |
| |
| public class D : C { |
| @inline(never) override func foo() -> Int32 |
| deinit |
| override init() |
| } |
| |
| // Base hierarchy: Base.foo and Derived1.boo carry @_optimize(none); |
| // speculative devirtualization must not touch call sites whose |
| // implementations include these methods. |
| public class Base { |
| @_optimize(none) func foo() -> Int32 |
| func boo() -> Int32 |
| deinit |
| init() |
| } |
| |
| public class Derived1 : Base { |
| override func foo() -> Int32 |
| @_optimize(none) override func boo() -> Int32 |
| deinit |
| override init() |
| } |
| |
| public class Derived2 : Base { |
| override func foo() -> Int32 |
| override func boo() -> Int32 |
| deinit |
| override init() |
| } |
| |
| // AST-level declarations for the SIL bodies below. foo1 (generic) and |
| // foo2 (non-generic) are @_optimize(none); foo3/foo4 are their |
| // unannotated counterparts that the optimizer is allowed to transform. |
| public func transform1<T>(x: T) -> Int32 |
| |
| @_optimize(none) public func foo1<T>(x: T, _ c: C) -> Int32 |
| |
| public func boo1(c: C) -> Int32 |
| |
| public func transform2(x: Int32) -> Int32 |
| |
| @_optimize(none) public func foo2(x: Int32, _ c: C) -> Int32 |
| |
| public func boo2(c: C) -> Int32 |
| |
| public func transform3<T>(x: T) -> Int32 |
| |
| public func foo3<T>(x: T, _ c: C) -> Int32 |
| |
| public func boo3(c: C) -> Int32 |
| |
| public func transform4(x: Int32) -> Int32 |
| |
| public func foo4(x: Int32, _ c: C) -> Int32 |
| |
| public func boo4(c: C) -> Int32 |
| |
| |
| // Stdlib Process/globalinit globals; apparently unreferenced by the |
| // function bodies in this file (kept from the original test setup). |
| sil_global [serialized] @_T0s7ProcessO5_argcs5Int32VvZ : $Int32 |
| |
| |
| sil_global private_external [serialized] @globalinit_33_1BDF70FFC18749BAB495A73B459ED2F0_token5 : $Builtin.Word |
| |
| |
| sil_global [serialized] @_T0s7ProcessO11_unsafeArgvSpySpys4Int8VGGvZ : $UnsafeMutablePointer<UnsafeMutablePointer<Int8>> |
| |
| |
| |
| // C.foo: always returns 1. [noinline] keeps the call visible at call sites. |
| sil hidden [noinline] @_T014optimize_never1CC3foos5Int32VyF : $@convention(method) (@guaranteed C) -> Int32 { |
| bb0(%0 : $C): |
| %2 = integer_literal $Builtin.Int32, 1 |
| %3 = struct $Int32 (%2 : $Builtin.Int32) |
| return %3 : $Int32 |
| } |
| |
| |
| sil [transparent] [serialized] @_T0s5Int32V22_builtinIntegerLiteralABBi2048__tcfC : $@convention(thin) (Builtin.Int2048, @thin Int32.Type) -> Int32 |
| |
| |
| // C.__deallocating_deinit: runs C.deinit, then frees the instance. |
| sil @_T014optimize_never1CCfD : $@convention(method) (@owned C) -> () { |
| bb0(%0 : $C): |
| %2 = function_ref @_T014optimize_never1CCfd : $@convention(method) (@guaranteed C) -> @owned Builtin.NativeObject |
| %3 = apply %2(%0) : $@convention(method) (@guaranteed C) -> @owned Builtin.NativeObject |
| %4 = unchecked_ref_cast %3 : $Builtin.NativeObject to $C |
| dealloc_ref %4 : $C |
| %6 = tuple () |
| return %6 : $() |
| } |
| |
| |
| // C.deinit: trivial destructor; returns self as a NativeObject. |
| sil @_T014optimize_never1CCfd : $@convention(method) (@guaranteed C) -> @owned Builtin.NativeObject { |
| bb0(%0 : $C): |
| %2 = unchecked_ref_cast %0 : $C to $Builtin.NativeObject |
| return %2 : $Builtin.NativeObject |
| } |
| |
| |
| // C.init: trivial initializer; returns self unchanged. |
| sil hidden @_T014optimize_never1CCACycfc : $@convention(method) (@owned C) -> @owned C { |
| bb0(%0 : $C): |
| return %0 : $C |
| } |
| |
| |
| // D.foo (override): always returns 11. |
| sil hidden [noinline] @_T014optimize_never1DC3foos5Int32VyF : $@convention(method) (@guaranteed D) -> Int32 { |
| bb0(%0 : $D): |
| %2 = integer_literal $Builtin.Int32, 11 |
| %3 = struct $Int32 (%2 : $Builtin.Int32) |
| return %3 : $Int32 |
| } |
| |
| |
| // D.__deallocating_deinit: runs D.deinit, then frees the instance. |
| sil @_T014optimize_never1DCfD : $@convention(method) (@owned D) -> () { |
| bb0(%0 : $D): |
| %2 = function_ref @_T014optimize_never1DCfd : $@convention(method) (@guaranteed D) -> @owned Builtin.NativeObject |
| %3 = apply %2(%0) : $@convention(method) (@guaranteed D) -> @owned Builtin.NativeObject |
| %4 = unchecked_ref_cast %3 : $Builtin.NativeObject to $D |
| dealloc_ref %4 : $D |
| %6 = tuple () |
| return %6 : $() |
| } |
| |
| |
| // D.deinit: upcasts to C and delegates to C.deinit. |
| sil @_T014optimize_never1DCfd : $@convention(method) (@guaranteed D) -> @owned Builtin.NativeObject { |
| bb0(%0 : $D): |
| %2 = upcast %0 : $D to $C |
| %3 = function_ref @_T014optimize_never1CCfd : $@convention(method) (@guaranteed C) -> @owned Builtin.NativeObject |
| %4 = apply %3(%2) : $@convention(method) (@guaranteed C) -> @owned Builtin.NativeObject |
| return %4 : $Builtin.NativeObject |
| } |
| |
| |
| // D.init: chains to C.init through a temporary stack slot for self. |
| sil hidden @_T014optimize_never1DCACycfc : $@convention(method) (@owned D) -> @owned D { |
| bb0(%0 : $D): |
| %1 = alloc_stack $D |
| store %0 to %1 : $*D |
| %3 = upcast %0 : $D to $C |
| |
| %4 = function_ref @_T014optimize_never1CCACycfc : $@convention(method) (@owned C) -> @owned C |
| %5 = apply %4(%3) : $@convention(method) (@owned C) -> @owned C |
| %6 = unchecked_ref_cast %5 : $C to $D |
| store %6 to %1 : $*D |
| dealloc_stack %1 : $*D |
| return %6 : $D |
| } |
| |
| |
| // transform1<T>: consumes its @in argument and returns the constant 2. |
| sil @_T014optimize_never10transform1ys5Int32VxlF : $@convention(thin) <T> (@in T) -> Int32 { |
| bb0(%0 : $*T): |
| %2 = integer_literal $Builtin.Int32, 2 |
| %3 = struct $Int32 (%2 : $Builtin.Int32) |
| destroy_addr %0 : $*T |
| return %3 : $Int32 |
| } |
| |
| |
| |
| // foo1<T> [Onone]: computes transform1(copy of x) + c.foo() with an |
| // overflow-checked add; @_optimize(none) must keep both calls intact. |
| // Check that both calls are not inlined or devirtualized. |
| // CHECK-LABEL: sil [Onone] @_T014optimize_never4foo1ys5Int32Vx_AA1CCtlF |
| // CHECK-NOT: checked_cast_br |
| // CHECK: function_ref @_T014optimize_never10transform1ys5Int32VxlF |
| // CHECK: apply |
| // CHECK-NOT: checked_cast_br |
| // CHECK: class_method {{%.*}} : $C, #C.foo!1 : (C) -> () -> Int32, $@convention(method) (@guaranteed C) -> Int32 |
| // CHECK: apply |
| // CHECK: return |
| sil [Onone] @_T014optimize_never4foo1ys5Int32Vx_AA1CCtlF : $@convention(thin) <T> (@in T, @owned C) -> Int32 { |
| bb0(%0 : $*T, %1 : $C): |
| %4 = function_ref @_T014optimize_never10transform1ys5Int32VxlF : $@convention(thin) <τ_0_0> (@in τ_0_0) -> Int32 |
| %5 = alloc_stack $T |
| copy_addr %0 to [initialization] %5 : $*T |
| %7 = apply %4<T>(%5) : $@convention(thin) <τ_0_0> (@in τ_0_0) -> Int32 |
| %8 = class_method %1 : $C, #C.foo!1 : (C) -> () -> Int32, $@convention(method) (@guaranteed C) -> Int32 |
| %9 = apply %8(%1) : $@convention(method) (@guaranteed C) -> Int32 |
| %10 = struct_extract %7 : $Int32, #Int32._value |
| %11 = struct_extract %9 : $Int32, #Int32._value |
| %12 = integer_literal $Builtin.Int1, -1 |
| %13 = builtin "sadd_with_overflow_Int32"(%10 : $Builtin.Int32, %11 : $Builtin.Int32, %12 : $Builtin.Int1) : $(Builtin.Int32, Builtin.Int1) |
| %14 = tuple_extract %13 : $(Builtin.Int32, Builtin.Int1), 0 |
| %15 = tuple_extract %13 : $(Builtin.Int32, Builtin.Int1), 1 |
| cond_fail %15 : $Builtin.Int1 |
| %17 = struct $Int32 (%14 : $Builtin.Int32) |
| dealloc_stack %5 : $*T |
| strong_release %1 : $C |
| destroy_addr %0 : $*T |
| return %17 : $Int32 |
| } |
| |
| |
| sil [transparent] [serialized] @_T0s1poiys5Int32VAC_ACtFZ : $@convention(thin) (Int32, Int32) -> Int32 |
| |
| |
| // boo1: boxes the literal 1 on the stack and calls the generic foo1<Int32>. |
| // CHECK-LABEL: sil @_T014optimize_never4boo1ys5Int32VAA1CCF |
| // CHECK-NOT: return |
| // This call should not be inlined, because the callee is annotated with @_optimize(none) |
| // CHECK: function_ref @_T014optimize_never4foo1ys5Int32Vx_AA1CCtlF |
| // CHECK: apply |
| // CHECK: return |
| sil @_T014optimize_never4boo1ys5Int32VAA1CCF : $@convention(thin) (@owned C) -> Int32 { |
| bb0(%0 : $C): |
| %2 = function_ref @_T014optimize_never4foo1ys5Int32Vx_AA1CCtlF : $@convention(thin) <τ_0_0> (@in τ_0_0, @owned C) -> Int32 |
| %3 = integer_literal $Builtin.Int32, 1 |
| %4 = struct $Int32 (%3 : $Builtin.Int32) |
| %5 = alloc_stack $Int32 |
| store %4 to %5 : $*Int32 |
| strong_retain %0 : $C |
| %8 = apply %2<Int32>(%5, %0) : $@convention(thin) <τ_0_0> (@in τ_0_0, @owned C) -> Int32 |
| dealloc_stack %5 : $*Int32 |
| strong_release %0 : $C |
| return %8 : $Int32 |
| } |
| |
| |
| sil [transparent] [serialized] @_T0Si22_builtinIntegerLiteralSiBi2048__tcfC : $@convention(thin) (Builtin.Int2048, @thin Int.Type) -> Int |
| |
| |
| // transform2: identity function on Int32. |
| sil @_T014optimize_never10transform2ys5Int32VADF : $@convention(thin) (Int32) -> Int32 { |
| bb0(%0 : $Int32): |
| return %0 : $Int32 |
| } |
| |
| |
| // foo2 [Onone]: non-generic sibling of foo1 — transform2(x) + c.foo() |
| // with an overflow-checked add; both calls must survive optimization. |
| // Check that both calls are not inlined or devirtualized. |
| // CHECK-LABEL: sil [Onone] @_T014optimize_never4foo2ys5Int32VAD_AA1CCtF |
| // CHECK-NOT: checked_cast_br |
| // CHECK: function_ref @_T014optimize_never10transform2ys5Int32VADF |
| // CHECK: apply |
| // CHECK-NOT: checked_cast_br |
| // CHECK: class_method {{%.*}} : $C, #C.foo!1 : (C) -> () -> Int32, $@convention(method) (@guaranteed C) -> Int32 |
| // CHECK: apply |
| // CHECK: return |
| sil [Onone] @_T014optimize_never4foo2ys5Int32VAD_AA1CCtF : $@convention(thin) (Int32, @owned C) -> Int32 { |
| bb0(%0 : $Int32, %1 : $C): |
| %4 = function_ref @_T014optimize_never10transform2ys5Int32VADF : $@convention(thin) (Int32) -> Int32 |
| %5 = apply %4(%0) : $@convention(thin) (Int32) -> Int32 |
| %6 = class_method %1 : $C, #C.foo!1 : (C) -> () -> Int32, $@convention(method) (@guaranteed C) -> Int32 |
| %7 = apply %6(%1) : $@convention(method) (@guaranteed C) -> Int32 |
| %8 = struct_extract %5 : $Int32, #Int32._value |
| %9 = struct_extract %7 : $Int32, #Int32._value |
| %10 = integer_literal $Builtin.Int1, -1 |
| %11 = builtin "sadd_with_overflow_Int32"(%8 : $Builtin.Int32, %9 : $Builtin.Int32, %10 : $Builtin.Int1) : $(Builtin.Int32, Builtin.Int1) |
| %12 = tuple_extract %11 : $(Builtin.Int32, Builtin.Int1), 0 |
| %13 = tuple_extract %11 : $(Builtin.Int32, Builtin.Int1), 1 |
| cond_fail %13 : $Builtin.Int1 |
| %15 = struct $Int32 (%12 : $Builtin.Int32) |
| strong_release %1 : $C |
| return %15 : $Int32 |
| } |
| |
| |
| // boo2: calls foo2(1, c); the [Onone] callee must remain a call. |
| // CHECK-LABEL: sil @_T014optimize_never4boo2ys5Int32VAA1CCF |
| // CHECK-NOT: return |
| // This call should not be inlined, because the callee is annotated with @_optimize(none) |
| // CHECK: function_ref @_T014optimize_never4foo2ys5Int32VAD_AA1CCtF |
| // CHECK: apply |
| // CHECK: return |
| sil @_T014optimize_never4boo2ys5Int32VAA1CCF : $@convention(thin) (@owned C) -> Int32 { |
| bb0(%0 : $C): |
| %2 = function_ref @_T014optimize_never4foo2ys5Int32VAD_AA1CCtF : $@convention(thin) (Int32, @owned C) -> Int32 |
| %3 = integer_literal $Builtin.Int32, 1 |
| %4 = struct $Int32 (%3 : $Builtin.Int32) |
| strong_retain %0 : $C |
| %6 = apply %2(%4, %0) : $@convention(thin) (Int32, @owned C) -> Int32 |
| strong_release %0 : $C |
| return %6 : $Int32 |
| } |
| |
| // Check that no generic specialization was produced for foo1 because it is excluded from optimizations. |
| // CHECK-NOT: generic specialization <Swift.Int> of {{.*}}.foo1 |
| // But foo3 was specialized, because it is not annotated with @_optimize(none) |
| // CHECK: generic specialization <Swift.Int32> of {{.*}}.foo3 |
| |
| |
| // transform3<T>: same body as transform1 — consumes x, returns 2. |
| sil @_T014optimize_never10transform3ys5Int32VxlF : $@convention(thin) <T> (@in T) -> Int32 { |
| bb0(%0 : $*T): |
| %2 = integer_literal $Builtin.Int32, 2 |
| %3 = struct $Int32 (%2 : $Builtin.Int32) |
| destroy_addr %0 : $*T |
| return %3 : $Int32 |
| } |
| |
| |
| // foo3<T>: same body as foo1 but WITHOUT [Onone], so the pipeline is |
| // free to specialize it for Int32 and inline it into boo3. |
| sil @_T014optimize_never4foo3ys5Int32Vx_AA1CCtlF : $@convention(thin) <T> (@in T, @owned C) -> Int32 { |
| bb0(%0 : $*T, %1 : $C): |
| %4 = function_ref @_T014optimize_never10transform3ys5Int32VxlF : $@convention(thin) <τ_0_0> (@in τ_0_0) -> Int32 |
| %5 = alloc_stack $T |
| copy_addr %0 to [initialization] %5 : $*T |
| %7 = apply %4<T>(%5) : $@convention(thin) <τ_0_0> (@in τ_0_0) -> Int32 |
| %8 = class_method %1 : $C, #C.foo!1 : (C) -> () -> Int32, $@convention(method) (@guaranteed C) -> Int32 |
| %9 = apply %8(%1) : $@convention(method) (@guaranteed C) -> Int32 |
| %10 = struct_extract %7 : $Int32, #Int32._value |
| %11 = struct_extract %9 : $Int32, #Int32._value |
| %12 = integer_literal $Builtin.Int1, -1 |
| %13 = builtin "sadd_with_overflow_Int32"(%10 : $Builtin.Int32, %11 : $Builtin.Int32, %12 : $Builtin.Int1) : $(Builtin.Int32, Builtin.Int1) |
| %14 = tuple_extract %13 : $(Builtin.Int32, Builtin.Int1), 0 |
| %15 = tuple_extract %13 : $(Builtin.Int32, Builtin.Int1), 1 |
| cond_fail %15 : $Builtin.Int1 |
| %17 = struct $Int32 (%14 : $Builtin.Int32) |
| dealloc_stack %5 : $*T |
| strong_release %1 : $C |
| destroy_addr %0 : $*T |
| return %17 : $Int32 |
| } |
| |
| |
| // boo3: unannotated caller of foo3<Int32>; full inlining plus |
| // speculative devirtualization of c.foo() is expected here. |
| // Everything in this function should get devirtualized, inlined and folded together, |
| // because neither this function nor any of its callees are annotated to be |
| // excluded from the optimization. |
| // CHECK-LABEL: sil @_T014optimize_never4boo3ys5Int32VAA1CCF |
| // CHECK: bb0 |
| // CHECK-NOT: function_ref @_T014optimize_never4foo3ys5Int32Vx_AA1CCtlF |
| // Presence of checked_cast_br indicates that the speculative devirtualization |
| // was performed. And this is only possible, if the original call is inlined. |
| // CHECK: checked_cast_br [exact] {{%.*}} : $C to $C, bb2, bb3 |
| // CHECK: return |
| sil @_T014optimize_never4boo3ys5Int32VAA1CCF : $@convention(thin) (@owned C) -> Int32 { |
| bb0(%0 : $C): |
| %2 = function_ref @_T014optimize_never4foo3ys5Int32Vx_AA1CCtlF : $@convention(thin) <τ_0_0> (@in τ_0_0, @owned C) -> Int32 |
| %3 = integer_literal $Builtin.Int32, 1 |
| %4 = struct $Int32 (%3 : $Builtin.Int32) |
| %5 = alloc_stack $Int32 |
| store %4 to %5 : $*Int32 |
| strong_retain %0 : $C |
| %8 = apply %2<Int32>(%5, %0) : $@convention(thin) <τ_0_0> (@in τ_0_0, @owned C) -> Int32 |
| dealloc_stack %5 : $*Int32 |
| strong_release %0 : $C |
| return %8 : $Int32 |
| } |
| |
| |
| // transform4: identity function on Int32 (unannotated twin of transform2). |
| sil @_T014optimize_never10transform4ys5Int32VADF : $@convention(thin) (Int32) -> Int32 { |
| bb0(%0 : $Int32): |
| return %0 : $Int32 |
| } |
| |
| |
| // foo4: same body as foo2 but WITHOUT [Onone]; eligible for inlining. |
| sil @_T014optimize_never4foo4ys5Int32VAD_AA1CCtF : $@convention(thin) (Int32, @owned C) -> Int32 { |
| bb0(%0 : $Int32, %1 : $C): |
| %4 = function_ref @_T014optimize_never10transform4ys5Int32VADF : $@convention(thin) (Int32) -> Int32 |
| %5 = apply %4(%0) : $@convention(thin) (Int32) -> Int32 |
| %6 = class_method %1 : $C, #C.foo!1 : (C) -> () -> Int32, $@convention(method) (@guaranteed C) -> Int32 |
| %7 = apply %6(%1) : $@convention(method) (@guaranteed C) -> Int32 |
| %8 = struct_extract %5 : $Int32, #Int32._value |
| %9 = struct_extract %7 : $Int32, #Int32._value |
| %10 = integer_literal $Builtin.Int1, -1 |
| %11 = builtin "sadd_with_overflow_Int32"(%8 : $Builtin.Int32, %9 : $Builtin.Int32, %10 : $Builtin.Int1) : $(Builtin.Int32, Builtin.Int1) |
| %12 = tuple_extract %11 : $(Builtin.Int32, Builtin.Int1), 0 |
| %13 = tuple_extract %11 : $(Builtin.Int32, Builtin.Int1), 1 |
| cond_fail %13 : $Builtin.Int1 |
| %15 = struct $Int32 (%12 : $Builtin.Int32) |
| strong_release %1 : $C |
| return %15 : $Int32 |
| } |
| |
| |
| // boo4: unannotated caller of foo4; inlining + devirtualization expected. |
| // Everything in this function should get devirtualized and inlined, |
| // because neither this function nor any of its callees are annotated to be |
| // excluded from the optimization. |
| // CHECK-LABEL: sil @_T014optimize_never4boo4ys5Int32VAA1CCF |
| // CHECK: bb0 |
| // CHECK-NOT: function_ref @_T014optimize_never4foo4ys5Int32VAD_AA1CCtF |
| // Presence of checked_cast_br indicates that the speculative devirtualization |
| // was performed. And this is only possible, if the original call is inlined. |
| // CHECK: checked_cast_br [exact] {{%.*}} : $C to $C, bb2, bb3 |
| // CHECK: return |
| sil @_T014optimize_never4boo4ys5Int32VAA1CCF : $@convention(thin) (@owned C) -> Int32 { |
| bb0(%0 : $C): |
| %2 = function_ref @_T014optimize_never4foo4ys5Int32VAD_AA1CCtF : $@convention(thin) (Int32, @owned C) -> Int32 |
| %3 = integer_literal $Builtin.Int32, 1 |
| %4 = struct $Int32 (%3 : $Builtin.Int32) |
| strong_retain %0 : $C |
| %6 = apply %2(%4, %0) : $@convention(thin) (Int32, @owned C) -> Int32 |
| strong_release %0 : $C |
| return %6 : $Int32 |
| } |
| |
| // Base.foo [Onone]: returns 1; the SIL-level mirror of @_optimize(none). |
| sil hidden [Onone] @_T014optimize_never4BaseC3foos5Int32VyF : $@convention(method) (@guaranteed Base) -> Int32 { |
| bb0(%0 : $Base): |
| %2 = integer_literal $Builtin.Int32, 1 |
| %3 = struct $Int32 (%2 : $Builtin.Int32) |
| return %3 : $Int32 |
| } |
| |
| // Base.boo: returns 2; not annotated, so it may be devirtualized. |
| sil hidden @_T014optimize_never4BaseC3boos5Int32VyF : $@convention(method) (@guaranteed Base) -> Int32 { |
| bb0(%0 : $Base): |
| %2 = integer_literal $Builtin.Int32, 2 |
| %3 = struct $Int32 (%2 : $Builtin.Int32) |
| return %3 : $Int32 |
| } |
| // Derived1.foo: returns 3. |
| sil hidden @_T014optimize_never8Derived1C3foos5Int32VyF : $@convention(method) (@guaranteed Derived1) -> Int32 { |
| bb0(%0 : $Derived1): |
| %2 = integer_literal $Builtin.Int32, 3 |
| %3 = struct $Int32 (%2 : $Builtin.Int32) |
| return %3 : $Int32 |
| } |
| |
| // Derived1.boo [Onone]: returns 4; excluded from speculative devirt. |
| sil hidden [Onone] @_T014optimize_never8Derived1C3boos5Int32VyF : $@convention(method) (@guaranteed Derived1) -> Int32 { |
| bb0(%0 : $Derived1): |
| %2 = integer_literal $Builtin.Int32, 4 |
| %3 = struct $Int32 (%2 : $Builtin.Int32) |
| return %3 : $Int32 |
| } |
| |
| // Derived2.foo: returns 5. |
| sil hidden @_T014optimize_never8Derived2C3foos5Int32VyF : $@convention(method) (@guaranteed Derived2) -> Int32 { |
| bb0(%0 : $Derived2): |
| %2 = integer_literal $Builtin.Int32, 5 |
| %3 = struct $Int32 (%2 : $Builtin.Int32) |
| return %3 : $Int32 |
| } |
| |
| // Derived2.boo: returns 6; fully optimizable. |
| sil hidden @_T014optimize_never8Derived2C3boos5Int32VyF : $@convention(method) (@guaranteed Derived2) -> Int32 { |
| bb0(%0 : $Derived2): |
| %2 = integer_literal $Builtin.Int32, 6 |
| %3 = struct $Int32 (%2 : $Builtin.Int32) |
| return %3 : $Int32 |
| } |
| |
| // Check that class_method is not speculatively devirtualized, because the |
| // method foo in the class Base, which is mentioned in the class_method |
| // instruction, is marked with @_optimize(none). |
| // |
| // CHECK-LABEL: sil @_T014optimize_never19testDoNotDevirtBase1os5Int32VAA0G0C_tF |
| // CHECK: bb0 |
| // CHECK-NOT: checked_cast_br |
| // CHECK: class_method {{%[0-9]+}} : $Base, #Base.foo!1 : (Base) -> () -> Int32 |
| // CHECK-NOT: checked_cast_br |
| // CHECK: return |
| sil @_T014optimize_never19testDoNotDevirtBase1os5Int32VAA0G0C_tF : $@convention(thin) (@owned Base) -> Int32 { |
| bb0(%0 : $Base): |
| %2 = class_method %0 : $Base, #Base.foo!1 : (Base) -> () -> Int32, $@convention(method) (@guaranteed Base) -> Int32 |
| %3 = apply %2(%0) : $@convention(method) (@guaranteed Base) -> Int32 |
| strong_release %0 : $Base |
| return %3 : $Int32 |
| } |
| |
| // Check that Derived1.boo is not devirtualized, because this method |
| // is marked with @_optimize(none) and thus should not |
| // participate in speculative devirtualization. |
| // Base and Derived2 are still speculated (the two checked_cast_br below); |
| // the residual class_method before them handles the Derived1 case. |
| // |
| // CHECK-LABEL: sil @_T014optimize_never23testDoNotDevirtDerived11os5Int32VAA4BaseC_tF |
| // CHECK: bb0 |
| // CHECK: class_method {{%[0-9]+}} : $Base, #Base.boo!1 : (Base) -> () -> Int32 |
| // CHECK: checked_cast_br [exact] {{%[0-9]+}} : $Base to $Base |
| // CHECK: checked_cast_br [exact] {{%[0-9]+}} : $Base to $Derived2 |
| // CHECK-NOT: class_method |
| // CHECK: } |
| sil @_T014optimize_never23testDoNotDevirtDerived11os5Int32VAA4BaseC_tF : $@convention(thin) (@owned Base) -> Int32 { |
| bb0(%0 : $Base): |
| %2 = class_method %0 : $Base, #Base.boo!1 : (Base) -> () -> Int32, $@convention(method) (@guaranteed Base) -> Int32 |
| %3 = apply %2(%0) : $@convention(method) (@guaranteed Base) -> Int32 |
| strong_release %0 : $Base |
| return %3 : $Int32 |
| } |
| |
| // Vtables: these entries are what class_method lookup and the |
| // devirtualizer consult when deciding which implementation to call. |
| sil_vtable C { |
| #C.foo!1: _T014optimize_never1CC3foos5Int32VyF |
| #C.deinit!deallocator: _T014optimize_never1CCfD |
| #C.init!initializer.1: _T014optimize_never1CCACycfc |
| } |
| |
| sil_vtable D { |
| #C.foo!1: _T014optimize_never1DC3foos5Int32VyF |
| #C.init!initializer.1: _T014optimize_never1DCACycfc |
| #D.deinit!deallocator: _T014optimize_never1DCfD |
| } |
| |
| sil_vtable Base { |
| #Base.foo!1: _T014optimize_never4BaseC3foos5Int32VyF |
| #Base.boo!1: _T014optimize_never4BaseC3boos5Int32VyF |
| } |
| |
| sil_vtable Derived1 { |
| #Base.foo!1: _T014optimize_never8Derived1C3foos5Int32VyF |
| #Base.boo!1: _T014optimize_never8Derived1C3boos5Int32VyF |
| } |
| |
| sil_vtable Derived2 { |
| #Base.foo!1: _T014optimize_never8Derived2C3foos5Int32VyF |
| #Base.boo!1: _T014optimize_never8Derived2C3boos5Int32VyF |
| } |
| |