// RUN: %target-sil-opt -enable-sil-verify-all %s -temp-rvalue-opt | %FileCheck %s
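// temp-rvalue-opt (TempRValueOptPass) eliminates short-lived alloc_stack
// temporaries that are initialized by a copy_addr and afterwards only read:
// when the copy's source is not written between the copy and the temporary's
// last use, the reads are rewritten to use the source directly and the
// alloc_stack/copy_addr/destroy_addr/dealloc_stack sequence is deleted.
// The tests below exercise the conditions under which that rewrite is legal.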
sil_stage canonical
import Builtin
import Swift
/////////////
// Utility //
/////////////
struct GS<Base> {
var _base: Base
var _value: Builtin.Int64
}
class Klass {
@_hasStorage var a: String
}
struct Str {
var kl: Klass
var _value: Builtin.Int64
}
sil @unknown : $@convention(thin) () -> ()
sil @load_string : $@convention(thin) (@in_guaranteed String) -> String
sil @guaranteed_user : $@convention(thin) (@guaranteed Klass) -> ()
sil @inguaranteed_user_without_result : $@convention(thin) (@in_guaranteed Klass) -> () {
bb0(%0 : $*Klass):
%9999 = tuple()
return %9999 : $()
}
sil @inguaranteed_user_with_result : $@convention(thin) (@in_guaranteed Klass) -> @out Klass {
bb0(%0 : $*Klass, %1 : $*Klass):
copy_addr %1 to [initialization] %0 : $*Klass
%9999 = tuple()
return %9999 : $()
}
sil @throwing_function : $@convention(thin) (@in_guaranteed Klass) -> ((), @error Error)
sil [ossa] @takeStr : $@convention(thin) (@owned Str) -> ()
///////////
// Tests //
///////////
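// The basic pattern: a value is copied into a stack temporary solely so a
// field can be read from it. A rough Swift-level analogue (hypothetical,
// for illustration only):
//
//   func f<B>(_ x: GS<B>, _ y: inout GS<B>) {
//     let tmp = y                  // copy_addr into an alloc_stack temporary
//     _ = x._value < tmp._value    // the temporary is only read
//   }                              // temporary destroyed and deallocated
//
// Because y is not written between the copy and the read, the read can be
// performed on y itself and the temporary removed, as checked below.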
// CHECK-LABEL: sil @rvalue_simple
// CHECK: bb0(%0 : $*GS<B>, %1 : $*GS<B>):
// CHECK: [[A1:%.*]] = struct_element_addr %0 : $*GS<B>, #GS._value
// CHECK: [[V1:%.*]] = load [[A1]] : $*Builtin.Int64
// CHECK-NOT: alloc_stack
// CHECK-NOT: copy_addr
// CHECK: [[A2:%.*]] = struct_element_addr %1 : $*GS<B>, #GS._value
// CHECK: [[V2:%.*]] = load [[A2]] : $*Builtin.Int64
// CHECK: %{{.*}} = builtin "cmp_slt_Int64"([[V1]] : $Builtin.Int64, [[V2]] : $Builtin.Int64) : $Builtin.Int1
// CHECK-NOT: destroy_addr
// CHECK-NOT: dealloc_stack
// CHECK: return %{{.*}} : $()
// CHECK-LABEL: } // end sil function 'rvalue_simple'
sil @rvalue_simple : $@convention(thin) <B> (@in GS<B>, @inout GS<B>) -> () {
bb0(%0 : $*GS<B>, %1 : $*GS<B>):
%2 = struct_element_addr %0 : $*GS<B>, #GS._value
%3 = load %2 : $*Builtin.Int64
%4 = alloc_stack $GS<B>
copy_addr %1 to [initialization] %4 : $*GS<B>
%6 = struct_element_addr %4 : $*GS<B>, #GS._value
%7 = load %6 : $*Builtin.Int64
%8 = builtin "cmp_slt_Int64"(%3 : $Builtin.Int64, %7 : $Builtin.Int64) : $Builtin.Int1
destroy_addr %4 : $*GS<B>
dealloc_stack %4 : $*GS<B>
%9999 = tuple()
return %9999 : $()
}
// CHECK-LABEL: sil @copy_from_temp
// CHECK: bb0(%0 : $*GS<B>, %1 : $*GS<B>, %2 : $Builtin.Int64):
// CHECK-NEXT: builtin
// CHECK-NEXT: copy_addr %1 to [initialization] %0 : $*GS<B>
// CHECK-NEXT: tuple
// CHECK-NEXT: return
sil @copy_from_temp : $@convention(thin) <B> (@inout GS<B>, @inout GS<B>, Builtin.Int64) -> () {
bb0(%0 : $*GS<B>, %1 : $*GS<B>, %2 : $Builtin.Int64):
%4 = alloc_stack $GS<B>
copy_addr %1 to [initialization] %4 : $*GS<B>
%8 = builtin "cmp_slt_Int64"(%2 : $Builtin.Int64, %2 : $Builtin.Int64) : $Builtin.Int1
copy_addr %4 to [initialization] %0 : $*GS<B>
destroy_addr %4 : $*GS<B>
dealloc_stack %4 : $*GS<B>
%9999 = tuple()
return %9999 : $()
}
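// Forwarding the temporary's value onward is also fine: the outgoing
// copy_addr in copy_from_temp can read straight from the original source %1,
// so the temporary folds away and a single source-to-dest copy remains.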
// CHECK-LABEL: sil @copy_back_to_src
// CHECK: bb0(%0 : $*GS<B>, %1 : $*GS<B>):
// CHECK-NEXT: struct_element_addr %1
// CHECK-NEXT: load
// CHECK-NEXT: builtin
// CHECK-NEXT: tuple
// CHECK-NEXT: return
sil @copy_back_to_src : $@convention(thin) <B> (@in GS<B>, @inout GS<B>) -> () {
bb0(%0 : $*GS<B>, %1 : $*GS<B>):
%4 = alloc_stack $GS<B>
copy_addr %1 to [initialization] %4 : $*GS<B>
%6 = struct_element_addr %4 : $*GS<B>, #GS._value
%7 = load %6 : $*Builtin.Int64
%8 = builtin "cmp_slt_Int64"(%7 : $Builtin.Int64, %7 : $Builtin.Int64) : $Builtin.Int1
copy_addr %4 to %1 : $*GS<B>
destroy_addr %4 : $*GS<B>
dealloc_stack %4 : $*GS<B>
%9999 = tuple()
return %9999 : $()
}
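// Copying the unmodified temporary back to its own source becomes an
// identity copy once the temporary is replaced by %1, so the copy-back
// disappears along with the temporary.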
// CHECK-LABEL: sil @take_from_temp
// CHECK: bb0(%0 : $*B, %1 : $*GS<B>):
// CHECK-NEXT: [[STK:%.*]] = alloc_stack
// CHECK-NEXT: copy_addr %1 to [initialization] [[STK]]
// CHECK-NEXT: [[INNER:%.*]] = struct_element_addr
// CHECK-NEXT: copy_addr [take] [[INNER]]
// CHECK-NEXT: dealloc_stack
// CHECK-NEXT: tuple
// CHECK-NEXT: return
sil @take_from_temp : $@convention(thin) <B> (@inout B, @inout GS<B>) -> () {
bb0(%0 : $*B, %1 : $*GS<B>):
%4 = alloc_stack $GS<B>
copy_addr %1 to [initialization] %4 : $*GS<B>
%7 = struct_element_addr %4 : $*GS<B>, #GS._base
copy_addr [take] %7 to %0 : $*B
dealloc_stack %4 : $*GS<B>
%9999 = tuple()
return %9999 : $()
}
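// Here the copy_addr [take] consumes the _base field out of the temporary.
// Rewriting that take to operate on the @inout source %1 would deinitialize
// part of the caller's value, so the temporary and its initializing copy
// must stay.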
// Check that we don't cause a memory lifetime failure with load [take]
// CHECK-LABEL: sil [ossa] @take_from_source
// CHECK: alloc_stack
// CHECK: copy_addr
// CHECK: load
// CHECK: load
// CHECK: } // end sil function 'take_from_source'
sil [ossa] @take_from_source : $@convention(thin) (@in Str) -> @owned Str {
bb0(%0 : $*Str):
%1 = alloc_stack $Str
copy_addr %0 to [initialization] %1 : $*Str
%3 = load [take] %0 : $*Str
%4 = load [copy] %1 : $*Str
%f = function_ref @takeStr : $@convention(thin) (@owned Str) -> ()
%a = apply %f(%4) : $@convention(thin) (@owned Str) -> ()
destroy_addr %1 : $*Str
dealloc_stack %1 : $*Str
return %3 : $Str
}
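// The source %0 is deinitialized by the load [take] while the temporary is
// still live. Replacing the temporary with %0 would turn the later
// load [copy] into a read from dead memory, so the copy is kept.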
// CHECK-LABEL: sil @load_in_wrong_block
// CHECK: bb0(%0 : $*GS<B>):
// CHECK-NEXT: alloc_stack
// CHECK-NEXT: copy_addr
// CHECK-NEXT: struct_element_addr
// CHECK-NEXT: br bb1
// CHECK: return
sil @load_in_wrong_block : $@convention(thin) <B> (@in GS<B>) -> () {
bb0(%0 : $*GS<B>):
%4 = alloc_stack $GS<B>
copy_addr %0 to [initialization] %4 : $*GS<B>
%6 = struct_element_addr %4 : $*GS<B>, #GS._value
br bb1
bb1:
%7 = load %6 : $*Builtin.Int64
%8 = builtin "cmp_slt_Int64"(%7 : $Builtin.Int64, %7 : $Builtin.Int64) : $Builtin.Int1
destroy_addr %4 : $*GS<B>
dealloc_stack %4 : $*GS<B>
%9999 = tuple()
return %9999 : $()
}
// CHECK-LABEL: sil @projection_in_wrong_block
// CHECK: bb0(%0 : $*GS<B>):
// CHECK-NEXT: alloc_stack
// CHECK-NEXT: copy_addr
// CHECK-NEXT: br bb1
// CHECK: return
sil @projection_in_wrong_block : $@convention(thin) <B> (@in GS<B>) -> () {
bb0(%0 : $*GS<B>):
%4 = alloc_stack $GS<B>
copy_addr %0 to [initialization] %4 : $*GS<B>
br bb1
bb1:
%6 = struct_element_addr %4 : $*GS<B>, #GS._value
%7 = load %6 : $*Builtin.Int64
%8 = builtin "cmp_slt_Int64"(%7 : $Builtin.Int64, %7 : $Builtin.Int64) : $Builtin.Int1
destroy_addr %4 : $*GS<B>
dealloc_stack %4 : $*GS<B>
%9999 = tuple()
return %9999 : $()
}
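// In both of the preceding tests the temporary survives, presumably because
// the pass only rewrites uses it can analyze in the block containing the
// copy_addr: here the load (or the projection feeding it) sits in a
// successor block.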
// CHECK-LABEL: sil @store_after_load
// CHECK: bb0(%0 : $*GS<B>, %1 : $*GS<B>, %2 : $Builtin.Int64):
// CHECK-NEXT: [[A1:%.*]] = struct_element_addr %1
// CHECK-NEXT: [[A2:%.*]] = struct_element_addr %1
// CHECK-NEXT: load [[A2]]
// CHECK-NEXT: store %2 to [[A1]]
// CHECK-NEXT: builtin
// CHECK-NEXT: tuple
// CHECK-NEXT: return
sil @store_after_load : $@convention(thin) <B> (@in GS<B>, @inout GS<B>, Builtin.Int64) -> () {
bb0(%0 : $*GS<B>, %1 : $*GS<B>, %2 : $Builtin.Int64):
%3 = struct_element_addr %1 : $*GS<B>, #GS._value
%4 = alloc_stack $GS<B>
copy_addr %1 to [initialization] %4 : $*GS<B>
%6 = struct_element_addr %4 : $*GS<B>, #GS._value
%7 = load %6 : $*Builtin.Int64
store %2 to %3 : $*Builtin.Int64
%8 = builtin "cmp_slt_Int64"(%7 : $Builtin.Int64, %7 : $Builtin.Int64) : $Builtin.Int1
destroy_addr %4 : $*GS<B>
dealloc_stack %4 : $*GS<B>
%9999 = tuple()
return %9999 : $()
}
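// A write to the copy's source is harmless if it happens after the
// temporary's last use: the store to %1 follows the only load, so the
// temporary still folds onto %1. store_after_two_loads below checks the
// same with two reads.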
// CHECK-LABEL: sil @store_after_two_loads
// CHECK: bb0(%0 : $*GS<B>, %1 : $*GS<B>, %2 : $Builtin.Int64):
// CHECK-NEXT: [[A1:%.*]] = struct_element_addr %1
// CHECK-NEXT: [[A2:%.*]] = struct_element_addr %1
// CHECK-NEXT: load [[A2]]
// CHECK-NEXT: load [[A2]]
// CHECK-NEXT: store %2 to [[A1]]
// CHECK-NEXT: builtin
// CHECK-NEXT: tuple
// CHECK-NEXT: return
sil @store_after_two_loads : $@convention(thin) <B> (@in GS<B>, @inout GS<B>, Builtin.Int64) -> () {
bb0(%0 : $*GS<B>, %1 : $*GS<B>, %2 : $Builtin.Int64):
%3 = struct_element_addr %1 : $*GS<B>, #GS._value
%4 = alloc_stack $GS<B>
copy_addr %1 to [initialization] %4 : $*GS<B>
%6 = struct_element_addr %4 : $*GS<B>, #GS._value
%7 = load %6 : $*Builtin.Int64
%8 = load %6 : $*Builtin.Int64
store %2 to %3 : $*Builtin.Int64
%9 = builtin "cmp_slt_Int64"(%7 : $Builtin.Int64, %8 : $Builtin.Int64) : $Builtin.Int1
destroy_addr %4 : $*GS<B>
dealloc_stack %4 : $*GS<B>
%9999 = tuple()
return %9999 : $()
}
// CHECK-LABEL: sil @store_before_load
// CHECK: bb0(%0 : $*GS<B>, %1 : $*GS<B>, %2 : $Builtin.Int64):
// CHECK-NEXT: struct_element_addr %1
// CHECK-NEXT: [[T:%.*]] = alloc_stack
// CHECK-NEXT: copy_addr %1 to [initialization] [[T]]
// CHECK-NEXT: [[A:%.*]] = struct_element_addr [[T]]
// CHECK-NEXT: store
// CHECK-NEXT: load [[A]]
// CHECK-NEXT: builtin
// CHECK-NEXT: destroy_addr [[T]]
// CHECK-NEXT: dealloc_stack [[T]]
// CHECK-NEXT: tuple
// CHECK-NEXT: return
sil @store_before_load : $@convention(thin) <B> (@in GS<B>, @inout GS<B>, Builtin.Int64) -> () {
bb0(%0 : $*GS<B>, %1 : $*GS<B>, %2 : $Builtin.Int64):
%3 = struct_element_addr %1 : $*GS<B>, #GS._value
%4 = alloc_stack $GS<B>
copy_addr %1 to [initialization] %4 : $*GS<B>
%6 = struct_element_addr %4 : $*GS<B>, #GS._value
store %2 to %3 : $*Builtin.Int64
%7 = load %6 : $*Builtin.Int64
%8 = builtin "cmp_slt_Int64"(%7 : $Builtin.Int64, %7 : $Builtin.Int64) : $Builtin.Int1
destroy_addr %4 : $*GS<B>
dealloc_stack %4 : $*GS<B>
%9999 = tuple()
return %9999 : $()
}
// CHECK-LABEL: sil @store_between_loads
// CHECK: bb0(%0 : $*GS<B>, %1 : $*GS<B>, %2 : $Builtin.Int64):
// CHECK-NEXT: struct_element_addr %1
// CHECK-NEXT: [[T:%.*]] = alloc_stack
// CHECK-NEXT: copy_addr %1 to [initialization] [[T]]
// CHECK-NEXT: [[A:%.*]] = struct_element_addr [[T]]
// CHECK-NEXT: load [[A]]
// CHECK-NEXT: store
// CHECK-NEXT: load [[A]]
// CHECK-NEXT: builtin
// CHECK-NEXT: destroy_addr [[T]]
// CHECK-NEXT: dealloc_stack [[T]]
// CHECK-NEXT: tuple
// CHECK-NEXT: return
sil @store_between_loads : $@convention(thin) <B> (@in GS<B>, @inout GS<B>, Builtin.Int64) -> () {
bb0(%0 : $*GS<B>, %1 : $*GS<B>, %2 : $Builtin.Int64):
%3 = struct_element_addr %1 : $*GS<B>, #GS._value
%4 = alloc_stack $GS<B>
copy_addr %1 to [initialization] %4 : $*GS<B>
%6 = struct_element_addr %4 : $*GS<B>, #GS._value
%7 = load %6 : $*Builtin.Int64
store %2 to %3 : $*Builtin.Int64
%8 = load %6 : $*Builtin.Int64
%9 = builtin "cmp_slt_Int64"(%7 : $Builtin.Int64, %8 : $Builtin.Int64) : $Builtin.Int1
destroy_addr %4 : $*GS<B>
dealloc_stack %4 : $*GS<B>
%9999 = tuple()
return %9999 : $()
}
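// In contrast to the two store_after_* tests, store_before_load and
// store_between_loads write to the source before (or between) reads of the
// temporary. Folding the temporary onto %1 would change the loaded values,
// so the alloc_stack and its initializing copy_addr must remain.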
// CHECK-LABEL: sil @potential_store_before_load
// CHECK: bb0(%0 : $*GS<B>, %1 : $*GS<B>, %2 : $Builtin.Int64):
// CHECK-NEXT: struct_element_addr %1
// CHECK-NEXT: [[T:%.*]] = alloc_stack
// CHECK-NEXT: copy_addr %1 to [initialization] [[T]]
// CHECK-NEXT: [[A:%.*]] = struct_element_addr [[T]]
// CHECK: apply
// CHECK-NEXT: load [[A]]
// CHECK-NEXT: builtin
// CHECK-NEXT: destroy_addr [[T]]
// CHECK-NEXT: dealloc_stack [[T]]
// CHECK-NEXT: tuple
// CHECK-NEXT: return
sil @potential_store_before_load : $@convention(thin) <B> (@in GS<B>, @inout_aliasable GS<B>, Builtin.Int64) -> () {
bb0(%0 : $*GS<B>, %1 : $*GS<B>, %2 : $Builtin.Int64):
%3 = struct_element_addr %1 : $*GS<B>, #GS._value
%4 = alloc_stack $GS<B>
copy_addr %1 to [initialization] %4 : $*GS<B>
%6 = struct_element_addr %4 : $*GS<B>, #GS._value
%f = function_ref @unknown : $@convention(thin) () -> ()
%a = apply %f() : $@convention(thin) () -> ()
%7 = load %6 : $*Builtin.Int64
%8 = builtin "cmp_slt_Int64"(%7 : $Builtin.Int64, %7 : $Builtin.Int64) : $Builtin.Int1
destroy_addr %4 : $*GS<B>
dealloc_stack %4 : $*GS<B>
%9999 = tuple()
return %9999 : $()
}
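// With an @inout_aliasable source, the call to @unknown may write to %1
// through an alias. The pass has to treat the apply as a potential store
// preceding the load, so the temporary is kept.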
// CHECK-LABEL: sil [ossa] @consider_implicit_aliases_in_callee
// CHECK: alloc_stack
// CHECK-NEXT: copy_addr
// CHECK: } // end sil function 'consider_implicit_aliases_in_callee'
sil [ossa] @consider_implicit_aliases_in_callee : $@convention(thin) (@guaranteed Klass) -> String {
bb0(%0 : @guaranteed $Klass):
%1 = ref_element_addr %0 : $Klass, #Klass.a
%2 = alloc_stack $String
copy_addr %1 to [initialization] %2 : $*String
%f = function_ref @load_string : $@convention(thin) (@in_guaranteed String) -> String
%a = apply %f(%2) : $@convention(thin) (@in_guaranteed String) -> String
destroy_addr %2 : $*String
dealloc_stack %2 : $*String
return %a : $String
}
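// The copy source is a class property. Even though the callee only takes
// the String @in_guaranteed, it could reach the same property through
// another reference to the object and write to it, so the copy cannot be
// replaced by the ref_element_addr projection.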
// Test temp RValue elimination on switches.
// CHECK-LABEL: sil @rvalueSwitch
// CHECK: bb1:
// CHECK-NEXT: struct_element_addr %1
// CHECK-NEXT: load
// CHECK-NOT: alloc_stack $UnfoldSequence
// CHECK: return
sil @rvalueSwitch : $@convention(method) <Element, State> (@inout UnfoldSequence<Element, State>) -> @out Optional<Element> {
bb0(%0 : $*Optional<Element>, %1 : $*UnfoldSequence<Element, State>):
%2 = struct_element_addr %1 : $*UnfoldSequence<Element, State>, #UnfoldSequence._done
%3 = struct_element_addr %2 : $*Bool, #Bool._value
%4 = load %3 : $*Builtin.Int1
cond_br %4, bb4, bb1
bb1:
%6 = alloc_stack $UnfoldSequence<Element, State>
copy_addr %1 to [initialization] %6 : $*UnfoldSequence<Element, State>
%8 = struct_element_addr %6 : $*UnfoldSequence<Element, State>, #UnfoldSequence._next
%9 = load %8 : $*@callee_guaranteed @substituted <τ_0_0, τ_0_1> (@inout τ_0_0) -> @out Optional<τ_0_1> for <State, Element>
%10 = alloc_stack $Optional<Element>
%11 = struct_element_addr %1 : $*UnfoldSequence<Element, State>, #UnfoldSequence._state
strong_retain %9 : $@callee_guaranteed @substituted <τ_0_0, τ_0_1> (@inout τ_0_0) -> @out Optional<τ_0_1> for <State, Element>
%13 = apply %9(%10, %11) : $@callee_guaranteed @substituted <τ_0_0, τ_0_1> (@inout τ_0_0) -> @out Optional<τ_0_1> for <State, Element>
switch_enum_addr %10 : $*Optional<Element>, case #Optional.some!enumelt: bb3, case #Optional.none!enumelt: bb2
bb2:
destroy_addr %10 : $*Optional<Element>
dealloc_stack %10 : $*Optional<Element>
destroy_addr %6 : $*UnfoldSequence<Element, State>
dealloc_stack %6 : $*UnfoldSequence<Element, State>
%19 = integer_literal $Builtin.Int1, -1
%20 = struct $Bool (%19 : $Builtin.Int1)
store %20 to %2 : $*Bool
%22 = alloc_stack $Optional<Element>
inject_enum_addr %22 : $*Optional<Element>, #Optional.none!enumelt
copy_addr [take] %22 to [initialization] %0 : $*Optional<Element>
dealloc_stack %22 : $*Optional<Element>
br bb5
bb3:
%27 = unchecked_take_enum_data_addr %10 : $*Optional<Element>, #Optional.some!enumelt
%28 = init_enum_data_addr %0 : $*Optional<Element>, #Optional.some!enumelt
copy_addr [take] %27 to [initialization] %28 : $*Element
dealloc_stack %10 : $*Optional<Element>
destroy_addr %6 : $*UnfoldSequence<Element, State>
dealloc_stack %6 : $*UnfoldSequence<Element, State>
inject_enum_addr %0 : $*Optional<Element>, #Optional.some!enumelt
br bb5
bb4:
%35 = alloc_stack $Optional<Element>
inject_enum_addr %35 : $*Optional<Element>, #Optional.none!enumelt
copy_addr [take] %35 to [initialization] %0 : $*Optional<Element>
dealloc_stack %35 : $*Optional<Element>
br bb5
bb5:
%40 = tuple ()
return %40 : $()
}
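// In bb1 the temporary's only use, the load of _next, happens before _state
// is mutated through %1 by the closure call, so the load can be redirected
// to %1 and the UnfoldSequence temporary disappears; the Optional
// temporaries feeding the switch are left alone.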
// Make sure that we can eliminate temporaries passed via a temporary rvalue to
// an @in_guaranteed function.
//
// CHECK-LABEL: sil @inguaranteed_no_result : $@convention(thin) (@inout Klass) -> () {
// CHECK: bb0([[ARG:%.*]] : $*Klass):
// CHECK-NOT: copy_addr
// CHECK: apply {{%.*}}([[ARG]])
// CHECK-NOT: destroy_addr
// CHECK: } // end sil function 'inguaranteed_no_result'
sil @inguaranteed_no_result : $@convention(thin) (@inout Klass) -> () {
bb0(%0 : $*Klass):
%1 = alloc_stack $Klass
copy_addr %0 to [initialization] %1 : $*Klass
%5 = function_ref @inguaranteed_user_without_result : $@convention(thin) (@in_guaranteed Klass) -> ()
%6 = apply %5(%1) : $@convention(thin) (@in_guaranteed Klass) -> ()
destroy_addr %1 : $*Klass
dealloc_stack %1 : $*Klass
%9 = tuple ()
return %9 : $()
}
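// An @in_guaranteed argument is immutable for the duration of the call, so
// the temporary can be dropped and the @inout source passed directly; no
// other writes to %0 intervene. The try_apply test below checks the same
// for a throwing callee.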
// CHECK-LABEL: sil @try_apply_argument : $@convention(thin) (@inout Klass) -> () {
// CHECK-NOT: copy_addr
// CHECK: try_apply {{%[0-9]+}}(%0)
// CHECK: } // end sil function 'try_apply_argument'
sil @try_apply_argument : $@convention(thin) (@inout Klass) -> () {
bb0(%0 : $*Klass):
%1 = alloc_stack $Klass
copy_addr %0 to [initialization] %1 : $*Klass
%5 = function_ref @throwing_function : $@convention(thin) (@in_guaranteed Klass) -> ((), @error Error)
try_apply %5(%1) : $@convention(thin) (@in_guaranteed Klass) -> ((), @error Error), normal bb1, error bb2
bb1(%r : $()):
br bb3
bb2(%e : $Error):
br bb3
bb3:
destroy_addr %1 : $*Klass
dealloc_stack %1 : $*Klass
%9 = tuple ()
return %9 : $()
}
// CHECK-LABEL: sil @lifetime_beyond_last_use :
// CHECK-NOT: copy_addr
// CHECK: [[V:%[0-9]+]] = load %0
// CHECK: apply {{%[0-9]+}}([[V]])
// CHECK: destroy_addr %0
// CHECK: } // end sil function 'lifetime_beyond_last_use'
sil @lifetime_beyond_last_use : $@convention(thin) (@in Klass) -> () {
bb0(%0 : $*Klass):
%2 = alloc_stack $Klass
copy_addr [take] %0 to [initialization] %2 : $*Klass
%4 = load %2 : $*Klass
%5 = function_ref @guaranteed_user : $@convention(thin) (@guaranteed Klass) -> ()
%6 = apply %5(%4) : $@convention(thin) (@guaranteed Klass) -> ()
destroy_addr %2 : $*Klass
dealloc_stack %2 : $*Klass
%9 = tuple ()
return %9 : $()
}
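// The temporary is initialized with a copy_addr [take] from %0, so the pass
// can fold it back onto %0 and retarget the final destroy_addr there, even
// though the temporary's formal lifetime extends past the apply.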
// Make sure that we can eliminate temporaries passed via a temporary rvalue to
// an @in_guaranteed function that also produces an @out result.
//
// CHECK-LABEL: sil @inguaranteed_with_result : $@convention(thin) (@inout Klass) -> () {
// CHECK: bb0([[ARG:%.*]] : $*Klass):
// dead temp
// CHECK: [[TMP_OUT:%.*]] = alloc_stack $Klass
// CHECK-NOT: copy_addr
// CHECK: apply {{%.*}}([[TMP_OUT]], [[ARG]])
// CHECK-NOT: copy_addr
// CHECK: destroy_addr [[TMP_OUT]]
// CHECK-NOT: destroy_addr
// CHECK: } // end sil function 'inguaranteed_with_result'
sil @inguaranteed_with_result : $@convention(thin) (@inout Klass) -> () {
bb0(%0 : $*Klass):
%1 = alloc_stack $Klass
%1a = alloc_stack $Klass
copy_addr %0 to [initialization] %1 : $*Klass
%5 = function_ref @inguaranteed_user_with_result : $@convention(thin) (@in_guaranteed Klass) -> @out Klass
%6 = apply %5(%1a, %1) : $@convention(thin) (@in_guaranteed Klass) -> @out Klass
destroy_addr %1a : $*Klass
destroy_addr %1 : $*Klass
dealloc_stack %1a : $*Klass
dealloc_stack %1 : $*Klass
%9 = tuple ()
return %9 : $()
}
// CHECK-LABEL: sil @non_overlapping_lifetime : $@convention(thin) (@in Klass) -> () {
// CHECK: bb0([[ARG:%.*]] : $*Klass):
// CHECK-NEXT: [[TMP:%.*]] = alloc_stack $Klass
// CHECK-NEXT: copy_addr [[ARG]] to [initialization] [[TMP]]
// CHECK-NEXT: destroy_addr [[ARG]]
// CHECK: apply %{{[0-9]*}}([[TMP]])
// CHECK-NEXT: destroy_addr [[TMP]]
// CHECK-NEXT: dealloc_stack [[TMP]]
// CHECK-NEXT: tuple
// CHECK-NEXT: return
// CHECK-NEXT: } // end sil function 'non_overlapping_lifetime'
sil @non_overlapping_lifetime : $@convention(thin) (@in Klass) -> () {
bb0(%0 : $*Klass):
%1a = alloc_stack $Klass
%1 = alloc_stack $Klass
%2 = alloc_stack $Klass
copy_addr %0 to [initialization] %2 : $*Klass
copy_addr [take] %2 to [initialization] %1 : $*Klass
dealloc_stack %2 : $*Klass
copy_addr %1 to [initialization] %1a : $*Klass
destroy_addr %0 : $*Klass
destroy_addr %1 : $*Klass
dealloc_stack %1 : $*Klass
%3 = function_ref @inguaranteed_user_without_result : $@convention(thin) (@in_guaranteed Klass) -> ()
apply %3(%1a) : $@convention(thin) (@in_guaranteed Klass) -> ()
destroy_addr %1a : $*Klass
dealloc_stack %1a : $*Klass
%9999 = tuple()
return %9999 : $()
}
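// The two inner temporaries (%2 and %1) collapse, but the outermost copy
// survives: its source is destroyed before the apply, so folding the
// argument onto %0 would create a use after destroy_addr. One alloc_stack
// carrying the value across the call is the best the pass can do here.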
sil @$createKlass : $@convention(thin) () -> @out Klass
sil @$appendKlass : $@convention(method) (@in_guaranteed Klass, @inout Klass) -> ()
// CHECK-LABEL: sil @$overlapping_lifetime_in_function_all : $@convention(thin) () -> @out Klass {
// CHECK: [[S1:%.*]] = alloc_stack $Klass
// CHECK: [[S2:%.*]] = alloc_stack $Klass
// CHECK: copy_addr [[S1]] to [initialization] [[S2]]
// CHECK: apply {{%.*}}([[S2]], [[S1]])
// CHECK: }
sil @$overlapping_lifetime_in_function_all : $@convention(thin) () -> @out Klass {
bb0(%0 : $*Klass):
%1 = alloc_stack $Klass
%2 = function_ref @$createKlass : $@convention(thin) () -> @out Klass
%3 = apply %2(%1) : $@convention(thin) () -> @out Klass
%4 = alloc_stack $Klass
copy_addr %1 to [initialization] %4 : $*Klass
%6 = function_ref @$appendKlass : $@convention(method) (@in_guaranteed Klass, @inout Klass) -> ()
%7 = apply %6(%4, %1) : $@convention(method) (@in_guaranteed Klass, @inout Klass) -> ()
destroy_addr %4 : $*Klass
dealloc_stack %4 : $*Klass
copy_addr [take] %1 to [initialization] %0 : $*Klass
dealloc_stack %1 : $*Klass
%12 = tuple ()
return %12 : $()
}
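// The copy %4 is passed @in_guaranteed to the same apply that takes %1
// @inout. Replacing %4 with %1 would pass one address as both a guaranteed
// and an exclusively-mutable argument, so the copy must remain.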
protocol P {
func foo()
}
sil @getP : $@convention(thin) () -> @out Optional<P>
// CHECK-LABEL: sil @handle_open_existential_addr : $@convention(thin) () -> () {
// CHECK: [[P:%.*]] = unchecked_take_enum_data_addr
// CHECK-NOT: copy_addr
// CHECK: open_existential_addr immutable_access [[P]]
// CHECK: }
sil @handle_open_existential_addr : $@convention(thin) () -> () {
bb0:
%2 = alloc_stack $Optional<P>
%3 = function_ref @getP : $@convention(thin) () -> @out Optional<P>
%4 = apply %3(%2) : $@convention(thin) () -> @out Optional<P>
cond_br undef, bb1, bb3
bb1:
%9 = unchecked_take_enum_data_addr %2 : $*Optional<P>, #Optional.some!enumelt
%10 = alloc_stack $P
copy_addr %9 to [initialization] %10 : $*P
%13 = open_existential_addr immutable_access %10 : $*P to $*@opened("5E7A6328-EF75-11E9-A383-D0817AD3F637") P
%14 = witness_method $@opened("5E7A6328-EF75-11E9-A383-D0817AD3F637") P, #P.foo : <Self where Self : P> (Self) -> () -> (), %13 : $*@opened("5E7A6328-EF75-11E9-A383-D0817AD3F637") P : $@convention(witness_method: P) <τ_0_0 where τ_0_0 : P> (@in_guaranteed τ_0_0) -> ()
%15 = apply %14<@opened("5E7A6328-EF75-11E9-A383-D0817AD3F637") P>(%13) : $@convention(witness_method: P) <τ_0_0 where τ_0_0 : P> (@in_guaranteed τ_0_0) -> ()
destroy_addr %2 : $*Optional<P>
destroy_addr %10 : $*P
dealloc_stack %10 : $*P
dealloc_stack %2 : $*Optional<P>
br bb2
bb2:
%23 = tuple ()
return %23 : $()
bb3:
destroy_addr %2 : $*Optional<P>
dealloc_stack %2 : $*Optional<P>
br bb2
}
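// open_existential_addr immutable_access is a read-only use, so the opened
// address (and the witness_method/apply that consume it) can be redirected
// to the enum payload address and the $P temporary removed.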
// CHECK-LABEL: sil @open_existential_addr_blocks_optimization : $@convention(thin) () -> () {
// CHECK: [[P:%.*]] = alloc_stack $P
// CHECK: copy_addr {{.*}} to [initialization] [[P]]
// CHECK: }
sil @open_existential_addr_blocks_optimization : $@convention(thin) () -> () {
bb0:
%2 = alloc_stack $Optional<P>
%3 = function_ref @getP : $@convention(thin) () -> @out Optional<P>
%4 = apply %3(%2) : $@convention(thin) () -> @out Optional<P>
cond_br undef, bb1, bb3
bb1:
%9 = unchecked_take_enum_data_addr %2 : $*Optional<P>, #Optional.some!enumelt
%10 = alloc_stack $P
copy_addr %9 to [initialization] %10 : $*P
destroy_addr %2 : $*Optional<P>
%13 = open_existential_addr immutable_access %10 : $*P to $*@opened("5E7A6328-EF75-11E9-A383-D0817AD3F637") P
%14 = witness_method $@opened("5E7A6328-EF75-11E9-A383-D0817AD3F637") P, #P.foo : <Self where Self : P> (Self) -> () -> (), %13 : $*@opened("5E7A6328-EF75-11E9-A383-D0817AD3F637") P : $@convention(witness_method: P) <τ_0_0 where τ_0_0 : P> (@in_guaranteed τ_0_0) -> ()
%15 = apply %14<@opened("5E7A6328-EF75-11E9-A383-D0817AD3F637") P>(%13) : $@convention(witness_method: P) <τ_0_0 where τ_0_0 : P> (@in_guaranteed τ_0_0) -> ()
destroy_addr %10 : $*P
dealloc_stack %10 : $*P
dealloc_stack %2 : $*Optional<P>
br bb2
bb2:
%23 = tuple ()
return %23 : $()
bb3:
destroy_addr %2 : $*Optional<P>
dealloc_stack %2 : $*Optional<P>
br bb2
}
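// In this test and the next, the payload's container %2 is destroyed before
// the opened existential value is used. Redirecting the opened address to
// the payload would then read from destroyed memory, so the temporary and
// its copy must stay.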
// CHECK-LABEL: sil @witness_method_blocks_optimization : $@convention(thin) () -> () {
// CHECK: [[P:%.*]] = alloc_stack $P
// CHECK: copy_addr {{.*}} to [initialization] [[P]]
// CHECK: }
sil @witness_method_blocks_optimization : $@convention(thin) () -> () {
bb0:
%2 = alloc_stack $Optional<P>
%3 = function_ref @getP : $@convention(thin) () -> @out Optional<P>
%4 = apply %3(%2) : $@convention(thin) () -> @out Optional<P>
cond_br undef, bb1, bb3
bb1:
%9 = unchecked_take_enum_data_addr %2 : $*Optional<P>, #Optional.some!enumelt
%10 = alloc_stack $P
copy_addr %9 to [initialization] %10 : $*P
%13 = open_existential_addr immutable_access %10 : $*P to $*@opened("5E7A6328-EF75-11E9-A383-D0817AD3F637") P
destroy_addr %2 : $*Optional<P>
%14 = witness_method $@opened("5E7A6328-EF75-11E9-A383-D0817AD3F637") P, #P.foo : <Self where Self : P> (Self) -> () -> (), %13 : $*@opened("5E7A6328-EF75-11E9-A383-D0817AD3F637") P : $@convention(witness_method: P) <τ_0_0 where τ_0_0 : P> (@in_guaranteed τ_0_0) -> ()
%15 = apply %14<@opened("5E7A6328-EF75-11E9-A383-D0817AD3F637") P>(%13) : $@convention(witness_method: P) <τ_0_0 where τ_0_0 : P> (@in_guaranteed τ_0_0) -> ()
destroy_addr %10 : $*P
dealloc_stack %10 : $*P
dealloc_stack %2 : $*Optional<P>
br bb2
bb2:
%23 = tuple ()
return %23 : $()
bb3:
destroy_addr %2 : $*Optional<P>
dealloc_stack %2 : $*Optional<P>
br bb2
}
///////////////////////////////////////////////////////////////////////////////
// Test checkTempObjectDestroy
// <rdar://problem/56393491> Use-after-free crashing an XCTest.
sil @takeGuaranteedObj : $@convention(thin) (@guaranteed Builtin.NativeObject) -> ()
// Do not remove a copy that is released via a load (because
// TempRValueOpt is an address-based optimization that does not know how
// to remove releases, and trying to do that would amount to ARC
// optimization).
// CHECK-LABEL: sil @copyWithLoadRelease : $@convention(thin) (@in_guaranteed Builtin.NativeObject) -> () {
// CHECK: bb0(%0 : $*Builtin.NativeObject):
// CHECK: [[STK:%.*]] = alloc_stack $Builtin.NativeObject
// CHECK: copy_addr %0 to [initialization] [[STK]] : $*Builtin.NativeObject
// CHECK: [[VAL:%.*]] = load [[STK]] : $*Builtin.NativeObject
// CHECK: apply %{{.*}}([[VAL]]) : $@convention(thin) (@guaranteed Builtin.NativeObject) -> ()
// CHECK: release_value [[VAL]] : $Builtin.NativeObject
// CHECK: dealloc_stack [[STK]] : $*Builtin.NativeObject
// CHECK-LABEL: } // end sil function 'copyWithLoadRelease'
sil @copyWithLoadRelease : $@convention(thin) (@in_guaranteed Builtin.NativeObject) -> () {
bb0(%0 : $*Builtin.NativeObject):
%stk = alloc_stack $Builtin.NativeObject
copy_addr %0 to [initialization] %stk : $*Builtin.NativeObject
%obj = load %stk : $*Builtin.NativeObject
%f = function_ref @takeGuaranteedObj : $@convention(thin) (@guaranteed Builtin.NativeObject) -> ()
%call = apply %f(%obj) : $@convention(thin) (@guaranteed Builtin.NativeObject) -> ()
release_value %obj : $Builtin.NativeObject
dealloc_stack %stk : $*Builtin.NativeObject
%v = tuple ()
return %v : $()
}
// CHECK-LABEL: sil @takeWithLoadRelease : $@convention(thin) (@in Builtin.NativeObject) -> () {
// CHECK: bb0(%0 : $*Builtin.NativeObject):
// CHECK: [[STK:%.*]] = alloc_stack $Builtin.NativeObject
// CHECK: copy_addr [take] %0 to [initialization] [[STK]] : $*Builtin.NativeObject
// CHECK: [[VAL:%.*]] = load [[STK]] : $*Builtin.NativeObject
// CHECK: apply %{{.*}}([[VAL]]) : $@convention(thin) (@guaranteed Builtin.NativeObject) -> ()
// CHECK: release_value [[VAL]] : $Builtin.NativeObject
// CHECK-LABEL: } // end sil function 'takeWithLoadRelease'
sil @takeWithLoadRelease : $@convention(thin) (@in Builtin.NativeObject) -> () {
bb0(%0 : $*Builtin.NativeObject):
%stk = alloc_stack $Builtin.NativeObject
copy_addr [take] %0 to [initialization] %stk : $*Builtin.NativeObject
%obj = load %stk : $*Builtin.NativeObject
%f = function_ref @takeGuaranteedObj : $@convention(thin) (@guaranteed Builtin.NativeObject) -> ()
%call = apply %f(%obj) : $@convention(thin) (@guaranteed Builtin.NativeObject) -> ()
release_value %obj : $Builtin.NativeObject
dealloc_stack %stk : $*Builtin.NativeObject
%v = tuple ()
return %v : $()
}
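// fix_lifetime is a read-only use that can be retargeted to the copy's
// source, so the temporary below is removable and the fix_lifetime ends up
// operating on %0.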
// CHECK-LABEL: sil @eliminate_fix_lifetime_on_dest_copyaddr : $@convention(thin) (@inout Klass) -> () {
// CHECK-NOT: alloc_stack
// CHECK: fix_lifetime %0
// CHECK-NOT: alloc_stack
// CHECK: } // end sil function 'eliminate_fix_lifetime_on_dest_copyaddr'
sil @eliminate_fix_lifetime_on_dest_copyaddr : $@convention(thin) (@inout Klass) -> () {
bb0(%0 : $*Klass):
%3 = alloc_stack $Klass
copy_addr %0 to [initialization] %3 : $*Klass
fix_lifetime %3 : $*Klass
destroy_addr %3 : $*Klass
dealloc_stack %3 : $*Klass
%9999 = tuple()
return %9999 : $()
}