// RUN: %target-sil-opt -assume-parsing-unqualified-ownership-sil -enable-sil-verify-all -eager-specializer %s | %FileCheck %s
// RUN: %target-sil-opt -assume-parsing-unqualified-ownership-sil -enable-sil-verify-all -eager-specializer -sil-deadfuncelim %s | %FileCheck --check-prefix=CHECK-DEADFUNCELIM %s
// RUN: %target-sil-opt -assume-parsing-unqualified-ownership-sil -enable-sil-verify-all -eager-specializer %s -o %t.sil && %target-swift-frontend -assume-parsing-unqualified-ownership-sil -module-name=eager_specialize -emit-ir %t.sil | %FileCheck --check-prefix=CHECK-IRGEN %s
sil_stage canonical
import Builtin
import Swift
import SwiftShims
public protocol AnElt {
}
public protocol HasElt {
associatedtype Elt
init(e: Elt)
}
struct X : AnElt {
@sil_stored var i: Int { get set }
init(i: Int)
}
struct S : HasElt {
typealias Elt = X
@sil_stored var e: X { get set }
init(e: Elt)
}
public struct G<Container : HasElt> {
public func getContainer(e: Container.Elt) -> Container
init()
}
// CHECK: @_specialize(exported: false, kind: full, where T == S)
// CHECK: public func getGenericContainer<T>(g: G<T>, e: T.Elt) -> T where T : HasElt, T.Elt : AnElt
@_specialize(where T == S)
public func getGenericContainer<T>(g: G<T>, e: T.Elt) -> T where T : HasElt, T.Elt : AnElt
enum ArithmeticError : Error {
case DivByZero
var hashValue: Int { get }
var _code: Int { get }
}
// CHECK: @_specialize(exported: false, kind: full, where T == Int)
// CHECK: public func divideNum<T>(num: T, den: T) throws -> T where T : SignedInteger, T : _ExpressibleByBuiltinIntegerLiteral
@_specialize(where T == Int)
public func divideNum<T : SignedInteger & _ExpressibleByBuiltinIntegerLiteral>(num: T, den: T) throws -> T
@inline(never) @_semantics("optimize.sil.never") func foo<T>(t: T) -> Int64
// CHECK: @_specialize(exported: false, kind: full, where T == Int64)
// CHECK: @_specialize(exported: false, kind: full, where T == Float)
// CHECK: public func voidReturn<T>(t: T)
@_specialize(where T == Int64)
@_specialize(where T == Float)
public func voidReturn<T>(t: T)
// CHECK: @_specialize(exported: false, kind: full, where T == Int64)
// CHECK: @_specialize(exported: false, kind: full, where T == Float)
// CHECK: public func nonvoidReturn<T>(t: T) -> Int64
@_specialize(where T == Int64)
@_specialize(where T == Float)
public func nonvoidReturn<T>(t: T) -> Int64
// --- test: protocol conformance, substitution for dependent types,
// non-layout-dependent generic arguments, emitUncheckedBitCast
// (non-address-type)
// Helper
//
// G.getContainer(A.Elt) -> A
sil @_T016eager_specialize1GV12getContainerx3EltQzF : $@convention(method) <Container where Container : HasElt> (@in Container.Elt, G<Container>) -> @out Container {
bb0(%0 : $*Container, %1 : $*Container.Elt, %2 : $G<Container>):
%4 = witness_method $Container, #HasElt.init!allocator.1 : $@convention(witness_method) <τ_0_0 where τ_0_0 : HasElt> (@in τ_0_0.Elt, @thick τ_0_0.Type) -> @out τ_0_0
%5 = metatype $@thick Container.Type
%6 = apply %4<Container>(%0, %1, %5) : $@convention(witness_method) <τ_0_0 where τ_0_0 : HasElt> (@in τ_0_0.Elt, @thick τ_0_0.Type) -> @out τ_0_0
%7 = tuple ()
return %7 : $()
}
// getGenericContainer<A where ...> (G<A>, e : A.Elt) -> A
sil [_specialize where T == S] @_T016eager_specialize19getGenericContainerxAA1GVyxG_3EltQz1etAA03HasF0RzAA02AnF0AGRQlF : $@convention(thin) <T where T : HasElt, T.Elt : AnElt> (G<T>, @in T.Elt) -> @out T {
bb0(%0 : $*T, %1 : $G<T>, %2 : $*T.Elt):
// function_ref G.getContainer(A.Elt) -> A
%5 = function_ref @_T016eager_specialize1GV12getContainerx3EltQzF : $@convention(method) <τ_0_0 where τ_0_0 : HasElt> (@in τ_0_0.Elt, G<τ_0_0>) -> @out τ_0_0
%6 = apply %5<T>(%0, %2, %1) : $@convention(method) <τ_0_0 where τ_0_0 : HasElt> (@in τ_0_0.Elt, G<τ_0_0>) -> @out τ_0_0
%7 = tuple ()
return %7 : $()
}
// Specialization getGenericContainer<S, X>
//
// CHECK-LABEL: sil shared @_T016eager_specialize19getGenericContainerxAA1GVyxG_3EltQz1etAA03HasF0RzAA02AnF0AGRQlF4main1SV_Tg5 : $@convention(thin) (G<S>, X) -> S {
// CHECK: bb0(%0 : $G<S>, %1 : $X):
// CHECK: return %{{.*}} : $S
// Generic with specialized dispatch. No more [specialize] attribute.
//
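// A hedged Swift-level sketch of the dispatch the pass emits below (the
// transformation itself happens in SIL; getGenericContainer_S is an
// illustrative name for the specialized entry point, not a real symbol):
//
//   public func getGenericContainer<T: HasElt>(g: G<T>, e: T.Elt) -> T
//       where T.Elt: AnElt {
//     if T.self == S.self {
//       // Reinterpret the arguments as the concrete types, call the
//       // specialization, and cast the result back to T.
//       let r = getGenericContainer_S(g: unsafeBitCast(g, to: G<S>.self),
//                                     e: unsafeBitCast(e, to: X.self))
//       return unsafeBitCast(r, to: T.self)
//     }
//     return g.getContainer(e: e)   // original generic path
//   }
//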
// CHECK-LABEL: sil @_T016eager_specialize19getGenericContainerxAA1GVyxG_3EltQz1etAA03HasF0RzAA02AnF0AGRQlF : $@convention(thin) <T where T : HasElt, T.Elt : AnElt> (G<T>, @in T.Elt) -> @out T {
// CHECK: bb0(%0 : $*T, %1 : $G<T>, %2 : $*T.Elt):
// CHECK: %3 = metatype $@thick T.Type
// CHECK: %4 = metatype $@thick S.Type
// CHECK: %5 = unchecked_bitwise_cast %3 : $@thick T.Type to $Builtin.Word
// CHECK: %6 = unchecked_bitwise_cast %4 : $@thick S.Type to $Builtin.Word
// CHECK: %7 = builtin "cmp_eq_Word"(%5 : $Builtin.Word, %6 : $Builtin.Word) : $Builtin.Int1
// CHECK: cond_br %7, bb3, bb1
// CHECK: bb1:
// CHECK: %9 = function_ref @_T016eager_specialize1GV12getContainerx3EltQzF : $@convention(method) <τ_0_0 where τ_0_0 : HasElt> (@in τ_0_0.Elt, G<τ_0_0>) -> @out τ_0_0
// CHECK: %10 = apply %9<T>(%0, %2, %1) : $@convention(method) <τ_0_0 where τ_0_0 : HasElt> (@in τ_0_0.Elt, G<τ_0_0>) -> @out τ_0_0
// CHECK: br bb2
// CHECK: bb2:
// CHECK: %12 = tuple ()
// CHECK: return %12 : $()
// CHECK: bb3:
// CHECK: %14 = unchecked_addr_cast %0 : $*T to $*S
// CHECK: %15 = unchecked_trivial_bit_cast %1 : $G<T> to $G<S>
// CHECK: %16 = unchecked_addr_cast %2 : $*T.Elt to $*X
// CHECK: %17 = load %16 : $*X
// function_ref specialized getGenericContainer<A where ...> (G<A>, e : A.Elt) -> A
// CHECK: %18 = function_ref @_T016eager_specialize19getGenericContainerxAA1GVyxG_3EltQz1etAA03HasF0RzAA02AnF0AGRQlF4main1SV_Tg5 : $@convention(thin) (G<S>, X) -> S
// CHECK: %19 = apply %18(%15, %17) : $@convention(thin) (G<S>, X) -> S
// CHECK: store %19 to %14 : $*S
// CHECK: %21 = tuple ()
// CHECK: %22 = unchecked_trivial_bit_cast %21 : $() to $()
// CHECK: br bb2
// --- test: rethrow
// Helper
//
// static != infix<A where ...> (A, A) -> Bool
sil public_external [serialized] @_T0s2neoiSbx_xts9EquatableRzlFZ : $@convention(thin) <T where T : Equatable> (@in T, @in T) -> Bool {
bb0(%0 : $*T, %1 : $*T):
%4 = witness_method $T, #Equatable."=="!1 : $@convention(witness_method) <τ_0_0 where τ_0_0 : Equatable> (@in τ_0_0, @in τ_0_0, @thick τ_0_0.Type) -> Bool
%5 = metatype $@thick T.Type
%6 = apply %4<T>(%0, %1, %5) : $@convention(witness_method) <τ_0_0 where τ_0_0 : Equatable> (@in τ_0_0, @in τ_0_0, @thick τ_0_0.Type) -> Bool
%7 = struct_extract %6 : $Bool, #Bool._value
%8 = integer_literal $Builtin.Int1, -1
%9 = builtin "xor_Int1"(%7 : $Builtin.Int1, %8 : $Builtin.Int1) : $Builtin.Int1
%10 = struct $Bool (%9 : $Builtin.Int1)
return %10 : $Bool
}
// divideNum<A where ...> (A, den : A) throws -> A
sil [_specialize where T == Int] @_T016eager_specialize9divideNumxx_x3dentKs13SignedIntegerRzlF : $@convention(thin) <T where T : SignedInteger, T : _ExpressibleByBuiltinIntegerLiteral> (@in T, @in T) -> (@out T, @error Error) {
bb0(%0 : $*T, %1 : $*T, %2 : $*T):
// function_ref static != infix<A where ...> (A, A) -> Bool
%5 = function_ref @_T0s2neoiSbx_xts9EquatableRzlFZ : $@convention(thin) <τ_0_0 where τ_0_0 : Equatable> (@in τ_0_0, @in τ_0_0) -> Bool
%6 = alloc_stack $T
copy_addr %2 to [initialization] %6 : $*T
%8 = witness_method $T, #_ExpressibleByBuiltinIntegerLiteral.init!allocator.1 : $@convention(witness_method) <τ_0_0 where τ_0_0 : _ExpressibleByBuiltinIntegerLiteral> (Builtin.Int2048, @thick τ_0_0.Type) -> @out τ_0_0
%9 = metatype $@thick T.Type
%10 = integer_literal $Builtin.Int2048, 0
%11 = alloc_stack $T
%12 = apply %8<T>(%11, %10, %9) : $@convention(witness_method) <τ_0_0 where τ_0_0 : _ExpressibleByBuiltinIntegerLiteral> (Builtin.Int2048, @thick τ_0_0.Type) -> @out τ_0_0
%13 = apply %5<T>(%6, %11) : $@convention(thin) <τ_0_0 where τ_0_0 : Equatable> (@in τ_0_0, @in τ_0_0) -> Bool
%14 = struct_extract %13 : $Bool, #Bool._value
dealloc_stack %11 : $*T
dealloc_stack %6 : $*T
cond_br %14, bb2, bb1
bb1:
destroy_addr %2 : $*T
destroy_addr %1 : $*T
%24 = alloc_existential_box $Error, $ArithmeticError
%25 = project_existential_box $ArithmeticError in %24 : $Error
%26 = enum $ArithmeticError, #ArithmeticError.DivByZero!enumelt
store %26 to %25 : $*ArithmeticError
throw %24 : $Error
bb2:
%18 = witness_method $T, #BinaryInteger."/"!1 : $@convention(witness_method) <τ_0_0 where τ_0_0 : BinaryInteger> (@in τ_0_0, @in τ_0_0, @thick τ_0_0.Type) -> @out τ_0_0
%19 = apply %18<T>(%0, %1, %2, %9) : $@convention(witness_method) <τ_0_0 where τ_0_0 : BinaryInteger> (@in τ_0_0, @in τ_0_0, @thick τ_0_0.Type) -> @out τ_0_0
%20 = tuple ()
return %20 : $()
}
// specialized divideNum<A where ...> (A, den : A) throws -> A
// CHECK-LABEL: sil shared @_T016eager_specialize9divideNumxx_x3dentKs13SignedIntegerRzlFSi_Tg5 : $@convention(thin) (Int, Int) -> (Int, @error Error) {
// CHECK: bb0(%0 : $Int, %1 : $Int):
// CHECK: return %{{.*}}
// CHECK: throw %{{.*}}
// Generic with specialized dispatch. No more [specialize] attribute.
//
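// For a throwing generic, the dispatch must rethrow whatever the specialized
// callee throws. A hedged Swift-level sketch (divideNum_Int is an illustrative
// name for the Int specialization; in the SIL below, errors from the try_apply
// are funneled into a shared throw block):
//
//   public func divideNum<T: SignedInteger & _ExpressibleByBuiltinIntegerLiteral>(
//       num: T, den: T) throws -> T {
//     if T.self == Int.self {
//       // Forward to the specialization; any error it throws is rethrown here.
//       let r = try divideNum_Int(num: num as! Int, den: den as! Int)
//       return r as! T
//     }
//     guard den != 0 else { throw ArithmeticError.DivByZero }
//     return num / den
//   }
//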
// CHECK-LABEL: sil @_T016eager_specialize9divideNumxx_x3dentKs13SignedIntegerRzlF : $@convention(thin) <T where T : SignedInteger, T : _ExpressibleByBuiltinIntegerLiteral> (@in T, @in T) -> (@out T, @error Error) {
// CHECK: bb0(%0 : $*T, %1 : $*T, %2 : $*T):
// CHECK: %3 = metatype $@thick T.Type
// CHECK: %4 = metatype $@thick Int.Type
// CHECK: %5 = unchecked_bitwise_cast %3 : $@thick T.Type to $Builtin.Word
// CHECK: %6 = unchecked_bitwise_cast %4 : $@thick Int.Type to $Builtin.Word
// CHECK: %7 = builtin "cmp_eq_Word"(%5 : $Builtin.Word, %6 : $Builtin.Word) : $Builtin.Int1
// CHECK: cond_br %7, bb6, bb1
// CHECK: bb1:
// CHECK: // function_ref static != infix<A>(_:_:)
// CHECK: cond_br %{{.*}}, bb4, bb2
// CHECK: bb2:
// CHECK: br bb3(%{{.*}} : $Error)
// CHECK: bb3(%{{.*}} : $Error):
// CHECK: throw %{{.*}} : $Error
// CHECK: bb4:
// CHECK: %{{.*}} = witness_method $T, #BinaryInteger."/"!1 : {{.*}} : $@convention(witness_method) <τ_0_0 where τ_0_0 : BinaryInteger> (@in τ_0_0, @in τ_0_0, @thick τ_0_0.Type) -> @out τ_0_0
// CHECK: apply %{{.*}}<T>({{.*}}) : $@convention(witness_method) <τ_0_0 where τ_0_0 : BinaryInteger> (@in τ_0_0, @in τ_0_0, @thick τ_0_0.Type) -> @out τ_0_0
// CHECK: br bb5
// CHECK: bb5:
// CHECK: %{{.*}} = tuple ()
// CHECK: return %{{.*}} : $()
// CHECK: bb6:
// CHECK: %{{.*}} = unchecked_addr_cast %0 : $*T to $*Int
// CHECK: %{{.*}} = unchecked_addr_cast %1 : $*T to $*Int
// CHECK: %{{.*}} = load %{{.*}} : $*Int
// CHECK: %{{.*}} = unchecked_addr_cast %2 : $*T to $*Int
// CHECK: %{{.*}} = load %{{.*}} : $*Int
// CHECK: // function_ref specialized divideNum<A>(_:den:)
// CHECK: %{{.*}} = function_ref @_T016eager_specialize9divideNumxx_x3dentKs13SignedIntegerRzlFSi_Tg5 : $@convention(thin) (Int, Int) -> (Int, @error Error)
// CHECK: try_apply %{{.*}}(%{{.*}}, %{{.*}}) : $@convention(thin) (Int, Int) -> (Int, @error Error), normal bb8, error bb7
// CHECK: bb7(%{{.*}} : $Error):
// CHECK: %{{.*}} = builtin "willThrow"(%{{.*}} : $Error) : $()
// CHECK: br bb3(%{{.*}} : $Error)
// CHECK: bb8(%{{.*}} : $Int):
// CHECK: store %{{.*}} to %{{.*}} : $*Int
// CHECK: %{{.*}} = tuple ()
// CHECK: %{{.*}} = unchecked_trivial_bit_cast %{{.*}} : $() to $()
// CHECK: br bb5
// --- test: multiple void and non-void return values
// foo<A> (A) -> Int64
sil hidden [noinline] [_semantics "optimize.sil.never"] @_T016eager_specialize3foos5Int64VxlF : $@convention(thin) <T> (@in T) -> Int64 {
// %0 // users: %1, %4
bb0(%0 : $*T):
%2 = integer_literal $Builtin.Int64, 3
%3 = struct $Int64 (%2 : $Builtin.Int64)
destroy_addr %0 : $*T
return %3 : $Int64
}
// voidReturn<A> (A) -> ()
sil [_specialize where T == Float] [_specialize where T == Int64] @_T016eager_specialize10voidReturnyxlF : $@convention(thin) <T> (@in T) -> () {
bb0(%0 : $*T):
// function_ref foo<A> (A) -> Int64
%2 = function_ref @_T016eager_specialize3foos5Int64VxlF : $@convention(thin) <τ_0_0> (@in τ_0_0) -> Int64
%3 = apply %2<T>(%0) : $@convention(thin) <τ_0_0> (@in τ_0_0) -> Int64
%4 = tuple ()
return %4 : $()
}
// CHECK-LABEL: // specialized voidReturn<A>(_:)
// CHECK: sil shared @_T016eager_specialize10voidReturnyxlFSf_Tg5 : $@convention(thin) (Float) -> () {
// %0 // user: %2
// CHECK: bb0(%0 : $Float):
// CHECK: return %5 : $()
// CHECK-LABEL: // specialized voidReturn<A>(_:)
// CHECK: sil shared @_T016eager_specialize10voidReturnyxlFs5Int64V_Tg5 : $@convention(thin) (Int64) -> () {
// CHECK: bb0(%0 : $Int64):
// CHECK: return %5 : $()
// Generic with specialized dispatch. No more [specialize] attribute.
//
// CHECK-LABEL: // voidReturn<A>(_:)
// CHECK: sil @_T016eager_specialize10voidReturnyxlF : $@convention(thin) <T> (@in T) -> () {
// CHECK: bb0(%0 : $*T):
// CHECK: builtin "cmp_eq_Word"
// CHECK: cond_br %5, bb5, bb1
// CHECK: bb1:
// CHECK: builtin "cmp_eq_Word"
// CHECK: cond_br %11, bb4, bb2
// CHECK: bb2:
// CHECK: function_ref @_T016eager_specialize3foos5Int64VxlF : $@convention(thin) <τ_0_0> (@in τ_0_0) -> Int64
// CHECK: apply %13<T>(%0) : $@convention(thin) <τ_0_0> (@in τ_0_0) -> Int64
// CHECK: br bb3
// CHECK: bb3:
// CHECK: tuple ()
// CHECK: return
// CHECK: bb4:
// CHECK: function_ref @_T016eager_specialize10voidReturnyxlFSf_Tg5 : $@convention(thin) (Float) -> ()
// CHECK: br bb3
// CHECK: bb5:
// CHECK: br bb3
// nonvoidReturn<A>(A) -> Int64
sil [_specialize where T == Float] [_specialize where T == Int64] @_T016eager_specialize13nonvoidReturns5Int64VxlF : $@convention(thin) <T> (@in T) -> Int64 {
// %0 // users: %1, %3
bb0(%0 : $*T):
// function_ref foo<A>(A) -> Int64
%2 = function_ref @_T016eager_specialize3foos5Int64VxlF : $@convention(thin) <τ_0_0> (@in τ_0_0) -> Int64
%3 = apply %2<T>(%0) : $@convention(thin) <τ_0_0> (@in τ_0_0) -> Int64
return %3 : $Int64
}
// CHECK-LABEL: // specialized nonvoidReturn<A>(_:)
// CHECK: sil shared @_T016eager_specialize13nonvoidReturns5Int64VxlFSf_Tg5 : $@convention(thin) (Float) -> Int64 {
// CHECK: bb0(%0 : $Float):
// CHECK: return %4 : $Int64
// CHECK-LABEL: // specialized nonvoidReturn<A>(_:)
// CHECK: sil shared @_T016eager_specialize13nonvoidReturns5Int64VxlFAD_Tg5 : $@convention(thin) (Int64) -> Int64 {
// CHECK: bb0(%0 : $Int64):
// CHECK: return %4 : $Int64
// CHECK-LABEL: // nonvoidReturn<A>(_:)
// CHECK: sil @_T016eager_specialize13nonvoidReturns5Int64VxlF : $@convention(thin) <T> (@in T) -> Int64 {
// CHECK: bb0(%0 : $*T):
// CHECK: builtin "cmp_eq_Word"
// CHECK: cond_br %{{.*}}, bb5, bb1
// CHECK: bb1:
// CHECK: builtin "cmp_eq_Word"
// CHECK: cond_br %{{.*}}, bb4, bb2
// CHECK: bb2:
// CHECK: // function_ref foo<A>(_:)
// CHECK: function_ref @_T016eager_specialize3foos5Int64VxlF : $@convention(thin) <τ_0_0> (@in τ_0_0) -> Int64
// CHECK: apply %13<T>
// CHECK: br bb3(%{{.*}} : $Int64)
// CHECK: bb3(%{{.*}} : $Int64):
// CHECK: return %{{.*}} : $Int64
// CHECK: bb4:
// CHECK: br bb3(%{{.*}} : $Int64)
// CHECK: bb5:
// CHECK: br bb3(%{{.*}} : $Int64)
////////////////////////////////////////////////////////////////////
// Check the ability to specialize for _Trivial(64) and _Trivial(32)
////////////////////////////////////////////////////////////////////
// copyValueAndReturn<A> (A, s : inout A) -> A
sil [noinline] [_specialize where S : _Trivial(32)] [_specialize where S : _Trivial(64)] @_T016eager_specialize18copyValueAndReturnxx_xz1stlF : $@convention(thin) <S> (@in S, @inout S) -> @out S {
bb0(%0 : $*S, %1 : $*S, %2 : $*S):
copy_addr %2 to [initialization] %0 : $*S
destroy_addr %1 : $*S
%7 = tuple ()
return %7 : $()
} // end sil function '_T016eager_specialize18copyValueAndReturnxx_xz1stlF'
// Check the specialization for 32 bits
// specialized copyValueAndReturn<A>(A, s : inout A) -> A
// CHECK-LABEL: sil shared [noinline] @_T016eager_specialize18copyValueAndReturnxx_xz1stlFxxxRlze31_lItilr_Tp5 : $@convention(thin) <τ_0_0 where τ_0_0 : _Trivial(32)> (@in τ_0_0, @inout τ_0_0) -> @out τ_0_0
// CHECK: bb0(%0 : $*τ_0_0, %1 : $*τ_0_0, %2 : $*τ_0_0):
// CHECK: copy_addr %2 to [initialization] %0 : $*τ_0_0
// CHECK: destroy_addr %1 : $*τ_0_0
// CHECK: %5 = tuple ()
// CHECK: return %5 : $()
// CHECK: } // end sil function '_T016eager_specialize18copyValueAndReturnxx_xz1stlFxxxRlze31_lItilr_Tp5'
// Check the specialization for 64 bits
// specialized copyValueAndReturn<A>(A, s : inout A) -> A
// CHECK-LABEL: sil shared [noinline] @_T016eager_specialize18copyValueAndReturnxx_xz1stlFxxxRlze63_lItilr_Tp5 : $@convention(thin) <τ_0_0 where τ_0_0 : _Trivial(64)> (@in τ_0_0, @inout τ_0_0) -> @out τ_0_0
// CHECK: bb0(%0 : $*τ_0_0, %1 : $*τ_0_0, %2 : $*τ_0_0):
// CHECK: copy_addr %2 to [initialization] %0 : $*τ_0_0
// CHECK: destroy_addr %1 : $*τ_0_0
// CHECK: %5 = tuple ()
// CHECK: return %5 : $()
// CHECK: } // end sil function '_T016eager_specialize18copyValueAndReturnxx_xz1stlFxxxRlze63_lItilr_Tp5'
// Generic with specialized dispatch. No more [specialize] attribute.
//
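// Layout constraints cannot be tested with a metatype comparison, so the
// dispatch below checks the size and triviality of S directly. A hedged
// Swift-like sketch of what the "sizeof"/"ispod" builtins express (_isPOD is
// the stdlib's underscored triviality query):
//
//   if MemoryLayout<S>.size == 8 && _isPOD(S.self) {
//     // invoke the _Trivial(64) specialization
//   } else if MemoryLayout<S>.size == 4 && _isPOD(S.self) {
//     // invoke the _Trivial(32) specialization
//   } else {
//     // fall back to the fully generic copy
//   }
//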
// CHECK-LABEL: sil [noinline] @_T016eager_specialize18copyValueAndReturnxx_xz1stlF : $@convention(thin) <S> (@in S, @inout S) -> @out S
// Check if size == 8 bytes, i.e. 64 bits
// CHECK: %3 = metatype $@thick S.Type
// CHECK: %4 = builtin "sizeof"<S>(%3 : $@thick S.Type) : $Builtin.Word
// CHECK: %5 = integer_literal $Builtin.Word, 8
// CHECK: %6 = builtin "cmp_eq_Word"(%4 : $Builtin.Word, %5 : $Builtin.Word) : $Builtin.Int1
// CHECK: cond_br %6, bb6, bb1
// Check if size == 4 bytes, i.e. 32 bits
// CHECK: bb1:
// CHECK: %8 = metatype $@thick S.Type
// CHECK: %9 = builtin "sizeof"<S>(%8 : $@thick S.Type) : $Builtin.Word
// CHECK: %10 = integer_literal $Builtin.Word, 4
// CHECK: %11 = builtin "cmp_eq_Word"(%9 : $Builtin.Word, %10 : $Builtin.Word) : $Builtin.Int1
// CHECK: cond_br %11, bb4, bb2
// None of the constraint checks succeeded; perform a generic copy.
// CHECK: bb2:
// CHECK: copy_addr %2 to [initialization] %0 : $*S
// CHECK: destroy_addr %1 : $*S
// CHECK: br bb3
// CHECK: bb3:
// CHECK: %16 = tuple ()
// CHECK: return %16 : $()
// Check if it is a trivial type
// CHECK: bb4:
// CHECK: %18 = builtin "ispod"<S>(%8 : $@thick S.Type) : $Builtin.Int1
// CHECK: cond_br %18, bb5, bb2
// Invoke the specialized function for 32 bits
// CHECK: bb5:
// CHECK: %20 = unchecked_addr_cast %0 : $*S to $*S
// CHECK: %21 = unchecked_addr_cast %1 : $*S to $*S
// CHECK: %22 = unchecked_addr_cast %2 : $*S to $*S
// function_ref specialized copyValueAndReturn<A> (A, s : inout A) -> A
// CHECK: %23 = function_ref @_T016eager_specialize18copyValueAndReturnxx_xz1stlFxxxRlze31_lItilr_Tp5 : $@convention(thin) <τ_0_0 where τ_0_0 : _Trivial(32)> (@in τ_0_0, @inout τ_0_0) -> @out τ_0_0
// CHECK: %24 = apply %23<S>(%20, %21, %22) : $@convention(thin) <τ_0_0 where τ_0_0 : _Trivial(32)> (@in τ_0_0, @inout τ_0_0) -> @out τ_0_0
// CHECK: %25 = tuple ()
// CHECK: %26 = unchecked_trivial_bit_cast %25 : $() to $()
// CHECK: br bb3
// Check if it is a trivial type
// CHECK: bb6:
// CHECK: %28 = builtin "ispod"<S>(%3 : $@thick S.Type) : $Builtin.Int1
// CHECK: cond_br %28, bb7, bb1
// Invoke the specialized function for 64 bits
// CHECK: bb7:
// CHECK: %30 = unchecked_addr_cast %0 : $*S to $*S
// CHECK: %31 = unchecked_addr_cast %1 : $*S to $*S
// CHECK: %32 = unchecked_addr_cast %2 : $*S to $*S
// function_ref specialized copyValueAndReturn<A> (A, s : inout A) -> A
// CHECK: %33 = function_ref @_T016eager_specialize18copyValueAndReturnxx_xz1stlFxxxRlze63_lItilr_Tp5 : $@convention(thin) <τ_0_0 where τ_0_0 : _Trivial(64)> (@in τ_0_0, @inout τ_0_0) -> @out τ_0_0
// CHECK: %34 = apply %33<S>(%30, %31, %32) : $@convention(thin) <τ_0_0 where τ_0_0 : _Trivial(64)> (@in τ_0_0, @inout τ_0_0) -> @out τ_0_0
// CHECK: %35 = tuple ()
// CHECK: %36 = unchecked_trivial_bit_cast %35 : $() to $()
// CHECK: br bb3
// CHECK: } // end sil function '_T016eager_specialize18copyValueAndReturnxx_xz1stlF'
////////////////////////////////////////////////////////////////////
// Check the ability to specialize for _Trivial
////////////////////////////////////////////////////////////////////
// copyValueAndReturn2<A> (A, s : inout A) -> A
sil [noinline] [_specialize where S : _Trivial] @_T016eager_specialize19copyValueAndReturn2xx_xz1stlF : $@convention(thin) <S> (@in S, @inout S) -> @out S {
bb0(%0 : $*S, %1 : $*S, %2 : $*S):
copy_addr %2 to [initialization] %0 : $*S
destroy_addr %1 : $*S
%7 = tuple ()
return %7 : $()
} // end sil function '_T016eager_specialize19copyValueAndReturn2xx_xz1stlF'
// Check the specialization for _Trivial
// specialized copyValueAndReturn2<A> (A, s : inout A) -> A
// CHECK-LABEL: sil shared [noinline] @_T016eager_specialize19copyValueAndReturn2xx_xz1stlFxxxRlzTlItilr_Tp5 : $@convention(thin) <τ_0_0 where τ_0_0 : _Trivial> (@in τ_0_0, @inout τ_0_0) -> @out τ_0_0
// CHECK: bb0(%0 : $*τ_0_0, %1 : $*τ_0_0, %2 : $*τ_0_0):
// CHECK: copy_addr %2 to [initialization] %0 : $*τ_0_0
// CHECK: destroy_addr %1 : $*τ_0_0
// CHECK: %5 = tuple ()
// CHECK: return %5 : $()
// CHECK: } // end sil function '_T016eager_specialize19copyValueAndReturn2xx_xz1stlFxxxRlzTlItilr_Tp5'
// Generic with specialized dispatch. No more [specialize] attribute.
// copyValueAndReturn2<A> (A, s : inout A) -> A
// CHECK-LABEL: sil [noinline] @_T016eager_specialize19copyValueAndReturn2xx_xz1stlF : $@convention(thin) <S> (@in S, @inout S) -> @out S
// CHECK: bb0(%0 : $*S, %1 : $*S, %2 : $*S):
// CHECK: %3 = metatype $@thick S.Type
// CHECK: %4 = builtin "ispod"<S>(%3 : $@thick S.Type) : $Builtin.Int1
// CHECK: cond_br %4, bb3, bb1
// None of the constraint checks succeeded; perform a generic copy.
// CHECK: bb1:
// CHECK: copy_addr %2 to [initialization] %0 : $*S
// CHECK: destroy_addr %1 : $*S
// CHECK: br bb2
// CHECK: bb2:
// CHECK: %9 = tuple ()
// CHECK: return %9 : $()
// Invoke the specialized function for trivial types
// CHECK: bb3:
// CHECK: %11 = unchecked_addr_cast %0 : $*S to $*S
// CHECK: %12 = unchecked_addr_cast %1 : $*S to $*S
// CHECK: %13 = unchecked_addr_cast %2 : $*S to $*S
// function_ref specialized copyValueAndReturn2<A> (A, s : inout A) -> A
// CHECK: %14 = function_ref @_T016eager_specialize19copyValueAndReturn2xx_xz1stlFxxxRlzTlItilr_Tp5 : $@convention(thin) <τ_0_0 where τ_0_0 : _Trivial> (@in τ_0_0, @inout τ_0_0) -> @out τ_0_0
// CHECK: %15 = apply %14<S>(%11, %12, %13) : $@convention(thin) <τ_0_0 where τ_0_0 : _Trivial> (@in τ_0_0, @inout τ_0_0) -> @out τ_0_0
// CHECK: %16 = tuple ()
// CHECK: %17 = unchecked_trivial_bit_cast %16 : $() to $()
// CHECK: br bb2
// CHECK: } // end sil function '_T016eager_specialize19copyValueAndReturn2xx_xz1stlF'
////////////////////////////////////////////////////////////////////
// Check the ability to specialize for _RefCountedObject
////////////////////////////////////////////////////////////////////
// copyValueAndReturn3<A> (A, s : inout A) -> A
sil [noinline] [_specialize where S : _RefCountedObject] @_T016eager_specialize19copyValueAndReturn3xx_xz1stlF : $@convention(thin) <S> (@in S, @inout S) -> @out S {
bb0(%0 : $*S, %1 : $*S, %2 : $*S):
copy_addr %2 to [initialization] %0 : $*S
destroy_addr %1 : $*S
%7 = tuple ()
return %7 : $()
} // end sil function '_T016eager_specialize19copyValueAndReturn3xx_xz1stlF'
// Check for the specialized function for _RefCountedObject
// specialized copyValueAndReturn3<A> (A, s : inout A) -> A
// CHECK-LABEL: sil shared [noinline] @_T016eager_specialize19copyValueAndReturn3xx_xz1stlFxxxRlzRlItilr_Tp5 : $@convention(thin) <τ_0_0 where τ_0_0 : _RefCountedObject> (@in τ_0_0, @inout τ_0_0) -> @out τ_0_0
// CHECK: bb0(%0 : $*τ_0_0, %1 : $*τ_0_0, %2 : $*τ_0_0):
// CHECK: copy_addr %2 to [initialization] %0 : $*τ_0_0
// CHECK: destroy_addr %1 : $*τ_0_0
// CHECK: %5 = tuple ()
// CHECK: return %5 : $()
// CHECK: } // end sil function '_T016eager_specialize19copyValueAndReturn3xx_xz1stlFxxxRlzRlItilr_Tp5'
// Generic with specialized dispatch. No more [specialize] attribute.
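// The _RefCountedObject check has three outcomes from the "canBeClass"
// builtin: 1 means definitely a Swift class, 2 means "maybe" and needs a
// runtime query, anything else falls back to the generic path. A hedged
// Swift-like pseudocode sketch (isClassOrObjCExistential stands in for the
// runtime call to _swift_isClassOrObjCExistentialType):
//
//   switch canBeClass(S.self) {
//   case 1:  // invoke the _RefCountedObject specialization
//   case 2:  // if isClassOrObjCExistential(S.self) { specialization }
//            // else generic copy
//   default: // generic copy
//   }
//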
// copyValueAndReturn3<A> (A, s : inout A) -> A
// CHECK-LABEL: sil [noinline] @_T016eager_specialize19copyValueAndReturn3xx_xz1stlF : $@convention(thin) <S> (@in S, @inout S) -> @out S {
// Check if it can be a class
// CHECK: bb0(%0 : $*S, %1 : $*S, %2 : $*S):
// CHECK: %3 = metatype $@thick S.Type
// CHECK: %4 = builtin "canBeClass"<S>(%3 : $@thick S.Type) : $Builtin.Int8
// CHECK: %5 = integer_literal $Builtin.Int8, 1
// CHECK: %6 = builtin "cmp_eq_Int8"(%4 : $Builtin.Int8, %5 : $Builtin.Int8) : $Builtin.Int1
// True if it is a Swift class
// CHECK: cond_br %6, bb3, bb4
// CHECK: bb1:
// CHECK: copy_addr %2 to [initialization] %0 : $*S
// CHECK: destroy_addr %1 : $*S
// CHECK: br bb2
// CHECK: bb2:
// CHECK: %11 = tuple ()
// CHECK: return %11 : $()
// Invoke the specialized function for ref-counted objects
// CHECK: bb3:
// CHECK: %13 = unchecked_addr_cast %0 : $*S to $*S
// CHECK: %14 = unchecked_addr_cast %1 : $*S to $*S
// CHECK: %15 = unchecked_addr_cast %2 : $*S to $*S
// function_ref specialized copyValueAndReturn3<A> (A, s : inout A) -> A
// CHECK: %16 = function_ref @_T016eager_specialize19copyValueAndReturn3xx_xz1stlFxxxRlzRlItilr_Tp5 : $@convention(thin) <τ_0_0 where τ_0_0 : _RefCountedObject> (@in τ_0_0, @inout τ_0_0) -> @out τ_0_0
// CHECK: %17 = apply %16<S>(%13, %14, %15) : $@convention(thin) <τ_0_0 where τ_0_0 : _RefCountedObject> (@in τ_0_0, @inout τ_0_0) -> @out τ_0_0
// CHECK: %18 = tuple ()
// CHECK: %19 = unchecked_trivial_bit_cast %18 : $() to $()
// CHECK: br bb2
// Check if the object could be a class or ObjC existential type
// CHECK: bb4:
// CHECK: %21 = integer_literal $Builtin.Int8, 2
// CHECK: %22 = builtin "cmp_eq_Int8"(%4 : $Builtin.Int8, %21 : $Builtin.Int8) : $Builtin.Int1
// CHECK: cond_br %22, bb5, bb1
// CHECK: bb5:
// function_ref _swift_isClassOrObjCExistentialType
// CHECK: %24 = function_ref @_swift_isClassOrObjCExistentialType : $@convention(thin) <τ_0_0> (@thick τ_0_0.Type) -> Bool
// CHECK: %25 = apply %24<S>(%3) : $@convention(thin) <τ_0_0> (@thick τ_0_0.Type) -> Bool
// CHECK: %26 = struct_extract %25 : $Bool, #Bool._value
// CHECK: cond_br %26, bb3, bb1
// CHECK: } // end sil function '_T016eager_specialize19copyValueAndReturn3xx_xz1stlF'
////////////////////////////////////////////////////////////////////
// Check the ability to produce exported specializations, which can
// be referenced from other object files.
////////////////////////////////////////////////////////////////////
// exportSpecializations<A> (A) -> ()
sil [_specialize exported: true, where T == Int64] @_T016eager_specialize21exportSpecializationsyxlF : $@convention(thin) <T> (@in T) -> () {
bb0(%0 : $*T):
destroy_addr %0 : $*T
%3 = tuple ()
return %3 : $()
} // end sil function '_T016eager_specialize21exportSpecializationsyxlF'
// Check that a public specialization for Int64 was produced.
// specialized exportSpecializations<A> (A) -> ()
// CHECK-DEADFUNCELIM-LABEL: sil @_T016eager_specialize21exportSpecializationsyxlFs5Int64V_Tg5 : $@convention(thin) (Int64) -> ()
////////////////////////////////////////////////////////////////////
// Check the ability to produce explicit partial specializations.
////////////////////////////////////////////////////////////////////
// checkExplicitPartialSpecialization<A, B> (A, B) -> ()
sil [_specialize kind: partial, where T == Int64] @_T016eager_specialize34checkExplicitPartialSpecializationyx_q_tr0_lF : $@convention(thin) <T, S> (@in T, @in S) -> () {
bb0(%0 : $*T, %1 : $*S):
destroy_addr %1 : $*S
destroy_addr %0 : $*T
%6 = tuple ()
return %6 : $()
} // end sil function '_T016eager_specialize34checkExplicitPartialSpecializationyx_q_tr0_lF'
// Check for the specialized function for τ_0_0 == Int64
// specialized checkExplicitPartialSpecialization<A, B> (A, B) -> ()
// CHECK-LABEL: sil shared @_T016eager_specialize34checkExplicitPartialSpecializationyx_q_tr0_lFs5Int64Vq_ADRszr0_lItyi_Tp5 : $@convention(thin) <τ_0_0, τ_0_1 where τ_0_0 == Int64> (Int64, @in τ_0_1) -> ()
// CHECK: bb0(%0 : $Int64, %1 : $*τ_0_1):
// CHECK: %2 = alloc_stack $Int64
// CHECK: store %0 to %2 : $*Int64
// CHECK: destroy_addr %1 : $*τ_0_1
// CHECK: destroy_addr %2 : $*Int64
// CHECK: %6 = tuple ()
// CHECK: dealloc_stack %2 : $*Int64
// CHECK: return %6 : $()
// CHECK: } // end sil function '_T016eager_specialize34checkExplicitPartialSpecializationyx_q_tr0_lFs5Int64Vq_ADRszr0_lItyi_Tp5'
// Generic with specialized dispatch. No more [specialize] attribute.
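// For a partial specialization only T is dispatched on; S stays generic and is
// passed through unchanged. A hedged Swift-like sketch (the specialized entry
// point name is illustrative):
//
//   func checkExplicitPartialSpecialization<T, S>(_ t: T, _ s: S) {
//     if T.self == Int64.self {
//       return checkExplicitPartialSpecialization_Int64(t as! Int64, s)
//     }
//     // original generic body
//   }
//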
// checkExplicitPartialSpecialization<A, B> (A, B) -> ()
// CHECK-LABEL: sil @_T016eager_specialize34checkExplicitPartialSpecializationyx_q_tr0_lF : $@convention(thin) <T, S> (@in T, @in S) -> ()
// CHECK: bb0(%0 : $*T, %1 : $*S):
// CHECK: %2 = metatype $@thick T.Type
// CHECK: %3 = metatype $@thick Int64.Type
// CHECK: %4 = unchecked_bitwise_cast %2 : $@thick T.Type to $Builtin.Word
// CHECK: %5 = unchecked_bitwise_cast %3 : $@thick Int64.Type to $Builtin.Word
// CHECK: %6 = builtin "cmp_eq_Word"(%4 : $Builtin.Word, %5 : $Builtin.Word) : $Builtin.Int1
// CHECK: cond_br %6, bb3, bb1
// Type dispatch was not successful.
// CHECK: bb1:
// CHECK: destroy_addr %1 : $*S
// CHECK: destroy_addr %0 : $*T
// CHECK: br bb2
// CHECK: bb2:
// CHECK: %11 = tuple ()
// CHECK: return %11 : $()
// Invoke a partially specialized function.
// CHECK: bb3:
// CHECK: %13 = unchecked_addr_cast %0 : $*T to $*Int64
// CHECK: %14 = load %13 : $*Int64
// CHECK: %15 = unchecked_addr_cast %1 : $*S to $*S
// function_ref specialized checkExplicitPartialSpecialization<A, B> (A, B) -> ()
// CHECK: %16 = function_ref @_T016eager_specialize34checkExplicitPartialSpecializationyx_q_tr0_lFs5Int64Vq_ADRszr0_lItyi_Tp5 : $@convention(thin) <τ_0_0, τ_0_1 where τ_0_0 == Int64> (Int64, @in τ_0_1) -> ()
// CHECK: %17 = apply %16<S>(%14, %15) : $@convention(thin) <τ_0_0, τ_0_1 where τ_0_0 == Int64> (Int64, @in τ_0_1) -> ()
// CHECK: %18 = tuple ()
// CHECK: %19 = unchecked_trivial_bit_cast %18 : $() to $()
// CHECK: br bb2
// CHECK: } // end sil function '_T016eager_specialize34checkExplicitPartialSpecializationyx_q_tr0_lF'
/////////////////////////////////////////////////////////////////////////
// Check that functions with unreachable instructions can be specialized.
/////////////////////////////////////////////////////////////////////////
protocol P {
}
struct T : P {
init()
}
extension P {
public static func f(_ x: Self) -> Self
}
sil @error : $@convention(thin) () -> Never
// CHECK-LABEL: sil @_T016eager_specialize1PPAAE1fxxFZ : $@convention(method) <Self where Self : P> (@in Self, @thick Self.Type) -> @out Self
// CHECK: %3 = metatype $@thick Self.Type
// CHECK: %4 = metatype $@thick T.Type
// CHECK: %5 = unchecked_bitwise_cast %3 : $@thick Self.Type to $Builtin.Word
// CHECK: %6 = unchecked_bitwise_cast %4 : $@thick T.Type to $Builtin.Word
// CHECK: %7 = builtin "cmp_eq_Word"(%5 : $Builtin.Word, %6 : $Builtin.Word) : $Builtin.Int1
// CHECK: cond_br %7, bb2, bb1
// CHECK: bb1:
// CHECK: %9 = function_ref @error : $@convention(thin) () -> Never
// CHECK: %10 = apply %9() : $@convention(thin) () -> Never
// CHECK: unreachable
// CHECK: bb2:
// CHECK: %12 = unchecked_addr_cast %0 : $*Self to $*T
// CHECK: %13 = unchecked_addr_cast %1 : $*Self to $*T
// CHECK: %14 = load %13 : $*T
// CHECK: %15 = unchecked_trivial_bit_cast %2 : $@thick Self.Type to $@thick T.Type
// CHECK: %16 = function_ref @_T016eager_specialize1PPAAE1fxxFZ4main1TV_Tg5 : $@convention(method) (T, @thick T.Type) -> T
// CHECK: %17 = apply %16(%14, %15) : $@convention(method) (T, @thick T.Type) -> T
// CHECK: store %17 to %12 : $*T
// CHECK: %19 = tuple ()
// CHECK: unreachable
// CHECK: } // end sil function '_T016eager_specialize1PPAAE1fxxFZ'
sil [_specialize exported: false, kind: full, where Self == T] @_T016eager_specialize1PPAAE1fxxFZ : $@convention(method) <Self where Self : P> (@in Self, @thick Self.Type) -> @out Self {
bb0(%0 : $*Self, %1 : $*Self, %2 : $@thick Self.Type):
// function_ref error
%5 = function_ref @error : $@convention(thin) () -> Never
%6 = apply %5() : $@convention(thin) () -> Never
unreachable
} // end sil function '_T016eager_specialize1PPAAE1fxxFZ'
////////////////////////////////////////////////////////////////////
// Check that IRGen generates efficient code for fixed-size Trivial
// constraints.
////////////////////////////////////////////////////////////////////
// Check that a specialization for _Trivial(32) uses direct loads and stores
// instead of value witness functions to load and store the value of a generic type.
// CHECK-IRGEN-LABEL: define linkonce_odr hidden swiftcc void @_T016eager_specialize18copyValueAndReturnxx_xz1stlFxxxRlze31_lItilr_Tp5(i32* noalias nocapture sret, i32* noalias nocapture dereferenceable(4), i32* nocapture dereferenceable(4), %swift.type* %"\CF\84_0_0")
// CHECK-IRGEN: entry:
// CHECK-IRGEN-NEXT: %3 = load i32, i32* %2
// CHECK-IRGEN-NEXT: store i32 %3, i32* %0
// CHECK-IRGEN-NEXT: ret void
// CHECK-IRGEN-NEXT:}
// Check that a specialization for _Trivial(64) uses direct loads and stores
// instead of value witness functions to load and store the value of a generic type.
// CHECK-IRGEN-LABEL: define linkonce_odr hidden swiftcc void @_T016eager_specialize18copyValueAndReturnxx_xz1stlFxxxRlze63_lItilr_Tp5(i64* noalias nocapture sret, i64* noalias nocapture dereferenceable(8), i64* nocapture dereferenceable(8), %swift.type* %"\CF\84_0_0")
// CHECK-IRGEN: entry:
// CHECK-IRGEN-NEXT: %3 = load i64, i64* %2
// CHECK-IRGEN-NEXT: store i64 %3, i64* %0
// CHECK-IRGEN-NEXT: ret void
// CHECK-IRGEN-NEXT: }
// Check that a specialization for _Trivial does not call the 'destroy' value witness,
// because it is known that the object is Trivial, i.e. contains no references.
// CHECK-IRGEN-LABEL: define linkonce_odr hidden swiftcc void @_T016eager_specialize19copyValueAndReturn2xx_xz1stlFxxxRlzTlItilr_Tp5(%swift.opaque* noalias nocapture sret, %swift.opaque* noalias nocapture, %swift.opaque* nocapture, %swift.type* %"\CF\84_0_0")
// CHECK-IRGEN-NEXT: entry:
// CHECK-IRGEN-NEXT: %3 = bitcast %swift.type* %"\CF\84_0_0" to i8***
// CHECK-IRGEN-NEXT: %4 = getelementptr inbounds i8**, i8*** %3, i{{.*}} -1
// CHECK-IRGEN-NEXT: %"\CF\84_0_0.valueWitnesses" = load i8**, i8*** %4
// CHECK-IRGEN-NEXT: %5 = getelementptr inbounds i8*, i8** %"\CF\84_0_0.valueWitnesses"
// CHECK-IRGEN-NEXT: %6 = load i8*, i8** %5
// CHECK-IRGEN-NEXT: %initializeWithCopy = {{.*}}
// CHECK-IRGEN-NEXT: %7 = call {{.*}} %initializeWithCopy
// CHECK-IRGEN-NEXT: ret void
// CHECK-IRGEN-NEXT: }
// Check that a specialization for _RefCountedObject just copies the fixed-size reference
// and calls retain/release directly, instead of calling the value witness functions.
// The matching patterns are deliberately imprecise so that they cover both ObjC and non-ObjC platforms.
// CHECK-IRGEN-LABEL: define{{.*}}@_T016eager_specialize19copyValueAndReturn3xx_xz1stlFxxxRlzRlItilr_Tp5
// CHECK-IRGEN: entry:
// CHECK-IRGEN-NOT: ret void
// CHECK-IRGEN: call {{.*}}etain
// CHECK-IRGEN-NOT: ret void
// CHECK-IRGEN: call {{.*}}elease
// CHECK-IRGEN: ret void
////////////////////////////////////////////////////////////////////
// Check that try_apply instructions are handled correctly by the
// eager specializer.
////////////////////////////////////////////////////////////////////
protocol ThrowingP {
func action() throws -> Int64
}
extension Int64 : ThrowingP {
public func action() throws -> Int64
}
class ClassUsingThrowingP {
required init()
@_specialize(exported: false, kind: full, where T == Int64)
public static func f<T>(_: T) throws -> Self where T : ThrowingP
@_specialize(exported: false, kind: full, where T == Int64)
public static func g<T>(_ t: T) throws -> Int64 where T : ThrowingP
deinit
}
// Int64.action()
sil @_T0s5Int64V34eager_specialize_throwing_functionE6actionAByKF : $@convention(method) (Int64) -> (Int64, @error Error)
// protocol witness for ThrowingP.action() in conformance Int64
sil @_T0s5Int64V34eager_specialize_throwing_function9ThrowingPA2cDP6actionAByKFTW : $@convention(witness_method) (@in_guaranteed Int64) -> (Int64, @error Error)
sil @_T034eager_specialize_throwing_function19ClassUsingThrowingPCACycfc : $@convention(method) (@owned ClassUsingThrowingP) -> @owned ClassUsingThrowingP
sil @_T034eager_specialize_throwing_function19ClassUsingThrowingPCfd : $@convention(method) (@guaranteed ClassUsingThrowingP) -> @owned Builtin.NativeObject
// ClassUsingThrowingP.__allocating_init()
sil @_T034eager_specialize_throwing_function19ClassUsingThrowingPCACycfC : $@convention(method) (@thick ClassUsingThrowingP.Type) -> @owned ClassUsingThrowingP
// ClassUsingThrowingP.__deallocating_deinit
sil @_T034eager_specialize_throwing_function19ClassUsingThrowingPCfD : $@convention(method) (@owned ClassUsingThrowingP) -> ()
// f is a function that may throw according to its type, but never actually throws.
// Check that this function is properly specialized by the eager specializer.
// It should dispatch to its specialized version, using apply [nothrow] to invoke it.
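// A hedged SIL-like sketch of the two dispatch shapes produced for f here and
// for g further below (callee names are illustrative): f's thunk may use a
// non-throwing apply because the body never throws, while g's thunk keeps the
// try_apply shape and forwards the error:
//
//   f dispatch:   apply [nothrow] @specialized_f(...)
//   g dispatch:   try_apply @specialized_g(...), normal bbN, error bbE
//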
// CHECK-LABEL: sil @_T034eager_specialize_throwing_function19ClassUsingThrowingPC1fACXDxKAA0G1PRzlFZ : $@convention(method) <T where T : ThrowingP> (@in T, @thick ClassUsingThrowingP.Type) -> (@owned ClassUsingThrowingP, @error Error)
// CHECK: [[SPECIALIZED:%.*]] = function_ref @_T034eager_specialize_throwing_function19ClassUsingThrowingPC1fACXDxKAA0G1PRzlFZs5Int64V_Tg5 : $@convention(method) (Int64, @thick ClassUsingThrowingP.Type) -> (@owned ClassUsingThrowingP, @error Error)
// CHECK: apply [nothrow] [[SPECIALIZED]]
// CHECK: // end sil function '_T034eager_specialize_throwing_function19ClassUsingThrowingPC1fACXDxKAA0G1PRzlFZ'
// static ClassUsingThrowingP.f<A>(_:)
sil [_specialize exported: false, kind: full, where T == Int64] @_T034eager_specialize_throwing_function19ClassUsingThrowingPC1fACXDxKAA0G1PRzlFZ : $@convention(method) <T where T : ThrowingP> (@in T, @thick ClassUsingThrowingP.Type) -> (@owned ClassUsingThrowingP, @error Error) {
bb0(%0 : $*T, %1 : $@thick ClassUsingThrowingP.Type):
destroy_addr %0 : $*T
%4 = unchecked_trivial_bit_cast %1 : $@thick ClassUsingThrowingP.Type to $@thick @dynamic_self ClassUsingThrowingP.Type
// function_ref ClassUsingThrowingP.__allocating_init()
%7 = function_ref @_T034eager_specialize_throwing_function19ClassUsingThrowingPCACycfC : $@convention(method) (@thick ClassUsingThrowingP.Type) -> @owned ClassUsingThrowingP
%8 = upcast %4 : $@thick @dynamic_self ClassUsingThrowingP.Type to $@thick ClassUsingThrowingP.Type
%9 = apply %7(%8) : $@convention(method) (@thick ClassUsingThrowingP.Type) -> @owned ClassUsingThrowingP
%10 = unchecked_ref_cast %9 : $ClassUsingThrowingP to $ClassUsingThrowingP
return %10 : $ClassUsingThrowingP
} // end sil function '_T034eager_specialize_throwing_function19ClassUsingThrowingPC1fACXDxKAA0G1PRzlFZ'
// g is a function that may throw according to its type and has a try_apply inside
// its body.
// Check that this function is properly specialized by the eager specializer.
// It should dispatch to its specialized version and use try_apply to invoke it.
// CHECK-LABEL: sil @_T034eager_specialize_throwing_function19ClassUsingThrowingPC1gs5Int64VxKAA0G1PRzlFZ : $@convention(method) <T where T : ThrowingP> (@in T, @thick ClassUsingThrowingP.Type) -> (Int64, @error Error)
// CHECK: [[SPECIALIZED:%.*]] = function_ref @_T034eager_specialize_throwing_function19ClassUsingThrowingPC1gs5Int64VxKAA0G1PRzlFZAF_Tg5 : $@convention(method) (Int64, @thick ClassUsingThrowingP.Type) -> (Int64, @error Error)
// CHECK: try_apply [[SPECIALIZED]]
// CHECK: // end sil function '_T034eager_specialize_throwing_function19ClassUsingThrowingPC1gs5Int64VxKAA0G1PRzlFZ'
// static ClassUsingThrowingP.g<A>(_:)
sil [_specialize exported: false, kind: full, where T == Int64] @_T034eager_specialize_throwing_function19ClassUsingThrowingPC1gs5Int64VxKAA0G1PRzlFZ : $@convention(method) <T where T : ThrowingP> (@in T, @thick ClassUsingThrowingP.Type) -> (Int64, @error Error) {
bb0(%0 : $*T, %1 : $@thick ClassUsingThrowingP.Type):
%5 = witness_method $T, #ThrowingP.action!1 : <Self where Self : ThrowingP> (Self) -> () throws -> Int64 : $@convention(witness_method) <τ_0_0 where τ_0_0 : ThrowingP> (@in_guaranteed τ_0_0) -> (Int64, @error Error)
try_apply %5<T>(%0) : $@convention(witness_method) <τ_0_0 where τ_0_0 : ThrowingP> (@in_guaranteed τ_0_0) -> (Int64, @error Error), normal bb1, error bb2
bb1(%7 : $Int64): // Preds: bb0
destroy_addr %0 : $*T
return %7 : $Int64
bb2(%10 : $Error): // Preds: bb0
destroy_addr %0 : $*T
throw %10 : $Error
} // end sil function '_T034eager_specialize_throwing_function19ClassUsingThrowingPC1gs5Int64VxKAA0G1PRzlFZ'
sil_vtable ClassUsingThrowingP {
#ClassUsingThrowingP.init!allocator.1: (ClassUsingThrowingP.Type) -> () -> ClassUsingThrowingP : _T034eager_specialize_throwing_function19ClassUsingThrowingPCACycfC // ClassUsingThrowingP.__allocating_init()
#ClassUsingThrowingP.init!initializer.1: (ClassUsingThrowingP.Type) -> () -> ClassUsingThrowingP : _T034eager_specialize_throwing_function19ClassUsingThrowingPCACycfc // ClassUsingThrowingP.init()
#ClassUsingThrowingP.deinit!deallocator: _T034eager_specialize_throwing_function19ClassUsingThrowingPCfD // ClassUsingThrowingP.__deallocating_deinit
}
sil_witness_table hidden Int64: ThrowingP module eager_specialize_throwing_function {
method #ThrowingP.action!1: <Self where Self : ThrowingP> (Self) -> () throws -> Int64 : @_T0s5Int64V34eager_specialize_throwing_function9ThrowingPA2cDP6actionAByKFTW // protocol witness for ThrowingP.action() in conformance Int64
}
sil_default_witness_table hidden ThrowingP {
no_default
}