// RUN: %empty-directory(%t)
-// RUN: %target-swift-frontend -Xllvm -sil-disable-pass=OnoneSimplification -emit-ir %s -o - | %FileCheck %s --check-prefix=CHECK
+// RUN: %target-swift-frontend -emit-module -enable-library-evolution -emit-module-path=%t/resilient_struct.swiftmodule -module-name=resilient_struct %S/../Inputs/resilient_struct.swift
+// RUN: %target-swift-frontend -Xllvm -sil-disable-pass=OnoneSimplification -I %t -emit-ir %s -o - | %FileCheck %s --check-prefix=CHECK --check-prefix=CHECK-%target-ptrsize

// REQUIRES: concurrency

import Builtin
import Swift
+import resilient_struct

// Basic coroutine: a single yield and no results.
//
@@ -2191,6 +2193,124 @@ bb0(%x : $*SwiftClassPair):
  return %t : $()
}

+// CHECK: define {{.*}} void @test_initial_offset
+// CHECK: [[T0:%.*]] = call swiftcc %swift.metadata_response @"$s16resilient_struct12ResilientIntVMa"
+// CHECK: [[MD:%.*]] = extractvalue %swift.metadata_response [[T0]], 0
+// CHECK: [[VWT_PTR:%.*]] = getelementptr inbounds ptr, ptr [[MD]], i64 -1
+// CHECK: [[VWT:%.*]] = load ptr, ptr [[VWT_PTR]]
+// CHECK: [[FLAGS_PTR:%.*]] = getelementptr inbounds %swift.vwtable, ptr [[VWT]], i32 0, i32 10
+// CHECK: [[FLAGS:%.*]] = load i32, ptr [[FLAGS_PTR]]
+// CHECK: [[FLAGS2:%.*]] = zext i32 [[FLAGS]] to i64
+// CHECK: [[ALIGNMASK:%.*]] = and i64 [[FLAGS2]], 255
+// CHECK: = xor i64 [[ALIGNMASK]], -1
+// CHECK: = add i64 16, [[ALIGNMASK]]
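+//
+// Rough sketch of the arithmetic checked above (illustrative only, assuming
+// ResilientInt happens to be 8-byte aligned, i.e. alignmask = 7):
+//   offset = (16 + alignmask) & ~alignmask = (16 + 7) & ~7 = 16
+// i.e. the capture is placed at the first suitably aligned offset after the
+// 16-byte heap context header; the mask has to be loaded from the value
+// witness table because ResilientInt's layout is not known statically.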
+
+sil public_external @closure : $@yield_once @convention(thin) (@in_guaranteed ResilientInt, @guaranteed SwiftClass) -> (@yields @in ResilientInt)
+sil @test_initial_offset : $@convention(thin) (@in ResilientInt, @guaranteed SwiftClass) -> () {
+bb0(%x : $*ResilientInt, %y : $SwiftClass):
+  %f = function_ref @closure : $@yield_once @convention(thin) (@in_guaranteed ResilientInt, @guaranteed SwiftClass) -> (@yields @in ResilientInt)
+  %p = partial_apply [callee_guaranteed] %f(%x, %y) : $@yield_once @convention(thin) (@in_guaranteed ResilientInt, @guaranteed SwiftClass) -> (@yields @in ResilientInt)
+  (%yield, %token) = begin_apply %p() : $@yield_once @callee_guaranteed () -> (@yields @in ResilientInt)
+  end_apply %token as $()
+  release_value %p : $@yield_once @callee_guaranteed () -> (@yields @in ResilientInt)
+  %t = tuple()
+  return %t : $()
+}
+
+protocol Proto1 {}
+protocol Proto2 {}
+struct EmptyType : Proto1 { }
+
+struct SomeType : Proto2 {
+  var d : ResilientInt // some resilient type
+  var x : Int
+}
+
+// CHECK-64-LABEL: define{{.*}} swiftcc void @empty_followed_by_non_fixed(ptr noalias %0)
+// CHECK-64: [[FLAGS:%.*]] = load i32, ptr
+// CHECK-64: [[FLAGS2:%.*]] = zext i32 [[FLAGS]] to i64
+// CHECK-64: [[ALIGNMASK:%.*]] = and i64 [[FLAGS2]], 255
+// CHECK-64: [[NOTALIGNMASK:%.*]] = xor i64 [[ALIGNMASK]], -1
+// Make sure we take the 16-byte context header into account when computing
+// the capture's offset.
+// CHECK-64: [[TMP:%.*]] = add i64 16, [[ALIGNMASK]]
+// CHECK-64: [[OFFSET:%.*]] = and i64 [[TMP]], [[NOTALIGNMASK]]
+// CHECK-64: [[CONTEXT:%.*]] = call noalias ptr @swift_allocObject
+// CHECK-64: [[GEP:%.*]] = getelementptr inbounds i8, ptr [[CONTEXT]], i64 [[OFFSET]]
+// CHECK-64: call ptr @"$s18partial_apply_coro8SomeTypeVWOb"(ptr {{.*}}, ptr [[GEP]])
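+//
+// Illustrative arithmetic, assuming SomeType ends up 8-byte aligned
+// (alignmask = 7): EmptyType occupies no storage, so the SomeType capture
+// starts at (16 + 7) & ~7 = 16, directly after the context header.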
+
+sil @foo : $@yield_once @convention(thin) <T0, T1 where T0 : Proto1, T1 : Proto2> (@in_guaranteed T0, @in_guaranteed T1) -> (@yields @in T0, @yields @in T1)
+sil @empty_followed_by_non_fixed : $@convention(thin) (EmptyType, @in_guaranteed SomeType) -> () {
+entry(%0 : $EmptyType, %1 : $*SomeType):
+  %5 = alloc_stack $EmptyType
+  store %0 to %5 : $*EmptyType
+  %31 = function_ref @foo : $@yield_once @convention(thin) <T0, T1 where T0 : Proto1, T1 : Proto2> (@in_guaranteed T0, @in_guaranteed T1) -> (@yields @in T0, @yields @in T1)
+  %32 = alloc_stack $EmptyType
+  copy_addr %5 to [init] %32 : $*EmptyType
+  %34 = alloc_stack $SomeType
+  copy_addr %1 to [init] %34 : $*SomeType // id: %35
+  %36 = partial_apply [callee_guaranteed] %31<EmptyType, SomeType>(%32, %34) : $@yield_once @convention(thin) <T0, T1 where T0 : Proto1, T1 : Proto2> (@in_guaranteed T0, @in_guaranteed T1) -> (@yields @in T0, @yields @in T1)
+  (%yield1, %yield2, %token) = begin_apply %36() : $@yield_once @callee_guaranteed @substituted <T0, T1 where T0 : Proto1, T1 : Proto2> () -> (@yields @in T0, @yields @in T1) for <EmptyType, SomeType>
+  end_apply %token as $()
+  release_value %36 : $@yield_once @callee_guaranteed @substituted <T0, T1 where T0 : Proto1, T1 : Proto2> () -> (@yields @in T0, @yields @in T1) for <EmptyType, SomeType>
+  dealloc_stack %34 : $*SomeType
+  dealloc_stack %32 : $*EmptyType
+  dealloc_stack %5 : $*EmptyType
+  %40 = tuple()
+  return %40 : $()
+}
+
+// CHECK-64-LABEL: define{{.*}} swiftcc void @fixed_followed_by_empty_followed_by_non_fixed
+// CHECK-64-NOT: ret
+// CHECK-64: [[FLAGS:%.*]] = load i32, ptr
+// CHECK-64: [[FLAGS2:%.*]] = zext i32 [[FLAGS]] to i64
+// CHECK-64: [[ALIGNMASK:%.*]] = and i64 [[FLAGS2]], 255
+// CHECK-64: [[NOTALIGNMASK:%.*]] = xor i64 [[ALIGNMASK]], -1
+// Make sure we compute the correct offset of the non-fixed field.
+// CHECK-64: [[TMP:%.*]] = add i64 20, [[ALIGNMASK]]
+// CHECK-64: ret
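+//
+// The constant 20 is the 16-byte context header plus the 4-byte FixedType
+// capture (a single Int32); EmptyType contributes nothing. Illustrative
+// arithmetic, assuming SomeType is 8-byte aligned (alignmask = 7):
+//   offset = (20 + 7) & ~7 = 24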
+
+struct FixedType {
+  var f: Int32
+}
+sil @foo2 : $@yield_once @convention(thin) <T0, T1, T2> (@in_guaranteed T0, @in_guaranteed T1, @in_guaranteed T2) -> (@yields @in T0, @out T1, @out T2)
+sil @fixed_followed_by_empty_followed_by_non_fixed : $@convention(thin) (EmptyType, @in_guaranteed SomeType, FixedType) -> () {
+entry(%0 : $EmptyType, %1 : $*SomeType, %3 : $FixedType):
+  %5 = alloc_stack $EmptyType
+  store %0 to %5 : $*EmptyType
+  %7 = alloc_stack $FixedType
+  store %3 to %7 : $*FixedType
+  %31 = function_ref @foo2 : $@yield_once @convention(thin) <T0, T1, T2> (@in_guaranteed T0, @in_guaranteed T1, @in_guaranteed T2) -> (@yields @in T0, @out T1, @out T2)
+  %32 = alloc_stack $EmptyType
+  copy_addr %5 to [init] %32 : $*EmptyType
+  %34 = alloc_stack $SomeType
+  copy_addr %1 to [init] %34 : $*SomeType // id: %35
+  %36 = partial_apply [callee_guaranteed] %31<FixedType, EmptyType, SomeType>(%7, %32, %34) : $@yield_once @convention(thin) <T0, T1, T2> (@in_guaranteed T0, @in_guaranteed T1, @in_guaranteed T2) -> (@yields @in T0, @out T1, @out T2)
+  release_value %36 : $@yield_once @callee_guaranteed @substituted <T0, T1, T2 where T0: Copyable, T0: Escapable, T1: Copyable, T1: Escapable, T2: Copyable, T2: Escapable> () -> (@yields @in T0, @out T1, @out T2) for <FixedType, EmptyType, SomeType>
+  dealloc_stack %34 : $*SomeType
+  dealloc_stack %32 : $*EmptyType
+  dealloc_stack %7 : $*FixedType
+  dealloc_stack %5 : $*EmptyType
+  %40 = tuple()
+  return %40 : $()
+}
+
+// Test that we don't emit a swift_allocObject call for a context whose size
+// would be 0 (the only capture is an empty type), because allocating a
+// zero-sized object is not allowed.
+// CHECK-LABEL: define{{.*}} swiftcc void @my_test_case
+// CHECK-NOT: swift_allocObject
+// CHECK: ret
+sil @take_empty : $@yield_once @convention(thin) (@in_guaranteed EmptyType) -> (@yields EmptyType)
+sil @my_test_case : $@convention(thin) () -> () {
+entry:
+  %5 = alloc_stack $EmptyType
+  // store % to %5
+  %f = function_ref @take_empty : $@yield_once @convention(thin) (@in_guaranteed EmptyType) -> (@yields EmptyType)
+  %36 = partial_apply [callee_guaranteed] %f(%5) : $@yield_once @convention(thin) (@in_guaranteed EmptyType) -> (@yields EmptyType)
+  release_value %36 : $@yield_once @callee_guaranteed () -> (@yields EmptyType)
+  dealloc_stack %5 : $*EmptyType
+  %t = tuple()
+  return %t : $()
+}
+
// CHECK: define {{.*}} { ptr, i64 } @indirect_in_constant_captured_class_pair_param(ptr noalias dereferenceable(32) %0, i64 %1, ptr noalias nocapture dereferenceable(16) %2)
sil public @indirect_in_constant_captured_class_pair_param : $@yield_once @convention(thin) (Int, @in_guaranteed SwiftClassPair) -> (@yields Int) {
entry(%i : $Int, %ic : $*SwiftClassPair):