+; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
+; RUN: llc < %s -mtriple=riscv64 -mattr=+v | FileCheck %s
+
+declare i32 @llvm.experimental.constrained.fptoui.i32.f64(double, metadata)
+declare void @g()
+
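+; The consecutive i64 loads/stores are combined into 2-element vector
+; accesses. The vector loaded from %p is live across the call to @g, so it
+; must be spilled to the scalable stack region (vs1r.v) and reloaded
+; afterwards (vl1r.v).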
+define void @f(ptr %m, ptr %n, ptr %p, ptr %q, ptr %r, ptr %s, double %t) {
+; CHECK-LABEL: f:
+; CHECK: # %bb.0:
+; CHECK-NEXT: addi sp, sp, -48
+; CHECK-NEXT: .cfi_def_cfa_offset 48
+; CHECK-NEXT: sd ra, 40(sp) # 8-byte Folded Spill
+; CHECK-NEXT: sd s0, 32(sp) # 8-byte Folded Spill
+; CHECK-NEXT: sd s1, 24(sp) # 8-byte Folded Spill
+; CHECK-NEXT: sd s2, 16(sp) # 8-byte Folded Spill
+; CHECK-NEXT: .cfi_offset ra, -8
+; CHECK-NEXT: .cfi_offset s0, -16
+; CHECK-NEXT: .cfi_offset s1, -24
+; CHECK-NEXT: .cfi_offset s2, -32
+; CHECK-NEXT: csrr a6, vlenb
+; CHECK-NEXT: sub sp, sp, a6
+; CHECK-NEXT: .cfi_escape 0x0f, 0x0d, 0x72, 0x00, 0x11, 0x30, 0x22, 0x11, 0x01, 0x92, 0xa2, 0x38, 0x00, 0x1e, 0x22 # sp + 48 + 1 * vlenb
+; CHECK-NEXT: mv s0, a5
+; CHECK-NEXT: mv s1, a4
+; CHECK-NEXT: vsetivli zero, 2, e64, m1, ta, ma
+; CHECK-NEXT: vle64.v v8, (a0)
+; CHECK-NEXT: vse64.v v8, (a1)
+; CHECK-NEXT: vle64.v v8, (a2)
+; CHECK-NEXT: addi a0, sp, 16
+; CHECK-NEXT: vs1r.v v8, (a0) # Unknown-size Folded Spill
+; CHECK-NEXT: mv s2, a3
+; CHECK-NEXT: call g
+; CHECK-NEXT: addi a0, sp, 16
+; CHECK-NEXT: vl1r.v v8, (a0) # Unknown-size Folded Reload
+; CHECK-NEXT: vsetivli zero, 2, e64, m1, ta, ma
+; CHECK-NEXT: vse64.v v8, (s2)
+; CHECK-NEXT: vle64.v v8, (s1)
+; CHECK-NEXT: vse64.v v8, (s0)
+; CHECK-NEXT: csrr a0, vlenb
+; CHECK-NEXT: add sp, sp, a0
+; CHECK-NEXT: .cfi_def_cfa sp, 48
+; CHECK-NEXT: ld ra, 40(sp) # 8-byte Folded Reload
+; CHECK-NEXT: ld s0, 32(sp) # 8-byte Folded Reload
+; CHECK-NEXT: ld s1, 24(sp) # 8-byte Folded Reload
+; CHECK-NEXT: ld s2, 16(sp) # 8-byte Folded Reload
+; CHECK-NEXT: .cfi_restore ra
+; CHECK-NEXT: .cfi_restore s0
+; CHECK-NEXT: .cfi_restore s1
+; CHECK-NEXT: .cfi_restore s2
+; CHECK-NEXT: addi sp, sp, 48
+; CHECK-NEXT: .cfi_def_cfa_offset 0
+; CHECK-NEXT: ret
+  %z0 = load i64, ptr %m
+  %m.1 = getelementptr i64, ptr %m, i64 1
+  %z1 = load i64, ptr %m.1
+  store i64 %z0, ptr %n
+  %n.1 = getelementptr i64, ptr %n, i64 1
+  store i64 %z1, ptr %n.1
+
+  %x0 = load i64, ptr %p
+  %p.1 = getelementptr i64, ptr %p, i64 1
+  %x1 = load i64, ptr %p.1
+  call void @g()
+  store i64 %x0, ptr %q
+  %q.1 = getelementptr i64, ptr %q, i64 1
+  store i64 %x1, ptr %q.1
+
+  %y0 = load i64, ptr %r
+  %r.1 = getelementptr i64, ptr %r, i64 1
+  %y1 = load i64, ptr %r.1
+  store i64 %y0, ptr %s
+  %s.1 = getelementptr i64, ptr %s, i64 1
+  store i64 %y1, ptr %s.1
+
+  ret void
+}
+
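+; Same pattern, but the instruction between the vector load and store is a
+; strict-FP conversion rather than a call: the vector value stays live in v8,
+; so no spill and no stack adjustment are emitted.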
+define void @f1(ptr %m, ptr %n, ptr %p, ptr %q, ptr %r, ptr %s, double %t) strictfp {
+; CHECK-LABEL: f1:
+; CHECK: # %bb.0:
+; CHECK-NEXT: vsetivli zero, 2, e64, m1, ta, ma
+; CHECK-NEXT: vle64.v v8, (a2)
+; CHECK-NEXT: fcvt.wu.d a0, fa0, rtz
+; CHECK-NEXT: vse64.v v8, (a3)
+; CHECK-NEXT: ret
+  %x0 = load i64, ptr %p
+  %p.1 = getelementptr i64, ptr %p, i64 1
+  %x1 = load i64, ptr %p.1
+  %t1 = call i32 @llvm.experimental.constrained.fptoui.i32.f64(double %t, metadata !"fpexcept.strict")
+  store i64 %x0, ptr %q
+  %q.1 = getelementptr i64, ptr %q, i64 1
+  store i64 %x1, ptr %q.1
+
+  ret void
+}