; RUN: llc < %s -mtriple=hexagon-unknown-linux-musl \
; RUN: | FileCheck %s -check-prefix=CHECK

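; Check that i128 <-> floating-point conversions with strict FP semantics are
; lowered to calls to the corresponding runtime library conversion routines
; (__fixdfti and friends) on Hexagon.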
define i64 @double_to_i128(double %d) nounwind strictfp {
; CHECK-LABEL: double_to_i128:
; CHECK: // %bb.0:
; CHECK: call __fixdfti
; CHECK: dealloc_return
  %1 = tail call i128 @llvm.experimental.constrained.fptosi.i128.f64(double %d, metadata !"fpexcept.strict")
  %2 = trunc i128 %1 to i64
  ret i64 %2
}

define i64 @double_to_ui128(double %d) nounwind strictfp {
; CHECK-LABEL: double_to_ui128:
; CHECK: // %bb.0:
; CHECK: call __fixunsdfti
; CHECK: dealloc_return
  %1 = tail call i128 @llvm.experimental.constrained.fptoui.i128.f64(double %d, metadata !"fpexcept.strict")
  %2 = trunc i128 %1 to i64
  ret i64 %2
}

define i64 @float_to_i128(float %d) nounwind strictfp {
; CHECK-LABEL: float_to_i128:
; CHECK: // %bb.0:
; CHECK: call __fixsfti
; CHECK: dealloc_return
  %1 = tail call i128 @llvm.experimental.constrained.fptosi.i128.f32(float %d, metadata !"fpexcept.strict")
  %2 = trunc i128 %1 to i64
  ret i64 %2
}

define i64 @float_to_ui128(float %d) nounwind strictfp {
; CHECK-LABEL: float_to_ui128:
; CHECK: // %bb.0:
; CHECK: call __fixunssfti
; CHECK: dealloc_return
  %1 = tail call i128 @llvm.experimental.constrained.fptoui.i128.f32(float %d, metadata !"fpexcept.strict")
  %2 = trunc i128 %1 to i64
  ret i64 %2
}

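; The int-to-fp tests below take their i128 operand through a pointer and load
; it before converting, using dynamic rounding and strict exception semantics.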
define double @ui128_to_double(ptr nocapture readonly %0) nounwind strictfp {
; CHECK-LABEL: ui128_to_double:
; CHECK: // %bb.0:
; CHECK: call __floatuntidf
; CHECK: dealloc_return
  %2 = load i128, ptr %0, align 16
  %3 = tail call double @llvm.experimental.constrained.uitofp.f64.i128(i128 %2, metadata !"round.dynamic", metadata !"fpexcept.strict")
  ret double %3
}

define float @i128_to_float(ptr nocapture readonly %0) nounwind strictfp {
; CHECK-LABEL: i128_to_float:
; CHECK: // %bb.0:
; CHECK: call __floattisf
; CHECK: dealloc_return
  %2 = load i128, ptr %0, align 16
  %3 = tail call float @llvm.experimental.constrained.sitofp.f32.i128(i128 %2, metadata !"round.dynamic", metadata !"fpexcept.strict")
  ret float %3
}

define float @ui128_to_float(ptr nocapture readonly %0) nounwind strictfp {
; CHECK-LABEL: ui128_to_float:
; CHECK: // %bb.0:
; CHECK: call __floatuntisf
; CHECK: dealloc_return
  %2 = load i128, ptr %0, align 16
  %3 = tail call float @llvm.experimental.constrained.uitofp.f32.i128(i128 %2, metadata !"round.dynamic", metadata !"fpexcept.strict")
  ret float %3
}

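; A sketch of one more case, not in the original test: signed i128 to double,
; using the constrained sitofp intrinsic already declared below. By analogy
; with the cases above it is assumed to lower to the __floattidf runtime
; routine; the function name and CHECK lines are illustrative, not verified
; output.
define double @i128_to_double(ptr nocapture readonly %0) nounwind strictfp {
; CHECK-LABEL: i128_to_double:
; CHECK: // %bb.0:
; CHECK: call __floattidf
; CHECK: dealloc_return
  %2 = load i128, ptr %0, align 16
  %3 = tail call double @llvm.experimental.constrained.sitofp.f64.i128(i128 %2, metadata !"round.dynamic", metadata !"fpexcept.strict")
  ret double %3
}
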
declare i128 @llvm.experimental.constrained.fptosi.i128.f64(double, metadata)
declare i128 @llvm.experimental.constrained.fptoui.i128.f64(double, metadata)
declare i128 @llvm.experimental.constrained.fptosi.i128.f32(float, metadata)
declare i128 @llvm.experimental.constrained.fptoui.i128.f32(float, metadata)
declare double @llvm.experimental.constrained.sitofp.f64.i128(i128, metadata, metadata)
declare double @llvm.experimental.constrained.uitofp.f64.i128(i128, metadata, metadata)
declare float @llvm.experimental.constrained.sitofp.f32.i128(i128, metadata, metadata)
declare float @llvm.experimental.constrained.uitofp.f32.i128(i128, metadata, metadata)