-rw-r--r--   llvm/lib/Target/AArch64/AArch64LegalizerInfo.cpp            3
-rw-r--r--   llvm/test/CodeGen/AArch64/GlobalISel/legalize-extload.mir   148
2 files changed, 151 insertions, 0 deletions
diff --git a/llvm/lib/Target/AArch64/AArch64LegalizerInfo.cpp b/llvm/lib/Target/AArch64/AArch64LegalizerInfo.cpp
index aeedeba73ab..cf063316c7d 100644
--- a/llvm/lib/Target/AArch64/AArch64LegalizerInfo.cpp
+++ b/llvm/lib/Target/AArch64/AArch64LegalizerInfo.cpp
@@ -201,6 +201,9 @@ AArch64LegalizerInfo::AArch64LegalizerInfo(const AArch64Subtarget &ST) {
       .legalForTypesWithMemDesc({{s32, p0, 8, 8},
                                  {s32, p0, 16, 8},
                                  {s32, p0, 32, 8},
+                                 {s64, p0, 8, 2},
+                                 {s64, p0, 16, 2},
+                                 {s64, p0, 32, 4},
                                  {s64, p0, 64, 8},
                                  {p0, p0, 64, 8},
                                  {v2s32, p0, 64, 8}})
diff --git a/llvm/test/CodeGen/AArch64/GlobalISel/legalize-extload.mir b/llvm/test/CodeGen/AArch64/GlobalISel/legalize-extload.mir
index a26704497c3..22873b7ff96 100644
--- a/llvm/test/CodeGen/AArch64/GlobalISel/legalize-extload.mir
+++ b/llvm/test/CodeGen/AArch64/GlobalISel/legalize-extload.mir
@@ -1,5 +1,51 @@
 # NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
 # RUN: llc -march=aarch64 -run-pass=legalizer %s -o - -verify-machineinstrs | FileCheck %s
+--- |
+  target datalayout = "e-m:e-i8:8:32-i16:16:32-i64:64-i128:128-n32:64-S128"
+  target triple = "aarch64"
+
+  define void @test_extload() {
+  entry:
+    ret void
+  }
+
+  define i64 @sext_i32_i64(i32* %ptr) {
+    %ld = load i32, i32* %ptr, align 4
+    %v = sext i32 %ld to i64
+    ret i64 %v
+  }
+
+  define i64 @sext_i16_i64(i16* %ptr) {
+    %ld = load i16, i16* %ptr, align 2
+    %v = sext i16 %ld to i64
+    ret i64 %v
+  }
+
+  define i64 @sext_i8_i64(i8* %ptr) {
+    %ld = load i8, i8* %ptr, align 1
+    %v = sext i8 %ld to i64
+    ret i64 %v
+  }
+
+  define i64 @zext_i32_i64(i32* %ptr) {
+    %ld = load i32, i32* %ptr, align 4
+    %v = zext i32 %ld to i64
+    ret i64 %v
+  }
+
+  define i64 @zext_i16_i64(i16* %ptr) {
+    %ld = load i16, i16* %ptr, align 2
+    %v = zext i16 %ld to i64
+    ret i64 %v
+  }
+
+  define i64 @zext_i8_i64(i8* %ptr) {
+    %ld = load i8, i8* %ptr, align 1
+    %v = zext i8 %ld to i64
+    ret i64 %v
+  }
+
+...
 ---
 name:            test_extload
 body: |
@@ -13,3 +59,105 @@ body: |
     %1:_(s32) = G_LOAD %0 :: (load 1)
     $w0 = COPY %1
 ...
+---
+name:            sext_i32_i64
+body:             |
+  bb.1:
+    liveins: $x0
+
+    ; CHECK-LABEL: name: sext_i32_i64
+    ; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x0
+    ; CHECK: [[SEXTLOAD:%[0-9]+]]:_(s64) = G_SEXTLOAD [[COPY]](p0) :: (load 4 from %ir.ptr)
+    ; CHECK: $x0 = COPY [[SEXTLOAD]](s64)
+    ; CHECK: RET_ReallyLR implicit $x0
+    %0:_(p0) = COPY $x0
+    %2:_(s64) = G_SEXTLOAD %0(p0) :: (load 4 from %ir.ptr)
+    $x0 = COPY %2(s64)
+    RET_ReallyLR implicit $x0
+
+...
+---
+name:            sext_i16_i64
+body:             |
+  bb.1:
+    liveins: $x0
+
+    ; CHECK-LABEL: name: sext_i16_i64
+    ; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x0
+    ; CHECK: [[SEXTLOAD:%[0-9]+]]:_(s64) = G_SEXTLOAD [[COPY]](p0) :: (load 2 from %ir.ptr)
+    ; CHECK: $x0 = COPY [[SEXTLOAD]](s64)
+    ; CHECK: RET_ReallyLR implicit $x0
+    %0:_(p0) = COPY $x0
+    %2:_(s64) = G_SEXTLOAD %0(p0) :: (load 2 from %ir.ptr)
+    $x0 = COPY %2(s64)
+    RET_ReallyLR implicit $x0
+
+...
+---
+name:            sext_i8_i64
+body:             |
+  bb.1:
+    liveins: $x0
+
+    ; CHECK-LABEL: name: sext_i8_i64
+    ; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x0
+    ; CHECK: [[SEXTLOAD:%[0-9]+]]:_(s64) = G_SEXTLOAD [[COPY]](p0) :: (load 1 from %ir.ptr)
+    ; CHECK: $x0 = COPY [[SEXTLOAD]](s64)
+    ; CHECK: RET_ReallyLR implicit $x0
+    %0:_(p0) = COPY $x0
+    %2:_(s64) = G_SEXTLOAD %0(p0) :: (load 1 from %ir.ptr)
+    $x0 = COPY %2(s64)
+    RET_ReallyLR implicit $x0
+
+...
+---
+name:            zext_i32_i64
+body:             |
+  bb.1:
+    liveins: $x0
+
+    ; CHECK-LABEL: name: zext_i32_i64
+    ; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x0
+    ; CHECK: [[ZEXTLOAD:%[0-9]+]]:_(s64) = G_ZEXTLOAD [[COPY]](p0) :: (load 4 from %ir.ptr)
+    ; CHECK: $x0 = COPY [[ZEXTLOAD]](s64)
+    ; CHECK: RET_ReallyLR implicit $x0
+    %0:_(p0) = COPY $x0
+    %2:_(s64) = G_ZEXTLOAD %0(p0) :: (load 4 from %ir.ptr)
+    $x0 = COPY %2(s64)
+    RET_ReallyLR implicit $x0
+
+...
+---
+name:            zext_i16_i64
+body:             |
+  bb.1:
+    liveins: $x0
+
+    ; CHECK-LABEL: name: zext_i16_i64
+    ; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x0
+    ; CHECK: [[ZEXTLOAD:%[0-9]+]]:_(s64) = G_ZEXTLOAD [[COPY]](p0) :: (load 2 from %ir.ptr)
+    ; CHECK: $x0 = COPY [[ZEXTLOAD]](s64)
+    ; CHECK: RET_ReallyLR implicit $x0
+    %0:_(p0) = COPY $x0
+    %2:_(s64) = G_ZEXTLOAD %0(p0) :: (load 2 from %ir.ptr)
+    $x0 = COPY %2(s64)
+    RET_ReallyLR implicit $x0
+
+...
+---
+name:            zext_i8_i64
+body:             |
+  bb.1:
+    liveins: $x0
+
+    ; CHECK-LABEL: name: zext_i8_i64
+    ; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x0
+    ; CHECK: [[ZEXTLOAD:%[0-9]+]]:_(s64) = G_ZEXTLOAD [[COPY]](p0) :: (load 1 from %ir.ptr)
+    ; CHECK: $x0 = COPY [[ZEXTLOAD]](s64)
+    ; CHECK: RET_ReallyLR implicit $x0
+    %0:_(p0) = COPY $x0
+    %2:_(s64) = G_ZEXTLOAD %0(p0) :: (load 1 from %ir.ptr)
+    $x0 = COPY %2(s64)
+    RET_ReallyLR implicit $x0
+
+...

