
After the refactoring in #149710 the logic change is trivial.

Motivation for preferring sign-extended 32-bit loads (LW) vs zero-extended (LWU):

* LW is compressible while LWU is not.
* Helps to minimise the diff vs RV32 (e.g. LWU vs LW).
* Helps to minimise distracting diffs vs GCC. I see this come up frequently when comparing GCC code, and in these cases it's a red herring.

Similar normalisation could be done for LHU and LH, but this is less well motivated as there is a compressed LHU (and if performing the change in RISCVOptWInstrs it wouldn't be done for RV32). There is a compressed LBU but not LB, so a similar normalisation for byte-sized loads would actually be a code-size regression. Load narrowing when allowed by hasAllNBitUsers isn't explored in this patch.

This changes ~20500 instructions in an RVA22 build of the llvm-test-suite (including SPEC 2017). As part of the review, the option of doing the change at ISel time was explored but was found to be less effective.
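For reference, a minimal hand-written sketch of the compressibility point (not output from this patch; the operands are deliberately chosen to satisfy the compressed-form constraints):

```asm
# c.lw requires rd/rs1 in x8-x15 and a small 4-byte-scaled offset,
# which a0/a1 with offset 0 satisfy.
lw   a0, 0(a1)    # eligible for the 2-byte c.lw encoding
lwu  a0, 0(a1)    # no compressed counterpart exists; always 4 bytes
```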
; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py UTC_ARGS: --version 5
; RUN: sed 's/iXLen/i32/g' %s | llc -mtriple=riscv32 -O2 \
; RUN: | FileCheck %s --check-prefixes=CHECK,CHECK-RV32,CHECK-ALIGNED,CHECK-ALIGNED-RV32
; RUN: sed 's/iXLen/i64/g' %s | llc -mtriple=riscv64 -O2 \
; RUN: | FileCheck %s --check-prefixes=CHECK,CHECK-RV64,CHECK-ALIGNED,CHECK-ALIGNED-RV64
; RUN: sed 's/iXLen/i32/g' %s | llc -mtriple=riscv32 -mattr=+zbb -O2 \
; RUN: | FileCheck %s --check-prefixes=CHECK,CHECK-RV32,CHECK-ALIGNED,CHECK-ALIGNED-RV32-ZBB
; RUN: sed 's/iXLen/i64/g' %s | llc -mtriple=riscv64 -mattr=+zbb -O2 \
; RUN: | FileCheck %s --check-prefixes=CHECK,CHECK-RV64,CHECK-ALIGNED,CHECK-ALIGNED-RV64-ZBB
; RUN: sed 's/iXLen/i32/g' %s | llc -mtriple=riscv32 -mattr=+zbkb -O2 \
; RUN: | FileCheck %s --check-prefixes=CHECK,CHECK-RV32,CHECK-ALIGNED,CHECK-ALIGNED-RV32-ZBKB
; RUN: sed 's/iXLen/i64/g' %s | llc -mtriple=riscv64 -mattr=+zbkb -O2 \
; RUN: | FileCheck %s --check-prefixes=CHECK,CHECK-RV64,CHECK-ALIGNED,CHECK-ALIGNED-RV64-ZBKB
; RUN: sed 's/iXLen/i32/g' %s | llc -mtriple=riscv32 -mattr=+v -O2 \
; RUN: | FileCheck %s --check-prefixes=CHECK,CHECK-RV32,CHECK-ALIGNED,CHECK-ALIGNED-RV32-V
; RUN: sed 's/iXLen/i64/g' %s | llc -mtriple=riscv64 -mattr=+v -O2 \
; RUN: | FileCheck %s --check-prefixes=CHECK,CHECK-RV64,CHECK-ALIGNED,CHECK-ALIGNED-RV64-V
; RUN: sed 's/iXLen/i32/g' %s | llc -mtriple=riscv32 -mattr=+unaligned-scalar-mem -O2 \
; RUN: | FileCheck %s --check-prefixes=CHECK,CHECK-RV32,CHECK-UNALIGNED,CHECK-UNALIGNED-RV32
; RUN: sed 's/iXLen/i64/g' %s | llc -mtriple=riscv64 -mattr=+unaligned-scalar-mem -O2 \
; RUN: | FileCheck %s --check-prefixes=CHECK,CHECK-RV64,CHECK-UNALIGNED,CHECK-UNALIGNED-RV64
; RUN: sed 's/iXLen/i32/g' %s | llc -mtriple=riscv32 -mattr=+zbb,+unaligned-scalar-mem -O2 \
; RUN: | FileCheck %s --check-prefixes=CHECK,CHECK-RV32,CHECK-UNALIGNED,CHECK-UNALIGNED-RV32-ZBB
; RUN: sed 's/iXLen/i64/g' %s | llc -mtriple=riscv64 -mattr=+zbb,+unaligned-scalar-mem -O2 \
; RUN: | FileCheck %s --check-prefixes=CHECK,CHECK-RV64,CHECK-UNALIGNED,CHECK-UNALIGNED-RV64-ZBB
; RUN: sed 's/iXLen/i32/g' %s | llc -mtriple=riscv32 -mattr=+zbkb,+unaligned-scalar-mem -O2 \
; RUN: | FileCheck %s --check-prefixes=CHECK,CHECK-RV32,CHECK-UNALIGNED,CHECK-UNALIGNED-RV32-ZBKB
; RUN: sed 's/iXLen/i64/g' %s | llc -mtriple=riscv64 -mattr=+zbkb,+unaligned-scalar-mem -O2 \
; RUN: | FileCheck %s --check-prefixes=CHECK,CHECK-RV64,CHECK-UNALIGNED,CHECK-UNALIGNED-RV64-ZBKB
; RUN: sed 's/iXLen/i32/g' %s | llc -mtriple=riscv32 -mattr=+v,+unaligned-scalar-mem,+unaligned-vector-mem -O2 \
; RUN: | FileCheck %s --check-prefixes=CHECK,CHECK-RV32,CHECK-UNALIGNED,CHECK-UNALIGNED-RV32-V
; RUN: sed 's/iXLen/i64/g' %s | llc -mtriple=riscv64 -mattr=+v,+unaligned-scalar-mem,+unaligned-vector-mem -O2 \
; RUN: | FileCheck %s --check-prefixes=CHECK,CHECK-RV64,CHECK-UNALIGNED,CHECK-UNALIGNED-RV64-V

declare i32 @bcmp(ptr, ptr, iXLen) nounwind readonly
declare i32 @memcmp(ptr, ptr, iXLen) nounwind readonly

define i32 @bcmp_size_0(ptr %s1, ptr %s2) nounwind optsize {
; CHECK-RV32-LABEL: bcmp_size_0:
; CHECK-RV32: # %bb.0: # %entry
; CHECK-RV32-NEXT: addi sp, sp, -16
; CHECK-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-RV32-NEXT: li a2, 0
; CHECK-RV32-NEXT: call bcmp
; CHECK-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-RV32-NEXT: addi sp, sp, 16
; CHECK-RV32-NEXT: ret
;
; CHECK-RV64-LABEL: bcmp_size_0:
; CHECK-RV64: # %bb.0: # %entry
; CHECK-RV64-NEXT: addi sp, sp, -16
; CHECK-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-RV64-NEXT: li a2, 0
; CHECK-RV64-NEXT: call bcmp
; CHECK-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-RV64-NEXT: addi sp, sp, 16
; CHECK-RV64-NEXT: ret
entry:
  %bcmp = call signext i32 @bcmp(ptr %s1, ptr %s2, iXLen 0)
  ret i32 %bcmp
}

define i32 @bcmp_size_1(ptr %s1, ptr %s2) nounwind optsize {
; CHECK-ALIGNED-RV32-LABEL: bcmp_size_1:
; CHECK-ALIGNED-RV32: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-NEXT: li a2, 1
; CHECK-ALIGNED-RV32-NEXT: call bcmp
; CHECK-ALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-NEXT: ret
;
; CHECK-ALIGNED-RV64-LABEL: bcmp_size_1:
; CHECK-ALIGNED-RV64: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-NEXT: li a2, 1
; CHECK-ALIGNED-RV64-NEXT: call bcmp
; CHECK-ALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBB-LABEL: bcmp_size_1:
; CHECK-ALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBB-NEXT: li a2, 1
; CHECK-ALIGNED-RV32-ZBB-NEXT: call bcmp
; CHECK-ALIGNED-RV32-ZBB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBB-LABEL: bcmp_size_1:
; CHECK-ALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBB-NEXT: li a2, 1
; CHECK-ALIGNED-RV64-ZBB-NEXT: call bcmp
; CHECK-ALIGNED-RV64-ZBB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBKB-LABEL: bcmp_size_1:
; CHECK-ALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBKB-NEXT: li a2, 1
; CHECK-ALIGNED-RV32-ZBKB-NEXT: call bcmp
; CHECK-ALIGNED-RV32-ZBKB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBKB-LABEL: bcmp_size_1:
; CHECK-ALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBKB-NEXT: li a2, 1
; CHECK-ALIGNED-RV64-ZBKB-NEXT: call bcmp
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV32-V-LABEL: bcmp_size_1:
; CHECK-ALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-V-NEXT: li a2, 1
; CHECK-ALIGNED-RV32-V-NEXT: call bcmp
; CHECK-ALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-V-NEXT: ret
;
; CHECK-ALIGNED-RV64-V-LABEL: bcmp_size_1:
; CHECK-ALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-V-NEXT: li a2, 1
; CHECK-ALIGNED-RV64-V-NEXT: call bcmp
; CHECK-ALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-V-NEXT: ret
;
; CHECK-UNALIGNED-LABEL: bcmp_size_1:
; CHECK-UNALIGNED: # %bb.0: # %entry
; CHECK-UNALIGNED-NEXT: lbu a0, 0(a0)
; CHECK-UNALIGNED-NEXT: lbu a1, 0(a1)
; CHECK-UNALIGNED-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-NEXT: snez a0, a0
; CHECK-UNALIGNED-NEXT: ret
entry:
  %bcmp = call signext i32 @bcmp(ptr %s1, ptr %s2, iXLen 1)
  ret i32 %bcmp
}

define i32 @bcmp_size_2(ptr %s1, ptr %s2) nounwind optsize {
; CHECK-ALIGNED-RV32-LABEL: bcmp_size_2:
; CHECK-ALIGNED-RV32: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-NEXT: li a2, 2
; CHECK-ALIGNED-RV32-NEXT: call bcmp
; CHECK-ALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-NEXT: ret
;
; CHECK-ALIGNED-RV64-LABEL: bcmp_size_2:
; CHECK-ALIGNED-RV64: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-NEXT: li a2, 2
; CHECK-ALIGNED-RV64-NEXT: call bcmp
; CHECK-ALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBB-LABEL: bcmp_size_2:
; CHECK-ALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBB-NEXT: li a2, 2
; CHECK-ALIGNED-RV32-ZBB-NEXT: call bcmp
; CHECK-ALIGNED-RV32-ZBB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBB-LABEL: bcmp_size_2:
; CHECK-ALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBB-NEXT: li a2, 2
; CHECK-ALIGNED-RV64-ZBB-NEXT: call bcmp
; CHECK-ALIGNED-RV64-ZBB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBKB-LABEL: bcmp_size_2:
; CHECK-ALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBKB-NEXT: li a2, 2
; CHECK-ALIGNED-RV32-ZBKB-NEXT: call bcmp
; CHECK-ALIGNED-RV32-ZBKB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBKB-LABEL: bcmp_size_2:
; CHECK-ALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBKB-NEXT: li a2, 2
; CHECK-ALIGNED-RV64-ZBKB-NEXT: call bcmp
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV32-V-LABEL: bcmp_size_2:
; CHECK-ALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-V-NEXT: li a2, 2
; CHECK-ALIGNED-RV32-V-NEXT: call bcmp
; CHECK-ALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-V-NEXT: ret
;
; CHECK-ALIGNED-RV64-V-LABEL: bcmp_size_2:
; CHECK-ALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-V-NEXT: li a2, 2
; CHECK-ALIGNED-RV64-V-NEXT: call bcmp
; CHECK-ALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-V-NEXT: ret
;
; CHECK-UNALIGNED-LABEL: bcmp_size_2:
; CHECK-UNALIGNED: # %bb.0: # %entry
; CHECK-UNALIGNED-NEXT: lhu a0, 0(a0)
; CHECK-UNALIGNED-NEXT: lhu a1, 0(a1)
; CHECK-UNALIGNED-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-NEXT: snez a0, a0
; CHECK-UNALIGNED-NEXT: ret
entry:
  %bcmp = call signext i32 @bcmp(ptr %s1, ptr %s2, iXLen 2)
  ret i32 %bcmp
}

define i32 @bcmp_size_3(ptr %s1, ptr %s2) nounwind optsize {
; CHECK-ALIGNED-RV32-LABEL: bcmp_size_3:
; CHECK-ALIGNED-RV32: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-NEXT: li a2, 3
; CHECK-ALIGNED-RV32-NEXT: call bcmp
; CHECK-ALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-NEXT: ret
;
; CHECK-ALIGNED-RV64-LABEL: bcmp_size_3:
; CHECK-ALIGNED-RV64: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-NEXT: li a2, 3
; CHECK-ALIGNED-RV64-NEXT: call bcmp
; CHECK-ALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBB-LABEL: bcmp_size_3:
; CHECK-ALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBB-NEXT: li a2, 3
; CHECK-ALIGNED-RV32-ZBB-NEXT: call bcmp
; CHECK-ALIGNED-RV32-ZBB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBB-LABEL: bcmp_size_3:
; CHECK-ALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBB-NEXT: li a2, 3
; CHECK-ALIGNED-RV64-ZBB-NEXT: call bcmp
; CHECK-ALIGNED-RV64-ZBB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBKB-LABEL: bcmp_size_3:
; CHECK-ALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBKB-NEXT: li a2, 3
; CHECK-ALIGNED-RV32-ZBKB-NEXT: call bcmp
; CHECK-ALIGNED-RV32-ZBKB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBKB-LABEL: bcmp_size_3:
; CHECK-ALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBKB-NEXT: li a2, 3
; CHECK-ALIGNED-RV64-ZBKB-NEXT: call bcmp
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV32-V-LABEL: bcmp_size_3:
; CHECK-ALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-V-NEXT: li a2, 3
; CHECK-ALIGNED-RV32-V-NEXT: call bcmp
; CHECK-ALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-V-NEXT: ret
;
; CHECK-ALIGNED-RV64-V-LABEL: bcmp_size_3:
; CHECK-ALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-V-NEXT: li a2, 3
; CHECK-ALIGNED-RV64-V-NEXT: call bcmp
; CHECK-ALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-V-NEXT: ret
;
; CHECK-UNALIGNED-LABEL: bcmp_size_3:
; CHECK-UNALIGNED: # %bb.0: # %entry
; CHECK-UNALIGNED-NEXT: lhu a2, 0(a0)
; CHECK-UNALIGNED-NEXT: lbu a0, 2(a0)
; CHECK-UNALIGNED-NEXT: lhu a3, 0(a1)
; CHECK-UNALIGNED-NEXT: lbu a1, 2(a1)
; CHECK-UNALIGNED-NEXT: xor a2, a2, a3
; CHECK-UNALIGNED-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-NEXT: or a0, a2, a0
; CHECK-UNALIGNED-NEXT: snez a0, a0
; CHECK-UNALIGNED-NEXT: ret
entry:
  %bcmp = call signext i32 @bcmp(ptr %s1, ptr %s2, iXLen 3)
  ret i32 %bcmp
}

define i32 @bcmp_size_4(ptr %s1, ptr %s2) nounwind optsize {
; CHECK-ALIGNED-RV32-LABEL: bcmp_size_4:
; CHECK-ALIGNED-RV32: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-NEXT: li a2, 4
; CHECK-ALIGNED-RV32-NEXT: call bcmp
; CHECK-ALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-NEXT: ret
;
; CHECK-ALIGNED-RV64-LABEL: bcmp_size_4:
; CHECK-ALIGNED-RV64: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-NEXT: li a2, 4
; CHECK-ALIGNED-RV64-NEXT: call bcmp
; CHECK-ALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBB-LABEL: bcmp_size_4:
; CHECK-ALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBB-NEXT: li a2, 4
; CHECK-ALIGNED-RV32-ZBB-NEXT: call bcmp
; CHECK-ALIGNED-RV32-ZBB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBB-LABEL: bcmp_size_4:
; CHECK-ALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBB-NEXT: li a2, 4
; CHECK-ALIGNED-RV64-ZBB-NEXT: call bcmp
; CHECK-ALIGNED-RV64-ZBB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBKB-LABEL: bcmp_size_4:
; CHECK-ALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBKB-NEXT: li a2, 4
; CHECK-ALIGNED-RV32-ZBKB-NEXT: call bcmp
; CHECK-ALIGNED-RV32-ZBKB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBKB-LABEL: bcmp_size_4:
; CHECK-ALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBKB-NEXT: li a2, 4
; CHECK-ALIGNED-RV64-ZBKB-NEXT: call bcmp
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV32-V-LABEL: bcmp_size_4:
; CHECK-ALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-V-NEXT: li a2, 4
; CHECK-ALIGNED-RV32-V-NEXT: call bcmp
; CHECK-ALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-V-NEXT: ret
;
; CHECK-ALIGNED-RV64-V-LABEL: bcmp_size_4:
; CHECK-ALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-V-NEXT: li a2, 4
; CHECK-ALIGNED-RV64-V-NEXT: call bcmp
; CHECK-ALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-V-NEXT: ret
;
; CHECK-UNALIGNED-LABEL: bcmp_size_4:
; CHECK-UNALIGNED: # %bb.0: # %entry
; CHECK-UNALIGNED-NEXT: lw a0, 0(a0)
; CHECK-UNALIGNED-NEXT: lw a1, 0(a1)
; CHECK-UNALIGNED-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-NEXT: snez a0, a0
; CHECK-UNALIGNED-NEXT: ret
entry:
  %bcmp = call signext i32 @bcmp(ptr %s1, ptr %s2, iXLen 4)
  ret i32 %bcmp
}

define i32 @bcmp_size_5(ptr %s1, ptr %s2) nounwind optsize {
; CHECK-ALIGNED-RV32-LABEL: bcmp_size_5:
; CHECK-ALIGNED-RV32: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-NEXT: li a2, 5
; CHECK-ALIGNED-RV32-NEXT: call bcmp
; CHECK-ALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-NEXT: ret
;
; CHECK-ALIGNED-RV64-LABEL: bcmp_size_5:
; CHECK-ALIGNED-RV64: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-NEXT: li a2, 5
; CHECK-ALIGNED-RV64-NEXT: call bcmp
; CHECK-ALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBB-LABEL: bcmp_size_5:
; CHECK-ALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBB-NEXT: li a2, 5
; CHECK-ALIGNED-RV32-ZBB-NEXT: call bcmp
; CHECK-ALIGNED-RV32-ZBB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBB-LABEL: bcmp_size_5:
; CHECK-ALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBB-NEXT: li a2, 5
; CHECK-ALIGNED-RV64-ZBB-NEXT: call bcmp
; CHECK-ALIGNED-RV64-ZBB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBKB-LABEL: bcmp_size_5:
; CHECK-ALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBKB-NEXT: li a2, 5
; CHECK-ALIGNED-RV32-ZBKB-NEXT: call bcmp
; CHECK-ALIGNED-RV32-ZBKB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBKB-LABEL: bcmp_size_5:
; CHECK-ALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBKB-NEXT: li a2, 5
; CHECK-ALIGNED-RV64-ZBKB-NEXT: call bcmp
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV32-V-LABEL: bcmp_size_5:
; CHECK-ALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-V-NEXT: li a2, 5
; CHECK-ALIGNED-RV32-V-NEXT: call bcmp
; CHECK-ALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-V-NEXT: ret
;
; CHECK-ALIGNED-RV64-V-LABEL: bcmp_size_5:
; CHECK-ALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-V-NEXT: li a2, 5
; CHECK-ALIGNED-RV64-V-NEXT: call bcmp
; CHECK-ALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-V-NEXT: ret
;
; CHECK-UNALIGNED-RV32-LABEL: bcmp_size_5:
; CHECK-UNALIGNED-RV32: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-NEXT: lw a2, 0(a0)
; CHECK-UNALIGNED-RV32-NEXT: lbu a0, 4(a0)
; CHECK-UNALIGNED-RV32-NEXT: lw a3, 0(a1)
; CHECK-UNALIGNED-RV32-NEXT: lbu a1, 4(a1)
; CHECK-UNALIGNED-RV32-NEXT: xor a2, a2, a3
; CHECK-UNALIGNED-RV32-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-RV32-NEXT: or a0, a2, a0
; CHECK-UNALIGNED-RV32-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV32-NEXT: ret
;
; CHECK-UNALIGNED-RV64-LABEL: bcmp_size_5:
; CHECK-UNALIGNED-RV64: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-NEXT: lw a2, 0(a0)
; CHECK-UNALIGNED-RV64-NEXT: lbu a0, 4(a0)
; CHECK-UNALIGNED-RV64-NEXT: lw a3, 0(a1)
; CHECK-UNALIGNED-RV64-NEXT: lbu a1, 4(a1)
; CHECK-UNALIGNED-RV64-NEXT: xor a2, a2, a3
; CHECK-UNALIGNED-RV64-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-RV64-NEXT: or a0, a2, a0
; CHECK-UNALIGNED-RV64-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV64-NEXT: ret
;
; CHECK-UNALIGNED-RV32-ZBB-LABEL: bcmp_size_5:
; CHECK-UNALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a2, 0(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lbu a0, 4(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a3, 0(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lbu a1, 4(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: xor a2, a2, a3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-RV32-ZBB-NEXT: or a0, a2, a0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBB-LABEL: bcmp_size_5:
; CHECK-UNALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBB-NEXT: lw a2, 0(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: lbu a0, 4(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: lw a3, 0(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: lbu a1, 4(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: xor a2, a2, a3
; CHECK-UNALIGNED-RV64-ZBB-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: or a0, a2, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-UNALIGNED-RV32-ZBKB-LABEL: bcmp_size_5:
; CHECK-UNALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a2, 0(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lbu a0, 4(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a3, 0(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lbu a1, 4(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: xor a2, a2, a3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: or a0, a2, a0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBKB-LABEL: bcmp_size_5:
; CHECK-UNALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: lw a2, 0(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: lbu a0, 4(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: lw a3, 0(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: lbu a1, 4(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: xor a2, a2, a3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: or a0, a2, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-UNALIGNED-RV32-V-LABEL: bcmp_size_5:
; CHECK-UNALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-V-NEXT: vsetivli zero, 5, e8, mf2, ta, ma
; CHECK-UNALIGNED-RV32-V-NEXT: vle8.v v8, (a0)
; CHECK-UNALIGNED-RV32-V-NEXT: vle8.v v9, (a1)
; CHECK-UNALIGNED-RV32-V-NEXT: vmsne.vv v8, v8, v9
; CHECK-UNALIGNED-RV32-V-NEXT: vcpop.m a0, v8
; CHECK-UNALIGNED-RV32-V-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV32-V-NEXT: ret
;
; CHECK-UNALIGNED-RV64-V-LABEL: bcmp_size_5:
; CHECK-UNALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-V-NEXT: lw a2, 0(a0)
; CHECK-UNALIGNED-RV64-V-NEXT: lbu a0, 4(a0)
; CHECK-UNALIGNED-RV64-V-NEXT: lw a3, 0(a1)
; CHECK-UNALIGNED-RV64-V-NEXT: lbu a1, 4(a1)
; CHECK-UNALIGNED-RV64-V-NEXT: xor a2, a2, a3
; CHECK-UNALIGNED-RV64-V-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-RV64-V-NEXT: or a0, a2, a0
; CHECK-UNALIGNED-RV64-V-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV64-V-NEXT: ret
entry:
  %bcmp = call signext i32 @bcmp(ptr %s1, ptr %s2, iXLen 5)
  ret i32 %bcmp
}

define i32 @bcmp_size_6(ptr %s1, ptr %s2) nounwind optsize {
; CHECK-ALIGNED-RV32-LABEL: bcmp_size_6:
; CHECK-ALIGNED-RV32: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-NEXT: li a2, 6
; CHECK-ALIGNED-RV32-NEXT: call bcmp
; CHECK-ALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-NEXT: ret
;
; CHECK-ALIGNED-RV64-LABEL: bcmp_size_6:
; CHECK-ALIGNED-RV64: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-NEXT: li a2, 6
; CHECK-ALIGNED-RV64-NEXT: call bcmp
; CHECK-ALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBB-LABEL: bcmp_size_6:
; CHECK-ALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBB-NEXT: li a2, 6
; CHECK-ALIGNED-RV32-ZBB-NEXT: call bcmp
; CHECK-ALIGNED-RV32-ZBB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBB-LABEL: bcmp_size_6:
; CHECK-ALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBB-NEXT: li a2, 6
; CHECK-ALIGNED-RV64-ZBB-NEXT: call bcmp
; CHECK-ALIGNED-RV64-ZBB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBKB-LABEL: bcmp_size_6:
; CHECK-ALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBKB-NEXT: li a2, 6
; CHECK-ALIGNED-RV32-ZBKB-NEXT: call bcmp
; CHECK-ALIGNED-RV32-ZBKB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBKB-LABEL: bcmp_size_6:
; CHECK-ALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBKB-NEXT: li a2, 6
; CHECK-ALIGNED-RV64-ZBKB-NEXT: call bcmp
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV32-V-LABEL: bcmp_size_6:
; CHECK-ALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-V-NEXT: li a2, 6
; CHECK-ALIGNED-RV32-V-NEXT: call bcmp
; CHECK-ALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-V-NEXT: ret
;
; CHECK-ALIGNED-RV64-V-LABEL: bcmp_size_6:
; CHECK-ALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-V-NEXT: li a2, 6
; CHECK-ALIGNED-RV64-V-NEXT: call bcmp
; CHECK-ALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-V-NEXT: ret
;
; CHECK-UNALIGNED-RV32-LABEL: bcmp_size_6:
; CHECK-UNALIGNED-RV32: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-NEXT: lw a2, 0(a0)
; CHECK-UNALIGNED-RV32-NEXT: lhu a0, 4(a0)
; CHECK-UNALIGNED-RV32-NEXT: lw a3, 0(a1)
; CHECK-UNALIGNED-RV32-NEXT: lhu a1, 4(a1)
; CHECK-UNALIGNED-RV32-NEXT: xor a2, a2, a3
; CHECK-UNALIGNED-RV32-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-RV32-NEXT: or a0, a2, a0
; CHECK-UNALIGNED-RV32-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV32-NEXT: ret
;
; CHECK-UNALIGNED-RV64-LABEL: bcmp_size_6:
; CHECK-UNALIGNED-RV64: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-NEXT: lw a2, 0(a0)
; CHECK-UNALIGNED-RV64-NEXT: lhu a0, 4(a0)
; CHECK-UNALIGNED-RV64-NEXT: lw a3, 0(a1)
; CHECK-UNALIGNED-RV64-NEXT: lhu a1, 4(a1)
; CHECK-UNALIGNED-RV64-NEXT: xor a2, a2, a3
; CHECK-UNALIGNED-RV64-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-RV64-NEXT: or a0, a2, a0
; CHECK-UNALIGNED-RV64-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV64-NEXT: ret
;
; CHECK-UNALIGNED-RV32-ZBB-LABEL: bcmp_size_6:
; CHECK-UNALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a2, 0(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lhu a0, 4(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a3, 0(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lhu a1, 4(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: xor a2, a2, a3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-RV32-ZBB-NEXT: or a0, a2, a0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBB-LABEL: bcmp_size_6:
; CHECK-UNALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBB-NEXT: lw a2, 0(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: lhu a0, 4(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: lw a3, 0(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: lhu a1, 4(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: xor a2, a2, a3
; CHECK-UNALIGNED-RV64-ZBB-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: or a0, a2, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-UNALIGNED-RV32-ZBKB-LABEL: bcmp_size_6:
; CHECK-UNALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a2, 0(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lhu a0, 4(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a3, 0(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lhu a1, 4(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: xor a2, a2, a3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: or a0, a2, a0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBKB-LABEL: bcmp_size_6:
; CHECK-UNALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: lw a2, 0(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: lhu a0, 4(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: lw a3, 0(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: lhu a1, 4(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: xor a2, a2, a3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: or a0, a2, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-UNALIGNED-RV32-V-LABEL: bcmp_size_6:
; CHECK-UNALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-V-NEXT: vsetivli zero, 6, e8, mf2, ta, ma
; CHECK-UNALIGNED-RV32-V-NEXT: vle8.v v8, (a0)
; CHECK-UNALIGNED-RV32-V-NEXT: vle8.v v9, (a1)
; CHECK-UNALIGNED-RV32-V-NEXT: vmsne.vv v8, v8, v9
; CHECK-UNALIGNED-RV32-V-NEXT: vcpop.m a0, v8
; CHECK-UNALIGNED-RV32-V-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV32-V-NEXT: ret
;
; CHECK-UNALIGNED-RV64-V-LABEL: bcmp_size_6:
; CHECK-UNALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-V-NEXT: lw a2, 0(a0)
; CHECK-UNALIGNED-RV64-V-NEXT: lhu a0, 4(a0)
; CHECK-UNALIGNED-RV64-V-NEXT: lw a3, 0(a1)
; CHECK-UNALIGNED-RV64-V-NEXT: lhu a1, 4(a1)
; CHECK-UNALIGNED-RV64-V-NEXT: xor a2, a2, a3
; CHECK-UNALIGNED-RV64-V-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-RV64-V-NEXT: or a0, a2, a0
; CHECK-UNALIGNED-RV64-V-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV64-V-NEXT: ret
entry:
  %bcmp = call signext i32 @bcmp(ptr %s1, ptr %s2, iXLen 6)
  ret i32 %bcmp
}

define i32 @bcmp_size_7(ptr %s1, ptr %s2) nounwind optsize {
; CHECK-ALIGNED-RV32-LABEL: bcmp_size_7:
; CHECK-ALIGNED-RV32: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-NEXT: li a2, 7
; CHECK-ALIGNED-RV32-NEXT: call bcmp
; CHECK-ALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-NEXT: ret
;
; CHECK-ALIGNED-RV64-LABEL: bcmp_size_7:
; CHECK-ALIGNED-RV64: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-NEXT: li a2, 7
; CHECK-ALIGNED-RV64-NEXT: call bcmp
; CHECK-ALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBB-LABEL: bcmp_size_7:
; CHECK-ALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBB-NEXT: li a2, 7
; CHECK-ALIGNED-RV32-ZBB-NEXT: call bcmp
; CHECK-ALIGNED-RV32-ZBB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBB-LABEL: bcmp_size_7:
; CHECK-ALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBB-NEXT: li a2, 7
; CHECK-ALIGNED-RV64-ZBB-NEXT: call bcmp
; CHECK-ALIGNED-RV64-ZBB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBKB-LABEL: bcmp_size_7:
; CHECK-ALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBKB-NEXT: li a2, 7
; CHECK-ALIGNED-RV32-ZBKB-NEXT: call bcmp
; CHECK-ALIGNED-RV32-ZBKB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBKB-LABEL: bcmp_size_7:
; CHECK-ALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBKB-NEXT: li a2, 7
; CHECK-ALIGNED-RV64-ZBKB-NEXT: call bcmp
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV32-V-LABEL: bcmp_size_7:
; CHECK-ALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-V-NEXT: li a2, 7
; CHECK-ALIGNED-RV32-V-NEXT: call bcmp
; CHECK-ALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-V-NEXT: ret
;
; CHECK-ALIGNED-RV64-V-LABEL: bcmp_size_7:
; CHECK-ALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-V-NEXT: li a2, 7
; CHECK-ALIGNED-RV64-V-NEXT: call bcmp
; CHECK-ALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-V-NEXT: ret
;
; CHECK-UNALIGNED-RV32-LABEL: bcmp_size_7:
; CHECK-UNALIGNED-RV32: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-NEXT: lw a2, 0(a0)
; CHECK-UNALIGNED-RV32-NEXT: lw a0, 3(a0)
; CHECK-UNALIGNED-RV32-NEXT: lw a3, 0(a1)
; CHECK-UNALIGNED-RV32-NEXT: lw a1, 3(a1)
; CHECK-UNALIGNED-RV32-NEXT: xor a2, a2, a3
; CHECK-UNALIGNED-RV32-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-RV32-NEXT: or a0, a2, a0
; CHECK-UNALIGNED-RV32-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV32-NEXT: ret
;
; CHECK-UNALIGNED-RV64-LABEL: bcmp_size_7:
; CHECK-UNALIGNED-RV64: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-NEXT: lw a2, 0(a0)
; CHECK-UNALIGNED-RV64-NEXT: lw a0, 3(a0)
; CHECK-UNALIGNED-RV64-NEXT: lw a3, 0(a1)
; CHECK-UNALIGNED-RV64-NEXT: lw a1, 3(a1)
; CHECK-UNALIGNED-RV64-NEXT: xor a2, a2, a3
; CHECK-UNALIGNED-RV64-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-RV64-NEXT: or a0, a2, a0
; CHECK-UNALIGNED-RV64-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV64-NEXT: ret
;
; CHECK-UNALIGNED-RV32-ZBB-LABEL: bcmp_size_7:
; CHECK-UNALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a2, 0(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a0, 3(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a3, 0(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a1, 3(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: xor a2, a2, a3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-RV32-ZBB-NEXT: or a0, a2, a0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBB-LABEL: bcmp_size_7:
; CHECK-UNALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBB-NEXT: lw a2, 0(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: lw a0, 3(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: lw a3, 0(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: lw a1, 3(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: xor a2, a2, a3
; CHECK-UNALIGNED-RV64-ZBB-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: or a0, a2, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-UNALIGNED-RV32-ZBKB-LABEL: bcmp_size_7:
; CHECK-UNALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a2, 0(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a0, 3(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a3, 0(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a1, 3(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: xor a2, a2, a3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: or a0, a2, a0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBKB-LABEL: bcmp_size_7:
; CHECK-UNALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: lw a2, 0(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: lw a0, 3(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: lw a3, 0(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: lw a1, 3(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: xor a2, a2, a3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: or a0, a2, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-UNALIGNED-RV32-V-LABEL: bcmp_size_7:
; CHECK-UNALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-V-NEXT: vsetivli zero, 7, e8, mf2, ta, ma
; CHECK-UNALIGNED-RV32-V-NEXT: vle8.v v8, (a0)
; CHECK-UNALIGNED-RV32-V-NEXT: vle8.v v9, (a1)
; CHECK-UNALIGNED-RV32-V-NEXT: vmsne.vv v8, v8, v9
; CHECK-UNALIGNED-RV32-V-NEXT: vcpop.m a0, v8
; CHECK-UNALIGNED-RV32-V-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV32-V-NEXT: ret
;
; CHECK-UNALIGNED-RV64-V-LABEL: bcmp_size_7:
; CHECK-UNALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-V-NEXT: lw a2, 0(a0)
; CHECK-UNALIGNED-RV64-V-NEXT: lw a0, 3(a0)
; CHECK-UNALIGNED-RV64-V-NEXT: lw a3, 0(a1)
; CHECK-UNALIGNED-RV64-V-NEXT: lw a1, 3(a1)
; CHECK-UNALIGNED-RV64-V-NEXT: xor a2, a2, a3
; CHECK-UNALIGNED-RV64-V-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-RV64-V-NEXT: or a0, a2, a0
; CHECK-UNALIGNED-RV64-V-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV64-V-NEXT: ret
entry:
  %bcmp = call signext i32 @bcmp(ptr %s1, ptr %s2, iXLen 7)
  ret i32 %bcmp
}

define i32 @bcmp_size_8(ptr %s1, ptr %s2) nounwind optsize {
; CHECK-ALIGNED-RV32-LABEL: bcmp_size_8:
; CHECK-ALIGNED-RV32: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-NEXT: li a2, 8
; CHECK-ALIGNED-RV32-NEXT: call bcmp
; CHECK-ALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-NEXT: ret
;
; CHECK-ALIGNED-RV64-LABEL: bcmp_size_8:
; CHECK-ALIGNED-RV64: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-NEXT: li a2, 8
; CHECK-ALIGNED-RV64-NEXT: call bcmp
; CHECK-ALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBB-LABEL: bcmp_size_8:
; CHECK-ALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBB-NEXT: li a2, 8
; CHECK-ALIGNED-RV32-ZBB-NEXT: call bcmp
; CHECK-ALIGNED-RV32-ZBB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBB-LABEL: bcmp_size_8:
; CHECK-ALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBB-NEXT: li a2, 8
; CHECK-ALIGNED-RV64-ZBB-NEXT: call bcmp
; CHECK-ALIGNED-RV64-ZBB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBKB-LABEL: bcmp_size_8:
; CHECK-ALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBKB-NEXT: li a2, 8
; CHECK-ALIGNED-RV32-ZBKB-NEXT: call bcmp
; CHECK-ALIGNED-RV32-ZBKB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBKB-LABEL: bcmp_size_8:
; CHECK-ALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBKB-NEXT: li a2, 8
; CHECK-ALIGNED-RV64-ZBKB-NEXT: call bcmp
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV32-V-LABEL: bcmp_size_8:
; CHECK-ALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-V-NEXT: li a2, 8
; CHECK-ALIGNED-RV32-V-NEXT: call bcmp
; CHECK-ALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-V-NEXT: ret
;
; CHECK-ALIGNED-RV64-V-LABEL: bcmp_size_8:
; CHECK-ALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-V-NEXT: li a2, 8
; CHECK-ALIGNED-RV64-V-NEXT: call bcmp
; CHECK-ALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-V-NEXT: ret
;
; CHECK-UNALIGNED-RV32-LABEL: bcmp_size_8:
; CHECK-UNALIGNED-RV32: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-NEXT: lw a2, 0(a0)
; CHECK-UNALIGNED-RV32-NEXT: lw a0, 4(a0)
; CHECK-UNALIGNED-RV32-NEXT: lw a3, 0(a1)
; CHECK-UNALIGNED-RV32-NEXT: lw a1, 4(a1)
; CHECK-UNALIGNED-RV32-NEXT: xor a2, a2, a3
; CHECK-UNALIGNED-RV32-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-RV32-NEXT: or a0, a2, a0
; CHECK-UNALIGNED-RV32-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV32-NEXT: ret
;
; CHECK-UNALIGNED-RV64-LABEL: bcmp_size_8:
; CHECK-UNALIGNED-RV64: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-NEXT: ld a0, 0(a0)
; CHECK-UNALIGNED-RV64-NEXT: ld a1, 0(a1)
; CHECK-UNALIGNED-RV64-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-RV64-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV64-NEXT: ret
;
; CHECK-UNALIGNED-RV32-ZBB-LABEL: bcmp_size_8:
; CHECK-UNALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a2, 0(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a0, 4(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a3, 0(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a1, 4(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: xor a2, a2, a3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-RV32-ZBB-NEXT: or a0, a2, a0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBB-LABEL: bcmp_size_8:
; CHECK-UNALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a0, 0(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a1, 0(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-UNALIGNED-RV32-ZBKB-LABEL: bcmp_size_8:
; CHECK-UNALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a2, 0(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a0, 4(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a3, 0(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a1, 4(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: xor a2, a2, a3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: or a0, a2, a0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBKB-LABEL: bcmp_size_8:
; CHECK-UNALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a0, 0(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a1, 0(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-UNALIGNED-RV32-V-LABEL: bcmp_size_8:
; CHECK-UNALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-V-NEXT: vsetivli zero, 8, e8, mf2, ta, ma
; CHECK-UNALIGNED-RV32-V-NEXT: vle8.v v8, (a0)
; CHECK-UNALIGNED-RV32-V-NEXT: vle8.v v9, (a1)
; CHECK-UNALIGNED-RV32-V-NEXT: vmsne.vv v8, v8, v9
; CHECK-UNALIGNED-RV32-V-NEXT: vcpop.m a0, v8
; CHECK-UNALIGNED-RV32-V-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV32-V-NEXT: ret
;
; CHECK-UNALIGNED-RV64-V-LABEL: bcmp_size_8:
; CHECK-UNALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-V-NEXT: ld a0, 0(a0)
; CHECK-UNALIGNED-RV64-V-NEXT: ld a1, 0(a1)
; CHECK-UNALIGNED-RV64-V-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-RV64-V-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV64-V-NEXT: ret
entry:
  %bcmp = call signext i32 @bcmp(ptr %s1, ptr %s2, iXLen 8)
  ret i32 %bcmp
}

define i32 @bcmp_size_15(ptr %s1, ptr %s2) nounwind optsize {
; CHECK-ALIGNED-RV32-LABEL: bcmp_size_15:
; CHECK-ALIGNED-RV32: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-NEXT: li a2, 15
; CHECK-ALIGNED-RV32-NEXT: call bcmp
; CHECK-ALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-NEXT: ret
;
; CHECK-ALIGNED-RV64-LABEL: bcmp_size_15:
; CHECK-ALIGNED-RV64: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-NEXT: li a2, 15
; CHECK-ALIGNED-RV64-NEXT: call bcmp
; CHECK-ALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBB-LABEL: bcmp_size_15:
; CHECK-ALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBB-NEXT: li a2, 15
; CHECK-ALIGNED-RV32-ZBB-NEXT: call bcmp
; CHECK-ALIGNED-RV32-ZBB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBB-LABEL: bcmp_size_15:
; CHECK-ALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBB-NEXT: li a2, 15
; CHECK-ALIGNED-RV64-ZBB-NEXT: call bcmp
; CHECK-ALIGNED-RV64-ZBB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBKB-LABEL: bcmp_size_15:
; CHECK-ALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBKB-NEXT: li a2, 15
; CHECK-ALIGNED-RV32-ZBKB-NEXT: call bcmp
; CHECK-ALIGNED-RV32-ZBKB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBKB-LABEL: bcmp_size_15:
; CHECK-ALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBKB-NEXT: li a2, 15
; CHECK-ALIGNED-RV64-ZBKB-NEXT: call bcmp
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV32-V-LABEL: bcmp_size_15:
; CHECK-ALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-V-NEXT: li a2, 15
; CHECK-ALIGNED-RV32-V-NEXT: call bcmp
; CHECK-ALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-V-NEXT: ret
;
; CHECK-ALIGNED-RV64-V-LABEL: bcmp_size_15:
; CHECK-ALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-V-NEXT: li a2, 15
; CHECK-ALIGNED-RV64-V-NEXT: call bcmp
; CHECK-ALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-V-NEXT: ret
;
; CHECK-UNALIGNED-RV32-LABEL: bcmp_size_15:
; CHECK-UNALIGNED-RV32: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-NEXT: lw a2, 0(a0)
; CHECK-UNALIGNED-RV32-NEXT: lw a3, 4(a0)
; CHECK-UNALIGNED-RV32-NEXT: lw a4, 8(a0)
; CHECK-UNALIGNED-RV32-NEXT: lw a0, 11(a0)
; CHECK-UNALIGNED-RV32-NEXT: lw a5, 0(a1)
; CHECK-UNALIGNED-RV32-NEXT: lw a6, 4(a1)
; CHECK-UNALIGNED-RV32-NEXT: lw a7, 8(a1)
; CHECK-UNALIGNED-RV32-NEXT: lw a1, 11(a1)
; CHECK-UNALIGNED-RV32-NEXT: xor a2, a2, a5
; CHECK-UNALIGNED-RV32-NEXT: xor a3, a3, a6
; CHECK-UNALIGNED-RV32-NEXT: xor a4, a4, a7
; CHECK-UNALIGNED-RV32-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-RV32-NEXT: or a2, a2, a3
; CHECK-UNALIGNED-RV32-NEXT: or a0, a4, a0
; CHECK-UNALIGNED-RV32-NEXT: or a0, a2, a0
; CHECK-UNALIGNED-RV32-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV32-NEXT: ret
;
; CHECK-UNALIGNED-RV64-LABEL: bcmp_size_15:
; CHECK-UNALIGNED-RV64: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-NEXT: ld a2, 0(a0)
; CHECK-UNALIGNED-RV64-NEXT: ld a0, 7(a0)
; CHECK-UNALIGNED-RV64-NEXT: ld a3, 0(a1)
; CHECK-UNALIGNED-RV64-NEXT: ld a1, 7(a1)
; CHECK-UNALIGNED-RV64-NEXT: xor a2, a2, a3
; CHECK-UNALIGNED-RV64-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-RV64-NEXT: or a0, a2, a0
; CHECK-UNALIGNED-RV64-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV64-NEXT: ret
;
; CHECK-UNALIGNED-RV32-ZBB-LABEL: bcmp_size_15:
; CHECK-UNALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a2, 0(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a3, 4(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a4, 8(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a0, 11(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a5, 0(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a6, 4(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a7, 8(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a1, 11(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: xor a2, a2, a5
; CHECK-UNALIGNED-RV32-ZBB-NEXT: xor a3, a3, a6
; CHECK-UNALIGNED-RV32-ZBB-NEXT: xor a4, a4, a7
; CHECK-UNALIGNED-RV32-ZBB-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-RV32-ZBB-NEXT: or a2, a2, a3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: or a0, a4, a0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: or a0, a2, a0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBB-LABEL: bcmp_size_15:
; CHECK-UNALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a2, 0(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a0, 7(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a3, 0(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a1, 7(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: xor a2, a2, a3
; CHECK-UNALIGNED-RV64-ZBB-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: or a0, a2, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-UNALIGNED-RV32-ZBKB-LABEL: bcmp_size_15:
; CHECK-UNALIGNED-RV32-ZBKB: # %bb.0: # %entry
|
|
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a2, 0(a0)
|
|
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a3, 4(a0)
|
|
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a4, 8(a0)
|
|
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a0, 11(a0)
|
|
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a5, 0(a1)
|
|
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a6, 4(a1)
|
|
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a7, 8(a1)
|
|
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a1, 11(a1)
|
|
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: xor a2, a2, a5
|
|
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: xor a3, a3, a6
|
|
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: xor a4, a4, a7
|
|
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: xor a0, a0, a1
|
|
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: or a2, a2, a3
|
|
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: or a0, a4, a0
|
|
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: or a0, a2, a0
|
|
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: snez a0, a0
|
|
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: ret
|
|
;
|
|
; CHECK-UNALIGNED-RV64-ZBKB-LABEL: bcmp_size_15:
|
|
; CHECK-UNALIGNED-RV64-ZBKB: # %bb.0: # %entry
|
|
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a2, 0(a0)
|
|
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a0, 7(a0)
|
|
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a3, 0(a1)
|
|
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a1, 7(a1)
|
|
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: xor a2, a2, a3
|
|
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: xor a0, a0, a1
|
|
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: or a0, a2, a0
|
|
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: snez a0, a0
|
|
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ret
|
|
;
|
|
; CHECK-UNALIGNED-RV32-V-LABEL: bcmp_size_15:
|
|
; CHECK-UNALIGNED-RV32-V: # %bb.0: # %entry
|
|
; CHECK-UNALIGNED-RV32-V-NEXT: vsetivli zero, 15, e8, m1, ta, ma
|
|
; CHECK-UNALIGNED-RV32-V-NEXT: vle8.v v8, (a0)
|
|
; CHECK-UNALIGNED-RV32-V-NEXT: vle8.v v9, (a1)
|
|
; CHECK-UNALIGNED-RV32-V-NEXT: vmsne.vv v8, v8, v9
|
|
; CHECK-UNALIGNED-RV32-V-NEXT: vcpop.m a0, v8
|
|
; CHECK-UNALIGNED-RV32-V-NEXT: snez a0, a0
|
|
; CHECK-UNALIGNED-RV32-V-NEXT: ret
|
|
;
|
|
; CHECK-UNALIGNED-RV64-V-LABEL: bcmp_size_15:
|
|
; CHECK-UNALIGNED-RV64-V: # %bb.0: # %entry
|
|
; CHECK-UNALIGNED-RV64-V-NEXT: vsetivli zero, 15, e8, m1, ta, ma
|
|
; CHECK-UNALIGNED-RV64-V-NEXT: vle8.v v8, (a0)
|
|
; CHECK-UNALIGNED-RV64-V-NEXT: vle8.v v9, (a1)
|
|
; CHECK-UNALIGNED-RV64-V-NEXT: vmsne.vv v8, v8, v9
|
|
; CHECK-UNALIGNED-RV64-V-NEXT: vcpop.m a0, v8
|
|
; CHECK-UNALIGNED-RV64-V-NEXT: snez a0, a0
|
|
; CHECK-UNALIGNED-RV64-V-NEXT: ret
|
|
entry:
|
|
%bcmp = call signext i32 @bcmp(ptr %s1, ptr %s2, iXLen 15)
|
|
ret i32 %bcmp
|
|
}
|
|
|
|
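; Size 16: four word loads on unaligned RV32, two doubleword loads on
; unaligned RV64; the vector configurations compare all 16 bytes under a
; single vsetivli.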
define i32 @bcmp_size_16(ptr %s1, ptr %s2) nounwind optsize {
; CHECK-ALIGNED-RV32-LABEL: bcmp_size_16:
; CHECK-ALIGNED-RV32: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-NEXT: li a2, 16
; CHECK-ALIGNED-RV32-NEXT: call bcmp
; CHECK-ALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-NEXT: ret
;
; CHECK-ALIGNED-RV64-LABEL: bcmp_size_16:
; CHECK-ALIGNED-RV64: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-NEXT: li a2, 16
; CHECK-ALIGNED-RV64-NEXT: call bcmp
; CHECK-ALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBB-LABEL: bcmp_size_16:
; CHECK-ALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBB-NEXT: li a2, 16
; CHECK-ALIGNED-RV32-ZBB-NEXT: call bcmp
; CHECK-ALIGNED-RV32-ZBB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBB-LABEL: bcmp_size_16:
; CHECK-ALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBB-NEXT: li a2, 16
; CHECK-ALIGNED-RV64-ZBB-NEXT: call bcmp
; CHECK-ALIGNED-RV64-ZBB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBKB-LABEL: bcmp_size_16:
; CHECK-ALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBKB-NEXT: li a2, 16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: call bcmp
; CHECK-ALIGNED-RV32-ZBKB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBKB-LABEL: bcmp_size_16:
; CHECK-ALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBKB-NEXT: li a2, 16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: call bcmp
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV32-V-LABEL: bcmp_size_16:
; CHECK-ALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-V-NEXT: li a2, 16
; CHECK-ALIGNED-RV32-V-NEXT: call bcmp
; CHECK-ALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-V-NEXT: ret
;
; CHECK-ALIGNED-RV64-V-LABEL: bcmp_size_16:
; CHECK-ALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-V-NEXT: li a2, 16
; CHECK-ALIGNED-RV64-V-NEXT: call bcmp
; CHECK-ALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-V-NEXT: ret
;
; CHECK-UNALIGNED-RV32-LABEL: bcmp_size_16:
; CHECK-UNALIGNED-RV32: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-NEXT: lw a2, 0(a0)
; CHECK-UNALIGNED-RV32-NEXT: lw a3, 4(a0)
; CHECK-UNALIGNED-RV32-NEXT: lw a4, 8(a0)
; CHECK-UNALIGNED-RV32-NEXT: lw a0, 12(a0)
; CHECK-UNALIGNED-RV32-NEXT: lw a5, 0(a1)
; CHECK-UNALIGNED-RV32-NEXT: lw a6, 4(a1)
; CHECK-UNALIGNED-RV32-NEXT: lw a7, 8(a1)
; CHECK-UNALIGNED-RV32-NEXT: lw a1, 12(a1)
; CHECK-UNALIGNED-RV32-NEXT: xor a2, a2, a5
; CHECK-UNALIGNED-RV32-NEXT: xor a3, a3, a6
; CHECK-UNALIGNED-RV32-NEXT: xor a4, a4, a7
; CHECK-UNALIGNED-RV32-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-RV32-NEXT: or a2, a2, a3
; CHECK-UNALIGNED-RV32-NEXT: or a0, a4, a0
; CHECK-UNALIGNED-RV32-NEXT: or a0, a2, a0
; CHECK-UNALIGNED-RV32-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV32-NEXT: ret
;
; CHECK-UNALIGNED-RV64-LABEL: bcmp_size_16:
; CHECK-UNALIGNED-RV64: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-NEXT: ld a2, 0(a0)
; CHECK-UNALIGNED-RV64-NEXT: ld a0, 8(a0)
; CHECK-UNALIGNED-RV64-NEXT: ld a3, 0(a1)
; CHECK-UNALIGNED-RV64-NEXT: ld a1, 8(a1)
; CHECK-UNALIGNED-RV64-NEXT: xor a2, a2, a3
; CHECK-UNALIGNED-RV64-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-RV64-NEXT: or a0, a2, a0
; CHECK-UNALIGNED-RV64-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV64-NEXT: ret
;
; CHECK-UNALIGNED-RV32-ZBB-LABEL: bcmp_size_16:
; CHECK-UNALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a2, 0(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a3, 4(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a4, 8(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a0, 12(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a5, 0(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a6, 4(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a7, 8(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a1, 12(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: xor a2, a2, a5
; CHECK-UNALIGNED-RV32-ZBB-NEXT: xor a3, a3, a6
; CHECK-UNALIGNED-RV32-ZBB-NEXT: xor a4, a4, a7
; CHECK-UNALIGNED-RV32-ZBB-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-RV32-ZBB-NEXT: or a2, a2, a3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: or a0, a4, a0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: or a0, a2, a0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBB-LABEL: bcmp_size_16:
; CHECK-UNALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a2, 0(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a0, 8(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a3, 0(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a1, 8(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: xor a2, a2, a3
; CHECK-UNALIGNED-RV64-ZBB-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: or a0, a2, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-UNALIGNED-RV32-ZBKB-LABEL: bcmp_size_16:
; CHECK-UNALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a2, 0(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a3, 4(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a4, 8(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a0, 12(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a5, 0(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a6, 4(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a7, 8(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a1, 12(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: xor a2, a2, a5
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: xor a3, a3, a6
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: xor a4, a4, a7
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: or a2, a2, a3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: or a0, a4, a0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: or a0, a2, a0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBKB-LABEL: bcmp_size_16:
; CHECK-UNALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a2, 0(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a0, 8(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a3, 0(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a1, 8(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: xor a2, a2, a3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: or a0, a2, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-UNALIGNED-RV32-V-LABEL: bcmp_size_16:
; CHECK-UNALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-V-NEXT: vsetivli zero, 16, e8, m1, ta, ma
; CHECK-UNALIGNED-RV32-V-NEXT: vle8.v v8, (a0)
; CHECK-UNALIGNED-RV32-V-NEXT: vle8.v v9, (a1)
; CHECK-UNALIGNED-RV32-V-NEXT: vmsne.vv v8, v8, v9
; CHECK-UNALIGNED-RV32-V-NEXT: vcpop.m a0, v8
; CHECK-UNALIGNED-RV32-V-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV32-V-NEXT: ret
;
; CHECK-UNALIGNED-RV64-V-LABEL: bcmp_size_16:
; CHECK-UNALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-V-NEXT: vsetivli zero, 16, e8, m1, ta, ma
; CHECK-UNALIGNED-RV64-V-NEXT: vle8.v v8, (a0)
; CHECK-UNALIGNED-RV64-V-NEXT: vle8.v v9, (a1)
; CHECK-UNALIGNED-RV64-V-NEXT: vmsne.vv v8, v8, v9
; CHECK-UNALIGNED-RV64-V-NEXT: vcpop.m a0, v8
; CHECK-UNALIGNED-RV64-V-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV64-V-NEXT: ret
entry:
  %bcmp = call signext i32 @bcmp(ptr %s1, ptr %s2, iXLen 16)
  ret i32 %bcmp
}

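; Size 31: unaligned RV32 now falls back to the libcall, while unaligned RV64
; still expands inline with an overlapping ld at offset 23; the vector
; expansion grows to LMUL=2.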
define i32 @bcmp_size_31(ptr %s1, ptr %s2) nounwind optsize {
; CHECK-ALIGNED-RV32-LABEL: bcmp_size_31:
; CHECK-ALIGNED-RV32: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-NEXT: li a2, 31
; CHECK-ALIGNED-RV32-NEXT: call bcmp
; CHECK-ALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-NEXT: ret
;
; CHECK-ALIGNED-RV64-LABEL: bcmp_size_31:
; CHECK-ALIGNED-RV64: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-NEXT: li a2, 31
; CHECK-ALIGNED-RV64-NEXT: call bcmp
; CHECK-ALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBB-LABEL: bcmp_size_31:
; CHECK-ALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBB-NEXT: li a2, 31
; CHECK-ALIGNED-RV32-ZBB-NEXT: call bcmp
; CHECK-ALIGNED-RV32-ZBB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBB-LABEL: bcmp_size_31:
; CHECK-ALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBB-NEXT: li a2, 31
; CHECK-ALIGNED-RV64-ZBB-NEXT: call bcmp
; CHECK-ALIGNED-RV64-ZBB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBKB-LABEL: bcmp_size_31:
; CHECK-ALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBKB-NEXT: li a2, 31
; CHECK-ALIGNED-RV32-ZBKB-NEXT: call bcmp
; CHECK-ALIGNED-RV32-ZBKB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBKB-LABEL: bcmp_size_31:
; CHECK-ALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBKB-NEXT: li a2, 31
; CHECK-ALIGNED-RV64-ZBKB-NEXT: call bcmp
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV32-V-LABEL: bcmp_size_31:
; CHECK-ALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-V-NEXT: li a2, 31
; CHECK-ALIGNED-RV32-V-NEXT: call bcmp
; CHECK-ALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-V-NEXT: ret
;
; CHECK-ALIGNED-RV64-V-LABEL: bcmp_size_31:
; CHECK-ALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-V-NEXT: li a2, 31
; CHECK-ALIGNED-RV64-V-NEXT: call bcmp
; CHECK-ALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-V-NEXT: ret
;
; CHECK-UNALIGNED-RV32-LABEL: bcmp_size_31:
; CHECK-UNALIGNED-RV32: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-UNALIGNED-RV32-NEXT: li a2, 31
; CHECK-UNALIGNED-RV32-NEXT: call bcmp
; CHECK-UNALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-UNALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV32-NEXT: ret
;
; CHECK-UNALIGNED-RV64-LABEL: bcmp_size_31:
; CHECK-UNALIGNED-RV64: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-NEXT: ld a2, 0(a0)
; CHECK-UNALIGNED-RV64-NEXT: ld a3, 8(a0)
; CHECK-UNALIGNED-RV64-NEXT: ld a4, 16(a0)
; CHECK-UNALIGNED-RV64-NEXT: ld a0, 23(a0)
; CHECK-UNALIGNED-RV64-NEXT: ld a5, 0(a1)
; CHECK-UNALIGNED-RV64-NEXT: ld a6, 8(a1)
; CHECK-UNALIGNED-RV64-NEXT: ld a7, 16(a1)
; CHECK-UNALIGNED-RV64-NEXT: ld a1, 23(a1)
; CHECK-UNALIGNED-RV64-NEXT: xor a2, a2, a5
; CHECK-UNALIGNED-RV64-NEXT: xor a3, a3, a6
; CHECK-UNALIGNED-RV64-NEXT: xor a4, a4, a7
; CHECK-UNALIGNED-RV64-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-RV64-NEXT: or a2, a2, a3
; CHECK-UNALIGNED-RV64-NEXT: or a0, a4, a0
; CHECK-UNALIGNED-RV64-NEXT: or a0, a2, a0
; CHECK-UNALIGNED-RV64-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV64-NEXT: ret
;
; CHECK-UNALIGNED-RV32-ZBB-LABEL: bcmp_size_31:
; CHECK-UNALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBB-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV32-ZBB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-UNALIGNED-RV32-ZBB-NEXT: li a2, 31
; CHECK-UNALIGNED-RV32-ZBB-NEXT: call bcmp
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-UNALIGNED-RV32-ZBB-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBB-LABEL: bcmp_size_31:
; CHECK-UNALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a2, 0(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a3, 8(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a4, 16(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a0, 23(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a5, 0(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a6, 8(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a7, 16(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a1, 23(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: xor a2, a2, a5
; CHECK-UNALIGNED-RV64-ZBB-NEXT: xor a3, a3, a6
; CHECK-UNALIGNED-RV64-ZBB-NEXT: xor a4, a4, a7
; CHECK-UNALIGNED-RV64-ZBB-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: or a2, a2, a3
; CHECK-UNALIGNED-RV64-ZBB-NEXT: or a0, a4, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: or a0, a2, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-UNALIGNED-RV32-ZBKB-LABEL: bcmp_size_31:
; CHECK-UNALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: li a2, 31
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: call bcmp
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBKB-LABEL: bcmp_size_31:
; CHECK-UNALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a2, 0(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a3, 8(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a4, 16(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a0, 23(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a5, 0(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a6, 8(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a7, 16(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a1, 23(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: xor a2, a2, a5
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: xor a3, a3, a6
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: xor a4, a4, a7
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: or a2, a2, a3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: or a0, a4, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: or a0, a2, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-UNALIGNED-RV32-V-LABEL: bcmp_size_31:
; CHECK-UNALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-V-NEXT: vsetivli zero, 31, e8, m2, ta, ma
; CHECK-UNALIGNED-RV32-V-NEXT: vle8.v v8, (a0)
; CHECK-UNALIGNED-RV32-V-NEXT: vle8.v v10, (a1)
; CHECK-UNALIGNED-RV32-V-NEXT: vmsne.vv v12, v8, v10
; CHECK-UNALIGNED-RV32-V-NEXT: vcpop.m a0, v12
; CHECK-UNALIGNED-RV32-V-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV32-V-NEXT: ret
;
; CHECK-UNALIGNED-RV64-V-LABEL: bcmp_size_31:
; CHECK-UNALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-V-NEXT: vsetivli zero, 31, e8, m2, ta, ma
; CHECK-UNALIGNED-RV64-V-NEXT: vle8.v v8, (a0)
; CHECK-UNALIGNED-RV64-V-NEXT: vle8.v v10, (a1)
; CHECK-UNALIGNED-RV64-V-NEXT: vmsne.vv v12, v8, v10
; CHECK-UNALIGNED-RV64-V-NEXT: vcpop.m a0, v12
; CHECK-UNALIGNED-RV64-V-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV64-V-NEXT: ret
entry:
  %bcmp = call signext i32 @bcmp(ptr %s1, ptr %s2, iXLen 31)
  ret i32 %bcmp
}

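; Size 32: the last size in this file where the unaligned RV64 scalar
; expansion still fires (four doubleword loads). A VL of 32 no longer fits
; vsetivli's 5-bit immediate, so the vector configurations materialize it
; with li.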
define i32 @bcmp_size_32(ptr %s1, ptr %s2) nounwind optsize {
; CHECK-ALIGNED-RV32-LABEL: bcmp_size_32:
; CHECK-ALIGNED-RV32: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-NEXT: li a2, 32
; CHECK-ALIGNED-RV32-NEXT: call bcmp
; CHECK-ALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-NEXT: ret
;
; CHECK-ALIGNED-RV64-LABEL: bcmp_size_32:
; CHECK-ALIGNED-RV64: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-NEXT: li a2, 32
; CHECK-ALIGNED-RV64-NEXT: call bcmp
; CHECK-ALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBB-LABEL: bcmp_size_32:
; CHECK-ALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBB-NEXT: li a2, 32
; CHECK-ALIGNED-RV32-ZBB-NEXT: call bcmp
; CHECK-ALIGNED-RV32-ZBB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBB-LABEL: bcmp_size_32:
; CHECK-ALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBB-NEXT: li a2, 32
; CHECK-ALIGNED-RV64-ZBB-NEXT: call bcmp
; CHECK-ALIGNED-RV64-ZBB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBKB-LABEL: bcmp_size_32:
; CHECK-ALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBKB-NEXT: li a2, 32
; CHECK-ALIGNED-RV32-ZBKB-NEXT: call bcmp
; CHECK-ALIGNED-RV32-ZBKB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBKB-LABEL: bcmp_size_32:
; CHECK-ALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBKB-NEXT: li a2, 32
; CHECK-ALIGNED-RV64-ZBKB-NEXT: call bcmp
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV32-V-LABEL: bcmp_size_32:
; CHECK-ALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-V-NEXT: li a2, 32
; CHECK-ALIGNED-RV32-V-NEXT: call bcmp
; CHECK-ALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-V-NEXT: ret
;
; CHECK-ALIGNED-RV64-V-LABEL: bcmp_size_32:
; CHECK-ALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-V-NEXT: li a2, 32
; CHECK-ALIGNED-RV64-V-NEXT: call bcmp
; CHECK-ALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-V-NEXT: ret
;
; CHECK-UNALIGNED-RV32-LABEL: bcmp_size_32:
; CHECK-UNALIGNED-RV32: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-UNALIGNED-RV32-NEXT: li a2, 32
; CHECK-UNALIGNED-RV32-NEXT: call bcmp
; CHECK-UNALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-UNALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV32-NEXT: ret
;
; CHECK-UNALIGNED-RV64-LABEL: bcmp_size_32:
; CHECK-UNALIGNED-RV64: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-NEXT: ld a2, 0(a0)
; CHECK-UNALIGNED-RV64-NEXT: ld a3, 8(a0)
; CHECK-UNALIGNED-RV64-NEXT: ld a4, 16(a0)
; CHECK-UNALIGNED-RV64-NEXT: ld a0, 24(a0)
; CHECK-UNALIGNED-RV64-NEXT: ld a5, 0(a1)
; CHECK-UNALIGNED-RV64-NEXT: ld a6, 8(a1)
; CHECK-UNALIGNED-RV64-NEXT: ld a7, 16(a1)
; CHECK-UNALIGNED-RV64-NEXT: ld a1, 24(a1)
; CHECK-UNALIGNED-RV64-NEXT: xor a2, a2, a5
; CHECK-UNALIGNED-RV64-NEXT: xor a3, a3, a6
; CHECK-UNALIGNED-RV64-NEXT: xor a4, a4, a7
; CHECK-UNALIGNED-RV64-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-RV64-NEXT: or a2, a2, a3
; CHECK-UNALIGNED-RV64-NEXT: or a0, a4, a0
; CHECK-UNALIGNED-RV64-NEXT: or a0, a2, a0
; CHECK-UNALIGNED-RV64-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV64-NEXT: ret
;
; CHECK-UNALIGNED-RV32-ZBB-LABEL: bcmp_size_32:
; CHECK-UNALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBB-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV32-ZBB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-UNALIGNED-RV32-ZBB-NEXT: li a2, 32
; CHECK-UNALIGNED-RV32-ZBB-NEXT: call bcmp
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-UNALIGNED-RV32-ZBB-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBB-LABEL: bcmp_size_32:
; CHECK-UNALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a2, 0(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a3, 8(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a4, 16(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a0, 24(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a5, 0(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a6, 8(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a7, 16(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a1, 24(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: xor a2, a2, a5
; CHECK-UNALIGNED-RV64-ZBB-NEXT: xor a3, a3, a6
; CHECK-UNALIGNED-RV64-ZBB-NEXT: xor a4, a4, a7
; CHECK-UNALIGNED-RV64-ZBB-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: or a2, a2, a3
; CHECK-UNALIGNED-RV64-ZBB-NEXT: or a0, a4, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: or a0, a2, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-UNALIGNED-RV32-ZBKB-LABEL: bcmp_size_32:
; CHECK-UNALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: li a2, 32
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: call bcmp
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBKB-LABEL: bcmp_size_32:
; CHECK-UNALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a2, 0(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a3, 8(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a4, 16(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a0, 24(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a5, 0(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a6, 8(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a7, 16(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a1, 24(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: xor a2, a2, a5
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: xor a3, a3, a6
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: xor a4, a4, a7
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: or a2, a2, a3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: or a0, a4, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: or a0, a2, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-UNALIGNED-RV32-V-LABEL: bcmp_size_32:
; CHECK-UNALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-V-NEXT: li a2, 32
; CHECK-UNALIGNED-RV32-V-NEXT: vsetvli zero, a2, e8, m2, ta, ma
; CHECK-UNALIGNED-RV32-V-NEXT: vle8.v v8, (a0)
; CHECK-UNALIGNED-RV32-V-NEXT: vle8.v v10, (a1)
; CHECK-UNALIGNED-RV32-V-NEXT: vmsne.vv v12, v8, v10
; CHECK-UNALIGNED-RV32-V-NEXT: vcpop.m a0, v12
; CHECK-UNALIGNED-RV32-V-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV32-V-NEXT: ret
;
; CHECK-UNALIGNED-RV64-V-LABEL: bcmp_size_32:
; CHECK-UNALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-V-NEXT: li a2, 32
; CHECK-UNALIGNED-RV64-V-NEXT: vsetvli zero, a2, e8, m2, ta, ma
; CHECK-UNALIGNED-RV64-V-NEXT: vle8.v v8, (a0)
; CHECK-UNALIGNED-RV64-V-NEXT: vle8.v v10, (a1)
; CHECK-UNALIGNED-RV64-V-NEXT: vmsne.vv v12, v8, v10
; CHECK-UNALIGNED-RV64-V-NEXT: vcpop.m a0, v12
; CHECK-UNALIGNED-RV64-V-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV64-V-NEXT: ret
entry:
  %bcmp = call signext i32 @bcmp(ptr %s1, ptr %s2, iXLen 32)
  ret i32 %bcmp
}

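; Size 63: every scalar configuration in this file calls the libcall from
; here on; only the vector configurations expand inline, at LMUL=4.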
define i32 @bcmp_size_63(ptr %s1, ptr %s2) nounwind optsize {
; CHECK-ALIGNED-RV32-LABEL: bcmp_size_63:
; CHECK-ALIGNED-RV32: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-NEXT: li a2, 63
; CHECK-ALIGNED-RV32-NEXT: call bcmp
; CHECK-ALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-NEXT: ret
;
; CHECK-ALIGNED-RV64-LABEL: bcmp_size_63:
; CHECK-ALIGNED-RV64: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-NEXT: li a2, 63
; CHECK-ALIGNED-RV64-NEXT: call bcmp
; CHECK-ALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBB-LABEL: bcmp_size_63:
; CHECK-ALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBB-NEXT: li a2, 63
; CHECK-ALIGNED-RV32-ZBB-NEXT: call bcmp
; CHECK-ALIGNED-RV32-ZBB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBB-LABEL: bcmp_size_63:
; CHECK-ALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBB-NEXT: li a2, 63
; CHECK-ALIGNED-RV64-ZBB-NEXT: call bcmp
; CHECK-ALIGNED-RV64-ZBB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBKB-LABEL: bcmp_size_63:
; CHECK-ALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBKB-NEXT: li a2, 63
; CHECK-ALIGNED-RV32-ZBKB-NEXT: call bcmp
; CHECK-ALIGNED-RV32-ZBKB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBKB-LABEL: bcmp_size_63:
; CHECK-ALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBKB-NEXT: li a2, 63
; CHECK-ALIGNED-RV64-ZBKB-NEXT: call bcmp
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV32-V-LABEL: bcmp_size_63:
; CHECK-ALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-V-NEXT: li a2, 63
; CHECK-ALIGNED-RV32-V-NEXT: call bcmp
; CHECK-ALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-V-NEXT: ret
;
; CHECK-ALIGNED-RV64-V-LABEL: bcmp_size_63:
; CHECK-ALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-V-NEXT: li a2, 63
; CHECK-ALIGNED-RV64-V-NEXT: call bcmp
; CHECK-ALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-V-NEXT: ret
;
; CHECK-UNALIGNED-RV32-LABEL: bcmp_size_63:
; CHECK-UNALIGNED-RV32: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-UNALIGNED-RV32-NEXT: li a2, 63
; CHECK-UNALIGNED-RV32-NEXT: call bcmp
; CHECK-UNALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-UNALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV32-NEXT: ret
;
; CHECK-UNALIGNED-RV64-LABEL: bcmp_size_63:
; CHECK-UNALIGNED-RV64: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-UNALIGNED-RV64-NEXT: li a2, 63
; CHECK-UNALIGNED-RV64-NEXT: call bcmp
; CHECK-UNALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-UNALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV64-NEXT: ret
;
; CHECK-UNALIGNED-RV32-ZBB-LABEL: bcmp_size_63:
; CHECK-UNALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBB-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV32-ZBB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-UNALIGNED-RV32-ZBB-NEXT: li a2, 63
; CHECK-UNALIGNED-RV32-ZBB-NEXT: call bcmp
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-UNALIGNED-RV32-ZBB-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBB-LABEL: bcmp_size_63:
; CHECK-UNALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBB-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV64-ZBB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-UNALIGNED-RV64-ZBB-NEXT: li a2, 63
; CHECK-UNALIGNED-RV64-ZBB-NEXT: call bcmp
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-UNALIGNED-RV64-ZBB-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-UNALIGNED-RV32-ZBKB-LABEL: bcmp_size_63:
; CHECK-UNALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: li a2, 63
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: call bcmp
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBKB-LABEL: bcmp_size_63:
; CHECK-UNALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: li a2, 63
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: call bcmp
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-UNALIGNED-RV32-V-LABEL: bcmp_size_63:
; CHECK-UNALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-V-NEXT: li a2, 63
; CHECK-UNALIGNED-RV32-V-NEXT: vsetvli zero, a2, e8, m4, ta, ma
; CHECK-UNALIGNED-RV32-V-NEXT: vle8.v v8, (a0)
; CHECK-UNALIGNED-RV32-V-NEXT: vle8.v v12, (a1)
; CHECK-UNALIGNED-RV32-V-NEXT: vmsne.vv v16, v8, v12
; CHECK-UNALIGNED-RV32-V-NEXT: vcpop.m a0, v16
; CHECK-UNALIGNED-RV32-V-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV32-V-NEXT: ret
;
; CHECK-UNALIGNED-RV64-V-LABEL: bcmp_size_63:
; CHECK-UNALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-V-NEXT: li a2, 63
; CHECK-UNALIGNED-RV64-V-NEXT: vsetvli zero, a2, e8, m4, ta, ma
; CHECK-UNALIGNED-RV64-V-NEXT: vle8.v v8, (a0)
; CHECK-UNALIGNED-RV64-V-NEXT: vle8.v v12, (a1)
; CHECK-UNALIGNED-RV64-V-NEXT: vmsne.vv v16, v8, v12
; CHECK-UNALIGNED-RV64-V-NEXT: vcpop.m a0, v16
; CHECK-UNALIGNED-RV64-V-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV64-V-NEXT: ret
entry:
  %bcmp = call signext i32 @bcmp(ptr %s1, ptr %s2, iXLen 63)
  ret i32 %bcmp
}

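; Size 64: as with size 63, only the vector configurations expand inline.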
define i32 @bcmp_size_64(ptr %s1, ptr %s2) nounwind optsize {
; CHECK-ALIGNED-RV32-LABEL: bcmp_size_64:
; CHECK-ALIGNED-RV32: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-NEXT: li a2, 64
; CHECK-ALIGNED-RV32-NEXT: call bcmp
; CHECK-ALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-NEXT: ret
;
; CHECK-ALIGNED-RV64-LABEL: bcmp_size_64:
; CHECK-ALIGNED-RV64: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-NEXT: li a2, 64
; CHECK-ALIGNED-RV64-NEXT: call bcmp
; CHECK-ALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBB-LABEL: bcmp_size_64:
; CHECK-ALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBB-NEXT: li a2, 64
; CHECK-ALIGNED-RV32-ZBB-NEXT: call bcmp
; CHECK-ALIGNED-RV32-ZBB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBB-LABEL: bcmp_size_64:
; CHECK-ALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBB-NEXT: li a2, 64
; CHECK-ALIGNED-RV64-ZBB-NEXT: call bcmp
; CHECK-ALIGNED-RV64-ZBB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBKB-LABEL: bcmp_size_64:
; CHECK-ALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBKB-NEXT: li a2, 64
; CHECK-ALIGNED-RV32-ZBKB-NEXT: call bcmp
; CHECK-ALIGNED-RV32-ZBKB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBKB-LABEL: bcmp_size_64:
; CHECK-ALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBKB-NEXT: li a2, 64
; CHECK-ALIGNED-RV64-ZBKB-NEXT: call bcmp
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV32-V-LABEL: bcmp_size_64:
; CHECK-ALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-V-NEXT: li a2, 64
; CHECK-ALIGNED-RV32-V-NEXT: call bcmp
; CHECK-ALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-V-NEXT: ret
;
; CHECK-ALIGNED-RV64-V-LABEL: bcmp_size_64:
; CHECK-ALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-V-NEXT: li a2, 64
; CHECK-ALIGNED-RV64-V-NEXT: call bcmp
; CHECK-ALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-V-NEXT: ret
;
; CHECK-UNALIGNED-RV32-LABEL: bcmp_size_64:
; CHECK-UNALIGNED-RV32: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-UNALIGNED-RV32-NEXT: li a2, 64
; CHECK-UNALIGNED-RV32-NEXT: call bcmp
; CHECK-UNALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-UNALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV32-NEXT: ret
;
; CHECK-UNALIGNED-RV64-LABEL: bcmp_size_64:
; CHECK-UNALIGNED-RV64: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-UNALIGNED-RV64-NEXT: li a2, 64
; CHECK-UNALIGNED-RV64-NEXT: call bcmp
; CHECK-UNALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-UNALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV64-NEXT: ret
;
; CHECK-UNALIGNED-RV32-ZBB-LABEL: bcmp_size_64:
; CHECK-UNALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBB-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV32-ZBB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-UNALIGNED-RV32-ZBB-NEXT: li a2, 64
; CHECK-UNALIGNED-RV32-ZBB-NEXT: call bcmp
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-UNALIGNED-RV32-ZBB-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBB-LABEL: bcmp_size_64:
; CHECK-UNALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBB-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV64-ZBB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-UNALIGNED-RV64-ZBB-NEXT: li a2, 64
; CHECK-UNALIGNED-RV64-ZBB-NEXT: call bcmp
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-UNALIGNED-RV64-ZBB-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-UNALIGNED-RV32-ZBKB-LABEL: bcmp_size_64:
; CHECK-UNALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: li a2, 64
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: call bcmp
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBKB-LABEL: bcmp_size_64:
; CHECK-UNALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: li a2, 64
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: call bcmp
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-UNALIGNED-RV32-V-LABEL: bcmp_size_64:
; CHECK-UNALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-V-NEXT: li a2, 64
; CHECK-UNALIGNED-RV32-V-NEXT: vsetvli zero, a2, e8, m4, ta, ma
; CHECK-UNALIGNED-RV32-V-NEXT: vle8.v v8, (a0)
; CHECK-UNALIGNED-RV32-V-NEXT: vle8.v v12, (a1)
; CHECK-UNALIGNED-RV32-V-NEXT: vmsne.vv v16, v8, v12
; CHECK-UNALIGNED-RV32-V-NEXT: vcpop.m a0, v16
; CHECK-UNALIGNED-RV32-V-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV32-V-NEXT: ret
;
; CHECK-UNALIGNED-RV64-V-LABEL: bcmp_size_64:
; CHECK-UNALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-V-NEXT: li a2, 64
; CHECK-UNALIGNED-RV64-V-NEXT: vsetvli zero, a2, e8, m4, ta, ma
; CHECK-UNALIGNED-RV64-V-NEXT: vle8.v v8, (a0)
; CHECK-UNALIGNED-RV64-V-NEXT: vle8.v v12, (a1)
; CHECK-UNALIGNED-RV64-V-NEXT: vmsne.vv v16, v8, v12
; CHECK-UNALIGNED-RV64-V-NEXT: vcpop.m a0, v16
; CHECK-UNALIGNED-RV64-V-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV64-V-NEXT: ret
entry:
  %bcmp = call signext i32 @bcmp(ptr %s1, ptr %s2, iXLen 64)
  ret i32 %bcmp
}

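; Size 127: still a single vector compare, now at LMUL=8.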
define i32 @bcmp_size_127(ptr %s1, ptr %s2) nounwind optsize {
; CHECK-ALIGNED-RV32-LABEL: bcmp_size_127:
; CHECK-ALIGNED-RV32: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-NEXT: li a2, 127
; CHECK-ALIGNED-RV32-NEXT: call bcmp
; CHECK-ALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-NEXT: ret
;
; CHECK-ALIGNED-RV64-LABEL: bcmp_size_127:
; CHECK-ALIGNED-RV64: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-NEXT: li a2, 127
; CHECK-ALIGNED-RV64-NEXT: call bcmp
; CHECK-ALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBB-LABEL: bcmp_size_127:
; CHECK-ALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBB-NEXT: li a2, 127
; CHECK-ALIGNED-RV32-ZBB-NEXT: call bcmp
; CHECK-ALIGNED-RV32-ZBB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBB-LABEL: bcmp_size_127:
; CHECK-ALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBB-NEXT: li a2, 127
; CHECK-ALIGNED-RV64-ZBB-NEXT: call bcmp
; CHECK-ALIGNED-RV64-ZBB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBKB-LABEL: bcmp_size_127:
; CHECK-ALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBKB-NEXT: li a2, 127
; CHECK-ALIGNED-RV32-ZBKB-NEXT: call bcmp
; CHECK-ALIGNED-RV32-ZBKB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBKB-LABEL: bcmp_size_127:
; CHECK-ALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBKB-NEXT: li a2, 127
; CHECK-ALIGNED-RV64-ZBKB-NEXT: call bcmp
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV32-V-LABEL: bcmp_size_127:
; CHECK-ALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-V-NEXT: li a2, 127
; CHECK-ALIGNED-RV32-V-NEXT: call bcmp
; CHECK-ALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-V-NEXT: ret
;
; CHECK-ALIGNED-RV64-V-LABEL: bcmp_size_127:
; CHECK-ALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-V-NEXT: li a2, 127
; CHECK-ALIGNED-RV64-V-NEXT: call bcmp
; CHECK-ALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-V-NEXT: ret
;
; CHECK-UNALIGNED-RV32-LABEL: bcmp_size_127:
; CHECK-UNALIGNED-RV32: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-UNALIGNED-RV32-NEXT: li a2, 127
; CHECK-UNALIGNED-RV32-NEXT: call bcmp
; CHECK-UNALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-UNALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV32-NEXT: ret
;
; CHECK-UNALIGNED-RV64-LABEL: bcmp_size_127:
; CHECK-UNALIGNED-RV64: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-UNALIGNED-RV64-NEXT: li a2, 127
; CHECK-UNALIGNED-RV64-NEXT: call bcmp
; CHECK-UNALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-UNALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV64-NEXT: ret
;
; CHECK-UNALIGNED-RV32-ZBB-LABEL: bcmp_size_127:
; CHECK-UNALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBB-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV32-ZBB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-UNALIGNED-RV32-ZBB-NEXT: li a2, 127
; CHECK-UNALIGNED-RV32-ZBB-NEXT: call bcmp
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-UNALIGNED-RV32-ZBB-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBB-LABEL: bcmp_size_127:
; CHECK-UNALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBB-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV64-ZBB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-UNALIGNED-RV64-ZBB-NEXT: li a2, 127
; CHECK-UNALIGNED-RV64-ZBB-NEXT: call bcmp
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-UNALIGNED-RV64-ZBB-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-UNALIGNED-RV32-ZBKB-LABEL: bcmp_size_127:
; CHECK-UNALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: li a2, 127
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: call bcmp
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBKB-LABEL: bcmp_size_127:
; CHECK-UNALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: li a2, 127
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: call bcmp
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-UNALIGNED-RV32-V-LABEL: bcmp_size_127:
; CHECK-UNALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-V-NEXT: li a2, 127
; CHECK-UNALIGNED-RV32-V-NEXT: vsetvli zero, a2, e8, m8, ta, ma
; CHECK-UNALIGNED-RV32-V-NEXT: vle8.v v8, (a0)
; CHECK-UNALIGNED-RV32-V-NEXT: vle8.v v16, (a1)
; CHECK-UNALIGNED-RV32-V-NEXT: vmsne.vv v24, v8, v16
; CHECK-UNALIGNED-RV32-V-NEXT: vcpop.m a0, v24
; CHECK-UNALIGNED-RV32-V-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV32-V-NEXT: ret
;
; CHECK-UNALIGNED-RV64-V-LABEL: bcmp_size_127:
; CHECK-UNALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-V-NEXT: li a2, 127
; CHECK-UNALIGNED-RV64-V-NEXT: vsetvli zero, a2, e8, m8, ta, ma
; CHECK-UNALIGNED-RV64-V-NEXT: vle8.v v8, (a0)
; CHECK-UNALIGNED-RV64-V-NEXT: vle8.v v16, (a1)
; CHECK-UNALIGNED-RV64-V-NEXT: vmsne.vv v24, v8, v16
; CHECK-UNALIGNED-RV64-V-NEXT: vcpop.m a0, v24
; CHECK-UNALIGNED-RV64-V-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV64-V-NEXT: ret
entry:
  %bcmp = call signext i32 @bcmp(ptr %s1, ptr %s2, iXLen 127)
  ret i32 %bcmp
}

define i32 @bcmp_size_128(ptr %s1, ptr %s2) nounwind optsize {
; CHECK-ALIGNED-RV32-LABEL: bcmp_size_128:
; CHECK-ALIGNED-RV32: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-NEXT: li a2, 128
; CHECK-ALIGNED-RV32-NEXT: call bcmp
; CHECK-ALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-NEXT: ret
;
; CHECK-ALIGNED-RV64-LABEL: bcmp_size_128:
; CHECK-ALIGNED-RV64: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-NEXT: li a2, 128
; CHECK-ALIGNED-RV64-NEXT: call bcmp
; CHECK-ALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBB-LABEL: bcmp_size_128:
; CHECK-ALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBB-NEXT: li a2, 128
; CHECK-ALIGNED-RV32-ZBB-NEXT: call bcmp
; CHECK-ALIGNED-RV32-ZBB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBB-LABEL: bcmp_size_128:
; CHECK-ALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBB-NEXT: li a2, 128
; CHECK-ALIGNED-RV64-ZBB-NEXT: call bcmp
; CHECK-ALIGNED-RV64-ZBB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBKB-LABEL: bcmp_size_128:
; CHECK-ALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBKB-NEXT: li a2, 128
; CHECK-ALIGNED-RV32-ZBKB-NEXT: call bcmp
; CHECK-ALIGNED-RV32-ZBKB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBKB-LABEL: bcmp_size_128:
; CHECK-ALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBKB-NEXT: li a2, 128
; CHECK-ALIGNED-RV64-ZBKB-NEXT: call bcmp
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV32-V-LABEL: bcmp_size_128:
; CHECK-ALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-V-NEXT: li a2, 128
; CHECK-ALIGNED-RV32-V-NEXT: call bcmp
; CHECK-ALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-V-NEXT: ret
;
; CHECK-ALIGNED-RV64-V-LABEL: bcmp_size_128:
; CHECK-ALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-V-NEXT: li a2, 128
; CHECK-ALIGNED-RV64-V-NEXT: call bcmp
; CHECK-ALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-V-NEXT: ret
;
; CHECK-UNALIGNED-RV32-LABEL: bcmp_size_128:
; CHECK-UNALIGNED-RV32: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-UNALIGNED-RV32-NEXT: li a2, 128
; CHECK-UNALIGNED-RV32-NEXT: call bcmp
; CHECK-UNALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-UNALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV32-NEXT: ret
;
; CHECK-UNALIGNED-RV64-LABEL: bcmp_size_128:
; CHECK-UNALIGNED-RV64: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-UNALIGNED-RV64-NEXT: li a2, 128
; CHECK-UNALIGNED-RV64-NEXT: call bcmp
; CHECK-UNALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-UNALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV64-NEXT: ret
;
; CHECK-UNALIGNED-RV32-ZBB-LABEL: bcmp_size_128:
; CHECK-UNALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBB-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV32-ZBB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-UNALIGNED-RV32-ZBB-NEXT: li a2, 128
; CHECK-UNALIGNED-RV32-ZBB-NEXT: call bcmp
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-UNALIGNED-RV32-ZBB-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBB-LABEL: bcmp_size_128:
; CHECK-UNALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBB-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV64-ZBB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-UNALIGNED-RV64-ZBB-NEXT: li a2, 128
; CHECK-UNALIGNED-RV64-ZBB-NEXT: call bcmp
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-UNALIGNED-RV64-ZBB-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-UNALIGNED-RV32-ZBKB-LABEL: bcmp_size_128:
; CHECK-UNALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: li a2, 128
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: call bcmp
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBKB-LABEL: bcmp_size_128:
; CHECK-UNALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: li a2, 128
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: call bcmp
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-UNALIGNED-RV32-V-LABEL: bcmp_size_128:
; CHECK-UNALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-V-NEXT: li a2, 128
; CHECK-UNALIGNED-RV32-V-NEXT: vsetvli zero, a2, e8, m8, ta, ma
; CHECK-UNALIGNED-RV32-V-NEXT: vle8.v v8, (a0)
; CHECK-UNALIGNED-RV32-V-NEXT: vle8.v v16, (a1)
; CHECK-UNALIGNED-RV32-V-NEXT: vmsne.vv v24, v8, v16
; CHECK-UNALIGNED-RV32-V-NEXT: vcpop.m a0, v24
; CHECK-UNALIGNED-RV32-V-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV32-V-NEXT: ret
;
; CHECK-UNALIGNED-RV64-V-LABEL: bcmp_size_128:
; CHECK-UNALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-V-NEXT: li a2, 128
; CHECK-UNALIGNED-RV64-V-NEXT: vsetvli zero, a2, e8, m8, ta, ma
; CHECK-UNALIGNED-RV64-V-NEXT: vle8.v v8, (a0)
; CHECK-UNALIGNED-RV64-V-NEXT: vle8.v v16, (a1)
; CHECK-UNALIGNED-RV64-V-NEXT: vmsne.vv v24, v8, v16
; CHECK-UNALIGNED-RV64-V-NEXT: vcpop.m a0, v24
; CHECK-UNALIGNED-RV64-V-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV64-V-NEXT: ret
entry:
  %bcmp = call signext i32 @bcmp(ptr %s1, ptr %s2, iXLen 128)
  ret i32 %bcmp
}

define i32 @bcmp_size_runtime(ptr %s1, ptr %s2, iXLen %len) nounwind optsize {
; CHECK-RV32-LABEL: bcmp_size_runtime:
; CHECK-RV32: # %bb.0: # %entry
; CHECK-RV32-NEXT: addi sp, sp, -16
; CHECK-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-RV32-NEXT: call bcmp
; CHECK-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-RV32-NEXT: addi sp, sp, 16
; CHECK-RV32-NEXT: ret
;
; CHECK-RV64-LABEL: bcmp_size_runtime:
; CHECK-RV64: # %bb.0: # %entry
; CHECK-RV64-NEXT: addi sp, sp, -16
; CHECK-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-RV64-NEXT: call bcmp
; CHECK-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-RV64-NEXT: addi sp, sp, 16
; CHECK-RV64-NEXT: ret
entry:
  %bcmp = call signext i32 @bcmp(ptr %s1, ptr %s2, iXLen %len)
  ret i32 %bcmp
}

define i1 @bcmp_eq_zero(ptr %s1, ptr %s2) nounwind optsize {
; CHECK-ALIGNED-RV32-LABEL: bcmp_eq_zero:
; CHECK-ALIGNED-RV32: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-NEXT: li a2, 4
; CHECK-ALIGNED-RV32-NEXT: call bcmp
; CHECK-ALIGNED-RV32-NEXT: seqz a0, a0
; CHECK-ALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-NEXT: ret
;
; CHECK-ALIGNED-RV64-LABEL: bcmp_eq_zero:
; CHECK-ALIGNED-RV64: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-NEXT: li a2, 4
; CHECK-ALIGNED-RV64-NEXT: call bcmp
; CHECK-ALIGNED-RV64-NEXT: seqz a0, a0
; CHECK-ALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBB-LABEL: bcmp_eq_zero:
; CHECK-ALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBB-NEXT: li a2, 4
; CHECK-ALIGNED-RV32-ZBB-NEXT: call bcmp
; CHECK-ALIGNED-RV32-ZBB-NEXT: seqz a0, a0
; CHECK-ALIGNED-RV32-ZBB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBB-LABEL: bcmp_eq_zero:
; CHECK-ALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBB-NEXT: li a2, 4
; CHECK-ALIGNED-RV64-ZBB-NEXT: call bcmp
; CHECK-ALIGNED-RV64-ZBB-NEXT: seqz a0, a0
; CHECK-ALIGNED-RV64-ZBB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBKB-LABEL: bcmp_eq_zero:
; CHECK-ALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBKB-NEXT: li a2, 4
; CHECK-ALIGNED-RV32-ZBKB-NEXT: call bcmp
; CHECK-ALIGNED-RV32-ZBKB-NEXT: seqz a0, a0
; CHECK-ALIGNED-RV32-ZBKB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBKB-LABEL: bcmp_eq_zero:
; CHECK-ALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBKB-NEXT: li a2, 4
; CHECK-ALIGNED-RV64-ZBKB-NEXT: call bcmp
; CHECK-ALIGNED-RV64-ZBKB-NEXT: seqz a0, a0
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV32-V-LABEL: bcmp_eq_zero:
; CHECK-ALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-V-NEXT: li a2, 4
; CHECK-ALIGNED-RV32-V-NEXT: call bcmp
; CHECK-ALIGNED-RV32-V-NEXT: seqz a0, a0
; CHECK-ALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-V-NEXT: ret
;
; CHECK-ALIGNED-RV64-V-LABEL: bcmp_eq_zero:
; CHECK-ALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-V-NEXT: li a2, 4
; CHECK-ALIGNED-RV64-V-NEXT: call bcmp
; CHECK-ALIGNED-RV64-V-NEXT: seqz a0, a0
; CHECK-ALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-V-NEXT: ret
;
; CHECK-UNALIGNED-LABEL: bcmp_eq_zero:
; CHECK-UNALIGNED: # %bb.0: # %entry
; CHECK-UNALIGNED-NEXT: lw a0, 0(a0)
; CHECK-UNALIGNED-NEXT: lw a1, 0(a1)
; CHECK-UNALIGNED-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-NEXT: seqz a0, a0
; CHECK-UNALIGNED-NEXT: ret
entry:
  %bcmp = call signext i32 @bcmp(ptr %s1, ptr %s2, iXLen 4)
  %ret = icmp eq i32 %bcmp, 0
  ret i1 %ret
}

define i1 @bcmp_lt_zero(ptr %s1, ptr %s2) nounwind optsize {
; CHECK-ALIGNED-RV32-LABEL: bcmp_lt_zero:
; CHECK-ALIGNED-RV32: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-NEXT: li a2, 4
; CHECK-ALIGNED-RV32-NEXT: call bcmp
; CHECK-ALIGNED-RV32-NEXT: srli a0, a0, 31
; CHECK-ALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-NEXT: ret
;
; CHECK-ALIGNED-RV64-LABEL: bcmp_lt_zero:
; CHECK-ALIGNED-RV64: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-NEXT: li a2, 4
; CHECK-ALIGNED-RV64-NEXT: call bcmp
; CHECK-ALIGNED-RV64-NEXT: slti a0, a0, 0
; CHECK-ALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBB-LABEL: bcmp_lt_zero:
; CHECK-ALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBB-NEXT: li a2, 4
; CHECK-ALIGNED-RV32-ZBB-NEXT: call bcmp
; CHECK-ALIGNED-RV32-ZBB-NEXT: srli a0, a0, 31
; CHECK-ALIGNED-RV32-ZBB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBB-LABEL: bcmp_lt_zero:
; CHECK-ALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBB-NEXT: li a2, 4
; CHECK-ALIGNED-RV64-ZBB-NEXT: call bcmp
; CHECK-ALIGNED-RV64-ZBB-NEXT: slti a0, a0, 0
; CHECK-ALIGNED-RV64-ZBB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBKB-LABEL: bcmp_lt_zero:
; CHECK-ALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBKB-NEXT: li a2, 4
; CHECK-ALIGNED-RV32-ZBKB-NEXT: call bcmp
; CHECK-ALIGNED-RV32-ZBKB-NEXT: srli a0, a0, 31
; CHECK-ALIGNED-RV32-ZBKB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBKB-LABEL: bcmp_lt_zero:
; CHECK-ALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBKB-NEXT: li a2, 4
; CHECK-ALIGNED-RV64-ZBKB-NEXT: call bcmp
; CHECK-ALIGNED-RV64-ZBKB-NEXT: slti a0, a0, 0
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV32-V-LABEL: bcmp_lt_zero:
; CHECK-ALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-V-NEXT: li a2, 4
; CHECK-ALIGNED-RV32-V-NEXT: call bcmp
; CHECK-ALIGNED-RV32-V-NEXT: srli a0, a0, 31
; CHECK-ALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-V-NEXT: ret
;
; CHECK-ALIGNED-RV64-V-LABEL: bcmp_lt_zero:
; CHECK-ALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-V-NEXT: li a2, 4
; CHECK-ALIGNED-RV64-V-NEXT: call bcmp
; CHECK-ALIGNED-RV64-V-NEXT: slti a0, a0, 0
; CHECK-ALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-V-NEXT: ret
;
; CHECK-UNALIGNED-LABEL: bcmp_lt_zero:
; CHECK-UNALIGNED: # %bb.0: # %entry
; CHECK-UNALIGNED-NEXT: li a0, 0
; CHECK-UNALIGNED-NEXT: ret
entry:
  %bcmp = call signext i32 @bcmp(ptr %s1, ptr %s2, iXLen 4)
  %ret = icmp slt i32 %bcmp, 0
  ret i1 %ret
}

define i1 @bcmp_gt_zero(ptr %s1, ptr %s2) nounwind optsize {
; CHECK-ALIGNED-RV32-LABEL: bcmp_gt_zero:
; CHECK-ALIGNED-RV32: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-NEXT: li a2, 4
; CHECK-ALIGNED-RV32-NEXT: call bcmp
; CHECK-ALIGNED-RV32-NEXT: sgtz a0, a0
; CHECK-ALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-NEXT: ret
;
; CHECK-ALIGNED-RV64-LABEL: bcmp_gt_zero:
; CHECK-ALIGNED-RV64: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-NEXT: li a2, 4
; CHECK-ALIGNED-RV64-NEXT: call bcmp
; CHECK-ALIGNED-RV64-NEXT: sgtz a0, a0
; CHECK-ALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBB-LABEL: bcmp_gt_zero:
; CHECK-ALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBB-NEXT: li a2, 4
; CHECK-ALIGNED-RV32-ZBB-NEXT: call bcmp
; CHECK-ALIGNED-RV32-ZBB-NEXT: sgtz a0, a0
; CHECK-ALIGNED-RV32-ZBB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBB-LABEL: bcmp_gt_zero:
; CHECK-ALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBB-NEXT: li a2, 4
; CHECK-ALIGNED-RV64-ZBB-NEXT: call bcmp
; CHECK-ALIGNED-RV64-ZBB-NEXT: sgtz a0, a0
; CHECK-ALIGNED-RV64-ZBB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBKB-LABEL: bcmp_gt_zero:
; CHECK-ALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBKB-NEXT: li a2, 4
; CHECK-ALIGNED-RV32-ZBKB-NEXT: call bcmp
; CHECK-ALIGNED-RV32-ZBKB-NEXT: sgtz a0, a0
; CHECK-ALIGNED-RV32-ZBKB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBKB-LABEL: bcmp_gt_zero:
; CHECK-ALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBKB-NEXT: li a2, 4
; CHECK-ALIGNED-RV64-ZBKB-NEXT: call bcmp
; CHECK-ALIGNED-RV64-ZBKB-NEXT: sgtz a0, a0
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV32-V-LABEL: bcmp_gt_zero:
; CHECK-ALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-V-NEXT: li a2, 4
; CHECK-ALIGNED-RV32-V-NEXT: call bcmp
; CHECK-ALIGNED-RV32-V-NEXT: sgtz a0, a0
; CHECK-ALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-V-NEXT: ret
;
; CHECK-ALIGNED-RV64-V-LABEL: bcmp_gt_zero:
; CHECK-ALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-V-NEXT: li a2, 4
; CHECK-ALIGNED-RV64-V-NEXT: call bcmp
; CHECK-ALIGNED-RV64-V-NEXT: sgtz a0, a0
; CHECK-ALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-V-NEXT: ret
;
; CHECK-UNALIGNED-LABEL: bcmp_gt_zero:
; CHECK-UNALIGNED: # %bb.0: # %entry
; CHECK-UNALIGNED-NEXT: lw a0, 0(a0)
; CHECK-UNALIGNED-NEXT: lw a1, 0(a1)
; CHECK-UNALIGNED-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-NEXT: snez a0, a0
; CHECK-UNALIGNED-NEXT: ret
entry:
  %bcmp = call signext i32 @bcmp(ptr %s1, ptr %s2, iXLen 4)
  %ret = icmp sgt i32 %bcmp, 0
  ret i1 %ret
}

define i32 @memcmp_size_0(ptr %s1, ptr %s2) nounwind optsize {
; CHECK-LABEL: memcmp_size_0:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: li a0, 0
; CHECK-NEXT: ret
entry:
  %memcmp = call signext i32 @memcmp(ptr %s1, ptr %s2, iXLen 0)
  ret i32 %memcmp
}

define i32 @memcmp_size_1(ptr %s1, ptr %s2) nounwind optsize {
; CHECK-ALIGNED-RV32-LABEL: memcmp_size_1:
; CHECK-ALIGNED-RV32: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-NEXT: li a2, 1
; CHECK-ALIGNED-RV32-NEXT: call memcmp
; CHECK-ALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-NEXT: ret
;
; CHECK-ALIGNED-RV64-LABEL: memcmp_size_1:
; CHECK-ALIGNED-RV64: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-NEXT: li a2, 1
; CHECK-ALIGNED-RV64-NEXT: call memcmp
; CHECK-ALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBB-LABEL: memcmp_size_1:
; CHECK-ALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBB-NEXT: li a2, 1
; CHECK-ALIGNED-RV32-ZBB-NEXT: call memcmp
; CHECK-ALIGNED-RV32-ZBB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBB-LABEL: memcmp_size_1:
; CHECK-ALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBB-NEXT: li a2, 1
; CHECK-ALIGNED-RV64-ZBB-NEXT: call memcmp
; CHECK-ALIGNED-RV64-ZBB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBKB-LABEL: memcmp_size_1:
; CHECK-ALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBKB-NEXT: li a2, 1
; CHECK-ALIGNED-RV32-ZBKB-NEXT: call memcmp
; CHECK-ALIGNED-RV32-ZBKB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBKB-LABEL: memcmp_size_1:
; CHECK-ALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBKB-NEXT: li a2, 1
; CHECK-ALIGNED-RV64-ZBKB-NEXT: call memcmp
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV32-V-LABEL: memcmp_size_1:
; CHECK-ALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-V-NEXT: li a2, 1
; CHECK-ALIGNED-RV32-V-NEXT: call memcmp
; CHECK-ALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-V-NEXT: ret
;
; CHECK-ALIGNED-RV64-V-LABEL: memcmp_size_1:
; CHECK-ALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-V-NEXT: li a2, 1
; CHECK-ALIGNED-RV64-V-NEXT: call memcmp
; CHECK-ALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-V-NEXT: ret
;
; CHECK-UNALIGNED-RV32-LABEL: memcmp_size_1:
; CHECK-UNALIGNED-RV32: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-UNALIGNED-RV32-NEXT: li a2, 1
; CHECK-UNALIGNED-RV32-NEXT: call memcmp
; CHECK-UNALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-UNALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV32-NEXT: ret
;
; CHECK-UNALIGNED-RV64-LABEL: memcmp_size_1:
; CHECK-UNALIGNED-RV64: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-UNALIGNED-RV64-NEXT: li a2, 1
; CHECK-UNALIGNED-RV64-NEXT: call memcmp
; CHECK-UNALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-UNALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV64-NEXT: ret
;
; CHECK-UNALIGNED-RV32-ZBB-LABEL: memcmp_size_1:
; CHECK-UNALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lbu a0, 0(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lbu a1, 0(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: sub a0, a0, a1
; CHECK-UNALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBB-LABEL: memcmp_size_1:
; CHECK-UNALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBB-NEXT: lbu a0, 0(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: lbu a1, 0(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: sub a0, a0, a1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-UNALIGNED-RV32-ZBKB-LABEL: memcmp_size_1:
; CHECK-UNALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lbu a0, 0(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lbu a1, 0(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: sub a0, a0, a1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBKB-LABEL: memcmp_size_1:
; CHECK-UNALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: lbu a0, 0(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: lbu a1, 0(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: sub a0, a0, a1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-UNALIGNED-RV32-V-LABEL: memcmp_size_1:
; CHECK-UNALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-UNALIGNED-RV32-V-NEXT: li a2, 1
; CHECK-UNALIGNED-RV32-V-NEXT: call memcmp
; CHECK-UNALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-UNALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV32-V-NEXT: ret
;
; CHECK-UNALIGNED-RV64-V-LABEL: memcmp_size_1:
; CHECK-UNALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-UNALIGNED-RV64-V-NEXT: li a2, 1
; CHECK-UNALIGNED-RV64-V-NEXT: call memcmp
; CHECK-UNALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-UNALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV64-V-NEXT: ret
entry:
  %memcmp = call signext i32 @memcmp(ptr %s1, ptr %s2, iXLen 1)
  ret i32 %memcmp
}

define i32 @memcmp_size_2(ptr %s1, ptr %s2) nounwind optsize {
; CHECK-ALIGNED-RV32-LABEL: memcmp_size_2:
; CHECK-ALIGNED-RV32: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-NEXT: li a2, 2
; CHECK-ALIGNED-RV32-NEXT: call memcmp
; CHECK-ALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-NEXT: ret
;
; CHECK-ALIGNED-RV64-LABEL: memcmp_size_2:
; CHECK-ALIGNED-RV64: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-NEXT: li a2, 2
; CHECK-ALIGNED-RV64-NEXT: call memcmp
; CHECK-ALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBB-LABEL: memcmp_size_2:
; CHECK-ALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBB-NEXT: li a2, 2
; CHECK-ALIGNED-RV32-ZBB-NEXT: call memcmp
; CHECK-ALIGNED-RV32-ZBB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBB-LABEL: memcmp_size_2:
; CHECK-ALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBB-NEXT: li a2, 2
; CHECK-ALIGNED-RV64-ZBB-NEXT: call memcmp
; CHECK-ALIGNED-RV64-ZBB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBKB-LABEL: memcmp_size_2:
; CHECK-ALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBKB-NEXT: li a2, 2
; CHECK-ALIGNED-RV32-ZBKB-NEXT: call memcmp
; CHECK-ALIGNED-RV32-ZBKB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBKB-LABEL: memcmp_size_2:
; CHECK-ALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBKB-NEXT: li a2, 2
; CHECK-ALIGNED-RV64-ZBKB-NEXT: call memcmp
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV32-V-LABEL: memcmp_size_2:
; CHECK-ALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-V-NEXT: li a2, 2
; CHECK-ALIGNED-RV32-V-NEXT: call memcmp
; CHECK-ALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-V-NEXT: ret
;
; CHECK-ALIGNED-RV64-V-LABEL: memcmp_size_2:
; CHECK-ALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-V-NEXT: li a2, 2
; CHECK-ALIGNED-RV64-V-NEXT: call memcmp
; CHECK-ALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-V-NEXT: ret
;
; CHECK-UNALIGNED-RV32-LABEL: memcmp_size_2:
; CHECK-UNALIGNED-RV32: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-UNALIGNED-RV32-NEXT: li a2, 2
; CHECK-UNALIGNED-RV32-NEXT: call memcmp
; CHECK-UNALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-UNALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV32-NEXT: ret
;
; CHECK-UNALIGNED-RV64-LABEL: memcmp_size_2:
; CHECK-UNALIGNED-RV64: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-UNALIGNED-RV64-NEXT: li a2, 2
; CHECK-UNALIGNED-RV64-NEXT: call memcmp
; CHECK-UNALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-UNALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV64-NEXT: ret
;
; CHECK-UNALIGNED-RV32-ZBB-LABEL: memcmp_size_2:
; CHECK-UNALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lh a0, 0(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lh a1, 0(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a0, a0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a1, a1
; CHECK-UNALIGNED-RV32-ZBB-NEXT: srli a0, a0, 16
; CHECK-UNALIGNED-RV32-ZBB-NEXT: srli a1, a1, 16
; CHECK-UNALIGNED-RV32-ZBB-NEXT: sub a0, a0, a1
; CHECK-UNALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBB-LABEL: memcmp_size_2:
; CHECK-UNALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBB-NEXT: lh a0, 0(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: lh a1, 0(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a0, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a1, a1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: srli a0, a0, 48
; CHECK-UNALIGNED-RV64-ZBB-NEXT: srli a1, a1, 48
; CHECK-UNALIGNED-RV64-ZBB-NEXT: sub a0, a0, a1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-UNALIGNED-RV32-ZBKB-LABEL: memcmp_size_2:
; CHECK-UNALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lh a0, 0(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lh a1, 0(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a0, a0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a1, a1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: srli a0, a0, 16
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: srli a1, a1, 16
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: sub a0, a0, a1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBKB-LABEL: memcmp_size_2:
; CHECK-UNALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: lh a0, 0(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: lh a1, 0(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a0, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a1, a1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: srli a0, a0, 48
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: srli a1, a1, 48
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: sub a0, a0, a1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-UNALIGNED-RV32-V-LABEL: memcmp_size_2:
; CHECK-UNALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-UNALIGNED-RV32-V-NEXT: li a2, 2
; CHECK-UNALIGNED-RV32-V-NEXT: call memcmp
; CHECK-UNALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-UNALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV32-V-NEXT: ret
;
; CHECK-UNALIGNED-RV64-V-LABEL: memcmp_size_2:
; CHECK-UNALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-UNALIGNED-RV64-V-NEXT: li a2, 2
; CHECK-UNALIGNED-RV64-V-NEXT: call memcmp
; CHECK-UNALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-UNALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV64-V-NEXT: ret
entry:
  %memcmp = call signext i32 @memcmp(ptr %s1, ptr %s2, iXLen 2)
  ret i32 %memcmp
}

define i32 @memcmp_size_3(ptr %s1, ptr %s2) nounwind optsize {
; CHECK-ALIGNED-RV32-LABEL: memcmp_size_3:
; CHECK-ALIGNED-RV32: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-NEXT: li a2, 3
; CHECK-ALIGNED-RV32-NEXT: call memcmp
; CHECK-ALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-NEXT: ret
;
; CHECK-ALIGNED-RV64-LABEL: memcmp_size_3:
; CHECK-ALIGNED-RV64: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-NEXT: li a2, 3
; CHECK-ALIGNED-RV64-NEXT: call memcmp
; CHECK-ALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBB-LABEL: memcmp_size_3:
; CHECK-ALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBB-NEXT: li a2, 3
; CHECK-ALIGNED-RV32-ZBB-NEXT: call memcmp
; CHECK-ALIGNED-RV32-ZBB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBB-LABEL: memcmp_size_3:
; CHECK-ALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBB-NEXT: li a2, 3
; CHECK-ALIGNED-RV64-ZBB-NEXT: call memcmp
; CHECK-ALIGNED-RV64-ZBB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBKB-LABEL: memcmp_size_3:
; CHECK-ALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBKB-NEXT: li a2, 3
; CHECK-ALIGNED-RV32-ZBKB-NEXT: call memcmp
; CHECK-ALIGNED-RV32-ZBKB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBKB-LABEL: memcmp_size_3:
; CHECK-ALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBKB-NEXT: li a2, 3
; CHECK-ALIGNED-RV64-ZBKB-NEXT: call memcmp
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV32-V-LABEL: memcmp_size_3:
; CHECK-ALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-V-NEXT: li a2, 3
; CHECK-ALIGNED-RV32-V-NEXT: call memcmp
; CHECK-ALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-V-NEXT: ret
;
; CHECK-ALIGNED-RV64-V-LABEL: memcmp_size_3:
; CHECK-ALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-V-NEXT: li a2, 3
; CHECK-ALIGNED-RV64-V-NEXT: call memcmp
; CHECK-ALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-V-NEXT: ret
;
; CHECK-UNALIGNED-RV32-LABEL: memcmp_size_3:
; CHECK-UNALIGNED-RV32: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-UNALIGNED-RV32-NEXT: li a2, 3
; CHECK-UNALIGNED-RV32-NEXT: call memcmp
; CHECK-UNALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-UNALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV32-NEXT: ret
;
; CHECK-UNALIGNED-RV64-LABEL: memcmp_size_3:
; CHECK-UNALIGNED-RV64: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-UNALIGNED-RV64-NEXT: li a2, 3
; CHECK-UNALIGNED-RV64-NEXT: call memcmp
; CHECK-UNALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-UNALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV64-NEXT: ret
;
; CHECK-UNALIGNED-RV32-ZBB-LABEL: memcmp_size_3:
; CHECK-UNALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lbu a2, 2(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lhu a0, 0(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lbu a3, 2(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lhu a1, 0(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: slli a2, a2, 16
; CHECK-UNALIGNED-RV32-ZBB-NEXT: or a0, a0, a2
; CHECK-UNALIGNED-RV32-ZBB-NEXT: slli a3, a3, 16
; CHECK-UNALIGNED-RV32-ZBB-NEXT: or a1, a1, a3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a0, a0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a1, a1
; CHECK-UNALIGNED-RV32-ZBB-NEXT: sltu a2, a0, a1
; CHECK-UNALIGNED-RV32-ZBB-NEXT: sltu a0, a1, a0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: sub a0, a0, a2
; CHECK-UNALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBB-LABEL: memcmp_size_3:
; CHECK-UNALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBB-NEXT: lbu a2, 2(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: lhu a0, 0(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: lbu a3, 2(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: lhu a1, 0(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: slli a2, a2, 16
; CHECK-UNALIGNED-RV64-ZBB-NEXT: or a0, a0, a2
; CHECK-UNALIGNED-RV64-ZBB-NEXT: slli a3, a3, 16
; CHECK-UNALIGNED-RV64-ZBB-NEXT: or a1, a1, a3
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a0, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a1, a1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: srli a0, a0, 32
; CHECK-UNALIGNED-RV64-ZBB-NEXT: srli a1, a1, 32
; CHECK-UNALIGNED-RV64-ZBB-NEXT: sltu a2, a0, a1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: sltu a0, a1, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: sub a0, a0, a2
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-UNALIGNED-RV32-ZBKB-LABEL: memcmp_size_3:
; CHECK-UNALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lhu a2, 0(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lbu a0, 2(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lhu a3, 0(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lbu a1, 2(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: pack a0, a2, a0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: pack a1, a3, a1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a0, a0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a1, a1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: sltu a2, a0, a1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: sltu a0, a1, a0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: sub a0, a0, a2
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBKB-LABEL: memcmp_size_3:
; CHECK-UNALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: lbu a2, 2(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: lhu a0, 0(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: lbu a3, 2(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: lhu a1, 0(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: slli a2, a2, 16
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: or a0, a0, a2
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: slli a3, a3, 16
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: or a1, a1, a3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a0, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a1, a1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: srli a0, a0, 32
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: srli a1, a1, 32
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: sltu a2, a0, a1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: sltu a0, a1, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: sub a0, a0, a2
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-UNALIGNED-RV32-V-LABEL: memcmp_size_3:
; CHECK-UNALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-UNALIGNED-RV32-V-NEXT: li a2, 3
; CHECK-UNALIGNED-RV32-V-NEXT: call memcmp
; CHECK-UNALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-UNALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV32-V-NEXT: ret
;
; CHECK-UNALIGNED-RV64-V-LABEL: memcmp_size_3:
; CHECK-UNALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-UNALIGNED-RV64-V-NEXT: li a2, 3
; CHECK-UNALIGNED-RV64-V-NEXT: call memcmp
; CHECK-UNALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-UNALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV64-V-NEXT: ret
entry:
  %memcmp = call signext i32 @memcmp(ptr %s1, ptr %s2, iXLen 3)
  ret i32 %memcmp
}

define i32 @memcmp_size_4(ptr %s1, ptr %s2) nounwind optsize {
; CHECK-ALIGNED-RV32-LABEL: memcmp_size_4:
; CHECK-ALIGNED-RV32: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-NEXT: li a2, 4
; CHECK-ALIGNED-RV32-NEXT: call memcmp
; CHECK-ALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-NEXT: ret
;
; CHECK-ALIGNED-RV64-LABEL: memcmp_size_4:
; CHECK-ALIGNED-RV64: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-NEXT: li a2, 4
; CHECK-ALIGNED-RV64-NEXT: call memcmp
; CHECK-ALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBB-LABEL: memcmp_size_4:
; CHECK-ALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBB-NEXT: li a2, 4
; CHECK-ALIGNED-RV32-ZBB-NEXT: call memcmp
; CHECK-ALIGNED-RV32-ZBB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBB-LABEL: memcmp_size_4:
; CHECK-ALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBB-NEXT: li a2, 4
; CHECK-ALIGNED-RV64-ZBB-NEXT: call memcmp
; CHECK-ALIGNED-RV64-ZBB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBKB-LABEL: memcmp_size_4:
; CHECK-ALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBKB-NEXT: li a2, 4
; CHECK-ALIGNED-RV32-ZBKB-NEXT: call memcmp
; CHECK-ALIGNED-RV32-ZBKB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBKB-LABEL: memcmp_size_4:
; CHECK-ALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBKB-NEXT: li a2, 4
; CHECK-ALIGNED-RV64-ZBKB-NEXT: call memcmp
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV32-V-LABEL: memcmp_size_4:
; CHECK-ALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-V-NEXT: li a2, 4
; CHECK-ALIGNED-RV32-V-NEXT: call memcmp
; CHECK-ALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-V-NEXT: ret
;
; CHECK-ALIGNED-RV64-V-LABEL: memcmp_size_4:
; CHECK-ALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-V-NEXT: li a2, 4
; CHECK-ALIGNED-RV64-V-NEXT: call memcmp
; CHECK-ALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-V-NEXT: ret
;
; CHECK-UNALIGNED-RV32-LABEL: memcmp_size_4:
; CHECK-UNALIGNED-RV32: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-UNALIGNED-RV32-NEXT: li a2, 4
; CHECK-UNALIGNED-RV32-NEXT: call memcmp
; CHECK-UNALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-UNALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV32-NEXT: ret
;
; CHECK-UNALIGNED-RV64-LABEL: memcmp_size_4:
; CHECK-UNALIGNED-RV64: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-UNALIGNED-RV64-NEXT: li a2, 4
; CHECK-UNALIGNED-RV64-NEXT: call memcmp
; CHECK-UNALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-UNALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV64-NEXT: ret
;
; CHECK-UNALIGNED-RV32-ZBB-LABEL: memcmp_size_4:
; CHECK-UNALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a0, 0(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a1, 0(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a0, a0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a1, a1
; CHECK-UNALIGNED-RV32-ZBB-NEXT: sltu a2, a0, a1
; CHECK-UNALIGNED-RV32-ZBB-NEXT: sltu a0, a1, a0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: sub a0, a0, a2
; CHECK-UNALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBB-LABEL: memcmp_size_4:
; CHECK-UNALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBB-NEXT: lw a0, 0(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: lw a1, 0(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a0, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a1, a1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: srli a0, a0, 32
; CHECK-UNALIGNED-RV64-ZBB-NEXT: srli a1, a1, 32
; CHECK-UNALIGNED-RV64-ZBB-NEXT: sltu a2, a0, a1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: sltu a0, a1, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: sub a0, a0, a2
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-UNALIGNED-RV32-ZBKB-LABEL: memcmp_size_4:
; CHECK-UNALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a0, 0(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a1, 0(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a0, a0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a1, a1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: sltu a2, a0, a1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: sltu a0, a1, a0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: sub a0, a0, a2
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBKB-LABEL: memcmp_size_4:
; CHECK-UNALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: lw a0, 0(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: lw a1, 0(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a0, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a1, a1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: srli a0, a0, 32
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: srli a1, a1, 32
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: sltu a2, a0, a1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: sltu a0, a1, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: sub a0, a0, a2
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-UNALIGNED-RV32-V-LABEL: memcmp_size_4:
; CHECK-UNALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-UNALIGNED-RV32-V-NEXT: li a2, 4
; CHECK-UNALIGNED-RV32-V-NEXT: call memcmp
; CHECK-UNALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-UNALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV32-V-NEXT: ret
;
; CHECK-UNALIGNED-RV64-V-LABEL: memcmp_size_4:
; CHECK-UNALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-UNALIGNED-RV64-V-NEXT: li a2, 4
; CHECK-UNALIGNED-RV64-V-NEXT: call memcmp
; CHECK-UNALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-UNALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV64-V-NEXT: ret
entry:
  %memcmp = call signext i32 @memcmp(ptr %s1, ptr %s2, iXLen 4)
  ret i32 %memcmp
}

define i32 @memcmp_size_5(ptr %s1, ptr %s2) nounwind optsize {
; CHECK-ALIGNED-RV32-LABEL: memcmp_size_5:
; CHECK-ALIGNED-RV32: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-NEXT: li a2, 5
; CHECK-ALIGNED-RV32-NEXT: call memcmp
; CHECK-ALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-NEXT: ret
;
; CHECK-ALIGNED-RV64-LABEL: memcmp_size_5:
; CHECK-ALIGNED-RV64: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-NEXT: li a2, 5
; CHECK-ALIGNED-RV64-NEXT: call memcmp
; CHECK-ALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBB-LABEL: memcmp_size_5:
; CHECK-ALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBB-NEXT: li a2, 5
; CHECK-ALIGNED-RV32-ZBB-NEXT: call memcmp
; CHECK-ALIGNED-RV32-ZBB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBB-LABEL: memcmp_size_5:
; CHECK-ALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBB-NEXT: li a2, 5
; CHECK-ALIGNED-RV64-ZBB-NEXT: call memcmp
; CHECK-ALIGNED-RV64-ZBB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBKB-LABEL: memcmp_size_5:
; CHECK-ALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBKB-NEXT: li a2, 5
; CHECK-ALIGNED-RV32-ZBKB-NEXT: call memcmp
; CHECK-ALIGNED-RV32-ZBKB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBKB-LABEL: memcmp_size_5:
; CHECK-ALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBKB-NEXT: li a2, 5
; CHECK-ALIGNED-RV64-ZBKB-NEXT: call memcmp
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV32-V-LABEL: memcmp_size_5:
; CHECK-ALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-V-NEXT: li a2, 5
; CHECK-ALIGNED-RV32-V-NEXT: call memcmp
; CHECK-ALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-V-NEXT: ret
;
; CHECK-ALIGNED-RV64-V-LABEL: memcmp_size_5:
; CHECK-ALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-V-NEXT: li a2, 5
; CHECK-ALIGNED-RV64-V-NEXT: call memcmp
; CHECK-ALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-V-NEXT: ret
;
; CHECK-UNALIGNED-RV32-LABEL: memcmp_size_5:
; CHECK-UNALIGNED-RV32: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-UNALIGNED-RV32-NEXT: li a2, 5
; CHECK-UNALIGNED-RV32-NEXT: call memcmp
; CHECK-UNALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-UNALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV32-NEXT: ret
;
; CHECK-UNALIGNED-RV64-LABEL: memcmp_size_5:
; CHECK-UNALIGNED-RV64: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-UNALIGNED-RV64-NEXT: li a2, 5
; CHECK-UNALIGNED-RV64-NEXT: call memcmp
; CHECK-UNALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-UNALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV64-NEXT: ret
;
; CHECK-UNALIGNED-RV32-ZBB-LABEL: memcmp_size_5:
; CHECK-UNALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a2, 0(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a3, 0(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: bne a2, a3, .LBB26_2
; CHECK-UNALIGNED-RV32-ZBB-NEXT: # %bb.1: # %loadbb1
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lbu a0, 4(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lbu a1, 4(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: sub a0, a0, a1
; CHECK-UNALIGNED-RV32-ZBB-NEXT: ret
; CHECK-UNALIGNED-RV32-ZBB-NEXT: .LBB26_2: # %res_block
; CHECK-UNALIGNED-RV32-ZBB-NEXT: sltu a0, a2, a3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: neg a0, a0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: ori a0, a0, 1
; CHECK-UNALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBB-LABEL: memcmp_size_5:
; CHECK-UNALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBB-NEXT: lbu a2, 4(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: lwu a0, 0(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: lbu a3, 4(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: lwu a1, 0(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: slli a2, a2, 32
; CHECK-UNALIGNED-RV64-ZBB-NEXT: or a0, a0, a2
; CHECK-UNALIGNED-RV64-ZBB-NEXT: slli a3, a3, 32
; CHECK-UNALIGNED-RV64-ZBB-NEXT: or a1, a1, a3
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a0, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a1, a1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: sltu a2, a0, a1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: sltu a0, a1, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: sub a0, a0, a2
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-UNALIGNED-RV32-ZBKB-LABEL: memcmp_size_5:
; CHECK-UNALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a2, 0(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a3, 0(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: bne a2, a3, .LBB26_2
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: # %bb.1: # %loadbb1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lbu a0, 4(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lbu a1, 4(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: sub a0, a0, a1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: ret
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: .LBB26_2: # %res_block
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: sltu a0, a2, a3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: neg a0, a0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: ori a0, a0, 1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBKB-LABEL: memcmp_size_5:
; CHECK-UNALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: lw a2, 0(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: lbu a0, 4(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: lw a3, 0(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: lbu a1, 4(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: pack a0, a2, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: pack a1, a3, a1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a0, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a1, a1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: sltu a2, a0, a1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: sltu a0, a1, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: sub a0, a0, a2
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-UNALIGNED-RV32-V-LABEL: memcmp_size_5:
; CHECK-UNALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-UNALIGNED-RV32-V-NEXT: li a2, 5
; CHECK-UNALIGNED-RV32-V-NEXT: call memcmp
; CHECK-UNALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-UNALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV32-V-NEXT: ret
;
; CHECK-UNALIGNED-RV64-V-LABEL: memcmp_size_5:
; CHECK-UNALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-UNALIGNED-RV64-V-NEXT: li a2, 5
; CHECK-UNALIGNED-RV64-V-NEXT: call memcmp
; CHECK-UNALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-UNALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV64-V-NEXT: ret
entry:
  %memcmp = call signext i32 @memcmp(ptr %s1, ptr %s2, iXLen 5)
  ret i32 %memcmp
}
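
; Size 6: same pattern as size 5 with a halfword tail. RV32 compares a word
; then a halfword; RV64 merges lwu+lhu into one 6-byte value (a single pack
; with Zbkb).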
define i32 @memcmp_size_6(ptr %s1, ptr %s2) nounwind optsize {
; CHECK-ALIGNED-RV32-LABEL: memcmp_size_6:
; CHECK-ALIGNED-RV32: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-NEXT: li a2, 6
; CHECK-ALIGNED-RV32-NEXT: call memcmp
; CHECK-ALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-NEXT: ret
;
; CHECK-ALIGNED-RV64-LABEL: memcmp_size_6:
; CHECK-ALIGNED-RV64: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-NEXT: li a2, 6
; CHECK-ALIGNED-RV64-NEXT: call memcmp
; CHECK-ALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBB-LABEL: memcmp_size_6:
; CHECK-ALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBB-NEXT: li a2, 6
; CHECK-ALIGNED-RV32-ZBB-NEXT: call memcmp
; CHECK-ALIGNED-RV32-ZBB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBB-LABEL: memcmp_size_6:
; CHECK-ALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBB-NEXT: li a2, 6
; CHECK-ALIGNED-RV64-ZBB-NEXT: call memcmp
; CHECK-ALIGNED-RV64-ZBB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBKB-LABEL: memcmp_size_6:
; CHECK-ALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBKB-NEXT: li a2, 6
; CHECK-ALIGNED-RV32-ZBKB-NEXT: call memcmp
; CHECK-ALIGNED-RV32-ZBKB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBKB-LABEL: memcmp_size_6:
; CHECK-ALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBKB-NEXT: li a2, 6
; CHECK-ALIGNED-RV64-ZBKB-NEXT: call memcmp
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV32-V-LABEL: memcmp_size_6:
; CHECK-ALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-V-NEXT: li a2, 6
; CHECK-ALIGNED-RV32-V-NEXT: call memcmp
; CHECK-ALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-V-NEXT: ret
;
; CHECK-ALIGNED-RV64-V-LABEL: memcmp_size_6:
; CHECK-ALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-V-NEXT: li a2, 6
; CHECK-ALIGNED-RV64-V-NEXT: call memcmp
; CHECK-ALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-V-NEXT: ret
;
; CHECK-UNALIGNED-RV32-LABEL: memcmp_size_6:
; CHECK-UNALIGNED-RV32: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-UNALIGNED-RV32-NEXT: li a2, 6
; CHECK-UNALIGNED-RV32-NEXT: call memcmp
; CHECK-UNALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-UNALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV32-NEXT: ret
;
; CHECK-UNALIGNED-RV64-LABEL: memcmp_size_6:
; CHECK-UNALIGNED-RV64: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-UNALIGNED-RV64-NEXT: li a2, 6
; CHECK-UNALIGNED-RV64-NEXT: call memcmp
; CHECK-UNALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-UNALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV64-NEXT: ret
;
; CHECK-UNALIGNED-RV32-ZBB-LABEL: memcmp_size_6:
; CHECK-UNALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a2, 0(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a3, 0(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: bne a2, a3, .LBB27_3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: # %bb.1: # %loadbb1
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lh a0, 4(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lh a1, 4(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a2, a0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a3, a1
; CHECK-UNALIGNED-RV32-ZBB-NEXT: srli a2, a2, 16
; CHECK-UNALIGNED-RV32-ZBB-NEXT: srli a3, a3, 16
; CHECK-UNALIGNED-RV32-ZBB-NEXT: bne a2, a3, .LBB27_3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: # %bb.2:
; CHECK-UNALIGNED-RV32-ZBB-NEXT: li a0, 0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: ret
; CHECK-UNALIGNED-RV32-ZBB-NEXT: .LBB27_3: # %res_block
; CHECK-UNALIGNED-RV32-ZBB-NEXT: sltu a0, a2, a3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: neg a0, a0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: ori a0, a0, 1
; CHECK-UNALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBB-LABEL: memcmp_size_6:
; CHECK-UNALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBB-NEXT: lhu a2, 4(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: lwu a0, 0(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: lhu a3, 4(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: lwu a1, 0(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: slli a2, a2, 32
; CHECK-UNALIGNED-RV64-ZBB-NEXT: or a0, a0, a2
; CHECK-UNALIGNED-RV64-ZBB-NEXT: slli a3, a3, 32
; CHECK-UNALIGNED-RV64-ZBB-NEXT: or a1, a1, a3
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a0, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a1, a1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: sltu a2, a0, a1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: sltu a0, a1, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: sub a0, a0, a2
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-UNALIGNED-RV32-ZBKB-LABEL: memcmp_size_6:
; CHECK-UNALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a2, 0(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a3, 0(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: bne a2, a3, .LBB27_3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: # %bb.1: # %loadbb1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lh a0, 4(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lh a1, 4(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a2, a0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a3, a1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: srli a2, a2, 16
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: srli a3, a3, 16
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: bne a2, a3, .LBB27_3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: # %bb.2:
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: li a0, 0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: ret
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: .LBB27_3: # %res_block
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: sltu a0, a2, a3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: neg a0, a0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: ori a0, a0, 1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBKB-LABEL: memcmp_size_6:
; CHECK-UNALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: lw a2, 0(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: lhu a0, 4(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: lw a3, 0(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: lhu a1, 4(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: pack a0, a2, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: pack a1, a3, a1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a0, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a1, a1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: sltu a2, a0, a1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: sltu a0, a1, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: sub a0, a0, a2
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-UNALIGNED-RV32-V-LABEL: memcmp_size_6:
; CHECK-UNALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-UNALIGNED-RV32-V-NEXT: li a2, 6
; CHECK-UNALIGNED-RV32-V-NEXT: call memcmp
; CHECK-UNALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-UNALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV32-V-NEXT: ret
;
; CHECK-UNALIGNED-RV64-V-LABEL: memcmp_size_6:
; CHECK-UNALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-UNALIGNED-RV64-V-NEXT: li a2, 6
; CHECK-UNALIGNED-RV64-V-NEXT: call memcmp
; CHECK-UNALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-UNALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV64-V-NEXT: ret
entry:
  %memcmp = call signext i32 @memcmp(ptr %s1, ptr %s2, iXLen 6)
  ret i32 %memcmp
}
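
; Size 7: expanded as two overlapping word compares at offsets 0 and 3, on
; both RV32 and RV64 (RV64 shifts the rev8 result right by 32 to compare the
; 32-bit values).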
define i32 @memcmp_size_7(ptr %s1, ptr %s2) nounwind optsize {
; CHECK-ALIGNED-RV32-LABEL: memcmp_size_7:
; CHECK-ALIGNED-RV32: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-NEXT: li a2, 7
; CHECK-ALIGNED-RV32-NEXT: call memcmp
; CHECK-ALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-NEXT: ret
;
; CHECK-ALIGNED-RV64-LABEL: memcmp_size_7:
; CHECK-ALIGNED-RV64: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-NEXT: li a2, 7
; CHECK-ALIGNED-RV64-NEXT: call memcmp
; CHECK-ALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBB-LABEL: memcmp_size_7:
; CHECK-ALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBB-NEXT: li a2, 7
; CHECK-ALIGNED-RV32-ZBB-NEXT: call memcmp
; CHECK-ALIGNED-RV32-ZBB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBB-LABEL: memcmp_size_7:
; CHECK-ALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBB-NEXT: li a2, 7
; CHECK-ALIGNED-RV64-ZBB-NEXT: call memcmp
; CHECK-ALIGNED-RV64-ZBB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBKB-LABEL: memcmp_size_7:
; CHECK-ALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBKB-NEXT: li a2, 7
; CHECK-ALIGNED-RV32-ZBKB-NEXT: call memcmp
; CHECK-ALIGNED-RV32-ZBKB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBKB-LABEL: memcmp_size_7:
; CHECK-ALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBKB-NEXT: li a2, 7
; CHECK-ALIGNED-RV64-ZBKB-NEXT: call memcmp
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV32-V-LABEL: memcmp_size_7:
; CHECK-ALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-V-NEXT: li a2, 7
; CHECK-ALIGNED-RV32-V-NEXT: call memcmp
; CHECK-ALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-V-NEXT: ret
;
; CHECK-ALIGNED-RV64-V-LABEL: memcmp_size_7:
; CHECK-ALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-V-NEXT: li a2, 7
; CHECK-ALIGNED-RV64-V-NEXT: call memcmp
; CHECK-ALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-V-NEXT: ret
;
; CHECK-UNALIGNED-RV32-LABEL: memcmp_size_7:
; CHECK-UNALIGNED-RV32: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-UNALIGNED-RV32-NEXT: li a2, 7
; CHECK-UNALIGNED-RV32-NEXT: call memcmp
; CHECK-UNALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-UNALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV32-NEXT: ret
;
; CHECK-UNALIGNED-RV64-LABEL: memcmp_size_7:
; CHECK-UNALIGNED-RV64: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-UNALIGNED-RV64-NEXT: li a2, 7
; CHECK-UNALIGNED-RV64-NEXT: call memcmp
; CHECK-UNALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-UNALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV64-NEXT: ret
;
; CHECK-UNALIGNED-RV32-ZBB-LABEL: memcmp_size_7:
; CHECK-UNALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a2, 0(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a3, 0(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: bne a2, a3, .LBB28_3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: # %bb.1: # %loadbb1
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a0, 3(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a1, 3(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a2, a0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a3, a1
; CHECK-UNALIGNED-RV32-ZBB-NEXT: bne a2, a3, .LBB28_3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: # %bb.2:
; CHECK-UNALIGNED-RV32-ZBB-NEXT: li a0, 0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: ret
; CHECK-UNALIGNED-RV32-ZBB-NEXT: .LBB28_3: # %res_block
; CHECK-UNALIGNED-RV32-ZBB-NEXT: sltu a0, a2, a3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: neg a0, a0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: ori a0, a0, 1
; CHECK-UNALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBB-LABEL: memcmp_size_7:
; CHECK-UNALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBB-NEXT: lw a2, 0(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: lw a3, 0(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV64-ZBB-NEXT: srli a2, a2, 32
; CHECK-UNALIGNED-RV64-ZBB-NEXT: srli a3, a3, 32
; CHECK-UNALIGNED-RV64-ZBB-NEXT: bne a2, a3, .LBB28_3
; CHECK-UNALIGNED-RV64-ZBB-NEXT: # %bb.1: # %loadbb1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: lw a0, 3(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: lw a1, 3(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a2, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a3, a1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: srli a2, a2, 32
; CHECK-UNALIGNED-RV64-ZBB-NEXT: srli a3, a3, 32
; CHECK-UNALIGNED-RV64-ZBB-NEXT: bne a2, a3, .LBB28_3
; CHECK-UNALIGNED-RV64-ZBB-NEXT: # %bb.2:
; CHECK-UNALIGNED-RV64-ZBB-NEXT: li a0, 0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ret
; CHECK-UNALIGNED-RV64-ZBB-NEXT: .LBB28_3: # %res_block
; CHECK-UNALIGNED-RV64-ZBB-NEXT: sltu a0, a2, a3
; CHECK-UNALIGNED-RV64-ZBB-NEXT: neg a0, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ori a0, a0, 1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-UNALIGNED-RV32-ZBKB-LABEL: memcmp_size_7:
; CHECK-UNALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a2, 0(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a3, 0(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: bne a2, a3, .LBB28_3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: # %bb.1: # %loadbb1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a0, 3(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a1, 3(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a2, a0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a3, a1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: bne a2, a3, .LBB28_3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: # %bb.2:
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: li a0, 0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: ret
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: .LBB28_3: # %res_block
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: sltu a0, a2, a3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: neg a0, a0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: ori a0, a0, 1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBKB-LABEL: memcmp_size_7:
; CHECK-UNALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: lw a2, 0(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: lw a3, 0(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: srli a2, a2, 32
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: srli a3, a3, 32
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: bne a2, a3, .LBB28_3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: # %bb.1: # %loadbb1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: lw a0, 3(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: lw a1, 3(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a2, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a3, a1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: srli a2, a2, 32
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: srli a3, a3, 32
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: bne a2, a3, .LBB28_3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: # %bb.2:
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: li a0, 0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ret
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: .LBB28_3: # %res_block
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: sltu a0, a2, a3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: neg a0, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ori a0, a0, 1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-UNALIGNED-RV32-V-LABEL: memcmp_size_7:
; CHECK-UNALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-UNALIGNED-RV32-V-NEXT: li a2, 7
; CHECK-UNALIGNED-RV32-V-NEXT: call memcmp
; CHECK-UNALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-UNALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV32-V-NEXT: ret
;
; CHECK-UNALIGNED-RV64-V-LABEL: memcmp_size_7:
; CHECK-UNALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-UNALIGNED-RV64-V-NEXT: li a2, 7
; CHECK-UNALIGNED-RV64-V-NEXT: call memcmp
; CHECK-UNALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-UNALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV64-V-NEXT: ret
entry:
  %memcmp = call signext i32 @memcmp(ptr %s1, ptr %s2, iXLen 7)
  ret i32 %memcmp
}
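
; Size 8: two word compares on RV32; a single doubleword compare on RV64.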
define i32 @memcmp_size_8(ptr %s1, ptr %s2) nounwind optsize {
; CHECK-ALIGNED-RV32-LABEL: memcmp_size_8:
; CHECK-ALIGNED-RV32: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-NEXT: li a2, 8
; CHECK-ALIGNED-RV32-NEXT: call memcmp
; CHECK-ALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-NEXT: ret
;
; CHECK-ALIGNED-RV64-LABEL: memcmp_size_8:
; CHECK-ALIGNED-RV64: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-NEXT: li a2, 8
; CHECK-ALIGNED-RV64-NEXT: call memcmp
; CHECK-ALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBB-LABEL: memcmp_size_8:
; CHECK-ALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBB-NEXT: li a2, 8
; CHECK-ALIGNED-RV32-ZBB-NEXT: call memcmp
; CHECK-ALIGNED-RV32-ZBB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBB-LABEL: memcmp_size_8:
; CHECK-ALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBB-NEXT: li a2, 8
; CHECK-ALIGNED-RV64-ZBB-NEXT: call memcmp
; CHECK-ALIGNED-RV64-ZBB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBKB-LABEL: memcmp_size_8:
; CHECK-ALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBKB-NEXT: li a2, 8
; CHECK-ALIGNED-RV32-ZBKB-NEXT: call memcmp
; CHECK-ALIGNED-RV32-ZBKB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBKB-LABEL: memcmp_size_8:
; CHECK-ALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBKB-NEXT: li a2, 8
; CHECK-ALIGNED-RV64-ZBKB-NEXT: call memcmp
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV32-V-LABEL: memcmp_size_8:
; CHECK-ALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-V-NEXT: li a2, 8
; CHECK-ALIGNED-RV32-V-NEXT: call memcmp
; CHECK-ALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-V-NEXT: ret
;
; CHECK-ALIGNED-RV64-V-LABEL: memcmp_size_8:
; CHECK-ALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-V-NEXT: li a2, 8
; CHECK-ALIGNED-RV64-V-NEXT: call memcmp
; CHECK-ALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-V-NEXT: ret
;
; CHECK-UNALIGNED-RV32-LABEL: memcmp_size_8:
; CHECK-UNALIGNED-RV32: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-UNALIGNED-RV32-NEXT: li a2, 8
; CHECK-UNALIGNED-RV32-NEXT: call memcmp
; CHECK-UNALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-UNALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV32-NEXT: ret
;
; CHECK-UNALIGNED-RV64-LABEL: memcmp_size_8:
; CHECK-UNALIGNED-RV64: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-UNALIGNED-RV64-NEXT: li a2, 8
; CHECK-UNALIGNED-RV64-NEXT: call memcmp
; CHECK-UNALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-UNALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV64-NEXT: ret
;
; CHECK-UNALIGNED-RV32-ZBB-LABEL: memcmp_size_8:
; CHECK-UNALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a2, 0(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a3, 0(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: bne a2, a3, .LBB29_3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: # %bb.1: # %loadbb1
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a0, 4(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a1, 4(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a2, a0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a3, a1
; CHECK-UNALIGNED-RV32-ZBB-NEXT: bne a2, a3, .LBB29_3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: # %bb.2:
; CHECK-UNALIGNED-RV32-ZBB-NEXT: li a0, 0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: ret
; CHECK-UNALIGNED-RV32-ZBB-NEXT: .LBB29_3: # %res_block
; CHECK-UNALIGNED-RV32-ZBB-NEXT: sltu a0, a2, a3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: neg a0, a0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: ori a0, a0, 1
; CHECK-UNALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBB-LABEL: memcmp_size_8:
; CHECK-UNALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a0, 0(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a1, 0(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a0, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a1, a1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: sltu a2, a0, a1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: sltu a0, a1, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: sub a0, a0, a2
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-UNALIGNED-RV32-ZBKB-LABEL: memcmp_size_8:
; CHECK-UNALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a2, 0(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a3, 0(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: bne a2, a3, .LBB29_3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: # %bb.1: # %loadbb1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a0, 4(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a1, 4(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a2, a0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a3, a1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: bne a2, a3, .LBB29_3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: # %bb.2:
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: li a0, 0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: ret
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: .LBB29_3: # %res_block
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: sltu a0, a2, a3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: neg a0, a0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: ori a0, a0, 1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBKB-LABEL: memcmp_size_8:
; CHECK-UNALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a0, 0(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a1, 0(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a0, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a1, a1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: sltu a2, a0, a1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: sltu a0, a1, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: sub a0, a0, a2
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-UNALIGNED-RV32-V-LABEL: memcmp_size_8:
; CHECK-UNALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-UNALIGNED-RV32-V-NEXT: li a2, 8
; CHECK-UNALIGNED-RV32-V-NEXT: call memcmp
; CHECK-UNALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-UNALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV32-V-NEXT: ret
;
; CHECK-UNALIGNED-RV64-V-LABEL: memcmp_size_8:
; CHECK-UNALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-UNALIGNED-RV64-V-NEXT: li a2, 8
; CHECK-UNALIGNED-RV64-V-NEXT: call memcmp
; CHECK-UNALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-UNALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV64-V-NEXT: ret
entry:
  %memcmp = call signext i32 @memcmp(ptr %s1, ptr %s2, iXLen 8)
  ret i32 %memcmp
}
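
; Size 15: four word compares on RV32 (the last one overlapping at offset 11);
; two overlapping doubleword compares on RV64 (offsets 0 and 7).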
define i32 @memcmp_size_15(ptr %s1, ptr %s2) nounwind optsize {
; CHECK-ALIGNED-RV32-LABEL: memcmp_size_15:
; CHECK-ALIGNED-RV32: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-NEXT: li a2, 15
; CHECK-ALIGNED-RV32-NEXT: call memcmp
; CHECK-ALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-NEXT: ret
;
; CHECK-ALIGNED-RV64-LABEL: memcmp_size_15:
; CHECK-ALIGNED-RV64: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-NEXT: li a2, 15
; CHECK-ALIGNED-RV64-NEXT: call memcmp
; CHECK-ALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBB-LABEL: memcmp_size_15:
; CHECK-ALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBB-NEXT: li a2, 15
; CHECK-ALIGNED-RV32-ZBB-NEXT: call memcmp
; CHECK-ALIGNED-RV32-ZBB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBB-LABEL: memcmp_size_15:
; CHECK-ALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBB-NEXT: li a2, 15
; CHECK-ALIGNED-RV64-ZBB-NEXT: call memcmp
; CHECK-ALIGNED-RV64-ZBB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBKB-LABEL: memcmp_size_15:
; CHECK-ALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBKB-NEXT: li a2, 15
; CHECK-ALIGNED-RV32-ZBKB-NEXT: call memcmp
; CHECK-ALIGNED-RV32-ZBKB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBKB-LABEL: memcmp_size_15:
; CHECK-ALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBKB-NEXT: li a2, 15
; CHECK-ALIGNED-RV64-ZBKB-NEXT: call memcmp
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV32-V-LABEL: memcmp_size_15:
; CHECK-ALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-V-NEXT: li a2, 15
; CHECK-ALIGNED-RV32-V-NEXT: call memcmp
; CHECK-ALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-V-NEXT: ret
;
; CHECK-ALIGNED-RV64-V-LABEL: memcmp_size_15:
; CHECK-ALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-V-NEXT: li a2, 15
; CHECK-ALIGNED-RV64-V-NEXT: call memcmp
; CHECK-ALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-V-NEXT: ret
;
; CHECK-UNALIGNED-RV32-LABEL: memcmp_size_15:
; CHECK-UNALIGNED-RV32: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-UNALIGNED-RV32-NEXT: li a2, 15
; CHECK-UNALIGNED-RV32-NEXT: call memcmp
; CHECK-UNALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-UNALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV32-NEXT: ret
;
; CHECK-UNALIGNED-RV64-LABEL: memcmp_size_15:
; CHECK-UNALIGNED-RV64: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-UNALIGNED-RV64-NEXT: li a2, 15
; CHECK-UNALIGNED-RV64-NEXT: call memcmp
; CHECK-UNALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-UNALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV64-NEXT: ret
;
; CHECK-UNALIGNED-RV32-ZBB-LABEL: memcmp_size_15:
; CHECK-UNALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a2, 0(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a3, 0(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: bne a2, a3, .LBB30_5
; CHECK-UNALIGNED-RV32-ZBB-NEXT: # %bb.1: # %loadbb1
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a2, 4(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a3, 4(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: bne a2, a3, .LBB30_5
; CHECK-UNALIGNED-RV32-ZBB-NEXT: # %bb.2: # %loadbb2
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a2, 8(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a3, 8(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: bne a2, a3, .LBB30_5
; CHECK-UNALIGNED-RV32-ZBB-NEXT: # %bb.3: # %loadbb3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a0, 11(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a1, 11(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a2, a0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a3, a1
; CHECK-UNALIGNED-RV32-ZBB-NEXT: bne a2, a3, .LBB30_5
; CHECK-UNALIGNED-RV32-ZBB-NEXT: # %bb.4:
; CHECK-UNALIGNED-RV32-ZBB-NEXT: li a0, 0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: ret
; CHECK-UNALIGNED-RV32-ZBB-NEXT: .LBB30_5: # %res_block
; CHECK-UNALIGNED-RV32-ZBB-NEXT: sltu a0, a2, a3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: neg a0, a0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: ori a0, a0, 1
; CHECK-UNALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBB-LABEL: memcmp_size_15:
; CHECK-UNALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a2, 0(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a3, 0(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV64-ZBB-NEXT: bne a2, a3, .LBB30_3
; CHECK-UNALIGNED-RV64-ZBB-NEXT: # %bb.1: # %loadbb1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a0, 7(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a1, 7(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a2, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a3, a1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: bne a2, a3, .LBB30_3
; CHECK-UNALIGNED-RV64-ZBB-NEXT: # %bb.2:
; CHECK-UNALIGNED-RV64-ZBB-NEXT: li a0, 0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ret
; CHECK-UNALIGNED-RV64-ZBB-NEXT: .LBB30_3: # %res_block
; CHECK-UNALIGNED-RV64-ZBB-NEXT: sltu a0, a2, a3
; CHECK-UNALIGNED-RV64-ZBB-NEXT: neg a0, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ori a0, a0, 1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-UNALIGNED-RV32-ZBKB-LABEL: memcmp_size_15:
; CHECK-UNALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a2, 0(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a3, 0(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: bne a2, a3, .LBB30_5
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: # %bb.1: # %loadbb1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a2, 4(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a3, 4(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: bne a2, a3, .LBB30_5
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: # %bb.2: # %loadbb2
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a2, 8(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a3, 8(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: bne a2, a3, .LBB30_5
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: # %bb.3: # %loadbb3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a0, 11(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a1, 11(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a2, a0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a3, a1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: bne a2, a3, .LBB30_5
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: # %bb.4:
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: li a0, 0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: ret
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: .LBB30_5: # %res_block
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: sltu a0, a2, a3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: neg a0, a0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: ori a0, a0, 1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBKB-LABEL: memcmp_size_15:
; CHECK-UNALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a2, 0(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a3, 0(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: bne a2, a3, .LBB30_3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: # %bb.1: # %loadbb1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a0, 7(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a1, 7(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a2, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a3, a1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: bne a2, a3, .LBB30_3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: # %bb.2:
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: li a0, 0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ret
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: .LBB30_3: # %res_block
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: sltu a0, a2, a3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: neg a0, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ori a0, a0, 1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-UNALIGNED-RV32-V-LABEL: memcmp_size_15:
; CHECK-UNALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-UNALIGNED-RV32-V-NEXT: li a2, 15
; CHECK-UNALIGNED-RV32-V-NEXT: call memcmp
; CHECK-UNALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-UNALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV32-V-NEXT: ret
;
; CHECK-UNALIGNED-RV64-V-LABEL: memcmp_size_15:
; CHECK-UNALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-UNALIGNED-RV64-V-NEXT: li a2, 15
; CHECK-UNALIGNED-RV64-V-NEXT: call memcmp
; CHECK-UNALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-UNALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV64-V-NEXT: ret
entry:
  %memcmp = call signext i32 @memcmp(ptr %s1, ptr %s2, iXLen 15)
  ret i32 %memcmp
}
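
; Size 16: four non-overlapping word compares on RV32; two doubleword
; compares on RV64.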
define i32 @memcmp_size_16(ptr %s1, ptr %s2) nounwind optsize {
; CHECK-ALIGNED-RV32-LABEL: memcmp_size_16:
; CHECK-ALIGNED-RV32: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-NEXT: li a2, 16
; CHECK-ALIGNED-RV32-NEXT: call memcmp
; CHECK-ALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-NEXT: ret
;
; CHECK-ALIGNED-RV64-LABEL: memcmp_size_16:
; CHECK-ALIGNED-RV64: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-NEXT: li a2, 16
; CHECK-ALIGNED-RV64-NEXT: call memcmp
; CHECK-ALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBB-LABEL: memcmp_size_16:
; CHECK-ALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBB-NEXT: li a2, 16
; CHECK-ALIGNED-RV32-ZBB-NEXT: call memcmp
; CHECK-ALIGNED-RV32-ZBB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBB-LABEL: memcmp_size_16:
; CHECK-ALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBB-NEXT: li a2, 16
; CHECK-ALIGNED-RV64-ZBB-NEXT: call memcmp
; CHECK-ALIGNED-RV64-ZBB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBKB-LABEL: memcmp_size_16:
; CHECK-ALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBKB-NEXT: li a2, 16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: call memcmp
; CHECK-ALIGNED-RV32-ZBKB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBKB-LABEL: memcmp_size_16:
; CHECK-ALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBKB-NEXT: li a2, 16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: call memcmp
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV32-V-LABEL: memcmp_size_16:
; CHECK-ALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-V-NEXT: li a2, 16
; CHECK-ALIGNED-RV32-V-NEXT: call memcmp
; CHECK-ALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-V-NEXT: ret
;
; CHECK-ALIGNED-RV64-V-LABEL: memcmp_size_16:
; CHECK-ALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-V-NEXT: li a2, 16
; CHECK-ALIGNED-RV64-V-NEXT: call memcmp
; CHECK-ALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-V-NEXT: ret
;
; CHECK-UNALIGNED-RV32-LABEL: memcmp_size_16:
; CHECK-UNALIGNED-RV32: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-UNALIGNED-RV32-NEXT: li a2, 16
; CHECK-UNALIGNED-RV32-NEXT: call memcmp
; CHECK-UNALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-UNALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV32-NEXT: ret
;
; CHECK-UNALIGNED-RV64-LABEL: memcmp_size_16:
; CHECK-UNALIGNED-RV64: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-UNALIGNED-RV64-NEXT: li a2, 16
; CHECK-UNALIGNED-RV64-NEXT: call memcmp
; CHECK-UNALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-UNALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV64-NEXT: ret
;
; CHECK-UNALIGNED-RV32-ZBB-LABEL: memcmp_size_16:
; CHECK-UNALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a2, 0(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a3, 0(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: bne a2, a3, .LBB31_5
; CHECK-UNALIGNED-RV32-ZBB-NEXT: # %bb.1: # %loadbb1
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a2, 4(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a3, 4(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: bne a2, a3, .LBB31_5
; CHECK-UNALIGNED-RV32-ZBB-NEXT: # %bb.2: # %loadbb2
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a2, 8(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a3, 8(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: bne a2, a3, .LBB31_5
; CHECK-UNALIGNED-RV32-ZBB-NEXT: # %bb.3: # %loadbb3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a0, 12(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a1, 12(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a2, a0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a3, a1
; CHECK-UNALIGNED-RV32-ZBB-NEXT: bne a2, a3, .LBB31_5
; CHECK-UNALIGNED-RV32-ZBB-NEXT: # %bb.4:
; CHECK-UNALIGNED-RV32-ZBB-NEXT: li a0, 0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: ret
; CHECK-UNALIGNED-RV32-ZBB-NEXT: .LBB31_5: # %res_block
; CHECK-UNALIGNED-RV32-ZBB-NEXT: sltu a0, a2, a3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: neg a0, a0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: ori a0, a0, 1
; CHECK-UNALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBB-LABEL: memcmp_size_16:
; CHECK-UNALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a2, 0(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a3, 0(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV64-ZBB-NEXT: bne a2, a3, .LBB31_3
; CHECK-UNALIGNED-RV64-ZBB-NEXT: # %bb.1: # %loadbb1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a0, 8(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a1, 8(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a2, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a3, a1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: bne a2, a3, .LBB31_3
; CHECK-UNALIGNED-RV64-ZBB-NEXT: # %bb.2:
; CHECK-UNALIGNED-RV64-ZBB-NEXT: li a0, 0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ret
; CHECK-UNALIGNED-RV64-ZBB-NEXT: .LBB31_3: # %res_block
; CHECK-UNALIGNED-RV64-ZBB-NEXT: sltu a0, a2, a3
; CHECK-UNALIGNED-RV64-ZBB-NEXT: neg a0, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ori a0, a0, 1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-UNALIGNED-RV32-ZBKB-LABEL: memcmp_size_16:
; CHECK-UNALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a2, 0(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a3, 0(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: bne a2, a3, .LBB31_5
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: # %bb.1: # %loadbb1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a2, 4(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a3, 4(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: bne a2, a3, .LBB31_5
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: # %bb.2: # %loadbb2
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a2, 8(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a3, 8(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: bne a2, a3, .LBB31_5
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: # %bb.3: # %loadbb3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a0, 12(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a1, 12(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a2, a0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a3, a1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: bne a2, a3, .LBB31_5
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: # %bb.4:
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: li a0, 0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: ret
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: .LBB31_5: # %res_block
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: sltu a0, a2, a3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: neg a0, a0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: ori a0, a0, 1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBKB-LABEL: memcmp_size_16:
; CHECK-UNALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a2, 0(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a3, 0(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: bne a2, a3, .LBB31_3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: # %bb.1: # %loadbb1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a0, 8(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a1, 8(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a2, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a3, a1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: bne a2, a3, .LBB31_3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: # %bb.2:
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: li a0, 0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ret
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: .LBB31_3: # %res_block
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: sltu a0, a2, a3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: neg a0, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ori a0, a0, 1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-UNALIGNED-RV32-V-LABEL: memcmp_size_16:
; CHECK-UNALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-UNALIGNED-RV32-V-NEXT: li a2, 16
; CHECK-UNALIGNED-RV32-V-NEXT: call memcmp
; CHECK-UNALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-UNALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV32-V-NEXT: ret
;
; CHECK-UNALIGNED-RV64-V-LABEL: memcmp_size_16:
; CHECK-UNALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-UNALIGNED-RV64-V-NEXT: li a2, 16
; CHECK-UNALIGNED-RV64-V-NEXT: call memcmp
; CHECK-UNALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-UNALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV64-V-NEXT: ret
entry:
  %memcmp = call signext i32 @memcmp(ptr %s1, ptr %s2, iXLen 16)
  ret i32 %memcmp
}
|
|
|
|
define i32 @memcmp_size_31(ptr %s1, ptr %s2) nounwind optsize {
|
|
; CHECK-RV32-LABEL: memcmp_size_31:
|
|
; CHECK-RV32: # %bb.0: # %entry
|
|
; CHECK-RV32-NEXT: addi sp, sp, -16
|
|
; CHECK-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
|
|
; CHECK-RV32-NEXT: li a2, 31
|
|
; CHECK-RV32-NEXT: call memcmp
|
|
; CHECK-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
|
|
; CHECK-RV32-NEXT: addi sp, sp, 16
|
|
; CHECK-RV32-NEXT: ret
|
|
;
|
|
; CHECK-ALIGNED-RV64-LABEL: memcmp_size_31:
|
|
; CHECK-ALIGNED-RV64: # %bb.0: # %entry
|
|
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, -16
|
|
; CHECK-ALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
|
|
; CHECK-ALIGNED-RV64-NEXT: li a2, 31
|
|
; CHECK-ALIGNED-RV64-NEXT: call memcmp
|
|
; CHECK-ALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
|
|
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, 16
|
|
; CHECK-ALIGNED-RV64-NEXT: ret
|
|
;
|
|
; CHECK-ALIGNED-RV64-ZBB-LABEL: memcmp_size_31:
|
|
; CHECK-ALIGNED-RV64-ZBB: # %bb.0: # %entry
|
|
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, -16
|
|
; CHECK-ALIGNED-RV64-ZBB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
|
|
; CHECK-ALIGNED-RV64-ZBB-NEXT: li a2, 31
|
|
; CHECK-ALIGNED-RV64-ZBB-NEXT: call memcmp
|
|
; CHECK-ALIGNED-RV64-ZBB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
|
|
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, 16
|
|
; CHECK-ALIGNED-RV64-ZBB-NEXT: ret
|
|
;
|
|
; CHECK-ALIGNED-RV64-ZBKB-LABEL: memcmp_size_31:
|
|
; CHECK-ALIGNED-RV64-ZBKB: # %bb.0: # %entry
|
|
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, -16
|
|
; CHECK-ALIGNED-RV64-ZBKB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
|
|
; CHECK-ALIGNED-RV64-ZBKB-NEXT: li a2, 31
|
|
; CHECK-ALIGNED-RV64-ZBKB-NEXT: call memcmp
|
|
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
|
|
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, 16
|
|
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ret
|
|
;
|
|
; CHECK-ALIGNED-RV64-V-LABEL: memcmp_size_31:
|
|
; CHECK-ALIGNED-RV64-V: # %bb.0: # %entry
|
|
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, -16
|
|
; CHECK-ALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
|
|
; CHECK-ALIGNED-RV64-V-NEXT: li a2, 31
|
|
; CHECK-ALIGNED-RV64-V-NEXT: call memcmp
|
|
; CHECK-ALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
|
|
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, 16
|
|
; CHECK-ALIGNED-RV64-V-NEXT: ret
|
|
;
|
|
; CHECK-UNALIGNED-RV64-LABEL: memcmp_size_31:
|
|
; CHECK-UNALIGNED-RV64: # %bb.0: # %entry
|
|
; CHECK-UNALIGNED-RV64-NEXT: addi sp, sp, -16
|
|
; CHECK-UNALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
|
|
; CHECK-UNALIGNED-RV64-NEXT: li a2, 31
|
|
; CHECK-UNALIGNED-RV64-NEXT: call memcmp
|
|
; CHECK-UNALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
|
|
; CHECK-UNALIGNED-RV64-NEXT: addi sp, sp, 16
|
|
; CHECK-UNALIGNED-RV64-NEXT: ret
|
|
;
|
|
; CHECK-UNALIGNED-RV64-ZBB-LABEL: memcmp_size_31:
|
|
; CHECK-UNALIGNED-RV64-ZBB: # %bb.0: # %entry
|
|
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a2, 0(a0)
|
|
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a3, 0(a1)
|
|
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a2, a2
|
|
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a3, a3
|
|
; CHECK-UNALIGNED-RV64-ZBB-NEXT: bne a2, a3, .LBB32_5
|
|
; CHECK-UNALIGNED-RV64-ZBB-NEXT: # %bb.1: # %loadbb1
|
|
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a2, 8(a0)
|
|
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a3, 8(a1)
|
|
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a2, a2
|
|
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a3, a3
|
|
; CHECK-UNALIGNED-RV64-ZBB-NEXT: bne a2, a3, .LBB32_5
|
|
; CHECK-UNALIGNED-RV64-ZBB-NEXT: # %bb.2: # %loadbb2
|
|
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a2, 16(a0)
|
|
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a3, 16(a1)
|
|
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a2, a2
|
|
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a3, a3
|
|
; CHECK-UNALIGNED-RV64-ZBB-NEXT: bne a2, a3, .LBB32_5
|
|
; CHECK-UNALIGNED-RV64-ZBB-NEXT: # %bb.3: # %loadbb3
|
|
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a0, 23(a0)
|
|
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a1, 23(a1)
|
|
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a2, a0
|
|
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a3, a1
|
|
; CHECK-UNALIGNED-RV64-ZBB-NEXT: bne a2, a3, .LBB32_5
|
|
; CHECK-UNALIGNED-RV64-ZBB-NEXT: # %bb.4:
|
|
; CHECK-UNALIGNED-RV64-ZBB-NEXT: li a0, 0
|
|
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ret
|
|
; CHECK-UNALIGNED-RV64-ZBB-NEXT: .LBB32_5: # %res_block
|
|
; CHECK-UNALIGNED-RV64-ZBB-NEXT: sltu a0, a2, a3
|
|
; CHECK-UNALIGNED-RV64-ZBB-NEXT: neg a0, a0
|
|
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ori a0, a0, 1
|
|
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ret
|
|
;
|
|
; CHECK-UNALIGNED-RV64-ZBKB-LABEL: memcmp_size_31:
|
|
; CHECK-UNALIGNED-RV64-ZBKB: # %bb.0: # %entry
|
|
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a2, 0(a0)
|
|
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a3, 0(a1)
|
|
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a2, a2
|
|
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a3, a3
|
|
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: bne a2, a3, .LBB32_5
|
|
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: # %bb.1: # %loadbb1
|
|
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a2, 8(a0)
|
|
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a3, 8(a1)
|
|
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a2, a2
|
|
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a3, a3
|
|
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: bne a2, a3, .LBB32_5
|
|
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: # %bb.2: # %loadbb2
|
|
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a2, 16(a0)
|
|
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a3, 16(a1)
|
|
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a2, a2
|
|
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a3, a3
|
|
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: bne a2, a3, .LBB32_5
|
|
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: # %bb.3: # %loadbb3
|
|
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a0, 23(a0)
|
|
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a1, 23(a1)
|
|
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a2, a0
|
|
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a3, a1
|
|
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: bne a2, a3, .LBB32_5
|
|
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: # %bb.4:
|
|
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: li a0, 0
|
|
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ret
|
|
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: .LBB32_5: # %res_block
|
|
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: sltu a0, a2, a3
|
|
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: neg a0, a0
|
|
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ori a0, a0, 1
|
|
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ret
|
|
;
|
|
; CHECK-UNALIGNED-RV64-V-LABEL: memcmp_size_31:
|
|
; CHECK-UNALIGNED-RV64-V: # %bb.0: # %entry
|
|
; CHECK-UNALIGNED-RV64-V-NEXT: addi sp, sp, -16
|
|
; CHECK-UNALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
|
|
; CHECK-UNALIGNED-RV64-V-NEXT: li a2, 31
|
|
; CHECK-UNALIGNED-RV64-V-NEXT: call memcmp
|
|
; CHECK-UNALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
|
|
; CHECK-UNALIGNED-RV64-V-NEXT: addi sp, sp, 16
|
|
; CHECK-UNALIGNED-RV64-V-NEXT: ret
|
|
entry:
|
|
%memcmp = call signext i32 @memcmp(ptr %s1, ptr %s2, iXLen 31)
|
|
ret i32 %memcmp
|
|
}
|
|
|
|
define i32 @memcmp_size_32(ptr %s1, ptr %s2) nounwind optsize {
|
|
; CHECK-RV32-LABEL: memcmp_size_32:
|
|
; CHECK-RV32: # %bb.0: # %entry
|
|
; CHECK-RV32-NEXT: addi sp, sp, -16
|
|
; CHECK-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
|
|
; CHECK-RV32-NEXT: li a2, 32
|
|
; CHECK-RV32-NEXT: call memcmp
|
|
; CHECK-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
|
|
; CHECK-RV32-NEXT: addi sp, sp, 16
|
|
; CHECK-RV32-NEXT: ret
|
|
;
|
|
; CHECK-ALIGNED-RV64-LABEL: memcmp_size_32:
|
|
; CHECK-ALIGNED-RV64: # %bb.0: # %entry
|
|
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, -16
|
|
; CHECK-ALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
|
|
; CHECK-ALIGNED-RV64-NEXT: li a2, 32
|
|
; CHECK-ALIGNED-RV64-NEXT: call memcmp
|
|
; CHECK-ALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
|
|
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, 16
|
|
; CHECK-ALIGNED-RV64-NEXT: ret
|
|
;
|
|
; CHECK-ALIGNED-RV64-ZBB-LABEL: memcmp_size_32:
|
|
; CHECK-ALIGNED-RV64-ZBB: # %bb.0: # %entry
|
|
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, -16
|
|
; CHECK-ALIGNED-RV64-ZBB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
|
|
; CHECK-ALIGNED-RV64-ZBB-NEXT: li a2, 32
|
|
; CHECK-ALIGNED-RV64-ZBB-NEXT: call memcmp
|
|
; CHECK-ALIGNED-RV64-ZBB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
|
|
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, 16
|
|
; CHECK-ALIGNED-RV64-ZBB-NEXT: ret
|
|
;
|
|
; CHECK-ALIGNED-RV64-ZBKB-LABEL: memcmp_size_32:
|
|
; CHECK-ALIGNED-RV64-ZBKB: # %bb.0: # %entry
|
|
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, -16
|
|
; CHECK-ALIGNED-RV64-ZBKB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
|
|
; CHECK-ALIGNED-RV64-ZBKB-NEXT: li a2, 32
|
|
; CHECK-ALIGNED-RV64-ZBKB-NEXT: call memcmp
|
|
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
|
|
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, 16
|
|
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ret
|
|
;
|
|
; CHECK-ALIGNED-RV64-V-LABEL: memcmp_size_32:
|
|
; CHECK-ALIGNED-RV64-V: # %bb.0: # %entry
|
|
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, -16
|
|
; CHECK-ALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
|
|
; CHECK-ALIGNED-RV64-V-NEXT: li a2, 32
|
|
; CHECK-ALIGNED-RV64-V-NEXT: call memcmp
|
|
; CHECK-ALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
|
|
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, 16
|
|
; CHECK-ALIGNED-RV64-V-NEXT: ret
|
|
;
|
|
; CHECK-UNALIGNED-RV64-LABEL: memcmp_size_32:
|
|
; CHECK-UNALIGNED-RV64: # %bb.0: # %entry
|
|
; CHECK-UNALIGNED-RV64-NEXT: addi sp, sp, -16
|
|
; CHECK-UNALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
|
|
; CHECK-UNALIGNED-RV64-NEXT: li a2, 32
|
|
; CHECK-UNALIGNED-RV64-NEXT: call memcmp
|
|
; CHECK-UNALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
|
|
; CHECK-UNALIGNED-RV64-NEXT: addi sp, sp, 16
|
|
; CHECK-UNALIGNED-RV64-NEXT: ret
|
|
;
|
|
; CHECK-UNALIGNED-RV64-ZBB-LABEL: memcmp_size_32:
|
|
; CHECK-UNALIGNED-RV64-ZBB: # %bb.0: # %entry
|
|
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a2, 0(a0)
|
|
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a3, 0(a1)
|
|
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a2, a2
|
|
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a3, a3
|
|
; CHECK-UNALIGNED-RV64-ZBB-NEXT: bne a2, a3, .LBB33_5
|
|
; CHECK-UNALIGNED-RV64-ZBB-NEXT: # %bb.1: # %loadbb1
|
|
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a2, 8(a0)
|
|
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a3, 8(a1)
|
|
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a2, a2
|
|
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a3, a3
|
|
; CHECK-UNALIGNED-RV64-ZBB-NEXT: bne a2, a3, .LBB33_5
|
|
; CHECK-UNALIGNED-RV64-ZBB-NEXT: # %bb.2: # %loadbb2
|
|
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a2, 16(a0)
|
|
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a3, 16(a1)
|
|
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a2, a2
|
|
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a3, a3
|
|
; CHECK-UNALIGNED-RV64-ZBB-NEXT: bne a2, a3, .LBB33_5
|
|
; CHECK-UNALIGNED-RV64-ZBB-NEXT: # %bb.3: # %loadbb3
|
|
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a0, 24(a0)
|
|
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a1, 24(a1)
|
|
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a2, a0
|
|
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a3, a1
|
|
; CHECK-UNALIGNED-RV64-ZBB-NEXT: bne a2, a3, .LBB33_5
|
|
; CHECK-UNALIGNED-RV64-ZBB-NEXT: # %bb.4:
|
|
; CHECK-UNALIGNED-RV64-ZBB-NEXT: li a0, 0
|
|
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ret
|
|
; CHECK-UNALIGNED-RV64-ZBB-NEXT: .LBB33_5: # %res_block
|
|
; CHECK-UNALIGNED-RV64-ZBB-NEXT: sltu a0, a2, a3
|
|
; CHECK-UNALIGNED-RV64-ZBB-NEXT: neg a0, a0
|
|
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ori a0, a0, 1
|
|
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ret
|
|
;
|
|
; CHECK-UNALIGNED-RV64-ZBKB-LABEL: memcmp_size_32:
|
|
; CHECK-UNALIGNED-RV64-ZBKB: # %bb.0: # %entry
|
|
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a2, 0(a0)
|
|
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a3, 0(a1)
|
|
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a2, a2
|
|
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a3, a3
|
|
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: bne a2, a3, .LBB33_5
|
|
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: # %bb.1: # %loadbb1
|
|
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a2, 8(a0)
|
|
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a3, 8(a1)
|
|
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a2, a2
|
|
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a3, a3
|
|
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: bne a2, a3, .LBB33_5
|
|
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: # %bb.2: # %loadbb2
|
|
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a2, 16(a0)
|
|
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a3, 16(a1)
|
|
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a2, a2
|
|
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a3, a3
|
|
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: bne a2, a3, .LBB33_5
|
|
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: # %bb.3: # %loadbb3
|
|
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a0, 24(a0)
|
|
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a1, 24(a1)
|
|
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a2, a0
|
|
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a3, a1
|
|
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: bne a2, a3, .LBB33_5
|
|
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: # %bb.4:
|
|
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: li a0, 0
|
|
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ret
|
|
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: .LBB33_5: # %res_block
|
|
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: sltu a0, a2, a3
|
|
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: neg a0, a0
|
|
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ori a0, a0, 1
|
|
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ret
|
|
;
|
|
; CHECK-UNALIGNED-RV64-V-LABEL: memcmp_size_32:
|
|
; CHECK-UNALIGNED-RV64-V: # %bb.0: # %entry
|
|
; CHECK-UNALIGNED-RV64-V-NEXT: addi sp, sp, -16
|
|
; CHECK-UNALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
|
|
; CHECK-UNALIGNED-RV64-V-NEXT: li a2, 32
|
|
; CHECK-UNALIGNED-RV64-V-NEXT: call memcmp
|
|
; CHECK-UNALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
|
|
; CHECK-UNALIGNED-RV64-V-NEXT: addi sp, sp, 16
|
|
; CHECK-UNALIGNED-RV64-V-NEXT: ret
|
|
entry:
|
|
%memcmp = call signext i32 @memcmp(ptr %s1, ptr %s2, iXLen 32)
|
|
ret i32 %memcmp
|
|
}
|
|
|
|
define i32 @memcmp_size_63(ptr %s1, ptr %s2) nounwind optsize {
|
|
; CHECK-RV32-LABEL: memcmp_size_63:
|
|
; CHECK-RV32: # %bb.0: # %entry
|
|
; CHECK-RV32-NEXT: addi sp, sp, -16
|
|
; CHECK-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
|
|
; CHECK-RV32-NEXT: li a2, 63
|
|
; CHECK-RV32-NEXT: call memcmp
|
|
; CHECK-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
|
|
; CHECK-RV32-NEXT: addi sp, sp, 16
|
|
; CHECK-RV32-NEXT: ret
|
|
;
|
|
; CHECK-RV64-LABEL: memcmp_size_63:
|
|
; CHECK-RV64: # %bb.0: # %entry
|
|
; CHECK-RV64-NEXT: addi sp, sp, -16
|
|
; CHECK-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
|
|
; CHECK-RV64-NEXT: li a2, 63
|
|
; CHECK-RV64-NEXT: call memcmp
|
|
; CHECK-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
|
|
; CHECK-RV64-NEXT: addi sp, sp, 16
|
|
; CHECK-RV64-NEXT: ret
|
|
entry:
|
|
%memcmp = call signext i32 @memcmp(ptr %s1, ptr %s2, iXLen 63)
|
|
ret i32 %memcmp
|
|
}
|
|
|
|
define i32 @memcmp_size_64(ptr %s1, ptr %s2) nounwind optsize {
|
|
; CHECK-RV32-LABEL: memcmp_size_64:
|
|
; CHECK-RV32: # %bb.0: # %entry
|
|
; CHECK-RV32-NEXT: addi sp, sp, -16
|
|
; CHECK-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
|
|
; CHECK-RV32-NEXT: li a2, 64
|
|
; CHECK-RV32-NEXT: call memcmp
|
|
; CHECK-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
|
|
; CHECK-RV32-NEXT: addi sp, sp, 16
|
|
; CHECK-RV32-NEXT: ret
|
|
;
|
|
; CHECK-RV64-LABEL: memcmp_size_64:
|
|
; CHECK-RV64: # %bb.0: # %entry
|
|
; CHECK-RV64-NEXT: addi sp, sp, -16
|
|
; CHECK-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
|
|
; CHECK-RV64-NEXT: li a2, 64
|
|
; CHECK-RV64-NEXT: call memcmp
|
|
; CHECK-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
|
|
; CHECK-RV64-NEXT: addi sp, sp, 16
|
|
; CHECK-RV64-NEXT: ret
|
|
entry:
|
|
%memcmp = call signext i32 @memcmp(ptr %s1, ptr %s2, iXLen 64)
|
|
ret i32 %memcmp
|
|
}
|
|
|
|
define i32 @memcmp_size_127(ptr %s1, ptr %s2) nounwind optsize {
|
|
; CHECK-RV32-LABEL: memcmp_size_127:
|
|
; CHECK-RV32: # %bb.0: # %entry
|
|
; CHECK-RV32-NEXT: addi sp, sp, -16
|
|
; CHECK-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
|
|
; CHECK-RV32-NEXT: li a2, 127
|
|
; CHECK-RV32-NEXT: call memcmp
|
|
; CHECK-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
|
|
; CHECK-RV32-NEXT: addi sp, sp, 16
|
|
; CHECK-RV32-NEXT: ret
|
|
;
|
|
; CHECK-RV64-LABEL: memcmp_size_127:
|
|
; CHECK-RV64: # %bb.0: # %entry
|
|
; CHECK-RV64-NEXT: addi sp, sp, -16
|
|
; CHECK-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
|
|
; CHECK-RV64-NEXT: li a2, 127
|
|
; CHECK-RV64-NEXT: call memcmp
|
|
; CHECK-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
|
|
; CHECK-RV64-NEXT: addi sp, sp, 16
|
|
; CHECK-RV64-NEXT: ret
|
|
entry:
|
|
%memcmp = call signext i32 @memcmp(ptr %s1, ptr %s2, iXLen 127)
|
|
ret i32 %memcmp
|
|
}
|
|
|
|
define i32 @memcmp_size_128(ptr %s1, ptr %s2) nounwind optsize {
|
|
; CHECK-RV32-LABEL: memcmp_size_128:
|
|
; CHECK-RV32: # %bb.0: # %entry
|
|
; CHECK-RV32-NEXT: addi sp, sp, -16
|
|
; CHECK-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
|
|
; CHECK-RV32-NEXT: li a2, 128
|
|
; CHECK-RV32-NEXT: call memcmp
|
|
; CHECK-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
|
|
; CHECK-RV32-NEXT: addi sp, sp, 16
|
|
; CHECK-RV32-NEXT: ret
|
|
;
|
|
; CHECK-RV64-LABEL: memcmp_size_128:
|
|
; CHECK-RV64: # %bb.0: # %entry
|
|
; CHECK-RV64-NEXT: addi sp, sp, -16
|
|
; CHECK-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
|
|
; CHECK-RV64-NEXT: li a2, 128
|
|
; CHECK-RV64-NEXT: call memcmp
|
|
; CHECK-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
|
|
; CHECK-RV64-NEXT: addi sp, sp, 16
|
|
; CHECK-RV64-NEXT: ret
|
|
entry:
|
|
%memcmp = call signext i32 @memcmp(ptr %s1, ptr %s2, iXLen 128)
|
|
ret i32 %memcmp
|
|
}
|
|
|
|
define i32 @memcmp_size_runtime(ptr %s1, ptr %s2, iXLen %len) nounwind optsize {
|
|
; CHECK-RV32-LABEL: memcmp_size_runtime:
|
|
; CHECK-RV32: # %bb.0: # %entry
|
|
; CHECK-RV32-NEXT: addi sp, sp, -16
|
|
; CHECK-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
|
|
; CHECK-RV32-NEXT: call memcmp
|
|
; CHECK-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
|
|
; CHECK-RV32-NEXT: addi sp, sp, 16
|
|
; CHECK-RV32-NEXT: ret
|
|
;
|
|
; CHECK-RV64-LABEL: memcmp_size_runtime:
|
|
; CHECK-RV64: # %bb.0: # %entry
|
|
; CHECK-RV64-NEXT: addi sp, sp, -16
|
|
; CHECK-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
|
|
; CHECK-RV64-NEXT: call memcmp
|
|
; CHECK-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
|
|
; CHECK-RV64-NEXT: addi sp, sp, 16
|
|
; CHECK-RV64-NEXT: ret
|
|
entry:
|
|
%memcmp = call signext i32 @memcmp(ptr %s1, ptr %s2, iXLen %len)
|
|
ret i32 %memcmp
|
|
}
|
|
|
|
define i1 @memcmp_eq_zero(ptr %s1, ptr %s2) nounwind optsize {
|
|
; CHECK-ALIGNED-RV32-LABEL: memcmp_eq_zero:
|
|
; CHECK-ALIGNED-RV32: # %bb.0: # %entry
|
|
; CHECK-ALIGNED-RV32-NEXT: lbu a2, 1(a1)
|
|
; CHECK-ALIGNED-RV32-NEXT: lbu a3, 2(a1)
|
|
; CHECK-ALIGNED-RV32-NEXT: lbu a4, 3(a1)
|
|
; CHECK-ALIGNED-RV32-NEXT: lbu a1, 0(a1)
|
|
; CHECK-ALIGNED-RV32-NEXT: slli a2, a2, 8
|
|
; CHECK-ALIGNED-RV32-NEXT: slli a3, a3, 16
|
|
; CHECK-ALIGNED-RV32-NEXT: slli a4, a4, 24
|
|
; CHECK-ALIGNED-RV32-NEXT: or a1, a2, a1
|
|
; CHECK-ALIGNED-RV32-NEXT: or a3, a4, a3
|
|
; CHECK-ALIGNED-RV32-NEXT: lbu a2, 1(a0)
|
|
; CHECK-ALIGNED-RV32-NEXT: lbu a4, 0(a0)
|
|
; CHECK-ALIGNED-RV32-NEXT: lbu a5, 2(a0)
|
|
; CHECK-ALIGNED-RV32-NEXT: lbu a0, 3(a0)
|
|
; CHECK-ALIGNED-RV32-NEXT: slli a2, a2, 8
|
|
; CHECK-ALIGNED-RV32-NEXT: or a2, a2, a4
|
|
; CHECK-ALIGNED-RV32-NEXT: slli a5, a5, 16
|
|
; CHECK-ALIGNED-RV32-NEXT: slli a0, a0, 24
|
|
; CHECK-ALIGNED-RV32-NEXT: or a0, a0, a5
|
|
; CHECK-ALIGNED-RV32-NEXT: or a1, a3, a1
|
|
; CHECK-ALIGNED-RV32-NEXT: or a0, a0, a2
|
|
; CHECK-ALIGNED-RV32-NEXT: xor a0, a0, a1
|
|
; CHECK-ALIGNED-RV32-NEXT: seqz a0, a0
|
|
; CHECK-ALIGNED-RV32-NEXT: ret
|
|
;
|
|
; CHECK-ALIGNED-RV64-LABEL: memcmp_eq_zero:
|
|
; CHECK-ALIGNED-RV64: # %bb.0: # %entry
|
|
; CHECK-ALIGNED-RV64-NEXT: lbu a2, 1(a1)
|
|
; CHECK-ALIGNED-RV64-NEXT: lbu a3, 2(a1)
|
|
; CHECK-ALIGNED-RV64-NEXT: lb a4, 3(a1)
|
|
; CHECK-ALIGNED-RV64-NEXT: lbu a1, 0(a1)
|
|
; CHECK-ALIGNED-RV64-NEXT: slli a2, a2, 8
|
|
; CHECK-ALIGNED-RV64-NEXT: slli a3, a3, 16
|
|
; CHECK-ALIGNED-RV64-NEXT: slli a4, a4, 24
|
|
; CHECK-ALIGNED-RV64-NEXT: or a1, a2, a1
|
|
; CHECK-ALIGNED-RV64-NEXT: or a3, a4, a3
|
|
; CHECK-ALIGNED-RV64-NEXT: lbu a2, 1(a0)
|
|
; CHECK-ALIGNED-RV64-NEXT: lbu a4, 0(a0)
|
|
; CHECK-ALIGNED-RV64-NEXT: lbu a5, 2(a0)
|
|
; CHECK-ALIGNED-RV64-NEXT: lb a0, 3(a0)
|
|
; CHECK-ALIGNED-RV64-NEXT: slli a2, a2, 8
|
|
; CHECK-ALIGNED-RV64-NEXT: or a2, a2, a4
|
|
; CHECK-ALIGNED-RV64-NEXT: slli a5, a5, 16
|
|
; CHECK-ALIGNED-RV64-NEXT: slli a0, a0, 24
|
|
; CHECK-ALIGNED-RV64-NEXT: or a0, a0, a5
|
|
; CHECK-ALIGNED-RV64-NEXT: or a1, a3, a1
|
|
; CHECK-ALIGNED-RV64-NEXT: or a0, a0, a2
|
|
; CHECK-ALIGNED-RV64-NEXT: xor a0, a0, a1
|
|
; CHECK-ALIGNED-RV64-NEXT: seqz a0, a0
|
|
; CHECK-ALIGNED-RV64-NEXT: ret
|
|
;
|
|
; CHECK-ALIGNED-RV32-ZBB-LABEL: memcmp_eq_zero:
|
|
; CHECK-ALIGNED-RV32-ZBB: # %bb.0: # %entry
|
|
; CHECK-ALIGNED-RV32-ZBB-NEXT: lbu a2, 1(a1)
|
|
; CHECK-ALIGNED-RV32-ZBB-NEXT: lbu a3, 2(a1)
|
|
; CHECK-ALIGNED-RV32-ZBB-NEXT: lbu a4, 3(a1)
|
|
; CHECK-ALIGNED-RV32-ZBB-NEXT: lbu a1, 0(a1)
|
|
; CHECK-ALIGNED-RV32-ZBB-NEXT: slli a2, a2, 8
|
|
; CHECK-ALIGNED-RV32-ZBB-NEXT: slli a3, a3, 16
|
|
; CHECK-ALIGNED-RV32-ZBB-NEXT: slli a4, a4, 24
|
|
; CHECK-ALIGNED-RV32-ZBB-NEXT: or a1, a2, a1
|
|
; CHECK-ALIGNED-RV32-ZBB-NEXT: or a3, a4, a3
|
|
; CHECK-ALIGNED-RV32-ZBB-NEXT: lbu a2, 1(a0)
|
|
; CHECK-ALIGNED-RV32-ZBB-NEXT: lbu a4, 0(a0)
|
|
; CHECK-ALIGNED-RV32-ZBB-NEXT: lbu a5, 2(a0)
|
|
; CHECK-ALIGNED-RV32-ZBB-NEXT: lbu a0, 3(a0)
|
|
; CHECK-ALIGNED-RV32-ZBB-NEXT: slli a2, a2, 8
|
|
; CHECK-ALIGNED-RV32-ZBB-NEXT: or a2, a2, a4
|
|
; CHECK-ALIGNED-RV32-ZBB-NEXT: slli a5, a5, 16
|
|
; CHECK-ALIGNED-RV32-ZBB-NEXT: slli a0, a0, 24
|
|
; CHECK-ALIGNED-RV32-ZBB-NEXT: or a0, a0, a5
|
|
; CHECK-ALIGNED-RV32-ZBB-NEXT: or a1, a3, a1
|
|
; CHECK-ALIGNED-RV32-ZBB-NEXT: or a0, a0, a2
|
|
; CHECK-ALIGNED-RV32-ZBB-NEXT: xor a0, a0, a1
|
|
; CHECK-ALIGNED-RV32-ZBB-NEXT: seqz a0, a0
|
|
; CHECK-ALIGNED-RV32-ZBB-NEXT: ret
|
|
;
|
|
; CHECK-ALIGNED-RV64-ZBB-LABEL: memcmp_eq_zero:
|
|
; CHECK-ALIGNED-RV64-ZBB: # %bb.0: # %entry
|
|
; CHECK-ALIGNED-RV64-ZBB-NEXT: lbu a2, 1(a1)
|
|
; CHECK-ALIGNED-RV64-ZBB-NEXT: lbu a3, 2(a1)
|
|
; CHECK-ALIGNED-RV64-ZBB-NEXT: lb a4, 3(a1)
|
|
; CHECK-ALIGNED-RV64-ZBB-NEXT: lbu a1, 0(a1)
|
|
; CHECK-ALIGNED-RV64-ZBB-NEXT: slli a2, a2, 8
|
|
; CHECK-ALIGNED-RV64-ZBB-NEXT: slli a3, a3, 16
|
|
; CHECK-ALIGNED-RV64-ZBB-NEXT: slli a4, a4, 24
|
|
; CHECK-ALIGNED-RV64-ZBB-NEXT: or a1, a2, a1
|
|
; CHECK-ALIGNED-RV64-ZBB-NEXT: or a3, a4, a3
|
|
; CHECK-ALIGNED-RV64-ZBB-NEXT: lbu a2, 1(a0)
|
|
; CHECK-ALIGNED-RV64-ZBB-NEXT: lbu a4, 0(a0)
|
|
; CHECK-ALIGNED-RV64-ZBB-NEXT: lbu a5, 2(a0)
|
|
; CHECK-ALIGNED-RV64-ZBB-NEXT: lb a0, 3(a0)
|
|
; CHECK-ALIGNED-RV64-ZBB-NEXT: slli a2, a2, 8
|
|
; CHECK-ALIGNED-RV64-ZBB-NEXT: or a2, a2, a4
|
|
; CHECK-ALIGNED-RV64-ZBB-NEXT: slli a5, a5, 16
|
|
; CHECK-ALIGNED-RV64-ZBB-NEXT: slli a0, a0, 24
|
|
; CHECK-ALIGNED-RV64-ZBB-NEXT: or a0, a0, a5
|
|
; CHECK-ALIGNED-RV64-ZBB-NEXT: or a1, a3, a1
|
|
; CHECK-ALIGNED-RV64-ZBB-NEXT: or a0, a0, a2
|
|
; CHECK-ALIGNED-RV64-ZBB-NEXT: xor a0, a0, a1
|
|
; CHECK-ALIGNED-RV64-ZBB-NEXT: seqz a0, a0
|
|
; CHECK-ALIGNED-RV64-ZBB-NEXT: ret
|
|
;
|
|
; CHECK-ALIGNED-RV32-ZBKB-LABEL: memcmp_eq_zero:
|
|
; CHECK-ALIGNED-RV32-ZBKB: # %bb.0: # %entry
|
|
; CHECK-ALIGNED-RV32-ZBKB-NEXT: lbu a2, 0(a1)
|
|
; CHECK-ALIGNED-RV32-ZBKB-NEXT: lbu a3, 1(a1)
|
|
; CHECK-ALIGNED-RV32-ZBKB-NEXT: lbu a4, 2(a1)
|
|
; CHECK-ALIGNED-RV32-ZBKB-NEXT: lbu a1, 3(a1)
|
|
; CHECK-ALIGNED-RV32-ZBKB-NEXT: lbu a5, 1(a0)
|
|
; CHECK-ALIGNED-RV32-ZBKB-NEXT: lbu a6, 2(a0)
|
|
; CHECK-ALIGNED-RV32-ZBKB-NEXT: lbu a7, 3(a0)
|
|
; CHECK-ALIGNED-RV32-ZBKB-NEXT: lbu a0, 0(a0)
|
|
; CHECK-ALIGNED-RV32-ZBKB-NEXT: packh a1, a4, a1
|
|
; CHECK-ALIGNED-RV32-ZBKB-NEXT: packh a2, a2, a3
|
|
; CHECK-ALIGNED-RV32-ZBKB-NEXT: packh a3, a6, a7
|
|
; CHECK-ALIGNED-RV32-ZBKB-NEXT: packh a0, a0, a5
|
|
; CHECK-ALIGNED-RV32-ZBKB-NEXT: pack a1, a2, a1
|
|
; CHECK-ALIGNED-RV32-ZBKB-NEXT: pack a0, a0, a3
|
|
; CHECK-ALIGNED-RV32-ZBKB-NEXT: xor a0, a0, a1
|
|
; CHECK-ALIGNED-RV32-ZBKB-NEXT: seqz a0, a0
|
|
; CHECK-ALIGNED-RV32-ZBKB-NEXT: ret
|
|
;
|
|
; CHECK-ALIGNED-RV64-ZBKB-LABEL: memcmp_eq_zero:
|
|
; CHECK-ALIGNED-RV64-ZBKB: # %bb.0: # %entry
|
|
; CHECK-ALIGNED-RV64-ZBKB-NEXT: lbu a2, 0(a1)
|
|
; CHECK-ALIGNED-RV64-ZBKB-NEXT: lbu a3, 1(a1)
|
|
; CHECK-ALIGNED-RV64-ZBKB-NEXT: lbu a4, 2(a1)
|
|
; CHECK-ALIGNED-RV64-ZBKB-NEXT: lb a1, 3(a1)
|
|
; CHECK-ALIGNED-RV64-ZBKB-NEXT: lbu a5, 0(a0)
|
|
; CHECK-ALIGNED-RV64-ZBKB-NEXT: lbu a6, 1(a0)
|
|
; CHECK-ALIGNED-RV64-ZBKB-NEXT: packh a2, a2, a3
|
|
; CHECK-ALIGNED-RV64-ZBKB-NEXT: lbu a3, 2(a0)
|
|
; CHECK-ALIGNED-RV64-ZBKB-NEXT: lb a0, 3(a0)
|
|
; CHECK-ALIGNED-RV64-ZBKB-NEXT: packh a5, a5, a6
|
|
; CHECK-ALIGNED-RV64-ZBKB-NEXT: slli a4, a4, 16
|
|
; CHECK-ALIGNED-RV64-ZBKB-NEXT: slli a1, a1, 24
|
|
; CHECK-ALIGNED-RV64-ZBKB-NEXT: or a1, a1, a4
|
|
; CHECK-ALIGNED-RV64-ZBKB-NEXT: slli a3, a3, 16
|
|
; CHECK-ALIGNED-RV64-ZBKB-NEXT: slli a0, a0, 24
|
|
; CHECK-ALIGNED-RV64-ZBKB-NEXT: or a0, a0, a3
|
|
; CHECK-ALIGNED-RV64-ZBKB-NEXT: or a1, a1, a2
|
|
; CHECK-ALIGNED-RV64-ZBKB-NEXT: or a0, a0, a5
|
|
; CHECK-ALIGNED-RV64-ZBKB-NEXT: xor a0, a0, a1
|
|
; CHECK-ALIGNED-RV64-ZBKB-NEXT: seqz a0, a0
|
|
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ret
|
|
;
|
|
; CHECK-ALIGNED-RV32-V-LABEL: memcmp_eq_zero:
|
|
; CHECK-ALIGNED-RV32-V: # %bb.0: # %entry
|
|
; CHECK-ALIGNED-RV32-V-NEXT: lbu a2, 1(a1)
|
|
; CHECK-ALIGNED-RV32-V-NEXT: lbu a3, 2(a1)
|
|
; CHECK-ALIGNED-RV32-V-NEXT: lbu a4, 3(a1)
|
|
; CHECK-ALIGNED-RV32-V-NEXT: lbu a1, 0(a1)
|
|
; CHECK-ALIGNED-RV32-V-NEXT: slli a2, a2, 8
|
|
; CHECK-ALIGNED-RV32-V-NEXT: slli a3, a3, 16
|
|
; CHECK-ALIGNED-RV32-V-NEXT: slli a4, a4, 24
|
|
; CHECK-ALIGNED-RV32-V-NEXT: or a1, a2, a1
|
|
; CHECK-ALIGNED-RV32-V-NEXT: or a3, a4, a3
|
|
; CHECK-ALIGNED-RV32-V-NEXT: lbu a2, 1(a0)
|
|
; CHECK-ALIGNED-RV32-V-NEXT: lbu a4, 0(a0)
|
|
; CHECK-ALIGNED-RV32-V-NEXT: lbu a5, 2(a0)
|
|
; CHECK-ALIGNED-RV32-V-NEXT: lbu a0, 3(a0)
|
|
; CHECK-ALIGNED-RV32-V-NEXT: slli a2, a2, 8
|
|
; CHECK-ALIGNED-RV32-V-NEXT: or a2, a2, a4
|
|
; CHECK-ALIGNED-RV32-V-NEXT: slli a5, a5, 16
|
|
; CHECK-ALIGNED-RV32-V-NEXT: slli a0, a0, 24
|
|
; CHECK-ALIGNED-RV32-V-NEXT: or a0, a0, a5
|
|
; CHECK-ALIGNED-RV32-V-NEXT: or a1, a3, a1
|
|
; CHECK-ALIGNED-RV32-V-NEXT: or a0, a0, a2
|
|
; CHECK-ALIGNED-RV32-V-NEXT: xor a0, a0, a1
|
|
; CHECK-ALIGNED-RV32-V-NEXT: seqz a0, a0
|
|
; CHECK-ALIGNED-RV32-V-NEXT: ret
|
|
;
|
|
; CHECK-ALIGNED-RV64-V-LABEL: memcmp_eq_zero:
|
|
; CHECK-ALIGNED-RV64-V: # %bb.0: # %entry
|
|
; CHECK-ALIGNED-RV64-V-NEXT: lbu a2, 1(a1)
|
|
; CHECK-ALIGNED-RV64-V-NEXT: lbu a3, 2(a1)
|
|
; CHECK-ALIGNED-RV64-V-NEXT: lb a4, 3(a1)
|
|
; CHECK-ALIGNED-RV64-V-NEXT: lbu a1, 0(a1)
|
|
; CHECK-ALIGNED-RV64-V-NEXT: slli a2, a2, 8
|
|
; CHECK-ALIGNED-RV64-V-NEXT: slli a3, a3, 16
|
|
; CHECK-ALIGNED-RV64-V-NEXT: slli a4, a4, 24
|
|
; CHECK-ALIGNED-RV64-V-NEXT: or a1, a2, a1
|
|
; CHECK-ALIGNED-RV64-V-NEXT: or a3, a4, a3
|
|
; CHECK-ALIGNED-RV64-V-NEXT: lbu a2, 1(a0)
|
|
; CHECK-ALIGNED-RV64-V-NEXT: lbu a4, 0(a0)
|
|
; CHECK-ALIGNED-RV64-V-NEXT: lbu a5, 2(a0)
|
|
; CHECK-ALIGNED-RV64-V-NEXT: lb a0, 3(a0)
|
|
; CHECK-ALIGNED-RV64-V-NEXT: slli a2, a2, 8
|
|
; CHECK-ALIGNED-RV64-V-NEXT: or a2, a2, a4
|
|
; CHECK-ALIGNED-RV64-V-NEXT: slli a5, a5, 16
|
|
; CHECK-ALIGNED-RV64-V-NEXT: slli a0, a0, 24
|
|
; CHECK-ALIGNED-RV64-V-NEXT: or a0, a0, a5
|
|
; CHECK-ALIGNED-RV64-V-NEXT: or a1, a3, a1
|
|
; CHECK-ALIGNED-RV64-V-NEXT: or a0, a0, a2
|
|
; CHECK-ALIGNED-RV64-V-NEXT: xor a0, a0, a1
|
|
; CHECK-ALIGNED-RV64-V-NEXT: seqz a0, a0
|
|
; CHECK-ALIGNED-RV64-V-NEXT: ret
|
|
;
|
|
; CHECK-UNALIGNED-LABEL: memcmp_eq_zero:
|
|
; CHECK-UNALIGNED: # %bb.0: # %entry
|
|
; CHECK-UNALIGNED-NEXT: lw a0, 0(a0)
|
|
; CHECK-UNALIGNED-NEXT: lw a1, 0(a1)
|
|
; CHECK-UNALIGNED-NEXT: xor a0, a0, a1
|
|
; CHECK-UNALIGNED-NEXT: seqz a0, a0
|
|
; CHECK-UNALIGNED-NEXT: ret
|
|
entry:
|
|
%memcmp = call signext i32 @memcmp(ptr %s1, ptr %s2, iXLen 4)
|
|
%ret = icmp eq i32 %memcmp, 0
|
|
ret i1 %ret
|
|
}
|
|
|
|
define i1 @memcmp_lt_zero(ptr %s1, ptr %s2) nounwind optsize {
|
|
; CHECK-ALIGNED-RV32-LABEL: memcmp_lt_zero:
|
|
; CHECK-ALIGNED-RV32: # %bb.0: # %entry
|
|
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, -16
|
|
; CHECK-ALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
|
|
; CHECK-ALIGNED-RV32-NEXT: li a2, 4
|
|
; CHECK-ALIGNED-RV32-NEXT: call memcmp
|
|
; CHECK-ALIGNED-RV32-NEXT: srli a0, a0, 31
|
|
; CHECK-ALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
|
|
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, 16
|
|
; CHECK-ALIGNED-RV32-NEXT: ret
|
|
;
|
|
; CHECK-ALIGNED-RV64-LABEL: memcmp_lt_zero:
|
|
; CHECK-ALIGNED-RV64: # %bb.0: # %entry
|
|
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, -16
|
|
; CHECK-ALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
|
|
; CHECK-ALIGNED-RV64-NEXT: li a2, 4
|
|
; CHECK-ALIGNED-RV64-NEXT: call memcmp
|
|
; CHECK-ALIGNED-RV64-NEXT: slti a0, a0, 0
|
|
; CHECK-ALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
|
|
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, 16
|
|
; CHECK-ALIGNED-RV64-NEXT: ret
|
|
;
|
|
; CHECK-ALIGNED-RV32-ZBB-LABEL: memcmp_lt_zero:
|
|
; CHECK-ALIGNED-RV32-ZBB: # %bb.0: # %entry
|
|
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, -16
|
|
; CHECK-ALIGNED-RV32-ZBB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
|
|
; CHECK-ALIGNED-RV32-ZBB-NEXT: li a2, 4
|
|
; CHECK-ALIGNED-RV32-ZBB-NEXT: call memcmp
|
|
; CHECK-ALIGNED-RV32-ZBB-NEXT: srli a0, a0, 31
|
|
; CHECK-ALIGNED-RV32-ZBB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
|
|
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, 16
|
|
; CHECK-ALIGNED-RV32-ZBB-NEXT: ret
|
|
;
|
|
; CHECK-ALIGNED-RV64-ZBB-LABEL: memcmp_lt_zero:
|
|
; CHECK-ALIGNED-RV64-ZBB: # %bb.0: # %entry
|
|
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, -16
|
|
; CHECK-ALIGNED-RV64-ZBB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
|
|
; CHECK-ALIGNED-RV64-ZBB-NEXT: li a2, 4
|
|
; CHECK-ALIGNED-RV64-ZBB-NEXT: call memcmp
|
|
; CHECK-ALIGNED-RV64-ZBB-NEXT: slti a0, a0, 0
|
|
; CHECK-ALIGNED-RV64-ZBB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
|
|
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, 16
|
|
; CHECK-ALIGNED-RV64-ZBB-NEXT: ret
|
|
;
|
|
; CHECK-ALIGNED-RV32-ZBKB-LABEL: memcmp_lt_zero:
|
|
; CHECK-ALIGNED-RV32-ZBKB: # %bb.0: # %entry
|
|
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, -16
|
|
; CHECK-ALIGNED-RV32-ZBKB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
|
|
; CHECK-ALIGNED-RV32-ZBKB-NEXT: li a2, 4
|
|
; CHECK-ALIGNED-RV32-ZBKB-NEXT: call memcmp
|
|
; CHECK-ALIGNED-RV32-ZBKB-NEXT: srli a0, a0, 31
|
|
; CHECK-ALIGNED-RV32-ZBKB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
|
|
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, 16
|
|
; CHECK-ALIGNED-RV32-ZBKB-NEXT: ret
|
|
;
|
|
; CHECK-ALIGNED-RV64-ZBKB-LABEL: memcmp_lt_zero:
|
|
; CHECK-ALIGNED-RV64-ZBKB: # %bb.0: # %entry
|
|
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, -16
|
|
; CHECK-ALIGNED-RV64-ZBKB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
|
|
; CHECK-ALIGNED-RV64-ZBKB-NEXT: li a2, 4
|
|
; CHECK-ALIGNED-RV64-ZBKB-NEXT: call memcmp
|
|
; CHECK-ALIGNED-RV64-ZBKB-NEXT: slti a0, a0, 0
|
|
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
|
|
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, 16
|
|
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ret
|
|
;
|
|
; CHECK-ALIGNED-RV32-V-LABEL: memcmp_lt_zero:
|
|
; CHECK-ALIGNED-RV32-V: # %bb.0: # %entry
|
|
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, -16
|
|
; CHECK-ALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
|
|
; CHECK-ALIGNED-RV32-V-NEXT: li a2, 4
|
|
; CHECK-ALIGNED-RV32-V-NEXT: call memcmp
|
|
; CHECK-ALIGNED-RV32-V-NEXT: srli a0, a0, 31
|
|
; CHECK-ALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
|
|
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, 16
|
|
; CHECK-ALIGNED-RV32-V-NEXT: ret
|
|
;
|
|
; CHECK-ALIGNED-RV64-V-LABEL: memcmp_lt_zero:
|
|
; CHECK-ALIGNED-RV64-V: # %bb.0: # %entry
|
|
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, -16
|
|
; CHECK-ALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
|
|
; CHECK-ALIGNED-RV64-V-NEXT: li a2, 4
|
|
; CHECK-ALIGNED-RV64-V-NEXT: call memcmp
|
|
; CHECK-ALIGNED-RV64-V-NEXT: slti a0, a0, 0
|
|
; CHECK-ALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
|
|
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, 16
|
|
; CHECK-ALIGNED-RV64-V-NEXT: ret
|
|
;
|
|
; CHECK-UNALIGNED-RV32-LABEL: memcmp_lt_zero:
|
|
; CHECK-UNALIGNED-RV32: # %bb.0: # %entry
|
|
; CHECK-UNALIGNED-RV32-NEXT: addi sp, sp, -16
|
|
; CHECK-UNALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
|
|
; CHECK-UNALIGNED-RV32-NEXT: li a2, 4
|
|
; CHECK-UNALIGNED-RV32-NEXT: call memcmp
|
|
; CHECK-UNALIGNED-RV32-NEXT: srli a0, a0, 31
|
|
; CHECK-UNALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
|
|
; CHECK-UNALIGNED-RV32-NEXT: addi sp, sp, 16
|
|
; CHECK-UNALIGNED-RV32-NEXT: ret
|
|
;
|
|
; CHECK-UNALIGNED-RV64-LABEL: memcmp_lt_zero:
|
|
; CHECK-UNALIGNED-RV64: # %bb.0: # %entry
|
|
; CHECK-UNALIGNED-RV64-NEXT: addi sp, sp, -16
|
|
; CHECK-UNALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
|
|
; CHECK-UNALIGNED-RV64-NEXT: li a2, 4
|
|
; CHECK-UNALIGNED-RV64-NEXT: call memcmp
|
|
; CHECK-UNALIGNED-RV64-NEXT: slti a0, a0, 0
|
|
; CHECK-UNALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
|
|
; CHECK-UNALIGNED-RV64-NEXT: addi sp, sp, 16
|
|
; CHECK-UNALIGNED-RV64-NEXT: ret
|
|
;
|
|
; CHECK-UNALIGNED-RV32-ZBB-LABEL: memcmp_lt_zero:
|
|
; CHECK-UNALIGNED-RV32-ZBB: # %bb.0: # %entry
|
|
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a0, 0(a0)
|
|
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a1, 0(a1)
|
|
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a0, a0
|
|
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a1, a1
|
|
; CHECK-UNALIGNED-RV32-ZBB-NEXT: sltu a0, a0, a1
|
|
; CHECK-UNALIGNED-RV32-ZBB-NEXT: ret
|
|
;
|
|
; CHECK-UNALIGNED-RV64-ZBB-LABEL: memcmp_lt_zero:
|
|
; CHECK-UNALIGNED-RV64-ZBB: # %bb.0: # %entry
|
|
; CHECK-UNALIGNED-RV64-ZBB-NEXT: lw a0, 0(a0)
|
|
; CHECK-UNALIGNED-RV64-ZBB-NEXT: lw a1, 0(a1)
|
|
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a0, a0
|
|
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a1, a1
|
|
; CHECK-UNALIGNED-RV64-ZBB-NEXT: srli a0, a0, 32
|
|
; CHECK-UNALIGNED-RV64-ZBB-NEXT: srli a1, a1, 32
|
|
; CHECK-UNALIGNED-RV64-ZBB-NEXT: sltu a0, a0, a1
|
|
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ret
|
|
;
|
|
; CHECK-UNALIGNED-RV32-ZBKB-LABEL: memcmp_lt_zero:
|
|
; CHECK-UNALIGNED-RV32-ZBKB: # %bb.0: # %entry
|
|
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a0, 0(a0)
|
|
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a1, 0(a1)
|
|
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a0, a0
|
|
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a1, a1
|
|
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: sltu a0, a0, a1
|
|
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: ret
|
|
;
|
|
; CHECK-UNALIGNED-RV64-ZBKB-LABEL: memcmp_lt_zero:
|
|
; CHECK-UNALIGNED-RV64-ZBKB: # %bb.0: # %entry
|
|
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: lw a0, 0(a0)
|
|
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: lw a1, 0(a1)
|
|
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a0, a0
|
|
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a1, a1
|
|
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: srli a0, a0, 32
|
|
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: srli a1, a1, 32
|
|
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: sltu a0, a0, a1
|
|
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ret
|
|
;
|
|
; CHECK-UNALIGNED-RV32-V-LABEL: memcmp_lt_zero:
|
|
; CHECK-UNALIGNED-RV32-V: # %bb.0: # %entry
|
|
; CHECK-UNALIGNED-RV32-V-NEXT: addi sp, sp, -16
|
|
; CHECK-UNALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
|
|
; CHECK-UNALIGNED-RV32-V-NEXT: li a2, 4
|
|
; CHECK-UNALIGNED-RV32-V-NEXT: call memcmp
|
|
; CHECK-UNALIGNED-RV32-V-NEXT: srli a0, a0, 31
|
|
; CHECK-UNALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
|
|
; CHECK-UNALIGNED-RV32-V-NEXT: addi sp, sp, 16
|
|
; CHECK-UNALIGNED-RV32-V-NEXT: ret
|
|
;
|
|
; CHECK-UNALIGNED-RV64-V-LABEL: memcmp_lt_zero:
|
|
; CHECK-UNALIGNED-RV64-V: # %bb.0: # %entry
|
|
; CHECK-UNALIGNED-RV64-V-NEXT: addi sp, sp, -16
|
|
; CHECK-UNALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
|
|
; CHECK-UNALIGNED-RV64-V-NEXT: li a2, 4
|
|
; CHECK-UNALIGNED-RV64-V-NEXT: call memcmp
|
|
; CHECK-UNALIGNED-RV64-V-NEXT: slti a0, a0, 0
|
|
; CHECK-UNALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
|
|
; CHECK-UNALIGNED-RV64-V-NEXT: addi sp, sp, 16
|
|
; CHECK-UNALIGNED-RV64-V-NEXT: ret
|
|
entry:
|
|
%memcmp = call signext i32 @memcmp(ptr %s1, ptr %s2, iXLen 4)
|
|
%ret = icmp slt i32 %memcmp, 0
|
|
ret i1 %ret
|
|
}
|
|
|
|
define i1 @memcmp_gt_zero(ptr %s1, ptr %s2) nounwind optsize {
|
|
; CHECK-ALIGNED-RV32-LABEL: memcmp_gt_zero:
|
|
; CHECK-ALIGNED-RV32: # %bb.0: # %entry
|
|
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, -16
|
|
; CHECK-ALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
|
|
; CHECK-ALIGNED-RV32-NEXT: li a2, 4
|
|
; CHECK-ALIGNED-RV32-NEXT: call memcmp
|
|
; CHECK-ALIGNED-RV32-NEXT: sgtz a0, a0
|
|
; CHECK-ALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
|
|
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, 16
|
|
; CHECK-ALIGNED-RV32-NEXT: ret
|
|
;
|
|
; CHECK-ALIGNED-RV64-LABEL: memcmp_gt_zero:
|
|
; CHECK-ALIGNED-RV64: # %bb.0: # %entry
|
|
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, -16
|
|
; CHECK-ALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
|
|
; CHECK-ALIGNED-RV64-NEXT: li a2, 4
|
|
; CHECK-ALIGNED-RV64-NEXT: call memcmp
|
|
; CHECK-ALIGNED-RV64-NEXT: sgtz a0, a0
|
|
; CHECK-ALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
|
|
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, 16
|
|
; CHECK-ALIGNED-RV64-NEXT: ret
|
|
;
|
|
; CHECK-ALIGNED-RV32-ZBB-LABEL: memcmp_gt_zero:
|
|
; CHECK-ALIGNED-RV32-ZBB: # %bb.0: # %entry
|
|
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, -16
|
|
; CHECK-ALIGNED-RV32-ZBB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
|
|
; CHECK-ALIGNED-RV32-ZBB-NEXT: li a2, 4
|
|
; CHECK-ALIGNED-RV32-ZBB-NEXT: call memcmp
|
|
; CHECK-ALIGNED-RV32-ZBB-NEXT: sgtz a0, a0
|
|
; CHECK-ALIGNED-RV32-ZBB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
|
|
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, 16
|
|
; CHECK-ALIGNED-RV32-ZBB-NEXT: ret
|
|
;
|
|
; CHECK-ALIGNED-RV64-ZBB-LABEL: memcmp_gt_zero:
|
|
; CHECK-ALIGNED-RV64-ZBB: # %bb.0: # %entry
|
|
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, -16
|
|
; CHECK-ALIGNED-RV64-ZBB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
|
|
; CHECK-ALIGNED-RV64-ZBB-NEXT: li a2, 4
|
|
; CHECK-ALIGNED-RV64-ZBB-NEXT: call memcmp
|
|
; CHECK-ALIGNED-RV64-ZBB-NEXT: sgtz a0, a0
|
|
; CHECK-ALIGNED-RV64-ZBB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
|
|
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, 16
|
|
; CHECK-ALIGNED-RV64-ZBB-NEXT: ret
|
|
;
|
|
; CHECK-ALIGNED-RV32-ZBKB-LABEL: memcmp_gt_zero:
|
|
; CHECK-ALIGNED-RV32-ZBKB: # %bb.0: # %entry
|
|
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, -16
|
|
; CHECK-ALIGNED-RV32-ZBKB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
|
|
; CHECK-ALIGNED-RV32-ZBKB-NEXT: li a2, 4
|
|
; CHECK-ALIGNED-RV32-ZBKB-NEXT: call memcmp
|
|
; CHECK-ALIGNED-RV32-ZBKB-NEXT: sgtz a0, a0
|
|
; CHECK-ALIGNED-RV32-ZBKB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
|
|
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, 16
|
|
; CHECK-ALIGNED-RV32-ZBKB-NEXT: ret
|
|
;
|
|
; CHECK-ALIGNED-RV64-ZBKB-LABEL: memcmp_gt_zero:
|
|
; CHECK-ALIGNED-RV64-ZBKB: # %bb.0: # %entry
|
|
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, -16
|
|
; CHECK-ALIGNED-RV64-ZBKB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
|
|
; CHECK-ALIGNED-RV64-ZBKB-NEXT: li a2, 4
|
|
; CHECK-ALIGNED-RV64-ZBKB-NEXT: call memcmp
|
|
; CHECK-ALIGNED-RV64-ZBKB-NEXT: sgtz a0, a0
|
|
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
|
|
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, 16
|
|
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ret
|
|
;
|
|
; CHECK-ALIGNED-RV32-V-LABEL: memcmp_gt_zero:
|
|
; CHECK-ALIGNED-RV32-V: # %bb.0: # %entry
|
|
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, -16
|
|
; CHECK-ALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
|
|
; CHECK-ALIGNED-RV32-V-NEXT: li a2, 4
|
|
; CHECK-ALIGNED-RV32-V-NEXT: call memcmp
|
|
; CHECK-ALIGNED-RV32-V-NEXT: sgtz a0, a0
|
|
; CHECK-ALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
|
|
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, 16
|
|
; CHECK-ALIGNED-RV32-V-NEXT: ret
|
|
;
|
|
; CHECK-ALIGNED-RV64-V-LABEL: memcmp_gt_zero:
|
|
; CHECK-ALIGNED-RV64-V: # %bb.0: # %entry
|
|
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, -16
|
|
; CHECK-ALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
|
|
; CHECK-ALIGNED-RV64-V-NEXT: li a2, 4
|
|
; CHECK-ALIGNED-RV64-V-NEXT: call memcmp
|
|
; CHECK-ALIGNED-RV64-V-NEXT: sgtz a0, a0
|
|
; CHECK-ALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
|
|
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, 16
|
|
; CHECK-ALIGNED-RV64-V-NEXT: ret
|
|
;
|
|
; CHECK-UNALIGNED-RV32-LABEL: memcmp_gt_zero:
|
|
; CHECK-UNALIGNED-RV32: # %bb.0: # %entry
|
|
; CHECK-UNALIGNED-RV32-NEXT: addi sp, sp, -16
|
|
; CHECK-UNALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
|
|
; CHECK-UNALIGNED-RV32-NEXT: li a2, 4
|
|
; CHECK-UNALIGNED-RV32-NEXT: call memcmp
|
|
; CHECK-UNALIGNED-RV32-NEXT: sgtz a0, a0
|
|
; CHECK-UNALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
|
|
; CHECK-UNALIGNED-RV32-NEXT: addi sp, sp, 16
|
|
; CHECK-UNALIGNED-RV32-NEXT: ret
|
|
;
|
|
; CHECK-UNALIGNED-RV64-LABEL: memcmp_gt_zero:
|
|
; CHECK-UNALIGNED-RV64: # %bb.0: # %entry
|
|
; CHECK-UNALIGNED-RV64-NEXT: addi sp, sp, -16
|
|
; CHECK-UNALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
|
|
; CHECK-UNALIGNED-RV64-NEXT: li a2, 4
|
|
; CHECK-UNALIGNED-RV64-NEXT: call memcmp
|
|
; CHECK-UNALIGNED-RV64-NEXT: sgtz a0, a0
|
|
; CHECK-UNALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
|
|
; CHECK-UNALIGNED-RV64-NEXT: addi sp, sp, 16
|
|
; CHECK-UNALIGNED-RV64-NEXT: ret
|
|
;
|
|
; CHECK-UNALIGNED-RV32-ZBB-LABEL: memcmp_gt_zero:
|
|
; CHECK-UNALIGNED-RV32-ZBB: # %bb.0: # %entry
|
|
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a0, 0(a0)
|
|
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a1, 0(a1)
|
|
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a0, a0
|
|
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a1, a1
|
|
; CHECK-UNALIGNED-RV32-ZBB-NEXT: sltu a0, a1, a0
|
|
; CHECK-UNALIGNED-RV32-ZBB-NEXT: ret
|
|
;
|
|
; CHECK-UNALIGNED-RV64-ZBB-LABEL: memcmp_gt_zero:
|
|
; CHECK-UNALIGNED-RV64-ZBB: # %bb.0: # %entry
|
|
; CHECK-UNALIGNED-RV64-ZBB-NEXT: lw a0, 0(a0)
|
|
; CHECK-UNALIGNED-RV64-ZBB-NEXT: lw a1, 0(a1)
|
|
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a0, a0
|
|
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a1, a1
|
|
; CHECK-UNALIGNED-RV64-ZBB-NEXT: srli a0, a0, 32
|
|
; CHECK-UNALIGNED-RV64-ZBB-NEXT: srli a1, a1, 32
|
|
; CHECK-UNALIGNED-RV64-ZBB-NEXT: sltu a0, a1, a0
|
|
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ret
|
|
;
|
|
; CHECK-UNALIGNED-RV32-ZBKB-LABEL: memcmp_gt_zero:
|
|
; CHECK-UNALIGNED-RV32-ZBKB: # %bb.0: # %entry
|
|
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a0, 0(a0)
|
|
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a1, 0(a1)
|
|
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a0, a0
|
|
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a1, a1
|
|
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: sltu a0, a1, a0
|
|
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: ret
|
|
;
|
|
; CHECK-UNALIGNED-RV64-ZBKB-LABEL: memcmp_gt_zero:
|
|
; CHECK-UNALIGNED-RV64-ZBKB: # %bb.0: # %entry
|
|
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: lw a0, 0(a0)
|
|
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: lw a1, 0(a1)
|
|
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a0, a0
|
|
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a1, a1
|
|
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: srli a0, a0, 32
|
|
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: srli a1, a1, 32
|
|
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: sltu a0, a1, a0
|
|
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ret
|
|
;
|
|
; CHECK-UNALIGNED-RV32-V-LABEL: memcmp_gt_zero:
|
|
; CHECK-UNALIGNED-RV32-V: # %bb.0: # %entry
|
|
; CHECK-UNALIGNED-RV32-V-NEXT: addi sp, sp, -16
|
|
; CHECK-UNALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
|
|
; CHECK-UNALIGNED-RV32-V-NEXT: li a2, 4
|
|
; CHECK-UNALIGNED-RV32-V-NEXT: call memcmp
|
|
; CHECK-UNALIGNED-RV32-V-NEXT: sgtz a0, a0
|
|
; CHECK-UNALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
|
|
; CHECK-UNALIGNED-RV32-V-NEXT: addi sp, sp, 16
|
|
; CHECK-UNALIGNED-RV32-V-NEXT: ret
|
|
;
|
|
; CHECK-UNALIGNED-RV64-V-LABEL: memcmp_gt_zero:
|
|
; CHECK-UNALIGNED-RV64-V: # %bb.0: # %entry
|
|
; CHECK-UNALIGNED-RV64-V-NEXT: addi sp, sp, -16
|
|
; CHECK-UNALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
|
|
; CHECK-UNALIGNED-RV64-V-NEXT: li a2, 4
|
|
; CHECK-UNALIGNED-RV64-V-NEXT: call memcmp
|
|
; CHECK-UNALIGNED-RV64-V-NEXT: sgtz a0, a0
|
|
; CHECK-UNALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
|
|
; CHECK-UNALIGNED-RV64-V-NEXT: addi sp, sp, 16
|
|
; CHECK-UNALIGNED-RV64-V-NEXT: ret
|
|
entry:
|
|
%memcmp = call signext i32 @memcmp(ptr %s1, ptr %s2, iXLen 4)
|
|
%ret = icmp sgt i32 %memcmp, 0
|
|
ret i1 %ret
|
|
}
|
|
;; NOTE: These prefixes are unused and the list is autogenerated. Do not add tests below this line:
|
|
; CHECK-ALIGNED: {{.*}}
|