llvm-project/llvm/test/CodeGen/LoongArch/bswap-bitreverse.ll
hev 746c682c4a
[LoongArch] Introduce 32s target feature for LA32S ISA extensions (#139695)
According to the official LoongArch reference manual, the 32-bit
LoongArch is divided into two variants: the Reduced version (LA32R)
and the Standard version (LA32S). LA32S extends LA32R with additional
instructions, and the 64-bit version (LA64) fully includes the LA32S
instruction set.

This patch introduces a new target feature `32s` for the LoongArch
backend, enabling support for instructions specific to the LA32S
variant.
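
For orientation, a minimal sketch of how the variants are selected when
invoking llc; the -mattr flags mirror the RUN lines of the test below,
and the input file name is only a placeholder:

    llc --mtriple=loongarch32 -mattr=-32s,+d input.ll    (LA32R: base 32-bit subset)
    llc --mtriple=loongarch32 -mattr=+32s,+d input.ll    (LA32S: enables the instructions listed below)
    llc --mtriple=loongarch64 -mattr=+d input.ll         (LA64: already includes the LA32S set)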

The LA32S extension includes the following additional instructions;
a short IR sketch follows the list:

- ALSL.W
- {AND,OR}N
- B{EQ,NE}Z
- BITREV.{4B,W}
- BSTR{INS,PICK}.W
- BYTEPICK.W
- CL{O,Z}.W
- CPUCFG
- CT{O,Z}.W
- EXT.W.{B,H}
- F{LD,ST}X.{D,S}
- MASK{EQ,NE}Z
- PC{ADDI,ALAU12I}
- REVB.2H
- ROTR{I}.W
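
As a sketch of how these surface in codegen (the function below is
illustrative only and not part of the test): a plain 32-bit byte swap is
expected to select REVB.2H together with a ROTRI.W rotate once
`-mattr=+32s` is given, whereas LA32R has to expand it with shifts and
masks, much like the sequences checked further down.

    declare i32 @llvm.bswap.i32(i32)

    define i32 @sketch_bswap_i32(i32 %a) {
      %r = call i32 @llvm.bswap.i32(i32 %a)   ; expected: revb.2h + rotri.w under +32s
      ret i32 %r
    }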

Additionally, LA32R defines three new instruction aliases, illustrated
with concrete registers after the list:

- RDCNTID.W RJ => RDTIMEL.W ZERO, RJ
- RDCNTVH.W RD => RDTIMEH.W RD, ZERO
- RDCNTVL.W RD => RDTIMEL.W RD, ZERO
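
For illustration, the same aliases with arbitrarily chosen registers,
assuming the usual RDTIME semantics (counter value into RD, counter ID
into RJ):

    RDCNTVL.W $a0  =>  RDTIMEL.W $a0, $zero    (low half of the counter value into $a0)
    RDCNTVH.W $a1  =>  RDTIMEH.W $a1, $zero    (high half of the counter value into $a1)
    RDCNTID.W $t0  =>  RDTIMEL.W $zero, $t0    (counter ID into $t0, value discarded)
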
2025-05-20 18:28:08 +08:00

; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc --mtriple=loongarch32 -mattr=-32s,+d --verify-machineinstrs < %s \
; RUN: | FileCheck %s --check-prefix=LA32R
; RUN: llc --mtriple=loongarch32 -mattr=+32s,+d --verify-machineinstrs < %s \
; RUN: | FileCheck %s --check-prefix=LA32S
; RUN: llc --mtriple=loongarch64 -mattr=+d --verify-machineinstrs < %s \
; RUN: | FileCheck %s --check-prefix=LA64
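; LA32R has no byte/bit-reverse instructions, so the bswap+bitreverse pairs
; below are expanded into shift-and-mask sequences; LA32S and LA64 fold each
; pair into bitrev.4b/bitrev.8b (i32/i64) or a short revb.2h+bitrev sequence
; for i16.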
declare i16 @llvm.bitreverse.i16(i16)
declare i32 @llvm.bitreverse.i32(i32)
declare i64 @llvm.bitreverse.i64(i64)
declare i16 @llvm.bswap.i16(i16)
declare i32 @llvm.bswap.i32(i32)
declare i64 @llvm.bswap.i64(i64)
define i16 @test_bswap_bitreverse_i16(i16 %a) nounwind {
; LA32R-LABEL: test_bswap_bitreverse_i16:
; LA32R: # %bb.0:
; LA32R-NEXT: andi $a1, $a0, 3855
; LA32R-NEXT: slli.w $a1, $a1, 4
; LA32R-NEXT: srli.w $a0, $a0, 4
; LA32R-NEXT: andi $a0, $a0, 3855
; LA32R-NEXT: or $a0, $a0, $a1
; LA32R-NEXT: srli.w $a1, $a0, 2
; LA32R-NEXT: lu12i.w $a2, 3
; LA32R-NEXT: ori $a2, $a2, 819
; LA32R-NEXT: and $a1, $a1, $a2
; LA32R-NEXT: and $a0, $a0, $a2
; LA32R-NEXT: slli.w $a0, $a0, 2
; LA32R-NEXT: or $a0, $a1, $a0
; LA32R-NEXT: srli.w $a1, $a0, 1
; LA32R-NEXT: lu12i.w $a2, 5
; LA32R-NEXT: ori $a2, $a2, 1365
; LA32R-NEXT: and $a1, $a1, $a2
; LA32R-NEXT: and $a0, $a0, $a2
; LA32R-NEXT: slli.w $a0, $a0, 1
; LA32R-NEXT: or $a0, $a1, $a0
; LA32R-NEXT: ret
;
; LA32S-LABEL: test_bswap_bitreverse_i16:
; LA32S: # %bb.0:
; LA32S-NEXT: revb.2h $a0, $a0
; LA32S-NEXT: bitrev.w $a0, $a0
; LA32S-NEXT: srli.w $a0, $a0, 16
; LA32S-NEXT: ret
;
; LA64-LABEL: test_bswap_bitreverse_i16:
; LA64: # %bb.0:
; LA64-NEXT: revb.2h $a0, $a0
; LA64-NEXT: bitrev.d $a0, $a0
; LA64-NEXT: srli.d $a0, $a0, 48
; LA64-NEXT: ret
%tmp = call i16 @llvm.bswap.i16(i16 %a)
%tmp2 = call i16 @llvm.bitreverse.i16(i16 %tmp)
ret i16 %tmp2
}
define i32 @test_bswap_bitreverse_i32(i32 %a) nounwind {
; LA32R-LABEL: test_bswap_bitreverse_i32:
; LA32R: # %bb.0:
; LA32R-NEXT: srli.w $a1, $a0, 4
; LA32R-NEXT: lu12i.w $a2, 61680
; LA32R-NEXT: ori $a2, $a2, 3855
; LA32R-NEXT: and $a1, $a1, $a2
; LA32R-NEXT: and $a0, $a0, $a2
; LA32R-NEXT: slli.w $a0, $a0, 4
; LA32R-NEXT: or $a0, $a1, $a0
; LA32R-NEXT: srli.w $a1, $a0, 2
; LA32R-NEXT: lu12i.w $a2, 209715
; LA32R-NEXT: ori $a2, $a2, 819
; LA32R-NEXT: and $a1, $a1, $a2
; LA32R-NEXT: and $a0, $a0, $a2
; LA32R-NEXT: slli.w $a0, $a0, 2
; LA32R-NEXT: or $a0, $a1, $a0
; LA32R-NEXT: srli.w $a1, $a0, 1
; LA32R-NEXT: lu12i.w $a2, 349525
; LA32R-NEXT: ori $a2, $a2, 1365
; LA32R-NEXT: and $a1, $a1, $a2
; LA32R-NEXT: and $a0, $a0, $a2
; LA32R-NEXT: slli.w $a0, $a0, 1
; LA32R-NEXT: or $a0, $a1, $a0
; LA32R-NEXT: ret
;
; LA32S-LABEL: test_bswap_bitreverse_i32:
; LA32S: # %bb.0:
; LA32S-NEXT: bitrev.4b $a0, $a0
; LA32S-NEXT: ret
;
; LA64-LABEL: test_bswap_bitreverse_i32:
; LA64: # %bb.0:
; LA64-NEXT: bitrev.4b $a0, $a0
; LA64-NEXT: ret
%tmp = call i32 @llvm.bswap.i32(i32 %a)
%tmp2 = call i32 @llvm.bitreverse.i32(i32 %tmp)
ret i32 %tmp2
}
define i64 @test_bswap_bitreverse_i64(i64 %a) nounwind {
; LA32R-LABEL: test_bswap_bitreverse_i64:
; LA32R: # %bb.0:
; LA32R-NEXT: srli.w $a2, $a0, 4
; LA32R-NEXT: lu12i.w $a3, 61680
; LA32R-NEXT: ori $a3, $a3, 3855
; LA32R-NEXT: and $a2, $a2, $a3
; LA32R-NEXT: and $a0, $a0, $a3
; LA32R-NEXT: slli.w $a0, $a0, 4
; LA32R-NEXT: or $a0, $a2, $a0
; LA32R-NEXT: srli.w $a2, $a0, 2
; LA32R-NEXT: lu12i.w $a4, 209715
; LA32R-NEXT: ori $a4, $a4, 819
; LA32R-NEXT: and $a2, $a2, $a4
; LA32R-NEXT: and $a0, $a0, $a4
; LA32R-NEXT: slli.w $a0, $a0, 2
; LA32R-NEXT: or $a0, $a2, $a0
; LA32R-NEXT: srli.w $a2, $a0, 1
; LA32R-NEXT: lu12i.w $a5, 349525
; LA32R-NEXT: ori $a5, $a5, 1365
; LA32R-NEXT: and $a2, $a2, $a5
; LA32R-NEXT: and $a0, $a0, $a5
; LA32R-NEXT: slli.w $a0, $a0, 1
; LA32R-NEXT: or $a0, $a2, $a0
; LA32R-NEXT: srli.w $a2, $a1, 4
; LA32R-NEXT: and $a2, $a2, $a3
; LA32R-NEXT: and $a1, $a1, $a3
; LA32R-NEXT: slli.w $a1, $a1, 4
; LA32R-NEXT: or $a1, $a2, $a1
; LA32R-NEXT: srli.w $a2, $a1, 2
; LA32R-NEXT: and $a2, $a2, $a4
; LA32R-NEXT: and $a1, $a1, $a4
; LA32R-NEXT: slli.w $a1, $a1, 2
; LA32R-NEXT: or $a1, $a2, $a1
; LA32R-NEXT: srli.w $a2, $a1, 1
; LA32R-NEXT: and $a2, $a2, $a5
; LA32R-NEXT: and $a1, $a1, $a5
; LA32R-NEXT: slli.w $a1, $a1, 1
; LA32R-NEXT: or $a1, $a2, $a1
; LA32R-NEXT: ret
;
; LA32S-LABEL: test_bswap_bitreverse_i64:
; LA32S: # %bb.0:
; LA32S-NEXT: bitrev.4b $a0, $a0
; LA32S-NEXT: bitrev.4b $a1, $a1
; LA32S-NEXT: ret
;
; LA64-LABEL: test_bswap_bitreverse_i64:
; LA64: # %bb.0:
; LA64-NEXT: bitrev.8b $a0, $a0
; LA64-NEXT: ret
%tmp = call i64 @llvm.bswap.i64(i64 %a)
%tmp2 = call i64 @llvm.bitreverse.i64(i64 %tmp)
ret i64 %tmp2
}
define i16 @test_bitreverse_bswap_i16(i16 %a) nounwind {
; LA32R-LABEL: test_bitreverse_bswap_i16:
; LA32R: # %bb.0:
; LA32R-NEXT: andi $a1, $a0, 3855
; LA32R-NEXT: slli.w $a1, $a1, 4
; LA32R-NEXT: srli.w $a0, $a0, 4
; LA32R-NEXT: andi $a0, $a0, 3855
; LA32R-NEXT: or $a0, $a0, $a1
; LA32R-NEXT: srli.w $a1, $a0, 2
; LA32R-NEXT: lu12i.w $a2, 3
; LA32R-NEXT: ori $a2, $a2, 819
; LA32R-NEXT: and $a1, $a1, $a2
; LA32R-NEXT: and $a0, $a0, $a2
; LA32R-NEXT: slli.w $a0, $a0, 2
; LA32R-NEXT: or $a0, $a1, $a0
; LA32R-NEXT: srli.w $a1, $a0, 1
; LA32R-NEXT: lu12i.w $a2, 5
; LA32R-NEXT: ori $a2, $a2, 1365
; LA32R-NEXT: and $a1, $a1, $a2
; LA32R-NEXT: and $a0, $a0, $a2
; LA32R-NEXT: slli.w $a0, $a0, 1
; LA32R-NEXT: or $a0, $a1, $a0
; LA32R-NEXT: ret
;
; LA32S-LABEL: test_bitreverse_bswap_i16:
; LA32S: # %bb.0:
; LA32S-NEXT: revb.2h $a0, $a0
; LA32S-NEXT: bitrev.w $a0, $a0
; LA32S-NEXT: srli.w $a0, $a0, 16
; LA32S-NEXT: ret
;
; LA64-LABEL: test_bitreverse_bswap_i16:
; LA64: # %bb.0:
; LA64-NEXT: revb.2h $a0, $a0
; LA64-NEXT: bitrev.d $a0, $a0
; LA64-NEXT: srli.d $a0, $a0, 48
; LA64-NEXT: ret
%tmp = call i16 @llvm.bitreverse.i16(i16 %a)
%tmp2 = call i16 @llvm.bswap.i16(i16 %tmp)
ret i16 %tmp2
}
define i32 @test_bitreverse_bswap_i32(i32 %a) nounwind {
; LA32R-LABEL: test_bitreverse_bswap_i32:
; LA32R: # %bb.0:
; LA32R-NEXT: srli.w $a1, $a0, 4
; LA32R-NEXT: lu12i.w $a2, 61680
; LA32R-NEXT: ori $a2, $a2, 3855
; LA32R-NEXT: and $a1, $a1, $a2
; LA32R-NEXT: and $a0, $a0, $a2
; LA32R-NEXT: slli.w $a0, $a0, 4
; LA32R-NEXT: or $a0, $a1, $a0
; LA32R-NEXT: srli.w $a1, $a0, 2
; LA32R-NEXT: lu12i.w $a2, 209715
; LA32R-NEXT: ori $a2, $a2, 819
; LA32R-NEXT: and $a1, $a1, $a2
; LA32R-NEXT: and $a0, $a0, $a2
; LA32R-NEXT: slli.w $a0, $a0, 2
; LA32R-NEXT: or $a0, $a1, $a0
; LA32R-NEXT: srli.w $a1, $a0, 1
; LA32R-NEXT: lu12i.w $a2, 349525
; LA32R-NEXT: ori $a2, $a2, 1365
; LA32R-NEXT: and $a1, $a1, $a2
; LA32R-NEXT: and $a0, $a0, $a2
; LA32R-NEXT: slli.w $a0, $a0, 1
; LA32R-NEXT: or $a0, $a1, $a0
; LA32R-NEXT: ret
;
; LA32S-LABEL: test_bitreverse_bswap_i32:
; LA32S: # %bb.0:
; LA32S-NEXT: bitrev.4b $a0, $a0
; LA32S-NEXT: ret
;
; LA64-LABEL: test_bitreverse_bswap_i32:
; LA64: # %bb.0:
; LA64-NEXT: bitrev.4b $a0, $a0
; LA64-NEXT: ret
%tmp = call i32 @llvm.bitreverse.i32(i32 %a)
%tmp2 = call i32 @llvm.bswap.i32(i32 %tmp)
ret i32 %tmp2
}
define i64 @test_bitreverse_bswap_i64(i64 %a) nounwind {
; LA32R-LABEL: test_bitreverse_bswap_i64:
; LA32R: # %bb.0:
; LA32R-NEXT: srli.w $a2, $a0, 4
; LA32R-NEXT: lu12i.w $a3, 61680
; LA32R-NEXT: ori $a3, $a3, 3855
; LA32R-NEXT: and $a2, $a2, $a3
; LA32R-NEXT: and $a0, $a0, $a3
; LA32R-NEXT: slli.w $a0, $a0, 4
; LA32R-NEXT: or $a0, $a2, $a0
; LA32R-NEXT: srli.w $a2, $a0, 2
; LA32R-NEXT: lu12i.w $a4, 209715
; LA32R-NEXT: ori $a4, $a4, 819
; LA32R-NEXT: and $a2, $a2, $a4
; LA32R-NEXT: and $a0, $a0, $a4
; LA32R-NEXT: slli.w $a0, $a0, 2
; LA32R-NEXT: or $a0, $a2, $a0
; LA32R-NEXT: srli.w $a2, $a0, 1
; LA32R-NEXT: lu12i.w $a5, 349525
; LA32R-NEXT: ori $a5, $a5, 1365
; LA32R-NEXT: and $a2, $a2, $a5
; LA32R-NEXT: and $a0, $a0, $a5
; LA32R-NEXT: slli.w $a0, $a0, 1
; LA32R-NEXT: or $a0, $a2, $a0
; LA32R-NEXT: srli.w $a2, $a1, 4
; LA32R-NEXT: and $a2, $a2, $a3
; LA32R-NEXT: and $a1, $a1, $a3
; LA32R-NEXT: slli.w $a1, $a1, 4
; LA32R-NEXT: or $a1, $a2, $a1
; LA32R-NEXT: srli.w $a2, $a1, 2
; LA32R-NEXT: and $a2, $a2, $a4
; LA32R-NEXT: and $a1, $a1, $a4
; LA32R-NEXT: slli.w $a1, $a1, 2
; LA32R-NEXT: or $a1, $a2, $a1
; LA32R-NEXT: srli.w $a2, $a1, 1
; LA32R-NEXT: and $a2, $a2, $a5
; LA32R-NEXT: and $a1, $a1, $a5
; LA32R-NEXT: slli.w $a1, $a1, 1
; LA32R-NEXT: or $a1, $a2, $a1
; LA32R-NEXT: ret
;
; LA32S-LABEL: test_bitreverse_bswap_i64:
; LA32S: # %bb.0:
; LA32S-NEXT: bitrev.4b $a0, $a0
; LA32S-NEXT: bitrev.4b $a1, $a1
; LA32S-NEXT: ret
;
; LA64-LABEL: test_bitreverse_bswap_i64:
; LA64: # %bb.0:
; LA64-NEXT: bitrev.8b $a0, $a0
; LA64-NEXT: ret
%tmp = call i64 @llvm.bitreverse.i64(i64 %a)
%tmp2 = call i64 @llvm.bswap.i64(i64 %tmp)
ret i64 %tmp2
}
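; pr55484: a 16-bit byte swap written as (lshr i32 %0, 8) | (shl i32 %0, 8),
; then truncated to i16 and sign-extended; regression test for llvm.org/pr55484.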
define i32 @pr55484(i32 %0) {
; LA32R-LABEL: pr55484:
; LA32R: # %bb.0:
; LA32R-NEXT: slli.w $a1, $a0, 8
; LA32R-NEXT: slli.w $a0, $a0, 24
; LA32R-NEXT: or $a0, $a0, $a1
; LA32R-NEXT: srai.w $a0, $a0, 16
; LA32R-NEXT: ret
;
; LA32S-LABEL: pr55484:
; LA32S: # %bb.0:
; LA32S-NEXT: srli.w $a1, $a0, 8
; LA32S-NEXT: slli.w $a0, $a0, 8
; LA32S-NEXT: or $a0, $a1, $a0
; LA32S-NEXT: ext.w.h $a0, $a0
; LA32S-NEXT: ret
;
; LA64-LABEL: pr55484:
; LA64: # %bb.0:
; LA64-NEXT: srli.d $a1, $a0, 8
; LA64-NEXT: slli.d $a0, $a0, 8
; LA64-NEXT: or $a0, $a1, $a0
; LA64-NEXT: ext.w.h $a0, $a0
; LA64-NEXT: ret
%2 = lshr i32 %0, 8
%3 = shl i32 %0, 8
%4 = or i32 %2, %3
%5 = trunc i32 %4 to i16
%6 = sext i16 %5 to i32
ret i32 %6
}