
According to the official LoongArch reference manual, the 32-bit LoongArch is divided into two variants: the Reduced version (LA32R) and the Standard version (LA32S). LA32S extends LA32R by adding additional instructions, and the 64-bit version (LA64) fully includes the LA32S instruction set. This patch introduces a new target feature `32s` for the LoongArch backend, enabling support for instructions specific to the LA32S variant. The LA32S extension includes the following additional instructions: - ALSL.W - {AND,OR}N - B{EQ,NE}Z - BITREV.{4B,W} - BSTR{INS,PICK}.W - BYTEPICK.W - CL{O,Z}.W - CPUCFG - CT{O,Z}.W - EXT.W.{B,H} - F{LD,ST}X.{D,S} - MASK{EQ,NE}Z - PC{ADDI,ALAU12I} - REVB.2H - ROTR{I}.W Additionally, LA32R defines three new instruction aliases: - RDCNTID.W RJ => RDTIMEL.W ZERO, RJ - RDCNTVH.W RD => RDTIMEH.W RD, ZERO - RDCNTVL.W RD => RDTIMEL.W RD, ZERO
690 lines
22 KiB
LLVM
690 lines
22 KiB
LLVM
; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
;; Test lowering of the llvm.bitreverse.* intrinsics for three configurations:
;; LA32R (loongarch32 without the 32s feature), LA32S (loongarch32 with +32s),
;; and LA64.
; RUN: llc --mtriple=loongarch32 -mattr=-32s,+d --verify-machineinstrs < %s \
; RUN: | FileCheck %s --check-prefix=LA32R
; RUN: llc --mtriple=loongarch32 -mattr=+32s,+d --verify-machineinstrs < %s \
; RUN: | FileCheck %s --check-prefix=LA32S
; RUN: llc --mtriple=loongarch64 -mattr=+d --verify-machineinstrs < %s \
; RUN: | FileCheck %s --check-prefix=LA64

declare i7 @llvm.bitreverse.i7(i7)
declare i8 @llvm.bitreverse.i8(i8)
declare i16 @llvm.bitreverse.i16(i16)
declare i24 @llvm.bitreverse.i24(i24)
declare i32 @llvm.bitreverse.i32(i32)
declare i48 @llvm.bitreverse.i48(i48)
declare i64 @llvm.bitreverse.i64(i64)
declare i77 @llvm.bitreverse.i77(i77)
declare i128 @llvm.bitreverse.i128(i128)

;; i8: LA32R expands via shift/and masks; LA32S and LA64 both select bitrev.4b.
define i8 @test_bitreverse_i8(i8 %a) nounwind {
; LA32R-LABEL: test_bitreverse_i8:
; LA32R: # %bb.0:
; LA32R-NEXT: andi $a1, $a0, 240
; LA32R-NEXT: andi $a0, $a0, 15
; LA32R-NEXT: slli.w $a0, $a0, 4
; LA32R-NEXT: srli.w $a1, $a1, 4
; LA32R-NEXT: or $a0, $a1, $a0
; LA32R-NEXT: andi $a1, $a0, 51
; LA32R-NEXT: slli.w $a1, $a1, 2
; LA32R-NEXT: srli.w $a0, $a0, 2
; LA32R-NEXT: andi $a0, $a0, 51
; LA32R-NEXT: or $a0, $a0, $a1
; LA32R-NEXT: andi $a1, $a0, 85
; LA32R-NEXT: slli.w $a1, $a1, 1
; LA32R-NEXT: srli.w $a0, $a0, 1
; LA32R-NEXT: andi $a0, $a0, 85
; LA32R-NEXT: or $a0, $a0, $a1
; LA32R-NEXT: ret
;
; LA32S-LABEL: test_bitreverse_i8:
; LA32S: # %bb.0:
; LA32S-NEXT: bitrev.4b $a0, $a0
; LA32S-NEXT: ret
;
; LA64-LABEL: test_bitreverse_i8:
; LA64: # %bb.0:
; LA64-NEXT: bitrev.4b $a0, $a0
; LA64-NEXT: ret
%tmp = call i8 @llvm.bitreverse.i8(i8 %a)
ret i8 %tmp
}

;; i16: LA32S/LA64 use full-width bitrev then shift right; LA32R expands.
define i16 @test_bitreverse_i16(i16 %a) nounwind {
; LA32R-LABEL: test_bitreverse_i16:
; LA32R: # %bb.0:
; LA32R-NEXT: lu12i.w $a1, 15
; LA32R-NEXT: ori $a1, $a1, 3840
; LA32R-NEXT: and $a1, $a0, $a1
; LA32R-NEXT: srli.w $a1, $a1, 8
; LA32R-NEXT: slli.w $a0, $a0, 8
; LA32R-NEXT: or $a0, $a0, $a1
; LA32R-NEXT: andi $a1, $a0, 3855
; LA32R-NEXT: slli.w $a1, $a1, 4
; LA32R-NEXT: srli.w $a0, $a0, 4
; LA32R-NEXT: andi $a0, $a0, 3855
; LA32R-NEXT: or $a0, $a0, $a1
; LA32R-NEXT: srli.w $a1, $a0, 2
; LA32R-NEXT: lu12i.w $a2, 3
; LA32R-NEXT: ori $a2, $a2, 819
; LA32R-NEXT: and $a1, $a1, $a2
; LA32R-NEXT: and $a0, $a0, $a2
; LA32R-NEXT: slli.w $a0, $a0, 2
; LA32R-NEXT: or $a0, $a1, $a0
; LA32R-NEXT: srli.w $a1, $a0, 1
; LA32R-NEXT: lu12i.w $a2, 5
; LA32R-NEXT: ori $a2, $a2, 1365
; LA32R-NEXT: and $a1, $a1, $a2
; LA32R-NEXT: and $a0, $a0, $a2
; LA32R-NEXT: slli.w $a0, $a0, 1
; LA32R-NEXT: or $a0, $a1, $a0
; LA32R-NEXT: ret
;
; LA32S-LABEL: test_bitreverse_i16:
; LA32S: # %bb.0:
; LA32S-NEXT: bitrev.w $a0, $a0
; LA32S-NEXT: srli.w $a0, $a0, 16
; LA32S-NEXT: ret
;
; LA64-LABEL: test_bitreverse_i16:
; LA64: # %bb.0:
; LA64-NEXT: bitrev.d $a0, $a0
; LA64-NEXT: srli.d $a0, $a0, 48
; LA64-NEXT: ret
%tmp = call i16 @llvm.bitreverse.i16(i16 %a)
ret i16 %tmp
}

;; i32: LA32S and LA64 select a single bitrev.w; LA32R expands byte-swap + masks.
define i32 @test_bitreverse_i32(i32 %a) nounwind {
; LA32R-LABEL: test_bitreverse_i32:
; LA32R: # %bb.0:
; LA32R-NEXT: srli.w $a1, $a0, 8
; LA32R-NEXT: lu12i.w $a2, 15
; LA32R-NEXT: ori $a2, $a2, 3840
; LA32R-NEXT: and $a1, $a1, $a2
; LA32R-NEXT: srli.w $a3, $a0, 24
; LA32R-NEXT: or $a1, $a1, $a3
; LA32R-NEXT: and $a2, $a0, $a2
; LA32R-NEXT: slli.w $a2, $a2, 8
; LA32R-NEXT: slli.w $a0, $a0, 24
; LA32R-NEXT: or $a0, $a0, $a2
; LA32R-NEXT: or $a0, $a0, $a1
; LA32R-NEXT: srli.w $a1, $a0, 4
; LA32R-NEXT: lu12i.w $a2, 61680
; LA32R-NEXT: ori $a2, $a2, 3855
; LA32R-NEXT: and $a1, $a1, $a2
; LA32R-NEXT: and $a0, $a0, $a2
; LA32R-NEXT: slli.w $a0, $a0, 4
; LA32R-NEXT: or $a0, $a1, $a0
; LA32R-NEXT: srli.w $a1, $a0, 2
; LA32R-NEXT: lu12i.w $a2, 209715
; LA32R-NEXT: ori $a2, $a2, 819
; LA32R-NEXT: and $a1, $a1, $a2
; LA32R-NEXT: and $a0, $a0, $a2
; LA32R-NEXT: slli.w $a0, $a0, 2
; LA32R-NEXT: or $a0, $a1, $a0
; LA32R-NEXT: srli.w $a1, $a0, 1
; LA32R-NEXT: lu12i.w $a2, 349525
; LA32R-NEXT: ori $a2, $a2, 1365
; LA32R-NEXT: and $a1, $a1, $a2
; LA32R-NEXT: and $a0, $a0, $a2
; LA32R-NEXT: slli.w $a0, $a0, 1
; LA32R-NEXT: or $a0, $a1, $a0
; LA32R-NEXT: ret
;
; LA32S-LABEL: test_bitreverse_i32:
; LA32S: # %bb.0:
; LA32S-NEXT: bitrev.w $a0, $a0
; LA32S-NEXT: ret
;
; LA64-LABEL: test_bitreverse_i32:
; LA64: # %bb.0:
; LA64-NEXT: bitrev.w $a0, $a0
; LA64-NEXT: ret
%tmp = call i32 @llvm.bitreverse.i32(i32 %a)
ret i32 %tmp
}

;; i64: LA32S reverses the two halves with bitrev.w and swaps them; LA64 uses
;; a single bitrev.d; LA32R expands both halves.
define i64 @test_bitreverse_i64(i64 %a) nounwind {
; LA32R-LABEL: test_bitreverse_i64:
; LA32R: # %bb.0:
; LA32R-NEXT: srli.w $a2, $a1, 8
; LA32R-NEXT: lu12i.w $a3, 15
; LA32R-NEXT: ori $a3, $a3, 3840
; LA32R-NEXT: and $a2, $a2, $a3
; LA32R-NEXT: srli.w $a4, $a1, 24
; LA32R-NEXT: or $a2, $a2, $a4
; LA32R-NEXT: and $a4, $a1, $a3
; LA32R-NEXT: slli.w $a4, $a4, 8
; LA32R-NEXT: slli.w $a1, $a1, 24
; LA32R-NEXT: or $a1, $a1, $a4
; LA32R-NEXT: or $a1, $a1, $a2
; LA32R-NEXT: srli.w $a2, $a1, 4
; LA32R-NEXT: lu12i.w $a4, 61680
; LA32R-NEXT: ori $a4, $a4, 3855
; LA32R-NEXT: and $a2, $a2, $a4
; LA32R-NEXT: and $a1, $a1, $a4
; LA32R-NEXT: slli.w $a1, $a1, 4
; LA32R-NEXT: or $a1, $a2, $a1
; LA32R-NEXT: srli.w $a2, $a1, 2
; LA32R-NEXT: lu12i.w $a5, 209715
; LA32R-NEXT: ori $a5, $a5, 819
; LA32R-NEXT: and $a2, $a2, $a5
; LA32R-NEXT: and $a1, $a1, $a5
; LA32R-NEXT: slli.w $a1, $a1, 2
; LA32R-NEXT: or $a1, $a2, $a1
; LA32R-NEXT: srli.w $a2, $a1, 1
; LA32R-NEXT: lu12i.w $a6, 349525
; LA32R-NEXT: ori $a6, $a6, 1365
; LA32R-NEXT: and $a2, $a2, $a6
; LA32R-NEXT: and $a1, $a1, $a6
; LA32R-NEXT: slli.w $a1, $a1, 1
; LA32R-NEXT: or $a2, $a2, $a1
; LA32R-NEXT: srli.w $a1, $a0, 8
; LA32R-NEXT: and $a1, $a1, $a3
; LA32R-NEXT: srli.w $a7, $a0, 24
; LA32R-NEXT: or $a1, $a1, $a7
; LA32R-NEXT: and $a3, $a0, $a3
; LA32R-NEXT: slli.w $a3, $a3, 8
; LA32R-NEXT: slli.w $a0, $a0, 24
; LA32R-NEXT: or $a0, $a0, $a3
; LA32R-NEXT: or $a0, $a0, $a1
; LA32R-NEXT: srli.w $a1, $a0, 4
; LA32R-NEXT: and $a1, $a1, $a4
; LA32R-NEXT: and $a0, $a0, $a4
; LA32R-NEXT: slli.w $a0, $a0, 4
; LA32R-NEXT: or $a0, $a1, $a0
; LA32R-NEXT: srli.w $a1, $a0, 2
; LA32R-NEXT: and $a1, $a1, $a5
; LA32R-NEXT: and $a0, $a0, $a5
; LA32R-NEXT: slli.w $a0, $a0, 2
; LA32R-NEXT: or $a0, $a1, $a0
; LA32R-NEXT: srli.w $a1, $a0, 1
; LA32R-NEXT: and $a1, $a1, $a6
; LA32R-NEXT: and $a0, $a0, $a6
; LA32R-NEXT: slli.w $a0, $a0, 1
; LA32R-NEXT: or $a1, $a1, $a0
; LA32R-NEXT: move $a0, $a2
; LA32R-NEXT: ret
;
; LA32S-LABEL: test_bitreverse_i64:
; LA32S: # %bb.0:
; LA32S-NEXT: bitrev.w $a2, $a1
; LA32S-NEXT: bitrev.w $a1, $a0
; LA32S-NEXT: move $a0, $a2
; LA32S-NEXT: ret
;
; LA64-LABEL: test_bitreverse_i64:
; LA64: # %bb.0:
; LA64-NEXT: bitrev.d $a0, $a0
; LA64-NEXT: ret
%tmp = call i64 @llvm.bitreverse.i64(i64 %a)
ret i64 %tmp
}

;; Bitreverse on non-native integer widths.

;; i7: reversed in a full register, then shifted right by 32-7=25 (LA32) or
;; 64-7=57 (LA64) to drop the unused low bits.
define i7 @test_bitreverse_i7(i7 %a) nounwind {
; LA32R-LABEL: test_bitreverse_i7:
; LA32R: # %bb.0:
; LA32R-NEXT: srli.w $a1, $a0, 8
; LA32R-NEXT: lu12i.w $a2, 15
; LA32R-NEXT: ori $a2, $a2, 3840
; LA32R-NEXT: and $a1, $a1, $a2
; LA32R-NEXT: srli.w $a3, $a0, 24
; LA32R-NEXT: or $a1, $a1, $a3
; LA32R-NEXT: and $a2, $a0, $a2
; LA32R-NEXT: slli.w $a2, $a2, 8
; LA32R-NEXT: slli.w $a0, $a0, 24
; LA32R-NEXT: or $a0, $a0, $a2
; LA32R-NEXT: or $a0, $a0, $a1
; LA32R-NEXT: srli.w $a1, $a0, 4
; LA32R-NEXT: lu12i.w $a2, 61680
; LA32R-NEXT: ori $a2, $a2, 3855
; LA32R-NEXT: and $a1, $a1, $a2
; LA32R-NEXT: and $a0, $a0, $a2
; LA32R-NEXT: slli.w $a0, $a0, 4
; LA32R-NEXT: or $a0, $a1, $a0
; LA32R-NEXT: srli.w $a1, $a0, 2
; LA32R-NEXT: lu12i.w $a2, 209715
; LA32R-NEXT: ori $a2, $a2, 819
; LA32R-NEXT: and $a1, $a1, $a2
; LA32R-NEXT: and $a0, $a0, $a2
; LA32R-NEXT: slli.w $a0, $a0, 2
; LA32R-NEXT: or $a0, $a1, $a0
; LA32R-NEXT: srli.w $a1, $a0, 1
; LA32R-NEXT: lu12i.w $a2, 344064
; LA32R-NEXT: and $a1, $a1, $a2
; LA32R-NEXT: lu12i.w $a2, 348160
; LA32R-NEXT: and $a0, $a0, $a2
; LA32R-NEXT: slli.w $a0, $a0, 1
; LA32R-NEXT: or $a0, $a1, $a0
; LA32R-NEXT: srli.w $a0, $a0, 25
; LA32R-NEXT: ret
;
; LA32S-LABEL: test_bitreverse_i7:
; LA32S: # %bb.0:
; LA32S-NEXT: bitrev.w $a0, $a0
; LA32S-NEXT: srli.w $a0, $a0, 25
; LA32S-NEXT: ret
;
; LA64-LABEL: test_bitreverse_i7:
; LA64: # %bb.0:
; LA64-NEXT: bitrev.d $a0, $a0
; LA64-NEXT: srli.d $a0, $a0, 57
; LA64-NEXT: ret
%tmp = call i7 @llvm.bitreverse.i7(i7 %a)
ret i7 %tmp
}

;; i24: full-register reverse followed by a right shift of 8 (LA32) or 40 (LA64).
define i24 @test_bitreverse_i24(i24 %a) nounwind {
; LA32R-LABEL: test_bitreverse_i24:
; LA32R: # %bb.0:
; LA32R-NEXT: srli.w $a1, $a0, 8
; LA32R-NEXT: lu12i.w $a2, 15
; LA32R-NEXT: ori $a2, $a2, 3840
; LA32R-NEXT: and $a1, $a1, $a2
; LA32R-NEXT: srli.w $a3, $a0, 24
; LA32R-NEXT: or $a1, $a1, $a3
; LA32R-NEXT: and $a2, $a0, $a2
; LA32R-NEXT: slli.w $a2, $a2, 8
; LA32R-NEXT: slli.w $a0, $a0, 24
; LA32R-NEXT: or $a0, $a0, $a2
; LA32R-NEXT: or $a0, $a0, $a1
; LA32R-NEXT: srli.w $a1, $a0, 4
; LA32R-NEXT: lu12i.w $a2, 61680
; LA32R-NEXT: ori $a2, $a2, 3855
; LA32R-NEXT: and $a1, $a1, $a2
; LA32R-NEXT: and $a0, $a0, $a2
; LA32R-NEXT: slli.w $a0, $a0, 4
; LA32R-NEXT: or $a0, $a1, $a0
; LA32R-NEXT: srli.w $a1, $a0, 2
; LA32R-NEXT: lu12i.w $a2, 209715
; LA32R-NEXT: ori $a2, $a2, 819
; LA32R-NEXT: and $a1, $a1, $a2
; LA32R-NEXT: and $a0, $a0, $a2
; LA32R-NEXT: slli.w $a0, $a0, 2
; LA32R-NEXT: or $a0, $a1, $a0
; LA32R-NEXT: srli.w $a1, $a0, 1
; LA32R-NEXT: lu12i.w $a2, 349525
; LA32R-NEXT: ori $a2, $a2, 1280
; LA32R-NEXT: and $a1, $a1, $a2
; LA32R-NEXT: and $a0, $a0, $a2
; LA32R-NEXT: slli.w $a0, $a0, 1
; LA32R-NEXT: or $a0, $a1, $a0
; LA32R-NEXT: srli.w $a0, $a0, 8
; LA32R-NEXT: ret
;
; LA32S-LABEL: test_bitreverse_i24:
; LA32S: # %bb.0:
; LA32S-NEXT: bitrev.w $a0, $a0
; LA32S-NEXT: srli.w $a0, $a0, 8
; LA32S-NEXT: ret
;
; LA64-LABEL: test_bitreverse_i24:
; LA64: # %bb.0:
; LA64-NEXT: bitrev.d $a0, $a0
; LA64-NEXT: srli.d $a0, $a0, 40
; LA64-NEXT: ret
%tmp = call i24 @llvm.bitreverse.i24(i24 %a)
ret i24 %tmp
}

;; i48: on LA32S the two reversed words are recombined with bytepick.w; LA64
;; uses bitrev.d + srli.d 16; LA32R fully expands both words.
define i48 @test_bitreverse_i48(i48 %a) nounwind {
; LA32R-LABEL: test_bitreverse_i48:
; LA32R: # %bb.0:
; LA32R-NEXT: srli.w $a2, $a0, 8
; LA32R-NEXT: lu12i.w $a3, 15
; LA32R-NEXT: ori $a3, $a3, 3840
; LA32R-NEXT: and $a2, $a2, $a3
; LA32R-NEXT: srli.w $a4, $a0, 24
; LA32R-NEXT: or $a2, $a2, $a4
; LA32R-NEXT: and $a4, $a0, $a3
; LA32R-NEXT: slli.w $a4, $a4, 8
; LA32R-NEXT: slli.w $a0, $a0, 24
; LA32R-NEXT: or $a0, $a0, $a4
; LA32R-NEXT: or $a0, $a0, $a2
; LA32R-NEXT: srli.w $a2, $a0, 4
; LA32R-NEXT: lu12i.w $a4, 61680
; LA32R-NEXT: ori $a4, $a4, 3855
; LA32R-NEXT: and $a2, $a2, $a4
; LA32R-NEXT: and $a0, $a0, $a4
; LA32R-NEXT: slli.w $a0, $a0, 4
; LA32R-NEXT: or $a0, $a2, $a0
; LA32R-NEXT: srli.w $a2, $a0, 2
; LA32R-NEXT: lu12i.w $a5, 209715
; LA32R-NEXT: ori $a5, $a5, 819
; LA32R-NEXT: and $a2, $a2, $a5
; LA32R-NEXT: and $a0, $a0, $a5
; LA32R-NEXT: slli.w $a0, $a0, 2
; LA32R-NEXT: or $a0, $a2, $a0
; LA32R-NEXT: srli.w $a2, $a0, 1
; LA32R-NEXT: lu12i.w $a6, 349525
; LA32R-NEXT: ori $a6, $a6, 1365
; LA32R-NEXT: and $a2, $a2, $a6
; LA32R-NEXT: and $a0, $a0, $a6
; LA32R-NEXT: slli.w $a0, $a0, 1
; LA32R-NEXT: or $a2, $a2, $a0
; LA32R-NEXT: slli.w $a0, $a2, 16
; LA32R-NEXT: srli.w $a6, $a1, 8
; LA32R-NEXT: and $a6, $a6, $a3
; LA32R-NEXT: srli.w $a7, $a1, 24
; LA32R-NEXT: or $a6, $a6, $a7
; LA32R-NEXT: and $a3, $a1, $a3
; LA32R-NEXT: slli.w $a3, $a3, 8
; LA32R-NEXT: slli.w $a1, $a1, 24
; LA32R-NEXT: or $a1, $a1, $a3
; LA32R-NEXT: or $a1, $a1, $a6
; LA32R-NEXT: srli.w $a3, $a1, 4
; LA32R-NEXT: and $a3, $a3, $a4
; LA32R-NEXT: and $a1, $a1, $a4
; LA32R-NEXT: slli.w $a1, $a1, 4
; LA32R-NEXT: or $a1, $a3, $a1
; LA32R-NEXT: srli.w $a3, $a1, 2
; LA32R-NEXT: and $a3, $a3, $a5
; LA32R-NEXT: and $a1, $a1, $a5
; LA32R-NEXT: slli.w $a1, $a1, 2
; LA32R-NEXT: or $a1, $a3, $a1
; LA32R-NEXT: srli.w $a3, $a1, 1
; LA32R-NEXT: lu12i.w $a4, 349520
; LA32R-NEXT: and $a3, $a3, $a4
; LA32R-NEXT: and $a1, $a1, $a4
; LA32R-NEXT: slli.w $a1, $a1, 1
; LA32R-NEXT: or $a1, $a3, $a1
; LA32R-NEXT: srli.w $a1, $a1, 16
; LA32R-NEXT: or $a0, $a1, $a0
; LA32R-NEXT: srli.w $a1, $a2, 16
; LA32R-NEXT: ret
;
; LA32S-LABEL: test_bitreverse_i48:
; LA32S: # %bb.0:
; LA32S-NEXT: bitrev.w $a2, $a0
; LA32S-NEXT: bitrev.w $a0, $a1
; LA32S-NEXT: bytepick.w $a0, $a0, $a2, 2
; LA32S-NEXT: srli.w $a1, $a2, 16
; LA32S-NEXT: ret
;
; LA64-LABEL: test_bitreverse_i48:
; LA64: # %bb.0:
; LA64-NEXT: bitrev.d $a0, $a0
; LA64-NEXT: srli.d $a0, $a0, 16
; LA64-NEXT: ret
%tmp = call i48 @llvm.bitreverse.i48(i48 %a)
ret i48 %tmp
}

;; i77: wider than two registers, so the value is passed/returned indirectly
;; ($a0 = sret pointer, $a1 = argument pointer on LA32) and the reversed parts
;; are stitched together with 13/19-bit shifts (77 = 2*32 + 13).
define i77 @test_bitreverse_i77(i77 %a) nounwind {
; LA32R-LABEL: test_bitreverse_i77:
; LA32R: # %bb.0:
; LA32R-NEXT: ld.w $a3, $a1, 4
; LA32R-NEXT: ld.w $a2, $a1, 0
; LA32R-NEXT: ld.w $a5, $a1, 8
; LA32R-NEXT: srli.w $a4, $a3, 8
; LA32R-NEXT: lu12i.w $a1, 15
; LA32R-NEXT: ori $a1, $a1, 3840
; LA32R-NEXT: and $a4, $a4, $a1
; LA32R-NEXT: srli.w $a6, $a3, 24
; LA32R-NEXT: or $a4, $a4, $a6
; LA32R-NEXT: and $a6, $a3, $a1
; LA32R-NEXT: slli.w $a6, $a6, 8
; LA32R-NEXT: slli.w $a3, $a3, 24
; LA32R-NEXT: or $a3, $a3, $a6
; LA32R-NEXT: or $a4, $a3, $a4
; LA32R-NEXT: srli.w $a6, $a4, 4
; LA32R-NEXT: lu12i.w $a3, 61680
; LA32R-NEXT: ori $a3, $a3, 3855
; LA32R-NEXT: and $a6, $a6, $a3
; LA32R-NEXT: and $a4, $a4, $a3
; LA32R-NEXT: slli.w $a4, $a4, 4
; LA32R-NEXT: or $a6, $a6, $a4
; LA32R-NEXT: srli.w $a7, $a6, 2
; LA32R-NEXT: lu12i.w $a4, 209715
; LA32R-NEXT: ori $a4, $a4, 819
; LA32R-NEXT: and $a7, $a7, $a4
; LA32R-NEXT: and $a6, $a6, $a4
; LA32R-NEXT: slli.w $a6, $a6, 2
; LA32R-NEXT: or $a6, $a7, $a6
; LA32R-NEXT: srli.w $a7, $a6, 1
; LA32R-NEXT: lu12i.w $t0, 349525
; LA32R-NEXT: ori $t0, $t0, 1365
; LA32R-NEXT: and $a7, $a7, $t0
; LA32R-NEXT: and $a6, $a6, $t0
; LA32R-NEXT: slli.w $a6, $a6, 1
; LA32R-NEXT: or $a6, $a7, $a6
; LA32R-NEXT: slli.w $a7, $a6, 13
; LA32R-NEXT: srli.w $t1, $a5, 8
; LA32R-NEXT: and $t1, $t1, $a1
; LA32R-NEXT: srli.w $t2, $a5, 24
; LA32R-NEXT: or $t1, $t1, $t2
; LA32R-NEXT: and $t2, $a5, $a1
; LA32R-NEXT: slli.w $t2, $t2, 8
; LA32R-NEXT: slli.w $a5, $a5, 24
; LA32R-NEXT: or $a5, $a5, $t2
; LA32R-NEXT: or $a5, $a5, $t1
; LA32R-NEXT: srli.w $t1, $a5, 4
; LA32R-NEXT: and $t1, $t1, $a3
; LA32R-NEXT: and $a5, $a5, $a3
; LA32R-NEXT: slli.w $a5, $a5, 4
; LA32R-NEXT: or $a5, $t1, $a5
; LA32R-NEXT: srli.w $t1, $a5, 2
; LA32R-NEXT: and $t1, $t1, $a4
; LA32R-NEXT: and $a5, $a5, $a4
; LA32R-NEXT: slli.w $a5, $a5, 2
; LA32R-NEXT: or $a5, $t1, $a5
; LA32R-NEXT: srli.w $t1, $a5, 1
; LA32R-NEXT: lu12i.w $t2, 349440
; LA32R-NEXT: and $t1, $t1, $t2
; LA32R-NEXT: lu12i.w $t2, 349504
; LA32R-NEXT: and $a5, $a5, $t2
; LA32R-NEXT: slli.w $a5, $a5, 1
; LA32R-NEXT: or $a5, $t1, $a5
; LA32R-NEXT: srli.w $a5, $a5, 19
; LA32R-NEXT: or $a5, $a5, $a7
; LA32R-NEXT: srli.w $a6, $a6, 19
; LA32R-NEXT: srli.w $a7, $a2, 8
; LA32R-NEXT: and $a7, $a7, $a1
; LA32R-NEXT: srli.w $t1, $a2, 24
; LA32R-NEXT: or $a7, $a7, $t1
; LA32R-NEXT: and $a1, $a2, $a1
; LA32R-NEXT: slli.w $a1, $a1, 8
; LA32R-NEXT: slli.w $a2, $a2, 24
; LA32R-NEXT: or $a1, $a2, $a1
; LA32R-NEXT: or $a1, $a1, $a7
; LA32R-NEXT: srli.w $a2, $a1, 4
; LA32R-NEXT: and $a2, $a2, $a3
; LA32R-NEXT: and $a1, $a1, $a3
; LA32R-NEXT: slli.w $a1, $a1, 4
; LA32R-NEXT: or $a1, $a2, $a1
; LA32R-NEXT: srli.w $a2, $a1, 2
; LA32R-NEXT: and $a2, $a2, $a4
; LA32R-NEXT: and $a1, $a1, $a4
; LA32R-NEXT: slli.w $a1, $a1, 2
; LA32R-NEXT: or $a1, $a2, $a1
; LA32R-NEXT: srli.w $a2, $a1, 1
; LA32R-NEXT: and $a2, $a2, $t0
; LA32R-NEXT: and $a1, $a1, $t0
; LA32R-NEXT: slli.w $a1, $a1, 1
; LA32R-NEXT: or $a1, $a2, $a1
; LA32R-NEXT: slli.w $a2, $a1, 13
; LA32R-NEXT: or $a2, $a2, $a6
; LA32R-NEXT: srli.w $a1, $a1, 19
; LA32R-NEXT: st.h $a1, $a0, 8
; LA32R-NEXT: st.w $a2, $a0, 4
; LA32R-NEXT: st.w $a5, $a0, 0
; LA32R-NEXT: ret
;
; LA32S-LABEL: test_bitreverse_i77:
; LA32S: # %bb.0:
; LA32S-NEXT: ld.w $a2, $a1, 4
; LA32S-NEXT: ld.w $a3, $a1, 8
; LA32S-NEXT: ld.w $a1, $a1, 0
; LA32S-NEXT: bitrev.w $a2, $a2
; LA32S-NEXT: slli.w $a4, $a2, 13
; LA32S-NEXT: bitrev.w $a3, $a3
; LA32S-NEXT: srli.w $a3, $a3, 19
; LA32S-NEXT: or $a3, $a3, $a4
; LA32S-NEXT: srli.w $a2, $a2, 19
; LA32S-NEXT: bitrev.w $a1, $a1
; LA32S-NEXT: slli.w $a4, $a1, 13
; LA32S-NEXT: or $a2, $a4, $a2
; LA32S-NEXT: srli.w $a1, $a1, 19
; LA32S-NEXT: st.h $a1, $a0, 8
; LA32S-NEXT: st.w $a2, $a0, 4
; LA32S-NEXT: st.w $a3, $a0, 0
; LA32S-NEXT: ret
;
; LA64-LABEL: test_bitreverse_i77:
; LA64: # %bb.0:
; LA64-NEXT: bitrev.d $a2, $a0
; LA64-NEXT: slli.d $a0, $a2, 13
; LA64-NEXT: bitrev.d $a1, $a1
; LA64-NEXT: srli.d $a1, $a1, 51
; LA64-NEXT: or $a0, $a1, $a0
; LA64-NEXT: srli.d $a1, $a2, 51
; LA64-NEXT: ret
%tmp = call i77 @llvm.bitreverse.i77(i77 %a)
ret i77 %tmp
}

;; i128: LA32 passes/returns the value indirectly; LA32S reverses each word and
;; stores them in reversed order; LA64 reverses and swaps the two doublewords.
define i128 @test_bitreverse_i128(i128 %a) nounwind {
; LA32R-LABEL: test_bitreverse_i128:
; LA32R: # %bb.0:
; LA32R-NEXT: ld.w $a3, $a1, 12
; LA32R-NEXT: ld.w $a2, $a1, 0
; LA32R-NEXT: ld.w $a7, $a1, 4
; LA32R-NEXT: ld.w $t0, $a1, 8
; LA32R-NEXT: srli.w $a4, $a3, 8
; LA32R-NEXT: lu12i.w $a1, 15
; LA32R-NEXT: ori $a1, $a1, 3840
; LA32R-NEXT: and $a4, $a4, $a1
; LA32R-NEXT: srli.w $a5, $a3, 24
; LA32R-NEXT: or $a4, $a4, $a5
; LA32R-NEXT: and $a5, $a3, $a1
; LA32R-NEXT: slli.w $a5, $a5, 8
; LA32R-NEXT: slli.w $a3, $a3, 24
; LA32R-NEXT: or $a3, $a3, $a5
; LA32R-NEXT: or $a4, $a3, $a4
; LA32R-NEXT: srli.w $a5, $a4, 4
; LA32R-NEXT: lu12i.w $a3, 61680
; LA32R-NEXT: ori $a3, $a3, 3855
; LA32R-NEXT: and $a5, $a5, $a3
; LA32R-NEXT: and $a4, $a4, $a3
; LA32R-NEXT: slli.w $a4, $a4, 4
; LA32R-NEXT: or $a5, $a5, $a4
; LA32R-NEXT: srli.w $a6, $a5, 2
; LA32R-NEXT: lu12i.w $a4, 209715
; LA32R-NEXT: ori $a4, $a4, 819
; LA32R-NEXT: and $a6, $a6, $a4
; LA32R-NEXT: and $a5, $a5, $a4
; LA32R-NEXT: slli.w $a5, $a5, 2
; LA32R-NEXT: or $a5, $a6, $a5
; LA32R-NEXT: srli.w $t1, $a5, 1
; LA32R-NEXT: lu12i.w $a6, 349525
; LA32R-NEXT: ori $a6, $a6, 1365
; LA32R-NEXT: and $t1, $t1, $a6
; LA32R-NEXT: and $a5, $a5, $a6
; LA32R-NEXT: slli.w $a5, $a5, 1
; LA32R-NEXT: or $a5, $t1, $a5
; LA32R-NEXT: srli.w $t1, $t0, 8
; LA32R-NEXT: and $t1, $t1, $a1
; LA32R-NEXT: srli.w $t2, $t0, 24
; LA32R-NEXT: or $t1, $t1, $t2
; LA32R-NEXT: and $t2, $t0, $a1
; LA32R-NEXT: slli.w $t2, $t2, 8
; LA32R-NEXT: slli.w $t0, $t0, 24
; LA32R-NEXT: or $t0, $t0, $t2
; LA32R-NEXT: or $t0, $t0, $t1
; LA32R-NEXT: srli.w $t1, $t0, 4
; LA32R-NEXT: and $t1, $t1, $a3
; LA32R-NEXT: and $t0, $t0, $a3
; LA32R-NEXT: slli.w $t0, $t0, 4
; LA32R-NEXT: or $t0, $t1, $t0
; LA32R-NEXT: srli.w $t1, $t0, 2
; LA32R-NEXT: and $t1, $t1, $a4
; LA32R-NEXT: and $t0, $t0, $a4
; LA32R-NEXT: slli.w $t0, $t0, 2
; LA32R-NEXT: or $t0, $t1, $t0
; LA32R-NEXT: srli.w $t1, $t0, 1
; LA32R-NEXT: and $t1, $t1, $a6
; LA32R-NEXT: and $t0, $t0, $a6
; LA32R-NEXT: slli.w $t0, $t0, 1
; LA32R-NEXT: or $t0, $t1, $t0
; LA32R-NEXT: srli.w $t1, $a7, 8
; LA32R-NEXT: and $t1, $t1, $a1
; LA32R-NEXT: srli.w $t2, $a7, 24
; LA32R-NEXT: or $t1, $t1, $t2
; LA32R-NEXT: and $t2, $a7, $a1
; LA32R-NEXT: slli.w $t2, $t2, 8
; LA32R-NEXT: slli.w $a7, $a7, 24
; LA32R-NEXT: or $a7, $a7, $t2
; LA32R-NEXT: or $a7, $a7, $t1
; LA32R-NEXT: srli.w $t1, $a7, 4
; LA32R-NEXT: and $t1, $t1, $a3
; LA32R-NEXT: and $a7, $a7, $a3
; LA32R-NEXT: slli.w $a7, $a7, 4
; LA32R-NEXT: or $a7, $t1, $a7
; LA32R-NEXT: srli.w $t1, $a7, 2
; LA32R-NEXT: and $t1, $t1, $a4
; LA32R-NEXT: and $a7, $a7, $a4
; LA32R-NEXT: slli.w $a7, $a7, 2
; LA32R-NEXT: or $a7, $t1, $a7
; LA32R-NEXT: srli.w $t1, $a7, 1
; LA32R-NEXT: and $t1, $t1, $a6
; LA32R-NEXT: and $a7, $a7, $a6
; LA32R-NEXT: slli.w $a7, $a7, 1
; LA32R-NEXT: or $a7, $t1, $a7
; LA32R-NEXT: srli.w $t1, $a2, 8
; LA32R-NEXT: and $t1, $t1, $a1
; LA32R-NEXT: srli.w $t2, $a2, 24
; LA32R-NEXT: or $t1, $t1, $t2
; LA32R-NEXT: and $a1, $a2, $a1
; LA32R-NEXT: slli.w $a1, $a1, 8
; LA32R-NEXT: slli.w $a2, $a2, 24
; LA32R-NEXT: or $a1, $a2, $a1
; LA32R-NEXT: or $a1, $a1, $t1
; LA32R-NEXT: srli.w $a2, $a1, 4
; LA32R-NEXT: and $a2, $a2, $a3
; LA32R-NEXT: and $a1, $a1, $a3
; LA32R-NEXT: slli.w $a1, $a1, 4
; LA32R-NEXT: or $a1, $a2, $a1
; LA32R-NEXT: srli.w $a2, $a1, 2
; LA32R-NEXT: and $a2, $a2, $a4
; LA32R-NEXT: and $a1, $a1, $a4
; LA32R-NEXT: slli.w $a1, $a1, 2
; LA32R-NEXT: or $a1, $a2, $a1
; LA32R-NEXT: srli.w $a2, $a1, 1
; LA32R-NEXT: and $a2, $a2, $a6
; LA32R-NEXT: and $a1, $a1, $a6
; LA32R-NEXT: slli.w $a1, $a1, 1
; LA32R-NEXT: or $a1, $a2, $a1
; LA32R-NEXT: st.w $a1, $a0, 12
; LA32R-NEXT: st.w $a7, $a0, 8
; LA32R-NEXT: st.w $t0, $a0, 4
; LA32R-NEXT: st.w $a5, $a0, 0
; LA32R-NEXT: ret
;
; LA32S-LABEL: test_bitreverse_i128:
; LA32S: # %bb.0:
; LA32S-NEXT: ld.w $a2, $a1, 12
; LA32S-NEXT: ld.w $a3, $a1, 8
; LA32S-NEXT: ld.w $a4, $a1, 4
; LA32S-NEXT: ld.w $a1, $a1, 0
; LA32S-NEXT: bitrev.w $a2, $a2
; LA32S-NEXT: bitrev.w $a3, $a3
; LA32S-NEXT: bitrev.w $a4, $a4
; LA32S-NEXT: bitrev.w $a1, $a1
; LA32S-NEXT: st.w $a1, $a0, 12
; LA32S-NEXT: st.w $a4, $a0, 8
; LA32S-NEXT: st.w $a3, $a0, 4
; LA32S-NEXT: st.w $a2, $a0, 0
; LA32S-NEXT: ret
;
; LA64-LABEL: test_bitreverse_i128:
; LA64: # %bb.0:
; LA64-NEXT: bitrev.d $a2, $a1
; LA64-NEXT: bitrev.d $a1, $a0
; LA64-NEXT: move $a0, $a2
; LA64-NEXT: ret
%tmp = call i128 @llvm.bitreverse.i128(i128 %a)
ret i128 %tmp
}