; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py UTC_ARGS: --version 5
; RUN: llc --mtriple=loongarch32 --mattr=+lasx --verify-machineinstrs < %s \
; RUN: | FileCheck --check-prefix=LA32 %s
; RUN: llc --mtriple=loongarch64 --mattr=+lasx --verify-machineinstrs < %s \
; RUN: | FileCheck --check-prefix=LA64 %s
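
;; This file checks local-exec (LE) TLS code generation: the address is formed
;; with lu12i.w carrying %le_hi20_r plus an add.w/add.d of $tp carrying
;; %le_add_r, and the %le_lo12_r low part is expected to fold directly into the
;; memory operand whenever the access can take an immediate offset.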
@g_i8 = dso_local thread_local(localexec) global i8 0

define dso_local signext i8 @tlsle_load_s8() nounwind {
; LA32-LABEL: tlsle_load_s8:
; LA32: # %bb.0: # %entry
; LA32-NEXT: lu12i.w $a0, %le_hi20_r(g_i8)
; LA32-NEXT: add.w $a0, $a0, $tp, %le_add_r(g_i8)
; LA32-NEXT: ld.b $a0, $a0, %le_lo12_r(g_i8)
; LA32-NEXT: ret
;
; LA64-LABEL: tlsle_load_s8:
; LA64: # %bb.0: # %entry
; LA64-NEXT: lu12i.w $a0, %le_hi20_r(g_i8)
; LA64-NEXT: add.d $a0, $a0, $tp, %le_add_r(g_i8)
; LA64-NEXT: ld.b $a0, $a0, %le_lo12_r(g_i8)
; LA64-NEXT: ret
entry:
%0 = call ptr @llvm.threadlocal.address.p0(ptr @g_i8)
%1 = load i8, ptr %0
ret i8 %1
}

define dso_local zeroext i8 @tlsle_load_u8() nounwind {
; LA32-LABEL: tlsle_load_u8:
; LA32: # %bb.0: # %entry
; LA32-NEXT: lu12i.w $a0, %le_hi20_r(g_i8)
; LA32-NEXT: add.w $a0, $a0, $tp, %le_add_r(g_i8)
; LA32-NEXT: ld.bu $a0, $a0, %le_lo12_r(g_i8)
; LA32-NEXT: ret
;
; LA64-LABEL: tlsle_load_u8:
; LA64: # %bb.0: # %entry
; LA64-NEXT: lu12i.w $a0, %le_hi20_r(g_i8)
; LA64-NEXT: add.d $a0, $a0, $tp, %le_add_r(g_i8)
; LA64-NEXT: ld.bu $a0, $a0, %le_lo12_r(g_i8)
; LA64-NEXT: ret
entry:
%0 = call ptr @llvm.threadlocal.address.p0(ptr @g_i8)
%1 = load i8, ptr %0
ret i8 %1
}

define dso_local void @tlsle_store_i8() nounwind {
; LA32-LABEL: tlsle_store_i8:
; LA32: # %bb.0: # %entry
; LA32-NEXT: lu12i.w $a0, %le_hi20_r(g_i8)
; LA32-NEXT: add.w $a0, $a0, $tp, %le_add_r(g_i8)
; LA32-NEXT: ori $a1, $zero, 1
; LA32-NEXT: st.b $a1, $a0, %le_lo12_r(g_i8)
; LA32-NEXT: ret
;
; LA64-LABEL: tlsle_store_i8:
; LA64: # %bb.0: # %entry
; LA64-NEXT: lu12i.w $a0, %le_hi20_r(g_i8)
; LA64-NEXT: add.d $a0, $a0, $tp, %le_add_r(g_i8)
; LA64-NEXT: ori $a1, $zero, 1
; LA64-NEXT: st.b $a1, $a0, %le_lo12_r(g_i8)
; LA64-NEXT: ret
entry:
%0 = call ptr @llvm.threadlocal.address.p0(ptr @g_i8)
store i8 1, ptr %0
ret void
}

@g_i16 = dso_local thread_local(localexec) global i16 0

define dso_local signext i16 @tlsle_load_s16() nounwind {
; LA32-LABEL: tlsle_load_s16:
; LA32: # %bb.0: # %entry
; LA32-NEXT: lu12i.w $a0, %le_hi20_r(g_i16)
; LA32-NEXT: add.w $a0, $a0, $tp, %le_add_r(g_i16)
; LA32-NEXT: ld.h $a0, $a0, %le_lo12_r(g_i16)
; LA32-NEXT: ret
;
; LA64-LABEL: tlsle_load_s16:
; LA64: # %bb.0: # %entry
; LA64-NEXT: lu12i.w $a0, %le_hi20_r(g_i16)
; LA64-NEXT: add.d $a0, $a0, $tp, %le_add_r(g_i16)
; LA64-NEXT: ld.h $a0, $a0, %le_lo12_r(g_i16)
; LA64-NEXT: ret
entry:
%0 = call ptr @llvm.threadlocal.address.p0(ptr @g_i16)
%1 = load i16, ptr %0
ret i16 %1
}

define dso_local zeroext i16 @tlsle_load_u16() nounwind {
; LA32-LABEL: tlsle_load_u16:
; LA32: # %bb.0: # %entry
; LA32-NEXT: lu12i.w $a0, %le_hi20_r(g_i16)
; LA32-NEXT: add.w $a0, $a0, $tp, %le_add_r(g_i16)
; LA32-NEXT: ld.hu $a0, $a0, %le_lo12_r(g_i16)
; LA32-NEXT: ret
;
; LA64-LABEL: tlsle_load_u16:
; LA64: # %bb.0: # %entry
; LA64-NEXT: lu12i.w $a0, %le_hi20_r(g_i16)
; LA64-NEXT: add.d $a0, $a0, $tp, %le_add_r(g_i16)
; LA64-NEXT: ld.hu $a0, $a0, %le_lo12_r(g_i16)
; LA64-NEXT: ret
entry:
%0 = call ptr @llvm.threadlocal.address.p0(ptr @g_i16)
%1 = load i16, ptr %0
ret i16 %1
}

define dso_local void @tlsle_store_i16() nounwind {
; LA32-LABEL: tlsle_store_i16:
; LA32: # %bb.0: # %entry
; LA32-NEXT: lu12i.w $a0, %le_hi20_r(g_i16)
; LA32-NEXT: add.w $a0, $a0, $tp, %le_add_r(g_i16)
; LA32-NEXT: ori $a1, $zero, 1
; LA32-NEXT: st.h $a1, $a0, %le_lo12_r(g_i16)
; LA32-NEXT: ret
;
; LA64-LABEL: tlsle_store_i16:
; LA64: # %bb.0: # %entry
; LA64-NEXT: lu12i.w $a0, %le_hi20_r(g_i16)
; LA64-NEXT: add.d $a0, $a0, $tp, %le_add_r(g_i16)
; LA64-NEXT: ori $a1, $zero, 1
; LA64-NEXT: st.h $a1, $a0, %le_lo12_r(g_i16)
; LA64-NEXT: ret
entry:
%0 = call ptr @llvm.threadlocal.address.p0(ptr @g_i16)
store i16 1, ptr %0
ret void
}

@g_i32 = dso_local thread_local(localexec) global i32 0

define dso_local signext i32 @tlsle_load_s32() nounwind {
; LA32-LABEL: tlsle_load_s32:
; LA32: # %bb.0: # %entry
; LA32-NEXT: lu12i.w $a0, %le_hi20_r(g_i32)
; LA32-NEXT: add.w $a0, $a0, $tp, %le_add_r(g_i32)
; LA32-NEXT: ld.w $a0, $a0, %le_lo12_r(g_i32)
; LA32-NEXT: ret
;
; LA64-LABEL: tlsle_load_s32:
; LA64: # %bb.0: # %entry
; LA64-NEXT: lu12i.w $a0, %le_hi20_r(g_i32)
; LA64-NEXT: add.d $a0, $a0, $tp, %le_add_r(g_i32)
; LA64-NEXT: ld.w $a0, $a0, %le_lo12_r(g_i32)
; LA64-NEXT: ret
entry:
%0 = call ptr @llvm.threadlocal.address.p0(ptr @g_i32)
%1 = load i32, ptr %0
ret i32 %1
}

define dso_local zeroext i32 @tlsle_load_u32() nounwind {
; LA32-LABEL: tlsle_load_u32:
; LA32: # %bb.0: # %entry
; LA32-NEXT: lu12i.w $a0, %le_hi20_r(g_i32)
; LA32-NEXT: add.w $a0, $a0, $tp, %le_add_r(g_i32)
; LA32-NEXT: ld.w $a0, $a0, %le_lo12_r(g_i32)
; LA32-NEXT: ret
;
; LA64-LABEL: tlsle_load_u32:
; LA64: # %bb.0: # %entry
; LA64-NEXT: lu12i.w $a0, %le_hi20_r(g_i32)
; LA64-NEXT: add.d $a0, $a0, $tp, %le_add_r(g_i32)
; LA64-NEXT: ld.wu $a0, $a0, %le_lo12_r(g_i32)
; LA64-NEXT: ret
entry:
%0 = call ptr @llvm.threadlocal.address.p0(ptr @g_i32)
%1 = load i32, ptr %0
ret i32 %1
}

define dso_local void @tlsle_store_i32() nounwind {
; LA32-LABEL: tlsle_store_i32:
; LA32: # %bb.0: # %entry
; LA32-NEXT: lu12i.w $a0, %le_hi20_r(g_i32)
; LA32-NEXT: add.w $a0, $a0, $tp, %le_add_r(g_i32)
; LA32-NEXT: ori $a1, $zero, 1
; LA32-NEXT: st.w $a1, $a0, %le_lo12_r(g_i32)
; LA32-NEXT: ret
;
; LA64-LABEL: tlsle_store_i32:
; LA64: # %bb.0: # %entry
; LA64-NEXT: lu12i.w $a0, %le_hi20_r(g_i32)
; LA64-NEXT: add.d $a0, $a0, $tp, %le_add_r(g_i32)
; LA64-NEXT: ori $a1, $zero, 1
; LA64-NEXT: st.w $a1, $a0, %le_lo12_r(g_i32)
; LA64-NEXT: ret
entry:
%0 = call ptr @llvm.threadlocal.address.p0(ptr @g_i32)
store i32 1, ptr %0
ret void
}
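
;; A 64-bit access on LA32 is split into two 32-bit operations, so %le_lo12_r
;; is materialized with addi.w and the halves use plain 0/4 offsets; LA64 keeps
;; the folded ld.d/st.d form.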
@g_i64 = dso_local thread_local(localexec) global i64 0

define dso_local i64 @tlsle_load_i64() nounwind {
; LA32-LABEL: tlsle_load_i64:
; LA32: # %bb.0: # %entry
; LA32-NEXT: lu12i.w $a0, %le_hi20_r(g_i64)
; LA32-NEXT: add.w $a0, $a0, $tp, %le_add_r(g_i64)
; LA32-NEXT: addi.w $a1, $a0, %le_lo12_r(g_i64)
; LA32-NEXT: ld.w $a0, $a1, 0
; LA32-NEXT: ld.w $a1, $a1, 4
; LA32-NEXT: ret
;
; LA64-LABEL: tlsle_load_i64:
; LA64: # %bb.0: # %entry
; LA64-NEXT: lu12i.w $a0, %le_hi20_r(g_i64)
; LA64-NEXT: add.d $a0, $a0, $tp, %le_add_r(g_i64)
; LA64-NEXT: ld.d $a0, $a0, %le_lo12_r(g_i64)
; LA64-NEXT: ret
entry:
%0 = call ptr @llvm.threadlocal.address.p0(ptr @g_i64)
%1 = load i64, ptr %0
ret i64 %1
}

define dso_local void @tlsle_store_i64() nounwind {
; LA32-LABEL: tlsle_store_i64:
; LA32: # %bb.0: # %entry
; LA32-NEXT: lu12i.w $a0, %le_hi20_r(g_i64)
; LA32-NEXT: add.w $a0, $a0, $tp, %le_add_r(g_i64)
; LA32-NEXT: addi.w $a0, $a0, %le_lo12_r(g_i64)
; LA32-NEXT: st.w $zero, $a0, 4
; LA32-NEXT: ori $a1, $zero, 1
; LA32-NEXT: st.w $a1, $a0, 0
; LA32-NEXT: ret
;
; LA64-LABEL: tlsle_store_i64:
; LA64: # %bb.0: # %entry
; LA64-NEXT: lu12i.w $a0, %le_hi20_r(g_i64)
; LA64-NEXT: add.d $a0, $a0, $tp, %le_add_r(g_i64)
; LA64-NEXT: ori $a1, $zero, 1
; LA64-NEXT: st.d $a1, $a0, %le_lo12_r(g_i64)
; LA64-NEXT: ret
entry:
%0 = call ptr @llvm.threadlocal.address.p0(ptr @g_i64)
store i64 1, ptr %0
ret void
}
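
;; Floating-point accesses fold %le_lo12_r into fld/fst. Storing the constant
;; 1.0 goes through an integer store of the bit pattern (lu12i.w 260096, i.e.
;; 0x3f800000, for f32; lu52i.d 1023 for f64 on LA64), while LA32 materializes
;; the f64 constant with vldi.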
@g_f32 = dso_local thread_local(localexec) global float 0.0

define dso_local float @tlsle_load_f32() nounwind {
; LA32-LABEL: tlsle_load_f32:
; LA32: # %bb.0: # %entry
; LA32-NEXT: lu12i.w $a0, %le_hi20_r(g_f32)
; LA32-NEXT: add.w $a0, $a0, $tp, %le_add_r(g_f32)
; LA32-NEXT: fld.s $fa0, $a0, %le_lo12_r(g_f32)
; LA32-NEXT: ret
;
; LA64-LABEL: tlsle_load_f32:
; LA64: # %bb.0: # %entry
; LA64-NEXT: lu12i.w $a0, %le_hi20_r(g_f32)
; LA64-NEXT: add.d $a0, $a0, $tp, %le_add_r(g_f32)
; LA64-NEXT: fld.s $fa0, $a0, %le_lo12_r(g_f32)
; LA64-NEXT: ret
entry:
%0 = call ptr @llvm.threadlocal.address.p0(ptr @g_f32)
%1 = load float, ptr %0
ret float %1
}

define dso_local void @tlsle_store_f32() nounwind {
; LA32-LABEL: tlsle_store_f32:
; LA32: # %bb.0: # %entry
; LA32-NEXT: lu12i.w $a0, %le_hi20_r(g_f32)
; LA32-NEXT: add.w $a0, $a0, $tp, %le_add_r(g_f32)
; LA32-NEXT: lu12i.w $a1, 260096
; LA32-NEXT: st.w $a1, $a0, %le_lo12_r(g_f32)
; LA32-NEXT: ret
;
; LA64-LABEL: tlsle_store_f32:
; LA64: # %bb.0: # %entry
; LA64-NEXT: lu12i.w $a0, %le_hi20_r(g_f32)
; LA64-NEXT: add.d $a0, $a0, $tp, %le_add_r(g_f32)
; LA64-NEXT: lu12i.w $a1, 260096
; LA64-NEXT: st.w $a1, $a0, %le_lo12_r(g_f32)
; LA64-NEXT: ret
entry:
%0 = call ptr @llvm.threadlocal.address.p0(ptr @g_f32)
store float 1.0, ptr %0
ret void
}

@g_f64 = dso_local thread_local(localexec) global double 0.0

define dso_local double @tlsle_load_f64() nounwind {
; LA32-LABEL: tlsle_load_f64:
; LA32: # %bb.0: # %entry
; LA32-NEXT: lu12i.w $a0, %le_hi20_r(g_f64)
; LA32-NEXT: add.w $a0, $a0, $tp, %le_add_r(g_f64)
; LA32-NEXT: fld.d $fa0, $a0, %le_lo12_r(g_f64)
; LA32-NEXT: ret
;
; LA64-LABEL: tlsle_load_f64:
; LA64: # %bb.0: # %entry
; LA64-NEXT: lu12i.w $a0, %le_hi20_r(g_f64)
; LA64-NEXT: add.d $a0, $a0, $tp, %le_add_r(g_f64)
; LA64-NEXT: fld.d $fa0, $a0, %le_lo12_r(g_f64)
; LA64-NEXT: ret
entry:
%0 = call ptr @llvm.threadlocal.address.p0(ptr @g_f64)
%1 = load double, ptr %0
ret double %1
}

define dso_local void @tlsle_store_f64() nounwind {
; LA32-LABEL: tlsle_store_f64:
; LA32: # %bb.0: # %entry
; LA32-NEXT: lu12i.w $a0, %le_hi20_r(g_f64)
; LA32-NEXT: add.w $a0, $a0, $tp, %le_add_r(g_f64)
; LA32-NEXT: vldi $vr0, -912
; LA32-NEXT: fst.d $fa0, $a0, %le_lo12_r(g_f64)
; LA32-NEXT: ret
;
; LA64-LABEL: tlsle_store_f64:
; LA64: # %bb.0: # %entry
; LA64-NEXT: lu12i.w $a0, %le_hi20_r(g_f64)
; LA64-NEXT: add.d $a0, $a0, $tp, %le_add_r(g_f64)
; LA64-NEXT: lu52i.d $a1, $zero, 1023
; LA64-NEXT: st.d $a1, $a0, %le_lo12_r(g_f64)
; LA64-NEXT: ret
entry:
%0 = call ptr @llvm.threadlocal.address.p0(ptr @g_f64)
store double 1.0, ptr %0
ret void
}
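
;; Volatile accesses must all be kept: on LA64 both stores below reuse the
;; %le_lo12_r-folded st.d, and the volatile fld/fst pairs keep the folded form.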
@g_m64 = dso_local thread_local(localexec) global i64 0

define dso_local void @tlsle_store_multi() nounwind {
; LA32-LABEL: tlsle_store_multi:
; LA32: # %bb.0: # %entry
; LA32-NEXT: lu12i.w $a0, %le_hi20_r(g_m64)
; LA32-NEXT: add.w $a0, $a0, $tp, %le_add_r(g_m64)
; LA32-NEXT: addi.w $a0, $a0, %le_lo12_r(g_m64)
; LA32-NEXT: st.w $zero, $a0, 4
; LA32-NEXT: ori $a1, $zero, 1
; LA32-NEXT: st.w $a1, $a0, 0
; LA32-NEXT: st.w $zero, $a0, 4
; LA32-NEXT: ori $a1, $zero, 2
; LA32-NEXT: st.w $a1, $a0, 0
; LA32-NEXT: ret
;
; LA64-LABEL: tlsle_store_multi:
; LA64: # %bb.0: # %entry
; LA64-NEXT: lu12i.w $a0, %le_hi20_r(g_m64)
; LA64-NEXT: add.d $a0, $a0, $tp, %le_add_r(g_m64)
; LA64-NEXT: ori $a1, $zero, 1
; LA64-NEXT: st.d $a1, $a0, %le_lo12_r(g_m64)
; LA64-NEXT: ori $a1, $zero, 2
; LA64-NEXT: st.d $a1, $a0, %le_lo12_r(g_m64)
; LA64-NEXT: ret
entry:
%0 = call ptr @llvm.threadlocal.address.p0(ptr @g_m64)
store volatile i64 1, ptr %0
store volatile i64 2, ptr %0
ret void
}

@g_sf32 = dso_local thread_local(localexec) global float 0.0

define dso_local void @tlsle_store_sf32() nounwind {
; LA32-LABEL: tlsle_store_sf32:
; LA32: # %bb.0: # %entry
; LA32-NEXT: lu12i.w $a0, %le_hi20_r(g_sf32)
; LA32-NEXT: add.w $a0, $a0, $tp, %le_add_r(g_sf32)
; LA32-NEXT: fld.s $fa0, $a0, %le_lo12_r(g_sf32)
; LA32-NEXT: fst.s $fa0, $a0, %le_lo12_r(g_sf32)
; LA32-NEXT: ret
;
; LA64-LABEL: tlsle_store_sf32:
; LA64: # %bb.0: # %entry
; LA64-NEXT: lu12i.w $a0, %le_hi20_r(g_sf32)
; LA64-NEXT: add.d $a0, $a0, $tp, %le_add_r(g_sf32)
; LA64-NEXT: fld.s $fa0, $a0, %le_lo12_r(g_sf32)
; LA64-NEXT: fst.s $fa0, $a0, %le_lo12_r(g_sf32)
; LA64-NEXT: ret
entry:
%0 = call ptr @llvm.threadlocal.address.p0(ptr @g_sf32)
%1 = load float, ptr %0
store volatile float %1, ptr %0
ret void
}

@g_sf64 = dso_local thread_local(localexec) global double 0.0

define dso_local void @tlsle_store_sf64() nounwind {
; LA32-LABEL: tlsle_store_sf64:
; LA32: # %bb.0: # %entry
; LA32-NEXT: lu12i.w $a0, %le_hi20_r(g_sf64)
; LA32-NEXT: add.w $a0, $a0, $tp, %le_add_r(g_sf64)
; LA32-NEXT: fld.d $fa0, $a0, %le_lo12_r(g_sf64)
; LA32-NEXT: fst.d $fa0, $a0, %le_lo12_r(g_sf64)
; LA32-NEXT: ret
;
; LA64-LABEL: tlsle_store_sf64:
; LA64: # %bb.0: # %entry
; LA64-NEXT: lu12i.w $a0, %le_hi20_r(g_sf64)
; LA64-NEXT: add.d $a0, $a0, $tp, %le_add_r(g_sf64)
; LA64-NEXT: fld.d $fa0, $a0, %le_lo12_r(g_sf64)
; LA64-NEXT: fst.d $fa0, $a0, %le_lo12_r(g_sf64)
; LA64-NEXT: ret
entry:
%0 = call ptr @llvm.threadlocal.address.p0(ptr @g_sf64)
%1 = load double, ptr %0
store volatile double %1, ptr %0
ret void
}
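
;; LSX/LASX vector accesses (vld/vst, xvld/xvst) and the vldrepl.b/xvldrepl.b
;; intrinsics also take the %le_lo12_r immediate directly.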
@g_i32x4_src = dso_local thread_local(localexec) global [4 x i32] zeroinitializer, align 16
@g_i32x4_dst = dso_local thread_local(localexec) global [4 x i32] zeroinitializer, align 16

define dso_local void @tlsle_copy_i32x4() nounwind {
; LA32-LABEL: tlsle_copy_i32x4:
; LA32: # %bb.0: # %entry
; LA32-NEXT: lu12i.w $a0, %le_hi20_r(g_i32x4_src)
; LA32-NEXT: add.w $a0, $a0, $tp, %le_add_r(g_i32x4_src)
; LA32-NEXT: vld $vr0, $a0, %le_lo12_r(g_i32x4_src)
; LA32-NEXT: lu12i.w $a0, %le_hi20_r(g_i32x4_dst)
; LA32-NEXT: add.w $a0, $a0, $tp, %le_add_r(g_i32x4_dst)
; LA32-NEXT: vst $vr0, $a0, %le_lo12_r(g_i32x4_dst)
; LA32-NEXT: ret
;
; LA64-LABEL: tlsle_copy_i32x4:
; LA64: # %bb.0: # %entry
; LA64-NEXT: lu12i.w $a0, %le_hi20_r(g_i32x4_src)
; LA64-NEXT: add.d $a0, $a0, $tp, %le_add_r(g_i32x4_src)
; LA64-NEXT: vld $vr0, $a0, %le_lo12_r(g_i32x4_src)
; LA64-NEXT: lu12i.w $a0, %le_hi20_r(g_i32x4_dst)
; LA64-NEXT: add.d $a0, $a0, $tp, %le_add_r(g_i32x4_dst)
; LA64-NEXT: vst $vr0, $a0, %le_lo12_r(g_i32x4_dst)
; LA64-NEXT: ret
entry:
%0 = call ptr @llvm.threadlocal.address.p0(ptr @g_i32x4_src)
%1 = load <4 x i32>, ptr %0, align 16
%2 = call ptr @llvm.threadlocal.address.p0(ptr @g_i32x4_dst)
store <4 x i32> %1, ptr %2, align 16
ret void
}

@g_i32x8_src = dso_local thread_local(localexec) global [8 x i32] zeroinitializer, align 32
@g_i32x8_dst = dso_local thread_local(localexec) global [8 x i32] zeroinitializer, align 32

define dso_local void @tlsle_copy_i32x8() nounwind {
; LA32-LABEL: tlsle_copy_i32x8:
; LA32: # %bb.0: # %entry
; LA32-NEXT: lu12i.w $a0, %le_hi20_r(g_i32x8_src)
; LA32-NEXT: add.w $a0, $a0, $tp, %le_add_r(g_i32x8_src)
; LA32-NEXT: xvld $xr0, $a0, %le_lo12_r(g_i32x8_src)
; LA32-NEXT: lu12i.w $a0, %le_hi20_r(g_i32x8_dst)
; LA32-NEXT: add.w $a0, $a0, $tp, %le_add_r(g_i32x8_dst)
; LA32-NEXT: xvst $xr0, $a0, %le_lo12_r(g_i32x8_dst)
; LA32-NEXT: ret
;
; LA64-LABEL: tlsle_copy_i32x8:
; LA64: # %bb.0: # %entry
; LA64-NEXT: lu12i.w $a0, %le_hi20_r(g_i32x8_src)
; LA64-NEXT: add.d $a0, $a0, $tp, %le_add_r(g_i32x8_src)
; LA64-NEXT: xvld $xr0, $a0, %le_lo12_r(g_i32x8_src)
; LA64-NEXT: lu12i.w $a0, %le_hi20_r(g_i32x8_dst)
; LA64-NEXT: add.d $a0, $a0, $tp, %le_add_r(g_i32x8_dst)
; LA64-NEXT: xvst $xr0, $a0, %le_lo12_r(g_i32x8_dst)
; LA64-NEXT: ret
entry:
%0 = call ptr @llvm.threadlocal.address.p0(ptr @g_i32x8_src)
%1 = load <8 x i32>, ptr %0, align 32
%2 = call ptr @llvm.threadlocal.address.p0(ptr @g_i32x8_dst)
store <8 x i32> %1, ptr %2, align 32
ret void
}

@g_i8x16 = dso_local thread_local(localexec) global <16 x i8> zeroinitializer, align 16

define dso_local void @tlsle_copy_i8_to_i8x16() nounwind {
; LA32-LABEL: tlsle_copy_i8_to_i8x16:
; LA32: # %bb.0: # %entry
; LA32-NEXT: lu12i.w $a0, %le_hi20_r(g_i8)
; LA32-NEXT: add.w $a0, $a0, $tp, %le_add_r(g_i8)
; LA32-NEXT: vldrepl.b $vr0, $a0, %le_lo12_r(g_i8)
; LA32-NEXT: lu12i.w $a0, %le_hi20_r(g_i8x16)
; LA32-NEXT: add.w $a0, $a0, $tp, %le_add_r(g_i8x16)
; LA32-NEXT: vst $vr0, $a0, %le_lo12_r(g_i8x16)
; LA32-NEXT: ret
;
; LA64-LABEL: tlsle_copy_i8_to_i8x16:
; LA64: # %bb.0: # %entry
; LA64-NEXT: lu12i.w $a0, %le_hi20_r(g_i8)
; LA64-NEXT: add.d $a0, $a0, $tp, %le_add_r(g_i8)
; LA64-NEXT: vldrepl.b $vr0, $a0, %le_lo12_r(g_i8)
; LA64-NEXT: lu12i.w $a0, %le_hi20_r(g_i8x16)
; LA64-NEXT: add.d $a0, $a0, $tp, %le_add_r(g_i8x16)
; LA64-NEXT: vst $vr0, $a0, %le_lo12_r(g_i8x16)
; LA64-NEXT: ret
entry:
%0 = call ptr @llvm.threadlocal.address.p0(ptr @g_i8)
%1 = call <16 x i8> @llvm.loongarch.lsx.vldrepl.b(ptr %0, i32 0)
%2 = call ptr @llvm.threadlocal.address.p0(ptr @g_i8x16)
store <16 x i8> %1, ptr %2, align 16
ret void
}

@g_i8x32 = dso_local thread_local(localexec) global <32 x i8> zeroinitializer, align 32

define dso_local void @tlsle_copy_i8_to_i8x32() nounwind {
; LA32-LABEL: tlsle_copy_i8_to_i8x32:
; LA32: # %bb.0: # %entry
; LA32-NEXT: lu12i.w $a0, %le_hi20_r(g_i8)
; LA32-NEXT: add.w $a0, $a0, $tp, %le_add_r(g_i8)
; LA32-NEXT: xvldrepl.b $xr0, $a0, %le_lo12_r(g_i8)
; LA32-NEXT: lu12i.w $a0, %le_hi20_r(g_i8x32)
; LA32-NEXT: add.w $a0, $a0, $tp, %le_add_r(g_i8x32)
; LA32-NEXT: xvst $xr0, $a0, %le_lo12_r(g_i8x32)
; LA32-NEXT: ret
;
; LA64-LABEL: tlsle_copy_i8_to_i8x32:
; LA64: # %bb.0: # %entry
; LA64-NEXT: lu12i.w $a0, %le_hi20_r(g_i8)
; LA64-NEXT: add.d $a0, $a0, $tp, %le_add_r(g_i8)
; LA64-NEXT: xvldrepl.b $xr0, $a0, %le_lo12_r(g_i8)
; LA64-NEXT: lu12i.w $a0, %le_hi20_r(g_i8x32)
; LA64-NEXT: add.d $a0, $a0, $tp, %le_add_r(g_i8x32)
; LA64-NEXT: xvst $xr0, $a0, %le_lo12_r(g_i8x32)
; LA64-NEXT: ret
entry:
%0 = call ptr @llvm.threadlocal.address.p0(ptr @g_i8)
%1 = call <32 x i8> @llvm.loongarch.lasx.xvldrepl.b(ptr %0, i32 0)
%2 = call ptr @llvm.threadlocal.address.p0(ptr @g_i8x32)
store <32 x i8> %1, ptr %2, align 32
ret void
}
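
;; A read-modify-write computes the address once; LA64 folds %le_lo12_r into
;; both the ld.d and the st.d, while LA32 materializes the address with addi.w
;; for the split 32-bit halves.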
@g_rmw = dso_local thread_local(localexec) global i64 0

define dso_local void @tlsle_rmw() nounwind {
; LA32-LABEL: tlsle_rmw:
; LA32: # %bb.0: # %entry
; LA32-NEXT: lu12i.w $a0, %le_hi20_r(g_rmw)
; LA32-NEXT: add.w $a0, $a0, $tp, %le_add_r(g_rmw)
; LA32-NEXT: addi.w $a0, $a0, %le_lo12_r(g_rmw)
; LA32-NEXT: ld.w $a1, $a0, 0
; LA32-NEXT: ld.w $a2, $a0, 4
; LA32-NEXT: addi.w $a1, $a1, 1
; LA32-NEXT: sltui $a3, $a1, 1
; LA32-NEXT: add.w $a2, $a2, $a3
; LA32-NEXT: st.w $a1, $a0, 0
; LA32-NEXT: st.w $a2, $a0, 4
; LA32-NEXT: ret
;
; LA64-LABEL: tlsle_rmw:
; LA64: # %bb.0: # %entry
; LA64-NEXT: lu12i.w $a0, %le_hi20_r(g_rmw)
; LA64-NEXT: add.d $a0, $a0, $tp, %le_add_r(g_rmw)
; LA64-NEXT: ld.d $a1, $a0, %le_lo12_r(g_rmw)
; LA64-NEXT: addi.d $a1, $a1, 1
; LA64-NEXT: st.d $a1, $a0, %le_lo12_r(g_rmw)
; LA64-NEXT: ret
entry:
%0 = call ptr @llvm.threadlocal.address.p0(ptr @g_rmw)
%1 = load i64, ptr %0
%2 = add i64 %1, 1
store i64 %2, ptr %0
ret void
}
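
;; Constant offsets from the TLS symbol are folded into the relocation
;; expression (e.g. %le_hi20_r(g_a32+4096)) for a single access; with several
;; offsets the base is materialized once and the offsets are applied separately
;; (stptr.w on LA64).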
@g_a32 = dso_local thread_local(localexec) global [2048 x i32] zeroinitializer, align 4

define dso_local void @tlsle_store_a32() nounwind {
; LA32-LABEL: tlsle_store_a32:
; LA32: # %bb.0: # %entry
; LA32-NEXT: lu12i.w $a0, %le_hi20_r(g_a32+4096)
; LA32-NEXT: add.w $a0, $a0, $tp, %le_add_r(g_a32+4096)
; LA32-NEXT: ori $a1, $zero, 1
; LA32-NEXT: st.w $a1, $a0, %le_lo12_r(g_a32+4096)
; LA32-NEXT: ret
;
; LA64-LABEL: tlsle_store_a32:
; LA64: # %bb.0: # %entry
; LA64-NEXT: lu12i.w $a0, %le_hi20_r(g_a32+4096)
; LA64-NEXT: add.d $a0, $a0, $tp, %le_add_r(g_a32+4096)
; LA64-NEXT: ori $a1, $zero, 1
; LA64-NEXT: st.w $a1, $a0, %le_lo12_r(g_a32+4096)
; LA64-NEXT: ret
entry:
store i32 1, ptr getelementptr inbounds ([1 x i32], ptr @g_a32, i32 1024), align 4
ret void
}

define dso_local void @tlsle_store_a32_2() nounwind {
; LA32-LABEL: tlsle_store_a32_2:
; LA32: # %bb.0: # %entry
; LA32-NEXT: lu12i.w $a0, %le_hi20_r(g_a32)
; LA32-NEXT: add.w $a0, $a0, $tp, %le_add_r(g_a32)
; LA32-NEXT: addi.w $a0, $a0, %le_lo12_r(g_a32)
; LA32-NEXT: lu12i.w $a1, 1
; LA32-NEXT: add.w $a2, $a0, $a1
; LA32-NEXT: ori $a3, $zero, 1
; LA32-NEXT: st.w $a3, $a2, 0
; LA32-NEXT: ori $a1, $a1, 8
; LA32-NEXT: add.w $a0, $a0, $a1
; LA32-NEXT: ori $a1, $zero, 2
; LA32-NEXT: st.w $a1, $a0, 0
; LA32-NEXT: ret
;
; LA64-LABEL: tlsle_store_a32_2:
; LA64: # %bb.0: # %entry
; LA64-NEXT: lu12i.w $a0, %le_hi20_r(g_a32)
; LA64-NEXT: add.d $a0, $a0, $tp, %le_add_r(g_a32)
; LA64-NEXT: addi.d $a0, $a0, %le_lo12_r(g_a32)
; LA64-NEXT: ori $a1, $zero, 1
; LA64-NEXT: stptr.w $a1, $a0, 4096
; LA64-NEXT: ori $a1, $zero, 2
; LA64-NEXT: stptr.w $a1, $a0, 4104
; LA64-NEXT: ret
entry:
store i32 1, ptr getelementptr inbounds ([1 x i32], ptr @g_a32, i32 1024), align 4
store i32 2, ptr getelementptr inbounds ([1 x i32], ptr @g_a32, i32 1026), align 4
ret void
}

define dso_local void @tlsle_control_flow_with_mem_access() nounwind {
; LA32-LABEL: tlsle_control_flow_with_mem_access:
; LA32: # %bb.0: # %entry
; LA32-NEXT: lu12i.w $a0, %le_hi20_r(g_a32+4)
; LA32-NEXT: add.w $a0, $a0, $tp, %le_add_r(g_a32+4)
; LA32-NEXT: ld.w $a1, $a0, %le_lo12_r(g_a32+4)
; LA32-NEXT: ori $a2, $zero, 1
; LA32-NEXT: blt $a1, $a2, .LBB25_2
; LA32-NEXT: # %bb.1: # %if.then
; LA32-NEXT: ori $a1, $zero, 10
; LA32-NEXT: st.w $a1, $a0, %le_lo12_r(g_a32+4)
; LA32-NEXT: .LBB25_2: # %if.end
; LA32-NEXT: ret
;
; LA64-LABEL: tlsle_control_flow_with_mem_access:
; LA64: # %bb.0: # %entry
; LA64-NEXT: lu12i.w $a0, %le_hi20_r(g_a32+4)
; LA64-NEXT: add.d $a0, $a0, $tp, %le_add_r(g_a32+4)
; LA64-NEXT: ld.w $a1, $a0, %le_lo12_r(g_a32+4)
; LA64-NEXT: ori $a2, $zero, 1
; LA64-NEXT: blt $a1, $a2, .LBB25_2
; LA64-NEXT: # %bb.1: # %if.then
; LA64-NEXT: ori $a1, $zero, 10
; LA64-NEXT: st.w $a1, $a0, %le_lo12_r(g_a32+4)
; LA64-NEXT: .LBB25_2: # %if.end
; LA64-NEXT: ret
entry:
%0 = load i32, ptr getelementptr inbounds ([1 x i32], ptr @g_a32, i32 1), align 4
%cmp = icmp sgt i32 %0, 0
br i1 %cmp, label %if.then, label %if.end

if.then:
store i32 10, ptr getelementptr inbounds ([1 x i32], ptr @g_a32, i32 1), align 4
br label %if.end

if.end:
ret void
}
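
;; Large GEP offsets: when the byte offset fits, it is folded into the
;; g_a64+<offset> relocation expression (on LA32 the 64-bit offset is truncated
;; to 32 bits); otherwise LA64 materializes the remainder with
;; lu12i.w/ori/lu32i.d/lu52i.d and adds it to the %le_lo12_r address.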
@g_a64 = dso_local thread_local(localexec) global [614750729487779976 x i64] zeroinitializer, align 8

define dso_local ptr @tlsle_load_addr_offset_1() nounwind {
; LA32-LABEL: tlsle_load_addr_offset_1:
; LA32: # %bb.0: # %entry
; LA32-NEXT: lu12i.w $a0, %le_hi20_r(g_a64+8)
; LA32-NEXT: add.w $a0, $a0, $tp, %le_add_r(g_a64+8)
; LA32-NEXT: addi.w $a0, $a0, %le_lo12_r(g_a64+8)
; LA32-NEXT: ret
;
; LA64-LABEL: tlsle_load_addr_offset_1:
; LA64: # %bb.0: # %entry
; LA64-NEXT: lu12i.w $a0, %le_hi20_r(g_a64+8)
; LA64-NEXT: add.d $a0, $a0, $tp, %le_add_r(g_a64+8)
; LA64-NEXT: addi.d $a0, $a0, %le_lo12_r(g_a64+8)
; LA64-NEXT: ret
entry:
ret ptr getelementptr inbounds ([1 x i64], ptr @g_a64, i64 1)
}

define dso_local ptr @tlsle_load_addr_offset_257() nounwind {
; LA32-LABEL: tlsle_load_addr_offset_257:
; LA32: # %bb.0: # %entry
; LA32-NEXT: lu12i.w $a0, %le_hi20_r(g_a64+2056)
; LA32-NEXT: add.w $a0, $a0, $tp, %le_add_r(g_a64+2056)
; LA32-NEXT: addi.w $a0, $a0, %le_lo12_r(g_a64+2056)
; LA32-NEXT: ret
;
; LA64-LABEL: tlsle_load_addr_offset_257:
; LA64: # %bb.0: # %entry
; LA64-NEXT: lu12i.w $a0, %le_hi20_r(g_a64+2056)
; LA64-NEXT: add.d $a0, $a0, $tp, %le_add_r(g_a64+2056)
; LA64-NEXT: addi.d $a0, $a0, %le_lo12_r(g_a64+2056)
; LA64-NEXT: ret
entry:
ret ptr getelementptr inbounds ([1 x i64], ptr @g_a64, i64 257)
}

define dso_local ptr @tlsle_load_addr_offset_1048576() nounwind {
; LA32-LABEL: tlsle_load_addr_offset_1048576:
; LA32: # %bb.0: # %entry
; LA32-NEXT: lu12i.w $a0, %le_hi20_r(g_a64+8388608)
; LA32-NEXT: add.w $a0, $a0, $tp, %le_add_r(g_a64+8388608)
; LA32-NEXT: addi.w $a0, $a0, %le_lo12_r(g_a64+8388608)
; LA32-NEXT: ret
;
; LA64-LABEL: tlsle_load_addr_offset_1048576:
; LA64: # %bb.0: # %entry
; LA64-NEXT: lu12i.w $a0, %le_hi20_r(g_a64+8388608)
; LA64-NEXT: add.d $a0, $a0, $tp, %le_add_r(g_a64+8388608)
; LA64-NEXT: addi.d $a0, $a0, %le_lo12_r(g_a64+8388608)
; LA64-NEXT: ret
entry:
ret ptr getelementptr inbounds ([1 x i64], ptr @g_a64, i64 1048576)
}

define dso_local ptr @tlsle_load_addr_offset_1048577() nounwind {
; LA32-LABEL: tlsle_load_addr_offset_1048577:
; LA32: # %bb.0: # %entry
; LA32-NEXT: lu12i.w $a0, %le_hi20_r(g_a64+8388616)
; LA32-NEXT: add.w $a0, $a0, $tp, %le_add_r(g_a64+8388616)
; LA32-NEXT: addi.w $a0, $a0, %le_lo12_r(g_a64+8388616)
; LA32-NEXT: ret
;
; LA64-LABEL: tlsle_load_addr_offset_1048577:
; LA64: # %bb.0: # %entry
; LA64-NEXT: lu12i.w $a0, %le_hi20_r(g_a64+8388616)
; LA64-NEXT: add.d $a0, $a0, $tp, %le_add_r(g_a64+8388616)
; LA64-NEXT: addi.d $a0, $a0, %le_lo12_r(g_a64+8388616)
; LA64-NEXT: ret
entry:
ret ptr getelementptr inbounds ([1 x i64], ptr @g_a64, i64 1048577)
}

define dso_local ptr @tlsle_load_addr_offset_268432896() nounwind {
; LA32-LABEL: tlsle_load_addr_offset_268432896:
; LA32: # %bb.0: # %entry
; LA32-NEXT: lu12i.w $a0, %le_hi20_r(g_a64+2147463168)
; LA32-NEXT: add.w $a0, $a0, $tp, %le_add_r(g_a64+2147463168)
; LA32-NEXT: addi.w $a0, $a0, %le_lo12_r(g_a64+2147463168)
; LA32-NEXT: ret
;
; LA64-LABEL: tlsle_load_addr_offset_268432896:
; LA64: # %bb.0: # %entry
; LA64-NEXT: lu12i.w $a0, %le_hi20_r(g_a64+2147463168)
; LA64-NEXT: add.d $a0, $a0, $tp, %le_add_r(g_a64+2147463168)
; LA64-NEXT: addi.d $a0, $a0, %le_lo12_r(g_a64+2147463168)
; LA64-NEXT: ret
entry:
ret ptr getelementptr inbounds ([1 x i64], ptr @g_a64, i64 268432896)
}

define dso_local ptr @tlsle_load_addr_offset_268432897() nounwind {
; LA32-LABEL: tlsle_load_addr_offset_268432897:
; LA32: # %bb.0: # %entry
; LA32-NEXT: lu12i.w $a0, %le_hi20_r(g_a64+2147463176)
; LA32-NEXT: add.w $a0, $a0, $tp, %le_add_r(g_a64+2147463176)
; LA32-NEXT: addi.w $a0, $a0, %le_lo12_r(g_a64+2147463176)
; LA32-NEXT: ret
;
; LA64-LABEL: tlsle_load_addr_offset_268432897:
; LA64: # %bb.0: # %entry
; LA64-NEXT: lu12i.w $a0, %le_hi20_r(g_a64+2147463176)
; LA64-NEXT: add.d $a0, $a0, $tp, %le_add_r(g_a64+2147463176)
; LA64-NEXT: addi.d $a0, $a0, %le_lo12_r(g_a64+2147463176)
; LA64-NEXT: ret
entry:
ret ptr getelementptr inbounds ([1 x i64], ptr @g_a64, i64 268432897)
}

define dso_local ptr @tlsle_load_addr_offset_281474439839744() nounwind {
; LA32-LABEL: tlsle_load_addr_offset_281474439839744:
; LA32: # %bb.0: # %entry
; LA32-NEXT: lu12i.w $a0, %le_hi20_r(g_a64)
; LA32-NEXT: add.w $a0, $a0, $tp, %le_add_r(g_a64)
; LA32-NEXT: addi.w $a0, $a0, %le_lo12_r(g_a64)
; LA32-NEXT: ret
;
; LA64-LABEL: tlsle_load_addr_offset_281474439839744:
; LA64: # %bb.0: # %entry
; LA64-NEXT: lu12i.w $a0, %le_hi20_r(g_a64)
; LA64-NEXT: add.d $a0, $a0, $tp, %le_add_r(g_a64)
; LA64-NEXT: addi.d $a0, $a0, %le_lo12_r(g_a64)
; LA64-NEXT: ori $a1, $zero, 0
; LA64-NEXT: lu32i.d $a1, 524287
; LA64-NEXT: add.d $a0, $a0, $a1
; LA64-NEXT: ret
entry:
ret ptr getelementptr inbounds ([1 x i64], ptr @g_a64, i64 281474439839744)
}

define dso_local ptr @tlsle_load_addr_offset_248792680471040() nounwind {
; LA32-LABEL: tlsle_load_addr_offset_248792680471040:
; LA32: # %bb.0: # %entry
; LA32-NEXT: lu12i.w $a0, %le_hi20_r(g_a64+2059194368)
; LA32-NEXT: add.w $a0, $a0, $tp, %le_add_r(g_a64+2059194368)
; LA32-NEXT: addi.w $a0, $a0, %le_lo12_r(g_a64+2059194368)
; LA32-NEXT: ret
;
; LA64-LABEL: tlsle_load_addr_offset_248792680471040:
; LA64: # %bb.0: # %entry
; LA64-NEXT: lu12i.w $a0, %le_hi20_r(g_a64)
; LA64-NEXT: add.d $a0, $a0, $tp, %le_add_r(g_a64)
; LA64-NEXT: addi.d $a0, $a0, %le_lo12_r(g_a64)
; LA64-NEXT: lu12i.w $a1, 502733
; LA64-NEXT: lu32i.d $a1, 463412
; LA64-NEXT: add.d $a0, $a0, $a1
; LA64-NEXT: ret
entry:
ret ptr getelementptr inbounds ([1 x i64], ptr @g_a64, i64 248792680471040)
}

define dso_local ptr @tlsle_load_addr_offset_9380351707272() nounwind {
; LA32-LABEL: tlsle_load_addr_offset_9380351707272:
; LA32: # %bb.0: # %entry
; LA32-NEXT: lu12i.w $a0, %le_hi20_r(g_a64+1145062464)
; LA32-NEXT: add.w $a0, $a0, $tp, %le_add_r(g_a64+1145062464)
; LA32-NEXT: addi.w $a0, $a0, %le_lo12_r(g_a64+1145062464)
; LA32-NEXT: ret
;
; LA64-LABEL: tlsle_load_addr_offset_9380351707272:
; LA64: # %bb.0: # %entry
; LA64-NEXT: lu12i.w $a0, %le_hi20_r(g_a64)
; LA64-NEXT: add.d $a0, $a0, $tp, %le_add_r(g_a64)
; LA64-NEXT: addi.d $a0, $a0, %le_lo12_r(g_a64)
; LA64-NEXT: lu12i.w $a1, 279556
; LA64-NEXT: ori $a1, $a1, 1088
; LA64-NEXT: lu32i.d $a1, 17472
; LA64-NEXT: add.d $a0, $a0, $a1
; LA64-NEXT: ret
entry:
ret ptr getelementptr inbounds ([1 x i64], ptr @g_a64, i64 9380351707272)
}

define dso_local ptr @tlsle_load_addr_offset_562949953421312() nounwind {
; LA32-LABEL: tlsle_load_addr_offset_562949953421312:
; LA32: # %bb.0: # %entry
; LA32-NEXT: lu12i.w $a0, %le_hi20_r(g_a64)
; LA32-NEXT: add.w $a0, $a0, $tp, %le_add_r(g_a64)
; LA32-NEXT: addi.w $a0, $a0, %le_lo12_r(g_a64)
; LA32-NEXT: ret
;
; LA64-LABEL: tlsle_load_addr_offset_562949953421312:
; LA64: # %bb.0: # %entry
; LA64-NEXT: lu12i.w $a0, %le_hi20_r(g_a64)
; LA64-NEXT: add.d $a0, $a0, $tp, %le_add_r(g_a64)
; LA64-NEXT: addi.d $a0, $a0, %le_lo12_r(g_a64)
; LA64-NEXT: lu52i.d $a1, $zero, 1
; LA64-NEXT: add.d $a0, $a0, $a1
; LA64-NEXT: ret
entry:
ret ptr getelementptr inbounds ([1 x i64], ptr @g_a64, i64 562949953421312)
}

define dso_local ptr @tlsle_load_addr_offset_614749556925924693() nounwind {
; LA32-LABEL: tlsle_load_addr_offset_614749556925924693:
; LA32: # %bb.0: # %entry
; LA32-NEXT: lu12i.w $a0, %le_hi20_r(g_a64+858794664)
; LA32-NEXT: add.w $a0, $a0, $tp, %le_add_r(g_a64+858794664)
; LA32-NEXT: addi.w $a0, $a0, %le_lo12_r(g_a64+858794664)
; LA32-NEXT: ret
;
; LA64-LABEL: tlsle_load_addr_offset_614749556925924693:
; LA64: # %bb.0: # %entry
; LA64-NEXT: lu12i.w $a0, %le_hi20_r(g_a64)
; LA64-NEXT: add.d $a0, $a0, $tp, %le_add_r(g_a64)
; LA64-NEXT: addi.d $a0, $a0, %le_lo12_r(g_a64)
; LA64-NEXT: lu12i.w $a1, 209666
; LA64-NEXT: ori $a1, $a1, 2728
; LA64-NEXT: lu32i.d $a1, 15288
; LA64-NEXT: lu52i.d $a1, $a1, 1092
; LA64-NEXT: add.d $a0, $a0, $a1
; LA64-NEXT: ret
entry:
ret ptr getelementptr inbounds ([1 x i64], ptr @g_a64, i64 614749556925924693)
}

declare nonnull ptr @llvm.threadlocal.address.p0(ptr nonnull)