[ArgPromotion] Remove incorrect TranspBlocks set for loads. (#84835)
The TranspBlocks set was used to cache aliasing decisions for all processed loads in the parent loop. This is incorrect, because each load can access a different location, which means one load not being modified in a block doesn't translate to another load not being modified in the same block. All loads access the same underlying object, so we could perhaps use a location without size for all loads and retain the cache, but that would mean we lose precision. For now, just drop the cache. Fixes https://github.com/llvm/llvm-project/issues/84807 PR: https://github.com/llvm/llvm-project/pull/84835
This commit is contained in:
parent
a3b52509d5
commit
bba4a1daff
@@ -653,10 +653,6 @@ static bool findArgParts(Argument *Arg, const DataLayout &DL, AAResults &AAR,
|
||||
// check to see if the pointer is guaranteed to not be modified from entry of
|
||||
// the function to each of the load instructions.
|
||||
|
||||
// Because there could be several/many load instructions, remember which
|
||||
// blocks we know to be transparent to the load.
|
||||
df_iterator_default_set<BasicBlock *, 16> TranspBlocks;
|
||||
|
||||
for (LoadInst *Load : Loads) {
|
||||
// Check to see if the load is invalidated from the start of the block to
|
||||
// the load itself.
|
||||
@@ -670,7 +666,7 @@ static bool findArgParts(Argument *Arg, const DataLayout &DL, AAResults &AAR,
|
||||
// To do this, we perform a depth first search on the inverse CFG from the
|
||||
// loading block.
|
||||
for (BasicBlock *P : predecessors(BB)) {
|
||||
for (BasicBlock *TranspBB : inverse_depth_first_ext(P, TranspBlocks))
|
||||
for (BasicBlock *TranspBB : inverse_depth_first(P))
|
||||
if (AAR.canBasicBlockModify(*TranspBB, Loc))
|
||||
return false;
|
||||
}
|
||||
|
@@ -7,17 +7,14 @@ target datalayout = "e-m:e-p270:32:32-p271:32:32-p272:64:64-i64:64-i128:128-f80:
|
||||
|
||||
; Test case for https://github.com/llvm/llvm-project/issues/84807.
|
||||
|
||||
; FIXME: Currently the loads from @callee are moved to @caller, even though
|
||||
; the store in %then may alias the load from %q.
|
||||
; Make sure the loads from @callee are not moved to @caller, as the store
|
||||
; in %then may alias the load from %q.
|
||||
|
||||
define i32 @caller1(i1 %c) {
|
||||
; CHECK-LABEL: define i32 @caller1(
|
||||
; CHECK-SAME: i1 [[C:%.*]]) {
|
||||
; CHECK-NEXT: entry:
|
||||
; CHECK-NEXT: [[F_VAL:%.*]] = load i16, ptr @f, align 8
|
||||
; CHECK-NEXT: [[TMP0:%.*]] = getelementptr i8, ptr @f, i64 8
|
||||
; CHECK-NEXT: [[F_VAL1:%.*]] = load i64, ptr [[TMP0]], align 8
|
||||
; CHECK-NEXT: call void @callee1(i16 [[F_VAL]], i64 [[F_VAL1]], i1 [[C]])
|
||||
; CHECK-NEXT: call void @callee1(ptr noundef nonnull @f, i1 [[C]])
|
||||
; CHECK-NEXT: ret i32 0
|
||||
;
|
||||
entry:
|
||||
@ -27,13 +24,16 @@ entry:
|
||||
|
||||
define internal void @callee1(ptr nocapture noundef readonly %q, i1 %c) {
|
||||
; CHECK-LABEL: define internal void @callee1(
|
||||
; CHECK-SAME: i16 [[Q_0_VAL:%.*]], i64 [[Q_8_VAL:%.*]], i1 [[C:%.*]]) {
|
||||
; CHECK-SAME: ptr nocapture noundef readonly [[Q:%.*]], i1 [[C:%.*]]) {
|
||||
; CHECK-NEXT: entry:
|
||||
; CHECK-NEXT: br i1 [[C]], label [[THEN:%.*]], label [[EXIT:%.*]]
|
||||
; CHECK: then:
|
||||
; CHECK-NEXT: store i16 123, ptr @f, align 8
|
||||
; CHECK-NEXT: br label [[EXIT]]
|
||||
; CHECK: exit:
|
||||
; CHECK-NEXT: [[Q_0_VAL:%.*]] = load i16, ptr [[Q]], align 8
|
||||
; CHECK-NEXT: [[GEP_8:%.*]] = getelementptr inbounds i8, ptr [[Q]], i64 8
|
||||
; CHECK-NEXT: [[Q_8_VAL:%.*]] = load i64, ptr [[GEP_8]], align 8
|
||||
; CHECK-NEXT: call void @use(i16 [[Q_0_VAL]], i64 [[Q_8_VAL]])
|
||||
; CHECK-NEXT: ret void
|
||||
;
|
||||
|
Loading…
x
Reference in New Issue
Block a user