
Commit 489d180

Merging r246694:
------------------------------------------------------------------------
r246694 | benny.kra | 2015-09-02 15:52:23 -0400 (Wed, 02 Sep 2015) | 44 lines

[RemoveDuplicatePHINodes] Start over after removing a PHI.

This makes RemoveDuplicatePHINodes more effective and fixes an assertion
failure. Triggering the assertions requires a DenseSet reallocation, so this
change only contains a constructive test. I'll explain the issue with a small
example. In the following function there's a duplicate PHI: %4 and %5 are
identical. When this is found, the DenseSet in RemoveDuplicatePHINodes
contains %2, %3 and %4.

define void @f() {
  br label %1

; <label>:1                                       ; preds = %1, %0
  %2 = phi i32 [ 42, %0 ], [ %4, %1 ]
  %3 = phi i32 [ 42, %0 ], [ %5, %1 ]
  %4 = phi i32 [ 42, %0 ], [ 23, %1 ]
  %5 = phi i32 [ 42, %0 ], [ 23, %1 ]
  br label %1
}

After RemoveDuplicatePHINodes runs, the function looks like this. %3 has
changed and is now identical to %2, but RemoveDuplicatePHINodes never saw
this.

define void @f() {
  br label %1

; <label>:1                                       ; preds = %1, %0
  %2 = phi i32 [ 42, %0 ], [ %4, %1 ]
  %3 = phi i32 [ 42, %0 ], [ %4, %1 ]
  %4 = phi i32 [ 42, %0 ], [ 23, %1 ]
  br label %1
}

If the DenseSet does a reallocation now, it will reinsert all keys and stumble
over %3 now having a different hash value than it had when it was first
inserted into the map. This change clears the set whenever a PHI is deleted
and starts the scan over from the beginning, allowing %3 to be deleted and
avoiding inconsistent DenseSet state. This potentially has a negative
performance impact because it rescans all PHIs, but I don't think that this
ever makes a difference in practice.
------------------------------------------------------------------------

git-svn-id: https://llvm.org/svn/llvm-project/llvm/branches/release_37@252938 91177308-0d34-0410-b5e6-96231b3b80d8
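The hazard the message describes (a content-hashed set whose key is mutated while it is still in the table) is easy to reproduce outside of LLVM. Below is a minimal, self-contained C++ sketch of that situation and of the clear-and-restart fix. Every name in it (MiniPhi, dedupePhis, the integer value numbering) is invented for illustration; this is not the LLVM implementation.

// A minimal, self-contained sketch of the failure mode described in the
// commit message and of the clear-and-restart fix. All names are invented
// for illustration; this is not the LLVM code.
#include <cassert>
#include <cstddef>
#include <functional>
#include <list>
#include <unordered_set>
#include <vector>

// Stand-in for a PHI node: 'id' is its own value number, 'incoming' lists
// the value numbers it receives from its predecessors.
struct MiniPhi {
  int id;
  std::vector<int> incoming;
};

// The set hashes and compares PHIs by their incoming values, so two
// structurally identical PHIs collide.
struct PhiContentHash {
  std::size_t operator()(const MiniPhi *P) const {
    std::size_t H = 0;
    for (int V : P->incoming)
      H = H * 31 + std::hash<int>()(V); // depends on the *current* contents
    return H;
  }
};
struct PhiContentEq {
  bool operator()(const MiniPhi *A, const MiniPhi *B) const {
    return A->incoming == B->incoming;
  }
};

// Remove PHIs whose incoming values duplicate an earlier PHI. Replacing all
// uses of the erased PHI is modelled by rewriting its value number in the
// remaining PHIs' incoming lists, which changes the contents (and hence the
// hash) of a PHI that may already be in the set. That is the hazard the
// commit describes; clearing the set and restarting the scan avoids it.
static bool dedupePhis(std::list<MiniPhi> &Phis) {
  std::unordered_set<MiniPhi *, PhiContentHash, PhiContentEq> Seen;
  bool Changed = false;
  for (auto It = Phis.begin(); It != Phis.end();) {
    auto Ins = Seen.insert(&*It);
    if (Ins.second) {
      ++It;
      continue;
    }
    // Duplicate found: remember who survives, then drop the stale set before
    // rewriting, so no entry's hash can go out of sync while it is still in
    // the table.
    int Dead = It->id, Live = (*Ins.first)->id;
    Seen.clear();
    for (MiniPhi &P : Phis)
      for (int &V : P.incoming)
        if (V == Dead)
          V = Live; // plays the role of replaceAllUsesWith
    It = Phis.erase(It);
    Changed = true;
    // Start over from the beginning, as the fix below does.
    It = Phis.begin();
  }
  return Changed;
}

int main() {
  // The example from the commit message: %4 and %5 are identical, and erasing
  // %5 rewrites %3 so that it becomes identical to %2.
  std::list<MiniPhi> Phis = {
      {2, {42, 4}},  // %2 = phi [ 42 ], [ %4 ]
      {3, {42, 5}},  // %3 = phi [ 42 ], [ %5 ]
      {4, {42, 23}}, // %4 = phi [ 42 ], [ 23 ]
      {5, {42, 23}}, // %5 = phi [ 42 ], [ 23 ]
  };
  bool Changed = dedupePhis(Phis);
  assert(Changed && "expected %5 and then %3 to be removed");
  assert(Phis.size() == 2 && "only %2 and %4 remain");
  (void)Changed;
  return 0;
}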
1 parent a9269ea commit 489d180

2 files changed: 42 additions, 0 deletions

lib/Transforms/Utils/Local.cpp

Lines changed: 5 additions & 0 deletions
@@ -869,6 +869,11 @@ bool llvm::EliminateDuplicatePHINodes(BasicBlock *BB) {
       PN->replaceAllUsesWith(*Inserted.first);
       PN->eraseFromParent();
       Changed = true;
+
+      // The RAUW can change PHIs that we already visited. Start over from the
+      // beginning.
+      PHISet.clear();
+      I = BB->begin();
     }
   }

unittests/Transforms/Utils/Local.cpp

Lines changed: 37 additions & 0 deletions
@@ -58,3 +58,40 @@ TEST(Local, RecursivelyDeleteDeadPHINodes) {
   delete bb0;
   delete bb1;
 }
+
+TEST(Local, RemoveDuplicatePHINodes) {
+  LLVMContext &C(getGlobalContext());
+  IRBuilder<> B(C);
+
+  std::unique_ptr<Function> F(
+      Function::Create(FunctionType::get(B.getVoidTy(), false),
+                       GlobalValue::ExternalLinkage, "F"));
+  BasicBlock *Entry(BasicBlock::Create(C, "", F.get()));
+  BasicBlock *BB(BasicBlock::Create(C, "", F.get()));
+  BranchInst::Create(BB, Entry);
+
+  B.SetInsertPoint(BB);
+
+  AssertingVH<PHINode> P1 = B.CreatePHI(Type::getInt32Ty(C), 2);
+  P1->addIncoming(B.getInt32(42), Entry);
+
+  PHINode *P2 = B.CreatePHI(Type::getInt32Ty(C), 2);
+  P2->addIncoming(B.getInt32(42), Entry);
+
+  AssertingVH<PHINode> P3 = B.CreatePHI(Type::getInt32Ty(C), 2);
+  P3->addIncoming(B.getInt32(42), Entry);
+  P3->addIncoming(B.getInt32(23), BB);
+
+  PHINode *P4 = B.CreatePHI(Type::getInt32Ty(C), 2);
+  P4->addIncoming(B.getInt32(42), Entry);
+  P4->addIncoming(B.getInt32(23), BB);
+
+  P1->addIncoming(P3, BB);
+  P2->addIncoming(P4, BB);
+  BranchInst::Create(BB, BB);
+
+  // Verify that we can eliminate PHIs that become duplicates after changing
+  // PHIs downstream.
+  EXPECT_TRUE(EliminateDuplicatePHINodes(BB));
+  EXPECT_EQ(3U, BB->size());
+}
