 #include "llvm/IR/Argument.h"
 #include "llvm/IR/Attributes.h"
 #include "llvm/IR/BasicBlock.h"
-#include "llvm/IR/CallSite.h"
 #include "llvm/IR/Constant.h"
 #include "llvm/IR/Constants.h"
 #include "llvm/IR/Function.h"
@@ -160,8 +159,7 @@ static MemoryAccessKind checkFunctionMemoryAccess(Function &F, bool ThisBody,

       // Check whether all pointer arguments point to local memory, and
       // ignore calls that only access local memory.
-      for (CallSite::arg_iterator CI = Call->arg_begin(), CE = Call->arg_end();
-           CI != CE; ++CI) {
+      for (auto CI = Call->arg_begin(), CE = Call->arg_end(); CI != CE; ++CI) {
         Value *Arg = *CI;
         if (!Arg->getType()->isPtrOrPtrVectorTy())
           continue;
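For reference, a minimal standalone sketch (not part of the patch) of the iteration idiom the hunk above switches to: CallBase exposes arg_begin()/arg_end() (and an args() range) over the argument operands, so the named CallSite::arg_iterator type is no longer needed. The helper name hasPointerArgument is hypothetical.

    // Sketch only: same traversal as the patched loop, for a known CallBase.
    #include "llvm/IR/InstrTypes.h" // llvm::CallBase
    #include "llvm/IR/Type.h"
    #include "llvm/IR/Value.h"

    using namespace llvm;

    // Hypothetical helper: does this call receive any pointer-typed argument?
    static bool hasPointerArgument(const CallBase *Call) {
      for (auto CI = Call->arg_begin(), CE = Call->arg_end(); CI != CE; ++CI) {
        const Value *Arg = *CI;
        if (Arg->getType()->isPtrOrPtrVectorTy())
          return true;
      }
      return false; // Range form would be: for (const Value *Arg : Call->args())
    }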
@@ -362,13 +360,13 @@ struct ArgumentUsesTracker : public CaptureTracker {
   void tooManyUses() override { Captured = true; }

   bool captured(const Use *U) override {
-    CallSite CS(U->getUser());
-    if (!CS.getInstruction()) {
+    CallBase *CB = dyn_cast<CallBase>(U->getUser());
+    if (!CB) {
       Captured = true;
       return true;
     }

-    Function *F = CS.getCalledFunction();
+    Function *F = CB->getCalledFunction();
     if (!F || !F->hasExactDefinition() || !SCCNodes.count(F)) {
       Captured = true;
       return true;
@@ -379,14 +377,14 @@ struct ArgumentUsesTracker : public CaptureTracker {
     // these.

     unsigned UseIndex =
-        std::distance(const_cast<const Use *>(CS.arg_begin()), U);
+        std::distance(const_cast<const Use *>(CB->arg_begin()), U);

-    assert(UseIndex < CS.data_operands_size() &&
+    assert(UseIndex < CB->data_operands_size() &&
            "Indirect function calls should have been filtered above!");

-    if (UseIndex >= CS.getNumArgOperands()) {
+    if (UseIndex >= CB->getNumArgOperands()) {
       // Data operand, but not a argument operand -- must be a bundle operand
-      assert(CS.hasOperandBundles() && "Must be!");
+      assert(CB->hasOperandBundles() && "Must be!");

       // CaptureTracking told us that we're being captured by an operand bundle
       // use. In this case it does not matter if the callee is within our SCC
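The two hunks above follow a single pattern worth spelling out: a CallSite built from a non-call user used to report a null getInstruction(), while with CallBase the same test is a plain dyn_cast; the operand-index arithmetic (argument operands first, then operand-bundle operands, with the callee operand outside the data operands) is unchanged. A standalone sketch under those assumptions, with the hypothetical helper name isBundleOperandUse:

    #include "llvm/IR/InstrTypes.h" // llvm::CallBase
    #include "llvm/IR/Use.h"
    #include <iterator>

    using namespace llvm;

    // Hypothetical helper: is U an operand-bundle operand of a call or invoke?
    static bool isBundleOperandUse(const Use *U) {
      const CallBase *CB = dyn_cast<CallBase>(U->getUser());
      if (!CB)
        return false; // The user is not a call or invoke at all.

      // Index of U among the operands, counted from the first argument operand.
      unsigned UseIndex = std::distance(CB->arg_begin(), U);
      if (UseIndex >= CB->data_operands_size())
        return false; // Callee (or other non-data) operand.

      // Argument operands come first; any later data operand is a bundle operand.
      return UseIndex >= CB->getNumArgOperands();
    }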
@@ -490,15 +488,15 @@ determinePointerReadAttrs(Argument *A,
             Worklist.push_back(&UU);
       };

-      CallSite CS(I);
-      if (CS.doesNotAccessMemory()) {
+      CallBase &CB = cast<CallBase>(*I);
+      if (CB.doesNotAccessMemory()) {
         AddUsersToWorklistIfCapturing();
         continue;
       }

-      Function *F = CS.getCalledFunction();
+      Function *F = CB.getCalledFunction();
       if (!F) {
-        if (CS.onlyReadsMemory()) {
+        if (CB.onlyReadsMemory()) {
           IsRead = true;
           AddUsersToWorklistIfCapturing();
           continue;
@@ -510,23 +508,23 @@ determinePointerReadAttrs(Argument *A,
       // operands. This means there is no need to adjust UseIndex to account
       // for these.

-      unsigned UseIndex = std::distance(CS.arg_begin(), U);
+      unsigned UseIndex = std::distance(CB.arg_begin(), U);

       // U cannot be the callee operand use: since we're exploring the
       // transitive uses of an Argument, having such a use be a callee would
-      // imply the CallSite is an indirect call or invoke; and we'd take the
+      // imply the call site is an indirect call or invoke; and we'd take the
       // early exit above.
-      assert(UseIndex < CS.data_operands_size() &&
+      assert(UseIndex < CB.data_operands_size() &&
              "Data operand use expected!");

-      bool IsOperandBundleUse = UseIndex >= CS.getNumArgOperands();
+      bool IsOperandBundleUse = UseIndex >= CB.getNumArgOperands();

       if (UseIndex >= F->arg_size() && !IsOperandBundleUse) {
         assert(F->isVarArg() && "More params than args in non-varargs call");
         return Attribute::None;
       }

-      Captures &= !CS.doesNotCapture(UseIndex);
+      Captures &= !CB.doesNotCapture(UseIndex);

       // Since the optimizer (by design) cannot see the data flow corresponding
       // to a operand bundle use, these cannot participate in the optimistic SCC
@@ -535,12 +533,12 @@ determinePointerReadAttrs(Argument *A,
       if (IsOperandBundleUse ||
           !SCCNodes.count(&*std::next(F->arg_begin(), UseIndex))) {

-        // The accessors used on CallSite here do the right thing for calls and
+        // The accessors used on call site here do the right thing for calls and
         // invokes with operand bundles.

-        if (!CS.onlyReadsMemory() && !CS.onlyReadsMemory(UseIndex))
+        if (!CB.onlyReadsMemory() && !CB.onlyReadsMemory(UseIndex))
           return Attribute::None;
-        if (!CS.doesNotAccessMemory(UseIndex))
+        if (!CB.doesNotAccessMemory(UseIndex))
           IsRead = true;
       }

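A note on the queries used above, as a standalone sketch (not part of the patch): CallBase keeps CallSite's distinction between call-level memory queries (onlyReadsMemory(), doesNotAccessMemory()) and per-operand queries taking an index (onlyReadsMemory(UseIndex), doesNotAccessMemory(UseIndex), doesNotCapture(UseIndex)), which inspect the attributes of that particular argument. The helper name mayWriteThroughArg is hypothetical.

    #include "llvm/IR/InstrTypes.h" // llvm::CallBase

    using namespace llvm;

    // Hypothetical helper: conservatively, may this call write through the
    // pointer it receives as argument Idx?
    static bool mayWriteThroughArg(const CallBase &CB, unsigned Idx) {
      if (CB.doesNotAccessMemory() || CB.doesNotAccessMemory(Idx))
        return false; // readnone on the whole call or on this argument.
      if (CB.onlyReadsMemory() || CB.onlyReadsMemory(Idx))
        return false; // readonly on the whole call or on this argument.
      return true;    // Nothing rules a write out.
    }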
@@ -638,16 +636,16 @@ static bool addArgumentAttrsFromCallsites(Function &F) {
   // callsite.
   BasicBlock &Entry = F.getEntryBlock();
   for (Instruction &I : Entry) {
-    if (auto CS = CallSite(&I)) {
-      if (auto *CalledFunc = CS.getCalledFunction()) {
+    if (auto *CB = dyn_cast<CallBase>(&I)) {
+      if (auto *CalledFunc = CB->getCalledFunction()) {
         for (auto &CSArg : CalledFunc->args()) {
           if (!CSArg.hasNonNullAttr())
             continue;

           // If the non-null callsite argument operand is an argument to 'F'
           // (the caller) and the call is guaranteed to execute, then the value
           // must be non-null throughout 'F'.
-          auto *FArg = dyn_cast<Argument>(CS.getArgOperand(CSArg.getArgNo()));
+          auto *FArg = dyn_cast<Argument>(CB->getArgOperand(CSArg.getArgNo()));
           if (FArg && !FArg->hasNonNullAttr()) {
             FArg->addAttr(Attribute::NonNull);
             Changed = true;
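The comment in the hunk above carries the key argument of addArgumentAttrsFromCallsites: a call in the entry block that is guaranteed to execute and that requires a nonnull argument proves the corresponding caller argument nonnull. A standalone sketch of just the operand-matching part, with the hypothetical helper name callRequiresNonNull (the "guaranteed to execute" check is assumed to be done elsewhere):

    #include "llvm/IR/Argument.h"
    #include "llvm/IR/Function.h"
    #include "llvm/IR/InstrTypes.h"

    using namespace llvm;

    // Hypothetical helper: does CB pass the caller argument A in a parameter
    // slot that its (direct) callee declares nonnull?
    static bool callRequiresNonNull(const CallBase *CB, const Argument *A) {
      const Function *Callee = CB->getCalledFunction();
      if (!Callee)
        return false; // Indirect call: no declared parameter attributes to use.
      for (const Argument &CalleeArg : Callee->args())
        if (CalleeArg.hasNonNullAttr() &&
            CB->getArgOperand(CalleeArg.getArgNo()) == A)
          return true;
      return false;
    }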
@@ -904,10 +902,10 @@ static bool isFunctionMallocLike(Function *F, const SCCNodeSet &SCCNodes) {
       break;
     case Instruction::Call:
     case Instruction::Invoke: {
-      CallSite CS(RVI);
-      if (CS.hasRetAttr(Attribute::NoAlias))
+      CallBase &CB = cast<CallBase>(*RVI);
+      if (CB.hasRetAttr(Attribute::NoAlias))
         break;
-      if (CS.getCalledFunction() && SCCNodes.count(CS.getCalledFunction()))
+      if (CB.getCalledFunction() && SCCNodes.count(CB.getCalledFunction()))
         break;
       LLVM_FALLTHROUGH;
     }
@@ -1013,8 +1011,8 @@ static bool isReturnNonNull(Function *F, const SCCNodeSet &SCCNodes,
     }
     case Instruction::Call:
     case Instruction::Invoke: {
-      CallSite CS(RVI);
-      Function *Callee = CS.getCalledFunction();
+      CallBase &CB = cast<CallBase>(*RVI);
+      Function *Callee = CB.getCalledFunction();
       // A call to a node within the SCC is assumed to return null until
       // proven otherwise
       if (Callee && SCCNodes.count(Callee)) {
@@ -1223,10 +1221,11 @@ bool AttributeInferer::run(const SCCNodeSet &SCCNodes) {
 /// Helper for non-Convergent inference predicate InstrBreaksAttribute.
 static bool InstrBreaksNonConvergent(Instruction &I,
                                      const SCCNodeSet &SCCNodes) {
-  const CallSite CS(&I);
+  const CallBase *CB = dyn_cast<CallBase>(&I);
   // Breaks non-convergent assumption if CS is a convergent call to a function
   // not in the SCC.
-  return CS && CS.isConvergent() && SCCNodes.count(CS.getCalledFunction()) == 0;
+  return CB && CB->isConvergent() &&
+         SCCNodes.count(CB->getCalledFunction()) == 0;
 }

 /// Helper for NoUnwind inference predicate InstrBreaksAttribute.
@@ -1247,11 +1246,11 @@ static bool InstrBreaksNonThrowing(Instruction &I, const SCCNodeSet &SCCNodes) {

 /// Helper for NoFree inference predicate InstrBreaksAttribute.
 static bool InstrBreaksNoFree(Instruction &I, const SCCNodeSet &SCCNodes) {
-  CallSite CS(&I);
-  if (!CS)
+  CallBase *CB = dyn_cast<CallBase>(&I);
+  if (!CB)
     return false;

-  Function *Callee = CS.getCalledFunction();
+  Function *Callee = CB->getCalledFunction();
   if (!Callee)
     return true;

@@ -1368,8 +1367,8 @@ static bool addNoRecurseAttrs(const SCCNodeSet &SCCNodes) {
   // marked norecurse, so any called from F to F will not be marked norecurse.
   for (auto &BB : *F)
     for (auto &I : BB.instructionsWithoutDebug())
-      if (auto CS = CallSite(&I)) {
-        Function *Callee = CS.getCalledFunction();
+      if (auto *CB = dyn_cast<CallBase>(&I)) {
+        Function *Callee = CB->getCalledFunction();
         if (!Callee || Callee == F || !Callee->doesNotRecurse())
           // Function calls a potentially recursive function.
           return false;
@@ -1439,8 +1438,8 @@ PreservedAnalyses PostOrderFunctionAttrsPass::run(LazyCallGraph::SCC &C,
   // function.
   if (!HasUnknownCall)
     for (Instruction &I : instructions(F))
-      if (auto CS = CallSite(&I))
-        if (!CS.getCalledFunction()) {
+      if (auto *CB = dyn_cast<CallBase>(&I))
+        if (!CB->getCalledFunction()) {
           HasUnknownCall = true;
           break;
         }
@@ -1575,8 +1574,8 @@ static bool addNoRecurseAttrsTopDown(Function &F) {
     auto *I = dyn_cast<Instruction>(U);
     if (!I)
       return false;
-    CallSite CS(I);
-    if (!CS || !CS.getParent()->getParent()->doesNotRecurse())
+    CallBase *CB = dyn_cast<CallBase>(I);
+    if (!CB || !CB->getParent()->getParent()->doesNotRecurse())
       return false;
   }
   return setDoesNotRecurse(F);
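Taken together, the commit applies two replacement idioms: where the instruction may or may not be a call, CallSite's boolean test becomes dyn_cast<CallBase> plus a null check; where it is already known to be a call or invoke (the Call/Invoke switch cases), CallSite CS(RVI) becomes cast<CallBase>(*RVI). A minimal standalone sketch of the first idiom (not part of the patch), with the hypothetical helper name getDirectCallee:

    #include "llvm/IR/Function.h"
    #include "llvm/IR/InstrTypes.h"
    #include "llvm/IR/Instruction.h"

    using namespace llvm;

    // Hypothetical helper: the direct callee of I, or nullptr if I is not a
    // call/invoke or the call is indirect.
    static Function *getDirectCallee(Instruction &I) {
      if (auto *CB = dyn_cast<CallBase>(&I)) // was: if (auto CS = CallSite(&I))
        return CB->getCalledFunction();      // was: CS.getCalledFunction()
      return nullptr;
    }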