@@ -3277,7 +3277,7 @@ static bool isRemovableWrite(CallBase &CB, Value *UsedV,
   return Dest && Dest->Ptr == UsedV;
 }
 
-static bool isAllocSiteRemovable(Instruction *AI,
+static std::optional<ModRefInfo> isAllocSiteRemovable(Instruction *AI,
                                  SmallVectorImpl<WeakTrackingVH> &Users,
                                  const TargetLibraryInfo &TLI, bool KnowInit) {
   SmallVector<Instruction*, 4> Worklist;
@@ -3292,7 +3292,7 @@ static bool isAllocSiteRemovable(Instruction *AI,
       switch (I->getOpcode()) {
       default:
         // Give up the moment we see something we can't handle.
-        return false;
+        return std::nullopt;
 
       case Instruction::AddrSpaceCast:
       case Instruction::BitCast:
@@ -3307,10 +3307,10 @@ static bool isAllocSiteRemovable(Instruction *AI,
         // We also fold comparisons in some conditions provided the alloc has
         // not escaped (see isNeverEqualToUnescapedAlloc).
         if (!ICI->isEquality())
-          return false;
+          return std::nullopt;
         unsigned OtherIndex = (ICI->getOperand(0) == PI) ? 1 : 0;
         if (!isNeverEqualToUnescapedAlloc(ICI->getOperand(OtherIndex), TLI, AI))
-          return false;
+          return std::nullopt;
 
         // Do not fold compares to aligned_alloc calls, as they may have to
         // return null in case the required alignment cannot be satisfied,
@@ -3330,7 +3330,7 @@ static bool isAllocSiteRemovable(Instruction *AI,
         if (CB && TLI.getLibFunc(*CB->getCalledFunction(), TheLibFunc) &&
             TLI.has(TheLibFunc) && TheLibFunc == LibFunc_aligned_alloc &&
             !AlignmentAndSizeKnownValid(CB))
-          return false;
+          return std::nullopt;
         Users.emplace_back(I);
         continue;
       }
@@ -3340,20 +3340,20 @@ static bool isAllocSiteRemovable(Instruction *AI,
         if (IntrinsicInst *II = dyn_cast<IntrinsicInst>(I)) {
           switch (II->getIntrinsicID()) {
           default:
-            return false;
+            return std::nullopt;
 
           case Intrinsic::memmove:
           case Intrinsic::memcpy:
           case Intrinsic::memset: {
             MemIntrinsic *MI = cast<MemIntrinsic>(II);
             if (MI->isVolatile())
-              return false;
+              return std::nullopt;
             // Note: this could also be ModRef, but we can still interpret that
             // as just Mod in that case.
             ModRefInfo NewAccess =
                 MI->getRawDest() == PI ? ModRefInfo::Mod : ModRefInfo::Ref;
             if ((Access & ~NewAccess) != ModRefInfo::NoModRef)
-              return false;
+              return std::nullopt;
             Access |= NewAccess;
           }
             [[fallthrough]];
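
The Access bookkeeping in this hunk is the heart of the patch: isAllocSiteRemovable now records whether the surviving users read (Ref) or write (Mod) the allocation, and the test (Access & ~NewAccess) != ModRefInfo::NoModRef gives up as soon as a user would mix the two directions, which is what later justifies the assert that the result is never ModRef. Below is a minimal, self-contained mirror of that merge rule; MRI and mergeAccess are illustrative stand-ins only, the real ModRefInfo and its bitmask operators live in llvm/Support/ModRef.h.

// Illustrative stand-in for llvm::ModRefInfo and the merge rule used above.
#include <cassert>
#include <cstdint>

enum class MRI : uint8_t { NoModRef = 0, Ref = 1, Mod = 2, ModRef = 3 };

constexpr MRI operator|(MRI A, MRI B) { return MRI(uint8_t(A) | uint8_t(B)); }
constexpr MRI operator&(MRI A, MRI B) { return MRI(uint8_t(A) & uint8_t(B)); }
constexpr MRI operator~(MRI A) { return MRI(~uint8_t(A) & uint8_t(3)); }

// Accept NewAccess only if everything recorded so far is covered by it;
// otherwise the allocation would be both read and written, and we give up.
bool mergeAccess(MRI &Access, MRI NewAccess) {
  if ((Access & ~NewAccess) != MRI::NoModRef)
    return false;
  Access = Access | NewAccess;
  return true;
}

int main() {
  MRI Access = MRI::NoModRef;
  assert(mergeAccess(Access, MRI::Mod));  // a store into the allocation: ok
  assert(mergeAccess(Access, MRI::Mod));  // further stores: still ok
  assert(!mergeAccess(Access, MRI::Ref)); // a read after a recorded write: reject
  return 0;
}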
@@ -3393,14 +3393,14 @@ static bool isAllocSiteRemovable(Instruction *AI,
           continue;
         }
 
-        return false;
+        return std::nullopt;
 
       case Instruction::Store: {
         StoreInst *SI = cast<StoreInst>(I);
         if (SI->isVolatile() || SI->getPointerOperand() != PI)
-          return false;
+          return std::nullopt;
         if (isRefSet(Access))
-          return false;
+          return std::nullopt;
         Access |= ModRefInfo::Mod;
         Users.emplace_back(I);
         continue;
@@ -3409,9 +3409,9 @@ static bool isAllocSiteRemovable(Instruction *AI,
       case Instruction::Load: {
         LoadInst *LI = cast<LoadInst>(I);
         if (LI->isVolatile() || LI->getPointerOperand() != PI)
-          return false;
+          return std::nullopt;
         if (isModSet(Access))
-          return false;
+          return std::nullopt;
         Access |= ModRefInfo::Ref;
         Users.emplace_back(I);
         continue;
@@ -3421,7 +3421,8 @@ static bool isAllocSiteRemovable(Instruction *AI,
     }
   } while (!Worklist.empty());
 
-  return true;
+  assert(Access != ModRefInfo::ModRef);
+  return Access;
 }
 
 Instruction *InstCombinerImpl::visitAllocSite(Instruction &MI) {
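
With the bool return gone, callers get a three-way answer: std::nullopt means some user could not be handled and the allocation must stay, while an engaged value says whether the remaining users only write the allocation, only read it, or never touch its memory at all (the new assert rules out ModRef). A hedged sketch of how a caller might consume that contract, reusing the illustrative MRI enum from the previous snippet; handleAllocSite is a hypothetical helper, not part of the patch.

#include <optional>

// Sketch only: pick a rewrite strategy from the reported access kind.
void handleAllocSite(std::optional<MRI> Removable) {
  if (!Removable)
    return; // some user we cannot reason about: keep the allocation
  if (*Removable == MRI::NoModRef || *Removable == MRI::Mod) {
    // The allocation is at most written: stores/memsets into it are dead.
  } else if (*Removable == MRI::Ref) {
    // The allocation is only read: every read observes the initial value, so
    // e.g. a memcpy out of a zero-initialized allocation can become a memset.
  }
}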
@@ -3451,20 +3452,25 @@ Instruction *InstCombinerImpl::visitAllocSite(Instruction &MI) {
 
   // Determine what getInitialValueOfAllocation would return without actually
   // allocating the result.
-  bool KnowInitUndef = isa<AllocaInst>(MI);
+  bool KnowInitUndef = false;
   bool KnowInitZero = false;
-  if (!KnowInitUndef) {
-    Constant *Init = getInitialValueOfAllocation(
-        &MI, &TLI, Type::getInt8Ty(MI.getContext()));
-    if (Init) {
-      if (isa<UndefValue>(Init))
-        KnowInitUndef = true;
-      else if (Init->isNullValue())
-        KnowInitZero = true;
-    }
-  }
-
-  if (isAllocSiteRemovable(&MI, Users, TLI, KnowInitZero | KnowInitUndef)) {
+  Constant *Init = getInitialValueOfAllocation(
+      &MI, &TLI, Type::getInt8Ty(MI.getContext()));
+  if (Init) {
+    if (isa<UndefValue>(Init))
+      KnowInitUndef = true;
+    else if (Init->isNullValue())
+      KnowInitZero = true;
+  }
+  // The various sanitizers don't actually return undef memory, but rather
+  // memory initialized with special forms of runtime poison
+  auto &F = *MI.getFunction();
+  if (F.hasFnAttribute(Attribute::SanitizeMemory) ||
+      F.hasFnAttribute(Attribute::SanitizeAddress))
+    KnowInitUndef = false;
+
+  auto Removable = isAllocSiteRemovable(&MI, Users, TLI, KnowInitZero | KnowInitUndef);
+  if (Removable) {
     for (unsigned i = 0, e = Users.size(); i != e; ++i) {
       // Lowering all @llvm.objectsize and MTI calls first because they may use
       // a bitcast/GEP of the alloca we are removing.
@@ -3485,14 +3491,14 @@ Instruction *InstCombinerImpl::visitAllocSite(Instruction &MI) {
           Users[i] = nullptr; // Skip examining in the next loop.
         }
         if (auto *MTI = dyn_cast<MemTransferInst>(I)) {
-          if (KnowInitZero && getUnderlyingObject(MTI->getRawDest()) != &MI) {
+          if (KnowInitZero && isRefSet(*Removable)) {
            IRBuilderBase::InsertPointGuard Guard(Builder);
            Builder.SetInsertPoint(MTI);
            auto *M = Builder.CreateMemSet(
                MTI->getRawDest(),
                ConstantInt::get(Type::getInt8Ty(MI.getContext()), 0),
                MTI->getLength(), MTI->getDestAlign());
-            M->copyMetadata(*MTI, LLVMContext::MD_DIAssignID);
+            M->copyMetadata(*MTI);
          }
        }
      }
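
This last hunk is where the Ref information pays off. Previously the memcpy/memmove rewrite was gated on getUnderlyingObject showing that the destination was not the allocation being deleted; now it is enough that the allocation is only ever read (isRefSet(*Removable)), so a transfer out of the known-zero allocation is replaced by a zero memset at its destination, and copyMetadata(*MTI) now copies all of the transfer's metadata instead of only the DIAssignID attachment. A rough source-level picture of the intended effect, assuming the allocation and the free are themselves removed by the rest of visitAllocSite; before and after are illustrative functions, not a test from this commit.

#include <cstddef>
#include <cstdlib>
#include <cstring>

// Before: a zero-initialized buffer that is only read once, then freed.
void before(char *Dst, std::size_t N) {
  char *Buf = static_cast<char *>(std::calloc(N, 1)); // known zero-initialized
  std::memcpy(Dst, Buf, N);                           // only reads Buf
  std::free(Buf);
}

// After: with the allocation removable and only read, the copy out of it
// collapses to a zero-fill of the destination.
void after(char *Dst, std::size_t N) {
  std::memset(Dst, 0, N);
}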