//===- BasicAliasAnalysis.cpp - Stateless Alias Analysis Impl -------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file defines the primary stateless implementation of the
// Alias Analysis interface that implements identities (two different
// globals cannot alias, etc), but does no stateful analysis.
//
//===----------------------------------------------------------------------===//

#include "llvm/Analysis/BasicAliasAnalysis.h"
#include "llvm/ADT/APInt.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/Analysis/AssumptionCache.h"
#include "llvm/Analysis/CFG.h"
#include "llvm/Analysis/CaptureTracking.h"
#include "llvm/Analysis/InstructionSimplify.h"
#include "llvm/Analysis/LoopInfo.h"
#include "llvm/Analysis/MemoryBuiltins.h"
#include "llvm/Analysis/MemoryLocation.h"
#include "llvm/Analysis/PhiValues.h"
#include "llvm/Analysis/TargetLibraryInfo.h"
#include "llvm/Analysis/ValueTracking.h"
#include "llvm/IR/Argument.h"
#include "llvm/IR/Attributes.h"
#include "llvm/IR/Constant.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/DerivedTypes.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/Function.h"
#include "llvm/IR/GetElementPtrTypeIterator.h"
#include "llvm/IR/GlobalAlias.h"
#include "llvm/IR/GlobalVariable.h"
#include "llvm/IR/InstrTypes.h"
#include "llvm/IR/Instruction.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/Intrinsics.h"
#include "llvm/IR/Metadata.h"
#include "llvm/IR/Operator.h"
#include "llvm/IR/Type.h"
#include "llvm/IR/User.h"
#include "llvm/IR/Value.h"
#include "llvm/Pass.h"
#include "llvm/Support/Casting.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Compiler.h"
#include "llvm/Support/KnownBits.h"
#include <cassert>
#include <cstdint>
#include <cstdlib>
#include <utility>

#define DEBUG_TYPE "basicaa"

using namespace llvm;

/// Enable analysis of recursive PHI nodes.
static cl::opt<bool> EnableRecPhiAnalysis("basicaa-recphi", cl::Hidden,
                                          cl::init(false));

/// By default, even on 32-bit architectures we use 64-bit integers for
/// calculations. This will allow us to more aggressively decompose indexing
/// expressions calculated using i64 values (e.g., long long in C), which is
/// common enough to worry about.
static cl::opt<bool> ForceAtLeast64Bits("basicaa-force-at-least-64b",
                                        cl::Hidden, cl::init(true));
static cl::opt<bool> DoubleCalcBits("basicaa-double-calc-bits",
                                    cl::Hidden, cl::init(false));

/// SearchLimitReached / SearchTimes shows how often the limit to decompose
/// GEPs is reached. It will affect the precision of basic alias analysis.
STATISTIC(SearchLimitReached, "Number of times the limit to "
                              "decompose GEPs is reached");
STATISTIC(SearchTimes, "Number of times a GEP is decomposed");

/// Cutoff after which to stop analysing a set of phi nodes potentially involved
/// in a cycle. Because we are analysing 'through' phi nodes, we need to be
/// careful with value equivalence. We use reachability to make sure a value
/// cannot be involved in a cycle.
const unsigned MaxNumPhiBBsValueReachabilityCheck = 20;

// The max limit of the search depth in DecomposeGEPExpression() and
// GetUnderlyingObject(). Both functions need to use the same search depth;
// otherwise the algorithm in aliasGEP will assert.
static const unsigned MaxLookupSearchDepth = 6;

bool BasicAAResult::invalidate(Function &Fn, const PreservedAnalyses &PA,
                               FunctionAnalysisManager::Invalidator &Inv) {
  // We don't care if this analysis itself is preserved, it has no state. But
  // we need to check that the analyses it depends on have been. Note that we
  // may be created without handles to some analyses and in that case don't
  // depend on them.
  if (Inv.invalidate<AssumptionAnalysis>(Fn, PA) ||
      (DT && Inv.invalidate<DominatorTreeAnalysis>(Fn, PA)) ||
      (LI && Inv.invalidate<LoopAnalysis>(Fn, PA)) ||
      (PV && Inv.invalidate<PhiValuesAnalysis>(Fn, PA)))
    return true;

  // Otherwise this analysis result remains valid.
  return false;
}

//===----------------------------------------------------------------------===//
// Useful predicates
//===----------------------------------------------------------------------===//

/// Returns true if the pointer is to a function-local object that never
/// escapes from the function.
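///
/// For example (an illustrative sketch): an alloca whose address is only
/// loaded from and stored through is non-escaping, while one whose address is
/// passed to an unknown callee without 'nocapture' is treated as escaping.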
static bool isNonEscapingLocalObject(const Value *V) {
  // If this is a local allocation, check to see if it escapes.
  if (isa<AllocaInst>(V) || isNoAliasCall(V))
    // Set StoreCaptures to True so that we can assume in our callers that the
    // pointer is not the result of a load instruction. Currently
    // PointerMayBeCaptured doesn't have any special analysis for the
    // StoreCaptures=false case; if it did, our callers could be refined to be
    // more precise.
    return !PointerMayBeCaptured(V, false, /*StoreCaptures=*/true);

  // If this is an argument that corresponds to a byval or noalias argument,
  // then it has not escaped before entering the function. Check if it escapes
  // inside the function.
  if (const Argument *A = dyn_cast<Argument>(V))
    if (A->hasByValAttr() || A->hasNoAliasAttr())
      // Note that even if the argument is marked nocapture, we still need to
      // check for copies made inside the function. The nocapture attribute
      // only specifies that there are no copies made that outlive the
      // function.
      return !PointerMayBeCaptured(V, false, /*StoreCaptures=*/true);

  return false;
}

/// Returns true if the pointer is one which would have been considered an
/// escape by isNonEscapingLocalObject.
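///
/// For example (an illustrative sketch): a pointer returned by an unknown
/// call, or loaded from memory, may refer to a captured local object, so both
/// are treated as escape sources below.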
static bool isEscapeSource(const Value *V) {
  if (isa<CallBase>(V))
    return true;

  if (isa<Argument>(V))
    return true;

  // The load case works because isNonEscapingLocalObject considers all
  // stores to be escapes (it passes true for the StoreCaptures argument
  // to PointerMayBeCaptured).
  if (isa<LoadInst>(V))
    return true;

  return false;
}

/// Returns the size of the object specified by V or UnknownSize if unknown.
static uint64_t getObjectSize(const Value *V, const DataLayout &DL,
                              const TargetLibraryInfo &TLI,
                              bool NullIsValidLoc,
                              bool RoundToAlign = false) {
  uint64_t Size;
  ObjectSizeOpts Opts;
  Opts.RoundToAlign = RoundToAlign;
  Opts.NullIsUnknownSize = NullIsValidLoc;
  if (getObjectSize(V, Size, DL, &TLI, Opts))
    return Size;
  return MemoryLocation::UnknownSize;
}

/// Returns true if we can prove that the object specified by V is smaller than
/// Size.
static bool isObjectSmallerThan(const Value *V, uint64_t Size,
                                const DataLayout &DL,
                                const TargetLibraryInfo &TLI,
                                bool NullIsValidLoc) {
  // Note that the meanings of the "object" are slightly different in the
  // following contexts:
  //   c1: llvm::getObjectSize()
  //   c2: llvm.objectsize() intrinsic
  //   c3: isObjectSmallerThan()
  // c1 and c2 share the same meaning; however, the meaning of "object" in c3
  // refers to the "entire object".
  //
  // Consider this example:
  //   char *p = (char*)malloc(100)
  //   char *q = p+80;
  //
  // In the context of c1 and c2, the "object" pointed to by q refers to the
  // stretch of memory of q[0:19]. So, getObjectSize(q) should return 20.
  //
  // However, in the context of c3, the "object" refers to the chunk of memory
  // being allocated. So, the "object" has 100 bytes, and q points to the
  // middle of the "object". In case q is passed to isObjectSmallerThan() as
  // the 1st parameter, before llvm::getObjectSize() is called to get the size
  // of the entire object, we should:
  //   - either rewind the pointer q to the base-address of the object in
  //     question (in this case rewind to p), or
  //   - just give up. It is up to the caller to make sure the pointer is
  //     pointing to the base address of the object.
  //
  // We go for the 2nd option for simplicity.
  if (!isIdentifiedObject(V))
    return false;

  // This function needs to use the aligned object size because we allow
  // reads a bit past the end given sufficient alignment.
  uint64_t ObjectSize = getObjectSize(V, DL, TLI, NullIsValidLoc,
                                      /*RoundToAlign*/ true);

  return ObjectSize != MemoryLocation::UnknownSize && ObjectSize < Size;
}

/// Returns true if we can prove that the object specified by V has size Size.
static bool isObjectSize(const Value *V, uint64_t Size, const DataLayout &DL,
                         const TargetLibraryInfo &TLI, bool NullIsValidLoc) {
  uint64_t ObjectSize = getObjectSize(V, DL, TLI, NullIsValidLoc);
  return ObjectSize != MemoryLocation::UnknownSize && ObjectSize == Size;
}

//===----------------------------------------------------------------------===//
// GetElementPtr Instruction Decomposition and Analysis
//===----------------------------------------------------------------------===//

/// Analyzes the specified value as a linear expression: "A*V + B", where A and
/// B are constant integers.
///
/// Returns the scale and offset values as APInts, returns V as a Value*, and
/// reports whether we looked through any sign or zero extends. The incoming
/// Value is known to have IntegerType, and it may already be sign or zero
/// extended.
///
/// Note that this looks through extends, so the high bits may not be
/// represented in the result.
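///
/// For example (an illustrative sketch, not tied to any particular caller):
/// given
///   %a = shl i64 %x, 2
///   %b = add i64 %a, 8
/// analyzing %b yields V == %x with Scale == 4 and Offset == 8.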
/*static*/ const Value *BasicAAResult::GetLinearExpression(
    const Value *V, APInt &Scale, APInt &Offset, unsigned &ZExtBits,
    unsigned &SExtBits, const DataLayout &DL, unsigned Depth,
    AssumptionCache *AC, DominatorTree *DT, bool &NSW, bool &NUW) {
  assert(V->getType()->isIntegerTy() && "Not an integer value");

  // Limit our recursion depth.
  if (Depth == 6) {
    Scale = 1;
    Offset = 0;
    return V;
  }

  if (const ConstantInt *Const = dyn_cast<ConstantInt>(V)) {
    // If it's a constant, just convert it to an offset and remove the
    // variable. If we've been called recursively, the Offset bit width will be
    // greater than the constant's (the Offset's always as wide as the
    // outermost call), so we'll zext here and process any extension in the
    // isa<SExtInst> & isa<ZExtInst> cases below.
    Offset += Const->getValue().zextOrSelf(Offset.getBitWidth());
    assert(Scale == 0 && "Constant values don't have a scale");
    return V;
  }

  if (const BinaryOperator *BOp = dyn_cast<BinaryOperator>(V)) {
    if (ConstantInt *RHSC = dyn_cast<ConstantInt>(BOp->getOperand(1))) {
      // If we've been called recursively, then Offset and Scale will be wider
      // than the BOp operands. We'll always zext it here as we'll process sign
      // extensions below (see the isa<SExtInst> / isa<ZExtInst> cases).
      APInt RHS = RHSC->getValue().zextOrSelf(Offset.getBitWidth());

      switch (BOp->getOpcode()) {
      default:
        // We don't understand this instruction, so we can't decompose it any
        // further.
        Scale = 1;
        Offset = 0;
        return V;
      case Instruction::Or:
        // X|C == X+C if all the bits in C are unset in X. Otherwise we can't
        // analyze it.
        if (!MaskedValueIsZero(BOp->getOperand(0), RHSC->getValue(), DL, 0, AC,
                               BOp, DT)) {
          Scale = 1;
          Offset = 0;
          return V;
        }
        LLVM_FALLTHROUGH;
      case Instruction::Add:
        V = GetLinearExpression(BOp->getOperand(0), Scale, Offset, ZExtBits,
                                SExtBits, DL, Depth + 1, AC, DT, NSW, NUW);
        Offset += RHS;
        break;
      case Instruction::Sub:
        V = GetLinearExpression(BOp->getOperand(0), Scale, Offset, ZExtBits,
                                SExtBits, DL, Depth + 1, AC, DT, NSW, NUW);
        Offset -= RHS;
        break;
      case Instruction::Mul:
        V = GetLinearExpression(BOp->getOperand(0), Scale, Offset, ZExtBits,
                                SExtBits, DL, Depth + 1, AC, DT, NSW, NUW);
        Offset *= RHS;
        Scale *= RHS;
        break;
      case Instruction::Shl:
        V = GetLinearExpression(BOp->getOperand(0), Scale, Offset, ZExtBits,
                                SExtBits, DL, Depth + 1, AC, DT, NSW, NUW);

        // We're trying to linearize an expression of the kind:
        //   shl i8 -128, 36
        // where the shift count exceeds the bitwidth of the type.
        // We can't decompose this further (the expression would return
        // a poison value).
        if (Offset.getBitWidth() < RHS.getLimitedValue() ||
            Scale.getBitWidth() < RHS.getLimitedValue()) {
          Scale = 1;
          Offset = 0;
          return V;
        }

        Offset <<= RHS.getLimitedValue();
        Scale <<= RHS.getLimitedValue();
        // The semantics of nsw and nuw for left shifts don't match those of
        // multiplications, so we won't propagate them.
        NSW = NUW = false;
        return V;
      }

      if (isa<OverflowingBinaryOperator>(BOp)) {
        NUW &= BOp->hasNoUnsignedWrap();
        NSW &= BOp->hasNoSignedWrap();
      }
      return V;
    }
  }

  // Since GEP indices are sign extended anyway, we don't care about the high
  // bits of a sign or zero extended value - just scales and offsets. The
  // extensions have to be consistent though.
  if (isa<SExtInst>(V) || isa<ZExtInst>(V)) {
    Value *CastOp = cast<CastInst>(V)->getOperand(0);
    unsigned NewWidth = V->getType()->getPrimitiveSizeInBits();
    unsigned SmallWidth = CastOp->getType()->getPrimitiveSizeInBits();
    unsigned OldZExtBits = ZExtBits, OldSExtBits = SExtBits;
    const Value *Result =
        GetLinearExpression(CastOp, Scale, Offset, ZExtBits, SExtBits, DL,
                            Depth + 1, AC, DT, NSW, NUW);

    // zext(zext(%x)) == zext(%x), and similarly for sext; we'll handle this
    // by just incrementing the number of bits we've extended by.
    unsigned ExtendedBy = NewWidth - SmallWidth;

    if (isa<SExtInst>(V) && ZExtBits == 0) {
      // sext(sext(%x, a), b) == sext(%x, a + b)

      if (NSW) {
        // We haven't sign-wrapped, so it's valid to decompose sext(%x + c)
        // into sext(%x) + sext(c). We'll sext the Offset ourselves:
        unsigned OldWidth = Offset.getBitWidth();
        Offset = Offset.trunc(SmallWidth).sext(NewWidth).zextOrSelf(OldWidth);
      } else {
        // We may have signed-wrapped, so don't decompose sext(%x + c) into
        // sext(%x) + sext(c)
        Scale = 1;
        Offset = 0;
        Result = CastOp;
        ZExtBits = OldZExtBits;
        SExtBits = OldSExtBits;
      }
      SExtBits += ExtendedBy;
    } else {
      // sext(zext(%x, a), b) = zext(zext(%x, a), b) = zext(%x, a + b)

      if (!NUW) {
        // We may have unsigned-wrapped, so don't decompose zext(%x + c) into
        // zext(%x) + zext(c)
        Scale = 1;
        Offset = 0;
        Result = CastOp;
        ZExtBits = OldZExtBits;
        SExtBits = OldSExtBits;
      }
      ZExtBits += ExtendedBy;
    }

    return Result;
  }

  Scale = 1;
  Offset = 0;
  return V;
}

/// Ensures that a pointer offset fits in an integer of size PointerSize
/// (in bits) when that size is smaller than the maximum pointer size. This is
/// an issue in particular for 32b pointers with negative indices that rely on
/// two's complement wrap-arounds for precise alias information where the
/// maximum pointer size is 64b.
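///
/// For example (an illustrative sketch): given a 64-bit Offset of 0xFFFFFFFF
/// and PointerSize == 32, the shift pair below sign-extends from bit 31 and
/// yields -1 rather than 2^32 - 1.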
static APInt adjustToPointerSize(APInt Offset, unsigned PointerSize) {
  assert(PointerSize <= Offset.getBitWidth() && "Invalid PointerSize!");
  unsigned ShiftBits = Offset.getBitWidth() - PointerSize;
  return (Offset << ShiftBits).ashr(ShiftBits);
}

static unsigned getMaxPointerSize(const DataLayout &DL) {
  unsigned MaxPointerSize = DL.getMaxPointerSizeInBits();
  if (MaxPointerSize < 64 && ForceAtLeast64Bits) MaxPointerSize = 64;
  if (DoubleCalcBits) MaxPointerSize *= 2;

  return MaxPointerSize;
}

/// If V is a symbolic pointer expression, decompose it into a base pointer
/// with a constant offset and a number of scaled symbolic offsets.
///
/// The scaled symbolic offsets (represented by pairs of a Value* and a scale
/// in the VarIndices vector) are Value*'s that are known to be scaled by the
/// specified amount, but which may have other unrepresented high bits. As
/// such, the gep cannot necessarily be reconstructed from its decomposed form.
///
/// When DataLayout is around, this function is capable of analyzing everything
/// that GetUnderlyingObject can look through. To be able to do that
/// GetUnderlyingObject and DecomposeGEPExpression must use the same search
/// depth (MaxLookupSearchDepth). When DataLayout is not around, it just looks
/// through pointer casts.
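///
/// For example (an illustrative sketch): on a 64-bit target, decomposing
///   %gep = getelementptr i32, i32* %p, i64 %i
/// produces Base == %p, zero constant offsets, and a single variable index
/// with V == %i and Scale == 4.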
bool BasicAAResult::DecomposeGEPExpression(const Value *V,
    DecomposedGEP &Decomposed, const DataLayout &DL, AssumptionCache *AC,
    DominatorTree *DT) {
  // Limit recursion depth to limit compile time in crazy cases.
  unsigned MaxLookup = MaxLookupSearchDepth;
  SearchTimes++;

  unsigned MaxPointerSize = getMaxPointerSize(DL);
  Decomposed.VarIndices.clear();
  do {
    // See if this is a bitcast or GEP.
    const Operator *Op = dyn_cast<Operator>(V);
    if (!Op) {
      // The only non-operator case we can handle is a GlobalAlias.
      if (const GlobalAlias *GA = dyn_cast<GlobalAlias>(V)) {
        if (!GA->isInterposable()) {
          V = GA->getAliasee();
          continue;
        }
      }
      Decomposed.Base = V;
      return false;
    }

    if (Op->getOpcode() == Instruction::BitCast ||
        Op->getOpcode() == Instruction::AddrSpaceCast) {
      V = Op->getOperand(0);
      continue;
    }

    const GEPOperator *GEPOp = dyn_cast<GEPOperator>(Op);
    if (!GEPOp) {
      if (const auto *Call = dyn_cast<CallBase>(V)) {
        // CaptureTracking can know about special capturing properties of some
        // intrinsics like launder.invariant.group that can't be expressed with
        // the attributes but have properties like returning an aliasing
        // pointer. Because some analysis may assume that a nocaptured pointer
        // is not returned from some special intrinsic (because the function
        // would have to be marked with the 'returned' attribute), it is
        // crucial to use this function because it should be in sync with
        // CaptureTracking. Not using it may cause weird miscompilations where
        // two aliasing pointers are assumed to be noalias.
        if (auto *RP = getArgumentAliasingToReturnedPointer(Call)) {
          V = RP;
          continue;
        }
      }

      // If it's not a GEP, hand it off to SimplifyInstruction to see if it
      // can come up with something. This matches what GetUnderlyingObject does.
      if (const Instruction *I = dyn_cast<Instruction>(V))
        // TODO: Get a DominatorTree and AssumptionCache and use them here
        // (these are both now available in this function, but this should be
        // updated when GetUnderlyingObject is updated). TLI should be
        // provided also.
        if (const Value *Simplified =
                SimplifyInstruction(const_cast<Instruction *>(I), DL)) {
          V = Simplified;
          continue;
        }

      Decomposed.Base = V;
      return false;
    }

    // Don't attempt to analyze GEPs over unsized objects.
    if (!GEPOp->getSourceElementType()->isSized()) {
      Decomposed.Base = V;
      return false;
    }

    unsigned AS = GEPOp->getPointerAddressSpace();
    // Walk the indices of the GEP, accumulating them into BaseOff/VarIndices.
    gep_type_iterator GTI = gep_type_begin(GEPOp);
    unsigned PointerSize = DL.getPointerSizeInBits(AS);
    // Assume all GEP operands are constants until proven otherwise.
    bool GepHasConstantOffset = true;
    for (User::const_op_iterator I = GEPOp->op_begin() + 1, E = GEPOp->op_end();
         I != E; ++I, ++GTI) {
      const Value *Index = *I;
      // Compute the (potentially symbolic) offset in bytes for this index.
      if (StructType *STy = GTI.getStructTypeOrNull()) {
        // For a struct, add the member offset.
        unsigned FieldNo = cast<ConstantInt>(Index)->getZExtValue();
        if (FieldNo == 0)
          continue;

        Decomposed.StructOffset +=
          DL.getStructLayout(STy)->getElementOffset(FieldNo);
        continue;
      }

      // For an array/pointer, add the element offset, explicitly scaled.
      if (const ConstantInt *CIdx = dyn_cast<ConstantInt>(Index)) {
        if (CIdx->isZero())
          continue;
        Decomposed.OtherOffset +=
          (DL.getTypeAllocSize(GTI.getIndexedType()) *
            CIdx->getValue().sextOrSelf(MaxPointerSize))
              .sextOrTrunc(MaxPointerSize);
        continue;
      }

      GepHasConstantOffset = false;

      APInt Scale(MaxPointerSize, DL.getTypeAllocSize(GTI.getIndexedType()));
      unsigned ZExtBits = 0, SExtBits = 0;

      // If the integer type is smaller than the pointer size, it is implicitly
      // sign extended to pointer size.
      unsigned Width = Index->getType()->getIntegerBitWidth();
      if (PointerSize > Width)
        SExtBits += PointerSize - Width;

      // Use GetLinearExpression to decompose the index into a C1*V+C2 form.
      APInt IndexScale(Width, 0), IndexOffset(Width, 0);
      bool NSW = true, NUW = true;
      const Value *OrigIndex = Index;
      Index = GetLinearExpression(Index, IndexScale, IndexOffset, ZExtBits,
                                  SExtBits, DL, 0, AC, DT, NSW, NUW);

      // The GEP index scale ("Scale") scales C1*V+C2, yielding (C1*V+C2)*Scale.
      // This gives us an aggregate computation of (C1*Scale)*V + C2*Scale.

      // It can be the case that, even though C1*V+C2 does not overflow for
      // relevant values of V, (C2*Scale) can overflow. In that case, we cannot
      // decompose the expression in this way.
      //
      // FIXME: C1*Scale and the other operations in the decomposed
      // (C1*Scale)*V+C2*Scale can also overflow. We should check for this
      // possibility.
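      //
      // For example (an illustrative sketch): with MaxPointerSize == 64,
      // IndexOffset == 2^40 and Scale == 2^30 give a product of 2^70, which
      // needs more than 64 signed bits, so we fall back to the original
      // index below.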
      APInt WideScaledOffset = IndexOffset.sextOrTrunc(MaxPointerSize*2) *
                                 Scale.sext(MaxPointerSize*2);
      if (WideScaledOffset.getMinSignedBits() > MaxPointerSize) {
        Index = OrigIndex;
        IndexScale = 1;
        IndexOffset = 0;

        ZExtBits = SExtBits = 0;
        if (PointerSize > Width)
          SExtBits += PointerSize - Width;
      } else {
        Decomposed.OtherOffset += IndexOffset.sextOrTrunc(MaxPointerSize) * Scale;
        Scale *= IndexScale.sextOrTrunc(MaxPointerSize);
      }

      // If we already had an occurrence of this index variable, merge this
      // scale into it. For example, we want to handle:
      //   A[x][x] -> x*16 + x*4 -> x*20
      // This also ensures that 'x' only appears in the index list once.
      for (unsigned i = 0, e = Decomposed.VarIndices.size(); i != e; ++i) {
        if (Decomposed.VarIndices[i].V == Index &&
            Decomposed.VarIndices[i].ZExtBits == ZExtBits &&
            Decomposed.VarIndices[i].SExtBits == SExtBits) {
          Scale += Decomposed.VarIndices[i].Scale;
          Decomposed.VarIndices.erase(Decomposed.VarIndices.begin() + i);
          break;
        }
      }

      // Make sure that we have a scale that makes sense for this target's
      // pointer size.
      Scale = adjustToPointerSize(Scale, PointerSize);

      if (!!Scale) {
        VariableGEPIndex Entry = {Index, ZExtBits, SExtBits, Scale};
        Decomposed.VarIndices.push_back(Entry);
      }
    }

    // Take care of wrap-arounds.
    if (GepHasConstantOffset) {
      Decomposed.StructOffset =
        adjustToPointerSize(Decomposed.StructOffset, PointerSize);
      Decomposed.OtherOffset =
        adjustToPointerSize(Decomposed.OtherOffset, PointerSize);
    }

    // Analyze the base pointer next.
    V = GEPOp->getOperand(0);
  } while (--MaxLookup);

  // If the chain of expressions is too deep, just return early.
  Decomposed.Base = V;
  SearchLimitReached++;
  return true;
}

/// Returns whether the given pointer value points to memory that is local to
/// the function, with global constants being considered local to all
/// functions.
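///
/// For example (an illustrative sketch): a pointer into
///   @g = constant i32 42
/// points to constant memory, and with OrLocal set an unescaped alloca
/// qualifies as well.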
bool BasicAAResult::pointsToConstantMemory(const MemoryLocation &Loc,
                                           bool OrLocal) {
  assert(Visited.empty() && "Visited must be cleared after use!");

  unsigned MaxLookup = 8;
  SmallVector<const Value *, 16> Worklist;
  Worklist.push_back(Loc.Ptr);
  do {
    const Value *V = GetUnderlyingObject(Worklist.pop_back_val(), DL);
    if (!Visited.insert(V).second) {
      Visited.clear();
      return AAResultBase::pointsToConstantMemory(Loc, OrLocal);
    }

    // An alloca instruction defines local memory.
    if (OrLocal && isa<AllocaInst>(V))
      continue;

    // A global constant counts as local memory for our purposes.
    if (const GlobalVariable *GV = dyn_cast<GlobalVariable>(V)) {
      // Note: this doesn't require GV to be "ODR" because it isn't legal for a
      // global to be marked constant in some modules and non-constant in
      // others. GV may even be a declaration, not a definition.
      if (!GV->isConstant()) {
        Visited.clear();
        return AAResultBase::pointsToConstantMemory(Loc, OrLocal);
      }
      continue;
    }

    // If both select values point to local memory, then so does the select.
    if (const SelectInst *SI = dyn_cast<SelectInst>(V)) {
      Worklist.push_back(SI->getTrueValue());
      Worklist.push_back(SI->getFalseValue());
      continue;
    }

    // If all values incoming to a phi node point to local memory, then so does
    // the phi.
    if (const PHINode *PN = dyn_cast<PHINode>(V)) {
      // Don't bother inspecting phi nodes with many operands.
      if (PN->getNumIncomingValues() > MaxLookup) {
        Visited.clear();
        return AAResultBase::pointsToConstantMemory(Loc, OrLocal);
      }
      for (Value *IncValue : PN->incoming_values())
        Worklist.push_back(IncValue);
      continue;
    }

    // Otherwise be conservative.
    Visited.clear();
    return AAResultBase::pointsToConstantMemory(Loc, OrLocal);
  } while (!Worklist.empty() && --MaxLookup);

  Visited.clear();
  return Worklist.empty();
}

/// Returns the behavior when calling the given call site.
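///
/// For example (an illustrative sketch): a call site carrying both the
/// 'readonly' and 'argmemonly' attributes intersects to
/// FMRB_OnlyReadsArgumentPointees below.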
FunctionModRefBehavior BasicAAResult::getModRefBehavior(const CallBase *Call) {
  if (Call->doesNotAccessMemory())
    // Can't do better than this.
    return FMRB_DoesNotAccessMemory;

  FunctionModRefBehavior Min = FMRB_UnknownModRefBehavior;

  // If the callsite knows it only reads memory, don't return worse
  // than that.
  if (Call->onlyReadsMemory())
    Min = FMRB_OnlyReadsMemory;
  else if (Call->doesNotReadMemory())
    Min = FMRB_DoesNotReadMemory;

  if (Call->onlyAccessesArgMemory())
    Min = FunctionModRefBehavior(Min & FMRB_OnlyAccessesArgumentPointees);
  else if (Call->onlyAccessesInaccessibleMemory())
    Min = FunctionModRefBehavior(Min & FMRB_OnlyAccessesInaccessibleMem);
  else if (Call->onlyAccessesInaccessibleMemOrArgMem())
    Min = FunctionModRefBehavior(Min & FMRB_OnlyAccessesInaccessibleOrArgMem);

  // If the call has operand bundles then aliasing attributes from the function
  // it calls do not directly apply to the call. This can be made more precise
  // in the future.
  if (!Call->hasOperandBundles())
    if (const Function *F = Call->getCalledFunction())
      Min =
          FunctionModRefBehavior(Min & getBestAAResults().getModRefBehavior(F));

  return Min;
}

/// Returns the behavior when calling the given function. For use when the call
/// site is not known.
FunctionModRefBehavior BasicAAResult::getModRefBehavior(const Function *F) {
  // If the function declares it doesn't access memory, we can't do better.
  if (F->doesNotAccessMemory())
    return FMRB_DoesNotAccessMemory;

  FunctionModRefBehavior Min = FMRB_UnknownModRefBehavior;

  // If the function declares it only reads memory, go with that.
  if (F->onlyReadsMemory())
    Min = FMRB_OnlyReadsMemory;
  else if (F->doesNotReadMemory())
    Min = FMRB_DoesNotReadMemory;

  if (F->onlyAccessesArgMemory())
    Min = FunctionModRefBehavior(Min & FMRB_OnlyAccessesArgumentPointees);
  else if (F->onlyAccessesInaccessibleMemory())
    Min = FunctionModRefBehavior(Min & FMRB_OnlyAccessesInaccessibleMem);
  else if (F->onlyAccessesInaccessibleMemOrArgMem())
    Min = FunctionModRefBehavior(Min & FMRB_OnlyAccessesInaccessibleOrArgMem);

  return Min;
}
/// Returns true if this is a writeonly (i.e., Mod only) parameter.
static bool isWriteOnlyParam(const CallBase *Call, unsigned ArgIdx,
                             const TargetLibraryInfo &TLI) {
  if (Call->paramHasAttr(ArgIdx, Attribute::WriteOnly))
    return true;

  // We can bound the aliasing properties of memset_pattern16 just as we can
  // for memcpy/memset. This is particularly important because the
  // LoopIdiomRecognizer likes to turn loops into calls to memset_pattern16
  // whenever possible.
  // FIXME: Consider handling this in InferFunctionAttr.cpp together with other
  // attributes.
  LibFunc F;
  if (Call->getCalledFunction() &&
      TLI.getLibFunc(*Call->getCalledFunction(), F) &&
      F == LibFunc_memset_pattern16 && TLI.has(F))
    if (ArgIdx == 0)
      return true;

  // TODO: memset_pattern4, memset_pattern8
  // TODO: _chk variants
  // TODO: strcmp, strcpy

  return false;
}

ModRefInfo BasicAAResult::getArgModRefInfo(const CallBase *Call,
                                           unsigned ArgIdx) {
  // Checking for known builtin intrinsics and target library functions.
  if (isWriteOnlyParam(Call, ArgIdx, TLI))
    return ModRefInfo::Mod;

  if (Call->paramHasAttr(ArgIdx, Attribute::ReadOnly))
    return ModRefInfo::Ref;

  if (Call->paramHasAttr(ArgIdx, Attribute::ReadNone))
    return ModRefInfo::NoModRef;

  return AAResultBase::getArgModRefInfo(Call, ArgIdx);
}

static bool isIntrinsicCall(const CallBase *Call, Intrinsic::ID IID) {
  const IntrinsicInst *II = dyn_cast<IntrinsicInst>(Call);
  return II && II->getIntrinsicID() == IID;
}
#ifndef NDEBUG
static const Function *getParent(const Value *V) {
  if (const Instruction *inst = dyn_cast<Instruction>(V)) {
    if (!inst->getParent())
      return nullptr;
    return inst->getParent()->getParent();
  }

  if (const Argument *arg = dyn_cast<Argument>(V))
    return arg->getParent();

  return nullptr;
}

static bool notDifferentParent(const Value *O1, const Value *O2) {
  const Function *F1 = getParent(O1);
  const Function *F2 = getParent(O2);

  return !F1 || !F2 || F1 == F2;
}
#endif

AliasResult BasicAAResult::alias(const MemoryLocation &LocA,
                                 const MemoryLocation &LocB) {
  assert(notDifferentParent(LocA.Ptr, LocB.Ptr) &&
         "BasicAliasAnalysis doesn't support interprocedural queries.");

  // If we have a directly cached entry for these locations, we have recursed
  // through this once, so just return the cached results. Notably, when this
  // happens, we don't clear the cache.
  auto CacheIt = AliasCache.find(LocPair(LocA, LocB));
  if (CacheIt != AliasCache.end())
    return CacheIt->second;

  AliasResult Alias = aliasCheck(LocA.Ptr, LocA.Size, LocA.AATags, LocB.Ptr,
                                 LocB.Size, LocB.AATags);
  // AliasCache rarely has more than 1 or 2 elements, always use
  // shrink_and_clear so it quickly returns to the inline capacity of the
  // SmallDenseMap if it ever grows larger.
  // FIXME: This should really be shrink_to_inline_capacity_and_clear().
  AliasCache.shrink_and_clear();
  VisitedPhiBBs.clear();
  return Alias;
}

/// Checks to see if the specified callsite can clobber the specified memory
/// object.
///
/// Since we only look at local properties of this function, we really can't
/// say much about this query. We do, however, use simple "address taken"
/// analysis on local objects.
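///
/// For example (an illustrative sketch): for an unescaped alloca %a and a
/// call that can only reach %a through a 'nocapture readonly' argument, the
/// loop below reports at most ModRefInfo::Ref for %a.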
Chandler Carruth81aa7122019-01-07 05:42:51 +0000830ModRefInfo BasicAAResult::getModRefInfo(const CallBase *Call,
Chandler Carruth91468332015-09-09 17:55:00 +0000831 const MemoryLocation &Loc) {
Chandler Carruth81aa7122019-01-07 05:42:51 +0000832 assert(notDifferentParent(Call, Loc.Ptr) &&
Dan Gohman9e86f432010-07-07 14:27:09 +0000833 "AliasAnalysis query involving multiple functions!");
834
Chandler Carruth91468332015-09-09 17:55:00 +0000835 const Value *Object = GetUnderlyingObject(Loc.Ptr, DL);
Jakub Staszak394e5a92013-08-24 14:16:00 +0000836
Reid Kleckner3ea7b0a2018-08-14 01:24:35 +0000837 // Calls marked 'tail' cannot read or write allocas from the current frame
838 // because the current frame might be destroyed by the time they run. However,
839 // a tail call may use an alloca with byval. Calling with byval copies the
840 // contents of the alloca into argument registers or stack slots, so there is
841 // no lifetime issue.
Chris Lattner92e803c2009-11-22 16:05:05 +0000842 if (isa<AllocaInst>(Object))
Chandler Carruth81aa7122019-01-07 05:42:51 +0000843 if (const CallInst *CI = dyn_cast<CallInst>(Call))
Reid Kleckner3ea7b0a2018-08-14 01:24:35 +0000844 if (CI->isTailCall() &&
845 !CI->getAttributes().hasAttrSomewhere(Attribute::ByVal))
Alina Sbirleac94e8962017-12-07 22:41:34 +0000846 return ModRefInfo::NoModRef;
Jakub Staszak394e5a92013-08-24 14:16:00 +0000847
Reid Klecknerbf6ada62018-12-21 19:59:03 +0000848 // Stack restore is able to modify unescaped dynamic allocas. Assume it may
849 // modify them even though the alloca is not escaped.
850 if (auto *AI = dyn_cast<AllocaInst>(Object))
Chandler Carruth81aa7122019-01-07 05:42:51 +0000851 if (!AI->isStaticAlloca() && isIntrinsicCall(Call, Intrinsic::stackrestore))
Reid Klecknerbf6ada62018-12-21 19:59:03 +0000852 return ModRefInfo::Mod;
853
Chris Lattner92e803c2009-11-22 16:05:05 +0000854 // If the pointer is to a locally allocated object that does not escape,
Chris Lattnerb34b82e2009-11-23 16:44:43 +0000855 // then the call can not mod/ref the pointer unless the call takes the pointer
856 // as an argument, and itself doesn't capture it.
Chandler Carruth81aa7122019-01-07 05:42:51 +0000857 if (!isa<Constant>(Object) && Call != Object &&
Dan Gohman21de4c02010-07-01 20:08:40 +0000858 isNonEscapingLocalObject(Object)) {
Igor Laevskyb89bfff2017-03-01 13:19:51 +0000859
860 // Optimistically assume that call doesn't touch Object and check this
861 // assumption in the following loop.
Alina Sbirleac94e8962017-12-07 22:41:34 +0000862 ModRefInfo Result = ModRefInfo::NoModRef;
Alina Sbirlea9680c052017-12-21 21:41:53 +0000863 bool IsMustAlias = true;
Igor Laevskyb89bfff2017-03-01 13:19:51 +0000864
Igor Laevskya08c9222016-01-16 12:15:53 +0000865 unsigned OperandNo = 0;
Chandler Carruth81aa7122019-01-07 05:42:51 +0000866 for (auto CI = Call->data_operands_begin(), CE = Call->data_operands_end();
Igor Laevskya08c9222016-01-16 12:15:53 +0000867 CI != CE; ++CI, ++OperandNo) {
Chris Lattnerc10ecd82011-05-23 05:15:43 +0000868 // Only look at the no-capture or byval pointer arguments. If this
869 // pointer were passed to arguments that were neither of these, then it
870 // couldn't be no-capture.
Duncan Sands1df98592010-02-16 11:11:14 +0000871 if (!(*CI)->getType()->isPointerTy() ||
Chandler Carruth81aa7122019-01-07 05:42:51 +0000872 (!Call->doesNotCapture(OperandNo) &&
873 OperandNo < Call->getNumArgOperands() &&
874 !Call->isByValArgument(OperandNo)))
Chris Lattnerb34b82e2009-11-23 16:44:43 +0000875 continue;
Jakub Staszak394e5a92013-08-24 14:16:00 +0000876
Igor Laevskyb89bfff2017-03-01 13:19:51 +0000877 // Call doesn't access memory through this operand, so we don't care
878 // if it aliases with Object.
Chandler Carruth81aa7122019-01-07 05:42:51 +0000879 if (Call->doesNotAccessMemory(OperandNo))
Igor Laevskyb89bfff2017-03-01 13:19:51 +0000880 continue;
881
Dan Gohmanb2143b62010-09-14 21:25:10 +0000882 // If this is a no-capture pointer argument, see if we can tell that it
Igor Laevskyb89bfff2017-03-01 13:19:51 +0000883 // is impossible to alias the pointer we're checking.
Chandler Carruth91468332015-09-09 17:55:00 +0000884 AliasResult AR =
885 getBestAAResults().alias(MemoryLocation(*CI), MemoryLocation(Object));
Alina Sbirlea9680c052017-12-21 21:41:53 +0000886 if (AR != MustAlias)
887 IsMustAlias = false;
Igor Laevskyb89bfff2017-03-01 13:19:51 +0000888 // Operand doesnt alias 'Object', continue looking for other aliases
889 if (AR == NoAlias)
890 continue;
891 // Operand aliases 'Object', but call doesn't modify it. Strengthen
892 // initial assumption and keep looking in case if there are more aliases.
Chandler Carruth81aa7122019-01-07 05:42:51 +0000893 if (Call->onlyReadsMemory(OperandNo)) {
Alina Sbirleaa2d30e92017-12-05 20:12:23 +0000894 Result = setRef(Result);
Igor Laevskyb89bfff2017-03-01 13:19:51 +0000895 continue;
Chris Lattnerb34b82e2009-11-23 16:44:43 +0000896 }
Igor Laevskyb89bfff2017-03-01 13:19:51 +0000897 // Operand aliases 'Object' but call only writes into it.
Chandler Carruth81aa7122019-01-07 05:42:51 +0000898 if (Call->doesNotReadMemory(OperandNo)) {
Alina Sbirleaa2d30e92017-12-05 20:12:23 +0000899 Result = setMod(Result);
Igor Laevskyb89bfff2017-03-01 13:19:51 +0000900 continue;
901 }
902 // This operand aliases 'Object' and call reads and writes into it.
Alina Sbirlea9680c052017-12-21 21:41:53 +0000903 // Setting ModRef will not yield an early return below, MustAlias is not
904 // used further.
Alina Sbirleac94e8962017-12-07 22:41:34 +0000905 Result = ModRefInfo::ModRef;
Igor Laevskyb89bfff2017-03-01 13:19:51 +0000906 break;
Chris Lattnerb34b82e2009-11-23 16:44:43 +0000907 }
Jakub Staszak394e5a92013-08-24 14:16:00 +0000908
Alina Sbirlea9680c052017-12-21 21:41:53 +0000909 // No operand aliases, reset Must bit. Add below if at least one aliases
910 // and all aliases found are MustAlias.
911 if (isNoModRef(Result))
912 IsMustAlias = false;
913
Igor Laevskyb89bfff2017-03-01 13:19:51 +0000914 // Early return if we improved mod ref information
Alina Sbirleab3ba8e92018-01-19 10:26:40 +0000915 if (!isModAndRefSet(Result)) {
916 if (isNoModRef(Result))
917 return ModRefInfo::NoModRef;
Alina Sbirlea9680c052017-12-21 21:41:53 +0000918 return IsMustAlias ? setMust(Result) : clearMust(Result);
Alina Sbirleab3ba8e92018-01-19 10:26:40 +0000919 }
Chris Lattner92e803c2009-11-22 16:05:05 +0000920 }
921
Chandler Carruth81aa7122019-01-07 05:42:51 +0000922 // If the call is to malloc or calloc, we can assume that it doesn't
Philip Reamesd8f0e4d2016-03-09 23:19:56 +0000923 // modify any IR visible value. This is only valid because we assume these
924 // routines do not read values visible in the IR. TODO: Consider special
925 // casing realloc and strdup routines which access only their arguments as
926 // well. Or alternatively, replace all of this with inaccessiblememonly once
Alina Sbirleaa2d30e92017-12-05 20:12:23 +0000927 // that's implemented fully.
Chandler Carruth81aa7122019-01-07 05:42:51 +0000928 if (isMallocOrCallocLikeFn(Call, &TLI)) {
Philip Reamesd8f0e4d2016-03-09 23:19:56 +0000929 // Be conservative if the accessed pointer may alias the allocation -
930 // fallback to the generic handling below.
Chandler Carruth81aa7122019-01-07 05:42:51 +0000931 if (getBestAAResults().alias(MemoryLocation(Call), Loc) == NoAlias)
Alina Sbirleac94e8962017-12-07 22:41:34 +0000932 return ModRefInfo::NoModRef;
Philip Reamesd8f0e4d2016-03-09 23:19:56 +0000933 }
934
Bryant Wongf7285122016-12-25 22:42:27 +0000935 // The semantics of memcpy intrinsics forbid overlap between their respective
936 // operands, i.e., source and destination of any given memcpy must no-alias.
937 // If Loc must-aliases either one of these two locations, then it necessarily
938 // no-aliases the other.
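  // As an illustrative sketch (operand names are hypothetical), for
  //   call void @llvm.memcpy.p0i8.p0i8.i64(i8* %dst, i8* %src, i64 16, i1 false)
  // a location that must-aliases %src can only be read by the call (Ref),
  // and a location that must-aliases %dst can only be written (Mod).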
Chandler Carruth81aa7122019-01-07 05:42:51 +0000939 if (auto *Inst = dyn_cast<AnyMemCpyInst>(Call)) {
Bryant Wongf7285122016-12-25 22:42:27 +0000940 AliasResult SrcAA, DestAA;
941
942 if ((SrcAA = getBestAAResults().alias(MemoryLocation::getForSource(Inst),
943 Loc)) == MustAlias)
944 // Loc is exactly the memcpy source thus disjoint from memcpy dest.
Alina Sbirleac94e8962017-12-07 22:41:34 +0000945 return ModRefInfo::Ref;
Bryant Wongf7285122016-12-25 22:42:27 +0000946 if ((DestAA = getBestAAResults().alias(MemoryLocation::getForDest(Inst),
947 Loc)) == MustAlias)
948 // The converse case.
Alina Sbirleac94e8962017-12-07 22:41:34 +0000949 return ModRefInfo::Mod;
Bryant Wongf7285122016-12-25 22:42:27 +0000950
951 // It's also possible for Loc to alias both src and dest, or neither.
Alina Sbirleac94e8962017-12-07 22:41:34 +0000952 ModRefInfo rv = ModRefInfo::NoModRef;
Bryant Wongf7285122016-12-25 22:42:27 +0000953 if (SrcAA != NoAlias)
Alina Sbirleaa2d30e92017-12-05 20:12:23 +0000954 rv = setRef(rv);
Bryant Wongf7285122016-12-25 22:42:27 +0000955 if (DestAA != NoAlias)
Alina Sbirleaa2d30e92017-12-05 20:12:23 +0000956 rv = setMod(rv);
Bryant Wongf7285122016-12-25 22:42:27 +0000957 return rv;
958 }
959
Hal Finkel8ef7b172014-07-25 21:13:35 +0000960 // While the assume intrinsic is marked as arbitrarily writing so that
961 // proper control dependencies will be maintained, it never aliases any
962 // particular memory location.
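  // A minimal sketch of the form involved (the operand is hypothetical):
  //   call void @llvm.assume(i1 %cond)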
Chandler Carruth81aa7122019-01-07 05:42:51 +0000963 if (isIntrinsicCall(Call, Intrinsic::assume))
Alina Sbirleac94e8962017-12-07 22:41:34 +0000964 return ModRefInfo::NoModRef;
Hal Finkel8ef7b172014-07-25 21:13:35 +0000965
Sanjoy Dasbbd902f2016-05-10 02:35:41 +0000966 // Like assumes, guard intrinsics are also marked as arbitrarily writing so
967 // that proper control dependencies are maintained, but they never mod any
968 // particular memory location.
969 //
970 // *Unlike* assumes, guard intrinsics are modeled as reading memory since the
971 // heap state at the point the guard is issued needs to be consistent in case
972 // the guard invokes the "deopt" continuation.
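  // Sketch of a guard call (operand names are hypothetical):
  //   call void (i1, ...) @llvm.experimental.guard(i1 %cond) [ "deopt"() ]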
Chandler Carruth81aa7122019-01-07 05:42:51 +0000973 if (isIntrinsicCall(Call, Intrinsic::experimental_guard))
Alina Sbirleac94e8962017-12-07 22:41:34 +0000974 return ModRefInfo::Ref;
Sanjoy Dasbbd902f2016-05-10 02:35:41 +0000975
Anna Thomas9cd49072016-08-09 17:18:05 +0000976 // Like assumes, invariant.start intrinsics were also marked as arbitrarily
977 // writing so that proper control dependencies are maintained but they never
978 // mod any particular memory location visible to the IR.
979 // *Unlike* assumes (which are now modeled as NoModRef), invariant.start
980 // intrinsic is now modeled as reading memory. This prevents hoisting the
981 // invariant.start intrinsic over stores. Consider:
982 // *ptr = 40;
983 // *ptr = 50;
984 // invariant_start(ptr)
985 // int val = *ptr;
986 // print(val);
987 //
988 // This cannot be transformed to:
989 //
990 // *ptr = 40;
991 // invariant_start(ptr)
992 // *ptr = 50;
993 // int val = *ptr;
994 // print(val);
995 //
996 // The transformation will cause the second store to be ignored (based on
997 // rules of invariant.start) and print 40, while the first program always
998 // prints 50.
Chandler Carruth81aa7122019-01-07 05:42:51 +0000999 if (isIntrinsicCall(Call, Intrinsic::invariant_start))
Alina Sbirleac94e8962017-12-07 22:41:34 +00001000 return ModRefInfo::Ref;
Anna Thomas9cd49072016-08-09 17:18:05 +00001001
Chandler Carruth91468332015-09-09 17:55:00 +00001002  // The AAResultBase base class has some smarts; let's use them.
Chandler Carruth81aa7122019-01-07 05:42:51 +00001003 return AAResultBase::getModRefInfo(Call, Loc);
Dan Gohman65924112010-09-08 01:32:20 +00001004}
Chris Lattnerdefa1c82008-06-16 06:30:22 +00001005
Chandler Carruth81aa7122019-01-07 05:42:51 +00001006ModRefInfo BasicAAResult::getModRefInfo(const CallBase *Call1,
1007 const CallBase *Call2) {
Hal Finkel8ef7b172014-07-25 21:13:35 +00001008 // While the assume intrinsic is marked as arbitrarily writing so that
1009 // proper control dependencies will be maintained, it never aliases any
1010 // particular memory location.
Chandler Carruth81aa7122019-01-07 05:42:51 +00001011 if (isIntrinsicCall(Call1, Intrinsic::assume) ||
1012 isIntrinsicCall(Call2, Intrinsic::assume))
Alina Sbirleac94e8962017-12-07 22:41:34 +00001013 return ModRefInfo::NoModRef;
Hal Finkel8ef7b172014-07-25 21:13:35 +00001014
Sanjoy Dasbbd902f2016-05-10 02:35:41 +00001015 // Like assumes, guard intrinsics are also marked as arbitrarily writing so
1016 // that proper control dependencies are maintained but they never mod any
1017 // particular memory location.
1018 //
1019 // *Unlike* assumes, guard intrinsics are modeled as reading memory since the
1020 // heap state at the point the guard is issued needs to be consistent in case
1021 // the guard invokes the "deopt" continuation.
1022
1023  // NB! This function is *not* commutative, so we special-case two
1024  // possibilities for guard intrinsics.
1025
Chandler Carruth81aa7122019-01-07 05:42:51 +00001026 if (isIntrinsicCall(Call1, Intrinsic::experimental_guard))
1027 return isModSet(createModRefInfo(getModRefBehavior(Call2)))
Alina Sbirleac94e8962017-12-07 22:41:34 +00001028 ? ModRefInfo::Ref
1029 : ModRefInfo::NoModRef;
Sanjoy Dasbbd902f2016-05-10 02:35:41 +00001030
Chandler Carruth81aa7122019-01-07 05:42:51 +00001031 if (isIntrinsicCall(Call2, Intrinsic::experimental_guard))
1032 return isModSet(createModRefInfo(getModRefBehavior(Call1)))
Alina Sbirleac94e8962017-12-07 22:41:34 +00001033 ? ModRefInfo::Mod
1034 : ModRefInfo::NoModRef;
Sanjoy Dasbbd902f2016-05-10 02:35:41 +00001035
Chandler Carruth91468332015-09-09 17:55:00 +00001036  // The AAResultBase base class has some smarts; let's use them.
Chandler Carruth81aa7122019-01-07 05:42:51 +00001037 return AAResultBase::getModRefInfo(Call1, Call2);
Hal Finkel8ef7b172014-07-25 21:13:35 +00001038}
1039
Chandler Carruthe29d3ef2015-08-06 08:17:06 +00001040/// Provide ad-hoc rules to disambiguate accesses through two GEP operators,
1041/// both having the exact same pointer operand.
NAKAMURA Takumie582c6f2017-07-11 02:31:51 +00001042static AliasResult aliasSameBasePointerGEPs(const GEPOperator *GEP1,
George Burgess IV5454da32018-10-09 02:14:33 +00001043 LocationSize MaybeV1Size,
NAKAMURA Takumie582c6f2017-07-11 02:31:51 +00001044 const GEPOperator *GEP2,
George Burgess IV5454da32018-10-09 02:14:33 +00001045 LocationSize MaybeV2Size,
NAKAMURA Takumie582c6f2017-07-11 02:31:51 +00001046 const DataLayout &DL) {
Piotr Padlewski9648b462018-05-03 11:03:01 +00001047 assert(GEP1->getPointerOperand()->stripPointerCastsAndInvariantGroups() ==
1048 GEP2->getPointerOperand()->stripPointerCastsAndInvariantGroups() &&
Sanjoy Das51ccb322017-04-18 22:00:54 +00001049 GEP1->getPointerOperandType() == GEP2->getPointerOperandType() &&
Ahmed Bougacha9c252162015-02-07 17:04:29 +00001050 "Expected GEPs with the same pointer operand");
1051
1052 // Try to determine whether GEP1 and GEP2 index through arrays, into structs,
1053 // such that the struct field accesses provably cannot alias.
1054 // We also need at least two indices (the pointer, and the struct field).
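  // For instance (an illustrative sketch):
  //   %a = getelementptr inbounds %struct.S, %struct.S* %p, i64 %i, i32 0
  //   %b = getelementptr inbounds %struct.S, %struct.S* %p, i64 %j, i32 1
  // Distinct constant field indices into the same struct type can prove
  // NoAlias even when the array indices %i and %j are unknown.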
Daniel Berlinaed42212016-06-01 18:55:32 +00001055 if (GEP1->getNumIndices() != GEP2->getNumIndices() ||
1056 GEP1->getNumIndices() < 2)
Chandler Carruth1e3557d2015-06-22 02:16:51 +00001057 return MayAlias;
Ahmed Bougacha9c252162015-02-07 17:04:29 +00001058
1059  // If we don't know the size of the accesses through both GEPs, we can't
1060  // prove that the struct fields accessed cannot alias.
George Burgess IVea46abe2018-10-10 21:28:44 +00001061 if (MaybeV1Size == LocationSize::unknown() ||
1062 MaybeV2Size == LocationSize::unknown())
Chandler Carruth1e3557d2015-06-22 02:16:51 +00001063 return MayAlias;
Ahmed Bougacha9c252162015-02-07 17:04:29 +00001064
George Burgess IVdef2c062018-10-09 03:18:56 +00001065 const uint64_t V1Size = MaybeV1Size.getValue();
1066 const uint64_t V2Size = MaybeV2Size.getValue();
George Burgess IV5454da32018-10-09 02:14:33 +00001067
Ahmed Bougacha9c252162015-02-07 17:04:29 +00001068 ConstantInt *C1 =
1069 dyn_cast<ConstantInt>(GEP1->getOperand(GEP1->getNumOperands() - 1));
1070 ConstantInt *C2 =
1071 dyn_cast<ConstantInt>(GEP2->getOperand(GEP2->getNumOperands() - 1));
1072
James Molloydc5a8f22015-10-22 13:28:18 +00001073 // If the last (struct) indices are constants and are equal, the other indices
1074  // might also be dynamically equal, so the GEPs can alias.
Hal Finkelccb51b92019-01-02 16:28:09 +00001075 if (C1 && C2) {
1076 unsigned BitWidth = std::max(C1->getBitWidth(), C2->getBitWidth());
1077 if (C1->getValue().sextOrSelf(BitWidth) ==
1078 C2->getValue().sextOrSelf(BitWidth))
1079 return MayAlias;
1080 }
Ahmed Bougacha9c252162015-02-07 17:04:29 +00001081
1082 // Find the last-indexed type of the GEP, i.e., the type you'd get if
1083 // you stripped the last index.
1084 // On the way, look at each indexed type. If there's something other
1085 // than an array, different indices can lead to different final types.
1086 SmallVector<Value *, 8> IntermediateIndices;
1087
1088 // Insert the first index; we don't need to check the type indexed
1089 // through it as it only drops the pointer indirection.
1090 assert(GEP1->getNumIndices() > 1 && "Not enough GEP indices to examine");
1091 IntermediateIndices.push_back(GEP1->getOperand(1));
1092
1093 // Insert all the remaining indices but the last one.
1094 // Also, check that they all index through arrays.
1095 for (unsigned i = 1, e = GEP1->getNumIndices() - 1; i != e; ++i) {
1096 if (!isa<ArrayType>(GetElementPtrInst::getIndexedType(
David Blaikie25e3d2d2015-03-30 21:41:43 +00001097 GEP1->getSourceElementType(), IntermediateIndices)))
Chandler Carruth1e3557d2015-06-22 02:16:51 +00001098 return MayAlias;
Ahmed Bougacha9c252162015-02-07 17:04:29 +00001099 IntermediateIndices.push_back(GEP1->getOperand(i + 1));
1100 }
1101
James Molloydc5a8f22015-10-22 13:28:18 +00001102 auto *Ty = GetElementPtrInst::getIndexedType(
1103 GEP1->getSourceElementType(), IntermediateIndices);
1104 StructType *LastIndexedStruct = dyn_cast<StructType>(Ty);
Ahmed Bougacha9c252162015-02-07 17:04:29 +00001105
James Molloydc5a8f22015-10-22 13:28:18 +00001106 if (isa<SequentialType>(Ty)) {
1107 // We know that:
1108 // - both GEPs begin indexing from the exact same pointer;
1109 // - the last indices in both GEPs are constants, indexing into a sequential
1110 // type (array or pointer);
1111 // - both GEPs only index through arrays prior to that.
1112 //
1113 // Because array indices greater than the number of elements are valid in
1114 // GEPs, unless we know the intermediate indices are identical between
1115 // GEP1 and GEP2 we cannot guarantee that the last indexed arrays don't
James Molloy9d4c1312015-10-23 14:17:03 +00001116 // partially overlap. We also need to check that the loaded size matches
1117 // the element size, otherwise we could still have overlap.
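    // For example (sizes are illustrative): in [8 x i16], a 4-byte access
    // at index 1 covers bytes 2-5 and overlaps a 2-byte access at index 2
    // (bytes 4-5), even though the last indices differ.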
1118 const uint64_t ElementSize =
1119 DL.getTypeStoreSize(cast<SequentialType>(Ty)->getElementType());
1120 if (V1Size != ElementSize || V2Size != ElementSize)
1121 return MayAlias;
1122
James Molloydc5a8f22015-10-22 13:28:18 +00001123 for (unsigned i = 0, e = GEP1->getNumIndices() - 1; i != e; ++i)
1124 if (GEP1->getOperand(i + 1) != GEP2->getOperand(i + 1))
1125 return MayAlias;
James Molloy9d4c1312015-10-23 14:17:03 +00001126
James Molloydc5a8f22015-10-22 13:28:18 +00001127    // Now we know that the array/pointer that GEP1 indexes into and the one
1128 // that GEP2 indexes into must either precisely overlap or be disjoint.
1129 // Because they cannot partially overlap and because fields in an array
1130 // cannot overlap, if we can prove the final indices are different between
1131 // GEP1 and GEP2, we can conclude GEP1 and GEP2 don't alias.
NAKAMURA Takumicc230b32017-07-11 02:31:54 +00001132
James Molloydc5a8f22015-10-22 13:28:18 +00001133 // If the last indices are constants, we've already checked they don't
1134 // equal each other so we can exit early.
1135 if (C1 && C2)
1136 return NoAlias;
Craig Topper52ffcca2017-06-15 17:16:56 +00001137 {
1138 Value *GEP1LastIdx = GEP1->getOperand(GEP1->getNumOperands() - 1);
1139 Value *GEP2LastIdx = GEP2->getOperand(GEP2->getNumOperands() - 1);
NAKAMURA Takumie582c6f2017-07-11 02:31:51 +00001140 if (isa<PHINode>(GEP1LastIdx) || isa<PHINode>(GEP2LastIdx)) {
Craig Topper52ffcca2017-06-15 17:16:56 +00001141 // If one of the indices is a PHI node, be safe and only use
1142 // computeKnownBits so we don't make any assumptions about the
1143 // relationships between the two indices. This is important if we're
1144 // asking about values from different loop iterations. See PR32314.
1145 // TODO: We may be able to change the check so we only do this when
1146 // we definitely looked through a PHINode.
Craig Topper6de0dc02017-06-22 19:04:14 +00001147 if (GEP1LastIdx != GEP2LastIdx &&
1148 GEP1LastIdx->getType() == GEP2LastIdx->getType()) {
1149 KnownBits Known1 = computeKnownBits(GEP1LastIdx, DL);
1150 KnownBits Known2 = computeKnownBits(GEP2LastIdx, DL);
1151 if (Known1.Zero.intersects(Known2.One) ||
1152 Known1.One.intersects(Known2.Zero))
1153 return NoAlias;
1154 }
Craig Topper52ffcca2017-06-15 17:16:56 +00001155 } else if (isKnownNonEqual(GEP1LastIdx, GEP2LastIdx, DL))
1156 return NoAlias;
1157 }
Chandler Carruth1e3557d2015-06-22 02:16:51 +00001158 return MayAlias;
James Molloydc5a8f22015-10-22 13:28:18 +00001159 } else if (!LastIndexedStruct || !C1 || !C2) {
1160 return MayAlias;
1161 }
Ahmed Bougacha9c252162015-02-07 17:04:29 +00001162
Hal Finkelccb51b92019-01-02 16:28:09 +00001163 if (C1->getValue().getActiveBits() > 64 ||
1164 C2->getValue().getActiveBits() > 64)
1165 return MayAlias;
1166
Ahmed Bougacha9c252162015-02-07 17:04:29 +00001167 // We know that:
1168 // - both GEPs begin indexing from the exact same pointer;
1169 // - the last indices in both GEPs are constants, indexing into a struct;
1170 // - said indices are different, hence, the pointed-to fields are different;
1171 // - both GEPs only index through arrays prior to that.
1172 //
1173 // This lets us determine that the struct that GEP1 indexes into and the
1174 // struct that GEP2 indexes into must either precisely overlap or be
1175 // completely disjoint. Because they cannot partially overlap, indexing into
1176 // different non-overlapping fields of the struct will never alias.
1177
1178 // Therefore, the only remaining thing needed to show that both GEPs can't
1179 // alias is that the fields are not overlapping.
1180 const StructLayout *SL = DL.getStructLayout(LastIndexedStruct);
1181 const uint64_t StructSize = SL->getSizeInBytes();
1182 const uint64_t V1Off = SL->getElementOffset(C1->getZExtValue());
1183 const uint64_t V2Off = SL->getElementOffset(C2->getZExtValue());
1184
1185 auto EltsDontOverlap = [StructSize](uint64_t V1Off, uint64_t V1Size,
1186 uint64_t V2Off, uint64_t V2Size) {
1187 return V1Off < V2Off && V1Off + V1Size <= V2Off &&
1188 ((V2Off + V2Size <= StructSize) ||
1189 (V2Off + V2Size - StructSize <= V1Off));
1190 };
1191
1192 if (EltsDontOverlap(V1Off, V1Size, V2Off, V2Size) ||
1193 EltsDontOverlap(V2Off, V2Size, V1Off, V1Size))
Chandler Carruth1e3557d2015-06-22 02:16:51 +00001194 return NoAlias;
Ahmed Bougacha9c252162015-02-07 17:04:29 +00001195
Chandler Carruth1e3557d2015-06-22 02:16:51 +00001196 return MayAlias;
Ahmed Bougacha9c252162015-02-07 17:04:29 +00001197}
1198
Michael Kuperstein92631b82016-05-25 22:23:08 +00001199// If we have (a) a GEP and (b) a pointer based on an alloca, and the
1200// beginning of the object the GEP points to would have a negative offset
1201// with respect to the alloca, that means the GEP cannot alias pointer (b).
1202// Note that the pointer based on the alloca may not be a GEP. For
1203// example, it may be the alloca itself.
Michael Kupersteinb80d8da2016-05-26 19:30:49 +00001204// The same applies if (b) is based on a GlobalVariable. Note that just being
1205// based on isIdentifiedObject() is not enough - we need an identified object
1206// that does not permit access to negative offsets. For example, a negative
1207// offset from a noalias argument or call can be inbounds w.r.t the actual
1208// underlying object.
Michael Kuperstein92631b82016-05-25 22:23:08 +00001209//
1210// For example, consider:
1211//
1212// struct { int f0, int f1, ...} foo;
1213// foo alloca;
1214// foo* random = bar(alloca);
1215// int *f0 = &alloca.f0
1216// int *f1 = &random->f1;
1217//
1218// Which is lowered, approximately, to:
1219//
1220// %alloca = alloca %struct.foo
1221// %random = call %struct.foo* @random(%struct.foo* %alloca)
1222// %f0 = getelementptr inbounds %struct, %struct.foo* %alloca, i32 0, i32 0
1223// %f1 = getelementptr inbounds %struct, %struct.foo* %random, i32 0, i32 1
1224//
1225// Assume %f1 and %f0 alias. Then %f1 would point into the object allocated
1226// by %alloca. Since the %f1 GEP is inbounds, that means %random must also
1227// point into the same object. But since %f0 points to the beginning of %alloca,
1228// the highest %f1 can be is (%alloca + 3). This means %random cannot be
1229// higher than (%alloca - 1), and so is not inbounds, a contradiction.
1230bool BasicAAResult::isGEPBaseAtNegativeOffset(const GEPOperator *GEPOp,
George Burgess IV3bf82982018-05-25 21:16:58 +00001231 const DecomposedGEP &DecompGEP, const DecomposedGEP &DecompObject,
George Burgess IV5454da32018-10-09 02:14:33 +00001232 LocationSize MaybeObjectAccessSize) {
Michael Kupersteinb80d8da2016-05-26 19:30:49 +00001233 // If the object access size is unknown, or the GEP isn't inbounds, bail.
George Burgess IVea46abe2018-10-10 21:28:44 +00001234 if (MaybeObjectAccessSize == LocationSize::unknown() || !GEPOp->isInBounds())
Michael Kuperstein92631b82016-05-25 22:23:08 +00001235 return false;
1236
George Burgess IVdef2c062018-10-09 03:18:56 +00001237 const uint64_t ObjectAccessSize = MaybeObjectAccessSize.getValue();
George Burgess IV5454da32018-10-09 02:14:33 +00001238
Michael Kupersteinb80d8da2016-05-26 19:30:49 +00001239  // We need the object to be an alloca or a global variable, and want to know
1240 // the offset of the pointer from the object precisely, so no variable
1241 // indices are allowed.
1242 if (!(isa<AllocaInst>(DecompObject.Base) ||
1243 isa<GlobalVariable>(DecompObject.Base)) ||
1244 !DecompObject.VarIndices.empty())
Michael Kuperstein92631b82016-05-25 22:23:08 +00001245 return false;
1246
Hal Finkelccb51b92019-01-02 16:28:09 +00001247 APInt ObjectBaseOffset = DecompObject.StructOffset +
1248 DecompObject.OtherOffset;
Michael Kuperstein92631b82016-05-25 22:23:08 +00001249
1250 // If the GEP has no variable indices, we know the precise offset
Shiva Chen0692f372018-04-16 01:58:39 +00001251 // from the base, then use it. If the GEP has variable indices,
1252 // we can't get exact GEP offset to identify pointer alias. So return
1253 // false in that case.
1254 if (!DecompGEP.VarIndices.empty())
1255 return false;
Hal Finkelccb51b92019-01-02 16:28:09 +00001256
1257 APInt GEPBaseOffset = DecompGEP.StructOffset;
Shiva Chen0692f372018-04-16 01:58:39 +00001258 GEPBaseOffset += DecompGEP.OtherOffset;
Michael Kuperstein92631b82016-05-25 22:23:08 +00001259
Hal Finkelccb51b92019-01-02 16:28:09 +00001260 return GEPBaseOffset.sge(ObjectBaseOffset + (int64_t)ObjectAccessSize);
Michael Kuperstein92631b82016-05-25 22:23:08 +00001261}
1262
Chandler Carruthe29d3ef2015-08-06 08:17:06 +00001263/// Provides a bunch of ad-hoc rules to disambiguate a GEP instruction against
1264/// another pointer.
Chris Lattner539c9b92009-11-26 02:11:08 +00001265///
Chandler Carruthe29d3ef2015-08-06 08:17:06 +00001266/// We know that V1 is a GEP, but we don't know anything about V2.
1267/// UnderlyingV1 is GetUnderlyingObject(GEP1, DL), UnderlyingV2 is the same for
1268/// V2.
George Burgess IV3bf82982018-05-25 21:16:58 +00001269AliasResult
1270BasicAAResult::aliasGEP(const GEPOperator *GEP1, LocationSize V1Size,
1271 const AAMDNodes &V1AAInfo, const Value *V2,
1272 LocationSize V2Size, const AAMDNodes &V2AAInfo,
1273 const Value *UnderlyingV1, const Value *UnderlyingV2) {
Michael Kuperstein92631b82016-05-25 22:23:08 +00001274 DecomposedGEP DecompGEP1, DecompGEP2;
Hal Finkelccb51b92019-01-02 16:28:09 +00001275 unsigned MaxPointerSize = getMaxPointerSize(DL);
1276 DecompGEP1.StructOffset = DecompGEP1.OtherOffset = APInt(MaxPointerSize, 0);
1277 DecompGEP2.StructOffset = DecompGEP2.OtherOffset = APInt(MaxPointerSize, 0);
1278
Michael Kuperstein92631b82016-05-25 22:23:08 +00001279 bool GEP1MaxLookupReached =
Daniel Jasper8de3a542016-12-19 08:22:17 +00001280 DecomposeGEPExpression(GEP1, DecompGEP1, DL, &AC, DT);
Michael Kuperstein92631b82016-05-25 22:23:08 +00001281 bool GEP2MaxLookupReached =
Daniel Jasper8de3a542016-12-19 08:22:17 +00001282 DecomposeGEPExpression(V2, DecompGEP2, DL, &AC, DT);
Chris Lattnerd84eb912009-11-26 02:17:34 +00001283
Hal Finkelccb51b92019-01-02 16:28:09 +00001284 APInt GEP1BaseOffset = DecompGEP1.StructOffset + DecompGEP1.OtherOffset;
1285 APInt GEP2BaseOffset = DecompGEP2.StructOffset + DecompGEP2.OtherOffset;
Michael Kuperstein92631b82016-05-25 22:23:08 +00001286
1287 assert(DecompGEP1.Base == UnderlyingV1 && DecompGEP2.Base == UnderlyingV2 &&
1288 "DecomposeGEPExpression returned a result different from "
1289 "GetUnderlyingObject");
1290
1291 // If the GEP's offset relative to its base is such that the base would
1292 // fall below the start of the object underlying V2, then the GEP and V2
1293 // cannot alias.
1294 if (!GEP1MaxLookupReached && !GEP2MaxLookupReached &&
1295 isGEPBaseAtNegativeOffset(GEP1, DecompGEP1, DecompGEP2, V2Size))
1296 return NoAlias;
Arnold Schwaighofer02903262012-09-06 14:31:51 +00001297  // If we have two gep instructions with must-aliasing or non-aliasing base
1298  // pointers, figure out if the indices of the GEPs tell us anything about
1299  // the derived pointer.
Chris Lattner539c9b92009-11-26 02:11:08 +00001300 if (const GEPOperator *GEP2 = dyn_cast<GEPOperator>(V2)) {
Michael Kuperstein92631b82016-05-25 22:23:08 +00001301 // Check for the GEP base being at a negative offset, this time in the other
1302 // direction.
1303 if (!GEP1MaxLookupReached && !GEP2MaxLookupReached &&
1304 isGEPBaseAtNegativeOffset(GEP2, DecompGEP2, DecompGEP1, V1Size))
1305 return NoAlias;
Arnold Schwaighofer742dbc12013-03-26 18:07:53 +00001306 // Do the base pointers alias?
Chandler Carruth2cdca0c42015-06-17 07:21:38 +00001307 AliasResult BaseAlias =
George Burgess IVea46abe2018-10-10 21:28:44 +00001308 aliasCheck(UnderlyingV1, LocationSize::unknown(), AAMDNodes(),
1309 UnderlyingV2, LocationSize::unknown(), AAMDNodes());
Arnold Schwaighofer742dbc12013-03-26 18:07:53 +00001310
Arnold Schwaighofer02903262012-09-06 14:31:51 +00001311 // Check for geps of non-aliasing underlying pointers where the offsets are
1312 // identical.
Arnold Schwaighofer742dbc12013-03-26 18:07:53 +00001313 if ((BaseAlias == MayAlias) && V1Size == V2Size) {
Arnold Schwaighofer02903262012-09-06 14:31:51 +00001314 // Do the base pointers alias assuming type and size.
Chandler Carruth940e92b2015-08-06 07:57:58 +00001315 AliasResult PreciseBaseAlias = aliasCheck(UnderlyingV1, V1Size, V1AAInfo,
1316 UnderlyingV2, V2Size, V2AAInfo);
Arnold Schwaighofer02903262012-09-06 14:31:51 +00001317 if (PreciseBaseAlias == NoAlias) {
1318 // See if the computed offset from the common pointer tells us about the
1319 // relation of the resulting pointer.
Arnold Schwaighofer23463c92014-03-26 21:30:19 +00001320        // If the max search depth is reached, the result is undefined
1321 if (GEP2MaxLookupReached || GEP1MaxLookupReached)
1322 return MayAlias;
1323
Arnold Schwaighofer02903262012-09-06 14:31:51 +00001324 // Same offsets.
1325 if (GEP1BaseOffset == GEP2BaseOffset &&
Michael Kuperstein92631b82016-05-25 22:23:08 +00001326 DecompGEP1.VarIndices == DecompGEP2.VarIndices)
Arnold Schwaighofer02903262012-09-06 14:31:51 +00001327 return NoAlias;
Arnold Schwaighofer02903262012-09-06 14:31:51 +00001328 }
1329 }
Jakub Staszak394e5a92013-08-24 14:16:00 +00001330
Chris Lattnerd84eb912009-11-26 02:17:34 +00001331 // If we get a No or May, then return it immediately, no amount of analysis
1332 // will improve this situation.
Nuno Lopesdb16a7c2017-08-08 16:13:24 +00001333 if (BaseAlias != MustAlias) {
1334 assert(BaseAlias == NoAlias || BaseAlias == MayAlias);
Chandler Carruth940e92b2015-08-06 07:57:58 +00001335 return BaseAlias;
Nuno Lopesdb16a7c2017-08-08 16:13:24 +00001336 }
Jakub Staszak394e5a92013-08-24 14:16:00 +00001337
Chris Lattnerd84eb912009-11-26 02:17:34 +00001338 // Otherwise, we have a MustAlias. Since the base pointers alias each other
1339 // exactly, see if the computed offset from the common pointer tells us
1340 // about the relation of the resulting pointer.
Ahmed Bougacha9c252162015-02-07 17:04:29 +00001341 // If we know the two GEPs are based off of the exact same pointer (and not
1342 // just the same underlying object), see if that tells us anything about
1343 // the resulting pointers.
Piotr Padlewski9648b462018-05-03 11:03:01 +00001344 if (GEP1->getPointerOperand()->stripPointerCastsAndInvariantGroups() ==
1345 GEP2->getPointerOperand()->stripPointerCastsAndInvariantGroups() &&
Sanjoy Das51ccb322017-04-18 22:00:54 +00001346 GEP1->getPointerOperandType() == GEP2->getPointerOperandType()) {
Chandler Carruth91468332015-09-09 17:55:00 +00001347 AliasResult R = aliasSameBasePointerGEPs(GEP1, V1Size, GEP2, V2Size, DL);
Ahmed Bougacha9c252162015-02-07 17:04:29 +00001348 // If we couldn't find anything interesting, don't abandon just yet.
1349 if (R != MayAlias)
1350 return R;
1351 }
1352
Sanjay Patel1b9680c2016-01-17 23:13:48 +00001353 // If the max search depth is reached, the result is undefined
Arnold Schwaighofer23463c92014-03-26 21:30:19 +00001354 if (GEP2MaxLookupReached || GEP1MaxLookupReached)
1355 return MayAlias;
Jakub Staszak394e5a92013-08-24 14:16:00 +00001356
Chris Lattnerd84eb912009-11-26 02:17:34 +00001357 // Subtract the GEP2 pointer from the GEP1 pointer to find out their
1358 // symbolic difference.
1359 GEP1BaseOffset -= GEP2BaseOffset;
Michael Kuperstein92631b82016-05-25 22:23:08 +00001360 GetIndexDifference(DecompGEP1.VarIndices, DecompGEP2.VarIndices);
Jakub Staszak394e5a92013-08-24 14:16:00 +00001361
Chris Lattnerd84eb912009-11-26 02:17:34 +00001362 } else {
1363 // Check to see if these two pointers are related by the getelementptr
1364 // instruction. If one pointer is a GEP with a non-zero index of the other
1365 // pointer, we know they cannot alias.
Chris Lattner53692502009-11-26 16:52:32 +00001366
1367 // If both accesses are unknown size, we can't do anything useful here.
George Burgess IVea46abe2018-10-10 21:28:44 +00001368 if (V1Size == LocationSize::unknown() && V2Size == LocationSize::unknown())
Chris Lattnerd84eb912009-11-26 02:17:34 +00001369 return MayAlias;
Chris Lattnerb307c882003-12-11 22:44:13 +00001370
George Burgess IVea46abe2018-10-10 21:28:44 +00001371 AliasResult R =
1372 aliasCheck(UnderlyingV1, LocationSize::unknown(), AAMDNodes(), V2,
1373 LocationSize::unknown(), V2AAInfo, nullptr, UnderlyingV2);
Nuno Lopesb95a5702017-08-08 21:25:26 +00001374 if (R != MustAlias) {
Chris Lattnerd84eb912009-11-26 02:17:34 +00001375      // If V2 may alias the GEP base pointer, conservatively return MayAlias.
1376      // If V2 is known not to alias the GEP base pointer, then the two values
Mehdi Amini887da0d2017-01-27 16:12:22 +00001377 // cannot alias per GEP semantics: "Any memory access must be done through
1378 // a pointer value associated with an address range of the memory access,
1379 // otherwise the behavior is undefined.".
Nuno Lopesb95a5702017-08-08 21:25:26 +00001380 assert(R == NoAlias || R == MayAlias);
Chris Lattnerd84eb912009-11-26 02:17:34 +00001381 return R;
Nuno Lopesb95a5702017-08-08 21:25:26 +00001382 }
Chris Lattnerb307c882003-12-11 22:44:13 +00001383
Arnold Schwaighofer23463c92014-03-26 21:30:19 +00001384    // If the max search depth is reached, the result is undefined
1385 if (GEP1MaxLookupReached)
1386 return MayAlias;
Chris Lattnerb307c882003-12-11 22:44:13 +00001387 }
Jakub Staszak394e5a92013-08-24 14:16:00 +00001388
Chris Lattnerd84eb912009-11-26 02:17:34 +00001389  // In the two-GEP case, if there is no difference in the offsets of the
1390  // computed pointers, the resultant pointers are a must alias. This
Sanjay Patel1b9680c2016-01-17 23:13:48 +00001391  // happens when we have two lexically identical GEPs (for example).
Chris Lattnerd501c132003-02-26 19:41:54 +00001392 //
Chris Lattnerd84eb912009-11-26 02:17:34 +00001393 // In the other case, if we have getelementptr <ptr>, 0, 0, 0, 0, ... and V2
1394  // must-aliases the GEP, the end result is a must alias also.
Michael Kuperstein92631b82016-05-25 22:23:08 +00001395 if (GEP1BaseOffset == 0 && DecompGEP1.VarIndices.empty())
Evan Cheng681a33e2009-10-14 06:41:49 +00001396 return MustAlias;
Evan Cheng094f04b2009-10-13 18:42:04 +00001397
Eli Friedman81ac8dd2011-09-08 02:23:31 +00001398 // If there is a constant difference between the pointers, but the difference
1399 // is less than the size of the associated memory object, then we know
1400 // that the objects are partially overlapping. If the difference is
1401 // greater, we know they do not overlap.
Michael Kuperstein92631b82016-05-25 22:23:08 +00001402 if (GEP1BaseOffset != 0 && DecompGEP1.VarIndices.empty()) {
Hal Finkelccb51b92019-01-02 16:28:09 +00001403 if (GEP1BaseOffset.sge(0)) {
George Burgess IVea46abe2018-10-10 21:28:44 +00001404 if (V2Size != LocationSize::unknown()) {
Hal Finkelccb51b92019-01-02 16:28:09 +00001405 if (GEP1BaseOffset.ult(V2Size.getValue()))
Eli Friedman81ac8dd2011-09-08 02:23:31 +00001406 return PartialAlias;
1407 return NoAlias;
1408 }
1409 } else {
Arnold Schwaighofer89630712014-01-16 04:53:18 +00001410 // We have the situation where:
1411 // + +
1412 // | BaseOffset |
1413 // ---------------->|
1414 // |-->V1Size |-------> V2Size
1415 // GEP1 V2
1416 // We need to know that V2Size is not unknown, otherwise we might have
1417 // stripped a gep with negative index ('gep <ptr>, -1, ...).
George Burgess IVea46abe2018-10-10 21:28:44 +00001418 if (V1Size != LocationSize::unknown() &&
1419 V2Size != LocationSize::unknown()) {
Hal Finkelccb51b92019-01-02 16:28:09 +00001420 if ((-GEP1BaseOffset).ult(V1Size.getValue()))
Eli Friedman81ac8dd2011-09-08 02:23:31 +00001421 return PartialAlias;
1422 return NoAlias;
1423 }
1424 }
Dan Gohman0f7f1942010-12-13 22:50:24 +00001425 }
1426
Michael Kuperstein92631b82016-05-25 22:23:08 +00001427 if (!DecompGEP1.VarIndices.empty()) {
Hal Finkelccb51b92019-01-02 16:28:09 +00001428 APInt Modulo(MaxPointerSize, 0);
Quentin Colombet37d8ade2015-08-31 22:32:47 +00001429 bool AllPositive = true;
Michael Kuperstein92631b82016-05-25 22:23:08 +00001430 for (unsigned i = 0, e = DecompGEP1.VarIndices.size(); i != e; ++i) {
Quentin Colombet37d8ade2015-08-31 22:32:47 +00001431
1432 // Try to distinguish something like &A[i][1] against &A[42][0].
1433 // Grab the least significant bit set in any of the scales. We
1434 // don't need std::abs here (even if the scale's negative) as we'll
1435 // be ^'ing Modulo with itself later.
Hal Finkelccb51b92019-01-02 16:28:09 +00001436 Modulo |= DecompGEP1.VarIndices[i].Scale;
Quentin Colombet37d8ade2015-08-31 22:32:47 +00001437
1438 if (AllPositive) {
1439 // If the Value could change between cycles, then any reasoning about
1440 // the Value this cycle may not hold in the next cycle. We'll just
1441 // give up if we can't determine conditions that hold for every cycle:
Michael Kuperstein92631b82016-05-25 22:23:08 +00001442 const Value *V = DecompGEP1.VarIndices[i].V;
Quentin Colombet37d8ade2015-08-31 22:32:47 +00001443
Craig Topper32a237d2017-05-15 06:39:41 +00001444 KnownBits Known = computeKnownBits(V, DL, 0, &AC, nullptr, DT);
1445 bool SignKnownZero = Known.isNonNegative();
1446 bool SignKnownOne = Known.isNegative();
Quentin Colombet37d8ade2015-08-31 22:32:47 +00001447
1448 // Zero-extension widens the variable, and so forces the sign
1449 // bit to zero.
Michael Kuperstein92631b82016-05-25 22:23:08 +00001450 bool IsZExt = DecompGEP1.VarIndices[i].ZExtBits > 0 || isa<ZExtInst>(V);
Quentin Colombet37d8ade2015-08-31 22:32:47 +00001451 SignKnownZero |= IsZExt;
1452 SignKnownOne &= !IsZExt;
1453
1454 // If the variable begins with a zero then we know it's
1455 // positive, regardless of whether the value is signed or
1456 // unsigned.
Hal Finkelccb51b92019-01-02 16:28:09 +00001457 APInt Scale = DecompGEP1.VarIndices[i].Scale;
Quentin Colombet37d8ade2015-08-31 22:32:47 +00001458 AllPositive =
Hal Finkelccb51b92019-01-02 16:28:09 +00001459 (SignKnownZero && Scale.sge(0)) || (SignKnownOne && Scale.slt(0));
Quentin Colombet37d8ade2015-08-31 22:32:47 +00001460 }
1461 }
1462
Eli Friedman184166d2011-09-08 02:37:07 +00001463 Modulo = Modulo ^ (Modulo & (Modulo - 1));
Eli Friedman81ac8dd2011-09-08 02:23:31 +00001464
Eli Friedman184166d2011-09-08 02:37:07 +00001465 // We can compute the difference between the two addresses
1466 // mod Modulo. Check whether that difference guarantees that the
1467 // two locations do not alias.
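    // E.g., for &A[i][1] vs. &A[42][0] with i32 elements and an inner
    // dimension of 2 (a sketch): every scale is a multiple of 8, the
    // constant offsets differ by 4 mod 8, and two 4-byte accesses
    // therefore cannot overlap.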
Hal Finkelccb51b92019-01-02 16:28:09 +00001468 APInt ModOffset = GEP1BaseOffset & (Modulo - 1);
George Burgess IVea46abe2018-10-10 21:28:44 +00001469 if (V1Size != LocationSize::unknown() &&
Hal Finkelccb51b92019-01-02 16:28:09 +00001470 V2Size != LocationSize::unknown() && ModOffset.uge(V2Size.getValue()) &&
1471 (Modulo - ModOffset).uge(V1Size.getValue()))
Eli Friedman184166d2011-09-08 02:37:07 +00001472 return NoAlias;
Quentin Colombet37d8ade2015-08-31 22:32:47 +00001473
1474 // If we know all the variables are positive, then GEP1 >= GEP1BasePtr.
1475 // If GEP1BasePtr > V2 (GEP1BaseOffset > 0) then we know the pointers
1476 // don't alias if V2Size can fit in the gap between V2 and GEP1BasePtr.
Hal Finkelccb51b92019-01-02 16:28:09 +00001477 if (AllPositive && GEP1BaseOffset.sgt(0) &&
George Burgess IVea46abe2018-10-10 21:28:44 +00001478 V2Size != LocationSize::unknown() &&
Hal Finkelccb51b92019-01-02 16:28:09 +00001479 GEP1BaseOffset.uge(V2Size.getValue()))
Quentin Colombet37d8ade2015-08-31 22:32:47 +00001480 return NoAlias;
1481
Michael Kuperstein92631b82016-05-25 22:23:08 +00001482 if (constantOffsetHeuristic(DecompGEP1.VarIndices, V1Size, V2Size,
Daniel Jasper8de3a542016-12-19 08:22:17 +00001483 GEP1BaseOffset, &AC, DT))
Quentin Colombet37d8ade2015-08-31 22:32:47 +00001484 return NoAlias;
Eli Friedman184166d2011-09-08 02:37:07 +00001485 }
Eli Friedman81ac8dd2011-09-08 02:23:31 +00001486
Dan Gohman5f1312c2011-06-04 06:50:18 +00001487 // Statically, we can see that the base objects are the same, but the
1488 // pointers have dynamic offsets which we can't resolve. And none of our
1489 // little tricks above worked.
Michael Kruse52ebe032017-06-21 18:25:37 +00001490 return MayAlias;
Evan Cheng094f04b2009-10-13 18:42:04 +00001491}
1492
Chandler Carruth1e3557d2015-06-22 02:16:51 +00001493static AliasResult MergeAliasResults(AliasResult A, AliasResult B) {
Dan Gohman965fefa2011-06-03 20:17:36 +00001494 // If the results agree, take it.
1495 if (A == B)
1496 return A;
1497 // A mix of PartialAlias and MustAlias is PartialAlias.
Chandler Carruth1e3557d2015-06-22 02:16:51 +00001498 if ((A == PartialAlias && B == MustAlias) ||
1499 (B == PartialAlias && A == MustAlias))
1500 return PartialAlias;
Dan Gohman965fefa2011-06-03 20:17:36 +00001501 // Otherwise, we don't know anything.
Chandler Carruth1e3557d2015-06-22 02:16:51 +00001502 return MayAlias;
Dan Gohman965fefa2011-06-03 20:17:36 +00001503}
1504
Chandler Carruthe29d3ef2015-08-06 08:17:06 +00001505/// Provides a bunch of ad-hoc rules to disambiguate a Select instruction
1506/// against another.
George Burgess IV3bf82982018-05-25 21:16:58 +00001507AliasResult BasicAAResult::aliasSelect(const SelectInst *SI,
1508 LocationSize SISize,
Chandler Carruth91468332015-09-09 17:55:00 +00001509 const AAMDNodes &SIAAInfo,
George Burgess IV3bf82982018-05-25 21:16:58 +00001510 const Value *V2, LocationSize V2Size,
Ehsan Amiria3c08032016-08-12 16:05:03 +00001511 const AAMDNodes &V2AAInfo,
1512 const Value *UnderV2) {
Dan Gohman6665b0e2009-10-26 21:55:43 +00001513 // If the values are Selects with the same condition, we can do a more precise
1514 // check: just check for aliases between the values on corresponding arms.
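  // E.g. (an illustrative sketch):
  //   %s1 = select i1 %c, i8* %a, i8* %b
  //   %s2 = select i1 %c, i8* %p, i8* %q
  // %s1 and %s2 are NoAlias when both arm pairs (%a, %p) and (%b, %q) are
  // NoAlias.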
1515 if (const SelectInst *SI2 = dyn_cast<SelectInst>(V2))
1516 if (SI->getCondition() == SI2->getCondition()) {
Chandler Carruth940e92b2015-08-06 07:57:58 +00001517 AliasResult Alias = aliasCheck(SI->getTrueValue(), SISize, SIAAInfo,
1518 SI2->getTrueValue(), V2Size, V2AAInfo);
Dan Gohman6665b0e2009-10-26 21:55:43 +00001519 if (Alias == MayAlias)
1520 return MayAlias;
1521 AliasResult ThisAlias =
Chandler Carruth940e92b2015-08-06 07:57:58 +00001522 aliasCheck(SI->getFalseValue(), SISize, SIAAInfo,
1523 SI2->getFalseValue(), V2Size, V2AAInfo);
Dan Gohman965fefa2011-06-03 20:17:36 +00001524 return MergeAliasResults(ThisAlias, Alias);
Dan Gohman6665b0e2009-10-26 21:55:43 +00001525 }
1526
1527 // If both arms of the Select node NoAlias or MustAlias V2, then returns
1528 // NoAlias / MustAlias. Otherwise, returns MayAlias.
1529 AliasResult Alias =
Ehsan Amiria3c08032016-08-12 16:05:03 +00001530 aliasCheck(V2, V2Size, V2AAInfo, SI->getTrueValue(),
1531 SISize, SIAAInfo, UnderV2);
Dan Gohman6665b0e2009-10-26 21:55:43 +00001532 if (Alias == MayAlias)
1533 return MayAlias;
Dan Gohman50f424c2010-06-28 21:16:52 +00001534
Dan Gohman6665b0e2009-10-26 21:55:43 +00001535 AliasResult ThisAlias =
Ehsan Amiria3c08032016-08-12 16:05:03 +00001536 aliasCheck(V2, V2Size, V2AAInfo, SI->getFalseValue(), SISize, SIAAInfo,
1537 UnderV2);
Dan Gohman965fefa2011-06-03 20:17:36 +00001538 return MergeAliasResults(ThisAlias, Alias);
Dan Gohman6665b0e2009-10-26 21:55:43 +00001539}
1540
Chandler Carruthe29d3ef2015-08-06 08:17:06 +00001541/// Provide a bunch of ad-hoc rules to disambiguate a PHI instruction against
1542/// another.
George Burgess IV3bf82982018-05-25 21:16:58 +00001543AliasResult BasicAAResult::aliasPHI(const PHINode *PN, LocationSize PNSize,
Chandler Carruth91468332015-09-09 17:55:00 +00001544 const AAMDNodes &PNAAInfo, const Value *V2,
George Burgess IV3bf82982018-05-25 21:16:58 +00001545 LocationSize V2Size,
1546 const AAMDNodes &V2AAInfo,
Ehsan Amiria3c08032016-08-12 16:05:03 +00001547 const Value *UnderV2) {
Arnold Schwaighofer1bdb3202014-01-02 03:31:36 +00001548 // Track phi nodes we have visited. We use this information when we determine
1549 // value equivalence.
1550 VisitedPhiBBs.insert(PN->getParent());
1551
Dan Gohman6665b0e2009-10-26 21:55:43 +00001552 // If the values are PHIs in the same block, we can do a more precise
1553 // as well as efficient check: just check for aliases between the values
1554 // on corresponding edges.
1555 if (const PHINode *PN2 = dyn_cast<PHINode>(V2))
1556 if (PN2->getParent() == PN->getParent()) {
Chandler Carruth4d7ed392015-06-17 07:18:54 +00001557 LocPair Locs(MemoryLocation(PN, PNSize, PNAAInfo),
1558 MemoryLocation(V2, V2Size, V2AAInfo));
Arnold Schwaighofer3d5f96e2012-09-06 14:41:53 +00001559 if (PN > V2)
1560 std::swap(Locs.first, Locs.second);
Arnold Schwaighofer2b475922012-12-10 23:02:41 +00001561 // Analyse the PHIs' inputs under the assumption that the PHIs are
1562 // NoAlias.
1563 // If the PHIs are May/MustAlias there must be (recursively) an input
1564 // operand from outside the PHIs' cycle that is MayAlias/MustAlias or
1565 // there must be an operation on the PHIs within the PHIs' value cycle
1566 // that causes a MayAlias.
1567 // Pretend the phis do not alias.
1568 AliasResult Alias = NoAlias;
1569 assert(AliasCache.count(Locs) &&
1570 "There must exist an entry for the phi node");
1571 AliasResult OrigAliasResult = AliasCache[Locs];
1572 AliasCache[Locs] = NoAlias;
Arnold Schwaighofer3d5f96e2012-09-06 14:41:53 +00001573
Hal Finkelc37f5022012-11-17 02:33:15 +00001574 for (unsigned i = 0, e = PN->getNumIncomingValues(); i != e; ++i) {
Dan Gohman6665b0e2009-10-26 21:55:43 +00001575 AliasResult ThisAlias =
Chandler Carruth940e92b2015-08-06 07:57:58 +00001576 aliasCheck(PN->getIncomingValue(i), PNSize, PNAAInfo,
1577 PN2->getIncomingValueForBlock(PN->getIncomingBlock(i)),
1578 V2Size, V2AAInfo);
Dan Gohman965fefa2011-06-03 20:17:36 +00001579 Alias = MergeAliasResults(ThisAlias, Alias);
1580 if (Alias == MayAlias)
1581 break;
Dan Gohman6665b0e2009-10-26 21:55:43 +00001582 }
Arnold Schwaighofer3d5f96e2012-09-06 14:41:53 +00001583
1584 // Reset if speculation failed.
Arnold Schwaighofer2b475922012-12-10 23:02:41 +00001585 if (Alias != NoAlias)
Arnold Schwaighofer3d5f96e2012-09-06 14:41:53 +00001586 AliasCache[Locs] = OrigAliasResult;
1587
Dan Gohman6665b0e2009-10-26 21:55:43 +00001588 return Alias;
1589 }
1590
Chandler Carruth940e92b2015-08-06 07:57:58 +00001591 SmallVector<Value *, 4> V1Srcs;
Tobias Edler von Koch3659b8a2015-07-15 19:32:22 +00001592 bool isRecursive = false;
John Brawndf2b9392018-07-30 11:52:08 +00001593 if (PV) {
1594 // If we have PhiValues then use it to get the underlying phi values.
1595 const PhiValues::ValueSet &PhiValueSet = PV->getValuesForPhi(PN);
1596    // If we have more phi values than the search depth, then return MayAlias
1597    // conservatively to avoid compile-time explosion. The worst possible case
1598    // is if both sides are PHI nodes, in which case this is O(m x n) time
1599    // where 'm' and 'n' are the number of PHI sources.
1600 if (PhiValueSet.size() > MaxLookupSearchDepth)
Evan Cheng50a59142009-10-13 22:02:20 +00001601 return MayAlias;
John Brawndf2b9392018-07-30 11:52:08 +00001602 // Add the values to V1Srcs
1603 for (Value *PV1 : PhiValueSet) {
1604 if (EnableRecPhiAnalysis) {
1605 if (GEPOperator *PV1GEP = dyn_cast<GEPOperator>(PV1)) {
1606 // Check whether the incoming value is a GEP that advances the pointer
1607 // result of this PHI node (e.g. in a loop). If this is the case, we
1608 // would recurse and always get a MayAlias. Handle this case specially
1609 // below.
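          // A typical recursive pattern looks like (names are illustrative):
          //   %p = phi i8* [ %base, %entry ], [ %p.next, %loop ]
          //   %p.next = getelementptr inbounds i8, i8* %p, i64 1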
1610 if (PV1GEP->getPointerOperand() == PN && PV1GEP->getNumIndices() == 1 &&
1611 isa<ConstantInt>(PV1GEP->idx_begin())) {
1612 isRecursive = true;
1613 continue;
1614 }
Tobias Edler von Koch3659b8a2015-07-15 19:32:22 +00001615 }
1616 }
Evan Cheng50a59142009-10-13 22:02:20 +00001617 V1Srcs.push_back(PV1);
John Brawndf2b9392018-07-30 11:52:08 +00001618 }
1619 } else {
1620    // If we don't have PhiValues then just look at the operands of the phi itself.
1621    // FIXME: Remove this once we can guarantee that we have PhiValues always.
1622 SmallPtrSet<Value *, 4> UniqueSrc;
1623 for (Value *PV1 : PN->incoming_values()) {
1624 if (isa<PHINode>(PV1))
1625      // If any of the sources is itself a PHI, return MayAlias conservatively
1626      // to avoid compile-time explosion. The worst possible case is if both
1627      // sides are PHI nodes, in which case this is O(m x n) time where 'm'
1628      // and 'n' are the number of PHI sources.
1629 return MayAlias;
1630
1631 if (EnableRecPhiAnalysis)
1632 if (GEPOperator *PV1GEP = dyn_cast<GEPOperator>(PV1)) {
1633 // Check whether the incoming value is a GEP that advances the pointer
1634 // result of this PHI node (e.g. in a loop). If this is the case, we
1635 // would recurse and always get a MayAlias. Handle this case specially
1636 // below.
1637 if (PV1GEP->getPointerOperand() == PN && PV1GEP->getNumIndices() == 1 &&
1638 isa<ConstantInt>(PV1GEP->idx_begin())) {
1639 isRecursive = true;
1640 continue;
1641 }
1642 }
1643
1644 if (UniqueSrc.insert(PV1).second)
1645 V1Srcs.push_back(PV1);
1646 }
Evan Cheng50a59142009-10-13 22:02:20 +00001647 }
1648
John Brawndf2b9392018-07-30 11:52:08 +00001649  // If V1Srcs is empty, the phi has no underlying non-phi value. This
1650  // should only be possible in blocks unreachable from the entry block,
1651  // but return MayAlias just in case.
1652 if (V1Srcs.empty())
1653 return MayAlias;
1654
Tobias Edler von Koch3659b8a2015-07-15 19:32:22 +00001655 // If this PHI node is recursive, set the size of the accessed memory to
1656 // unknown to represent all the possible values the GEP could advance the
1657 // pointer to.
1658 if (isRecursive)
George Burgess IVea46abe2018-10-10 21:28:44 +00001659 PNSize = LocationSize::unknown();
Tobias Edler von Koch3659b8a2015-07-15 19:32:22 +00001660
Chandler Carruth940e92b2015-08-06 07:57:58 +00001661 AliasResult Alias =
Ehsan Amiria3c08032016-08-12 16:05:03 +00001662 aliasCheck(V2, V2Size, V2AAInfo, V1Srcs[0],
1663 PNSize, PNAAInfo, UnderV2);
Tobias Edler von Koch3659b8a2015-07-15 19:32:22 +00001664
Evan Chengd83c2ca2009-10-14 05:22:03 +00001665 // Early exit if the check of the first PHI source against V2 is MayAlias.
1666 // Other results are not possible.
1667 if (Alias == MayAlias)
1668 return MayAlias;
1669
Evan Cheng50a59142009-10-13 22:02:20 +00001670 // If all sources of the PHI node NoAlias or MustAlias V2, then returns
1671 // NoAlias / MustAlias. Otherwise, returns MayAlias.
Evan Cheng50a59142009-10-13 22:02:20 +00001672 for (unsigned i = 1, e = V1Srcs.size(); i != e; ++i) {
1673 Value *V = V1Srcs[i];
Dan Gohman6665b0e2009-10-26 21:55:43 +00001674
Chandler Carruth940e92b2015-08-06 07:57:58 +00001675 AliasResult ThisAlias =
Ehsan Amiria3c08032016-08-12 16:05:03 +00001676 aliasCheck(V2, V2Size, V2AAInfo, V, PNSize, PNAAInfo, UnderV2);
Dan Gohman965fefa2011-06-03 20:17:36 +00001677 Alias = MergeAliasResults(ThisAlias, Alias);
1678 if (Alias == MayAlias)
1679 break;
Evan Cheng50a59142009-10-13 22:02:20 +00001680 }
1681
1682 return Alias;
1683}
1684
David Majnemera56e1d62015-11-17 08:15:08 +00001685/// Provides a bunch of ad-hoc rules to disambiguate in common cases, such as
Chandler Carruthe29d3ef2015-08-06 08:17:06 +00001686/// array references.
George Burgess IV3bf82982018-05-25 21:16:58 +00001687AliasResult BasicAAResult::aliasCheck(const Value *V1, LocationSize V1Size,
Chandler Carruth91468332015-09-09 17:55:00 +00001688 AAMDNodes V1AAInfo, const Value *V2,
George Burgess IV3bf82982018-05-25 21:16:58 +00001689 LocationSize V2Size, AAMDNodes V2AAInfo,
Ehsan Amiria3c08032016-08-12 16:05:03 +00001690 const Value *O1, const Value *O2) {
Dan Gohmanb57b6f12010-04-08 18:11:50 +00001691 // If either of the memory references is empty, it doesn't matter what the
1692 // pointer values are.
George Burgess IVf50772a2018-12-22 18:23:21 +00001693 if (V1Size.isZero() || V2Size.isZero())
Dan Gohmanb57b6f12010-04-08 18:11:50 +00001694 return NoAlias;
1695
Evan Cheng094f04b2009-10-13 18:42:04 +00001696 // Strip off any casts if they exist.
Piotr Padlewski9648b462018-05-03 11:03:01 +00001697 V1 = V1->stripPointerCastsAndInvariantGroups();
1698 V2 = V2->stripPointerCastsAndInvariantGroups();
Evan Cheng094f04b2009-10-13 18:42:04 +00001699
Daniel Berlin7a5c0e52015-05-05 18:10:49 +00001700 // If V1 or V2 is undef, the result is NoAlias because we can always pick a
1701 // value for undef that aliases nothing in the program.
1702 if (isa<UndefValue>(V1) || isa<UndefValue>(V2))
1703 return NoAlias;
1704
Evan Cheng094f04b2009-10-13 18:42:04 +00001705 // Are we checking for alias of the same value?
Sanjay Patel1b9680c2016-01-17 23:13:48 +00001706 // Because we look 'through' phi nodes, we could look at "Value" pointers from
Arnold Schwaighofer30361822014-01-03 05:47:03 +00001707 // different iterations. We must therefore make sure that this is not the
1708 // case. The function isValueEqualInPotentialCycles ensures that this cannot
1709 // happen by looking at the visited phi nodes and making sure they cannot
1710 // reach the value.
1711 if (isValueEqualInPotentialCycles(V1, V2))
1712 return MustAlias;
Evan Cheng094f04b2009-10-13 18:42:04 +00001713
Duncan Sands1df98592010-02-16 11:11:14 +00001714 if (!V1->getType()->isPointerTy() || !V2->getType()->isPointerTy())
Chandler Carruth940e92b2015-08-06 07:57:58 +00001715 return NoAlias; // Scalars cannot alias each other
Evan Cheng094f04b2009-10-13 18:42:04 +00001716
1717 // Figure out what objects these things are pointing to if we can.
Ehsan Amiria3c08032016-08-12 16:05:03 +00001718 if (O1 == nullptr)
1719 O1 = GetUnderlyingObject(V1, DL, MaxLookupSearchDepth);
1720
1721 if (O2 == nullptr)
1722 O2 = GetUnderlyingObject(V2, DL, MaxLookupSearchDepth);
Evan Cheng094f04b2009-10-13 18:42:04 +00001723
Dan Gohmanf75ef662009-11-09 19:29:11 +00001724  // Null values don't point to any object in address spaces where null is
1725  // not a valid pointer, so they don't alias any other pointer.
1726 if (const ConstantPointerNull *CPN = dyn_cast<ConstantPointerNull>(O1))
Manoj Guptac6da6862018-07-09 22:27:23 +00001727 if (!NullPointerIsDefined(&F, CPN->getType()->getAddressSpace()))
Dan Gohmanf75ef662009-11-09 19:29:11 +00001728 return NoAlias;
1729 if (const ConstantPointerNull *CPN = dyn_cast<ConstantPointerNull>(O2))
Manoj Guptac6da6862018-07-09 22:27:23 +00001730 if (!NullPointerIsDefined(&F, CPN->getType()->getAddressSpace()))
Dan Gohmanf75ef662009-11-09 19:29:11 +00001731 return NoAlias;
1732
Evan Cheng094f04b2009-10-13 18:42:04 +00001733 if (O1 != O2) {
Sanjay Patel1b9680c2016-01-17 23:13:48 +00001734 // If V1/V2 point to two different objects, we know that we have no alias.
Dan Gohman9e86f432010-07-07 14:27:09 +00001735 if (isIdentifiedObject(O1) && isIdentifiedObject(O2))
Evan Cheng094f04b2009-10-13 18:42:04 +00001736 return NoAlias;
Nick Lewycky20162ac2009-11-14 06:15:14 +00001737
1738 // Constant pointers can't alias with non-const isIdentifiedObject objects.
Dan Gohman9e86f432010-07-07 14:27:09 +00001739 if ((isa<Constant>(O1) && isIdentifiedObject(O2) && !isa<Constant>(O2)) ||
1740 (isa<Constant>(O2) && isIdentifiedObject(O1) && !isa<Constant>(O1)))
Nick Lewycky20162ac2009-11-14 06:15:14 +00001741 return NoAlias;
1742
Michael Kuperstein9f5de6d2013-05-28 08:17:48 +00001743 // Function arguments can't alias with things that are known to be
1744    // unambiguously identified at the function level.
1745 if ((isa<Argument>(O1) && isIdentifiedFunctionLocal(O2)) ||
1746 (isa<Argument>(O2) && isIdentifiedFunctionLocal(O1)))
Dan Gohman21de4c02010-07-01 20:08:40 +00001747 return NoAlias;
Evan Cheng094f04b2009-10-13 18:42:04 +00001748
Dan Gohmanb8c86a02010-07-07 14:30:04 +00001749 // If one pointer is the result of a call/invoke or load and the other is a
1750 // non-escaping local object within the same function, then we know the
1751 // object couldn't escape to a point where the call could return it.
1752 //
1753 // Note that if the pointers are in different functions, there are a
1754 // variety of complications. A call with a nocapture argument may still
1755    // temporarily store the nocapture argument's value in a temporary memory
1756 // location if that memory location doesn't escape. Or it may pass a
1757 // nocapture value to other functions as long as they don't capture it.
1758 if (isEscapeSource(O1) && isNonEscapingLocalObject(O2))
1759 return NoAlias;
1760 if (isEscapeSource(O2) && isNonEscapingLocalObject(O1))
1761 return NoAlias;
1762 }
1763
Evan Cheng094f04b2009-10-13 18:42:04 +00001764 // If the size of one access is larger than the entire object on the other
1765 // side, then we know such behavior is undefined and can assume no alias.
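  // For instance (sizes are illustrative), a 16-byte access cannot legally
  // target an 8-byte alloca, so such a pair can be treated as NoAlias.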
Manoj Guptac6da6862018-07-09 22:27:23 +00001766 bool NullIsValidLocation = NullPointerIsDefined(&F);
George Burgess IV91f4bc632018-10-10 06:39:40 +00001767 if ((V1Size.isPrecise() && isObjectSmallerThan(O2, V1Size.getValue(), DL, TLI,
1768 NullIsValidLocation)) ||
1769 (V2Size.isPrecise() && isObjectSmallerThan(O1, V2Size.getValue(), DL, TLI,
1770 NullIsValidLocation)))
Chandler Carruth91468332015-09-09 17:55:00 +00001771 return NoAlias;
Jakub Staszak394e5a92013-08-24 14:16:00 +00001772
Dan Gohman1fc18d72011-06-04 00:31:50 +00001773 // Check the cache before climbing up use-def chains. This also terminates
1774 // otherwise infinitely recursive queries.
Chandler Carruth4d7ed392015-06-17 07:18:54 +00001775 LocPair Locs(MemoryLocation(V1, V1Size, V1AAInfo),
1776 MemoryLocation(V2, V2Size, V2AAInfo));
Dan Gohman1fc18d72011-06-04 00:31:50 +00001777 if (V1 > V2)
1778 std::swap(Locs.first, Locs.second);
1779 std::pair<AliasCacheTy::iterator, bool> Pair =
Chandler Carruth940e92b2015-08-06 07:57:58 +00001780 AliasCache.insert(std::make_pair(Locs, MayAlias));
Dan Gohman1fc18d72011-06-04 00:31:50 +00001781 if (!Pair.second)
1782 return Pair.first->second;
1783
Chris Lattner4e91ee72009-11-26 02:13:03 +00001784 // FIXME: This isn't aggressively handling alias(GEP, PHI) for example: if the
1785 // GEP can't simplify, we don't even look at the PHI cases.
Chris Lattner391d23b2009-10-17 23:48:54 +00001786 if (!isa<GEPOperator>(V1) && isa<GEPOperator>(V2)) {
    std::swap(V1, V2);
    std::swap(V1Size, V2Size);
    std::swap(O1, O2);
    std::swap(V1AAInfo, V2AAInfo);
  }
  if (const GEPOperator *GV1 = dyn_cast<GEPOperator>(V1)) {
    AliasResult Result =
        aliasGEP(GV1, V1Size, V1AAInfo, V2, V2Size, V2AAInfo, O1, O2);
    if (Result != MayAlias)
      return AliasCache[Locs] = Result;
  }

  if (isa<PHINode>(V2) && !isa<PHINode>(V1)) {
    std::swap(V1, V2);
    std::swap(O1, O2);
    std::swap(V1Size, V2Size);
    std::swap(V1AAInfo, V2AAInfo);
  }
  if (const PHINode *PN = dyn_cast<PHINode>(V1)) {
    AliasResult Result = aliasPHI(PN, V1Size, V1AAInfo,
                                  V2, V2Size, V2AAInfo, O2);
    if (Result != MayAlias)
      return AliasCache[Locs] = Result;
  }

  if (isa<SelectInst>(V2) && !isa<SelectInst>(V1)) {
    std::swap(V1, V2);
    std::swap(O1, O2);
    std::swap(V1Size, V2Size);
    std::swap(V1AAInfo, V2AAInfo);
  }
  if (const SelectInst *S1 = dyn_cast<SelectInst>(V1)) {
    AliasResult Result =
        aliasSelect(S1, V1Size, V1AAInfo, V2, V2Size, V2AAInfo, O2);
    if (Result != MayAlias)
      return AliasCache[Locs] = Result;
  }

  // If both pointers are pointing into the same object and one of them
  // accesses the entire object, then the accesses must overlap in some way.
  if (O1 == O2)
    if (V1Size.isPrecise() && V2Size.isPrecise() &&
        (isObjectSize(O1, V1Size.getValue(), DL, TLI, NullIsValidLocation) ||
         isObjectSize(O2, V2Size.getValue(), DL, TLI, NullIsValidLocation)))
      return AliasCache[Locs] = PartialAlias;

  // Recurse back into the best AA results we have, potentially with refined
  // memory locations. We have already ensured that BasicAA has a MayAlias
  // cache result for these, so any recursion back into BasicAA won't loop.
  AliasResult Result = getBestAAResults().alias(Locs.first, Locs.second);
  return AliasCache[Locs] = Result;
}

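// Illustrative sketch of the "entire object" rule above (the IR below is
// hypothetical, not taken from a test case):
//
//   %obj = alloca [8 x i8]
//   %p   = getelementptr inbounds [8 x i8], [8 x i8]* %obj, i64 0, i64 %i
//
// An access covering all 8 bytes of %obj and a 1-byte access through %p share
// the same underlying object, and the former spans the whole object, so the
// two must overlap: the query returns PartialAlias even though the runtime
// value of %i is unknown.
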
/// Check whether two Values can be considered equivalent.
///
/// In addition to pointer equivalence of \p V and \p V2, this checks that
/// they cannot be part of a cycle in the value graph, by looking at all
/// visited phi nodes and making sure that the phis cannot reach the value.
/// We have to do this because we are looking through phi nodes (that is, we
/// say noalias(V, phi(VA, VB)) if noalias(V, VA) and noalias(V, VB)).
bool BasicAAResult::isValueEqualInPotentialCycles(const Value *V,
                                                  const Value *V2) {
  if (V != V2)
    return false;

  const Instruction *Inst = dyn_cast<Instruction>(V);
  if (!Inst)
    return true;

  if (VisitedPhiBBs.empty())
    return true;

  if (VisitedPhiBBs.size() > MaxNumPhiBBsValueReachabilityCheck)
    return false;

  // Make sure that the visited phis cannot reach the Value. This ensures that
  // the Values cannot come from different iterations of a potential cycle the
  // phi nodes could be involved in.
  for (auto *P : VisitedPhiBBs)
    if (isPotentiallyReachable(&P->front(), Inst, DT, LI))
      return false;

  return true;
}

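// Illustrative example of the cycle problem this check guards against
// (hypothetical IR):
//
//   loop:
//     %p      = phi i8* [ %base, %entry ], [ %p.next, %loop ]
//     %p.next = getelementptr inbounds i8, i8* %p, i64 1
//
// After looking through the phi, the analysis may end up comparing %p.next
// against itself, yet the two occurrences can stem from different loop
// iterations and therefore denote different addresses. Because the phi's
// block can reach %p.next, the check above conservatively answers false.
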
/// Computes the symbolic difference between two decomposed GEPs.
///
/// Dest and Src are the variable indices from two decomposed GetElementPtr
/// instructions GEP1 and GEP2 which have common base pointers.
void BasicAAResult::GetIndexDifference(
    SmallVectorImpl<VariableGEPIndex> &Dest,
    const SmallVectorImpl<VariableGEPIndex> &Src) {
  if (Src.empty())
    return;

  for (unsigned i = 0, e = Src.size(); i != e; ++i) {
    const Value *V = Src[i].V;
    unsigned ZExtBits = Src[i].ZExtBits, SExtBits = Src[i].SExtBits;
    APInt Scale = Src[i].Scale;

    // Find V in Dest. This is N^2, but pointer indices almost never have
    // more than a few variable indices.
    for (unsigned j = 0, e = Dest.size(); j != e; ++j) {
      if (!isValueEqualInPotentialCycles(Dest[j].V, V) ||
          Dest[j].ZExtBits != ZExtBits || Dest[j].SExtBits != SExtBits)
        continue;

      // If we found it, subtract Scale from the matching entry in Dest. If
      // the difference would be zero, remove the entry entirely.
      if (Dest[j].Scale != Scale)
        Dest[j].Scale -= Scale;
      else
        Dest.erase(Dest.begin() + j);
      Scale = 0;
      break;
    }

    // If we didn't consume this entry, add it to the end of the Dest list.
    if (!!Scale) {
      VariableGEPIndex Entry = {V, ZExtBits, SExtBits, -Scale};
      Dest.push_back(Entry);
    }
  }
}

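// Worked example for GetIndexDifference (hypothetical values, extensions
// elided): with
//
//   Dest = { (%x, Scale 4), (%y, Scale 2) }   Src = { (%x, Scale 4) }
//
// the %x entries cancel exactly, so that entry is erased and Dest becomes
// { (%y, Scale 2) }. Had Src carried (%x, Scale 1) instead, the %x entry
// would survive with Scale 3; and any Src entry with no match in Dest is
// appended with its scale negated.
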
bool BasicAAResult::constantOffsetHeuristic(
    const SmallVectorImpl<VariableGEPIndex> &VarIndices,
    LocationSize MaybeV1Size, LocationSize MaybeV2Size, APInt BaseOffset,
    AssumptionCache *AC, DominatorTree *DT) {
  if (VarIndices.size() != 2 || MaybeV1Size == LocationSize::unknown() ||
      MaybeV2Size == LocationSize::unknown())
    return false;

  const uint64_t V1Size = MaybeV1Size.getValue();
  const uint64_t V2Size = MaybeV2Size.getValue();

  const VariableGEPIndex &Var0 = VarIndices[0], &Var1 = VarIndices[1];

  if (Var0.ZExtBits != Var1.ZExtBits || Var0.SExtBits != Var1.SExtBits ||
      Var0.Scale != -Var1.Scale)
    return false;

  unsigned Width = Var1.V->getType()->getIntegerBitWidth();

  // We'll strip off the extensions of Var0 and Var1 and do another round
  // of GetLinearExpression decomposition. For example, if Var0 is
  // zext(%x + 1), we should get V0 == %x and V0Offset == 1.

  APInt V0Scale(Width, 0), V0Offset(Width, 0), V1Scale(Width, 0),
      V1Offset(Width, 0);
  bool NSW = true, NUW = true;
  unsigned V0ZExtBits = 0, V0SExtBits = 0, V1ZExtBits = 0, V1SExtBits = 0;
  const Value *V0 = GetLinearExpression(Var0.V, V0Scale, V0Offset, V0ZExtBits,
                                        V0SExtBits, DL, 0, AC, DT, NSW, NUW);
  NSW = true;
  NUW = true;
  const Value *V1 = GetLinearExpression(Var1.V, V1Scale, V1Offset, V1ZExtBits,
                                        V1SExtBits, DL, 0, AC, DT, NSW, NUW);

  if (V0Scale != V1Scale || V0ZExtBits != V1ZExtBits ||
      V0SExtBits != V1SExtBits || !isValueEqualInPotentialCycles(V0, V1))
    return false;

  // We have a hit - Var0 and Var1 only differ by a constant offset!

  // If we've been sext'ed then zext'd, the maximum difference between Var0
  // and Var1 is possible to calculate, but we're just interested in the
  // absolute minimum difference between the two. The minimum distance may
  // occur due to wrapping; consider "add i3 %i, 5": if %i == 7 then 7 + 5
  // mod 8 == 4, and so the minimum distance between %i and %i + 5 is 3.
  APInt MinDiff = V0Offset - V1Offset, Wrapped = -MinDiff;
  MinDiff = APIntOps::umin(MinDiff, Wrapped);
  APInt MinDiffBytes =
      MinDiff.zextOrTrunc(Var0.Scale.getBitWidth()) * Var0.Scale.abs();

  // We can't definitely say whether GEP1 is before or after V2 due to
  // wrapping arithmetic (i.e. for some values of GEP1 and V2 GEP1 < V2, and
  // for other values GEP1 > V2). We'll therefore only declare NoAlias if
  // both V1Size and V2Size can fit in the MinDiffBytes gap.
  return MinDiffBytes.uge(V1Size + BaseOffset.abs()) &&
         MinDiffBytes.uge(V2Size + BaseOffset.abs());
}

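// Worked example of the wrapping arithmetic above (values chosen purely for
// illustration): with i3 indices, V0Offset == 5 and V1Offset == 0 give
// MinDiff == 5 and Wrapped == -5 == 3 (mod 8), so MinDiff becomes 3. If
// Var0.Scale is 2, MinDiffBytes is 6, and with BaseOffset == 0 the heuristic
// reports NoAlias only when both V1Size and V2Size are at most 6 bytes.
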
//===----------------------------------------------------------------------===//
// BasicAliasAnalysis Pass
//===----------------------------------------------------------------------===//

AnalysisKey BasicAA::Key;

BasicAAResult BasicAA::run(Function &F, FunctionAnalysisManager &AM) {
  return BasicAAResult(F.getParent()->getDataLayout(), F,
                       AM.getResult<TargetLibraryAnalysis>(F),
                       AM.getResult<AssumptionAnalysis>(F),
                       &AM.getResult<DominatorTreeAnalysis>(F),
                       AM.getCachedResult<LoopAnalysis>(F),
                       AM.getCachedResult<PhiValuesAnalysis>(F));
}

BasicAAWrapperPass::BasicAAWrapperPass() : FunctionPass(ID) {
  initializeBasicAAWrapperPassPass(*PassRegistry::getPassRegistry());
}

char BasicAAWrapperPass::ID = 0;

void BasicAAWrapperPass::anchor() {}

INITIALIZE_PASS_BEGIN(BasicAAWrapperPass, "basicaa",
                      "Basic Alias Analysis (stateless AA impl)", false, true)
INITIALIZE_PASS_DEPENDENCY(AssumptionCacheTracker)
INITIALIZE_PASS_DEPENDENCY(DominatorTreeWrapperPass)
INITIALIZE_PASS_DEPENDENCY(TargetLibraryInfoWrapperPass)
INITIALIZE_PASS_END(BasicAAWrapperPass, "basicaa",
                    "Basic Alias Analysis (stateless AA impl)", false, true)

FunctionPass *llvm::createBasicAAWrapperPass() {
  return new BasicAAWrapperPass();
}

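// Minimal usage sketch (assuming the legacy pass manager; the surrounding
// setup is illustrative, not prescribed by this file). Most clients get
// BasicAA indirectly via AAResultsWrapperPass rather than querying the
// wrapper directly:
//
//   legacy::PassManager PM;
//   PM.add(createBasicAAWrapperPass());
//   PM.add(createAAResultsWrapperPass()); // aggregates BasicAA with others
//   PM.run(M);
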
bool BasicAAWrapperPass::runOnFunction(Function &F) {
  auto &ACT = getAnalysis<AssumptionCacheTracker>();
  auto &TLIWP = getAnalysis<TargetLibraryInfoWrapperPass>();
  auto &DTWP = getAnalysis<DominatorTreeWrapperPass>();
  auto *LIWP = getAnalysisIfAvailable<LoopInfoWrapperPass>();
  auto *PVWP = getAnalysisIfAvailable<PhiValuesWrapperPass>();

  Result.reset(new BasicAAResult(F.getParent()->getDataLayout(), F,
                                 TLIWP.getTLI(), ACT.getAssumptionCache(F),
                                 &DTWP.getDomTree(),
                                 LIWP ? &LIWP->getLoopInfo() : nullptr,
                                 PVWP ? &PVWP->getResult() : nullptr));

  return false;
}

void BasicAAWrapperPass::getAnalysisUsage(AnalysisUsage &AU) const {
  AU.setPreservesAll();
  AU.addRequired<AssumptionCacheTracker>();
  AU.addRequired<DominatorTreeWrapperPass>();
  AU.addRequired<TargetLibraryInfoWrapperPass>();
  AU.addUsedIfAvailable<PhiValuesWrapperPass>();
}

BasicAAResult llvm::createLegacyPMBasicAAResult(Pass &P, Function &F) {
  return BasicAAResult(
      F.getParent()->getDataLayout(), F,
      P.getAnalysis<TargetLibraryInfoWrapperPass>().getTLI(),
      P.getAnalysis<AssumptionCacheTracker>().getAssumptionCache(F));
}
Chandler Carruth91468332015-09-09 17:55:00 +00002035}