//===-- Analysis.cpp - CodeGen LLVM IR Analysis Utilities ----------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file defines several CodeGen-specific LLVM IR analysis utilities.
//
//===----------------------------------------------------------------------===//

#include "llvm/CodeGen/Analysis.h"
#include "llvm/Analysis/ValueTracking.h"
#include "llvm/CodeGen/MachineFunction.h"
#include "llvm/CodeGen/TargetInstrInfo.h"
#include "llvm/CodeGen/TargetLowering.h"
#include "llvm/CodeGen/TargetSubtargetInfo.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/DerivedTypes.h"
#include "llvm/IR/Function.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/LLVMContext.h"
#include "llvm/IR/Module.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/MathExtras.h"
#include "llvm/Transforms/Utils/GlobalStatus.h"

using namespace llvm;

/// Compute the linearized index of a member in a nested aggregate/struct/array
/// by recursing and accumulating CurIndex as long as there are indices in the
/// index list.
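///
/// For illustration: given the aggregate type {i32, {i8, i16}, float}, the
/// scalar leaves are numbered i32 = 0, i8 = 1, i16 = 2, float = 3, so the
/// index list {1, 1} (addressing the i16) linearizes to 2.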
unsigned llvm::ComputeLinearIndex(Type *Ty,
                                  const unsigned *Indices,
                                  const unsigned *IndicesEnd,
                                  unsigned CurIndex) {
  // Base case: We're done.
  if (Indices && Indices == IndicesEnd)
    return CurIndex;

  // Given a struct type, recursively traverse the elements.
  if (StructType *STy = dyn_cast<StructType>(Ty)) {
    for (StructType::element_iterator EB = STy->element_begin(),
                                      EI = EB,
                                      EE = STy->element_end();
         EI != EE; ++EI) {
      if (Indices && *Indices == unsigned(EI - EB))
        return ComputeLinearIndex(*EI, Indices+1, IndicesEnd, CurIndex);
      CurIndex = ComputeLinearIndex(*EI, nullptr, nullptr, CurIndex);
    }
    assert(!Indices && "Unexpected out-of-bounds index");
    return CurIndex;
  }
  // Given an array type, recursively traverse the elements.
  else if (ArrayType *ATy = dyn_cast<ArrayType>(Ty)) {
    Type *EltTy = ATy->getElementType();
    unsigned NumElts = ATy->getNumElements();
    // Compute the linear offset produced by stepping over one array element.
    unsigned EltLinearOffset = ComputeLinearIndex(EltTy, nullptr, nullptr, 0);
    if (Indices) {
      assert(*Indices < NumElts && "Unexpected out-of-bounds index");
      // If the index is inside the array, compute the offset of the requested
      // element and recurse into it with the rest of the indices list.
      CurIndex += EltLinearOffset * *Indices;
      return ComputeLinearIndex(EltTy, Indices+1, IndicesEnd, CurIndex);
    }
    CurIndex += EltLinearOffset * NumElts;
    return CurIndex;
  }
  // We haven't found the type we're looking for, so keep searching.
  return CurIndex + 1;
}

/// ComputeValueVTs - Given an LLVM IR type, compute a sequence of
/// EVTs that represent all the individual underlying
/// non-aggregate types that comprise it.
///
/// If Offsets is non-null, it points to a vector to be filled in
/// with the in-memory offsets of each of the individual values.
///
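/// For illustration, assuming a typical DataLayout where i32 and float are
/// both four bytes: the type {i32, [2 x float]} yields ValueVTs = {i32, f32,
/// f32} and, if Offsets is provided, Offsets = {0, 4, 8}.
///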
void llvm::ComputeValueVTs(const TargetLowering &TLI, const DataLayout &DL,
                           Type *Ty, SmallVectorImpl<EVT> &ValueVTs,
                           SmallVectorImpl<uint64_t> *Offsets,
                           uint64_t StartingOffset) {
  // Given a struct type, recursively traverse the elements.
  if (StructType *STy = dyn_cast<StructType>(Ty)) {
    const StructLayout *SL = DL.getStructLayout(STy);
    for (StructType::element_iterator EB = STy->element_begin(),
                                      EI = EB,
                                      EE = STy->element_end();
         EI != EE; ++EI)
      ComputeValueVTs(TLI, DL, *EI, ValueVTs, Offsets,
                      StartingOffset + SL->getElementOffset(EI - EB));
    return;
  }
  // Given an array type, recursively traverse the elements.
  if (ArrayType *ATy = dyn_cast<ArrayType>(Ty)) {
    Type *EltTy = ATy->getElementType();
    uint64_t EltSize = DL.getTypeAllocSize(EltTy);
    for (unsigned i = 0, e = ATy->getNumElements(); i != e; ++i)
      ComputeValueVTs(TLI, DL, EltTy, ValueVTs, Offsets,
                      StartingOffset + i * EltSize);
    return;
  }
  // Interpret void as zero return values.
  if (Ty->isVoidTy())
    return;
  // Base case: we can get an EVT for this LLVM IR type.
  ValueVTs.push_back(TLI.getValueType(DL, Ty));
  if (Offsets)
    Offsets->push_back(StartingOffset);
}

/// ExtractTypeInfo - Returns the type info, possibly bitcast, encoded in V.
GlobalValue *llvm::ExtractTypeInfo(Value *V) {
  V = V->stripPointerCasts();
  GlobalValue *GV = dyn_cast<GlobalValue>(V);
  GlobalVariable *Var = dyn_cast<GlobalVariable>(V);

  if (Var && Var->getName() == "llvm.eh.catch.all.value") {
    assert(Var->hasInitializer() &&
           "The EH catch-all value must have an initializer");
    Value *Init = Var->getInitializer();
    GV = dyn_cast<GlobalValue>(Init);
    if (!GV) V = cast<ConstantPointerNull>(Init);
  }

  assert((GV || isa<ConstantPointerNull>(V)) &&
         "TypeInfo must be a global variable or NULL");
  return GV;
}

/// hasInlineAsmMemConstraint - Return true if the inline asm instruction being
/// processed uses a memory 'm' constraint.
bool
llvm::hasInlineAsmMemConstraint(InlineAsm::ConstraintInfoVector &CInfos,
                                const TargetLowering &TLI) {
  for (unsigned i = 0, e = CInfos.size(); i != e; ++i) {
    InlineAsm::ConstraintInfo &CI = CInfos[i];
    for (unsigned j = 0, ee = CI.Codes.size(); j != ee; ++j) {
      TargetLowering::ConstraintType CType = TLI.getConstraintType(CI.Codes[j]);
      if (CType == TargetLowering::C_Memory)
        return true;
    }

    // Indirect operand accesses access memory.
    if (CI.isIndirect)
      return true;
  }

  return false;
}

/// getFCmpCondCode - Return the ISD condition code corresponding to
/// the given LLVM IR floating-point condition code. This includes
/// consideration of global floating-point math flags.
///
ISD::CondCode llvm::getFCmpCondCode(FCmpInst::Predicate Pred) {
  switch (Pred) {
  case FCmpInst::FCMP_FALSE: return ISD::SETFALSE;
  case FCmpInst::FCMP_OEQ:   return ISD::SETOEQ;
  case FCmpInst::FCMP_OGT:   return ISD::SETOGT;
  case FCmpInst::FCMP_OGE:   return ISD::SETOGE;
  case FCmpInst::FCMP_OLT:   return ISD::SETOLT;
  case FCmpInst::FCMP_OLE:   return ISD::SETOLE;
  case FCmpInst::FCMP_ONE:   return ISD::SETONE;
  case FCmpInst::FCMP_ORD:   return ISD::SETO;
  case FCmpInst::FCMP_UNO:   return ISD::SETUO;
  case FCmpInst::FCMP_UEQ:   return ISD::SETUEQ;
  case FCmpInst::FCMP_UGT:   return ISD::SETUGT;
  case FCmpInst::FCMP_UGE:   return ISD::SETUGE;
  case FCmpInst::FCMP_ULT:   return ISD::SETULT;
  case FCmpInst::FCMP_ULE:   return ISD::SETULE;
  case FCmpInst::FCMP_UNE:   return ISD::SETUNE;
  case FCmpInst::FCMP_TRUE:  return ISD::SETTRUE;
  default: llvm_unreachable("Invalid FCmp predicate opcode!");
  }
}

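/// getFCmpCodeWithoutNaN - Given an ISD condition code comparing floats,
/// return the equivalent code if we're allowed to assume that NaNs won't
/// occur (i.e. drop the ordered/unordered distinction).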
ISD::CondCode llvm::getFCmpCodeWithoutNaN(ISD::CondCode CC) {
  switch (CC) {
  case ISD::SETOEQ: case ISD::SETUEQ: return ISD::SETEQ;
  case ISD::SETONE: case ISD::SETUNE: return ISD::SETNE;
  case ISD::SETOLT: case ISD::SETULT: return ISD::SETLT;
  case ISD::SETOLE: case ISD::SETULE: return ISD::SETLE;
  case ISD::SETOGT: case ISD::SETUGT: return ISD::SETGT;
  case ISD::SETOGE: case ISD::SETUGE: return ISD::SETGE;
  default: return CC;
  }
}

/// getICmpCondCode - Return the ISD condition code corresponding to
/// the given LLVM IR integer condition code.
///
ISD::CondCode llvm::getICmpCondCode(ICmpInst::Predicate Pred) {
  switch (Pred) {
  case ICmpInst::ICMP_EQ:  return ISD::SETEQ;
  case ICmpInst::ICMP_NE:  return ISD::SETNE;
  case ICmpInst::ICMP_SLE: return ISD::SETLE;
  case ICmpInst::ICMP_ULE: return ISD::SETULE;
  case ICmpInst::ICMP_SGE: return ISD::SETGE;
  case ICmpInst::ICMP_UGE: return ISD::SETUGE;
  case ICmpInst::ICMP_SLT: return ISD::SETLT;
  case ICmpInst::ICMP_ULT: return ISD::SETULT;
  case ICmpInst::ICMP_SGT: return ISD::SETGT;
  case ICmpInst::ICMP_UGT: return ISD::SETUGT;
  default:
    llvm_unreachable("Invalid ICmp predicate opcode!");
  }
}

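/// Return true if a bitcast between these two types would be a no-op at the
/// machine level: the types are identical, both are pointers, or both are
/// vector types that the target considers legal (so the cast merely
/// reinterprets the bits).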
static bool isNoopBitcast(Type *T1, Type *T2,
                          const TargetLoweringBase &TLI) {
  return T1 == T2 || (T1->isPointerTy() && T2->isPointerTy()) ||
         (isa<VectorType>(T1) && isa<VectorType>(T2) &&
          TLI.isTypeLegal(EVT::getEVT(T1)) && TLI.isTypeLegal(EVT::getEVT(T2)));
}

/// Look through operations that will be free to find the earliest source of
/// this value.
///
/// @param ValLoc If V has aggregate type, we will be interested in a particular
/// scalar component. This records its address; the reverse of this list gives a
/// sequence of indices appropriate for an extractvalue to locate the important
/// value. This value is updated during the function and on exit will indicate
/// similar information for the Value returned.
///
/// @param DataBits If this function looks through truncate instructions, this
/// will record the smallest size attained.
static const Value *getNoopInput(const Value *V,
                                 SmallVectorImpl<unsigned> &ValLoc,
                                 unsigned &DataBits,
                                 const TargetLoweringBase &TLI,
                                 const DataLayout &DL) {
  while (true) {
    // Try to look through V; if V is not an instruction, it can't be looked
    // through.
    const Instruction *I = dyn_cast<Instruction>(V);
    if (!I || I->getNumOperands() == 0) return V;
    const Value *NoopInput = nullptr;

    Value *Op = I->getOperand(0);
    if (isa<BitCastInst>(I)) {
      // Look through truly no-op bitcasts.
      if (isNoopBitcast(Op->getType(), I->getType(), TLI))
        NoopInput = Op;
    } else if (isa<GetElementPtrInst>(I)) {
      // Look through getelementptr.
      if (cast<GetElementPtrInst>(I)->hasAllZeroIndices())
        NoopInput = Op;
    } else if (isa<IntToPtrInst>(I)) {
      // Look through inttoptr.
      // Make sure this isn't a truncating or extending cast. We could
      // support this eventually, but don't bother for now.
      if (!isa<VectorType>(I->getType()) &&
          DL.getPointerSizeInBits() ==
              cast<IntegerType>(Op->getType())->getBitWidth())
        NoopInput = Op;
    } else if (isa<PtrToIntInst>(I)) {
      // Look through ptrtoint.
      // Make sure this isn't a truncating or extending cast. We could
      // support this eventually, but don't bother for now.
      if (!isa<VectorType>(I->getType()) &&
          DL.getPointerSizeInBits() ==
              cast<IntegerType>(I->getType())->getBitWidth())
        NoopInput = Op;
    } else if (isa<TruncInst>(I) &&
               TLI.allowTruncateForTailCall(Op->getType(), I->getType())) {
      DataBits = std::min(DataBits, I->getType()->getPrimitiveSizeInBits());
      NoopInput = Op;
    } else if (auto CS = ImmutableCallSite(I)) {
      const Value *ReturnedOp = CS.getReturnedArgOperand();
      if (ReturnedOp && isNoopBitcast(ReturnedOp->getType(), I->getType(), TLI))
        NoopInput = ReturnedOp;
    } else if (const InsertValueInst *IVI = dyn_cast<InsertValueInst>(V)) {
      // Value may come from either the aggregate or the scalar.
      ArrayRef<unsigned> InsertLoc = IVI->getIndices();
      if (ValLoc.size() >= InsertLoc.size() &&
          std::equal(InsertLoc.begin(), InsertLoc.end(), ValLoc.rbegin())) {
        // The type being inserted is a nested sub-type of the aggregate; we
        // have to remove those initial indices to get the location we're
        // interested in for the operand.
        ValLoc.resize(ValLoc.size() - InsertLoc.size());
        NoopInput = IVI->getInsertedValueOperand();
      } else {
        // The struct we're inserting into has the value we're interested in,
        // no change of address.
        NoopInput = Op;
      }
    } else if (const ExtractValueInst *EVI = dyn_cast<ExtractValueInst>(V)) {
      // The part we're interested in will inevitably be some sub-section of
      // the previous aggregate. Combine the two paths to obtain the true
      // address of our element.
      ArrayRef<unsigned> ExtractLoc = EVI->getIndices();
      ValLoc.append(ExtractLoc.rbegin(), ExtractLoc.rend());
      NoopInput = Op;
    }
    // Terminate if we couldn't find anything to look through.
    if (!NoopInput)
      return V;

    V = NoopInput;
  }
}

/// Return true if this scalar return value only has bits discarded on its path
/// from the "tail call" to the "ret". This includes the obvious noop
/// instructions handled by getNoopInput above as well as free truncations (or
/// extensions prior to the call).
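///
/// For illustration, in:
///   %v = tail call i32 @callee()
///   %t = trunc i32 %v to i16
///   ret i16 %t
/// the ret merely discards the high bits of the call's result, so the call
/// can remain a tail call if the target reports the i32 -> i16 truncate as
/// free.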
static bool slotOnlyDiscardsData(const Value *RetVal, const Value *CallVal,
                                 SmallVectorImpl<unsigned> &RetIndices,
                                 SmallVectorImpl<unsigned> &CallIndices,
                                 bool AllowDifferingSizes,
                                 const TargetLoweringBase &TLI,
                                 const DataLayout &DL) {

  // Trace the sub-value needed by the return value as far back up the graph as
  // possible, in the hope that it will intersect with the value produced by
  // the call. In the simple case with no "returned" attribute, the hope is
  // actually that we end up back at the tail call instruction itself.
  unsigned BitsRequired = UINT_MAX;
  RetVal = getNoopInput(RetVal, RetIndices, BitsRequired, TLI, DL);

  // If this slot in the value returned is undef, it doesn't matter what the
  // call puts there, it'll be fine.
  if (isa<UndefValue>(RetVal))
    return true;

  // Now do a similar search up through the graph to find where the value
  // actually returned by the "tail call" comes from. In the simple case
  // without a "returned" attribute, the search will be blocked immediately and
  // the loop will be a no-op.
  unsigned BitsProvided = UINT_MAX;
  CallVal = getNoopInput(CallVal, CallIndices, BitsProvided, TLI, DL);

  // There's no hope if we can't actually trace them to (the same part of!) the
  // same value.
  if (CallVal != RetVal || CallIndices != RetIndices)
    return false;

  // However, intervening truncates may have made the call non-tail. Make sure
  // all the bits that are needed by the "ret" have been provided by the "tail
  // call". FIXME: with sufficiently cunning bit-tracking, we could look
  // through extensions too.
  if (BitsProvided < BitsRequired ||
      (!AllowDifferingSizes && BitsProvided != BitsRequired))
    return false;

  return true;
}

/// For an aggregate type, determine whether a given index is within bounds or
/// not.
static bool indexReallyValid(CompositeType *T, unsigned Idx) {
  if (ArrayType *AT = dyn_cast<ArrayType>(T))
    return Idx < AT->getNumElements();

  return Idx < cast<StructType>(T)->getNumElements();
}

/// Move the given iterators to the next leaf type in depth first traversal.
///
/// Performs a depth-first traversal of the type as specified by its arguments,
/// stopping at the next leaf node (which may be a legitimate scalar type or an
/// empty struct or array).
///
/// @param SubTypes List of the partial components making up the type from
/// outermost to innermost non-empty aggregate. The element currently
/// represented is SubTypes.back()->getTypeAtIndex(Path.back() - 1).
///
/// @param Path Set of extractvalue indices leading from the outermost type
/// (SubTypes[0]) to the leaf node currently represented.
///
/// @returns true if a new type was found, false otherwise. Calling this
/// function again on a finished iterator will repeatedly return
/// false. SubTypes.back()->getTypeAtIndex(Path.back()) is either an empty
/// aggregate or a non-aggregate.
static bool advanceToNextLeafType(SmallVectorImpl<CompositeType *> &SubTypes,
                                  SmallVectorImpl<unsigned> &Path) {
  // First march back up the tree until we can successfully increment one of
  // the coordinates in Path.
  while (!Path.empty() && !indexReallyValid(SubTypes.back(), Path.back() + 1)) {
    Path.pop_back();
    SubTypes.pop_back();
  }

  // If we reached the top, then the iterator is done.
  if (Path.empty())
    return false;

  // We know there's *some* valid leaf now, so march back down the tree picking
  // out the left-most element at each node.
  ++Path.back();
  Type *DeeperType = SubTypes.back()->getTypeAtIndex(Path.back());
  while (DeeperType->isAggregateType()) {
    CompositeType *CT = cast<CompositeType>(DeeperType);
    if (!indexReallyValid(CT, 0))
      return true;

    SubTypes.push_back(CT);
    Path.push_back(0);

    DeeperType = CT->getTypeAtIndex(0U);
  }

  return true;
}

/// Find the first non-empty, scalar-like type in Next and set up the iterator
/// components.
///
/// Assuming Next is an aggregate of some kind, this function will traverse the
/// tree from left to right (i.e. depth-first) looking for the first
/// non-aggregate type which will play a role in function return.
///
/// For example, if Next was {[0 x i64], {{}, i32, {}}, i32} then we would set
/// up Path as [1, 1] and SubTypes as [Next, {{}, i32, {}}] to represent the
/// first i32 in that type.
static bool firstRealType(Type *Next,
                          SmallVectorImpl<CompositeType *> &SubTypes,
                          SmallVectorImpl<unsigned> &Path) {
  // First initialise the iterator components to the first "leaf" node
  // (i.e. node with no valid sub-type at any index, so {} does count as a leaf
  // despite nominally being an aggregate).
  while (Next->isAggregateType() &&
         indexReallyValid(cast<CompositeType>(Next), 0)) {
    SubTypes.push_back(cast<CompositeType>(Next));
    Path.push_back(0);
    Next = cast<CompositeType>(Next)->getTypeAtIndex(0U);
  }

  // If there's no Path now, Next was originally scalar already (or an empty
  // leaf). We're done.
  if (Path.empty())
    return true;

  // Otherwise, use normal iteration to keep looking through the tree until we
  // find a non-aggregate type.
  while (SubTypes.back()->getTypeAtIndex(Path.back())->isAggregateType()) {
    if (!advanceToNextLeafType(SubTypes, Path))
      return false;
  }

  return true;
}

/// Set the iterator data-structures to the next non-empty, non-aggregate
/// subtype.
static bool nextRealType(SmallVectorImpl<CompositeType *> &SubTypes,
                         SmallVectorImpl<unsigned> &Path) {
  do {
    if (!advanceToNextLeafType(SubTypes, Path))
      return false;

    assert(!Path.empty() && "found a leaf but didn't set the path?");
  } while (SubTypes.back()->getTypeAtIndex(Path.back())->isAggregateType());

  return true;
}

/// Test if the given instruction is in a position to be optimized
/// with a tail-call. This roughly means that it's in a block with
/// a return and there's nothing that needs to be scheduled
/// between it and the return.
///
/// This function only tests target-independent requirements.
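///
/// For illustration, the call below is not in a tail position because the
/// intervening store (an instruction with side effects) must execute between
/// the call and the return:
///   %v = call i32 @callee()
///   store i32 0, i32* @g
///   ret i32 %v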
bool llvm::isInTailCallPosition(ImmutableCallSite CS, const TargetMachine &TM) {
  const Instruction *I = CS.getInstruction();
  const BasicBlock *ExitBB = I->getParent();
  const Instruction *Term = ExitBB->getTerminator();
  const ReturnInst *Ret = dyn_cast<ReturnInst>(Term);

  // The block must end in a return statement or unreachable.
  //
  // FIXME: Decline tailcall if it's not guaranteed and if the block ends in
  // an unreachable, for now. The way tailcall optimization is currently
  // implemented means it will add an epilogue followed by a jump. That is
  // not profitable. Also, if the callee is a special function (e.g.
  // longjmp on x86), it can end up causing miscompilation that has not
  // been fully understood.
  if (!Ret &&
      (!TM.Options.GuaranteedTailCallOpt || !isa<UnreachableInst>(Term)))
    return false;

  // If I will have a chain, make sure no other instruction that will have a
  // chain interposes between I and the return.
  if (I->mayHaveSideEffects() || I->mayReadFromMemory() ||
      !isSafeToSpeculativelyExecute(I))
    for (BasicBlock::const_iterator BBI = std::prev(ExitBB->end(), 2);; --BBI) {
      if (&*BBI == I)
        break;
      // Debug info intrinsics do not get in the way of tail call optimization.
      if (isa<DbgInfoIntrinsic>(BBI))
        continue;
      // A lifetime end intrinsic should not stop tail call optimization.
      if (const IntrinsicInst *II = dyn_cast<IntrinsicInst>(BBI))
        if (II->getIntrinsicID() == Intrinsic::lifetime_end)
          continue;
      if (BBI->mayHaveSideEffects() || BBI->mayReadFromMemory() ||
          !isSafeToSpeculativelyExecute(&*BBI))
        return false;
    }

  const Function *F = ExitBB->getParent();
  return returnTypeIsEligibleForTailCall(
      F, I, Ret, *TM.getSubtargetImpl(*F)->getTargetLowering());
}

bool llvm::attributesPermitTailCall(const Function *F, const Instruction *I,
                                    const ReturnInst *Ret,
                                    const TargetLoweringBase &TLI,
                                    bool *AllowDifferingSizes) {
  // ADS may be null, so don't write to it directly.
  bool DummyADS;
  bool &ADS = AllowDifferingSizes ? *AllowDifferingSizes : DummyADS;
  ADS = true;

  AttrBuilder CallerAttrs(F->getAttributes(), AttributeList::ReturnIndex);
  AttrBuilder CalleeAttrs(cast<CallInst>(I)->getAttributes(),
                          AttributeList::ReturnIndex);

  // NoAlias and NonNull are completely benign as far as calling convention
  // goes; they shouldn't affect whether the call is a tail call.
  CallerAttrs.removeAttribute(Attribute::NoAlias);
  CalleeAttrs.removeAttribute(Attribute::NoAlias);
  CallerAttrs.removeAttribute(Attribute::NonNull);
  CalleeAttrs.removeAttribute(Attribute::NonNull);

  if (CallerAttrs.contains(Attribute::ZExt)) {
    if (!CalleeAttrs.contains(Attribute::ZExt))
      return false;

    ADS = false;
    CallerAttrs.removeAttribute(Attribute::ZExt);
    CalleeAttrs.removeAttribute(Attribute::ZExt);
  } else if (CallerAttrs.contains(Attribute::SExt)) {
    if (!CalleeAttrs.contains(Attribute::SExt))
      return false;

    ADS = false;
    CallerAttrs.removeAttribute(Attribute::SExt);
    CalleeAttrs.removeAttribute(Attribute::SExt);
  }

  // Drop sext and zext return attributes if the result is not used.
  // This enables tail calls for code like:
  //
  // define void @caller() {
  // entry:
  //   %unused_result = tail call zeroext i1 @callee()
  //   br label %retlabel
  // retlabel:
  //   ret void
  // }
  if (I->use_empty()) {
    CalleeAttrs.removeAttribute(Attribute::SExt);
    CalleeAttrs.removeAttribute(Attribute::ZExt);
  }

  // If they're still different, there's some facet we don't understand
  // (currently only "inreg", but in future who knows). It may be OK but the
  // only safe option is to reject the tail call.
  return CallerAttrs == CalleeAttrs;
}

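/// Check whether the scalar values making up the return are produced by the
/// tail call I, reaching the ret only through operations that will generate
/// no code (plus truncates the target considers free).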
bool llvm::returnTypeIsEligibleForTailCall(const Function *F,
                                           const Instruction *I,
                                           const ReturnInst *Ret,
                                           const TargetLoweringBase &TLI) {
  // If the block ends with a void return or unreachable, it doesn't matter
  // what the call's return type is.
  if (!Ret || Ret->getNumOperands() == 0) return true;

  // If the return value is undef, it doesn't matter what the call's
  // return type is.
  if (isa<UndefValue>(Ret->getOperand(0))) return true;

  // Make sure the attributes attached to each return are compatible.
  bool AllowDifferingSizes;
  if (!attributesPermitTailCall(F, I, Ret, TLI, &AllowDifferingSizes))
    return false;

  const Value *RetVal = Ret->getOperand(0), *CallVal = I;
  // Intrinsics like llvm.memcpy have no return value, but the expanded
  // libcall may or may not have a return value. On most platforms, it
  // will be expanded as memcpy in libc, which returns the first
  // argument. On other platforms like arm-none-eabi, memcpy may be
  // expanded as a library call without a return value, like __aeabi_memcpy.
  const CallInst *Call = cast<CallInst>(I);
  if (Function *F = Call->getCalledFunction()) {
    Intrinsic::ID IID = F->getIntrinsicID();
    if (((IID == Intrinsic::memcpy &&
          TLI.getLibcallName(RTLIB::MEMCPY) == StringRef("memcpy")) ||
         (IID == Intrinsic::memmove &&
          TLI.getLibcallName(RTLIB::MEMMOVE) == StringRef("memmove")) ||
         (IID == Intrinsic::memset &&
          TLI.getLibcallName(RTLIB::MEMSET) == StringRef("memset"))) &&
        RetVal == Call->getArgOperand(0))
      return true;
  }

  SmallVector<unsigned, 4> RetPath, CallPath;
  SmallVector<CompositeType *, 4> RetSubTypes, CallSubTypes;

  bool RetEmpty = !firstRealType(RetVal->getType(), RetSubTypes, RetPath);
  bool CallEmpty = !firstRealType(CallVal->getType(), CallSubTypes, CallPath);

  // Nothing's actually returned; it doesn't matter what the callee put there,
  // it's a valid tail call.
  if (RetEmpty)
    return true;

  // Iterate pairwise through each of the value types making up the tail call
  // and the corresponding return. For each one we want to know whether it's
  // essentially going directly from the tail call to the ret, via operations
  // that end up not generating any code.
  //
  // We allow a certain amount of covariance here. For example it's permitted
  // for the tail call to define more bits than the ret actually cares about
  // (e.g. via a truncate).
  do {
    if (CallEmpty) {
      // We've exhausted the values produced by the tail call instruction, the
      // rest are essentially undef. The type doesn't really matter, but we
      // need *something*.
      Type *SlotType = RetSubTypes.back()->getTypeAtIndex(RetPath.back());
      CallVal = UndefValue::get(SlotType);
    }

    // The manipulations performed when we're looking through an insertvalue or
    // an extractvalue would happen at the front of the RetPath list, so since
    // we have to copy it anyway it's more efficient to create a reversed copy.
    SmallVector<unsigned, 4> TmpRetPath(RetPath.rbegin(), RetPath.rend());
    SmallVector<unsigned, 4> TmpCallPath(CallPath.rbegin(), CallPath.rend());

    // Finally, we can check whether the value produced by the tail call at
    // this index is compatible with the value we return.
    if (!slotOnlyDiscardsData(RetVal, CallVal, TmpRetPath, TmpCallPath,
                              AllowDifferingSizes, TLI,
                              F->getParent()->getDataLayout()))
      return false;

    CallEmpty = !nextRealType(CallSubTypes, CallPath);
  } while (nextRealType(RetSubTypes, RetPath));

  return true;
}

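/// Mark MBB and every block reachable from it as members of EHScope, stopping
/// at blocks that begin a different scope (EH pads other than MBB itself) and
/// not following the successors of scope-return blocks.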
static void collectEHScopeMembers(
    DenseMap<const MachineBasicBlock *, int> &EHScopeMembership, int EHScope,
    const MachineBasicBlock *MBB) {
  SmallVector<const MachineBasicBlock *, 16> Worklist = {MBB};
  while (!Worklist.empty()) {
    const MachineBasicBlock *Visiting = Worklist.pop_back_val();
    // Don't follow blocks which start new scopes.
    if (Visiting->isEHPad() && Visiting != MBB)
      continue;

    // Add this MBB to our scope.
    auto P = EHScopeMembership.insert(std::make_pair(Visiting, EHScope));

    // Don't revisit blocks.
    if (!P.second) {
      assert(P.first->second == EHScope && "MBB is part of two scopes!");
      continue;
    }

    // Returns are boundaries where scope transfer can occur; don't follow
    // successors.
    if (Visiting->isEHScopeReturnBlock())
      continue;

    for (const MachineBasicBlock *Succ : Visiting->successors())
      Worklist.push_back(Succ);
  }
}

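/// Compute a mapping from each machine basic block in MF to the number of the
/// EH-scope entry block it belongs to; blocks outside any scope (including
/// unreachable ones) map to the entry block's number.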
DenseMap<const MachineBasicBlock *, int>
llvm::getEHScopeMembership(const MachineFunction &MF) {
  DenseMap<const MachineBasicBlock *, int> EHScopeMembership;

  // We don't have anything to do if there aren't any EH pads.
  if (!MF.hasEHScopes())
    return EHScopeMembership;

  int EntryBBNumber = MF.front().getNumber();
  bool IsSEH = isAsynchronousEHPersonality(
      classifyEHPersonality(MF.getFunction().getPersonalityFn()));

  const TargetInstrInfo *TII = MF.getSubtarget().getInstrInfo();
  SmallVector<const MachineBasicBlock *, 16> EHScopeBlocks;
  SmallVector<const MachineBasicBlock *, 16> UnreachableBlocks;
  SmallVector<const MachineBasicBlock *, 16> SEHCatchPads;
  SmallVector<std::pair<const MachineBasicBlock *, int>, 16> CatchRetSuccessors;
  for (const MachineBasicBlock &MBB : MF) {
    if (MBB.isEHScopeEntry()) {
      EHScopeBlocks.push_back(&MBB);
    } else if (IsSEH && MBB.isEHPad()) {
      SEHCatchPads.push_back(&MBB);
    } else if (MBB.pred_empty()) {
      UnreachableBlocks.push_back(&MBB);
    }

    MachineBasicBlock::const_iterator MBBI = MBB.getFirstTerminator();

    // CatchPads are not scopes for SEH, so do not consider CatchRet to
    // transfer control to another scope.
    if (MBBI == MBB.end() || MBBI->getOpcode() != TII->getCatchReturnOpcode())
      continue;

    // FIXME: SEH CatchPads are not necessarily in the parent function:
    // they could be inside a finally block.
    const MachineBasicBlock *Successor = MBBI->getOperand(0).getMBB();
    const MachineBasicBlock *SuccessorColor = MBBI->getOperand(1).getMBB();
    CatchRetSuccessors.push_back(
        {Successor, IsSEH ? EntryBBNumber : SuccessorColor->getNumber()});
  }

  // We don't have anything else to do if there aren't any EH scopes.
  if (EHScopeBlocks.empty())
    return EHScopeMembership;

  // Identify all the basic blocks reachable from the function entry.
  collectEHScopeMembers(EHScopeMembership, EntryBBNumber, &MF.front());
  // All blocks not part of a scope are in the parent function.
  for (const MachineBasicBlock *MBB : UnreachableBlocks)
    collectEHScopeMembers(EHScopeMembership, EntryBBNumber, MBB);
  // Next, identify all the blocks inside the scopes.
  for (const MachineBasicBlock *MBB : EHScopeBlocks)
    collectEHScopeMembers(EHScopeMembership, MBB->getNumber(), MBB);
  // SEH CatchPads aren't really scopes, handle them separately.
  for (const MachineBasicBlock *MBB : SEHCatchPads)
    collectEHScopeMembers(EHScopeMembership, EntryBBNumber, MBB);
  // Finally, identify all the targets of a catchret.
  for (std::pair<const MachineBasicBlock *, int> CatchRetPair :
       CatchRetSuccessors)
    collectEHScopeMembers(EHScopeMembership, CatchRetPair.second,
                          CatchRetPair.first);
  return EHScopeMembership;
}