Diffstat (limited to 'contrib/llvm/tools/clang/lib/CodeGen/CodeGenFunction.h')
-rw-r--r--  contrib/llvm/tools/clang/lib/CodeGen/CodeGenFunction.h  714
1 file changed, 192 insertions, 522 deletions
diff --git a/contrib/llvm/tools/clang/lib/CodeGen/CodeGenFunction.h b/contrib/llvm/tools/clang/lib/CodeGen/CodeGenFunction.h
index ff74c15c38c3..db291e3b1ddd 100644
--- a/contrib/llvm/tools/clang/lib/CodeGen/CodeGenFunction.h
+++ b/contrib/llvm/tools/clang/lib/CodeGen/CodeGenFunction.h
@@ -17,12 +17,14 @@
#include "CGBuilder.h"
#include "CGDebugInfo.h"
#include "CGValue.h"
+#include "EHScopeStack.h"
#include "CodeGenModule.h"
#include "clang/AST/CharUnits.h"
#include "clang/AST/ExprCXX.h"
#include "clang/AST/ExprObjC.h"
#include "clang/AST/Type.h"
#include "clang/Basic/ABI.h"
+#include "clang/Basic/CapturedStmt.h"
#include "clang/Basic/TargetInfo.h"
#include "clang/Frontend/CodeGenOptions.h"
#include "llvm/ADT/ArrayRef.h"
@@ -89,457 +91,6 @@ enum TypeEvaluationKind {
TEK_Aggregate
};
-/// A branch fixup. These are required when emitting a goto to a
-/// label which hasn't been emitted yet. The goto is optimistically
-/// emitted as a branch to the basic block for the label, and (if it
-/// occurs in a scope with non-trivial cleanups) a fixup is added to
-/// the innermost cleanup. When a (normal) cleanup is popped, any
-/// unresolved fixups in that scope are threaded through the cleanup.
-struct BranchFixup {
- /// The block containing the terminator which needs to be modified
- /// into a switch if this fixup is resolved into the current scope.
- /// If null, InitialBranch points directly to the destination.
- llvm::BasicBlock *OptimisticBranchBlock;
-
- /// The ultimate destination of the branch.
- ///
- /// This can be set to null to indicate that this fixup was
- /// successfully resolved.
- llvm::BasicBlock *Destination;
-
- /// The destination index value.
- unsigned DestinationIndex;
-
- /// The initial branch of the fixup.
- llvm::BranchInst *InitialBranch;
-};
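// [Editorial sketch, not part of this patch.] Roughly how a fixup is recorded
// when a goto targets a label that has not been emitted yet while normal
// cleanups are in scope (addBranchFixup asserts that). The real logic lives in
// CodeGenFunction::EmitBranchThroughCleanup; LabelBB and DestIndex below are
// stand-ins for the label's block and its destination index.
static void recordBranchFixupSketch(CodeGenFunction &CGF,
                                    llvm::BasicBlock *LabelBB,
                                    unsigned DestIndex) {
  // Optimistically branch straight to the label's block...
  llvm::BranchInst *BI = CGF.Builder.CreateBr(LabelBB);
  // ...and leave a fixup on the innermost normal cleanup so the branch can
  // later be rewritten to thread through the cleanup code.
  BranchFixup &Fixup = CGF.EHStack.addBranchFixup();
  Fixup.Destination = LabelBB;
  Fixup.DestinationIndex = DestIndex;
  Fixup.InitialBranch = BI;
  Fixup.OptimisticBranchBlock = 0; // no cleanup threaded through it yet
}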
-
-template <class T> struct InvariantValue {
- typedef T type;
- typedef T saved_type;
- static bool needsSaving(type value) { return false; }
- static saved_type save(CodeGenFunction &CGF, type value) { return value; }
- static type restore(CodeGenFunction &CGF, saved_type value) { return value; }
-};
-
-/// A metaprogramming class for ensuring that a value will dominate an
-/// arbitrary position in a function.
-template <class T> struct DominatingValue : InvariantValue<T> {};
-
-template <class T, bool mightBeInstruction =
- llvm::is_base_of<llvm::Value, T>::value &&
- !llvm::is_base_of<llvm::Constant, T>::value &&
- !llvm::is_base_of<llvm::BasicBlock, T>::value>
-struct DominatingPointer;
-template <class T> struct DominatingPointer<T,false> : InvariantValue<T*> {};
-// template <class T> struct DominatingPointer<T,true> at end of file
-
-template <class T> struct DominatingValue<T*> : DominatingPointer<T> {};
-
-enum CleanupKind {
- EHCleanup = 0x1,
- NormalCleanup = 0x2,
- NormalAndEHCleanup = EHCleanup | NormalCleanup,
-
- InactiveCleanup = 0x4,
- InactiveEHCleanup = EHCleanup | InactiveCleanup,
- InactiveNormalCleanup = NormalCleanup | InactiveCleanup,
- InactiveNormalAndEHCleanup = NormalAndEHCleanup | InactiveCleanup
-};
-
-/// A stack of scopes which respond to exceptions, including cleanups
-/// and catch blocks.
-class EHScopeStack {
-public:
- /// A saved depth on the scope stack. This is necessary because
- /// pushing scopes onto the stack invalidates iterators.
- class stable_iterator {
- friend class EHScopeStack;
-
- /// Offset from StartOfData to EndOfBuffer.
- ptrdiff_t Size;
-
- stable_iterator(ptrdiff_t Size) : Size(Size) {}
-
- public:
- static stable_iterator invalid() { return stable_iterator(-1); }
- stable_iterator() : Size(-1) {}
-
- bool isValid() const { return Size >= 0; }
-
- /// Returns true if this scope encloses I.
- /// Returns false if I is invalid.
- /// This scope must be valid.
- bool encloses(stable_iterator I) const { return Size <= I.Size; }
-
- /// Returns true if this scope strictly encloses I: that is,
- /// if it encloses I and is not I.
- /// Returns false if I is invalid.
- /// This scope must be valid.
- bool strictlyEncloses(stable_iterator I) const { return Size < I.Size; }
-
- friend bool operator==(stable_iterator A, stable_iterator B) {
- return A.Size == B.Size;
- }
- friend bool operator!=(stable_iterator A, stable_iterator B) {
- return A.Size != B.Size;
- }
- };
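// [Editorial sketch, not part of this patch.] Typical use of stable_iterator:
// remember the current depth before emitting a region, then pop back to it.
// Unlike the unstable iterator declared further below, the saved depth
// survives later pushes.
static void emitRegionWithCleanupsSketch(CodeGenFunction &CGF) {
  EHScopeStack::stable_iterator Depth = CGF.EHStack.stable_begin();
  // ... emit code that may push any number of cleanups ...
  CGF.PopCleanupBlocks(Depth); // pops (and emits) everything pushed since Depth
  assert(CGF.EHStack.stable_begin() == Depth && "stack not restored");
}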
-
- /// Information for lazily generating a cleanup. Subclasses must be
- /// POD-like: cleanups will not be destructed, and they will be
- /// allocated on the cleanup stack and freely copied and moved
- /// around.
- ///
- /// Cleanup implementations should generally be declared in an
- /// anonymous namespace.
- class Cleanup {
- // Anchor the construction vtable.
- virtual void anchor();
- public:
- /// Generation flags.
- class Flags {
- enum {
- F_IsForEH = 0x1,
- F_IsNormalCleanupKind = 0x2,
- F_IsEHCleanupKind = 0x4
- };
- unsigned flags;
-
- public:
- Flags() : flags(0) {}
-
- /// isForEH - true if the current emission is for an EH cleanup.
- bool isForEHCleanup() const { return flags & F_IsForEH; }
- bool isForNormalCleanup() const { return !isForEHCleanup(); }
- void setIsForEHCleanup() { flags |= F_IsForEH; }
-
- bool isNormalCleanupKind() const { return flags & F_IsNormalCleanupKind; }
- void setIsNormalCleanupKind() { flags |= F_IsNormalCleanupKind; }
-
- /// isEHCleanupKind - true if the cleanup was pushed as an EH
- /// cleanup.
- bool isEHCleanupKind() const { return flags & F_IsEHCleanupKind; }
- void setIsEHCleanupKind() { flags |= F_IsEHCleanupKind; }
- };
-
- // Provide a virtual destructor to suppress a very common warning
- // that unfortunately cannot be suppressed without this. Cleanups
- // should not rely on this destructor ever being called.
- virtual ~Cleanup() {}
-
- /// Emit the cleanup. For normal cleanups, this is run in the
- /// same EH context as when the cleanup was pushed, i.e. the
- /// immediately-enclosing context of the cleanup scope. For
- /// EH cleanups, this is run in a terminate context.
- ///
- // \param flags cleanup kind.
- virtual void Emit(CodeGenFunction &CGF, Flags flags) = 0;
- };
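// [Editorial sketch, not part of this patch.] A typical cleanup: a POD-like
// struct in an anonymous namespace whose Emit method generates the teardown
// IR. 'FreeBuffer' and the usage shown after it are hypothetical; only the
// Cleanup/pushCleanup API is taken from this header.
namespace {
  struct FreeBuffer : EHScopeStack::Cleanup {
    llvm::Value *Addr;
    FreeBuffer(llvm::Value *Addr) : Addr(Addr) {}
    void Emit(CodeGenFunction &CGF, Flags flags) {
      // Generate the IR that releases Addr here. A cleanup pushed as
      // NormalAndEHCleanup runs on both normal scope exit and the EH path.
    }
  };
}
// Pushed at the point the resource becomes live:
//   CGF.EHStack.pushCleanup<FreeBuffer>(NormalAndEHCleanup, bufferAddr);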
-
- /// ConditionalCleanupN stores the saved form of its N parameters,
- /// then restores them and performs the cleanup.
- template <class T, class A0>
- class ConditionalCleanup1 : public Cleanup {
- typedef typename DominatingValue<A0>::saved_type A0_saved;
- A0_saved a0_saved;
-
- void Emit(CodeGenFunction &CGF, Flags flags) {
- A0 a0 = DominatingValue<A0>::restore(CGF, a0_saved);
- T(a0).Emit(CGF, flags);
- }
-
- public:
- ConditionalCleanup1(A0_saved a0)
- : a0_saved(a0) {}
- };
-
- template <class T, class A0, class A1>
- class ConditionalCleanup2 : public Cleanup {
- typedef typename DominatingValue<A0>::saved_type A0_saved;
- typedef typename DominatingValue<A1>::saved_type A1_saved;
- A0_saved a0_saved;
- A1_saved a1_saved;
-
- void Emit(CodeGenFunction &CGF, Flags flags) {
- A0 a0 = DominatingValue<A0>::restore(CGF, a0_saved);
- A1 a1 = DominatingValue<A1>::restore(CGF, a1_saved);
- T(a0, a1).Emit(CGF, flags);
- }
-
- public:
- ConditionalCleanup2(A0_saved a0, A1_saved a1)
- : a0_saved(a0), a1_saved(a1) {}
- };
-
- template <class T, class A0, class A1, class A2>
- class ConditionalCleanup3 : public Cleanup {
- typedef typename DominatingValue<A0>::saved_type A0_saved;
- typedef typename DominatingValue<A1>::saved_type A1_saved;
- typedef typename DominatingValue<A2>::saved_type A2_saved;
- A0_saved a0_saved;
- A1_saved a1_saved;
- A2_saved a2_saved;
-
- void Emit(CodeGenFunction &CGF, Flags flags) {
- A0 a0 = DominatingValue<A0>::restore(CGF, a0_saved);
- A1 a1 = DominatingValue<A1>::restore(CGF, a1_saved);
- A2 a2 = DominatingValue<A2>::restore(CGF, a2_saved);
- T(a0, a1, a2).Emit(CGF, flags);
- }
-
- public:
- ConditionalCleanup3(A0_saved a0, A1_saved a1, A2_saved a2)
- : a0_saved(a0), a1_saved(a1), a2_saved(a2) {}
- };
-
- template <class T, class A0, class A1, class A2, class A3>
- class ConditionalCleanup4 : public Cleanup {
- typedef typename DominatingValue<A0>::saved_type A0_saved;
- typedef typename DominatingValue<A1>::saved_type A1_saved;
- typedef typename DominatingValue<A2>::saved_type A2_saved;
- typedef typename DominatingValue<A3>::saved_type A3_saved;
- A0_saved a0_saved;
- A1_saved a1_saved;
- A2_saved a2_saved;
- A3_saved a3_saved;
-
- void Emit(CodeGenFunction &CGF, Flags flags) {
- A0 a0 = DominatingValue<A0>::restore(CGF, a0_saved);
- A1 a1 = DominatingValue<A1>::restore(CGF, a1_saved);
- A2 a2 = DominatingValue<A2>::restore(CGF, a2_saved);
- A3 a3 = DominatingValue<A3>::restore(CGF, a3_saved);
- T(a0, a1, a2, a3).Emit(CGF, flags);
- }
-
- public:
- ConditionalCleanup4(A0_saved a0, A1_saved a1, A2_saved a2, A3_saved a3)
- : a0_saved(a0), a1_saved(a1), a2_saved(a2), a3_saved(a3) {}
- };
-
-private:
- // The implementation for this class is in CGException.h and
- // CGException.cpp; the definition is here because it's used as a
- // member of CodeGenFunction.
-
- /// The start of the scope-stack buffer, i.e. the allocated pointer
- /// for the buffer. All of these pointers are either simultaneously
- /// null or simultaneously valid.
- char *StartOfBuffer;
-
- /// The end of the buffer.
- char *EndOfBuffer;
-
- /// The first valid entry in the buffer.
- char *StartOfData;
-
- /// The innermost normal cleanup on the stack.
- stable_iterator InnermostNormalCleanup;
-
- /// The innermost EH scope on the stack.
- stable_iterator InnermostEHScope;
-
- /// The current set of branch fixups. A branch fixup is a jump to
- /// an as-yet unemitted label, i.e. a label for which we don't yet
- /// know the EH stack depth. Whenever we pop a cleanup, we have
- /// to thread all the current branch fixups through it.
- ///
- /// Fixups are recorded as the Use of the respective branch or
- /// switch statement. The use points to the final destination.
- /// When popping out of a cleanup, these uses are threaded through
- /// the cleanup and adjusted to point to the new cleanup.
- ///
- /// Note that branches are allowed to jump into protected scopes
- /// in certain situations; e.g. the following code is legal:
- /// struct A { ~A(); }; // trivial ctor, non-trivial dtor
- /// goto foo;
- /// A a;
- /// foo:
- /// bar();
- SmallVector<BranchFixup, 8> BranchFixups;
-
- char *allocate(size_t Size);
-
- void *pushCleanup(CleanupKind K, size_t DataSize);
-
-public:
- EHScopeStack() : StartOfBuffer(0), EndOfBuffer(0), StartOfData(0),
- InnermostNormalCleanup(stable_end()),
- InnermostEHScope(stable_end()) {}
- ~EHScopeStack() { delete[] StartOfBuffer; }
-
- // Variadic templates would make this not terrible.
-
- /// Push a lazily-created cleanup on the stack.
- template <class T>
- void pushCleanup(CleanupKind Kind) {
- void *Buffer = pushCleanup(Kind, sizeof(T));
- Cleanup *Obj = new(Buffer) T();
- (void) Obj;
- }
-
- /// Push a lazily-created cleanup on the stack.
- template <class T, class A0>
- void pushCleanup(CleanupKind Kind, A0 a0) {
- void *Buffer = pushCleanup(Kind, sizeof(T));
- Cleanup *Obj = new(Buffer) T(a0);
- (void) Obj;
- }
-
- /// Push a lazily-created cleanup on the stack.
- template <class T, class A0, class A1>
- void pushCleanup(CleanupKind Kind, A0 a0, A1 a1) {
- void *Buffer = pushCleanup(Kind, sizeof(T));
- Cleanup *Obj = new(Buffer) T(a0, a1);
- (void) Obj;
- }
-
- /// Push a lazily-created cleanup on the stack.
- template <class T, class A0, class A1, class A2>
- void pushCleanup(CleanupKind Kind, A0 a0, A1 a1, A2 a2) {
- void *Buffer = pushCleanup(Kind, sizeof(T));
- Cleanup *Obj = new(Buffer) T(a0, a1, a2);
- (void) Obj;
- }
-
- /// Push a lazily-created cleanup on the stack.
- template <class T, class A0, class A1, class A2, class A3>
- void pushCleanup(CleanupKind Kind, A0 a0, A1 a1, A2 a2, A3 a3) {
- void *Buffer = pushCleanup(Kind, sizeof(T));
- Cleanup *Obj = new(Buffer) T(a0, a1, a2, a3);
- (void) Obj;
- }
-
- /// Push a lazily-created cleanup on the stack.
- template <class T, class A0, class A1, class A2, class A3, class A4>
- void pushCleanup(CleanupKind Kind, A0 a0, A1 a1, A2 a2, A3 a3, A4 a4) {
- void *Buffer = pushCleanup(Kind, sizeof(T));
- Cleanup *Obj = new(Buffer) T(a0, a1, a2, a3, a4);
- (void) Obj;
- }
-
- // Feel free to add more variants of the following:
-
- /// Push a cleanup with non-constant storage requirements on the
- /// stack. The cleanup type must provide an additional static method:
- /// static size_t getExtraSize(size_t);
- /// The argument to this method will be the value N, which will also
- /// be passed as the first argument to the constructor.
- ///
- /// The data stored in the extra storage must obey the same
- /// restrictions as normal cleanup member data.
- ///
- /// The pointer returned from this method is valid until the cleanup
- /// stack is modified.
- template <class T, class A0, class A1, class A2>
- T *pushCleanupWithExtra(CleanupKind Kind, size_t N, A0 a0, A1 a1, A2 a2) {
- void *Buffer = pushCleanup(Kind, sizeof(T) + T::getExtraSize(N));
- return new (Buffer) T(N, a0, a1, a2);
- }
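// [Editorial sketch, not part of this patch.] A cleanup with variable-size
// trailing storage, in the shape pushCleanupWithExtra expects: it provides
// getExtraSize(N) and takes N as its first constructor argument. All names
// here are hypothetical.
namespace {
  struct CallWithSavedArgs : EHScopeStack::Cleanup {
    size_t NumArgs;
    llvm::Value *Callee;
    QualType CalleeTy;
    bool Nounwind;
    CallWithSavedArgs(size_t N, llvm::Value *Callee, QualType CalleeTy,
                      bool Nounwind)
      : NumArgs(N), Callee(Callee), CalleeTy(CalleeTy), Nounwind(Nounwind) {}
    // Extra bytes requested beyond sizeof(*this); they are laid out directly
    // after the object inside the cleanup-stack allocation.
    static size_t getExtraSize(size_t N) { return N * sizeof(llvm::Value *); }
    llvm::Value **getArgs() {
      return reinterpret_cast<llvm::Value **>(this + 1);
    }
    void Emit(CodeGenFunction &CGF, Flags flags) {
      // Emit the call to Callee with the NumArgs saved arguments here.
    }
  };
}
// The returned pointer is valid only until the cleanup stack is next modified:
//   CallWithSavedArgs *C =
//       CGF.EHStack.pushCleanupWithExtra<CallWithSavedArgs>(
//           NormalAndEHCleanup, NumArgs, callee, calleeTy, /*Nounwind=*/true);
//   std::copy(ArgVals, ArgVals + NumArgs, C->getArgs());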
-
- /// Pops a cleanup scope off the stack. This is private to CGCleanup.cpp.
- void popCleanup();
-
- /// Push a set of catch handlers on the stack. The catch is
- /// uninitialized and will need to have the given number of handlers
- /// set on it.
- class EHCatchScope *pushCatch(unsigned NumHandlers);
-
- /// Pops a catch scope off the stack. This is private to CGException.cpp.
- void popCatch();
-
- /// Push an exceptions filter on the stack.
- class EHFilterScope *pushFilter(unsigned NumFilters);
-
- /// Pops an exceptions filter off the stack.
- void popFilter();
-
- /// Push a terminate handler on the stack.
- void pushTerminate();
-
- /// Pops a terminate handler off the stack.
- void popTerminate();
-
- /// Determines whether the exception-scopes stack is empty.
- bool empty() const { return StartOfData == EndOfBuffer; }
-
- bool requiresLandingPad() const {
- return InnermostEHScope != stable_end();
- }
-
- /// Determines whether there are any normal cleanups on the stack.
- bool hasNormalCleanups() const {
- return InnermostNormalCleanup != stable_end();
- }
-
- /// Returns the innermost normal cleanup on the stack, or
- /// stable_end() if there are no normal cleanups.
- stable_iterator getInnermostNormalCleanup() const {
- return InnermostNormalCleanup;
- }
- stable_iterator getInnermostActiveNormalCleanup() const;
-
- stable_iterator getInnermostEHScope() const {
- return InnermostEHScope;
- }
-
- stable_iterator getInnermostActiveEHScope() const;
-
- /// An unstable reference to a scope-stack depth. Invalidated by
- /// pushes but not pops.
- class iterator;
-
- /// Returns an iterator pointing to the innermost EH scope.
- iterator begin() const;
-
- /// Returns an iterator pointing to the outermost EH scope.
- iterator end() const;
-
- /// Create a stable reference to the top of the EH stack. The
- /// returned reference is valid until that scope is popped off the
- /// stack.
- stable_iterator stable_begin() const {
- return stable_iterator(EndOfBuffer - StartOfData);
- }
-
- /// Create a stable reference to the bottom of the EH stack.
- static stable_iterator stable_end() {
- return stable_iterator(0);
- }
-
- /// Translates an iterator into a stable_iterator.
- stable_iterator stabilize(iterator it) const;
-
- /// Turn a stable reference to a scope depth into an unstable pointer
- /// to the EH stack.
- iterator find(stable_iterator save) const;
-
- /// Removes the cleanup pointed to by the given stable_iterator.
- void removeCleanup(stable_iterator save);
-
- /// Add a branch fixup to the current cleanup scope.
- BranchFixup &addBranchFixup() {
- assert(hasNormalCleanups() && "adding fixup in scope without cleanups");
- BranchFixups.push_back(BranchFixup());
- return BranchFixups.back();
- }
-
- unsigned getNumBranchFixups() const { return BranchFixups.size(); }
- BranchFixup &getBranchFixup(unsigned I) {
- assert(I < getNumBranchFixups());
- return BranchFixups[I];
- }
-
- /// Pops lazily-removed fixups from the end of the list. This
- /// should only be called by procedures which have just popped a
- /// cleanup or resolved one or more fixups.
- void popNullFixups();
-
- /// Clears the branch-fixups list. This should only be called by
- /// ResolveAllBranchFixups.
- void clearFixups() { BranchFixups.clear(); }
-};
-
/// CodeGenFunction - This class organizes the per-function state that is used
/// while generating LLVM code.
class CodeGenFunction : public CodeGenTypeCache {
@@ -606,6 +157,65 @@ public:
/// we prefer to insert allocas.
llvm::AssertingVH<llvm::Instruction> AllocaInsertPt;
+ /// \brief API for captured statement code generation.
+ class CGCapturedStmtInfo {
+ public:
+ explicit CGCapturedStmtInfo(const CapturedStmt &S,
+ CapturedRegionKind K = CR_Default)
+ : Kind(K), ThisValue(0), CXXThisFieldDecl(0) {
+
+ RecordDecl::field_iterator Field =
+ S.getCapturedRecordDecl()->field_begin();
+ for (CapturedStmt::const_capture_iterator I = S.capture_begin(),
+ E = S.capture_end();
+ I != E; ++I, ++Field) {
+ if (I->capturesThis())
+ CXXThisFieldDecl = *Field;
+ else
+ CaptureFields[I->getCapturedVar()] = *Field;
+ }
+ }
+
+ virtual ~CGCapturedStmtInfo();
+
+ CapturedRegionKind getKind() const { return Kind; }
+
+ void setContextValue(llvm::Value *V) { ThisValue = V; }
+ // \brief Retrieve the value of the context parameter.
+ llvm::Value *getContextValue() const { return ThisValue; }
+
+ /// \brief Lookup the captured field decl for a variable.
+ const FieldDecl *lookup(const VarDecl *VD) const {
+ return CaptureFields.lookup(VD);
+ }
+
+ bool isCXXThisExprCaptured() const { return CXXThisFieldDecl != 0; }
+ FieldDecl *getThisFieldDecl() const { return CXXThisFieldDecl; }
+
+ /// \brief Emit the captured statement body.
+ virtual void EmitBody(CodeGenFunction &CGF, Stmt *S) {
+ CGF.EmitStmt(S);
+ }
+
+ /// \brief Get the name of the capture helper.
+ virtual StringRef getHelperName() const { return "__captured_stmt"; }
+
+ private:
+ /// \brief The kind of captured statement being generated.
+ CapturedRegionKind Kind;
+
+ /// \brief Keep the map between VarDecl and FieldDecl.
+ llvm::SmallDenseMap<const VarDecl *, FieldDecl *> CaptureFields;
+
+ /// \brief The base address of the captured record, passed in as the first
+ /// argument of the parallel region function.
+ llvm::Value *ThisValue;
+
+ /// \brief Captured 'this' type.
+ FieldDecl *CXXThisFieldDecl;
+ };
+ CGCapturedStmtInfo *CapturedStmtInfo;
+
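// [Editorial sketch, not part of this patch.] A region-specific subclass can
// override EmitBody and getHelperName to customize how the outlined helper is
// emitted; the class name and helper name below are hypothetical.
class CGPragmaRegionInfo : public CodeGenFunction::CGCapturedStmtInfo {
public:
  explicit CGPragmaRegionInfo(const CapturedStmt &S)
    : CGCapturedStmtInfo(S, CR_Default) {}
  void EmitBody(CodeGenFunction &CGF, Stmt *S) {
    // Emit any region prologue IR first, then the captured body itself.
    CGCapturedStmtInfo::EmitBody(CGF, S);
  }
  StringRef getHelperName() const { return "__pragma_region_helper"; }
};
// The emitter would point CGF.CapturedStmtInfo at an instance of this class
// before calling EmitCapturedStmt / GenerateCapturedStmtFunction.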
/// BoundsChecking - Emit run-time bounds checks. Higher values mean
/// potentially higher performance penalties.
unsigned char BoundsChecking;
@@ -631,6 +241,18 @@ public:
llvm::DenseMap<const VarDecl *, llvm::Value *> NRVOFlags;
EHScopeStack EHStack;
+ llvm::SmallVector<char, 256> LifetimeExtendedCleanupStack;
+
+ /// Header for data within LifetimeExtendedCleanupStack.
+ struct LifetimeExtendedCleanupHeader {
+ /// The size of the following cleanup object.
+ size_t Size : 29;
+ /// The kind of cleanup to push: a value from the CleanupKind enumeration.
+ unsigned Kind : 3;
+
+ size_t getSize() const { return Size; }
+ CleanupKind getKind() const { return static_cast<CleanupKind>(Kind); }
+ };
/// i32s containing the indexes of the cleanup destinations.
llvm::AllocaInst *NormalCleanupDest;
@@ -766,6 +388,23 @@ public:
initFullExprCleanup();
}
+ /// \brief Queue a cleanup to be pushed after finishing the current
+ /// full-expression.
+ template <class T, class A0, class A1, class A2, class A3>
+ void pushCleanupAfterFullExpr(CleanupKind Kind, A0 a0, A1 a1, A2 a2, A3 a3) {
+ assert(!isInConditionalBranch() && "can't defer conditional cleanup");
+
+ LifetimeExtendedCleanupHeader Header = { sizeof(T), Kind };
+
+ size_t OldSize = LifetimeExtendedCleanupStack.size();
+ LifetimeExtendedCleanupStack.resize(
+ LifetimeExtendedCleanupStack.size() + sizeof(Header) + Header.Size);
+
+ char *Buffer = &LifetimeExtendedCleanupStack[OldSize];
+ new (Buffer) LifetimeExtendedCleanupHeader(Header);
+ new (Buffer + sizeof(Header)) T(a0, a1, a2, a3);
+ }
+
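// [Editorial sketch, not part of this patch.] Deferring a cleanup to the end
// of the enclosing full-expression: the header plus the cleanup object are
// appended to LifetimeExtendedCleanupStack above and only pushed onto EHStack
// when the full-expression is popped. 'CallReleaseHook' and its operands are
// hypothetical.
namespace {
  struct CallReleaseHook : EHScopeStack::Cleanup {
    llvm::Value *Fn, *Arg0, *Arg1, *Arg2;
    CallReleaseHook(llvm::Value *Fn, llvm::Value *Arg0, llvm::Value *Arg1,
                    llvm::Value *Arg2)
      : Fn(Fn), Arg0(Arg0), Arg1(Arg1), Arg2(Arg2) {}
    void Emit(CodeGenFunction &CGF, Flags flags) {
      // Emit the release call here.
    }
  };
}
// Queued now, pushed for real at the end of the current full-expression:
//   CGF.pushCleanupAfterFullExpr<CallReleaseHook>(NormalAndEHCleanup,
//                                                 fn, a, b, c);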
/// Set up the last cleanup that was pushed as a conditional
/// full-expression cleanup.
void initFullExprCleanup();
@@ -784,9 +423,7 @@ public:
/// PopCleanupBlock - Will pop the cleanup entry on the stack and
/// process all branch fixups.
- /// \param EHLoc - Optional debug location for EH code.
- void PopCleanupBlock(bool FallThroughIsBranchThrough = false,
- SourceLocation EHLoc=SourceLocation());
+ void PopCleanupBlock(bool FallThroughIsBranchThrough = false);
/// DeactivateCleanupBlock - Deactivates the given cleanup block.
/// The block cannot be reactivated. Pops it if it's the top of the
@@ -813,6 +450,7 @@ public:
/// will be executed once the scope is exited.
class RunCleanupsScope {
EHScopeStack::stable_iterator CleanupStackDepth;
+ size_t LifetimeExtendedCleanupStackSize;
bool OldDidCallStackSave;
protected:
bool PerformCleanup;
@@ -830,6 +468,8 @@ public:
: PerformCleanup(true), CGF(CGF)
{
CleanupStackDepth = CGF.EHStack.stable_begin();
+ LifetimeExtendedCleanupStackSize =
+ CGF.LifetimeExtendedCleanupStack.size();
OldDidCallStackSave = CGF.DidCallStackSave;
CGF.DidCallStackSave = false;
}
@@ -839,7 +479,8 @@ public:
~RunCleanupsScope() {
if (PerformCleanup) {
CGF.DidCallStackSave = OldDidCallStackSave;
- CGF.PopCleanupBlocks(CleanupStackDepth);
+ CGF.PopCleanupBlocks(CleanupStackDepth,
+ LifetimeExtendedCleanupStackSize);
}
}
@@ -853,7 +494,8 @@ public:
void ForceCleanup() {
assert(PerformCleanup && "Already forced cleanup");
CGF.DidCallStackSave = OldDidCallStackSave;
- CGF.PopCleanupBlocks(CleanupStackDepth);
+ CGF.PopCleanupBlocks(CleanupStackDepth,
+ LifetimeExtendedCleanupStackSize);
PerformCleanup = false;
}
};
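// [Editorial sketch, not part of this patch.] Typical RAII use: cleanups
// pushed while 'Scope' is alive are popped (and their IR emitted) when it is
// destroyed, or earlier via Scope.ForceCleanup().
static void emitBodyWithScopeSketch(CodeGenFunction &CGF,
                                    const CompoundStmt &S) {
  CodeGenFunction::RunCleanupsScope Scope(CGF);
  CGF.EmitCompoundStmtWithoutScope(S);
}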
@@ -905,11 +547,15 @@ public:
};
- /// PopCleanupBlocks - Takes the old cleanup stack size and emits
- /// the cleanup blocks that have been added.
- /// \param EHLoc - Optional debug location for EH code.
+ /// \brief Takes the old cleanup stack size and emits the cleanup blocks
+ /// that have been added.
+ void PopCleanupBlocks(EHScopeStack::stable_iterator OldCleanupStackSize);
+
+ /// \brief Takes the old cleanup stack size and emits the cleanup blocks
+ /// that have been added, then adds all lifetime-extended cleanups from
+ /// the given position to the stack.
void PopCleanupBlocks(EHScopeStack::stable_iterator OldCleanupStackSize,
- SourceLocation EHLoc=SourceLocation());
+ size_t OldLifetimeExtendedStackSize);
void ResolveBranchFixups(llvm::BasicBlock *Target);
@@ -1152,10 +798,6 @@ private:
CGDebugInfo *DebugInfo;
bool DisableDebugInfo;
- /// If the current function returns 'this', use the field to keep track of
- /// the callee that returns 'this'.
- llvm::Value *CalleeWithThisReturn;
-
/// DidCallStackSave - Whether llvm.stacksave has been called. Used to avoid
/// calling llvm.stacksave for multiple VLAs in the same scope.
bool DidCallStackSave;
@@ -1279,6 +921,10 @@ private:
/// The current lexical scope.
LexicalScope *CurLexicalScope;
+ /// The current source location that should be used for exception
+ /// handling code.
+ SourceLocation CurEHLocation;
+
/// ByrefValueInfoMap - For each __block variable, contains a pair of the LLVM
/// type as well as the field number that contains the actual data.
llvm::DenseMap<const ValueDecl *, std::pair<llvm::Type *,
@@ -1307,14 +953,6 @@ public:
CodeGenTypes &getTypes() const { return CGM.getTypes(); }
ASTContext &getContext() const { return CGM.getContext(); }
- /// Returns true if DebugInfo is actually initialized.
- bool maybeInitializeDebugInfo() {
- if (CGM.getModuleDebugInfo()) {
- DebugInfo = CGM.getModuleDebugInfo();
- return true;
- }
- return false;
- }
CGDebugInfo *getDebugInfo() {
if (DisableDebugInfo)
return NULL;
@@ -1378,12 +1016,15 @@ public:
llvm::Value *addr, QualType type);
void pushDestroy(CleanupKind kind, llvm::Value *addr, QualType type,
Destroyer *destroyer, bool useEHCleanupForArray);
+ void pushLifetimeExtendedDestroy(CleanupKind kind, llvm::Value *addr,
+ QualType type, Destroyer *destroyer,
+ bool useEHCleanupForArray);
void emitDestroy(llvm::Value *addr, QualType type, Destroyer *destroyer,
bool useEHCleanupForArray);
- llvm::Function *generateDestroyHelper(llvm::Constant *addr,
- QualType type,
+ llvm::Function *generateDestroyHelper(llvm::Constant *addr, QualType type,
Destroyer *destroyer,
- bool useEHCleanupForArray);
+ bool useEHCleanupForArray,
+ const VarDecl *VD);
void emitArrayDestroy(llvm::Value *begin, llvm::Value *end,
QualType type, Destroyer *destroyer,
bool checkZeroLength, bool useEHCleanup);
@@ -1495,9 +1136,9 @@ public:
void EmitConstructorBody(FunctionArgList &Args);
void EmitDestructorBody(FunctionArgList &Args);
void emitImplicitAssignmentOperatorBody(FunctionArgList &Args);
- void EmitFunctionBody(FunctionArgList &Args);
+ void EmitFunctionBody(FunctionArgList &Args, const Stmt *Body);
- void EmitForwardingCallToLambda(const CXXRecordDecl *Lambda,
+ void EmitForwardingCallToLambda(const CXXMethodDecl *LambdaCallOperator,
CallArgList &CallArgs);
void EmitLambdaToBlockPointerBody(FunctionArgList &Args);
void EmitLambdaBlockInvokeBody();
@@ -1512,6 +1153,11 @@ public:
/// legal to call this function even if there is no current insertion point.
void FinishFunction(SourceLocation EndLoc=SourceLocation());
+ void StartThunk(llvm::Function *Fn, GlobalDecl GD, const CGFunctionInfo &FnInfo);
+
+ void EmitCallAndReturnForThunk(GlobalDecl GD, llvm::Value *Callee,
+ const ThunkInfo *Thunk);
+
/// GenerateThunk - Generate a thunk for the given method.
void GenerateThunk(llvm::Function *Fn, const CGFunctionInfo &FnInfo,
GlobalDecl GD, const ThunkInfo &Thunk);
@@ -1531,7 +1177,6 @@ public:
void InitializeVTablePointer(BaseSubobject Base,
const CXXRecordDecl *NearestVBase,
CharUnits OffsetFromNearestVBase,
- llvm::Constant *VTable,
const CXXRecordDecl *VTableClass);
typedef llvm::SmallPtrSet<const CXXRecordDecl *, 4> VisitedVirtualBasesSetTy;
@@ -1539,7 +1184,6 @@ public:
const CXXRecordDecl *NearestVBase,
CharUnits OffsetFromNearestVBase,
bool BaseIsNonVirtualPrimaryBase,
- llvm::Constant *VTable,
const CXXRecordDecl *VTableClass,
VisitedVirtualBasesSetTy& VBases);
@@ -1549,6 +1193,12 @@ public:
/// to by This.
llvm::Value *GetVTablePtr(llvm::Value *This, llvm::Type *Ty);
+
+ /// CanDevirtualizeMemberFunctionCalls - Checks whether virtual calls on given
+ /// expr can be devirtualized.
+ bool CanDevirtualizeMemberFunctionCall(const Expr *Base,
+ const CXXMethodDecl *MD);
+
/// EnterDtorCleanups - Enter the cleanups necessary to complete the
/// given phase of destruction for a destructor. The end result
/// should call destructors on members and base classes in reverse
@@ -1576,7 +1226,8 @@ public:
/// EmitFunctionEpilog - Emit the target specific LLVM code to return the
/// given temporary.
- void EmitFunctionEpilog(const CGFunctionInfo &FI, bool EmitRetDbgLoc);
+ void EmitFunctionEpilog(const CGFunctionInfo &FI, bool EmitRetDbgLoc,
+ SourceLocation EndLoc);
/// EmitStartEHSpec - Emit the start of the exception spec.
void EmitStartEHSpec(const Decl *D);
@@ -1678,8 +1329,7 @@ public:
/// ErrorUnsupported - Print out an error that codegen doesn't support the
/// specified stmt yet.
- void ErrorUnsupported(const Stmt *S, const char *Type,
- bool OmitOnError=false);
+ void ErrorUnsupported(const Stmt *S, const char *Type);
//===--------------------------------------------------------------------===//
// Helpers
@@ -1915,10 +1565,6 @@ public:
CastExpr::path_const_iterator PathEnd,
bool NullCheckValue);
- llvm::Value *GetVirtualBaseClassOffset(llvm::Value *This,
- const CXXRecordDecl *ClassDecl,
- const CXXRecordDecl *BaseClassDecl);
-
/// GetVTTParameter - Return the VTT parameter that should be passed to a
/// base constructor/destructor with virtual bases.
/// FIXME: VTTs are Itanium ABI-specific, so the definition should move
@@ -1928,7 +1574,8 @@ public:
void EmitDelegateCXXConstructorCall(const CXXConstructorDecl *Ctor,
CXXCtorType CtorType,
- const FunctionArgList &Args);
+ const FunctionArgList &Args,
+ SourceLocation Loc);
// It's important not to confuse this and the previous function. Delegating
// constructors are the C++0x feature. The constructor delegate optimization
// is used to reduce duplication in the base and complete constructors where
@@ -1982,10 +1629,6 @@ public:
llvm::Value *EmitDynamicCast(llvm::Value *V, const CXXDynamicCastExpr *DCE);
llvm::Value* EmitCXXUuidofExpr(const CXXUuidofExpr *E);
- void MaybeEmitStdInitializerListCleanup(llvm::Value *loc, const Expr *init);
- void EmitStdInitializerListCleanup(llvm::Value *loc,
- const InitListExpr *init);
-
/// \brief Situations in which we might emit a check for the suitability of a
/// pointer or glvalue.
enum TypeCheckKind {
@@ -2161,11 +1804,12 @@ public:
/// \return True if the statement was handled.
bool EmitSimpleStmt(const Stmt *S);
- RValue EmitCompoundStmt(const CompoundStmt &S, bool GetLast = false,
- AggValueSlot AVS = AggValueSlot::ignored());
- RValue EmitCompoundStmtWithoutScope(const CompoundStmt &S,
- bool GetLast = false, AggValueSlot AVS =
- AggValueSlot::ignored());
+ llvm::Value *EmitCompoundStmt(const CompoundStmt &S, bool GetLast = false,
+ AggValueSlot AVS = AggValueSlot::ignored());
+ llvm::Value *EmitCompoundStmtWithoutScope(const CompoundStmt &S,
+ bool GetLast = false,
+ AggValueSlot AVS =
+ AggValueSlot::ignored());
/// EmitLabel - Emit the block for the given label. It is legal to call this
/// function even if there is no current insertion point.
@@ -2188,7 +1832,6 @@ public:
void EmitCaseStmt(const CaseStmt &S);
void EmitCaseStmtRange(const CaseStmt &S);
void EmitAsmStmt(const AsmStmt &S);
- void EmitCapturedStmt(const CapturedStmt &S);
void EmitObjCForCollectionStmt(const ObjCForCollectionStmt &S);
void EmitObjCAtTryStmt(const ObjCAtTryStmt &S);
@@ -2202,8 +1845,14 @@ public:
void ExitCXXTryStmt(const CXXTryStmt &S, bool IsFnTryBlock = false);
void EmitCXXTryStmt(const CXXTryStmt &S);
+ void EmitSEHTryStmt(const SEHTryStmt &S);
void EmitCXXForRangeStmt(const CXXForRangeStmt &S);
+ llvm::Function *EmitCapturedStmt(const CapturedStmt &S, CapturedRegionKind K);
+ llvm::Function *GenerateCapturedStmtFunction(const CapturedDecl *CD,
+ const RecordDecl *RD,
+ SourceLocation Loc);
+
//===--------------------------------------------------------------------===//
// LValue Expression Emission
//===--------------------------------------------------------------------===//
@@ -2245,11 +1894,12 @@ public:
/// that the address will be used to access the object.
LValue EmitCheckedLValue(const Expr *E, TypeCheckKind TCK);
- RValue convertTempToRValue(llvm::Value *addr, QualType type);
+ RValue convertTempToRValue(llvm::Value *addr, QualType type,
+ SourceLocation Loc);
void EmitAtomicInit(Expr *E, LValue lvalue);
- RValue EmitAtomicLoad(LValue lvalue,
+ RValue EmitAtomicLoad(LValue lvalue, SourceLocation loc,
AggValueSlot slot = AggValueSlot::ignored());
void EmitAtomicStore(RValue rvalue, LValue lvalue, bool isInit);
@@ -2267,6 +1917,7 @@ public:
/// the LLVM value representation.
llvm::Value *EmitLoadOfScalar(llvm::Value *Addr, bool Volatile,
unsigned Alignment, QualType Ty,
+ SourceLocation Loc,
llvm::MDNode *TBAAInfo = 0,
QualType TBAABaseTy = QualType(),
uint64_t TBAAOffset = 0);
@@ -2275,7 +1926,7 @@ public:
/// care to appropriately convert from the memory representation to
/// the LLVM value representation. The l-value must be a simple
/// l-value.
- llvm::Value *EmitLoadOfScalar(LValue lvalue);
+ llvm::Value *EmitLoadOfScalar(LValue lvalue, SourceLocation Loc);
/// EmitStoreOfScalar - Store a scalar value to an address, taking
/// care to appropriately convert from the memory representation to
@@ -2296,7 +1947,7 @@ public:
/// EmitLoadOfLValue - Given an expression that represents a value lvalue,
/// this method emits the address of the lvalue, then loads the result as an
/// rvalue, returning the rvalue.
- RValue EmitLoadOfLValue(LValue V);
+ RValue EmitLoadOfLValue(LValue V, SourceLocation Loc);
RValue EmitLoadOfExtVectorElementLValue(LValue V);
RValue EmitLoadOfBitfieldLValue(LValue LV);
@@ -2306,8 +1957,8 @@ public:
void EmitStoreThroughLValue(RValue Src, LValue Dst, bool isInit=false);
void EmitStoreThroughExtVectorComponentLValue(RValue Src, LValue Dst);
- /// EmitStoreThroughLValue - Store Src into Dst with same constraints as
- /// EmitStoreThroughLValue.
+ /// EmitStoreThroughBitfieldLValue - Store Src into Dst with same constraints
+ /// as EmitStoreThroughLValue.
///
/// \param Result [out] - If non-null, this will be set to a Value* for the
/// bit-field contents after the store, appropriate for use as the result of
@@ -2318,6 +1969,8 @@ public:
/// Emit an l-value for an assignment (simple or compound) of complex type.
LValue EmitComplexAssignmentLValue(const BinaryOperator *E);
LValue EmitComplexCompoundAssignmentLValue(const CompoundAssignOperator *E);
+ LValue EmitScalarCompooundAssignWithComplex(const CompoundAssignOperator *E,
+ llvm::Value *&Result);
// Note: only available for agg return types
LValue EmitBinaryOperatorLValue(const BinaryOperator *E);
@@ -2340,11 +1993,10 @@ public:
LValue EmitInitListLValue(const InitListExpr *E);
LValue EmitConditionalOperatorLValue(const AbstractConditionalOperator *E);
LValue EmitCastLValue(const CastExpr *E);
- LValue EmitNullInitializationLValue(const CXXScalarValueInitExpr *E);
LValue EmitMaterializeTemporaryExpr(const MaterializeTemporaryExpr *E);
LValue EmitOpaqueValueLValue(const OpaqueValueExpr *e);
- RValue EmitRValueForField(LValue LV, const FieldDecl *FD);
+ RValue EmitRValueForField(LValue LV, const FieldDecl *FD, SourceLocation Loc);
class ConstantEmission {
llvm::PointerIntPair<llvm::Constant*, 1, bool> ValueAndIsReference;
@@ -2359,7 +2011,7 @@ public:
return ConstantEmission(C, false);
}
- operator bool() const { return ValueAndIsReference.getOpaqueValue() != 0; }
+ LLVM_EXPLICIT operator bool() const { return ValueAndIsReference.getOpaqueValue() != 0; }
bool isReference() const { return ValueAndIsReference.getInt(); }
LValue getReferenceLValue(CodeGenFunction &CGF, Expr *refExpr) const {
@@ -2426,6 +2078,7 @@ public:
llvm::Instruction **callOrInvoke = 0);
RValue EmitCall(QualType FnType, llvm::Value *Callee,
+ SourceLocation CallLoc,
ReturnValueSlot ReturnValue,
CallExpr::const_arg_iterator ArgBeg,
CallExpr::const_arg_iterator ArgEnd,
@@ -2457,10 +2110,6 @@ public:
void EmitNoreturnRuntimeCallOrInvoke(llvm::Value *callee,
ArrayRef<llvm::Value*> args);
- llvm::Value *BuildVirtualCall(const CXXMethodDecl *MD, llvm::Value *This,
- llvm::Type *Ty);
- llvm::Value *BuildVirtualCall(const CXXDestructorDecl *DD, CXXDtorType Type,
- llvm::Value *This, llvm::Type *Ty);
llvm::Value *BuildAppleKextVirtualCall(const CXXMethodDecl *MD,
NestedNameSpecifier *Qual,
llvm::Type *Ty);
@@ -2503,6 +2152,11 @@ public:
/// is unhandled by the current target.
llvm::Value *EmitTargetBuiltinExpr(unsigned BuiltinID, const CallExpr *E);
+ llvm::Value *EmitAArch64CompareBuiltinExpr(llvm::Value *Op, llvm::Type *Ty,
+ const llvm::CmpInst::Predicate Fp,
+ const llvm::CmpInst::Predicate Ip,
+ const llvm::Twine &Name = "");
+ llvm::Value *EmitAArch64CompareBuiltinExpr(llvm::Value *Op, llvm::Type *Ty);
llvm::Value *EmitAArch64BuiltinExpr(unsigned BuiltinID, const CallExpr *E);
llvm::Value *EmitARMBuiltinExpr(unsigned BuiltinID, const CallExpr *E);
llvm::Value *EmitNeonCall(llvm::Function *F,
@@ -2512,6 +2166,8 @@ public:
llvm::Value *EmitNeonSplat(llvm::Value *V, llvm::Constant *Idx);
llvm::Value *EmitNeonShiftVector(llvm::Value *V, llvm::Type *Ty,
bool negateForRightShift);
+ llvm::Value *EmitNeonRShiftImm(llvm::Value *Vec, llvm::Value *Amt,
+ llvm::Type *Ty, bool usgn, const char *name);
llvm::Value *BuildVector(ArrayRef<llvm::Value*> Ops);
llvm::Value *EmitX86BuiltinExpr(unsigned BuiltinID, const CallExpr *E);
@@ -2587,10 +2243,8 @@ public:
void EmitObjCAutoreleasePoolCleanup(llvm::Value *Ptr);
void EmitObjCMRRAutoreleasePoolPop(llvm::Value *Ptr);
- /// EmitReferenceBindingToExpr - Emits a reference binding to the passed in
- /// expression. Will emit a temporary variable if E is not an LValue.
- RValue EmitReferenceBindingToExpr(const Expr* E,
- const NamedDecl *InitializedDecl);
+ /// \brief Emits a reference binding to the passed in expression.
+ RValue EmitReferenceBindingToExpr(const Expr *E);
//===--------------------------------------------------------------------===//
// Expression Emission
@@ -2646,7 +2300,7 @@ public:
void EmitStoreOfComplex(ComplexPairTy V, LValue dest, bool isInit);
/// EmitLoadOfComplex - Load a complex number from the specified l-value.
- ComplexPairTy EmitLoadOfComplex(LValue src);
+ ComplexPairTy EmitLoadOfComplex(LValue src, SourceLocation loc);
/// CreateStaticVarDecl - Create a zero-initialized LLVM global for
/// a static local variable.
@@ -2670,7 +2324,8 @@ public:
/// Call atexit() with a function that passes the given argument to
/// the given function.
- void registerGlobalDtorWithAtExit(llvm::Constant *fn, llvm::Constant *addr);
+ void registerGlobalDtorWithAtExit(const VarDecl &D, llvm::Constant *fn,
+ llvm::Constant *addr);
/// Emit code in this function to perform a guarded variable
/// initialization. Guarded initializations are used when it's not
@@ -2801,7 +2456,8 @@ public:
/// EmitDelegateCallArg - We are performing a delegate call; that
/// is, the current function is delegating to another one. Produce
/// a r-value suitable for passing the given parameter.
- void EmitDelegateCallArg(CallArgList &args, const VarDecl *param);
+ void EmitDelegateCallArg(CallArgList &args, const VarDecl *param,
+ SourceLocation loc);
/// SetFPAccuracy - Set the minimum required accuracy of the given floating
/// point operation, expressed as the maximum relative error in ulp.
@@ -2825,7 +2481,7 @@ private:
/// Ty, into individual arguments on the provided vector \arg Args. See
/// ABIArgInfo::Expand.
void ExpandTypeToArgs(QualType Ty, RValue Src,
- SmallVector<llvm::Value*, 16> &Args,
+ SmallVectorImpl<llvm::Value *> &Args,
llvm::FunctionType *IRFuncTy);
llvm::Value* EmitAsmInput(const TargetInfo::ConstraintInfo &Info,
@@ -2833,7 +2489,8 @@ private:
llvm::Value* EmitAsmInputLValue(const TargetInfo::ConstraintInfo &Info,
LValue InputValue, QualType InputType,
- std::string &ConstraintStr);
+ std::string &ConstraintStr,
+ SourceLocation Loc);
/// EmitCallArgs - Emit call arguments for a function.
/// The CallArgTypeInfo parameter is used for iterating over the known
@@ -2841,8 +2498,13 @@ private:
template<typename T>
void EmitCallArgs(CallArgList& Args, const T* CallArgTypeInfo,
CallExpr::const_arg_iterator ArgBeg,
- CallExpr::const_arg_iterator ArgEnd) {
- CallExpr::const_arg_iterator Arg = ArgBeg;
+ CallExpr::const_arg_iterator ArgEnd,
+ bool ForceColumnInfo = false) {
+ CGDebugInfo *DI = getDebugInfo();
+ SourceLocation CallLoc;
+ if (DI) CallLoc = DI->getLocation();
+
+ CallExpr::const_arg_iterator Arg = ArgBeg;
// First, use the argument types that the type info knows about
if (CallArgTypeInfo) {
@@ -2871,6 +2533,10 @@ private:
"type mismatch in call argument!");
#endif
EmitCallArg(Args, *Arg, ArgType);
+
+ // Each argument expression could modify the debug
+ // location. Restore it.
+ if (DI) DI->EmitLocation(Builder, CallLoc, ForceColumnInfo);
}
// Either we've emitted all the call args, or we have a call to a
@@ -2881,8 +2547,12 @@ private:
}
// If we still have any arguments, emit them using the type of the argument.
- for (; Arg != ArgEnd; ++Arg)
+ for (; Arg != ArgEnd; ++Arg) {
EmitCallArg(Args, *Arg, Arg->getType());
+
+ // Restore the debug location.
+ if (DI) DI->EmitLocation(Builder, CallLoc, ForceColumnInfo);
+ }
}
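// [Editorial sketch, not part of this patch.] EmitCallArgs is private, so it
// is only reachable from other CodeGenFunction members; a typical call site
// looks roughly like the lines below, where 'CE' and 'FnType' are hypothetical
// locals. The FunctionProtoType (if any) supplies the known parameter types,
// and ForceColumnInfo can be set so that several calls on one source line keep
// distinct debug locations.
//   CallArgList Args;
//   EmitCallArgs(Args, FnType->getAs<FunctionProtoType>(),
//                CE->arg_begin(), CE->arg_end(),
//                /*ForceColumnInfo=*/false);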
const TargetCodeGenInfo &getTargetHooks() const {