[OpenACC] Implement 'collapse' for combined constructs.
[llvm-project.git] / clang / lib / CodeGen / CGCleanup.h
blob c73c97146abc4d4d5ec4adba51bf0a02106d7d89
1 //===-- CGCleanup.h - Classes for cleanups IR generation --------*- C++ -*-===//
2 //
3 // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4 // See https://llvm.org/LICENSE.txt for license information.
5 // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6 //
7 //===----------------------------------------------------------------------===//
8 //
9 // These classes support the generation of LLVM IR for cleanups.
11 //===----------------------------------------------------------------------===//
13 #ifndef LLVM_CLANG_LIB_CODEGEN_CGCLEANUP_H
14 #define LLVM_CLANG_LIB_CODEGEN_CGCLEANUP_H
16 #include "EHScopeStack.h"
18 #include "Address.h"
19 #include "llvm/ADT/STLExtras.h"
20 #include "llvm/ADT/SetVector.h"
21 #include "llvm/ADT/SmallPtrSet.h"
22 #include "llvm/ADT/SmallVector.h"
23 #include "llvm/IR/Instruction.h"
25 namespace llvm {
26 class BasicBlock;
27 class Value;
28 class ConstantInt;
31 namespace clang {
32 class FunctionDecl;
33 namespace CodeGen {
34 class CodeGenModule;
35 class CodeGenFunction;
37 /// The MS C++ ABI needs a pointer to RTTI data plus some flags to describe the
38 /// type of a catch handler, so we use this wrapper.
39 struct CatchTypeInfo {
40 llvm::Constant *RTTI;
41 unsigned Flags;
44 /// A protected scope for zero-cost EH handling.
45 class EHScope {
46 public:
47 enum Kind { Cleanup, Catch, Terminate, Filter };
49 private:
50 llvm::BasicBlock *CachedLandingPad;
51 llvm::BasicBlock *CachedEHDispatchBlock;
53 EHScopeStack::stable_iterator EnclosingEHScope;
55 class CommonBitFields {
56 friend class EHScope;
57 LLVM_PREFERRED_TYPE(Kind)
58 unsigned Kind : 3;
60 enum { NumCommonBits = 3 };
62 protected:
63 class CatchBitFields {
64 friend class EHCatchScope;
65 unsigned : NumCommonBits;
67 unsigned NumHandlers : 32 - NumCommonBits;
70 class CleanupBitFields {
71 friend class EHCleanupScope;
72 unsigned : NumCommonBits;
74 /// Whether this cleanup needs to be run along normal edges.
75 LLVM_PREFERRED_TYPE(bool)
76 unsigned IsNormalCleanup : 1;
78 /// Whether this cleanup needs to be run along exception edges.
79 LLVM_PREFERRED_TYPE(bool)
80 unsigned IsEHCleanup : 1;
82 /// Whether this cleanup is currently active.
83 LLVM_PREFERRED_TYPE(bool)
84 unsigned IsActive : 1;
86 /// Whether this cleanup is a lifetime marker
87 LLVM_PREFERRED_TYPE(bool)
88 unsigned IsLifetimeMarker : 1;
90 /// Whether the normal cleanup should test the activation flag.
91 LLVM_PREFERRED_TYPE(bool)
92 unsigned TestFlagInNormalCleanup : 1;
94 /// Whether the EH cleanup should test the activation flag.
95 LLVM_PREFERRED_TYPE(bool)
96 unsigned TestFlagInEHCleanup : 1;
98 /// The amount of extra storage needed by the Cleanup.
99 /// Always a multiple of the scope-stack alignment.
100 unsigned CleanupSize : 12;
103 class FilterBitFields {
104 friend class EHFilterScope;
105 unsigned : NumCommonBits;
107 unsigned NumFilters : 32 - NumCommonBits;
110 union {
111 CommonBitFields CommonBits;
112 CatchBitFields CatchBits;
113 CleanupBitFields CleanupBits;
114 FilterBitFields FilterBits;
117 public:
118 EHScope(Kind kind, EHScopeStack::stable_iterator enclosingEHScope)
119 : CachedLandingPad(nullptr), CachedEHDispatchBlock(nullptr),
120 EnclosingEHScope(enclosingEHScope) {
121 CommonBits.Kind = kind;
124 Kind getKind() const { return static_cast<Kind>(CommonBits.Kind); }
126 llvm::BasicBlock *getCachedLandingPad() const {
127 return CachedLandingPad;
130 void setCachedLandingPad(llvm::BasicBlock *block) {
131 CachedLandingPad = block;
134 llvm::BasicBlock *getCachedEHDispatchBlock() const {
135 return CachedEHDispatchBlock;
138 void setCachedEHDispatchBlock(llvm::BasicBlock *block) {
139 CachedEHDispatchBlock = block;
142 bool hasEHBranches() const {
143 if (llvm::BasicBlock *block = getCachedEHDispatchBlock())
144 return !block->use_empty();
145 return false;
148 EHScopeStack::stable_iterator getEnclosingEHScope() const {
149 return EnclosingEHScope;
153 /// A scope which attempts to handle some, possibly all, types of
154 /// exceptions.
156 /// Objective C \@finally blocks are represented using a cleanup scope
157 /// after the catch scope.
158 class EHCatchScope : public EHScope {
159 // In effect, we have a flexible array member
160 // Handler Handlers[0];
161 // But that's only standard in C99, not C++, so we have to do
162 // annoying pointer arithmetic instead.
164 public:
165 struct Handler {
166 /// A type info value, or null (C++ null, not an LLVM null pointer)
167 /// for a catch-all.
168 CatchTypeInfo Type;
170 /// The catch handler for this type.
171 llvm::BasicBlock *Block;
173 bool isCatchAll() const { return Type.RTTI == nullptr; }
176 private:
177 friend class EHScopeStack;
179 Handler *getHandlers() {
180 return reinterpret_cast<Handler*>(this+1);
183 const Handler *getHandlers() const {
184 return reinterpret_cast<const Handler*>(this+1);
187 public:
188 static size_t getSizeForNumHandlers(unsigned N) {
189 return sizeof(EHCatchScope) + N * sizeof(Handler);
192 EHCatchScope(unsigned numHandlers,
193 EHScopeStack::stable_iterator enclosingEHScope)
194 : EHScope(Catch, enclosingEHScope) {
195 CatchBits.NumHandlers = numHandlers;
196 assert(CatchBits.NumHandlers == numHandlers && "NumHandlers overflow?");
199 unsigned getNumHandlers() const {
200 return CatchBits.NumHandlers;
203 void setCatchAllHandler(unsigned I, llvm::BasicBlock *Block) {
204 setHandler(I, CatchTypeInfo{nullptr, 0}, Block);
207 void setHandler(unsigned I, llvm::Constant *Type, llvm::BasicBlock *Block) {
208 assert(I < getNumHandlers());
209 getHandlers()[I].Type = CatchTypeInfo{Type, 0};
210 getHandlers()[I].Block = Block;
213 void setHandler(unsigned I, CatchTypeInfo Type, llvm::BasicBlock *Block) {
214 assert(I < getNumHandlers());
215 getHandlers()[I].Type = Type;
216 getHandlers()[I].Block = Block;
219 const Handler &getHandler(unsigned I) const {
220 assert(I < getNumHandlers());
221 return getHandlers()[I];
224 // Clear all handler blocks.
225 // FIXME: it's better to always call clearHandlerBlocks in DTOR and have a
226 // 'takeHandler' or some such function which removes ownership from the
227 // EHCatchScope object if the handlers should live longer than EHCatchScope.
228 void clearHandlerBlocks() {
229 for (unsigned I = 0, N = getNumHandlers(); I != N; ++I)
230 delete getHandler(I).Block;
233 typedef const Handler *iterator;
234 iterator begin() const { return getHandlers(); }
235 iterator end() const { return getHandlers() + getNumHandlers(); }
237 static bool classof(const EHScope *Scope) {
238 return Scope->getKind() == Catch;
242 /// A cleanup scope which generates the cleanup blocks lazily.
243 class alignas(8) EHCleanupScope : public EHScope {
244 /// The nearest normal cleanup scope enclosing this one.
245 EHScopeStack::stable_iterator EnclosingNormal;
247 /// The nearest EH scope enclosing this one.
248 EHScopeStack::stable_iterator EnclosingEH;
250 /// The dual entry/exit block along the normal edge. This is lazily
251 /// created if needed before the cleanup is popped.
252 llvm::BasicBlock *NormalBlock;
254 /// An optional i1 variable indicating whether this cleanup has been
255 /// activated yet.
256 Address ActiveFlag;
258 /// Extra information required for cleanups that have resolved
259 /// branches through them. This has to be allocated on the side
260 /// because everything on the cleanup stack has be trivially
261 /// movable.
262 struct ExtInfo {
263 /// The destinations of normal branch-afters and branch-throughs.
264 llvm::SmallPtrSet<llvm::BasicBlock*, 4> Branches;
266 /// Normal branch-afters.
267 SmallVector<std::pair<llvm::BasicBlock*,llvm::ConstantInt*>, 4>
268 BranchAfters;
270 mutable struct ExtInfo *ExtInfo;
272 /// Erases auxillary allocas and their usages for an unused cleanup.
273 /// Cleanups should mark these allocas as 'used' if the cleanup is
274 /// emitted, otherwise these instructions would be erased.
275 struct AuxillaryAllocas {
276 SmallVector<llvm::Instruction *, 1> AuxAllocas;
277 bool used = false;
279 // Records a potentially unused instruction to be erased later.
280 void Add(llvm::AllocaInst *Alloca) { AuxAllocas.push_back(Alloca); }
282 // Mark all recorded instructions as used. These will not be erased later.
283 void MarkUsed() {
284 used = true;
285 AuxAllocas.clear();
288 ~AuxillaryAllocas() {
289 if (used)
290 return;
291 llvm::SetVector<llvm::Instruction *> Uses;
292 for (auto *Inst : llvm::reverse(AuxAllocas))
293 CollectUses(Inst, Uses);
294 // Delete uses in the reverse order of insertion.
295 for (auto *I : llvm::reverse(Uses))
296 I->eraseFromParent();
299 private:
300 void CollectUses(llvm::Instruction *I,
301 llvm::SetVector<llvm::Instruction *> &Uses) {
302 if (!I || !Uses.insert(I))
303 return;
304 for (auto *User : I->users())
305 CollectUses(cast<llvm::Instruction>(User), Uses);
308 mutable struct AuxillaryAllocas *AuxAllocas;
310 AuxillaryAllocas &getAuxillaryAllocas() {
311 if (!AuxAllocas) {
312 AuxAllocas = new struct AuxillaryAllocas();
314 return *AuxAllocas;
317 /// The number of fixups required by enclosing scopes (not including
318 /// this one). If this is the top cleanup scope, all the fixups
319 /// from this index onwards belong to this scope.
320 unsigned FixupDepth;
322 struct ExtInfo &getExtInfo() {
323 if (!ExtInfo) ExtInfo = new struct ExtInfo();
324 return *ExtInfo;
327 const struct ExtInfo &getExtInfo() const {
328 if (!ExtInfo) ExtInfo = new struct ExtInfo();
329 return *ExtInfo;
332 public:
333 /// Gets the size required for a lazy cleanup scope with the given
334 /// cleanup-data requirements.
335 static size_t getSizeForCleanupSize(size_t Size) {
336 return sizeof(EHCleanupScope) + Size;
339 size_t getAllocatedSize() const {
340 return sizeof(EHCleanupScope) + CleanupBits.CleanupSize;
343 EHCleanupScope(bool isNormal, bool isEH, unsigned cleanupSize,
344 unsigned fixupDepth,
345 EHScopeStack::stable_iterator enclosingNormal,
346 EHScopeStack::stable_iterator enclosingEH)
347 : EHScope(EHScope::Cleanup, enclosingEH),
348 EnclosingNormal(enclosingNormal), NormalBlock(nullptr),
349 ActiveFlag(Address::invalid()), ExtInfo(nullptr), AuxAllocas(nullptr),
350 FixupDepth(fixupDepth) {
351 CleanupBits.IsNormalCleanup = isNormal;
352 CleanupBits.IsEHCleanup = isEH;
353 CleanupBits.IsActive = true;
354 CleanupBits.IsLifetimeMarker = false;
355 CleanupBits.TestFlagInNormalCleanup = false;
356 CleanupBits.TestFlagInEHCleanup = false;
357 CleanupBits.CleanupSize = cleanupSize;
359 assert(CleanupBits.CleanupSize == cleanupSize && "cleanup size overflow");
362 void Destroy() {
363 if (AuxAllocas)
364 delete AuxAllocas;
365 delete ExtInfo;
367 void AddAuxAllocas(llvm::SmallVector<llvm::AllocaInst *> Allocas) {
368 for (auto *Alloca : Allocas)
369 getAuxillaryAllocas().Add(Alloca);
371 void MarkEmitted() { getAuxillaryAllocas().MarkUsed(); }
372 // Objects of EHCleanupScope are not destructed. Use Destroy().
373 ~EHCleanupScope() = delete;
375 bool isNormalCleanup() const { return CleanupBits.IsNormalCleanup; }
376 llvm::BasicBlock *getNormalBlock() const { return NormalBlock; }
377 void setNormalBlock(llvm::BasicBlock *BB) { NormalBlock = BB; }
379 bool isEHCleanup() const { return CleanupBits.IsEHCleanup; }
381 bool isActive() const { return CleanupBits.IsActive; }
382 void setActive(bool A) { CleanupBits.IsActive = A; }
384 bool isLifetimeMarker() const { return CleanupBits.IsLifetimeMarker; }
385 void setLifetimeMarker() { CleanupBits.IsLifetimeMarker = true; }
387 bool hasActiveFlag() const { return ActiveFlag.isValid(); }
388 Address getActiveFlag() const {
389 return ActiveFlag;
391 void setActiveFlag(RawAddress Var) {
392 assert(Var.getAlignment().isOne());
393 ActiveFlag = Var;
396 void setTestFlagInNormalCleanup() {
397 CleanupBits.TestFlagInNormalCleanup = true;
399 bool shouldTestFlagInNormalCleanup() const {
400 return CleanupBits.TestFlagInNormalCleanup;
403 void setTestFlagInEHCleanup() {
404 CleanupBits.TestFlagInEHCleanup = true;
406 bool shouldTestFlagInEHCleanup() const {
407 return CleanupBits.TestFlagInEHCleanup;
410 unsigned getFixupDepth() const { return FixupDepth; }
411 EHScopeStack::stable_iterator getEnclosingNormalCleanup() const {
412 return EnclosingNormal;
415 size_t getCleanupSize() const { return CleanupBits.CleanupSize; }
416 void *getCleanupBuffer() { return this + 1; }
418 EHScopeStack::Cleanup *getCleanup() {
419 return reinterpret_cast<EHScopeStack::Cleanup*>(getCleanupBuffer());
422 /// True if this cleanup scope has any branch-afters or branch-throughs.
423 bool hasBranches() const { return ExtInfo && !ExtInfo->Branches.empty(); }
425 /// Add a branch-after to this cleanup scope. A branch-after is a
426 /// branch from a point protected by this (normal) cleanup to a
427 /// point in the normal cleanup scope immediately containing it.
428 /// For example,
429 /// for (;;) { A a; break; }
430 /// contains a branch-after.
432 /// Branch-afters each have their own destination out of the
433 /// cleanup, guaranteed distinct from anything else threaded through
434 /// it. Therefore branch-afters usually force a switch after the
435 /// cleanup.
436 void addBranchAfter(llvm::ConstantInt *Index,
437 llvm::BasicBlock *Block) {
438 struct ExtInfo &ExtInfo = getExtInfo();
439 if (ExtInfo.Branches.insert(Block).second)
440 ExtInfo.BranchAfters.push_back(std::make_pair(Block, Index));
443 /// Return the number of unique branch-afters on this scope.
444 unsigned getNumBranchAfters() const {
445 return ExtInfo ? ExtInfo->BranchAfters.size() : 0;
448 llvm::BasicBlock *getBranchAfterBlock(unsigned I) const {
449 assert(I < getNumBranchAfters());
450 return ExtInfo->BranchAfters[I].first;
453 llvm::ConstantInt *getBranchAfterIndex(unsigned I) const {
454 assert(I < getNumBranchAfters());
455 return ExtInfo->BranchAfters[I].second;
458 /// Add a branch-through to this cleanup scope. A branch-through is
459 /// a branch from a scope protected by this (normal) cleanup to an
460 /// enclosing scope other than the immediately-enclosing normal
461 /// cleanup scope.
463 /// In the following example, the branch through B's scope is a
464 /// branch-through, while the branch through A's scope is a
465 /// branch-after:
466 /// for (;;) { A a; B b; break; }
468 /// All branch-throughs have a common destination out of the
469 /// cleanup, one possibly shared with the fall-through. Therefore
470 /// branch-throughs usually don't force a switch after the cleanup.
472 /// \return true if the branch-through was new to this scope
473 bool addBranchThrough(llvm::BasicBlock *Block) {
474 return getExtInfo().Branches.insert(Block).second;
477 /// Determines if this cleanup scope has any branch throughs.
478 bool hasBranchThroughs() const {
479 if (!ExtInfo) return false;
480 return (ExtInfo->BranchAfters.size() != ExtInfo->Branches.size());
483 static bool classof(const EHScope *Scope) {
484 return (Scope->getKind() == Cleanup);
487 // NOTE: there's a bunch of different data classes tacked on after an
488 // EHCleanupScope. It is asserted (in EHScopeStack::pushCleanup*) that
489 // they don't require greater alignment than ScopeStackAlignment. So,
490 // EHCleanupScope ought to have alignment equal to that -- not more
491 // (would be misaligned by the stack allocator), and not less (would
492 // break the appended classes).
493 static_assert(alignof(EHCleanupScope) == EHScopeStack::ScopeStackAlignment,
494 "EHCleanupScope expected alignment");
496 /// An exceptions scope which filters exceptions thrown through it.
497 /// Only exceptions matching the filter types will be permitted to be
498 /// thrown.
500 /// This is used to implement C++ exception specifications.
501 class EHFilterScope : public EHScope {
502 // Essentially ends in a flexible array member:
503 // llvm::Value *FilterTypes[0];
505 llvm::Value **getFilters() {
506 return reinterpret_cast<llvm::Value**>(this+1);
509 llvm::Value * const *getFilters() const {
510 return reinterpret_cast<llvm::Value* const *>(this+1);
513 public:
514 EHFilterScope(unsigned numFilters)
515 : EHScope(Filter, EHScopeStack::stable_end()) {
516 FilterBits.NumFilters = numFilters;
517 assert(FilterBits.NumFilters == numFilters && "NumFilters overflow");
520 static size_t getSizeForNumFilters(unsigned numFilters) {
521 return sizeof(EHFilterScope) + numFilters * sizeof(llvm::Value*);
524 unsigned getNumFilters() const { return FilterBits.NumFilters; }
526 void setFilter(unsigned i, llvm::Value *filterValue) {
527 assert(i < getNumFilters());
528 getFilters()[i] = filterValue;
531 llvm::Value *getFilter(unsigned i) const {
532 assert(i < getNumFilters());
533 return getFilters()[i];
536 static bool classof(const EHScope *scope) {
537 return scope->getKind() == Filter;
541 /// An exceptions scope which calls std::terminate if any exception
542 /// reaches it.
543 class EHTerminateScope : public EHScope {
544 public:
545 EHTerminateScope(EHScopeStack::stable_iterator enclosingEHScope)
546 : EHScope(Terminate, enclosingEHScope) {}
547 static size_t getSize() { return sizeof(EHTerminateScope); }
549 static bool classof(const EHScope *scope) {
550 return scope->getKind() == Terminate;
554 /// A non-stable pointer into the scope stack.
555 class EHScopeStack::iterator {
556 char *Ptr;
558 friend class EHScopeStack;
559 explicit iterator(char *Ptr) : Ptr(Ptr) {}
561 public:
562 iterator() : Ptr(nullptr) {}
564 EHScope *get() const {
565 return reinterpret_cast<EHScope*>(Ptr);
568 EHScope *operator->() const { return get(); }
569 EHScope &operator*() const { return *get(); }
571 iterator &operator++() {
572 size_t Size;
573 switch (get()->getKind()) {
574 case EHScope::Catch:
575 Size = EHCatchScope::getSizeForNumHandlers(
576 static_cast<const EHCatchScope *>(get())->getNumHandlers());
577 break;
579 case EHScope::Filter:
580 Size = EHFilterScope::getSizeForNumFilters(
581 static_cast<const EHFilterScope *>(get())->getNumFilters());
582 break;
584 case EHScope::Cleanup:
585 Size = static_cast<const EHCleanupScope *>(get())->getAllocatedSize();
586 break;
588 case EHScope::Terminate:
589 Size = EHTerminateScope::getSize();
590 break;
592 Ptr += llvm::alignTo(Size, ScopeStackAlignment);
593 return *this;
596 iterator next() {
597 iterator copy = *this;
598 ++copy;
599 return copy;
602 iterator operator++(int) {
603 iterator copy = *this;
604 operator++();
605 return copy;
608 bool encloses(iterator other) const { return Ptr >= other.Ptr; }
609 bool strictlyEncloses(iterator other) const { return Ptr > other.Ptr; }
611 bool operator==(iterator other) const { return Ptr == other.Ptr; }
612 bool operator!=(iterator other) const { return Ptr != other.Ptr; }
615 inline EHScopeStack::iterator EHScopeStack::begin() const {
616 return iterator(StartOfData);
619 inline EHScopeStack::iterator EHScopeStack::end() const {
620 return iterator(EndOfBuffer);
623 inline void EHScopeStack::popCatch() {
624 assert(!empty() && "popping exception stack when not empty");
626 EHCatchScope &scope = cast<EHCatchScope>(*begin());
627 InnermostEHScope = scope.getEnclosingEHScope();
628 deallocate(EHCatchScope::getSizeForNumHandlers(scope.getNumHandlers()));
631 inline void EHScopeStack::popTerminate() {
632 assert(!empty() && "popping exception stack when not empty");
634 EHTerminateScope &scope = cast<EHTerminateScope>(*begin());
635 InnermostEHScope = scope.getEnclosingEHScope();
636 deallocate(EHTerminateScope::getSize());
639 inline EHScopeStack::iterator EHScopeStack::find(stable_iterator sp) const {
640 assert(sp.isValid() && "finding invalid savepoint");
641 assert(sp.Size <= stable_begin().Size && "finding savepoint after pop");
642 return iterator(EndOfBuffer - sp.Size);
645 inline EHScopeStack::stable_iterator
646 EHScopeStack::stabilize(iterator ir) const {
647 assert(StartOfData <= ir.Ptr && ir.Ptr <= EndOfBuffer);
648 return stable_iterator(EndOfBuffer - ir.Ptr);
651 /// The exceptions personality for a function.
652 struct EHPersonality {
653 const char *PersonalityFn;
655 // If this is non-null, this personality requires a non-standard
656 // function for rethrowing an exception after a catchall cleanup.
657 // This function must have prototype void(void*).
658 const char *CatchallRethrowFn;
660 static const EHPersonality &get(CodeGenModule &CGM, const FunctionDecl *FD);
661 static const EHPersonality &get(CodeGenFunction &CGF);
663 static const EHPersonality GNU_C;
664 static const EHPersonality GNU_C_SJLJ;
665 static const EHPersonality GNU_C_SEH;
666 static const EHPersonality GNU_ObjC;
667 static const EHPersonality GNU_ObjC_SJLJ;
668 static const EHPersonality GNU_ObjC_SEH;
669 static const EHPersonality GNUstep_ObjC;
670 static const EHPersonality GNU_ObjCXX;
671 static const EHPersonality NeXT_ObjC;
672 static const EHPersonality GNU_CPlusPlus;
673 static const EHPersonality GNU_CPlusPlus_SJLJ;
674 static const EHPersonality GNU_CPlusPlus_SEH;
675 static const EHPersonality MSVC_except_handler;
676 static const EHPersonality MSVC_C_specific_handler;
677 static const EHPersonality MSVC_CxxFrameHandler3;
678 static const EHPersonality GNU_Wasm_CPlusPlus;
679 static const EHPersonality XL_CPlusPlus;
680 static const EHPersonality ZOS_CPlusPlus;
682 /// Does this personality use landingpads or the family of pad instructions
683 /// designed to form funclets?
684 bool usesFuncletPads() const {
685 return isMSVCPersonality() || isWasmPersonality();
688 bool isMSVCPersonality() const {
689 return this == &MSVC_except_handler || this == &MSVC_C_specific_handler ||
690 this == &MSVC_CxxFrameHandler3;
693 bool isWasmPersonality() const { return this == &GNU_Wasm_CPlusPlus; }
695 bool isMSVCXXPersonality() const { return this == &MSVC_CxxFrameHandler3; }
700 #endif