//===--- Program.cpp - Bytecode for the constexpr VM ------------*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
10 #include "ByteCodeStmtGen.h"
16 #include "clang/AST/Decl.h"
17 #include "clang/AST/DeclCXX.h"
19 using namespace clang
;
20 using namespace clang::interp
;
22 unsigned Program::getOrCreateNativePointer(const void *Ptr
) {
23 auto It
= NativePointerIndices
.find(Ptr
);
24 if (It
!= NativePointerIndices
.end())
27 unsigned Idx
= NativePointers
.size();
28 NativePointers
.push_back(Ptr
);
29 NativePointerIndices
[Ptr
] = Idx
;
33 const void *Program::getNativePointer(unsigned Idx
) {
34 return NativePointers
[Idx
];
37 unsigned Program::createGlobalString(const StringLiteral
*S
) {
38 const size_t CharWidth
= S
->getCharByteWidth();
39 const size_t BitWidth
= CharWidth
* Ctx
.getCharBit();
53 llvm_unreachable("unsupported character width");
56 // Create a descriptor for the string.
58 allocateDescriptor(S
, CharType
, std::nullopt
, S
->getLength() + 1,
60 /*isTemporary=*/false,
63 // Allocate storage for the string.
64 // The byte length does not include the null terminator.
65 unsigned I
= Globals
.size();
66 unsigned Sz
= Desc
->getAllocSize();
67 auto *G
= new (Allocator
, Sz
) Global(Desc
, /*isStatic=*/true,
69 G
->block()->invokeCtor();
72 // Construct the string in storage.
73 const Pointer
Ptr(G
->block());
74 for (unsigned I
= 0, N
= S
->getLength(); I
<= N
; ++I
) {
75 Pointer Field
= Ptr
.atIndex(I
).narrow();
76 const uint32_t CodePoint
= I
== N
? 0 : S
->getCodeUnit(I
);
79 using T
= PrimConv
<PT_Sint8
>::T
;
80 Field
.deref
<T
>() = T::from(CodePoint
, BitWidth
);
84 using T
= PrimConv
<PT_Uint16
>::T
;
85 Field
.deref
<T
>() = T::from(CodePoint
, BitWidth
);
89 using T
= PrimConv
<PT_Uint32
>::T
;
90 Field
.deref
<T
>() = T::from(CodePoint
, BitWidth
);
94 llvm_unreachable("unsupported character type");
100 Pointer
Program::getPtrGlobal(unsigned Idx
) {
101 assert(Idx
< Globals
.size());
102 return Pointer(Globals
[Idx
]->block());
105 std::optional
<unsigned> Program::getGlobal(const ValueDecl
*VD
) {
106 auto It
= GlobalIndices
.find(VD
);
107 if (It
!= GlobalIndices
.end())
110 // Find any previous declarations which were already evaluated.
111 std::optional
<unsigned> Index
;
112 for (const Decl
*P
= VD
; P
; P
= P
->getPreviousDecl()) {
113 auto It
= GlobalIndices
.find(P
);
114 if (It
!= GlobalIndices
.end()) {
120 // Map the decl to the existing index.
122 GlobalIndices
[VD
] = *Index
;
129 std::optional
<unsigned> Program::getOrCreateGlobal(const ValueDecl
*VD
,
131 if (auto Idx
= getGlobal(VD
))
134 if (auto Idx
= createGlobal(VD
, Init
)) {
135 GlobalIndices
[VD
] = *Idx
;
141 std::optional
<unsigned> Program::getOrCreateDummy(const ValueDecl
*VD
) {
142 // Dedup blocks since they are immutable and pointers cannot be compared.
143 if (auto It
= DummyParams
.find(VD
); It
!= DummyParams
.end())
146 // Create dummy descriptor.
147 Descriptor
*Desc
= allocateDescriptor(VD
, std::nullopt
);
148 // Allocate a block for storage.
149 unsigned I
= Globals
.size();
151 auto *G
= new (Allocator
, Desc
->getAllocSize())
152 Global(getCurrentDecl(), Desc
, /*IsStatic=*/true, /*IsExtern=*/false);
153 G
->block()->invokeCtor();
155 Globals
.push_back(G
);
160 std::optional
<unsigned> Program::createGlobal(const ValueDecl
*VD
,
162 assert(!getGlobal(VD
));
163 bool IsStatic
, IsExtern
;
164 if (const auto *Var
= dyn_cast
<VarDecl
>(VD
)) {
165 IsStatic
= Context::shouldBeGloballyIndexed(VD
);
166 IsExtern
= !Var
->getAnyInitializer();
167 } else if (isa
<UnnamedGlobalConstantDecl
>(VD
)) {
174 if (auto Idx
= createGlobal(VD
, VD
->getType(), IsStatic
, IsExtern
, Init
)) {
175 for (const Decl
*P
= VD
; P
; P
= P
->getPreviousDecl())
176 GlobalIndices
[P
] = *Idx
;
182 std::optional
<unsigned> Program::createGlobal(const Expr
*E
) {
183 return createGlobal(E
, E
->getType(), /*isStatic=*/true, /*isExtern=*/false);
186 std::optional
<unsigned> Program::createGlobal(const DeclTy
&D
, QualType Ty
,
187 bool IsStatic
, bool IsExtern
,
189 // Create a descriptor for the global.
191 const bool IsConst
= Ty
.isConstQualified();
192 const bool IsTemporary
= D
.dyn_cast
<const Expr
*>();
193 if (auto T
= Ctx
.classify(Ty
)) {
194 Desc
= createDescriptor(D
, *T
, std::nullopt
, IsConst
, IsTemporary
);
196 Desc
= createDescriptor(D
, Ty
.getTypePtr(), std::nullopt
, IsConst
,
202 // Allocate a block for storage.
203 unsigned I
= Globals
.size();
205 auto *G
= new (Allocator
, Desc
->getAllocSize())
206 Global(getCurrentDecl(), Desc
, IsStatic
, IsExtern
);
207 G
->block()->invokeCtor();
209 Globals
.push_back(G
);
214 Function
*Program::getFunction(const FunctionDecl
*F
) {
215 F
= F
->getCanonicalDecl();
217 auto It
= Funcs
.find(F
);
218 return It
== Funcs
.end() ? nullptr : It
->second
.get();
221 Record
*Program::getOrCreateRecord(const RecordDecl
*RD
) {
222 // Use the actual definition as a key.
223 RD
= RD
->getDefinition();
227 // Deduplicate records.
228 if (auto It
= Records
.find(RD
); It
!= Records
.end())
231 // We insert nullptr now and replace that later, so recursive calls
232 // to this function with the same RecordDecl don't run into
233 // infinite recursion.
234 Records
.insert({RD
, nullptr});
236 // Number of bytes required by fields and base classes.
237 unsigned BaseSize
= 0;
238 // Number of bytes required by virtual base.
239 unsigned VirtSize
= 0;
241 // Helper to get a base descriptor.
242 auto GetBaseDesc
= [this](const RecordDecl
*BD
, Record
*BR
) -> Descriptor
* {
245 return allocateDescriptor(BD
, BR
, std::nullopt
, /*isConst=*/false,
246 /*isTemporary=*/false,
247 /*isMutable=*/false);
250 // Reserve space for base classes.
251 Record::BaseList Bases
;
252 Record::VirtualBaseList VirtBases
;
253 if (auto *CD
= dyn_cast
<CXXRecordDecl
>(RD
)) {
254 for (const CXXBaseSpecifier
&Spec
: CD
->bases()) {
255 if (Spec
.isVirtual())
258 const RecordDecl
*BD
= Spec
.getType()->castAs
<RecordType
>()->getDecl();
259 Record
*BR
= getOrCreateRecord(BD
);
260 if (Descriptor
*Desc
= GetBaseDesc(BD
, BR
)) {
261 BaseSize
+= align(sizeof(InlineDescriptor
));
262 Bases
.push_back({BD
, BaseSize
, Desc
, BR
});
263 BaseSize
+= align(BR
->getSize());
269 for (const CXXBaseSpecifier
&Spec
: CD
->vbases()) {
270 const RecordDecl
*BD
= Spec
.getType()->castAs
<RecordType
>()->getDecl();
271 Record
*BR
= getOrCreateRecord(BD
);
273 if (Descriptor
*Desc
= GetBaseDesc(BD
, BR
)) {
274 VirtSize
+= align(sizeof(InlineDescriptor
));
275 VirtBases
.push_back({BD
, VirtSize
, Desc
, BR
});
276 VirtSize
+= align(BR
->getSize());
283 // Reserve space for fields.
284 Record::FieldList Fields
;
285 for (const FieldDecl
*FD
: RD
->fields()) {
286 // Reserve space for the field's descriptor and the offset.
287 BaseSize
+= align(sizeof(InlineDescriptor
));
289 // Classify the field and add its metadata.
290 QualType FT
= FD
->getType();
291 const bool IsConst
= FT
.isConstQualified();
292 const bool IsMutable
= FD
->isMutable();
294 if (std::optional
<PrimType
> T
= Ctx
.classify(FT
)) {
295 Desc
= createDescriptor(FD
, *T
, std::nullopt
, IsConst
,
296 /*isTemporary=*/false, IsMutable
);
298 Desc
= createDescriptor(FD
, FT
.getTypePtr(), std::nullopt
, IsConst
,
299 /*isTemporary=*/false, IsMutable
);
303 Fields
.push_back({FD
, BaseSize
, Desc
});
304 BaseSize
+= align(Desc
->getAllocSize());
307 Record
*R
= new (Allocator
) Record(RD
, std::move(Bases
), std::move(Fields
),
308 std::move(VirtBases
), VirtSize
, BaseSize
);
313 Descriptor
*Program::createDescriptor(const DeclTy
&D
, const Type
*Ty
,
314 Descriptor::MetadataSize MDSize
,
315 bool IsConst
, bool IsTemporary
,
316 bool IsMutable
, const Expr
*Init
) {
317 // Classes and structures.
318 if (auto *RT
= Ty
->getAs
<RecordType
>()) {
319 if (auto *Record
= getOrCreateRecord(RT
->getDecl()))
320 return allocateDescriptor(D
, Record
, MDSize
, IsConst
, IsTemporary
,
325 if (auto ArrayType
= Ty
->getAsArrayTypeUnsafe()) {
326 QualType ElemTy
= ArrayType
->getElementType();
327 // Array of well-known bounds.
328 if (auto CAT
= dyn_cast
<ConstantArrayType
>(ArrayType
)) {
329 size_t NumElems
= CAT
->getSize().getZExtValue();
330 if (std::optional
<PrimType
> T
= Ctx
.classify(ElemTy
)) {
331 // Arrays of primitives.
332 unsigned ElemSize
= primSize(*T
);
333 if (std::numeric_limits
<unsigned>::max() / ElemSize
<= NumElems
) {
336 return allocateDescriptor(D
, *T
, MDSize
, NumElems
, IsConst
, IsTemporary
,
339 // Arrays of composites. In this case, the array is a list of pointers,
340 // followed by the actual elements.
341 Descriptor
*ElemDesc
= createDescriptor(
342 D
, ElemTy
.getTypePtr(), std::nullopt
, IsConst
, IsTemporary
);
346 ElemDesc
->getAllocSize() + sizeof(InlineDescriptor
);
347 if (std::numeric_limits
<unsigned>::max() / ElemSize
<= NumElems
)
349 return allocateDescriptor(D
, ElemDesc
, MDSize
, NumElems
, IsConst
,
350 IsTemporary
, IsMutable
);
354 // Array of unknown bounds - cannot be accessed and pointer arithmetic
355 // is forbidden on pointers to such objects.
356 if (isa
<IncompleteArrayType
>(ArrayType
)) {
357 if (std::optional
<PrimType
> T
= Ctx
.classify(ElemTy
)) {
358 return allocateDescriptor(D
, *T
, IsTemporary
,
359 Descriptor::UnknownSize
{});
361 Descriptor
*Desc
= createDescriptor(D
, ElemTy
.getTypePtr(), MDSize
,
362 IsConst
, IsTemporary
);
365 return allocateDescriptor(D
, Desc
, IsTemporary
,
366 Descriptor::UnknownSize
{});
372 if (auto *AT
= Ty
->getAs
<AtomicType
>()) {
373 const Type
*InnerTy
= AT
->getValueType().getTypePtr();
374 return createDescriptor(D
, InnerTy
, MDSize
, IsConst
, IsTemporary
,
378 // Complex types - represented as arrays of elements.
379 if (auto *CT
= Ty
->getAs
<ComplexType
>()) {
380 PrimType ElemTy
= *Ctx
.classify(CT
->getElementType());
381 return allocateDescriptor(D
, ElemTy
, MDSize
, 2, IsConst
, IsTemporary
,