//===--- Program.cpp - Bytecode for the constexpr VM ------------*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//

#include "Program.h"
#include "ByteCodeStmtGen.h"
#include "Context.h"
#include "Function.h"
#include "Integral.h"
#include "Opcode.h"
#include "PrimType.h"
#include "clang/AST/Decl.h"
#include "clang/AST/DeclCXX.h"

using namespace clang;
using namespace clang::interp;

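// Returns a stable index for a host pointer, interning it on first use so the
// same pointer always maps to the same index.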
unsigned Program::getOrCreateNativePointer(const void *Ptr) {
  auto It = NativePointerIndices.find(Ptr);
  if (It != NativePointerIndices.end())
    return It->second;

  unsigned Idx = NativePointers.size();
  NativePointers.push_back(Ptr);
  NativePointerIndices[Ptr] = Idx;
  return Idx;
}

const void *Program::getNativePointer(unsigned Idx) {
  return NativePointers[Idx];
}

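// Creates a global block for a string literal and writes each code unit plus
// the implicit null terminator into it, returning the global's index.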
unsigned Program::createGlobalString(const StringLiteral *S) {
  const size_t CharWidth = S->getCharByteWidth();
  const size_t BitWidth = CharWidth * Ctx.getCharBit();

  PrimType CharType;
  switch (CharWidth) {
  case 1:
    CharType = PT_Sint8;
    break;
  case 2:
    CharType = PT_Uint16;
    break;
  case 4:
    CharType = PT_Uint32;
    break;
  default:
    llvm_unreachable("unsupported character width");
  }

  // Create a descriptor for the string.
  Descriptor *Desc =
      allocateDescriptor(S, CharType, std::nullopt, S->getLength() + 1,
                         /*isConst=*/true,
                         /*isTemporary=*/false,
                         /*isMutable=*/false);

  // Allocate storage for the string.
  // The byte length does not include the null terminator.
  unsigned I = Globals.size();
  unsigned Sz = Desc->getAllocSize();
  auto *G = new (Allocator, Sz) Global(Desc, /*isStatic=*/true,
                                       /*isExtern=*/false);
  G->block()->invokeCtor();
  Globals.push_back(G);

  // Construct the string in storage.
  const Pointer Ptr(G->block());
  for (unsigned I = 0, N = S->getLength(); I <= N; ++I) {
    Pointer Field = Ptr.atIndex(I).narrow();
    const uint32_t CodePoint = I == N ? 0 : S->getCodeUnit(I);
    switch (CharType) {
    case PT_Sint8: {
      using T = PrimConv<PT_Sint8>::T;
      Field.deref<T>() = T::from(CodePoint, BitWidth);
      break;
    }
    case PT_Uint16: {
      using T = PrimConv<PT_Uint16>::T;
      Field.deref<T>() = T::from(CodePoint, BitWidth);
      break;
    }
    case PT_Uint32: {
      using T = PrimConv<PT_Uint32>::T;
      Field.deref<T>() = T::from(CodePoint, BitWidth);
      break;
    }
    default:
      llvm_unreachable("unsupported character type");
    }
  }
  return I;
}

Pointer Program::getPtrGlobal(unsigned Idx) {
  assert(Idx < Globals.size());
  return Pointer(Globals[Idx]->block());
}

std::optional<unsigned> Program::getGlobal(const ValueDecl *VD) {
  auto It = GlobalIndices.find(VD);
  if (It != GlobalIndices.end())
    return It->second;

  // Find any previous declarations which were already evaluated.
  std::optional<unsigned> Index;
  for (const Decl *P = VD; P; P = P->getPreviousDecl()) {
    auto It = GlobalIndices.find(P);
    if (It != GlobalIndices.end()) {
      Index = It->second;
      break;
    }
  }

  // Map the decl to the existing index.
  if (Index) {
    GlobalIndices[VD] = *Index;
    return std::nullopt;
  }

  return Index;
}

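// Returns the index of the global for VD, creating it from Init if it has not
// been evaluated yet.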
std::optional<unsigned> Program::getOrCreateGlobal(const ValueDecl *VD,
                                                   const Expr *Init) {
  if (auto Idx = getGlobal(VD))
    return Idx;

  if (auto Idx = createGlobal(VD, Init)) {
    GlobalIndices[VD] = *Idx;
    return Idx;
  }
  return std::nullopt;
}

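// Returns the index of a dummy global backing VD, creating and caching one if
// necessary so repeated requests reuse the same block.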
std::optional<unsigned> Program::getOrCreateDummy(const ValueDecl *VD) {
  // Dedup blocks since they are immutable and pointers cannot be compared.
  if (auto It = DummyParams.find(VD); It != DummyParams.end())
    return It->second;

  // Create dummy descriptor.
  Descriptor *Desc = allocateDescriptor(VD, std::nullopt);
  // Allocate a block for storage.
  unsigned I = Globals.size();

  auto *G = new (Allocator, Desc->getAllocSize())
      Global(getCurrentDecl(), Desc, /*IsStatic=*/true, /*IsExtern=*/false);
  G->block()->invokeCtor();

  Globals.push_back(G);
  DummyParams[VD] = I;
  return I;
}

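// Creates a global for VD, deriving its static/extern flags from the kind of
// declaration, and maps every redeclaration of VD to the new index.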
std::optional<unsigned> Program::createGlobal(const ValueDecl *VD,
                                              const Expr *Init) {
  assert(!getGlobal(VD));
  bool IsStatic, IsExtern;
  if (const auto *Var = dyn_cast<VarDecl>(VD)) {
    IsStatic = Context::shouldBeGloballyIndexed(VD);
    IsExtern = !Var->getAnyInitializer();
  } else if (isa<UnnamedGlobalConstantDecl>(VD)) {
    IsStatic = true;
    IsExtern = false;
  } else {
    IsStatic = false;
    IsExtern = true;
  }
  if (auto Idx = createGlobal(VD, VD->getType(), IsStatic, IsExtern, Init)) {
    for (const Decl *P = VD; P; P = P->getPreviousDecl())
      GlobalIndices[P] = *Idx;
    return *Idx;
  }
  return std::nullopt;
}

std::optional<unsigned> Program::createGlobal(const Expr *E) {
  return createGlobal(E, E->getType(), /*isStatic=*/true, /*isExtern=*/false);
}

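// Worker shared by the overloads above: builds a descriptor for the global and
// allocates its backing block.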
std::optional<unsigned> Program::createGlobal(const DeclTy &D, QualType Ty,
                                              bool IsStatic, bool IsExtern,
                                              const Expr *Init) {
  // Create a descriptor for the global.
  Descriptor *Desc;
  const bool IsConst = Ty.isConstQualified();
  const bool IsTemporary = D.dyn_cast<const Expr *>();
  if (auto T = Ctx.classify(Ty)) {
    Desc = createDescriptor(D, *T, std::nullopt, IsConst, IsTemporary);
  } else {
    Desc = createDescriptor(D, Ty.getTypePtr(), std::nullopt, IsConst,
                            IsTemporary);
  }

  if (!Desc)
    return std::nullopt;

  // Allocate a block for storage.
  unsigned I = Globals.size();

  auto *G = new (Allocator, Desc->getAllocSize())
      Global(getCurrentDecl(), Desc, IsStatic, IsExtern);
  G->block()->invokeCtor();

  Globals.push_back(G);

  return I;
}

Function *Program::getFunction(const FunctionDecl *F) {
  F = F->getCanonicalDecl();
  assert(F);
  auto It = Funcs.find(F);
  return It == Funcs.end() ? nullptr : It->second.get();
}

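// Returns the interp::Record for RD, lazily computing its layout (non-virtual
// bases, virtual bases, then fields) the first time it is requested.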
Record *Program::getOrCreateRecord(const RecordDecl *RD) {
  // Use the actual definition as a key.
  RD = RD->getDefinition();
  if (!RD)
    return nullptr;

  // Deduplicate records.
  if (auto It = Records.find(RD); It != Records.end())
    return It->second;

  // We insert nullptr now and replace that later, so recursive calls
  // to this function with the same RecordDecl don't run into
  // infinite recursion.
  Records.insert({RD, nullptr});

  // Number of bytes required by fields and base classes.
  unsigned BaseSize = 0;
  // Number of bytes required by virtual bases.
  unsigned VirtSize = 0;

  // Helper to get a base descriptor.
  auto GetBaseDesc = [this](const RecordDecl *BD, Record *BR) -> Descriptor * {
    if (!BR)
      return nullptr;
    return allocateDescriptor(BD, BR, std::nullopt, /*isConst=*/false,
                              /*isTemporary=*/false,
                              /*isMutable=*/false);
  };

  // Reserve space for base classes.
  Record::BaseList Bases;
  Record::VirtualBaseList VirtBases;
  if (auto *CD = dyn_cast<CXXRecordDecl>(RD)) {
    for (const CXXBaseSpecifier &Spec : CD->bases()) {
      if (Spec.isVirtual())
        continue;

      const RecordDecl *BD = Spec.getType()->castAs<RecordType>()->getDecl();
      Record *BR = getOrCreateRecord(BD);
      if (Descriptor *Desc = GetBaseDesc(BD, BR)) {
        BaseSize += align(sizeof(InlineDescriptor));
        Bases.push_back({BD, BaseSize, Desc, BR});
        BaseSize += align(BR->getSize());
        continue;
      }
      return nullptr;
    }

    for (const CXXBaseSpecifier &Spec : CD->vbases()) {
      const RecordDecl *BD = Spec.getType()->castAs<RecordType>()->getDecl();
      Record *BR = getOrCreateRecord(BD);

      if (Descriptor *Desc = GetBaseDesc(BD, BR)) {
        VirtSize += align(sizeof(InlineDescriptor));
        VirtBases.push_back({BD, VirtSize, Desc, BR});
        VirtSize += align(BR->getSize());
        continue;
      }
      return nullptr;
    }
  }

  // Reserve space for fields.
  Record::FieldList Fields;
  for (const FieldDecl *FD : RD->fields()) {
    // Reserve space for the field's descriptor and the offset.
    BaseSize += align(sizeof(InlineDescriptor));

    // Classify the field and add its metadata.
    QualType FT = FD->getType();
    const bool IsConst = FT.isConstQualified();
    const bool IsMutable = FD->isMutable();
    Descriptor *Desc;
    if (std::optional<PrimType> T = Ctx.classify(FT)) {
      Desc = createDescriptor(FD, *T, std::nullopt, IsConst,
                              /*isTemporary=*/false, IsMutable);
    } else {
      Desc = createDescriptor(FD, FT.getTypePtr(), std::nullopt, IsConst,
                              /*isTemporary=*/false, IsMutable);
    }
    if (!Desc)
      return nullptr;
    Fields.push_back({FD, BaseSize, Desc});
    BaseSize += align(Desc->getAllocSize());
  }

  Record *R = new (Allocator) Record(RD, std::move(Bases), std::move(Fields),
                                     std::move(VirtBases), VirtSize, BaseSize);
  Records[RD] = R;
  return R;
}

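// Builds a descriptor for a composite type: records, arrays of known or
// unknown bounds, atomic types and complex types. Returns nullptr for types
// the interpreter cannot model.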
Descriptor *Program::createDescriptor(const DeclTy &D, const Type *Ty,
                                      Descriptor::MetadataSize MDSize,
                                      bool IsConst, bool IsTemporary,
                                      bool IsMutable, const Expr *Init) {
  // Classes and structures.
  if (auto *RT = Ty->getAs<RecordType>()) {
    if (auto *Record = getOrCreateRecord(RT->getDecl()))
      return allocateDescriptor(D, Record, MDSize, IsConst, IsTemporary,
                                IsMutable);
  }

  // Arrays.
  if (auto ArrayType = Ty->getAsArrayTypeUnsafe()) {
    QualType ElemTy = ArrayType->getElementType();
    // Array of well-known bounds.
    if (auto CAT = dyn_cast<ConstantArrayType>(ArrayType)) {
      size_t NumElems = CAT->getSize().getZExtValue();
      if (std::optional<PrimType> T = Ctx.classify(ElemTy)) {
        // Arrays of primitives.
        unsigned ElemSize = primSize(*T);
        if (std::numeric_limits<unsigned>::max() / ElemSize <= NumElems) {
          return {};
        }
        return allocateDescriptor(D, *T, MDSize, NumElems, IsConst, IsTemporary,
                                  IsMutable);
      } else {
        // Arrays of composites. In this case, the array is a list of pointers,
        // followed by the actual elements.
        Descriptor *ElemDesc = createDescriptor(
            D, ElemTy.getTypePtr(), std::nullopt, IsConst, IsTemporary);
        if (!ElemDesc)
          return nullptr;
        unsigned ElemSize =
            ElemDesc->getAllocSize() + sizeof(InlineDescriptor);
        if (std::numeric_limits<unsigned>::max() / ElemSize <= NumElems)
          return {};
        return allocateDescriptor(D, ElemDesc, MDSize, NumElems, IsConst,
                                  IsTemporary, IsMutable);
      }
    }

    // Array of unknown bounds - cannot be accessed and pointer arithmetic
    // is forbidden on pointers to such objects.
    if (isa<IncompleteArrayType>(ArrayType)) {
      if (std::optional<PrimType> T = Ctx.classify(ElemTy)) {
        return allocateDescriptor(D, *T, IsTemporary,
                                  Descriptor::UnknownSize{});
      } else {
        Descriptor *Desc = createDescriptor(D, ElemTy.getTypePtr(), MDSize,
                                            IsConst, IsTemporary);
        if (!Desc)
          return nullptr;
        return allocateDescriptor(D, Desc, IsTemporary,
                                  Descriptor::UnknownSize{});
      }
    }
  }

  // Atomic types.
  if (auto *AT = Ty->getAs<AtomicType>()) {
    const Type *InnerTy = AT->getValueType().getTypePtr();
    return createDescriptor(D, InnerTy, MDSize, IsConst, IsTemporary,
                            IsMutable);
  }

  // Complex types - represented as arrays of elements.
  if (auto *CT = Ty->getAs<ComplexType>()) {
    PrimType ElemTy = *Ctx.classify(CT->getElementType());
    return allocateDescriptor(D, ElemTy, MDSize, 2, IsConst, IsTemporary,
                              IsMutable);
  }

  return nullptr;
}