//===- ARMTargetStreamer.cpp - ARMTargetStreamer class --*- C++ -*---------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file implements the ARMTargetStreamer class.
//
//===----------------------------------------------------------------------===//
#include "MCTargetDesc/ARMMCTargetDesc.h"
#include "llvm/MC/ConstantPools.h"
#include "llvm/MC/MCAsmInfo.h"
#include "llvm/MC/MCContext.h"
#include "llvm/MC/MCExpr.h"
#include "llvm/MC/MCStreamer.h"
#include "llvm/MC/MCSubtargetInfo.h"
#include "llvm/Support/ARMBuildAttributes.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/TargetParser/Triple.h"
#include <climits>

using namespace llvm;
//
// ARMTargetStreamer Implementation
//
ARMTargetStreamer::ARMTargetStreamer(MCStreamer &S)
    : MCTargetStreamer(S), ConstantPools(new AssemblerConstantPools()) {}

ARMTargetStreamer::~ARMTargetStreamer() = default;
// The constant pool handling is shared by all ARMTargetStreamer
// implementations.
const MCExpr *ARMTargetStreamer::addConstantPoolEntry(const MCExpr *Expr,
                                                      SMLoc Loc) {
  return ConstantPools->addEntry(Streamer, Expr, 4, Loc);
}
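
// Illustrative note (behavior of callers, not of this file): the assembly
// parser lowers the "ldr" pseudo-instruction into a PC-relative load from a
// literal pool, e.g.
//   ldr r0, =0x12345678   @ becomes "ldr r0, <pool entry>" plus pool data
// and records the 4-byte constant here; a ".ltorg" directive then flushes
// the pool via emitCurrentConstantPool() below.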
void ARMTargetStreamer::emitCurrentConstantPool() {
  ConstantPools->emitForCurrentSection(Streamer);
  ConstantPools->clearCacheForCurrentSection(Streamer);
}
// finish() - write out any non-empty assembler constant pools.
void ARMTargetStreamer::emitConstantPools() {
  ConstantPools->emitAll(Streamer);
}
// reset() - Reset any state
void ARMTargetStreamer::reset() {}
void ARMTargetStreamer::emitInst(uint32_t Inst, char Suffix) {
  unsigned Size;
  char Buffer[4];
  const bool LittleEndian =
      getStreamer().getContext().getAsmInfo()->isLittleEndian();

  switch (Suffix) {
  case '\0':
    Size = 4;

    for (unsigned II = 0, IE = Size; II != IE; II++) {
      const unsigned I = LittleEndian ? (Size - II - 1) : II;
      Buffer[Size - II - 1] = uint8_t(Inst >> I * CHAR_BIT);
    }

    break;
  case 'n':
  case 'w':
    Size = (Suffix == 'n' ? 2 : 4);

    // Thumb wide instructions are emitted as a pair of 16-bit words of the
    // appropriate endianness.
    for (unsigned II = 0, IE = Size; II != IE; II = II + 2) {
      const unsigned I0 = LittleEndian ? II + 0 : II + 1;
      const unsigned I1 = LittleEndian ? II + 1 : II + 0;
      Buffer[Size - II - 2] = uint8_t(Inst >> I0 * CHAR_BIT);
      Buffer[Size - II - 1] = uint8_t(Inst >> I1 * CHAR_BIT);
    }

    break;
  default:
    llvm_unreachable("Invalid Suffix");
  }

  getStreamer().emitBytes(StringRef(Buffer, Size));
}
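
// Worked example (illustrative): ".inst.n 0x4770" (Thumb "bx lr") reaches
// this function with Suffix == 'n', so Size == 2; on a little-endian target
// the halfword is emitted least-significant byte first, i.e. 0x70 0x47.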
// The remaining callbacks should be handled separately by each
// streamer.
void ARMTargetStreamer::emitFnStart() {}
void ARMTargetStreamer::emitFnEnd() {}
void ARMTargetStreamer::emitCantUnwind() {}
void ARMTargetStreamer::emitPersonality(const MCSymbol *Personality) {}
void ARMTargetStreamer::emitPersonalityIndex(unsigned Index) {}
void ARMTargetStreamer::emitHandlerData() {}
void ARMTargetStreamer::emitSetFP(MCRegister FpReg, MCRegister SpReg,
                                  int64_t Offset) {}
void ARMTargetStreamer::emitMovSP(MCRegister Reg, int64_t Offset) {}
void ARMTargetStreamer::emitPad(int64_t Offset) {}
void ARMTargetStreamer::emitRegSave(const SmallVectorImpl<MCRegister> &RegList,
                                    bool isVector) {}
void ARMTargetStreamer::emitUnwindRaw(int64_t StackOffset,
                                      const SmallVectorImpl<uint8_t> &Opcodes) {
}
void ARMTargetStreamer::switchVendor(StringRef Vendor) {}
void ARMTargetStreamer::emitAttribute(unsigned Attribute, unsigned Value) {}
void ARMTargetStreamer::emitTextAttribute(unsigned Attribute,
                                          StringRef String) {}
void ARMTargetStreamer::emitIntTextAttribute(unsigned Attribute,
                                             unsigned IntValue,
                                             StringRef StringValue) {}
void ARMTargetStreamer::emitArch(ARM::ArchKind Arch) {}
void ARMTargetStreamer::emitArchExtension(uint64_t ArchExt) {}
void ARMTargetStreamer::emitObjectArch(ARM::ArchKind Arch) {}
void ARMTargetStreamer::emitFPU(ARM::FPUKind FPU) {}
void ARMTargetStreamer::finishAttributeSection() {}
void ARMTargetStreamer::annotateTLSDescriptorSequence(
    const MCSymbolRefExpr *SRE) {}
void ARMTargetStreamer::emitThumbSet(MCSymbol *Symbol, const MCExpr *Value) {}

void ARMTargetStreamer::emitARMWinCFIAllocStack(unsigned Size, bool Wide) {}
void ARMTargetStreamer::emitARMWinCFISaveRegMask(unsigned Mask, bool Wide) {}
void ARMTargetStreamer::emitARMWinCFISaveSP(unsigned Reg) {}
void ARMTargetStreamer::emitARMWinCFISaveFRegs(unsigned First, unsigned Last) {}
void ARMTargetStreamer::emitARMWinCFISaveLR(unsigned Offset) {}
void ARMTargetStreamer::emitARMWinCFINop(bool Wide) {}
void ARMTargetStreamer::emitARMWinCFIPrologEnd(bool Fragment) {}
void ARMTargetStreamer::emitARMWinCFIEpilogStart(unsigned Condition) {}
void ARMTargetStreamer::emitARMWinCFIEpilogEnd() {}
void ARMTargetStreamer::emitARMWinCFICustom(unsigned Opcode) {}
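
// Orientation note (summary, not code from this file): the EHABI unwinding
// callbacks above (emitFnStart, emitPersonality, ...) are overridden by the
// ELF target streamers, and the emitARMWinCFI* callbacks by the Windows COFF
// target streamer; the base class keeps them as no-ops so other object
// formats can safely ignore them.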
static ARMBuildAttrs::CPUArch getArchForCPU(const MCSubtargetInfo &STI) {
  if (STI.getCPU() == "xscale")
    return ARMBuildAttrs::v5TEJ;

  if (STI.hasFeature(ARM::HasV9_0aOps))
    return ARMBuildAttrs::v9_A;
  else if (STI.hasFeature(ARM::HasV8Ops)) {
    if (STI.hasFeature(ARM::FeatureRClass))
      return ARMBuildAttrs::v8_R;
    return ARMBuildAttrs::v8_A;
  } else if (STI.hasFeature(ARM::HasV8_1MMainlineOps))
    return ARMBuildAttrs::v8_1_M_Main;
  else if (STI.hasFeature(ARM::HasV8MMainlineOps))
    return ARMBuildAttrs::v8_M_Main;
  else if (STI.hasFeature(ARM::HasV7Ops)) {
    if (STI.hasFeature(ARM::FeatureMClass) && STI.hasFeature(ARM::FeatureDSP))
      return ARMBuildAttrs::v7E_M;
    return ARMBuildAttrs::v7;
  } else if (STI.hasFeature(ARM::HasV6T2Ops))
    return ARMBuildAttrs::v6T2;
  else if (STI.hasFeature(ARM::HasV8MBaselineOps))
    return ARMBuildAttrs::v8_M_Base;
  else if (STI.hasFeature(ARM::HasV6MOps))
    return ARMBuildAttrs::v6S_M;
  else if (STI.hasFeature(ARM::HasV6Ops))
    return ARMBuildAttrs::v6;
  else if (STI.hasFeature(ARM::HasV5TEOps))
    return ARMBuildAttrs::v5TE;
  else if (STI.hasFeature(ARM::HasV5TOps))
    return ARMBuildAttrs::v5T;
  else if (STI.hasFeature(ARM::HasV4TOps))
    return ARMBuildAttrs::v4T;

  return ARMBuildAttrs::v4;
}
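
// Example (illustrative): a Cortex-M4 subtarget has HasV7Ops, FeatureMClass
// and FeatureDSP, so it maps to v7E_M; a Cortex-A53 has HasV8Ops without
// FeatureRClass and maps to v8_A. The checks run from newest to oldest
// because each architecture implies the feature bits of its predecessors.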
static bool isV8M(const MCSubtargetInfo &STI) {
  // Note that v8M Baseline is a subset of v6T2!
  return (STI.hasFeature(ARM::HasV8MBaselineOps) &&
          !STI.hasFeature(ARM::HasV6T2Ops)) ||
         STI.hasFeature(ARM::HasV8MMainlineOps);
}
/// Emit the build attributes that only depend on the hardware that we expect
/// to be available, and not on the ABI, or any source-language choices.
void ARMTargetStreamer::emitTargetAttributes(const MCSubtargetInfo &STI) {
  switchVendor("aeabi");

  const StringRef CPUString = STI.getCPU();
  if (!CPUString.empty() && !CPUString.starts_with("generic")) {
    // FIXME: remove krait check when GNU tools support krait cpu
    if (STI.hasFeature(ARM::ProcKrait)) {
      emitTextAttribute(ARMBuildAttrs::CPU_name, "cortex-a9");
      // We consider krait as a "cortex-a9" + hwdiv CPU
      // Enable hwdiv through ".arch_extension idiv"
      if (STI.hasFeature(ARM::FeatureHWDivThumb) ||
          STI.hasFeature(ARM::FeatureHWDivARM))
        emitArchExtension(ARM::AEK_HWDIVTHUMB | ARM::AEK_HWDIVARM);
    } else {
      emitTextAttribute(ARMBuildAttrs::CPU_name, CPUString);
    }
  }

  emitAttribute(ARMBuildAttrs::CPU_arch, getArchForCPU(STI));
  if (STI.hasFeature(ARM::FeatureAClass)) {
    emitAttribute(ARMBuildAttrs::CPU_arch_profile,
                  ARMBuildAttrs::ApplicationProfile);
  } else if (STI.hasFeature(ARM::FeatureRClass)) {
    emitAttribute(ARMBuildAttrs::CPU_arch_profile,
                  ARMBuildAttrs::RealTimeProfile);
  } else if (STI.hasFeature(ARM::FeatureMClass)) {
    emitAttribute(ARMBuildAttrs::CPU_arch_profile,
                  ARMBuildAttrs::MicroControllerProfile);
  }
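
  // Background note (summary): Tag_CPU_arch_profile encodes the profile as a
  // character code ('A', 'R', or 'M'); it is deliberately omitted for
  // pre-profile architectures such as ARMv6, hence no final else branch.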
  emitAttribute(ARMBuildAttrs::ARM_ISA_use, STI.hasFeature(ARM::FeatureNoARM)
                                                ? ARMBuildAttrs::Not_Allowed
                                                : ARMBuildAttrs::Allowed);

  if (isV8M(STI)) {
    emitAttribute(ARMBuildAttrs::THUMB_ISA_use,
                  ARMBuildAttrs::AllowThumbDerived);
  } else if (STI.hasFeature(ARM::FeatureThumb2)) {
    emitAttribute(ARMBuildAttrs::THUMB_ISA_use, ARMBuildAttrs::AllowThumb32);
  } else if (STI.hasFeature(ARM::HasV4TOps)) {
    emitAttribute(ARMBuildAttrs::THUMB_ISA_use, ARMBuildAttrs::Allowed);
  }
  if (STI.hasFeature(ARM::FeatureNEON)) {
    /* NEON is not exactly a VFP architecture, but GAS emits one of
     * neon/neon-fp-armv8/neon-vfpv4/vfpv3/vfpv2 for .fpu parameters */
    if (STI.hasFeature(ARM::FeatureFPARMv8)) {
      if (STI.hasFeature(ARM::FeatureCrypto))
        emitFPU(ARM::FK_CRYPTO_NEON_FP_ARMV8);
      else
        emitFPU(ARM::FK_NEON_FP_ARMV8);
    } else if (STI.hasFeature(ARM::FeatureVFP4))
      emitFPU(ARM::FK_NEON_VFPV4);
    else
      emitFPU(STI.hasFeature(ARM::FeatureFP16) ? ARM::FK_NEON_FP16
                                               : ARM::FK_NEON);
    // Emit Tag_Advanced_SIMD_arch for ARMv8 architecture
    if (STI.hasFeature(ARM::HasV8Ops))
      emitAttribute(ARMBuildAttrs::Advanced_SIMD_arch,
                    STI.hasFeature(ARM::HasV8_1aOps)
                        ? ARMBuildAttrs::AllowNeonARMv8_1a
                        : ARMBuildAttrs::AllowNeonARMv8);
  } else if (STI.hasFeature(ARM::FeatureFPARMv8_D16_SP)) {
    // FPv5 and FP-ARMv8 have the same instructions, so are modeled as one
    // FPU, but there are two different names for it depending on the CPU.
    if (STI.hasFeature(ARM::FeatureD32))
      emitFPU(ARM::FK_FP_ARMV8);
    else {
      emitFPU(STI.hasFeature(ARM::FeatureFP64) ? ARM::FK_FPV5_D16
                                               : ARM::FK_FPV5_SP_D16);
      if (STI.hasFeature(ARM::HasMVEFloatOps))
        emitArchExtension(ARM::AEK_SIMD | ARM::AEK_DSP | ARM::AEK_FP);
    }
  } else if (STI.hasFeature(ARM::FeatureVFP4_D16_SP))
    emitFPU(STI.hasFeature(ARM::FeatureD32)
                ? ARM::FK_VFPV4
                : (STI.hasFeature(ARM::FeatureFP64) ? ARM::FK_VFPV4_D16
                                                    : ARM::FK_FPV4_SP_D16));
  else if (STI.hasFeature(ARM::FeatureVFP3_D16_SP))
    emitFPU(
        STI.hasFeature(ARM::FeatureD32)
            // +d32
            ? (STI.hasFeature(ARM::FeatureFP16) ? ARM::FK_VFPV3_FP16
                                                : ARM::FK_VFPV3)
            // -d32
            : (STI.hasFeature(ARM::FeatureFP64)
                   ? (STI.hasFeature(ARM::FeatureFP16) ? ARM::FK_VFPV3_D16_FP16
                                                       : ARM::FK_VFPV3_D16)
                   : (STI.hasFeature(ARM::FeatureFP16) ? ARM::FK_VFPV3XD_FP16
                                                       : ARM::FK_VFPV3XD)));
  else if (STI.hasFeature(ARM::FeatureVFP2_SP))
    emitFPU(ARM::FK_VFPV2);

  // ABI_HardFP_use attribute to indicate single precision FP.
  if (STI.hasFeature(ARM::FeatureVFP2_SP) && !STI.hasFeature(ARM::FeatureFP64))
    emitAttribute(ARMBuildAttrs::ABI_HardFP_use,
                  ARMBuildAttrs::HardFPSinglePrecision);
  if (STI.hasFeature(ARM::FeatureFP16))
    emitAttribute(ARMBuildAttrs::FP_HP_extension, ARMBuildAttrs::AllowHPFP);

  if (STI.hasFeature(ARM::FeatureMP))
    emitAttribute(ARMBuildAttrs::MPextension_use, ARMBuildAttrs::AllowMP);

  if (STI.hasFeature(ARM::HasMVEFloatOps))
    emitAttribute(ARMBuildAttrs::MVE_arch,
                  ARMBuildAttrs::AllowMVEIntegerAndFloat);
  else if (STI.hasFeature(ARM::HasMVEIntegerOps))
    emitAttribute(ARMBuildAttrs::MVE_arch, ARMBuildAttrs::AllowMVEInteger);
  // Hardware divide in ARM mode is part of base arch, starting from ARMv8.
  // If only Thumb hwdiv is present, it must also be in base arch (ARMv7-R/M).
  // It is not possible to produce DisallowDIV: if hwdiv is present in the base
  // arch, supplying -hwdiv downgrades the effective arch, via ClearImpliedBits.
  // AllowDIVExt is only emitted if hwdiv isn't available in the base arch;
  // otherwise, the default value (AllowDIVIfExists) applies.
  if (STI.hasFeature(ARM::FeatureHWDivARM) && !STI.hasFeature(ARM::HasV8Ops))
    emitAttribute(ARMBuildAttrs::DIV_use, ARMBuildAttrs::AllowDIVExt);
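
  // Example (illustrative): a Cortex-A7 has hardware divide in ARM and Thumb
  // state, but its base architecture ARMv7-A does not include hwdiv, so this
  // emits DIV_use = AllowDIVExt for it.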
  if (STI.hasFeature(ARM::FeatureDSP) && isV8M(STI))
    emitAttribute(ARMBuildAttrs::DSP_extension, ARMBuildAttrs::Allowed);
  if (STI.hasFeature(ARM::FeatureStrictAlign))
    emitAttribute(ARMBuildAttrs::CPU_unaligned_access,
                  ARMBuildAttrs::Not_Allowed);
  else
    emitAttribute(ARMBuildAttrs::CPU_unaligned_access, ARMBuildAttrs::Allowed);
  if (STI.hasFeature(ARM::FeatureTrustZone) &&
      STI.hasFeature(ARM::FeatureVirtualization))
    emitAttribute(ARMBuildAttrs::Virtualization_use,
                  ARMBuildAttrs::AllowTZVirtualization);
  else if (STI.hasFeature(ARM::FeatureTrustZone))
    emitAttribute(ARMBuildAttrs::Virtualization_use, ARMBuildAttrs::AllowTZ);
  else if (STI.hasFeature(ARM::FeatureVirtualization))
    emitAttribute(ARMBuildAttrs::Virtualization_use,
                  ARMBuildAttrs::AllowVirtualization);
  if (STI.hasFeature(ARM::FeaturePACBTI)) {
    emitAttribute(ARMBuildAttrs::PAC_extension, ARMBuildAttrs::AllowPAC);
    emitAttribute(ARMBuildAttrs::BTI_extension, ARMBuildAttrs::AllowBTI);
  }
}
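
// Illustrative output (an assumption about downstream printing, not this
// file): in textual assembly these calls surface as .cpu/.fpu/.eabi_attribute
// directives, roughly:
//   .cpu  cortex-a9
//   .eabi_attribute 6, 10  @ Tag_CPU_arch = v7
//   .fpu  neon
// The ELF streamer instead encodes the same tags into .ARM.attributes.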
MCTargetStreamer *
llvm::createARMObjectTargetStreamer(MCStreamer &S, const MCSubtargetInfo &STI) {
  const Triple &TT = STI.getTargetTriple();
  if (TT.isOSBinFormatELF())
    return createARMObjectTargetELFStreamer(S);
  if (TT.isOSBinFormatCOFF())
    return createARMObjectTargetWinCOFFStreamer(S);
  return new ARMTargetStreamer(S);
}
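
// Note (summary): the fallback path returns the plain base class, which acts
// as a no-op target streamer for object formats without ARM-specific
// handling (e.g. Mach-O), relying on the empty default callbacks above.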