//===-- AArch64ELFObjectWriter.cpp - AArch64 ELF Writer -------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file handles ELF-specific object emission, converting LLVM's internal
// fixups into the appropriate relocations.
//
//===----------------------------------------------------------------------===//
14 #include "MCTargetDesc/AArch64FixupKinds.h"
15 #include "MCTargetDesc/AArch64MCExpr.h"
16 #include "MCTargetDesc/AArch64MCTargetDesc.h"
17 #include "llvm/BinaryFormat/ELF.h"
18 #include "llvm/MC/MCContext.h"
19 #include "llvm/MC/MCELFObjectWriter.h"
20 #include "llvm/MC/MCFixup.h"
21 #include "llvm/MC/MCObjectWriter.h"
22 #include "llvm/MC/MCValue.h"
23 #include "llvm/Support/ErrorHandling.h"
24 #include <cassert>
25 #include <cstdint>

using namespace llvm;

namespace {

class AArch64ELFObjectWriter : public MCELFObjectTargetWriter {
public:
  AArch64ELFObjectWriter(uint8_t OSABI, bool IsILP32);

  ~AArch64ELFObjectWriter() override = default;

protected:
  unsigned getRelocType(MCContext &Ctx, const MCValue &Target,
                        const MCFixup &Fixup, bool IsPCRel) const override;

  bool IsILP32;
};

} // end anonymous namespace

AArch64ELFObjectWriter::AArch64ELFObjectWriter(uint8_t OSABI, bool IsILP32)
    : MCELFObjectTargetWriter(/*Is64Bit*/ true, OSABI, ELF::EM_AARCH64,
                              /*HasRelocationAddend*/ true),
      IsILP32(IsILP32) {}
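
// R_CLS(rtype) expands to the ILP32 (R_AARCH64_P32_*) or LP64 (R_AARCH64_*)
// form of a relocation type, depending on the IsILP32 member in scope.
// BAD_ILP32_MOV builds the diagnostic text for MOVW relocations that exist
// only in the LP64 ABI.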
#define R_CLS(rtype)                                                           \
  IsILP32 ? ELF::R_AARCH64_P32_##rtype : ELF::R_AARCH64_##rtype
#define BAD_ILP32_MOV(lp64rtype)                                               \
  "ILP32 absolute MOV relocation not "                                         \
  "supported (LP64 eqv: " #lp64rtype ")"

// Assumes IsILP32 is true. For MOVW expression variants that have no ILP32
// relocation, report an error and return true.
static bool isNonILP32reloc(const MCFixup &Fixup,
                            AArch64MCExpr::VariantKind RefKind,
                            MCContext &Ctx) {
  if (Fixup.getTargetKind() != AArch64::fixup_aarch64_movw)
    return false;
  switch (RefKind) {
  case AArch64MCExpr::VK_ABS_G3:
    Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(MOVW_UABS_G3));
    return true;
  case AArch64MCExpr::VK_ABS_G2:
    Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(MOVW_UABS_G2));
    return true;
  case AArch64MCExpr::VK_ABS_G2_S:
    Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(MOVW_SABS_G2));
    return true;
  case AArch64MCExpr::VK_ABS_G2_NC:
    Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(MOVW_UABS_G2_NC));
    return true;
  case AArch64MCExpr::VK_ABS_G1_S:
    Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(MOVW_SABS_G1));
    return true;
  case AArch64MCExpr::VK_ABS_G1_NC:
    Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(MOVW_UABS_G1_NC));
    return true;
  case AArch64MCExpr::VK_DTPREL_G2:
    Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(TLSLD_MOVW_DTPREL_G2));
    return true;
  case AArch64MCExpr::VK_DTPREL_G1_NC:
    Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(TLSLD_MOVW_DTPREL_G1_NC));
    return true;
  case AArch64MCExpr::VK_TPREL_G2:
    Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(TLSLE_MOVW_TPREL_G2));
    return true;
  case AArch64MCExpr::VK_TPREL_G1_NC:
    Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(TLSLE_MOVW_TPREL_G1_NC));
    return true;
  case AArch64MCExpr::VK_GOTTPREL_G1:
    Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(TLSIE_MOVW_GOTTPREL_G1));
    return true;
  case AArch64MCExpr::VK_GOTTPREL_G0_NC:
    Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(TLSIE_MOVW_GOTTPREL_G0_NC));
    return true;
  default:
    return false;
  }
  return false;
}
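
// Pick the ELF relocation type for a fixup, based on the fixup kind, the
// expression modifier (RefKind / SymLoc / NC) and the ILP32 vs. LP64 ABI.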
unsigned AArch64ELFObjectWriter::getRelocType(MCContext &Ctx,
                                              const MCValue &Target,
                                              const MCFixup &Fixup,
                                              bool IsPCRel) const {
  AArch64MCExpr::VariantKind RefKind =
      static_cast<AArch64MCExpr::VariantKind>(Target.getRefKind());
  AArch64MCExpr::VariantKind SymLoc = AArch64MCExpr::getSymbolLoc(RefKind);
  bool IsNC = AArch64MCExpr::isNotChecked(RefKind);

  assert((!Target.getSymA() ||
          Target.getSymA()->getKind() == MCSymbolRefExpr::VK_None) &&
         "Should only be expression-level modifiers here");

  assert((!Target.getSymB() ||
          Target.getSymB()->getKind() == MCSymbolRefExpr::VK_None) &&
         "Should only be expression-level modifiers here");
  if (IsPCRel) {
    switch (Fixup.getTargetKind()) {
    case FK_Data_1:
      Ctx.reportError(Fixup.getLoc(), "1-byte data relocations not supported");
      return ELF::R_AARCH64_NONE;
    case FK_Data_2:
      return R_CLS(PREL16);
    case FK_Data_4:
      return R_CLS(PREL32);
    case FK_Data_8:
      if (IsILP32) {
        Ctx.reportError(Fixup.getLoc(),
                        "ILP32 8 byte PC relative data "
                        "relocation not supported (LP64 eqv: PREL64)");
        return ELF::R_AARCH64_NONE;
      } else
        return ELF::R_AARCH64_PREL64;
    case AArch64::fixup_aarch64_pcrel_adr_imm21:
      if (SymLoc != AArch64MCExpr::VK_ABS)
        Ctx.reportError(Fixup.getLoc(),
                        "invalid symbol kind for ADR relocation");
      return R_CLS(ADR_PREL_LO21);
    case AArch64::fixup_aarch64_pcrel_adrp_imm21:
      if (SymLoc == AArch64MCExpr::VK_ABS && !IsNC)
        return R_CLS(ADR_PREL_PG_HI21);
      if (SymLoc == AArch64MCExpr::VK_ABS && IsNC) {
        if (IsILP32) {
          Ctx.reportError(Fixup.getLoc(),
                          "invalid fixup for 32-bit pcrel ADRP instruction "
                          "VK_ABS VK_NC");
          return ELF::R_AARCH64_NONE;
        } else {
          return ELF::R_AARCH64_ADR_PREL_PG_HI21_NC;
        }
      }
      if (SymLoc == AArch64MCExpr::VK_GOT && !IsNC)
        return R_CLS(ADR_GOT_PAGE);
      if (SymLoc == AArch64MCExpr::VK_GOTTPREL && !IsNC)
        return R_CLS(TLSIE_ADR_GOTTPREL_PAGE21);
      if (SymLoc == AArch64MCExpr::VK_TLSDESC && !IsNC)
        return R_CLS(TLSDESC_ADR_PAGE21);
      Ctx.reportError(Fixup.getLoc(),
                      "invalid symbol kind for ADRP relocation");
      return ELF::R_AARCH64_NONE;
    case AArch64::fixup_aarch64_pcrel_branch26:
      return R_CLS(JUMP26);
    case AArch64::fixup_aarch64_pcrel_call26:
      return R_CLS(CALL26);
    case AArch64::fixup_aarch64_ldr_pcrel_imm19:
      if (SymLoc == AArch64MCExpr::VK_GOTTPREL)
        return R_CLS(TLSIE_LD_GOTTPREL_PREL19);
      if (SymLoc == AArch64MCExpr::VK_GOT)
        return R_CLS(GOT_LD_PREL19);
      return R_CLS(LD_PREL_LO19);
    case AArch64::fixup_aarch64_pcrel_branch14:
      return R_CLS(TSTBR14);
    case AArch64::fixup_aarch64_pcrel_branch19:
      return R_CLS(CONDBR19);
    default:
      Ctx.reportError(Fixup.getLoc(), "Unsupported pc-relative fixup kind");
      return ELF::R_AARCH64_NONE;
    }
  } else {
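    // Non-PC-relative fixups: absolute data, ADD/LDST immediates, MOVW
    // variants and TLS descriptor calls.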
    if (IsILP32 && isNonILP32reloc(Fixup, RefKind, Ctx))
      return ELF::R_AARCH64_NONE;
    switch (Fixup.getTargetKind()) {
    case FK_NONE:
      return ELF::R_AARCH64_NONE;
    case FK_Data_1:
      Ctx.reportError(Fixup.getLoc(), "1-byte data relocations not supported");
      return ELF::R_AARCH64_NONE;
    case FK_Data_2:
      return R_CLS(ABS16);
    case FK_Data_4:
      return R_CLS(ABS32);
    case FK_Data_8:
      if (IsILP32) {
        Ctx.reportError(Fixup.getLoc(),
                        "ILP32 8 byte absolute data "
                        "relocation not supported (LP64 eqv: ABS64)");
        return ELF::R_AARCH64_NONE;
      } else
        return ELF::R_AARCH64_ABS64;
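    // ADD and load/store unsigned-offset immediates: the relocation is keyed
    // on the access size implied by the fixup kind.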
    case AArch64::fixup_aarch64_add_imm12:
      if (RefKind == AArch64MCExpr::VK_DTPREL_HI12)
        return R_CLS(TLSLD_ADD_DTPREL_HI12);
      if (RefKind == AArch64MCExpr::VK_TPREL_HI12)
        return R_CLS(TLSLE_ADD_TPREL_HI12);
      if (RefKind == AArch64MCExpr::VK_DTPREL_LO12_NC)
        return R_CLS(TLSLD_ADD_DTPREL_LO12_NC);
      if (RefKind == AArch64MCExpr::VK_DTPREL_LO12)
        return R_CLS(TLSLD_ADD_DTPREL_LO12);
      if (RefKind == AArch64MCExpr::VK_TPREL_LO12_NC)
        return R_CLS(TLSLE_ADD_TPREL_LO12_NC);
      if (RefKind == AArch64MCExpr::VK_TPREL_LO12)
        return R_CLS(TLSLE_ADD_TPREL_LO12);
      if (RefKind == AArch64MCExpr::VK_TLSDESC_LO12)
        return R_CLS(TLSDESC_ADD_LO12);
      if (SymLoc == AArch64MCExpr::VK_ABS && IsNC)
        return R_CLS(ADD_ABS_LO12_NC);

      Ctx.reportError(Fixup.getLoc(),
                      "invalid fixup for add (uimm12) instruction");
      return ELF::R_AARCH64_NONE;
    case AArch64::fixup_aarch64_ldst_imm12_scale1:
      if (SymLoc == AArch64MCExpr::VK_ABS && IsNC)
        return R_CLS(LDST8_ABS_LO12_NC);
      if (SymLoc == AArch64MCExpr::VK_DTPREL && !IsNC)
        return R_CLS(TLSLD_LDST8_DTPREL_LO12);
      if (SymLoc == AArch64MCExpr::VK_DTPREL && IsNC)
        return R_CLS(TLSLD_LDST8_DTPREL_LO12_NC);
      if (SymLoc == AArch64MCExpr::VK_TPREL && !IsNC)
        return R_CLS(TLSLE_LDST8_TPREL_LO12);
      if (SymLoc == AArch64MCExpr::VK_TPREL && IsNC)
        return R_CLS(TLSLE_LDST8_TPREL_LO12_NC);

      Ctx.reportError(Fixup.getLoc(),
                      "invalid fixup for 8-bit load/store instruction");
      return ELF::R_AARCH64_NONE;
    case AArch64::fixup_aarch64_ldst_imm12_scale2:
      if (SymLoc == AArch64MCExpr::VK_ABS && IsNC)
        return R_CLS(LDST16_ABS_LO12_NC);
      if (SymLoc == AArch64MCExpr::VK_DTPREL && !IsNC)
        return R_CLS(TLSLD_LDST16_DTPREL_LO12);
      if (SymLoc == AArch64MCExpr::VK_DTPREL && IsNC)
        return R_CLS(TLSLD_LDST16_DTPREL_LO12_NC);
      if (SymLoc == AArch64MCExpr::VK_TPREL && !IsNC)
        return R_CLS(TLSLE_LDST16_TPREL_LO12);
      if (SymLoc == AArch64MCExpr::VK_TPREL && IsNC)
        return R_CLS(TLSLE_LDST16_TPREL_LO12_NC);

      Ctx.reportError(Fixup.getLoc(),
                      "invalid fixup for 16-bit load/store instruction");
      return ELF::R_AARCH64_NONE;
    case AArch64::fixup_aarch64_ldst_imm12_scale4:
      if (SymLoc == AArch64MCExpr::VK_ABS && IsNC)
        return R_CLS(LDST32_ABS_LO12_NC);
      if (SymLoc == AArch64MCExpr::VK_DTPREL && !IsNC)
        return R_CLS(TLSLD_LDST32_DTPREL_LO12);
      if (SymLoc == AArch64MCExpr::VK_DTPREL && IsNC)
        return R_CLS(TLSLD_LDST32_DTPREL_LO12_NC);
      if (SymLoc == AArch64MCExpr::VK_TPREL && !IsNC)
        return R_CLS(TLSLE_LDST32_TPREL_LO12);
      if (SymLoc == AArch64MCExpr::VK_TPREL && IsNC)
        return R_CLS(TLSLE_LDST32_TPREL_LO12_NC);
      if (SymLoc == AArch64MCExpr::VK_GOT && IsNC) {
        if (IsILP32) {
          return ELF::R_AARCH64_P32_LD32_GOT_LO12_NC;
        } else {
          Ctx.reportError(Fixup.getLoc(),
                          "LP64 4 byte unchecked GOT load/store relocation "
                          "not supported (ILP32 eqv: LD32_GOT_LO12_NC)");
          return ELF::R_AARCH64_NONE;
        }
      }
      if (SymLoc == AArch64MCExpr::VK_GOT && !IsNC) {
        if (IsILP32) {
          Ctx.reportError(Fixup.getLoc(),
                          "ILP32 4 byte checked GOT load/store relocation "
                          "not supported (unchecked eqv: LD32_GOT_LO12_NC)");
        } else {
          Ctx.reportError(Fixup.getLoc(),
                          "LP64 4 byte checked GOT load/store relocation "
                          "not supported (unchecked/ILP32 eqv: "
                          "LD32_GOT_LO12_NC)");
        }
        return ELF::R_AARCH64_NONE;
      }
      if (SymLoc == AArch64MCExpr::VK_GOTTPREL && IsNC) {
        if (IsILP32) {
          return ELF::R_AARCH64_P32_TLSIE_LD32_GOTTPREL_LO12_NC;
        } else {
          Ctx.reportError(Fixup.getLoc(),
                          "LP64 32-bit load/store "
                          "relocation not supported (ILP32 eqv: "
                          "TLSIE_LD32_GOTTPREL_LO12_NC)");
          return ELF::R_AARCH64_NONE;
        }
      }
      if (SymLoc == AArch64MCExpr::VK_TLSDESC && !IsNC) {
        if (IsILP32) {
          return ELF::R_AARCH64_P32_TLSDESC_LD32_LO12;
        } else {
          Ctx.reportError(Fixup.getLoc(),
                          "LP64 4 byte TLSDESC load/store relocation "
                          "not supported (ILP32 eqv: TLSDESC_LD64_LO12)");
          return ELF::R_AARCH64_NONE;
        }
      }

      Ctx.reportError(Fixup.getLoc(),
                      "invalid fixup for 32-bit load/store instruction "
                      "fixup_aarch64_ldst_imm12_scale4");
      return ELF::R_AARCH64_NONE;
    case AArch64::fixup_aarch64_ldst_imm12_scale8:
      if (SymLoc == AArch64MCExpr::VK_ABS && IsNC)
        return R_CLS(LDST64_ABS_LO12_NC);
      if (SymLoc == AArch64MCExpr::VK_GOT && IsNC) {
        if (!IsILP32) {
          return ELF::R_AARCH64_LD64_GOT_LO12_NC;
        } else {
          Ctx.reportError(Fixup.getLoc(), "ILP32 64-bit load/store "
                                          "relocation not supported (LP64 eqv: "
                                          "LD64_GOT_LO12_NC)");
          return ELF::R_AARCH64_NONE;
        }
      }
      if (SymLoc == AArch64MCExpr::VK_DTPREL && !IsNC)
        return R_CLS(TLSLD_LDST64_DTPREL_LO12);
      if (SymLoc == AArch64MCExpr::VK_DTPREL && IsNC)
        return R_CLS(TLSLD_LDST64_DTPREL_LO12_NC);
      if (SymLoc == AArch64MCExpr::VK_TPREL && !IsNC)
        return R_CLS(TLSLE_LDST64_TPREL_LO12);
      if (SymLoc == AArch64MCExpr::VK_TPREL && IsNC)
        return R_CLS(TLSLE_LDST64_TPREL_LO12_NC);
      if (SymLoc == AArch64MCExpr::VK_GOTTPREL && IsNC) {
        if (!IsILP32) {
          return ELF::R_AARCH64_TLSIE_LD64_GOTTPREL_LO12_NC;
        } else {
          Ctx.reportError(Fixup.getLoc(), "ILP32 64-bit load/store "
                                          "relocation not supported (LP64 eqv: "
                                          "TLSIE_LD64_GOTTPREL_LO12_NC)");
          return ELF::R_AARCH64_NONE;
        }
      }
      if (SymLoc == AArch64MCExpr::VK_TLSDESC) {
        if (!IsILP32) {
          return ELF::R_AARCH64_TLSDESC_LD64_LO12;
        } else {
          Ctx.reportError(Fixup.getLoc(), "ILP32 64-bit load/store "
                                          "relocation not supported (LP64 eqv: "
                                          "TLSDESC_LD64_LO12)");
          return ELF::R_AARCH64_NONE;
        }
      }
      Ctx.reportError(Fixup.getLoc(),
                      "invalid fixup for 64-bit load/store instruction");
      return ELF::R_AARCH64_NONE;
    case AArch64::fixup_aarch64_ldst_imm12_scale16:
      if (SymLoc == AArch64MCExpr::VK_ABS && IsNC)
        return R_CLS(LDST128_ABS_LO12_NC);
      if (SymLoc == AArch64MCExpr::VK_DTPREL && !IsNC)
        return R_CLS(TLSLD_LDST128_DTPREL_LO12);
      if (SymLoc == AArch64MCExpr::VK_DTPREL && IsNC)
        return R_CLS(TLSLD_LDST128_DTPREL_LO12_NC);
      if (SymLoc == AArch64MCExpr::VK_TPREL && !IsNC)
        return R_CLS(TLSLE_LDST128_TPREL_LO12);
      if (SymLoc == AArch64MCExpr::VK_TPREL && IsNC)
        return R_CLS(TLSLE_LDST128_TPREL_LO12_NC);

      Ctx.reportError(Fixup.getLoc(),
                      "invalid fixup for 128-bit load/store instruction");
      return ELF::R_AARCH64_NONE;
    // The ILP32 variants with no relocation are rejected earlier by
    // isNonILP32reloc, so they are not reached here.
    case AArch64::fixup_aarch64_movw:
      if (RefKind == AArch64MCExpr::VK_ABS_G3)
        return ELF::R_AARCH64_MOVW_UABS_G3;
      if (RefKind == AArch64MCExpr::VK_ABS_G2)
        return ELF::R_AARCH64_MOVW_UABS_G2;
      if (RefKind == AArch64MCExpr::VK_ABS_G2_S)
        return ELF::R_AARCH64_MOVW_SABS_G2;
      if (RefKind == AArch64MCExpr::VK_ABS_G2_NC)
        return ELF::R_AARCH64_MOVW_UABS_G2_NC;
      if (RefKind == AArch64MCExpr::VK_ABS_G1)
        return R_CLS(MOVW_UABS_G1);
      if (RefKind == AArch64MCExpr::VK_ABS_G1_S)
        return ELF::R_AARCH64_MOVW_SABS_G1;
      if (RefKind == AArch64MCExpr::VK_ABS_G1_NC)
        return ELF::R_AARCH64_MOVW_UABS_G1_NC;
      if (RefKind == AArch64MCExpr::VK_ABS_G0)
        return R_CLS(MOVW_UABS_G0);
      if (RefKind == AArch64MCExpr::VK_ABS_G0_S)
        return R_CLS(MOVW_SABS_G0);
      if (RefKind == AArch64MCExpr::VK_ABS_G0_NC)
        return R_CLS(MOVW_UABS_G0_NC);
      if (RefKind == AArch64MCExpr::VK_PREL_G3)
        return ELF::R_AARCH64_MOVW_PREL_G3;
      if (RefKind == AArch64MCExpr::VK_PREL_G2)
        return ELF::R_AARCH64_MOVW_PREL_G2;
      if (RefKind == AArch64MCExpr::VK_PREL_G2_NC)
        return ELF::R_AARCH64_MOVW_PREL_G2_NC;
      if (RefKind == AArch64MCExpr::VK_PREL_G1)
        return R_CLS(MOVW_PREL_G1);
      if (RefKind == AArch64MCExpr::VK_PREL_G1_NC)
        return ELF::R_AARCH64_MOVW_PREL_G1_NC;
      if (RefKind == AArch64MCExpr::VK_PREL_G0)
        return R_CLS(MOVW_PREL_G0);
      if (RefKind == AArch64MCExpr::VK_PREL_G0_NC)
        return R_CLS(MOVW_PREL_G0_NC);
      if (RefKind == AArch64MCExpr::VK_DTPREL_G2)
        return ELF::R_AARCH64_TLSLD_MOVW_DTPREL_G2;
      if (RefKind == AArch64MCExpr::VK_DTPREL_G1)
        return R_CLS(TLSLD_MOVW_DTPREL_G1);
      if (RefKind == AArch64MCExpr::VK_DTPREL_G1_NC)
        return ELF::R_AARCH64_TLSLD_MOVW_DTPREL_G1_NC;
      if (RefKind == AArch64MCExpr::VK_DTPREL_G0)
        return R_CLS(TLSLD_MOVW_DTPREL_G0);
      if (RefKind == AArch64MCExpr::VK_DTPREL_G0_NC)
        return R_CLS(TLSLD_MOVW_DTPREL_G0_NC);
      if (RefKind == AArch64MCExpr::VK_TPREL_G2)
        return ELF::R_AARCH64_TLSLE_MOVW_TPREL_G2;
      if (RefKind == AArch64MCExpr::VK_TPREL_G1)
        return R_CLS(TLSLE_MOVW_TPREL_G1);
      if (RefKind == AArch64MCExpr::VK_TPREL_G1_NC)
        return ELF::R_AARCH64_TLSLE_MOVW_TPREL_G1_NC;
      if (RefKind == AArch64MCExpr::VK_TPREL_G0)
        return R_CLS(TLSLE_MOVW_TPREL_G0);
      if (RefKind == AArch64MCExpr::VK_TPREL_G0_NC)
        return R_CLS(TLSLE_MOVW_TPREL_G0_NC);
      if (RefKind == AArch64MCExpr::VK_GOTTPREL_G1)
        return ELF::R_AARCH64_TLSIE_MOVW_GOTTPREL_G1;
      if (RefKind == AArch64MCExpr::VK_GOTTPREL_G0_NC)
        return ELF::R_AARCH64_TLSIE_MOVW_GOTTPREL_G0_NC;
      Ctx.reportError(Fixup.getLoc(),
                      "invalid fixup for movz/movk instruction");
      return ELF::R_AARCH64_NONE;
    case AArch64::fixup_aarch64_tlsdesc_call:
      return R_CLS(TLSDESC_CALL);
    default:
      Ctx.reportError(Fixup.getLoc(), "Unknown ELF relocation type");
      return ELF::R_AARCH64_NONE;
    }
  }

  llvm_unreachable("Unimplemented fixup -> relocation");
}

std::unique_ptr<MCObjectTargetWriter>
llvm::createAArch64ELFObjectWriter(uint8_t OSABI, bool IsILP32) {
  return std::make_unique<AArch64ELFObjectWriter>(OSABI, IsILP32);
}