// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "elf_file.h"

#include <stdint.h>
#include <string.h>
#include <sys/types.h>
#include <unistd.h>

#include <string>
#include <vector>

#include "debug.h"
#include "elf_traits.h"
#include "libelf.h"
#include "packer.h"

namespace relocation_packer {

// Stub identifier written to 'null out' packed data, "NULL".
static const uint32_t kStubIdentifier = 0x4c4c554eu;
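// (0x4c4c554e is the byte sequence 'N', 'U', 'L', 'L' read as a little-endian
// 32-bit word, so a nulled-out packed section begins with the ASCII text
// "NULL".)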

// Out-of-band dynamic tags used to indicate the offset and size of the
// android packed relocations section.
static const ELF::Sword DT_ANDROID_REL_OFFSET = DT_LOOS;
static const ELF::Sword DT_ANDROID_REL_SIZE = DT_LOOS + 1;
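// (DT_LOOS is the first tag value in the ELF OS-specific range, so these
// private tags cannot collide with standard DT_* values.)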

// Alignment to preserve, in bytes. This must be at least as large as the
// largest d_align and sh_addralign values found in the loaded file.
// Out of caution for RELRO page alignment, we preserve to a complete target
// page. See http://www.airs.com/blog/archives/189.
static const size_t kPreserveAlignment = 4096;

// Get section data. Checks that the section has exactly one data entry,
// so that the section size and the data size are the same. True in
// practice for all sections we resize when packing or unpacking. Done
// by ensuring that a call to elf_getdata(section, data) returns NULL as
// the next data entry.
Elf_Data* GetSectionData(Elf_Scn* section) {
  Elf_Data* data = elf_getdata(section, NULL);
  CHECK(data && elf_getdata(section, data) == NULL);
  return data;
}

// Rewrite section data. Allocates new data and makes it the data element's
// buffer. Relies on program exit to free allocated data.
void RewriteSectionData(Elf_Data* data,
                        const void* section_data,
                        size_t size) {
  CHECK(size == data->d_size);
  uint8_t* area = new uint8_t[size];
  memcpy(area, section_data, size);
  data->d_buf = area;
}

// Verbose ELF header logging.
void VerboseLogElfHeader(const ELF::Ehdr* elf_header) {
  VLOG(1) << "e_phoff = " << elf_header->e_phoff;
  VLOG(1) << "e_shoff = " << elf_header->e_shoff;
  VLOG(1) << "e_ehsize = " << elf_header->e_ehsize;
  VLOG(1) << "e_phentsize = " << elf_header->e_phentsize;
  VLOG(1) << "e_phnum = " << elf_header->e_phnum;
  VLOG(1) << "e_shnum = " << elf_header->e_shnum;
  VLOG(1) << "e_shstrndx = " << elf_header->e_shstrndx;
}

// Verbose ELF program header logging.
void VerboseLogProgramHeader(size_t program_header_index,
                             const ELF::Phdr* program_header) {
  std::string type;
  switch (program_header->p_type) {
    case PT_NULL: type = "NULL"; break;
    case PT_LOAD: type = "LOAD"; break;
    case PT_DYNAMIC: type = "DYNAMIC"; break;
    case PT_INTERP: type = "INTERP"; break;
    case PT_NOTE: type = "NOTE"; break;
    case PT_SHLIB: type = "SHLIB"; break;
    case PT_PHDR: type = "PHDR"; break;
    case PT_TLS: type = "TLS"; break;
    default: type = "(OTHER)"; break;
  }
  VLOG(1) << "phdr " << program_header_index << " : " << type;
  VLOG(1) << "  p_offset = " << program_header->p_offset;
  VLOG(1) << "  p_vaddr = " << program_header->p_vaddr;
  VLOG(1) << "  p_paddr = " << program_header->p_paddr;
  VLOG(1) << "  p_filesz = " << program_header->p_filesz;
  VLOG(1) << "  p_memsz = " << program_header->p_memsz;
}

// Verbose ELF section header logging.
void VerboseLogSectionHeader(const std::string& section_name,
                             const ELF::Shdr* section_header) {
  VLOG(1) << "section " << section_name;
  VLOG(1) << "  sh_addr = " << section_header->sh_addr;
  VLOG(1) << "  sh_offset = " << section_header->sh_offset;
  VLOG(1) << "  sh_size = " << section_header->sh_size;
  VLOG(1) << "  sh_addralign = " << section_header->sh_addralign;
}

// Verbose ELF section data logging.
void VerboseLogSectionData(const Elf_Data* data) {
  VLOG(1) << "  d_buf = " << data->d_buf;
  VLOG(1) << "  d_off = " << data->d_off;
  VLOG(1) << "  d_size = " << data->d_size;
  VLOG(1) << "  d_align = " << data->d_align;
}

// Load the complete ELF file into a memory image in libelf, and identify
// the .rel.dyn or .rela.dyn, .dynamic, and .android.rel.dyn or
// .android.rela.dyn sections. No-op if the ELF file has already been loaded.
bool ElfFile::Load() {
  if (elf_)
    return true;

  Elf* elf = elf_begin(fd_, ELF_C_RDWR, NULL);

  if (elf_kind(elf) != ELF_K_ELF) {
    LOG(ERROR) << "File not in ELF format";
    return false;
  }

  ELF::Ehdr* elf_header = ELF::getehdr(elf);
  if (!elf_header) {
    LOG(ERROR) << "Failed to load ELF header: " << elf_errmsg(elf_errno());
    return false;
  }
  if (elf_header->e_machine != ELF::kMachine) {
    LOG(ERROR) << "ELF file architecture is not " << ELF::Machine();
    return false;
  }
  if (elf_header->e_type != ET_DYN) {
    LOG(ERROR) << "ELF file is not a shared object";
    return false;
  }

  // Require that our endianness matches that of the target, and that both
  // are little-endian. Safe for all current build/target combinations.
  const int endian = elf_header->e_ident[EI_DATA];
  CHECK(endian == ELFDATA2LSB);
  CHECK(__BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__);

  // Also require that the file class is as expected.
  const int file_class = elf_header->e_ident[EI_CLASS];
  CHECK(file_class == ELF::kFileClass);

  VLOG(1) << "endian = " << endian << ", file class = " << file_class;
  VerboseLogElfHeader(elf_header);

  const ELF::Phdr* elf_program_header = ELF::getphdr(elf);
  CHECK(elf_program_header);

  const ELF::Phdr* dynamic_program_header = NULL;
  for (size_t i = 0; i < elf_header->e_phnum; ++i) {
    const ELF::Phdr* program_header = &elf_program_header[i];
    VerboseLogProgramHeader(i, program_header);

    if (program_header->p_type == PT_DYNAMIC) {
      CHECK(dynamic_program_header == NULL);
      dynamic_program_header = program_header;
    }
  }
  CHECK(dynamic_program_header != NULL);

  size_t string_index;
  elf_getshdrstrndx(elf, &string_index);

  // Notes of the dynamic relocations, packed relocations, and .dynamic
  // sections. Found while iterating sections, and later stored in class
  // attributes.
  Elf_Scn* found_relocations_section = NULL;
  Elf_Scn* found_android_relocations_section = NULL;
  Elf_Scn* found_dynamic_section = NULL;

  // Notes of relocation section types seen. We require one or the other of
  // these; both is unsupported.
  bool has_rel_relocations = false;
  bool has_rela_relocations = false;

  // Flag set if we encounter any .debug* section. We do not adjust any
  // offsets or addresses of any debug data, so if we find one of these then
  // the resulting output shared object should still run, but might not be
  // usable for debugging, disassembly, and so on. Provides a warning if
  // this occurs.
  bool has_debug_section = false;

  Elf_Scn* section = NULL;
  while ((section = elf_nextscn(elf, section)) != NULL) {
    const ELF::Shdr* section_header = ELF::getshdr(section);
    std::string name = elf_strptr(elf, string_index, section_header->sh_name);
    VerboseLogSectionHeader(name, section_header);

    // Note relocation section types.
    if (section_header->sh_type == SHT_REL) {
      has_rel_relocations = true;
    }
    if (section_header->sh_type == SHT_RELA) {
      has_rela_relocations = true;
    }

    // Note special sections as we encounter them.
    if ((name == ".rel.dyn" || name == ".rela.dyn") &&
        section_header->sh_size > 0) {
      found_relocations_section = section;
    }
    if ((name == ".android.rel.dyn" || name == ".android.rela.dyn") &&
        section_header->sh_size > 0) {
      found_android_relocations_section = section;
    }
    if (section_header->sh_offset == dynamic_program_header->p_offset) {
      found_dynamic_section = section;
    }

    // If we find a section named .debug*, set the debug warning flag.
    if (std::string(name).find(".debug") == 0) {
      has_debug_section = true;
    }

    // Ensure we preserve alignment, repeated later for the data block(s).
    CHECK(section_header->sh_addralign <= kPreserveAlignment);

    Elf_Data* data = NULL;
    while ((data = elf_getdata(section, data)) != NULL) {
      CHECK(data->d_align <= kPreserveAlignment);
      VerboseLogSectionData(data);
    }
  }

  // Loading failed if we did not find the required special sections.
  if (!found_relocations_section) {
    LOG(ERROR) << "Missing or empty .rel.dyn or .rela.dyn section";
    return false;
  }
  if (!found_android_relocations_section) {
    LOG(ERROR) << "Missing or empty .android.rel.dyn or .android.rela.dyn "
               << "section (to fix, run with --help and follow the "
               << "pre-packing instructions)";
    return false;
  }
  if (!found_dynamic_section) {
    LOG(ERROR) << "Missing .dynamic section";
    return false;
  }

  // Loading failed if we could not identify the relocations type.
  if (!has_rel_relocations && !has_rela_relocations) {
    LOG(ERROR) << "No relocations sections found";
    return false;
  }
  if (has_rel_relocations && has_rela_relocations) {
    LOG(ERROR) << "Multiple relocations sections with different types found, "
               << "not currently supported";
    return false;
  }

  if (has_debug_section) {
    LOG(WARNING) << "Found .debug section(s), and ignored them";
  }

  elf_ = elf;
  relocations_section_ = found_relocations_section;
  dynamic_section_ = found_dynamic_section;
  android_relocations_section_ = found_android_relocations_section;
  relocations_type_ = has_rel_relocations ? REL : RELA;
  return true;
}
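
// The helpers below open or close a "hole" in the file: every file offset,
// address, or size that lies beyond hole_start is shifted by hole_size
// (positive when a section grows, negative when it shrinks).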

// Helper for ResizeSection(). Adjust the main ELF header for the hole.
void AdjustElfHeaderForHole(ELF::Ehdr* elf_header,
                            ELF::Off hole_start,
                            ssize_t hole_size) {
  if (elf_header->e_phoff > hole_start) {
    elf_header->e_phoff += hole_size;
    VLOG(1) << "e_phoff adjusted to " << elf_header->e_phoff;
  }
  if (elf_header->e_shoff > hole_start) {
    elf_header->e_shoff += hole_size;
    VLOG(1) << "e_shoff adjusted to " << elf_header->e_shoff;
  }
}

// Helper for ResizeSection(). Adjust all program headers for the hole.
void AdjustProgramHeadersForHole(ELF::Phdr* elf_program_header,
                                 size_t program_header_count,
                                 ELF::Off hole_start,
                                 ssize_t hole_size) {
  for (size_t i = 0; i < program_header_count; ++i) {
    ELF::Phdr* program_header = &elf_program_header[i];

    if (program_header->p_offset > hole_start) {
      // This segment starts after the hole start, so adjust offsets and addrs.
      program_header->p_offset += hole_size;
      VLOG(1) << "phdr " << i
              << " p_offset adjusted to " << program_header->p_offset;

      // Only adjust vaddr and paddr if this program header has them.
      if (program_header->p_vaddr != 0) {
        program_header->p_vaddr += hole_size;
        VLOG(1) << "phdr " << i
                << " p_vaddr adjusted to " << program_header->p_vaddr;
      }
      if (program_header->p_paddr != 0) {
        program_header->p_paddr += hole_size;
        VLOG(1) << "phdr " << i
                << " p_paddr adjusted to " << program_header->p_paddr;
      }
    } else if (program_header->p_offset +
               program_header->p_filesz > hole_start) {
      // The hole start is within this segment, so adjust file and in-memory
      // sizes, but leave offsets and addrs unchanged.
      program_header->p_filesz += hole_size;
      VLOG(1) << "phdr " << i
              << " p_filesz adjusted to " << program_header->p_filesz;
      program_header->p_memsz += hole_size;
      VLOG(1) << "phdr " << i
              << " p_memsz adjusted to " << program_header->p_memsz;
    }
  }
}

// Helper for ResizeSection(). Adjust all section headers for the hole.
void AdjustSectionHeadersForHole(Elf* elf,
                                 ELF::Off hole_start,
                                 ssize_t hole_size) {
  size_t string_index;
  elf_getshdrstrndx(elf, &string_index);

  Elf_Scn* section = NULL;
  while ((section = elf_nextscn(elf, section)) != NULL) {
    ELF::Shdr* section_header = ELF::getshdr(section);
    std::string name = elf_strptr(elf, string_index, section_header->sh_name);

    if (section_header->sh_offset > hole_start) {
      section_header->sh_offset += hole_size;
      VLOG(1) << "section " << name
              << " sh_offset adjusted to " << section_header->sh_offset;
      // Only adjust section addr if this section has one.
      if (section_header->sh_addr != 0) {
        section_header->sh_addr += hole_size;
        VLOG(1) << "section " << name
                << " sh_addr adjusted to " << section_header->sh_addr;
      }
    }
  }
}

// Helper for ResizeSection(). Adjust the .dynamic section for the hole.
template <typename Rel>
void AdjustDynamicSectionForHole(Elf_Scn* dynamic_section,
                                 bool is_relocations_resize,
                                 ELF::Off hole_start,
                                 ssize_t hole_size) {
  Elf_Data* data = GetSectionData(dynamic_section);

  const ELF::Dyn* dynamic_base = reinterpret_cast<ELF::Dyn*>(data->d_buf);
  std::vector<ELF::Dyn> dynamics(
      dynamic_base,
      dynamic_base + data->d_size / sizeof(dynamics[0]));

  for (size_t i = 0; i < dynamics.size(); ++i) {
    ELF::Dyn* dynamic = &dynamics[i];
    const ELF::Sword tag = dynamic->d_tag;
    // Any tags that hold offsets are adjustment candidates.
    const bool is_adjustable = (tag == DT_PLTGOT ||
                                tag == DT_HASH ||
                                tag == DT_STRTAB ||
                                tag == DT_SYMTAB ||
                                tag == DT_RELA ||
                                tag == DT_INIT ||
                                tag == DT_FINI ||
                                tag == DT_REL ||
                                tag == DT_JMPREL ||
                                tag == DT_INIT_ARRAY ||
                                tag == DT_FINI_ARRAY ||
                                tag == DT_ANDROID_REL_OFFSET);
    if (is_adjustable && dynamic->d_un.d_ptr > hole_start) {
      dynamic->d_un.d_ptr += hole_size;
      VLOG(1) << "dynamic[" << i << "] " << dynamic->d_tag
              << " d_ptr adjusted to " << dynamic->d_un.d_ptr;
    }

    // If we are specifically resizing dynamic relocations, we need to make
    // some added adjustments to tags that indicate the counts of relative
    // relocations in the shared object.
    if (!is_relocations_resize)
      continue;

    // DT_RELSZ is the overall size of relocations. Adjust by hole size.
    if (tag == DT_RELSZ) {
      dynamic->d_un.d_val += hole_size;
      VLOG(1) << "dynamic[" << i << "] " << dynamic->d_tag
              << " d_val adjusted to " << dynamic->d_un.d_val;
    }

    // DT_RELCOUNT is the count of relative relocations. Packing reduces it
    // to the alignment padding, if any; unpacking restores it to its former
    // value. The crazy linker does not use it, but we update it anyway.
    if (tag == DT_RELCOUNT) {
      // Cast sizeof to a signed type to avoid the division result being
      // promoted into an unsigned size_t.
      const ssize_t sizeof_rel = static_cast<ssize_t>(sizeof(Rel));
      dynamic->d_un.d_val += hole_size / sizeof_rel;
      VLOG(1) << "dynamic[" << i << "] " << dynamic->d_tag
              << " d_val adjusted to " << dynamic->d_un.d_val;
    }

    // DT_RELENT doesn't change, but make sure it is what we expect.
    if (tag == DT_RELENT) {
      CHECK(dynamic->d_un.d_val == sizeof(Rel));
    }
  }

  void* section_data = &dynamics[0];
  size_t bytes = dynamics.size() * sizeof(dynamics[0]);
  RewriteSectionData(data, section_data, bytes);
}

// Helper for ResizeSection(). Adjust the .dynsym section for the hole.
// We need to adjust the values for the symbols represented in it.
void AdjustDynSymSectionForHole(Elf_Scn* dynsym_section,
                                ELF::Off hole_start,
                                ssize_t hole_size) {
  Elf_Data* data = GetSectionData(dynsym_section);

  const ELF::Sym* dynsym_base = reinterpret_cast<ELF::Sym*>(data->d_buf);
  std::vector<ELF::Sym> dynsyms(
      dynsym_base,
      dynsym_base + data->d_size / sizeof(dynsyms[0]));

  for (size_t i = 0; i < dynsyms.size(); ++i) {
    ELF::Sym* dynsym = &dynsyms[i];
    const int type = static_cast<int>(ELF_ST_TYPE(dynsym->st_info));
    const bool is_adjustable = (type == STT_OBJECT ||
                                type == STT_FUNC ||
                                type == STT_SECTION ||
                                type == STT_FILE ||
                                type == STT_COMMON ||
                                type == STT_TLS);
    if (is_adjustable && dynsym->st_value > hole_start) {
      dynsym->st_value += hole_size;
      VLOG(1) << "dynsym[" << i << "] type=" << type
              << " st_value adjusted to " << dynsym->st_value;
    }
  }

  void* section_data = &dynsyms[0];
  size_t bytes = dynsyms.size() * sizeof(dynsyms[0]);
  RewriteSectionData(data, section_data, bytes);
}

// Helper for ResizeSection(). Adjust the plt relocations section for the
// hole. We need to adjust the offset of every relocation inside it that
// falls beyond the hole start.
template <typename Rel>
void AdjustRelPltSectionForHole(Elf_Scn* relplt_section,
                                ELF::Off hole_start,
                                ssize_t hole_size) {
  Elf_Data* data = GetSectionData(relplt_section);

  const Rel* relplt_base = reinterpret_cast<Rel*>(data->d_buf);
  std::vector<Rel> relplts(
      relplt_base,
      relplt_base + data->d_size / sizeof(relplts[0]));

  for (size_t i = 0; i < relplts.size(); ++i) {
    Rel* relplt = &relplts[i];
    if (relplt->r_offset > hole_start) {
      relplt->r_offset += hole_size;
      VLOG(1) << "relplt[" << i
              << "] r_offset adjusted to " << relplt->r_offset;
    }
  }

  void* section_data = &relplts[0];
  size_t bytes = relplts.size() * sizeof(relplts[0]);
  RewriteSectionData(data, section_data, bytes);
}

// Helper for ResizeSection(). Adjust the .symtab section for the hole.
// We want to adjust the value of every symbol in it that falls beyond
// the hole start.
void AdjustSymTabSectionForHole(Elf_Scn* symtab_section,
                                ELF::Off hole_start,
                                ssize_t hole_size) {
  Elf_Data* data = GetSectionData(symtab_section);

  const ELF::Sym* symtab_base = reinterpret_cast<ELF::Sym*>(data->d_buf);
  std::vector<ELF::Sym> symtab(
      symtab_base,
      symtab_base + data->d_size / sizeof(symtab[0]));

  for (size_t i = 0; i < symtab.size(); ++i) {
    ELF::Sym* sym = &symtab[i];
    if (sym->st_value > hole_start) {
      sym->st_value += hole_size;
      VLOG(1) << "symtab[" << i << "] value adjusted to " << sym->st_value;
    }
  }

  void* section_data = &symtab[0];
  size_t bytes = symtab.size() * sizeof(symtab[0]);
  RewriteSectionData(data, section_data, bytes);
}

// Resize a section. If the new size is larger than the current size, open
// up a hole by increasing file offsets that come after the hole. If smaller
// than the current size, remove the hole by decreasing those offsets.
template <typename Rel>
void ResizeSection(Elf* elf, Elf_Scn* section, size_t new_size) {
  ELF::Shdr* section_header = ELF::getshdr(section);
  if (section_header->sh_size == new_size)
    return;

  // Note if we are resizing the real dyn relocations. If yes, then we have
  // to massage d_un.d_val in the dynamic section where d_tag is DT_RELSZ and
  // DT_RELCOUNT.
  size_t string_index;
  elf_getshdrstrndx(elf, &string_index);
  const std::string section_name =
      elf_strptr(elf, string_index, section_header->sh_name);
  const bool is_relocations_resize =
      (section_name == ".rel.dyn" || section_name == ".rela.dyn");

  // Require that the section size and the data size are the same. True
  // in practice for all sections we resize when packing or unpacking.
  Elf_Data* data = GetSectionData(section);
  CHECK(data->d_off == 0 && data->d_size == section_header->sh_size);

  // Require that the section is not zero-length (that is, has allocated
  // data that we can validly expand).
  CHECK(data->d_size && data->d_buf);

  const ELF::Off hole_start = section_header->sh_offset;
  const ssize_t hole_size = new_size - data->d_size;

  VLOG_IF(1, (hole_size > 0)) << "expand section size = " << data->d_size;
  VLOG_IF(1, (hole_size < 0)) << "shrink section size = " << data->d_size;

  // Resize the data and the section header.
  data->d_size += hole_size;
  section_header->sh_size += hole_size;

  ELF::Ehdr* elf_header = ELF::getehdr(elf);
  ELF::Phdr* elf_program_header = ELF::getphdr(elf);

  // Add the hole size to all offsets in the ELF file that are after the
  // start of the hole. If the hole size is positive we are expanding the
  // section to create a new hole; if negative, we are closing up a hole.

  // Start with the main ELF header.
  AdjustElfHeaderForHole(elf_header, hole_start, hole_size);

  // Adjust all program headers.
  AdjustProgramHeadersForHole(elf_program_header,
                              elf_header->e_phnum,
                              hole_start,
                              hole_size);

  // Adjust all section headers.
  AdjustSectionHeadersForHole(elf, hole_start, hole_size);

  // We use the dynamic program header entry to locate the dynamic section.
  const ELF::Phdr* dynamic_program_header = NULL;

  // Find the dynamic program header entry.
  for (size_t i = 0; i < elf_header->e_phnum; ++i) {
    ELF::Phdr* program_header = &elf_program_header[i];

    if (program_header->p_type == PT_DYNAMIC) {
      dynamic_program_header = program_header;
    }
  }
  CHECK(dynamic_program_header);

  // Sections requiring special attention, and the packed android
  // relocations offset.
  Elf_Scn* dynamic_section = NULL;
  Elf_Scn* dynsym_section = NULL;
  Elf_Scn* plt_relocations_section = NULL;
  Elf_Scn* symtab_section = NULL;
  ELF::Off android_relocations_offset = 0;

  // Find these sections, and the packed android relocations offset.
  section = NULL;
  while ((section = elf_nextscn(elf, section)) != NULL) {
    ELF::Shdr* section_header = ELF::getshdr(section);
    std::string name = elf_strptr(elf, string_index, section_header->sh_name);

    if (section_header->sh_offset == dynamic_program_header->p_offset) {
      dynamic_section = section;
    }
    if (name == ".dynsym") {
      dynsym_section = section;
    }
    if (name == ".rel.plt" || name == ".rela.plt") {
      plt_relocations_section = section;
    }
    if (name == ".symtab") {
      symtab_section = section;
    }

    // Note packed android relocations offset.
    if (name == ".android.rel.dyn" || name == ".android.rela.dyn") {
      android_relocations_offset = section_header->sh_offset;
    }
  }
  CHECK(dynamic_section != NULL);
  CHECK(dynsym_section != NULL);
  CHECK(plt_relocations_section != NULL);
  CHECK(android_relocations_offset != 0);

  // Adjust the .dynamic section for the hole. Because we have to edit the
  // current contents of .dynamic we disallow resizing it.
  CHECK(section != dynamic_section);
  AdjustDynamicSectionForHole<Rel>(dynamic_section,
                                   is_relocations_resize,
                                   hole_start,
                                   hole_size);

  // Adjust the .dynsym section for the hole.
  AdjustDynSymSectionForHole(dynsym_section, hole_start, hole_size);

  // Adjust the plt relocations section for the hole.
  AdjustRelPltSectionForHole<Rel>(plt_relocations_section,
                                  hole_start,
                                  hole_size);

  // If present, adjust the .symtab section for the hole. If the shared
  // library was stripped then .symtab will be absent.
  if (symtab_section)
    AdjustSymTabSectionForHole(symtab_section, hole_start, hole_size);
}

// Find the first slot in a dynamics array with the given tag. The array
// always ends with a free (unused) element, which we exclude from the
// search. Returns dynamics->size() if not found.
size_t FindDynamicEntry(ELF::Sword tag,
                        std::vector<ELF::Dyn>* dynamics) {
  // Loop until the penultimate entry. We exclude the end sentinel.
  for (size_t i = 0; i < dynamics->size() - 1; ++i) {
    if (dynamics->at(i).d_tag == tag)
      return i;
  }

  // The tag was not found.
  return dynamics->size();
}

// Replace the first free (unused) slot in a dynamics vector with the given
// value. The vector always ends with a free (unused) element, so the slot
// found cannot be the last one in the vector.
void AddDynamicEntry(const ELF::Dyn& dyn,
                     std::vector<ELF::Dyn>* dynamics) {
  const size_t slot = FindDynamicEntry(DT_NULL, dynamics);
  if (slot == dynamics->size()) {
    LOG(FATAL) << "No spare dynamic array slots found "
               << "(to fix, increase gold's --spare-dynamic-tags value)";
  }

  // Replace this entry with the one supplied.
  dynamics->at(slot) = dyn;
  VLOG(1) << "dynamic[" << slot << "] overwritten with " << dyn.d_tag;
}

// Remove the element in the dynamics vector that matches the given tag,
// replacing it with unused slot data. Shuffle the following elements up,
// and ensure that the last is the null sentinel.
void RemoveDynamicEntry(ELF::Sword tag,
                        std::vector<ELF::Dyn>* dynamics) {
  const size_t slot = FindDynamicEntry(tag, dynamics);
  CHECK(slot != dynamics->size());

  // Remove this entry by shuffling up everything that follows.
  for (size_t i = slot; i < dynamics->size() - 1; ++i) {
    dynamics->at(i) = dynamics->at(i + 1);
    VLOG(1) << "dynamic[" << i
            << "] overwritten with dynamic[" << i + 1 << "]";
  }

  // Ensure that the end sentinel is still present.
  CHECK(dynamics->at(dynamics->size() - 1).d_tag == DT_NULL);
}

// Adjust a relocation. For a relocation without addend, we find its target
// in the section and adjust that. For a relocation with addend, the target
// is the relocation addend, and the section data at the target is zero.
template <typename Rel>
void AdjustRelocation(ssize_t index,
                      ELF::Addr hole_start,
                      ssize_t hole_size,
                      Rel* relocation,
                      ELF::Off* target);

template <>
void AdjustRelocation<ELF::Rel>(ssize_t index,
                                ELF::Addr hole_start,
                                ssize_t hole_size,
                                ELF::Rel* relocation,
                                ELF::Off* target) {
  // Adjust the target if after the hole start.
  if (*target > hole_start) {
    *target += hole_size;
    VLOG(1) << "relocation[" << index << "] target adjusted to " << *target;
  }
}

template <>
void AdjustRelocation<ELF::Rela>(ssize_t index,
                                 ELF::Addr hole_start,
                                 ssize_t hole_size,
                                 ELF::Rela* relocation,
                                 ELF::Off* target) {
  // The relocation's target is the addend. Adjust if after the hole start.
  if (relocation->r_addend > hole_start) {
    relocation->r_addend += hole_size;
    VLOG(1) << "relocation["
            << index << "] addend adjusted to " << relocation->r_addend;
  }
}

// For relative relocations without addends, adjust the file data to which
// they refer. For relative relocations with addends, adjust the addends.
// This translates data into the area it will occupy after the hole in
// the dynamic relocations is added or removed.
template <typename Rel>
void AdjustRelocationTargets(Elf* elf,
                             ELF::Off hole_start,
                             ssize_t hole_size,
                             std::vector<Rel>* relocations) {
  Elf_Scn* section = NULL;
  while ((section = elf_nextscn(elf, section)) != NULL) {
    const ELF::Shdr* section_header = ELF::getshdr(section);

    // Ignore sections that do not appear in a process memory image.
    if (section_header->sh_addr == 0)
      continue;

    Elf_Data* data = GetSectionData(section);

    // Ignore sections with no effective data.
    if (data->d_buf == NULL)
      continue;

    // Identify this section's start and end addresses.
    const ELF::Addr section_start = section_header->sh_addr;
    const ELF::Addr section_end = section_start + section_header->sh_size;

    // Create a copy of the section's data.
    uint8_t* area = new uint8_t[data->d_size];
    memcpy(area, data->d_buf, data->d_size);

    for (size_t i = 0; i < relocations->size(); ++i) {
      Rel* relocation = &relocations->at(i);
      CHECK(ELF_R_TYPE(relocation->r_info) == ELF::kRelativeRelocationCode);

      // See if this relocation points into the current section.
      if (relocation->r_offset >= section_start &&
          relocation->r_offset < section_end) {
        // The relocation's target is what it points to in area.
        // For relocations without addend, this is what we adjust; for
        // relocations with addend, we leave this (it will be zero)
        // and instead adjust the addend.
        ELF::Addr byte_offset = relocation->r_offset - section_start;
        ELF::Off* target = reinterpret_cast<ELF::Off*>(area + byte_offset);
        AdjustRelocation<Rel>(i, hole_start, hole_size, relocation, target);
      }
    }

    // If we altered the data for this section, write it back.
    if (memcmp(area, data->d_buf, data->d_size)) {
      RewriteSectionData(data, area, data->d_size);
    } else {
      delete [] area;
    }
  }
}

// Pad relocations with a given number of null relocations.
template <typename Rel>
void PadRelocations(size_t count, std::vector<Rel>* relocations);

template <>
void PadRelocations<ELF::Rel>(size_t count,
                              std::vector<ELF::Rel>* relocations) {
  ELF::Rel null_relocation;
  null_relocation.r_offset = 0;
  null_relocation.r_info = ELF_R_INFO(0, ELF::kNoRelocationCode);
  std::vector<ELF::Rel> padding(count, null_relocation);
  relocations->insert(relocations->end(), padding.begin(), padding.end());
}

template <>
void PadRelocations<ELF::Rela>(size_t count,
                               std::vector<ELF::Rela>* relocations) {
  ELF::Rela null_relocation;
  null_relocation.r_offset = 0;
  null_relocation.r_info = ELF_R_INFO(0, ELF::kNoRelocationCode);
  null_relocation.r_addend = 0;
  std::vector<ELF::Rela> padding(count, null_relocation);
  relocations->insert(relocations->end(), padding.begin(), padding.end());
}
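
// (NONE-type relocations are ignored by the loader at runtime, so the padding
// entries added above only serve to keep section sizes and alignment stable.)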

// Adjust relocations so that the offset that they indicate will be correct
// after the hole in the dynamic relocations is added or removed (in effect,
// relocate the relocations).
template <typename Rel>
void AdjustRelocations(ELF::Off hole_start,
                       ssize_t hole_size,
                       std::vector<Rel>* relocations) {
  for (size_t i = 0; i < relocations->size(); ++i) {
    Rel* relocation = &relocations->at(i);
    if (relocation->r_offset > hole_start) {
      relocation->r_offset += hole_size;
      VLOG(1) << "relocation[" << i
              << "] offset adjusted to " << relocation->r_offset;
    }
  }
}

// Remove relative entries from dynamic relocations and write as packed
// data into android packed relocations.
bool ElfFile::PackRelocations() {
  // Load the ELF file into libelf.
  if (!Load()) {
    LOG(ERROR) << "Failed to load as ELF";
    return false;
  }

  // Retrieve the current dynamic relocations section data.
  Elf_Data* data = GetSectionData(relocations_section_);

  if (relocations_type_ == REL) {
    // Convert data to a vector of relocations.
    const ELF::Rel* relocations_base =
        reinterpret_cast<ELF::Rel*>(data->d_buf);
    std::vector<ELF::Rel> relocations(
        relocations_base,
        relocations_base + data->d_size / sizeof(relocations[0]));

    LOG(INFO) << "Relocations : REL";
    return PackTypedRelocations<ELF::Rel>(relocations, data);
  }

  if (relocations_type_ == RELA) {
    // Convert data to a vector of relocations with addends.
    const ELF::Rela* relocations_base =
        reinterpret_cast<ELF::Rela*>(data->d_buf);
    std::vector<ELF::Rela> relocations(
        relocations_base,
        relocations_base + data->d_size / sizeof(relocations[0]));

    LOG(INFO) << "Relocations : RELA";
    return PackTypedRelocations<ELF::Rela>(relocations, data);
  }

  return false;
}

// Helper for PackRelocations(). Rel type is one of ELF::Rel or ELF::Rela.
template <typename Rel>
bool ElfFile::PackTypedRelocations(const std::vector<Rel>& relocations,
                                   Elf_Data* data) {
  // Filter relocations into those that are relative and others.
  std::vector<Rel> relative_relocations;
  std::vector<Rel> other_relocations;

  for (size_t i = 0; i < relocations.size(); ++i) {
    const Rel& relocation = relocations[i];
    if (ELF_R_TYPE(relocation.r_info) == ELF::kRelativeRelocationCode) {
      CHECK(ELF_R_SYM(relocation.r_info) == 0);
      relative_relocations.push_back(relocation);
    } else {
      other_relocations.push_back(relocation);
    }
  }
  LOG(INFO) << "Relative : " << relative_relocations.size() << " entries";
  LOG(INFO) << "Other : " << other_relocations.size() << " entries";
  LOG(INFO) << "Total : " << relocations.size() << " entries";

  // If no relative relocations then we have nothing packable. Perhaps
  // the shared object has already been packed?
  if (relative_relocations.empty()) {
    LOG(ERROR) << "No relative relocations found (already packed?)";
    return false;
  }

  // Unless padding, pre-apply relative relocations to account for the
  // hole, and pre-adjust all relocation offsets accordingly.
  if (!is_padding_relocations_) {
    // Pre-calculate the size of the hole we will close up when we rewrite
    // dynamic relocations. We have to adjust relocation addresses to
    // account for this.
    ELF::Shdr* section_header = ELF::getshdr(relocations_section_);
    const ELF::Off hole_start = section_header->sh_offset;
    ssize_t hole_size =
        relative_relocations.size() * sizeof(relative_relocations[0]);
    const ssize_t unaligned_hole_size = hole_size;

    // Adjust the actual hole size to preserve alignment. We always adjust
    // by a whole number of NONE-type relocations.
    while (hole_size % kPreserveAlignment)
      hole_size -= sizeof(relative_relocations[0]);
    LOG(INFO) << "Compaction : " << hole_size << " bytes";
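
    // For example (illustrative figures only): with 8-byte REL entries and
    // 4096-byte alignment, 1250 relative relocations give an unaligned hole
    // of 10000 bytes; the loop above trims it to 8192 bytes, and the
    // remaining 1808 bytes become 226 NONE-type padding relocations below.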

    // Adjusting for alignment may have removed any packing benefit.
    if (hole_size == 0) {
      LOG(INFO) << "Too few relative relocations to pack after alignment";
      return false;
    }

    // Find the padding needed in other_relocations to preserve alignment.
    // Ensure that we never completely empty the real relocations section.
    size_t padding_bytes = unaligned_hole_size - hole_size;
    if (padding_bytes == 0 && other_relocations.size() == 0) {
      do {
        padding_bytes += sizeof(relative_relocations[0]);
      } while (padding_bytes % kPreserveAlignment);
    }
    CHECK(padding_bytes % sizeof(other_relocations[0]) == 0);
    const size_t padding = padding_bytes / sizeof(other_relocations[0]);

    // Padding may have removed any packing benefit.
    if (padding >= relative_relocations.size()) {
      LOG(INFO) << "Too few relative relocations to pack after padding";
      return false;
    }

    // Add null relocations to other_relocations to preserve alignment.
    PadRelocations<Rel>(padding, &other_relocations);
    LOG(INFO) << "Alignment pad : " << padding << " relocations";

    // Apply relocations to all relative data to relocate it into the
    // area it will occupy once the hole in the dynamic relocations is removed.
    AdjustRelocationTargets<Rel>(
        elf_, hole_start, -hole_size, &relative_relocations);
    // Relocate the relocations.
    AdjustRelocations<Rel>(hole_start, -hole_size, &relative_relocations);
    AdjustRelocations<Rel>(hole_start, -hole_size, &other_relocations);
  } else {
    // If padding, add NONE-type relocations to other_relocations to make it
    // the same size as the original relocations we read in. This makes
    // the ResizeSection() below a no-op.
    const size_t padding = relocations.size() - other_relocations.size();
    PadRelocations<Rel>(padding, &other_relocations);
  }

  // Pack relative relocations.
  const size_t initial_bytes =
      relative_relocations.size() * sizeof(relative_relocations[0]);
  LOG(INFO) << "Unpacked relative: " << initial_bytes << " bytes";
  std::vector<uint8_t> packed;
  RelocationPacker packer;
  packer.PackRelativeRelocations(relative_relocations, &packed);
  const void* packed_data = &packed[0];
  const size_t packed_bytes = packed.size() * sizeof(packed[0]);
  LOG(INFO) << "Packed relative: " << packed_bytes << " bytes";

  // If we have insufficient relative relocations to form a run then
  // packing fails.
  if (packed.empty()) {
    LOG(INFO) << "Too few relative relocations to pack";
    return false;
  }

  // Run a loopback self-test as a check that packing is lossless.
  std::vector<Rel> unpacked;
  packer.UnpackRelativeRelocations(packed, &unpacked);
  CHECK(unpacked.size() == relative_relocations.size());
  CHECK(!memcmp(&unpacked[0],
                &relative_relocations[0],
                unpacked.size() * sizeof(unpacked[0])));

  // Make sure packing saved some space.
  if (packed_bytes >= initial_bytes) {
    LOG(INFO) << "Packing relative relocations saves no space";
    return false;
  }

  // Rewrite the current dynamic relocations section to be only the
  // non-relative relocations, then shrink it to size.
  const void* section_data = &other_relocations[0];
  const size_t bytes = other_relocations.size() * sizeof(other_relocations[0]);
  ResizeSection<Rel>(elf_, relocations_section_, bytes);
  RewriteSectionData(data, section_data, bytes);

  // Rewrite the current packed android relocations section to hold the packed
  // relative relocations.
  data = GetSectionData(android_relocations_section_);
  ResizeSection<Rel>(elf_, android_relocations_section_, packed_bytes);
  RewriteSectionData(data, packed_data, packed_bytes);

  // Rewrite .dynamic to include two new tags describing the packed android
  // relocations.
  data = GetSectionData(dynamic_section_);
  const ELF::Dyn* dynamic_base = reinterpret_cast<ELF::Dyn*>(data->d_buf);
  std::vector<ELF::Dyn> dynamics(
      dynamic_base,
      dynamic_base + data->d_size / sizeof(dynamics[0]));
  // Use two of the spare slots to describe the packed section.
  ELF::Shdr* section_header = ELF::getshdr(android_relocations_section_);
  const ELF::Dyn offset_dyn
      = {DT_ANDROID_REL_OFFSET, {section_header->sh_offset}};
  AddDynamicEntry(offset_dyn, &dynamics);
  const ELF::Dyn size_dyn
      = {DT_ANDROID_REL_SIZE, {section_header->sh_size}};
  AddDynamicEntry(size_dyn, &dynamics);
  const void* dynamics_data = &dynamics[0];
  const size_t dynamics_bytes = dynamics.size() * sizeof(dynamics[0]);
  RewriteSectionData(data, dynamics_data, dynamics_bytes);

  return true;
}

// Find packed relative relocations in the packed android relocations
// section, unpack them, and rewrite the dynamic relocations section to
// contain unpacked data.
bool ElfFile::UnpackRelocations() {
  // Load the ELF file into libelf.
  if (!Load()) {
    LOG(ERROR) << "Failed to load as ELF";
    return false;
  }

  // Retrieve the current packed android relocations section data.
  Elf_Data* data = GetSectionData(android_relocations_section_);

  // Convert data to a vector of bytes.
  const uint8_t* packed_base = reinterpret_cast<uint8_t*>(data->d_buf);
  std::vector<uint8_t> packed(
      packed_base,
      packed_base + data->d_size / sizeof(packed[0]));

  if (packed.size() > 3 &&
      packed[0] == 'A' &&
      packed[1] == 'P' &&
      packed[2] == 'R' &&
      packed[3] == '1') {
    // Signature is APR1, unpack relocations.
    CHECK(relocations_type_ == REL);
    LOG(INFO) << "Relocations : REL";
    return UnpackTypedRelocations<ELF::Rel>(packed, data);
  }

  if (packed.size() > 3 &&
      packed[0] == 'A' &&
      packed[1] == 'P' &&
      packed[2] == 'A' &&
      packed[3] == '1') {
    // Signature is APA1, unpack relocations with addends.
    CHECK(relocations_type_ == RELA);
    LOG(INFO) << "Relocations : RELA";
    return UnpackTypedRelocations<ELF::Rela>(packed, data);
  }

  LOG(ERROR) << "Packed relative relocations not found (not packed?)";
  return false;
}

// Helper for UnpackRelocations(). Rel type is one of ELF::Rel or ELF::Rela.
template <typename Rel>
bool ElfFile::UnpackTypedRelocations(const std::vector<uint8_t>& packed,
                                     Elf_Data* data) {
  // Unpack the data to re-materialize the relative relocations.
  const size_t packed_bytes = packed.size() * sizeof(packed[0]);
  LOG(INFO) << "Packed relative: " << packed_bytes << " bytes";
  std::vector<Rel> relative_relocations;
  RelocationPacker packer;
  packer.UnpackRelativeRelocations(packed, &relative_relocations);
  const size_t unpacked_bytes =
      relative_relocations.size() * sizeof(relative_relocations[0]);
  LOG(INFO) << "Unpacked relative: " << unpacked_bytes << " bytes";

  // Retrieve the current dynamic relocations section data.
  data = GetSectionData(relocations_section_);

  // Interpret data as relocations.
  const Rel* relocations_base = reinterpret_cast<Rel*>(data->d_buf);
  std::vector<Rel> relocations(
      relocations_base,
      relocations_base + data->d_size / sizeof(relocations[0]));

  std::vector<Rel> other_relocations;
  size_t padding = 0;

  // Filter relocations to locate any that are NONE-type. These will occur
  // if padding was turned on for packing.
  for (size_t i = 0; i < relocations.size(); ++i) {
    const Rel& relocation = relocations[i];
    if (ELF_R_TYPE(relocation.r_info) != ELF::kNoRelocationCode) {
      other_relocations.push_back(relocation);
    } else {
      ++padding;
    }
  }
  LOG(INFO) << "Relative : " << relative_relocations.size() << " entries";
  LOG(INFO) << "Other : " << other_relocations.size() << " entries";

  // If we found the same number of null relocation entries in the dynamic
  // relocations section as we hold as unpacked relative relocations, then
  // this is a padded file.
  const bool is_padded = padding == relative_relocations.size();

  // Unless padded, pre-apply relative relocations to account for the
  // hole, and pre-adjust all relocation offsets accordingly.
  if (!is_padded) {
    // Pre-calculate the size of the hole we will open up when we rewrite
    // dynamic relocations. We have to adjust relocation addresses to
    // account for this.
    ELF::Shdr* section_header = ELF::getshdr(relocations_section_);
    const ELF::Off hole_start = section_header->sh_offset;
    ssize_t hole_size =
        relative_relocations.size() * sizeof(relative_relocations[0]);

    // Adjust the hole size for the padding added to preserve alignment.
    hole_size -= padding * sizeof(other_relocations[0]);
    LOG(INFO) << "Expansion : " << hole_size << " bytes";
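
    // (hole_size is the net growth of the section: the bytes needed by the
    // unpacked relative relocations, minus the bytes already occupied by the
    // NONE-type padding entries they replace.)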

    // Apply relocations to all relative data to relocate it into the
    // area it will occupy once the hole in dynamic relocations is opened.
    AdjustRelocationTargets<Rel>(
        elf_, hole_start, hole_size, &relative_relocations);
    // Relocate the relocations.
    AdjustRelocations<Rel>(hole_start, hole_size, &relative_relocations);
    AdjustRelocations<Rel>(hole_start, hole_size, &other_relocations);
  }

  // Rewrite the current dynamic relocations section to be the relative
  // relocations followed by other relocations. This is the usual order in
  // which we find them after linking, so this action will normally put the
  // entire dynamic relocations section back to its pre-split-and-packed state.
  relocations.assign(relative_relocations.begin(), relative_relocations.end());
  relocations.insert(relocations.end(),
                     other_relocations.begin(), other_relocations.end());
  const void* section_data = &relocations[0];
  const size_t bytes = relocations.size() * sizeof(relocations[0]);
  LOG(INFO) << "Total : " << relocations.size() << " entries";
  ResizeSection<Rel>(elf_, relocations_section_, bytes);
  RewriteSectionData(data, section_data, bytes);

  // Nearly empty the current packed android relocations section. Leaves a
  // four-byte stub so that some data remains allocated to the section.
  // This is a convenience which allows us to re-pack this file again without
  // having to remove the section and then add a new small one with objcopy.
  // The way we resize sections relies on there being some data in a section.
  data = GetSectionData(android_relocations_section_);
  ResizeSection<Rel>(
      elf_, android_relocations_section_, sizeof(kStubIdentifier));
  RewriteSectionData(data, &kStubIdentifier, sizeof(kStubIdentifier));

  // Rewrite .dynamic to remove two tags describing packed android relocations.
  data = GetSectionData(dynamic_section_);
  const ELF::Dyn* dynamic_base = reinterpret_cast<ELF::Dyn*>(data->d_buf);
  std::vector<ELF::Dyn> dynamics(
      dynamic_base,
      dynamic_base + data->d_size / sizeof(dynamics[0]));
  RemoveDynamicEntry(DT_ANDROID_REL_OFFSET, &dynamics);
  RemoveDynamicEntry(DT_ANDROID_REL_SIZE, &dynamics);
  const void* dynamics_data = &dynamics[0];
  const size_t dynamics_bytes = dynamics.size() * sizeof(dynamics[0]);
  RewriteSectionData(data, dynamics_data, dynamics_bytes);

  return true;
}

// Flush rewritten shared object file data.
void ElfFile::Flush() {
  // Flag all ELF data held in memory as needing to be written back to the
  // file, and tell libelf that we have controlled the file layout.
  elf_flagelf(elf_, ELF_C_SET, ELF_F_DIRTY);
  elf_flagelf(elf_, ELF_C_SET, ELF_F_LAYOUT);

  // Write ELF data back to disk.
  const off_t file_bytes = elf_update(elf_, ELF_C_WRITE);
  CHECK(file_bytes > 0);
  VLOG(1) << "elf_update returned: " << file_bytes;

  // Clean up libelf, and truncate the output file to the number of bytes
  // written by elf_update().
  elf_end(elf_);
  elf_ = NULL;
  const int truncate = ftruncate(fd_, file_bytes);
  CHECK(truncate == 0);
}

}  // namespace relocation_packer