// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "components/nacl/loader/nonsfi/elf_loader.h"

#include <elf.h>
#include <link.h>

#include <algorithm>
#include <cstring>

#include <sys/mman.h>

#include "base/logging.h"
#include "base/strings/string_number_conversions.h"
#include "native_client/src/include/portability.h"
#include "native_client/src/trusted/desc/nacl_desc_base.h"
#include "native_client/src/trusted/desc/nacl_desc_effector_trusted_mem.h"
#include "native_client/src/trusted/service_runtime/include/bits/mman.h"
// Extracted from native_client/src/trusted/service_runtime/nacl_config.h.
#if NACL_ARCH(NACL_BUILD_ARCH) == NACL_x86
# if NACL_BUILD_SUBARCH == 64
#  define NACL_ELF_E_MACHINE EM_X86_64
# elif NACL_BUILD_SUBARCH == 32
#  define NACL_ELF_E_MACHINE EM_386
# else
#  error Unknown platform.
# endif
#elif NACL_ARCH(NACL_BUILD_ARCH) == NACL_arm
# define NACL_ELF_E_MACHINE EM_ARM
#elif NACL_ARCH(NACL_BUILD_ARCH) == NACL_mips
# define NACL_ELF_E_MACHINE EM_MIPS
#else
# error Unknown platform.
#endif
namespace nacl {
namespace nonsfi {
namespace {

// Page size for non-SFI Mode.
const ElfW(Addr) kNonSfiPageSize = 4096;
const ElfW(Addr) kNonSfiPageMask = kNonSfiPageSize - 1;

NaClErrorCode ValidateElfHeader(const ElfW(Ehdr)& ehdr) {
  if (std::memcmp(ehdr.e_ident, ELFMAG, SELFMAG)) {
    LOG(ERROR) << "Bad elf magic";
    return LOAD_BAD_ELF_MAGIC;
  }

#if NACL_BUILD_SUBARCH == 32
  if (ehdr.e_ident[EI_CLASS] != ELFCLASS32) {
    LOG(ERROR) << "Bad elf class";
    return LOAD_NOT_32_BIT;
  }
#elif NACL_BUILD_SUBARCH == 64
  if (ehdr.e_ident[EI_CLASS] != ELFCLASS64) {
    LOG(ERROR) << "Bad elf class";
    return LOAD_NOT_64_BIT;
  }
#else
# error Unknown platform.
#endif

  if (ehdr.e_type != ET_DYN) {
    LOG(ERROR) << "Not a relocatable ELF object (not ET_DYN)";
    return LOAD_NOT_EXEC;
  }

  if (ehdr.e_machine != NACL_ELF_E_MACHINE) {
    LOG(ERROR) << "Bad machine: "
               << base::HexEncode(&ehdr.e_machine, sizeof(ehdr.e_machine));
    return LOAD_BAD_MACHINE;
  }

  if (ehdr.e_version != EV_CURRENT) {
    LOG(ERROR) << "Bad elf version: "
               << base::HexEncode(&ehdr.e_version, sizeof(ehdr.e_version));
    return LOAD_BAD_ELF_VERS;
  }

  return LOAD_OK;
}

// Returns the address of the page starting at address 'addr' for non-SFI mode.
ElfW(Addr) GetPageStart(ElfW(Addr) addr) {
  return addr & ~kNonSfiPageMask;
}

// Returns the offset of address 'addr' in its memory page. In other words,
// this equals 'addr' - GetPageStart(addr).
ElfW(Addr) GetPageOffset(ElfW(Addr) addr) {
  return addr & kNonSfiPageMask;
}

// Returns the address of the next page after address 'addr', unless 'addr' is
// at the start of a page. This equals:
//   addr == GetPageStart(addr) ? addr : GetPageStart(addr) + kNonSfiPageSize
ElfW(Addr) GetPageEnd(ElfW(Addr) addr) {
  return GetPageStart(addr + kNonSfiPageSize - 1);
}

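// Worked example (illustrative, assuming the 4096-byte page size above):
//   GetPageStart(0x1234)  == 0x1000
//   GetPageOffset(0x1234) == 0x234
//   GetPageEnd(0x1234)    == 0x2000
//   GetPageEnd(0x1000)    == 0x1000  (already page-aligned)
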
// Converts the pflags (in phdr) to mmap's prot flags.
int PFlagsToProt(int pflags) {
  return ((pflags & PF_X) ? PROT_EXEC : 0) |
         ((pflags & PF_R) ? PROT_READ : 0) |
         ((pflags & PF_W) ? PROT_WRITE : 0);
}

// Converts the pflags (in phdr) to NaCl ABI's prot flags.
int PFlagsToNaClProt(int pflags) {
  return ((pflags & PF_X) ? NACL_ABI_PROT_EXEC : 0) |
         ((pflags & PF_R) ? NACL_ABI_PROT_READ : 0) |
         ((pflags & PF_W) ? NACL_ABI_PROT_WRITE : 0);
}

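// For example, a read-only executable text segment typically has
// p_flags == (PF_R | PF_X), which converts to PROT_READ | PROT_EXEC
// (NACL_ABI_PROT_READ | NACL_ABI_PROT_EXEC for the NaCl ABI variant), while a
// writable data segment with (PF_R | PF_W) becomes PROT_READ | PROT_WRITE.
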
// Returns the load size for the given phdrs, or 0 on error.
ElfW(Addr) GetLoadSize(const ElfW(Phdr)* phdrs, int phnum) {
  ElfW(Addr) begin = ~static_cast<ElfW(Addr)>(0);
  ElfW(Addr) end = 0;

  for (int i = 0; i < phnum; ++i) {
    const ElfW(Phdr)& phdr = phdrs[i];
    if (phdr.p_type != PT_LOAD) {
      // Do nothing for non PT_LOAD header.
      continue;
    }

    begin = std::min(begin, phdr.p_vaddr);
    end = std::max(end, phdr.p_vaddr + phdr.p_memsz);
  }

  if (begin > end) {
    // The end address looks overflowing, or PT_LOAD is not found.
    return 0;
  }

  return GetPageEnd(end) - GetPageStart(begin);
}

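// Illustrative example with hypothetical program headers: given two PT_LOAD
// segments, one at p_vaddr 0x0 with p_memsz 0x5000 and one at p_vaddr 0x6000
// with p_memsz 0x1210, begin == 0x0 and end == 0x7210, so the returned size
// is GetPageEnd(0x7210) - GetPageStart(0x0) == 0x8000.
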
// Reserves the memory for the given phdrs, and stores the memory bias to the
// load_bias.
NaClErrorCode ReserveMemory(const ElfW(Phdr)* phdrs,
                            int phnum,
                            ElfW(Addr)* load_bias) {
  ElfW(Addr) size = GetLoadSize(phdrs, phnum);
  if (size == 0) {
    LOG(ERROR) << "ReserveMemory failed to calculate size";
    return LOAD_UNLOADABLE;
  }

  // Make sure that the given program headers represent a PIE binary.
  for (int i = 0; i < phnum; ++i) {
    if (phdrs[i].p_type == PT_LOAD) {
      // Here, phdrs[i] is the first loadable segment.
      if (phdrs[i].p_vaddr != 0) {
        // The binary is not PIE (i.e. it needs to be loaded at a fixed
        // address). We don't support such a case.
        LOG(ERROR)
            << "ReserveMemory: Non-PIE binary loading is not supported.";
        return LOAD_UNLOADABLE;
      }
      break;
    }
  }

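  // Reserve a single contiguous PROT_NONE region large enough for every
  // PT_LOAD segment and let the kernel choose its base address; this is why
  // only PIE binaries (first PT_LOAD at p_vaddr 0) are accepted above.
  // LoadSegments() later maps the individual segments over this reservation.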
  void* start = mmap(0, size, PROT_NONE, MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
  if (start == MAP_FAILED) {
    LOG(ERROR) << "ReserveMemory: failed to mmap.";
    return LOAD_NO_MEMORY;
  }

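  // Since the first PT_LOAD segment was verified to start at p_vaddr 0, the
  // load bias is simply the base address chosen by mmap. For instance
  // (hypothetical value), if mmap returned 0x40000000, a segment with
  // p_vaddr 0x2000 ends up mapped at 0x40002000 by LoadSegments().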
  *load_bias = reinterpret_cast<ElfW(Addr)>(start);
  return LOAD_OK;
}

NaClErrorCode LoadSegments(
    const ElfW(Phdr)* phdrs, int phnum, ElfW(Addr) load_bias,
    struct NaClDesc* descriptor) {
  for (int i = 0; i < phnum; ++i) {
    const ElfW(Phdr)& phdr = phdrs[i];
    if (phdr.p_type != PT_LOAD) {
      // Not a load target.
      continue;
    }

    // Addresses on the memory.
    ElfW(Addr) seg_start = phdr.p_vaddr + load_bias;
    ElfW(Addr) seg_end = seg_start + phdr.p_memsz;
    ElfW(Addr) seg_page_start = GetPageStart(seg_start);
    ElfW(Addr) seg_page_end = GetPageEnd(seg_end);
    ElfW(Addr) seg_file_end = seg_start + phdr.p_filesz;

    // Addresses on the file content.
    ElfW(Addr) file_start = phdr.p_offset;
    ElfW(Addr) file_end = file_start + phdr.p_filesz;
    ElfW(Addr) file_page_start = GetPageStart(file_start);

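    // Map the file-backed part of the segment through the NaClDesc vtable.
    // The Map entry behaves like mmap() on the descriptor's underlying file:
    // it maps the page-aligned file range starting at file_page_start at the
    // fixed address seg_page_start, inside the region reserved by
    // ReserveMemory().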
    uintptr_t seg_addr = (*NACL_VTBL(NaClDesc, descriptor)->Map)(
        descriptor,
        NaClDescEffectorTrustedMem(),
        reinterpret_cast<void *>(seg_page_start),
        file_end - file_page_start,
        PFlagsToNaClProt(phdr.p_flags),
        NACL_ABI_MAP_PRIVATE | NACL_ABI_MAP_FIXED,
        file_page_start);
    if (NaClPtrIsNegErrno(&seg_addr)) {
      LOG(ERROR) << "LoadSegments: [" << i << "] mmap failed, " << seg_addr;
      return LOAD_NO_MEMORY;
    }

    // Handle the BSS: fill zero between the segment end and the page boundary
    // if necessary (i.e. if the segment doesn't end on a page boundary).
    ElfW(Addr) seg_file_end_offset = GetPageOffset(seg_file_end);
    if ((phdr.p_flags & PF_W) && seg_file_end_offset > 0) {
      memset(reinterpret_cast<void *>(seg_file_end), 0,
             kNonSfiPageSize - seg_file_end_offset);
    }

    // Hereafter, seg_file_end is the first page address after the file
    // content. If seg_end is larger, we need to zero anything between them.
    // This is done by using a private anonymous mmap for all extra pages.
    seg_file_end = GetPageEnd(seg_file_end);
    if (seg_page_end > seg_file_end) {
      void* zeromap = mmap(reinterpret_cast<void *>(seg_file_end),
                           seg_page_end - seg_file_end,
                           PFlagsToProt(phdr.p_flags),
                           MAP_FIXED | MAP_ANONYMOUS | MAP_PRIVATE,
                           -1, 0);
      if (zeromap == MAP_FAILED) {
        LOG(ERROR) << "LoadSegments: [" << i << "] Failed to zeromap.";
        return LOAD_NO_MEMORY;
      }
    }
  }
  return LOAD_OK;
}

}  // namespace

struct ElfImage::Data {
  // Limit of elf program headers allowed.
  enum {
    MAX_PROGRAM_HEADERS = 128
  };

  ElfW(Ehdr) ehdr;
  ElfW(Phdr) phdrs[MAX_PROGRAM_HEADERS];
  ElfW(Addr) load_bias;
};

ElfImage::ElfImage() {
}

ElfImage::~ElfImage() {
}

uintptr_t ElfImage::entry_point() const {
  if (!data_) {
    LOG(DFATAL) << "entry_point must be called after Read().";
    return 0;
  }
  return data_->ehdr.e_entry + data_->load_bias;
}

NaClErrorCode ElfImage::Read(struct NaClDesc* descriptor) {
  ::scoped_ptr<Data> data(new Data);

  // Read the ELF header.
  ssize_t read_ret = (*NACL_VTBL(NaClDesc, descriptor)->PRead)(
      descriptor, &data->ehdr, sizeof(data->ehdr), 0);
  if (NaClSSizeIsNegErrno(&read_ret) ||
      static_cast<size_t>(read_ret) != sizeof(data->ehdr)) {
    LOG(ERROR) << "Could not load elf headers.";
    return LOAD_READ_ERROR;
  }

  NaClErrorCode error_code = ValidateElfHeader(data->ehdr);
  if (error_code != LOAD_OK)
    return error_code;

  // Read program headers.
  if (data->ehdr.e_phnum > Data::MAX_PROGRAM_HEADERS) {
    LOG(ERROR) << "Too many program headers";
    return LOAD_TOO_MANY_PROG_HDRS;
  }

  if (data->ehdr.e_phentsize != sizeof(data->phdrs[0])) {
    LOG(ERROR) << "Bad program headers size\n"
               << "  ehdr_.e_phentsize = " << data->ehdr.e_phentsize << "\n"
               << "  sizeof phdrs[0] = " << sizeof(data->phdrs[0]);
    return LOAD_BAD_PHENTSIZE;
  }

  size_t read_size = data->ehdr.e_phnum * data->ehdr.e_phentsize;
  read_ret = (*NACL_VTBL(NaClDesc, descriptor)->PRead)(
      descriptor, data->phdrs, read_size, data->ehdr.e_phoff);

  if (NaClSSizeIsNegErrno(&read_ret) ||
      static_cast<size_t>(read_ret) != read_size) {
    LOG(ERROR) << "Cannot load prog headers";
    return LOAD_READ_ERROR;
  }

  data_.swap(data);
  return LOAD_OK;
}

NaClErrorCode ElfImage::Load(struct NaClDesc* descriptor) {
  if (!data_) {
    LOG(DFATAL) << "ElfImage::Load() must be called after Read()";
    return LOAD_INTERNAL;
  }

  NaClErrorCode error =
      ReserveMemory(data_->phdrs, data_->ehdr.e_phnum, &data_->load_bias);
  if (error != LOAD_OK) {
    LOG(ERROR) << "ElfImage::Load: Failed to allocate memory";
    return error;
  }

  error = LoadSegments(
      data_->phdrs, data_->ehdr.e_phnum, data_->load_bias, descriptor);
  if (error != LOAD_OK) {
    LOG(ERROR) << "ElfImage::Load: Failed to load segments";
    return error;
  }

  return LOAD_OK;
}

}  // namespace nonsfi
}  // namespace nacl