2 * Tiny C Memory and bounds checker
4 * Copyright (c) 2002 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
26 #if !defined(__FreeBSD__) \
27 && !defined(__FreeBSD_kernel__) \
28 && !defined(__DragonFly__) \
29 && !defined(__OpenBSD__) \
30 && !defined(__APPLE__) \
31 && !defined(__NetBSD__)
37 #include <sys/syscall.h>
/* Compile-time configuration: debug tracing and per-call statistics. */
#define BOUND_DEBUG (1)
#define BOUND_STATISTIC (1)

/* Debug trace helper: prints only when TCC_BOUNDS_PRINT_CALLS set
   print_calls.  Wrapped in do { } while (0) so the macro behaves as a
   single statement — the bare `if (print_calls) fprintf(a)` form would
   capture a following `else` and mis-parse in unbraced if/else bodies. */
#define dprintf(a...) do { if (print_calls) fprintf(a); } while (0)
50 /* an __attribute__ macro is defined in the system headers */
53 #define FASTCALL __attribute__((regparm(3)))
56 # define DLL_EXPORT __declspec(dllexport)
61 #if defined(__FreeBSD__) \
62 || defined(__FreeBSD_kernel__) \
63 || defined(__DragonFly__) \
64 || defined(__OpenBSD__) \
65 || defined(__NetBSD__) \
66 || defined(__dietlibc__)
74 #define HAVE_MEMALIGN (0)
75 #define MALLOC_REDIR (0)
76 #define HAVE_PTHREAD_CREATE (0)
77 #define HAVE_CTYPE (0)
78 #define HAVE_ERRNO (0)
79 #define HAVE_SIGNAL (0)
80 #define HAVE_SIGACTION (0)
82 #define HAVE_TLS_FUNC (0)
83 #define HAVE_TLS_VAR (0)
89 static CRITICAL_SECTION bounds_sem
;
90 #define INIT_SEM() InitializeCriticalSection(&bounds_sem)
91 #define EXIT_SEM() DeleteCriticalSection(&bounds_sem)
92 #define WAIT_SEM() EnterCriticalSection(&bounds_sem)
93 #define POST_SEM() LeaveCriticalSection(&bounds_sem)
94 #define TRY_SEM() TryEnterCriticalSection(&bounds_sem)
95 #define HAVE_MEMALIGN (0)
96 #define MALLOC_REDIR (0)
97 #define HAVE_PTHREAD_CREATE (0)
98 #define HAVE_CTYPE (0)
99 #define HAVE_ERRNO (0)
100 #define HAVE_SIGNAL (1)
101 #define HAVE_SIGACTION (0)
102 #define HAVE_FORK (0)
103 #define HAVE_TLS_FUNC (1)
104 #define HAVE_TLS_VAR (0)
108 #define __USE_GNU /* get RTLD_NEXT */
109 #include <sys/mman.h>
116 #include <dispatch/dispatch.h>
117 static dispatch_semaphore_t bounds_sem
;
118 #define INIT_SEM() bounds_sem = dispatch_semaphore_create(1)
119 #define EXIT_SEM() dispatch_release(*(dispatch_object_t*)&bounds_sem)
120 #define WAIT_SEM() if (use_sem) dispatch_semaphore_wait(bounds_sem, DISPATCH_TIME_FOREVER)
121 #define POST_SEM() if (use_sem) dispatch_semaphore_signal(bounds_sem)
122 #define TRY_SEM() if (use_sem) dispatch_semaphore_wait(bounds_sem, DISPATCH_TIME_NOW)
124 #include <semaphore.h>
125 static sem_t bounds_sem
;
126 #define INIT_SEM() sem_init (&bounds_sem, 0, 1)
127 #define EXIT_SEM() sem_destroy (&bounds_sem)
128 #define WAIT_SEM() if (use_sem) while (sem_wait (&bounds_sem) < 0 \
130 #define POST_SEM() if (use_sem) sem_post (&bounds_sem)
131 #define TRY_SEM() if (use_sem) while (sem_trywait (&bounds_sem) < 0 \
134 static pthread_mutex_t bounds_mtx
;
135 #define INIT_SEM() pthread_mutex_init (&bounds_mtx, NULL)
136 #define EXIT_SEM() pthread_mutex_destroy (&bounds_mtx)
137 #define WAIT_SEM() if (use_sem) pthread_mutex_lock (&bounds_mtx)
138 #define POST_SEM() if (use_sem) pthread_mutex_unlock (&bounds_mtx)
139 #define TRY_SEM() if (use_sem) pthread_mutex_trylock (&bounds_mtx)
141 static pthread_spinlock_t bounds_spin
;
/* about 25% faster than a semaphore. */
143 #define INIT_SEM() pthread_spin_init (&bounds_spin, 0)
144 #define EXIT_SEM() pthread_spin_destroy (&bounds_spin)
145 #define WAIT_SEM() if (use_sem) pthread_spin_lock (&bounds_spin)
146 #define POST_SEM() if (use_sem) pthread_spin_unlock (&bounds_spin)
147 #define TRY_SEM() if (use_sem) pthread_spin_trylock (&bounds_spin)
149 #define HAVE_MEMALIGN (1)
150 #define MALLOC_REDIR (1)
151 #define HAVE_PTHREAD_CREATE (1)
152 #define HAVE_CTYPE (1)
153 #define HAVE_ERRNO (1)
154 #define HAVE_SIGNAL (1)
155 #define HAVE_SIGACTION (1)
156 #define HAVE_FORK (1)
157 #if !defined(__APPLE__) && defined(__GNUC__) && (defined(__i386__) || defined(__x86_64__))
158 #define HAVE_TLS_FUNC (0)
159 #define HAVE_TLS_VAR (1)
161 #define HAVE_TLS_FUNC (1)
162 #define HAVE_TLS_VAR (0)
167 static void *(*malloc_redir
) (size_t);
168 static void *(*calloc_redir
) (size_t, size_t);
169 static void (*free_redir
) (void *);
170 static void *(*realloc_redir
) (void *, size_t);
171 static unsigned int pool_index
;
172 static unsigned char __attribute__((aligned(16))) initial_pool
[256];
175 static void *(*memalign_redir
) (size_t, size_t);
177 #if HAVE_PTHREAD_CREATE
178 static int (*pthread_create_redir
) (pthread_t
*thread
,
179 const pthread_attr_t
*attr
,
180 void *(*start_routine
)(void *), void *arg
);
183 typedef void (*bound_sig
)(int);
184 static bound_sig (*signal_redir
) (int signum
, bound_sig handler
);
187 static int (*sigaction_redir
) (int signum
, const struct sigaction
*act
,
188 struct sigaction
*oldact
);
191 static int (*fork_redir
) (void);
/* Allocation-origin tags recorded per tracked region; __bound_exit uses
   them (via the alloc_type[] string table) to name the allocating call
   when reporting blocks still live at exit. */
#define TCC_TYPE_NONE (0)
#define TCC_TYPE_MALLOC (1)
#define TCC_TYPE_CALLOC (2)
#define TCC_TYPE_REALLOC (3)
#define TCC_TYPE_MEMALIGN (4)
#define TCC_TYPE_STRDUP (5)

/* this pointer is generated when bound check is incorrect: returned as a
   deliberately invalid sentinel instead of a real address */
#define INVALID_POINTER ((void *)(-2))
204 typedef struct tree_node Tree
;
206 Tree
* left
, * right
;
210 unsigned char is_invalid
; /* true if pointers outside region are invalid */
213 typedef struct alloca_list_struct
{
217 struct alloca_list_struct
*next
;
221 #define BOUND_TID_TYPE DWORD
222 #define BOUND_GET_TID GetCurrentThreadId()
223 #elif defined(__OpenBSD__)
224 #define BOUND_TID_TYPE pid_t
225 #define BOUND_GET_TID syscall (SYS_getthrid)
226 #elif defined(__FreeBSD__) || defined(__NetBSD__)
227 #define BOUND_TID_TYPE pid_t
228 #define BOUND_GET_TID 0
229 #elif defined(__i386__) || defined(__x86_64__) || defined(__arm__) || defined(__aarch64__) || defined(__riscv)
230 #define BOUND_TID_TYPE pid_t
231 #define BOUND_GET_TID syscall (SYS_gettid)
233 #define BOUND_TID_TYPE int
234 #define BOUND_GET_TID 0
237 typedef struct jmp_list_struct
{
242 struct jmp_list_struct
*next
;
#define BOUND_STATISTIC_SPLAY (0)  /* extra counters for the splay primitives */

/* Splay-tree primitives maintaining the set of tracked memory regions
   rooted at the global 'tree'. */
static Tree * splay (size_t addr, Tree *t);
static Tree * splay_end (size_t addr, Tree *t);
static Tree * splay_insert(size_t addr, size_t size, Tree * t);
static Tree * splay_delete(size_t addr, Tree *t);
void splay_printtree(Tree * t, int d);
/* external interface */
/* __bound_ptr_add checks pointer arithmetic (result may point one past
   the end of a region); __bound_ptr_indirN checks an N-byte dereference
   (result must lie strictly inside a region); __bound_local_new /
   __bound_local_delete register and drop a frame's local regions on
   function entry/exit. */
void __bounds_checking (int no_check);
void __bound_never_fatal (int no_check);
DLL_EXPORT void * __bound_ptr_add(void *p, size_t offset);
DLL_EXPORT void * __bound_ptr_indir1(void *p, size_t offset);
DLL_EXPORT void * __bound_ptr_indir2(void *p, size_t offset);
DLL_EXPORT void * __bound_ptr_indir4(void *p, size_t offset);
DLL_EXPORT void * __bound_ptr_indir8(void *p, size_t offset);
DLL_EXPORT void * __bound_ptr_indir12(void *p, size_t offset);
DLL_EXPORT void * __bound_ptr_indir16(void *p, size_t offset);
DLL_EXPORT void FASTCALL __bound_local_new(void *p1);
DLL_EXPORT void FASTCALL __bound_local_delete(void *p1);
/* Initialization / teardown; __bound_main_arg also registers argv/envp
   strings as valid regions. */
void __bound_init(size_t *, int);
void __bound_main_arg(int argc, char **argv, char **envp);
void __bound_exit(void);
268 void *__bound_mmap (void *start
, size_t size
, int prot
, int flags
, int fd
,
270 int __bound_munmap (void *start
, size_t size
);
271 DLL_EXPORT
void __bound_siglongjmp(jmp_buf env
, int val
);
273 DLL_EXPORT
void __bound_new_region(void *p
, size_t size
);
274 DLL_EXPORT
void __bound_setjmp(jmp_buf env
);
275 DLL_EXPORT
void __bound_longjmp(jmp_buf env
, int val
);
276 DLL_EXPORT
void *__bound_memcpy(void *dst
, const void *src
, size_t size
);
277 DLL_EXPORT
int __bound_memcmp(const void *s1
, const void *s2
, size_t size
);
278 DLL_EXPORT
void *__bound_memmove(void *dst
, const void *src
, size_t size
);
279 DLL_EXPORT
void *__bound_memset(void *dst
, int c
, size_t size
);
280 DLL_EXPORT
int __bound_strlen(const char *s
);
281 DLL_EXPORT
char *__bound_strcpy(char *dst
, const char *src
);
282 DLL_EXPORT
char *__bound_strncpy(char *dst
, const char *src
, size_t n
);
283 DLL_EXPORT
int __bound_strcmp(const char *s1
, const char *s2
);
284 DLL_EXPORT
int __bound_strncmp(const char *s1
, const char *s2
, size_t n
);
285 DLL_EXPORT
char *__bound_strcat(char *dest
, const char *src
);
286 DLL_EXPORT
char *__bound_strchr(const char *string
, int ch
);
287 DLL_EXPORT
char *__bound_strdup(const char *s
);
290 DLL_EXPORT
void *__bound___aeabi_memcpy(void *dst
, const void *src
, size_t size
);
291 DLL_EXPORT
void *__bound___aeabi_memmove(void *dst
, const void *src
, size_t size
);
292 DLL_EXPORT
void *__bound___aeabi_memmove4(void *dst
, const void *src
, size_t size
);
293 DLL_EXPORT
void *__bound___aeabi_memmove8(void *dst
, const void *src
, size_t size
);
294 DLL_EXPORT
void *__bound___aeabi_memset(void *dst
, int c
, size_t size
);
295 DLL_EXPORT
void *__aeabi_memcpy(void *dst
, const void *src
, size_t size
);
296 DLL_EXPORT
void *__aeabi_memmove(void *dst
, const void *src
, size_t size
);
297 DLL_EXPORT
void *__aeabi_memmove4(void *dst
, const void *src
, size_t size
);
298 DLL_EXPORT
void *__aeabi_memmove8(void *dst
, const void *src
, size_t size
);
299 DLL_EXPORT
void *__aeabi_memset(void *dst
, int c
, size_t size
);
/* Heap primitives used internally by the checker itself.
   NOTE(review): the first set forwards to the dlsym-resolved host
   allocator (*_redir pointers), the second calls libc directly; the
   #if MALLOC_REDIR / #else selecting between them is not visible in
   this excerpt — confirm against the full file. */
#define BOUND_MALLOC(a) malloc_redir(a)
#define BOUND_MEMALIGN(a,b) memalign_redir(a,b)
#define BOUND_FREE(a) free_redir(a)
#define BOUND_REALLOC(a,b) realloc_redir(a,b)
#define BOUND_CALLOC(a,b) calloc_redir(a,b)
#define BOUND_MALLOC(a) malloc(a)
#define BOUND_MEMALIGN(a,b) memalign(a,b)
#define BOUND_FREE(a) free(a)
#define BOUND_REALLOC(a,b) realloc(a,b)
#define BOUND_CALLOC(a,b) calloc(a,b)
/* Bound-checking wrappers around the heap allocation functions.
   NOTE(review): the extra 'caller' parameter presumably carries the
   intercepted call site's address — confirm against the definitions. */
DLL_EXPORT void *__bound_malloc(size_t size, const void *caller);
DLL_EXPORT void *__bound_memalign(size_t size, size_t align, const void *caller);
DLL_EXPORT void __bound_free(void *ptr, const void *caller);
DLL_EXPORT void *__bound_realloc(void *ptr, size_t size, const void *caller);
DLL_EXPORT void *__bound_calloc(size_t nmemb, size_t size);
/* The last FREE_REUSE_SIZE freed blocks are kept allocated and still
   tracked in the tree (they are only really released in __bound_exit);
   presumably this delays reuse so accesses through stale pointers can be
   detected — confirm against __bound_free. */
#define FREE_REUSE_SIZE (100)
static unsigned int free_reuse_index;
static void *free_reuse_list[FREE_REUSE_SIZE];
static Tree *tree = NULL;              /* splay tree of all tracked memory regions */
#define TREE_REUSE (1)                 /* recycle deleted nodes instead of freeing */
static Tree *tree_free_list;           /* recycled Tree nodes, linked through ->left */
static alloca_list_type *alloca_list;  /* live alloca/VLA regions (see __bound_new_region) */
static jmp_list_type *jmp_list;        /* registered setjmp targets (see __bound_setjmp) */
/* Runtime options; the print_* and never_fatal flags are read from the
   TCC_BOUNDS_* environment variables in __bound_init. */
static unsigned char inited;
static unsigned char print_warn_ptr_add;  /* TCC_BOUNDS_WARN_POINTER_ADD */
static unsigned char print_calls;         /* TCC_BOUNDS_PRINT_CALLS: enables dprintf tracing */
static unsigned char print_heap;          /* TCC_BOUNDS_PRINT_HEAP: report leftover blocks at exit */
static unsigned char print_statistic;     /* TCC_BOUNDS_PRINT_STATISTIC: dump counters at exit */
static unsigned char no_strdup;
static unsigned char use_sem;             /* guards the WAIT_SEM/POST_SEM lock macros */
static int never_fatal;                   /* TCC_BOUNDS_NEVER_FATAL: nonzero = warn, don't abort
                                             (tested by the bound_error macro) */
343 static int no_checking
= 0;
344 static DWORD no_checking_key
;
345 #define NO_CHECKING_CHECK() if (!p) { \
346 p = (int *) LocalAlloc(LPTR, sizeof(int)); \
347 if (!p) bound_alloc_error("tls malloc"); \
349 TlsSetValue(no_checking_key, p); \
351 #define NO_CHECKING_GET() ({ int *p = TlsGetValue(no_checking_key); \
352 NO_CHECKING_CHECK(); \
355 #define NO_CHECKING_SET(v) { int *p = TlsGetValue(no_checking_key); \
356 NO_CHECKING_CHECK(); \
360 static int no_checking
= 0;
361 static pthread_key_t no_checking_key
;
362 #define NO_CHECKING_CHECK() if (!p) { \
363 p = (int *) BOUND_MALLOC(sizeof(int)); \
364 if (!p) bound_alloc_error("tls malloc"); \
366 pthread_setspecific(no_checking_key, p); \
368 #define NO_CHECKING_GET() ({ int *p = pthread_getspecific(no_checking_key); \
369 NO_CHECKING_CHECK(); \
372 #define NO_CHECKING_SET(v) { int *p = pthread_getspecific(no_checking_key); \
373 NO_CHECKING_CHECK(); \
378 static __thread
int no_checking
= 0;
379 #define NO_CHECKING_GET() no_checking
380 #define NO_CHECKING_SET(v) no_checking = v
382 static int no_checking
= 0;
383 #define NO_CHECKING_GET() no_checking
384 #define NO_CHECKING_SET(v) no_checking = v
386 static char exec
[100];
/* Per-entry-point call counters, dumped by __bound_exit when
   TCC_BOUNDS_PRINT_STATISTIC is set.
   NOTE(review): this group is presumably guarded by #if BOUND_STATISTIC;
   the guard lines are outside this excerpt. */
static unsigned long long bound_ptr_add_count;
static unsigned long long bound_ptr_indir1_count;
static unsigned long long bound_ptr_indir2_count;
static unsigned long long bound_ptr_indir4_count;
static unsigned long long bound_ptr_indir8_count;
static unsigned long long bound_ptr_indir12_count;
static unsigned long long bound_ptr_indir16_count;
static unsigned long long bound_local_new_count;
static unsigned long long bound_local_delete_count;
static unsigned long long bound_malloc_count;
static unsigned long long bound_calloc_count;
static unsigned long long bound_realloc_count;
static unsigned long long bound_free_count;
static unsigned long long bound_memalign_count;
static unsigned long long bound_mmap_count;
static unsigned long long bound_munmap_count;
static unsigned long long bound_alloca_count;
static unsigned long long bound_setjmp_count;
static unsigned long long bound_longjmp_count;
/* NOTE(review): 'mempcy' is a historical typo for 'memcpy'; the name is
   used with this exact spelling in the statistics dump, so it is kept. */
static unsigned long long bound_mempcy_count;
static unsigned long long bound_memcmp_count;
static unsigned long long bound_memmove_count;
static unsigned long long bound_memset_count;
static unsigned long long bound_strlen_count;
static unsigned long long bound_strcpy_count;
static unsigned long long bound_strncpy_count;
static unsigned long long bound_strcmp_count;
static unsigned long long bound_strncmp_count;
static unsigned long long bound_strcat_count;
static unsigned long long bound_strchr_count;
static unsigned long long bound_strdup_count;
static unsigned long long bound_not_found;  /* checked pointers not matching any tracked region */
/* Bump a statistic counter.
   NOTE(review): the second, empty definition is the non-statistics
   (#else) variant; the #if/#else/#endif lines are outside this excerpt. */
#define INCR_COUNT(x) ++x
#define INCR_COUNT(x)
#if BOUND_STATISTIC_SPLAY
static unsigned long long bound_splay;
static unsigned long long bound_splay_end;
static unsigned long long bound_splay_insert;
static unsigned long long bound_splay_delete;
#define INCR_COUNT_SPLAY(x) ++x
/* NOTE(review): empty #else variant of INCR_COUNT_SPLAY; its guard lines
   are likewise not visible here. */
#define INCR_COUNT_SPLAY(x)
435 int tcc_backtrace(const char *fmt
, ...);
437 /* print a bound error message */
438 #define bound_warning(...) \
439 tcc_backtrace("^bcheck.c^BCHECK: " __VA_ARGS__)
441 #define bound_error(...) \
443 bound_warning(__VA_ARGS__); \
444 if (never_fatal == 0) \
448 static void bound_alloc_error(const char *s
)
450 fprintf(stderr
,"FATAL: %s\n",s
);
454 static void bound_not_found_warning(const char *file
, const char *function
,
457 dprintf(stderr
, "%s%s, %s(): Not found %p\n", exec
, file
, function
, ptr
);
460 static void fetch_and_add(int* variable
, int value
)
462 #if defined __i386__ || defined __x86_64__
463 __asm__
volatile("lock; addl %0, %1"
464 : "+r" (value
), "+m" (*variable
) // input+output
468 #elif defined __arm__
469 extern void fetch_and_add_arm(int* variable
, int value
);
470 fetch_and_add_arm(variable
, value
);
471 #elif defined __aarch64__
472 extern void fetch_and_add_arm64(int* variable
, int value
);
473 fetch_and_add_arm64(variable
, value
);
474 #elif defined __riscv
475 extern void fetch_and_add_riscv64(int* variable
, int value
);
476 fetch_and_add_riscv64(variable
, value
);
482 /* enable/disable checking. This can be used in signal handlers. */
483 void __bounds_checking (int no_check
)
485 #if HAVE_TLS_FUNC || HAVE_TLS_VAR
486 NO_CHECKING_SET(NO_CHECKING_GET() + no_check
);
488 fetch_and_add (&no_checking
, no_check
);
492 /* enable/disable checking. This can be used in signal handlers. */
493 void __bound_never_fatal (int neverfatal
)
495 fetch_and_add (&never_fatal
, neverfatal
);
/* return '(p + offset)' for pointer arithmetic (a pointer can reach
   the end of a region in this case) */
500 void * __bound_ptr_add(void *p
, size_t offset
)
502 size_t addr
= (size_t)p
;
504 if (NO_CHECKING_GET())
507 dprintf(stderr
, "%s, %s(): %p 0x%lx\n",
508 __FILE__
, __FUNCTION__
, p
, (unsigned long)offset
);
511 INCR_COUNT(bound_ptr_add_count
);
514 if (addr
>= tree
->size
) {
516 tree
= splay (addr
, tree
);
519 if (addr
>= tree
->size
) {
521 tree
= splay_end (addr
, tree
);
524 if (addr
<= tree
->size
) {
525 if (tree
->is_invalid
|| addr
+ offset
> tree
->size
) {
527 if (print_warn_ptr_add
)
528 bound_warning("%p is outside of the region", p
+ offset
);
529 if (never_fatal
<= 0)
530 return INVALID_POINTER
; /* return an invalid pointer */
534 else if (p
) { /* Allow NULL + offset. offsetoff is using it. */
535 INCR_COUNT(bound_not_found
);
537 bound_not_found_warning (__FILE__
, __FUNCTION__
, p
);
/* return '(p + offset)' for pointer indirection (the resulting pointer
   must be strictly inside the region) */
547 #define BOUND_PTR_INDIR(dsize) \
548 void * __bound_ptr_indir ## dsize (void *p, size_t offset) \
550 size_t addr = (size_t)p; \
552 if (NO_CHECKING_GET()) \
555 dprintf(stderr, "%s, %s(): %p 0x%lx\n", \
556 __FILE__, __FUNCTION__, p, (unsigned long)offset); \
558 INCR_COUNT(bound_ptr_indir ## dsize ## _count); \
560 addr -= tree->start; \
561 if (addr >= tree->size) { \
563 tree = splay (addr, tree); \
564 addr -= tree->start; \
566 if (addr >= tree->size) { \
568 tree = splay_end (addr, tree); \
569 addr -= tree->start; \
571 if (addr <= tree->size) { \
572 if (tree->is_invalid || addr + offset + dsize > tree->size) { \
574 bound_warning("%p is outside of the region", p + offset); \
575 if (never_fatal <= 0) \
576 return INVALID_POINTER; /* return an invalid pointer */ \
581 INCR_COUNT(bound_not_found); \
583 bound_not_found_warning (__FILE__, __FUNCTION__, p); \
598 #if defined(__GNUC__) && (__GNUC__ >= 6)
600 * At least gcc 6.2 complains when __builtin_frame_address is used with
603 #pragma GCC diagnostic push
604 #pragma GCC diagnostic ignored "-Wframe-address"
607 /* return the frame pointer of the caller */
608 #define GET_CALLER_FP(fp)\
610 fp = (size_t)__builtin_frame_address(1);\
613 /* called when entering a function to add all the local regions */
614 void FASTCALL
__bound_local_new(void *p1
)
616 size_t addr
, fp
, *p
= p1
;
618 if (NO_CHECKING_GET())
621 dprintf(stderr
, "%s, %s(): p1=%p fp=%p\n",
622 __FILE__
, __FUNCTION__
, p
, (void *)fp
);
624 while ((addr
= p
[0])) {
625 INCR_COUNT(bound_local_new_count
);
626 tree
= splay_insert(addr
+ fp
, p
[1], tree
);
633 while ((addr
= p
[0])) {
634 dprintf(stderr
, "%s, %s(): %p 0x%lx\n",
635 __FILE__
, __FUNCTION__
,
636 (void *) (addr
+ fp
), (unsigned long) p
[1]);
643 /* called when leaving a function to delete all the local regions */
644 void FASTCALL
__bound_local_delete(void *p1
)
646 size_t addr
, fp
, *p
= p1
;
648 if (NO_CHECKING_GET())
651 dprintf(stderr
, "%s, %s(): p1=%p fp=%p\n",
652 __FILE__
, __FUNCTION__
, p
, (void *)fp
);
654 while ((addr
= p
[0])) {
655 INCR_COUNT(bound_local_delete_count
);
656 tree
= splay_delete(addr
+ fp
, tree
);
660 alloca_list_type
*last
= NULL
;
661 alloca_list_type
*cur
= alloca_list
;
666 last
->next
= cur
->next
;
668 alloca_list
= cur
->next
;
669 tree
= splay_delete ((size_t) cur
->p
, tree
);
670 dprintf(stderr
, "%s, %s(): remove alloca/vla %p\n",
671 __FILE__
, __FUNCTION__
, cur
->p
);
673 cur
= last
? last
->next
: alloca_list
;
682 jmp_list_type
*last
= NULL
;
683 jmp_list_type
*cur
= jmp_list
;
688 last
->next
= cur
->next
;
690 jmp_list
= cur
->next
;
691 dprintf(stderr
, "%s, %s(): remove setjmp %p\n",
692 __FILE__
, __FUNCTION__
, cur
->penv
);
694 cur
= last
? last
->next
: jmp_list
;
707 while ((addr
= p
[0])) {
709 dprintf(stderr
, "%s, %s(): %p 0x%lx\n",
710 __FILE__
, __FUNCTION__
,
711 (void *) (addr
+ fp
), (unsigned long) p
[1]);
720 void __bound_new_region(void *p
, size_t size
)
723 alloca_list_type
*last
;
724 alloca_list_type
*cur
;
725 alloca_list_type
*new;
727 if (NO_CHECKING_GET())
730 dprintf(stderr
, "%s, %s(): %p, 0x%lx\n",
731 __FILE__
, __FUNCTION__
, p
, (unsigned long)size
);
733 new = BOUND_MALLOC (sizeof (alloca_list_type
));
735 INCR_COUNT(bound_alloca_count
);
739 #if defined(__i386__) || (defined(__arm__) && !defined(TCC_ARM_EABI))
741 #elif defined(__arm__)
746 void *cure
= (void *)((char *)cur
->p
+ ((cur
->size
+ align
) & -align
));
747 void *pe
= (void *)((char *)p
+ ((size
+ align
) & -align
));
748 if (cur
->fp
== fp
&& ((cur
->p
<= p
&& cure
> p
) ||
749 (p
<= cur
->p
&& pe
> cur
->p
))) {
751 last
->next
= cur
->next
;
753 alloca_list
= cur
->next
;
754 tree
= splay_delete((size_t)cur
->p
, tree
);
760 tree
= splay_insert((size_t)p
, size
, tree
);
765 new->next
= alloca_list
;
770 dprintf(stderr
, "%s, %s(): remove alloca/vla %p\n",
771 __FILE__
, __FUNCTION__
, cur
->p
);
776 void __bound_setjmp(jmp_buf env
)
779 void *e
= (void *) env
;
781 if (NO_CHECKING_GET() == 0) {
782 dprintf(stderr
, "%s, %s(): %p\n", __FILE__
, __FUNCTION__
, e
);
784 INCR_COUNT(bound_setjmp_count
);
792 jl
= BOUND_MALLOC (sizeof (jmp_list_type
));
804 jl
->end_fp
= (size_t)__builtin_frame_address(0);
805 jl
->tid
= BOUND_GET_TID
;
811 static void __bound_long_jump(jmp_buf env
, int val
, int sig
, const char *func
)
817 if (NO_CHECKING_GET() == 0) {
820 dprintf(stderr
, "%s, %s(): %p\n", __FILE__
, func
, e
);
822 INCR_COUNT(bound_longjmp_count
);
825 if (jl
->penv
== e
&& jl
->tid
== tid
) {
826 size_t start_fp
= (size_t)__builtin_frame_address(0);
827 size_t end_fp
= jl
->end_fp
;
828 jmp_list_type
*cur
= jmp_list
;
829 jmp_list_type
*last
= NULL
;
831 while (cur
->penv
!= e
|| cur
->tid
!= tid
) {
832 if (cur
->tid
== tid
) {
833 dprintf(stderr
, "%s, %s(): remove setjmp %p\n",
834 __FILE__
, func
, cur
->penv
);
836 last
->next
= cur
->next
;
838 jmp_list
= cur
->next
;
840 cur
= last
? last
->next
: jmp_list
;
849 alloca_list_type
*last
;
850 alloca_list_type
*cur
;
852 while (t
&& (t
->start
< start_fp
|| t
->start
> end_fp
))
853 if (t
->start
< start_fp
)
862 if ((size_t) cur
->p
== t
->start
) {
863 dprintf(stderr
, "%s, %s(): remove alloca/vla %p\n",
864 __FILE__
, func
, cur
->p
);
866 last
->next
= cur
->next
;
868 alloca_list
= cur
->next
;
875 dprintf(stderr
, "%s, %s(): delete %p\n",
876 __FILE__
, func
, (void *) t
->start
);
877 tree
= splay_delete(t
->start
, tree
);
886 sig
? siglongjmp(env
, val
) :
891 void __bound_longjmp(jmp_buf env
, int val
)
893 __bound_long_jump(env
,val
, 0, __FUNCTION__
);
897 void __bound_siglongjmp(jmp_buf env
, int val
)
899 __bound_long_jump(env
,val
, 1, __FUNCTION__
);
903 #if defined(__GNUC__) && (__GNUC__ >= 6)
904 #pragma GCC diagnostic pop
907 void __bound_init(size_t *p
, int mode
)
909 dprintf(stderr
, "%s, %s(): start %s\n", __FILE__
, __FUNCTION__
,
910 mode
< 0 ? "lazy" : mode
== 0 ? "normal use" : "for -run");
920 no_checking_key
= TlsAlloc();
921 TlsSetValue(no_checking_key
, &no_checking
);
923 pthread_key_create(&no_checking_key
, NULL
);
924 pthread_setspecific(no_checking_key
, &no_checking
);
929 print_warn_ptr_add
= getenv ("TCC_BOUNDS_WARN_POINTER_ADD") != NULL
;
930 print_calls
= getenv ("TCC_BOUNDS_PRINT_CALLS") != NULL
;
931 print_heap
= getenv ("TCC_BOUNDS_PRINT_HEAP") != NULL
;
932 print_statistic
= getenv ("TCC_BOUNDS_PRINT_STATISTIC") != NULL
;
933 never_fatal
= getenv ("TCC_BOUNDS_NEVER_FATAL") != NULL
;
939 void *addr
= mode
> 0 ? RTLD_DEFAULT
: RTLD_NEXT
;
941 /* tcc -run required RTLD_DEFAULT. Normal usage requires RTLD_NEXT,
942 but using RTLD_NEXT with -run segfaults on MacOS in dyld as the
943 generated code segment isn't registered with dyld and hence the
944 caller image of dlsym isn't known to it */
945 *(void **) (&malloc_redir
) = dlsym (addr
, "malloc");
946 if (malloc_redir
== NULL
) {
947 dprintf(stderr
, "%s, %s(): use RTLD_DEFAULT\n",
948 __FILE__
, __FUNCTION__
);
950 *(void **) (&malloc_redir
) = dlsym (addr
, "malloc");
952 *(void **) (&calloc_redir
) = dlsym (addr
, "calloc");
953 *(void **) (&free_redir
) = dlsym (addr
, "free");
954 *(void **) (&realloc_redir
) = dlsym (addr
, "realloc");
955 *(void **) (&memalign_redir
) = dlsym (addr
, "memalign");
956 dprintf(stderr
, "%s, %s(): malloc_redir %p\n",
957 __FILE__
, __FUNCTION__
, malloc_redir
);
958 dprintf(stderr
, "%s, %s(): free_redir %p\n",
959 __FILE__
, __FUNCTION__
, free_redir
);
960 dprintf(stderr
, "%s, %s(): realloc_redir %p\n",
961 __FILE__
, __FUNCTION__
, realloc_redir
);
962 dprintf(stderr
, "%s, %s(): memalign_redir %p\n",
963 __FILE__
, __FUNCTION__
, memalign_redir
);
964 if (malloc_redir
== NULL
|| free_redir
== NULL
)
965 bound_alloc_error ("Cannot redirect malloc/free");
966 #if HAVE_PTHREAD_CREATE
967 *(void **) (&pthread_create_redir
) = dlsym (addr
, "pthread_create");
968 dprintf(stderr
, "%s, %s(): pthread_create_redir %p\n",
969 __FILE__
, __FUNCTION__
, pthread_create_redir
);
970 if (pthread_create_redir
== NULL
)
971 bound_alloc_error ("Cannot redirect pthread_create");
974 *(void **) (&signal_redir
) = dlsym (addr
, "signal");
975 dprintf(stderr
, "%s, %s(): signal_redir %p\n",
976 __FILE__
, __FUNCTION__
, signal_redir
);
977 if (signal_redir
== NULL
)
978 bound_alloc_error ("Cannot redirect signal");
981 *(void **) (&sigaction_redir
) = dlsym (addr
, "sigaction");
982 dprintf(stderr
, "%s, %s(): sigaction_redir %p\n",
983 __FILE__
, __FUNCTION__
, sigaction_redir
);
984 if (sigaction_redir
== NULL
)
985 bound_alloc_error ("Cannot redirect sigaction");
988 *(void **) (&fork_redir
) = dlsym (addr
, "fork");
989 dprintf(stderr
, "%s, %s(): fork_redir %p\n",
990 __FILE__
, __FUNCTION__
, fork_redir
);
991 if (fork_redir
== NULL
)
992 bound_alloc_error ("Cannot redirect fork");
1000 unsigned char found
;
1001 unsigned long start
;
1004 (unsigned long) __builtin_return_address(0);
/* Display exec name. Useful when a lot of code is compiled with tcc */
1008 fp
= fopen ("/proc/self/comm", "r");
1010 memset (exec
, 0, sizeof(exec
));
1011 fread (exec
, 1, sizeof(exec
) - 2, fp
);
1012 if (strchr(exec
,'\n'))
1013 *strchr(exec
,'\n') = '\0';
/* check if dlopen is used (is there a better way?) */
1019 fp
= fopen ("/proc/self/maps", "r");
1021 while (fgets (line
, sizeof(line
), fp
)) {
1022 if (sscanf (line
, "%lx-%lx", &start
, &end
) == 2 &&
1023 ad
>= start
&& ad
< end
) {
1027 if (strstr (line
,"[heap]"))
1043 tree
= splay_insert((size_t) &_DefaultRuneLocale
,
1044 sizeof (_DefaultRuneLocale
), tree
);
1046 /* XXX: Does not work if locale is changed */
1047 tree
= splay_insert((size_t) __ctype_b_loc(),
1048 sizeof (unsigned short *), tree
);
1049 tree
= splay_insert((size_t) (*__ctype_b_loc() - 128),
1050 384 * sizeof (unsigned short), tree
);
1051 tree
= splay_insert((size_t) __ctype_tolower_loc(),
1052 sizeof (__int32_t
*), tree
);
1053 tree
= splay_insert((size_t) (*__ctype_tolower_loc() - 128),
1054 384 * sizeof (__int32_t
), tree
);
1055 tree
= splay_insert((size_t) __ctype_toupper_loc(),
1056 sizeof (__int32_t
*), tree
);
1057 tree
= splay_insert((size_t) (*__ctype_toupper_loc() - 128),
1058 384 * sizeof (__int32_t
), tree
);
1062 tree
= splay_insert((size_t) (&errno
), sizeof (int), tree
);
1069 /* add all static bound check values */
1071 tree
= splay_insert(p
[0], p
[1], tree
);
1074 dprintf(stderr
, "%s, %s(): static var %p 0x%lx\n",
1075 __FILE__
, __FUNCTION__
,
1076 (void *) p
[0], (unsigned long) p
[1]);
1085 dprintf(stderr
, "%s, %s(): end\n\n", __FILE__
, __FUNCTION__
);
1089 #if (defined(__GLIBC__) && (__GLIBC_MINOR__ >= 4)) || defined(_WIN32)
1090 __attribute__((constructor
))
1092 __bound_main_arg(int argc
, char **argv
, char **envp
)
1094 __bound_init (0, -1);
1099 for (i
= 0; i
< argc
; i
++)
1100 tree
= splay_insert((size_t) argv
[i
], strlen (argv
[i
]) + 1, tree
);
1101 tree
= splay_insert((size_t) argv
, (argc
+ 1) * sizeof(char *), tree
);
1105 for (i
= 0; i
< argc
; i
++)
1106 dprintf(stderr
, "%s, %s(): arg %p 0x%lx\n",
1107 __FILE__
, __FUNCTION__
,
1108 argv
[i
], (unsigned long)(strlen (argv
[i
]) + 1));
1109 dprintf(stderr
, "%s, %s(): argv %p %d\n",
1110 __FILE__
, __FUNCTION__
, argv
,
1111 (int)((argc
+ 1) * sizeof(char *)));
1116 if (envp
&& *envp
) {
1121 tree
= splay_insert((size_t) *p
, strlen (*p
) + 1, tree
);
1124 tree
= splay_insert((size_t) envp
, (++p
- envp
) * sizeof(char *), tree
);
1130 dprintf(stderr
, "%s, %s(): env %p 0x%lx\n",
1131 __FILE__
, __FUNCTION__
,
1132 *p
, (unsigned long)(strlen (*p
) + 1));
1135 dprintf(stderr
, "%s, %s(): environ %p %d\n",
1136 __FILE__
, __FUNCTION__
, envp
,
1137 (int)((++p
- envp
) * sizeof(char *)));
1143 void __attribute__((destructor
)) __bound_exit(void)
1146 static const char * const alloc_type
[] = {
1147 "", "malloc", "calloc", "realloc", "memalign", "strdup"
1150 dprintf(stderr
, "%s, %s():\n", __FILE__
, __FUNCTION__
);
1153 #if !defined(_WIN32) && !defined(__APPLE__) && !defined(__OpenBSD__)
1155 extern void __libc_freeres (void);
1163 while (alloca_list
) {
1164 alloca_list_type
*next
= alloca_list
->next
;
1166 tree
= splay_delete ((size_t) alloca_list
->p
, tree
);
1167 BOUND_FREE (alloca_list
);
1171 jmp_list_type
*next
= jmp_list
->next
;
1173 BOUND_FREE (jmp_list
);
1176 for (i
= 0; i
< FREE_REUSE_SIZE
; i
++) {
1177 if (free_reuse_list
[i
]) {
1178 tree
= splay_delete ((size_t) free_reuse_list
[i
], tree
);
1179 BOUND_FREE (free_reuse_list
[i
]);
1183 if (print_heap
&& tree
->type
!= 0)
1184 fprintf (stderr
, "%s, %s(): %s found size %lu\n",
1185 __FILE__
, __FUNCTION__
, alloc_type
[tree
->type
],
1186 (unsigned long) tree
->size
);
1187 tree
= splay_delete (tree
->start
, tree
);
1190 while (tree_free_list
) {
1191 Tree
*next
= tree_free_list
->left
;
1192 BOUND_FREE (tree_free_list
);
1193 tree_free_list
= next
;
1200 TlsFree(no_checking_key
);
1202 pthread_key_delete(no_checking_key
);
1206 if (print_statistic
) {
1208 fprintf (stderr
, "bound_ptr_add_count %llu\n", bound_ptr_add_count
);
1209 fprintf (stderr
, "bound_ptr_indir1_count %llu\n", bound_ptr_indir1_count
);
1210 fprintf (stderr
, "bound_ptr_indir2_count %llu\n", bound_ptr_indir2_count
);
1211 fprintf (stderr
, "bound_ptr_indir4_count %llu\n", bound_ptr_indir4_count
);
1212 fprintf (stderr
, "bound_ptr_indir8_count %llu\n", bound_ptr_indir8_count
);
1213 fprintf (stderr
, "bound_ptr_indir12_count %llu\n", bound_ptr_indir12_count
);
1214 fprintf (stderr
, "bound_ptr_indir16_count %llu\n", bound_ptr_indir16_count
);
1215 fprintf (stderr
, "bound_local_new_count %llu\n", bound_local_new_count
);
1216 fprintf (stderr
, "bound_local_delete_count %llu\n", bound_local_delete_count
);
1217 fprintf (stderr
, "bound_malloc_count %llu\n", bound_malloc_count
);
1218 fprintf (stderr
, "bound_calloc_count %llu\n", bound_calloc_count
);
1219 fprintf (stderr
, "bound_realloc_count %llu\n", bound_realloc_count
);
1220 fprintf (stderr
, "bound_free_count %llu\n", bound_free_count
);
1221 fprintf (stderr
, "bound_memalign_count %llu\n", bound_memalign_count
);
1222 fprintf (stderr
, "bound_mmap_count %llu\n", bound_mmap_count
);
1223 fprintf (stderr
, "bound_munmap_count %llu\n", bound_munmap_count
);
1224 fprintf (stderr
, "bound_alloca_count %llu\n", bound_alloca_count
);
1225 fprintf (stderr
, "bound_setjmp_count %llu\n", bound_setjmp_count
);
1226 fprintf (stderr
, "bound_longjmp_count %llu\n", bound_longjmp_count
);
1227 fprintf (stderr
, "bound_mempcy_count %llu\n", bound_mempcy_count
);
1228 fprintf (stderr
, "bound_memcmp_count %llu\n", bound_memcmp_count
);
1229 fprintf (stderr
, "bound_memmove_count %llu\n", bound_memmove_count
);
1230 fprintf (stderr
, "bound_memset_count %llu\n", bound_memset_count
);
1231 fprintf (stderr
, "bound_strlen_count %llu\n", bound_strlen_count
);
1232 fprintf (stderr
, "bound_strcpy_count %llu\n", bound_strcpy_count
);
1233 fprintf (stderr
, "bound_strncpy_count %llu\n", bound_strncpy_count
);
1234 fprintf (stderr
, "bound_strcmp_count %llu\n", bound_strcmp_count
);
1235 fprintf (stderr
, "bound_strncmp_count %llu\n", bound_strncmp_count
);
1236 fprintf (stderr
, "bound_strcat_count %llu\n", bound_strcat_count
);
1237 fprintf (stderr
, "bound_strchr_count %llu\n", bound_strchr_count
);
1238 fprintf (stderr
, "bound_strdup_count %llu\n", bound_strdup_count
);
1239 fprintf (stderr
, "bound_not_found %llu\n", bound_not_found
);
1241 #if BOUND_STATISTIC_SPLAY
1242 fprintf (stderr
, "bound_splay %llu\n", bound_splay
);
1243 fprintf (stderr
, "bound_splay_end %llu\n", bound_splay_end
);
1244 fprintf (stderr
, "bound_splay_insert %llu\n", bound_splay_insert
);
1245 fprintf (stderr
, "bound_splay_delete %llu\n", bound_splay_delete
);
1251 #if HAVE_PTHREAD_CREATE
/* Trampoline descriptor handed to a newly created thread: the user's
   start routine, its argument and the creator's saved signal mask.
   NOTE(review): this extraction is line-shredded; the struct head,
   locals, braces and returns are missing, comments annotate the
   visible fragments only. */
1253 void *(*start_routine
) (void *);
1256 } bound_thread_create_type
;
/* Runs in the new thread: allocate this thread's no-checking flag,
   publish it via pthread_setspecific, restore the creator's signal
   mask, then invoke the user's start routine. */
1258 static void *bound_thread_create(void *bdata
)
1260 bound_thread_create_type
*data
= (bound_thread_create_type
*) bdata
;
1263 int *p
= (int *) BOUND_MALLOC(sizeof(int));
1265 if (!p
) bound_alloc_error("bound_thread_create malloc");
1267 pthread_setspecific(no_checking_key
, p
);
/* re-enable the signals the creator blocked around thread start */
1269 pthread_sigmask(SIG_SETMASK
, &data
->old_mask
, NULL
);
1270 retval
= data
->start_routine(data
->arg
);
/* clear the per-thread flag before the thread exits */
1272 pthread_setspecific(no_checking_key
, NULL
);
/* Interposed pthread_create(): block signals, heap-allocate a
   trampoline descriptor and start the thread via bound_thread_create.
   NOTE(review): several original lines (locals, sigfillset, arg copy,
   final return) are missing from this extraction. */
1279 int pthread_create(pthread_t
*thread
, const pthread_attr_t
*attr
,
1280 void *(*start_routine
) (void *), void *arg
)
1283 bound_thread_create_type
*data
;
1288 dprintf (stderr
, "%s, %s()\n", __FILE__
, __FUNCTION__
);
/* block signals so the child starts with a known mask */
1290 pthread_sigmask(SIG_SETMASK
, &mask
, &old_mask
);
1291 data
= (bound_thread_create_type
*) BOUND_MALLOC(sizeof(bound_thread_create_type
));
1292 if (!data
) bound_alloc_error("bound_thread_create malloc");
1293 data
->start_routine
= start_routine
;
1295 data
->old_mask
= old_mask
;
/* real pthread_create, resolved elsewhere in this file */
1296 retval
= pthread_create_redir(thread
, attr
, bound_thread_create
, data
);
1297 pthread_sigmask(SIG_SETMASK
, &old_mask
, NULL
);
1302 #if HAVE_SIGNAL || HAVE_SIGACTION
/* Per-signal record of the user's original handlers so the wrappers
   below can forward to them (one variant per installation API). */
1305 bound_sig signal_handler
;
1308 void (*sig_handler
)(int);
1309 void (*sig_sigaction
)(int, siginfo_t
*, void *);
/* bound_sig_used[sig] != 0 once a user handler was recorded for sig */
1313 static unsigned char bound_sig_used
[NSIG
];
1314 static bound_sig_type bound_sig_data
[NSIG
];
1318 static void signal_handler(int sig
)
1320 __bounds_checking(1);
1321 bound_sig_data
[sig
].signal_handler(sig
);
1322 __bounds_checking(-1);
/* Interposed signal(): install our wrapper and remember the user's
   handler so the wrapper can forward to it.
   NOTE(review): the 'retval' declaration, the trailing dprintf
   arguments and the final return are missing from this extraction. */
1325 bound_sig
signal(int signum
, bound_sig handler
)
1329 dprintf (stderr
, "%s, %s() %d %p\n", __FILE__
, __FUNCTION__
,
/* only wrap real handlers; SIG_IGN/SIG_DFL (NULL-ish) pass through */
1331 retval
= signal_redir(signum
, handler
? signal_handler
: handler
)
;
1332 if (retval
!= SIG_ERR
) {
/* report the previously recorded user handler, not our wrapper */
1333 if (bound_sig_used
[signum
])
1334 retval
= bound_sig_data
[signum
].signal_handler
;
1336 bound_sig_used
[signum
] = 1;
1337 bound_sig_data
[signum
].signal_handler
= handler
;
1345 static void sig_handler(int sig
)
1347 __bounds_checking(1);
1348 bound_sig_data
[sig
].sig_handler(sig
);
1349 __bounds_checking(-1);
1352 static void sig_sigaction(int sig
, siginfo_t
*info
, void *ucontext
)
1354 __bounds_checking(1);
1355 bound_sig_data
[sig
].sig_sigaction(sig
, info
, ucontext
);
1356 __bounds_checking(-1);
/* Interposed sigaction(): substitute our wrapper (sig_sigaction or
   sig_handler, depending on SA_SIGINFO) for the user's handler and
   record the original so queries report it.
   NOTE(review): locals ('retval'), the nact copy from *act, several
   braces and the final return are missing from this extraction. */
1359 int sigaction(int signum
, const struct sigaction
*act
, struct sigaction
*oldact
)
1362 struct sigaction nact
, oact
;
1364 dprintf (stderr
, "%s, %s() %d %p %p\n", __FILE__
, __FUNCTION__
,
1365 signum
, act
, oldact
);
/* pick the wrapper that matches the handler flavor being installed */
1368 if (nact
.sa_flags
& SA_SIGINFO
)
1369 nact
.sa_sigaction
= sig_sigaction
;
1371 nact
.sa_handler
= sig_handler
;
1372 retval
= sigaction_redir(signum
, &nact
, &oact
);
/* act == NULL path: query only, no wrapper substitution */
1375 retval
= sigaction_redir(signum
, act
, &oact
);
/* report the previously recorded user handler, not our wrapper */
1377 if (bound_sig_used
[signum
]) {
1378 if (oact
.sa_flags
& SA_SIGINFO
)
1379 oact
.sa_sigaction
= bound_sig_data
[signum
].sig_sigaction
;
1381 oact
.sa_handler
= bound_sig_data
[signum
].sig_handler
;
/* remember the newly installed user handler for this signal */
1387 bound_sig_used
[signum
] = 1;
1388 if (act
->sa_flags
& SA_SIGINFO
)
1389 bound_sig_data
[signum
].sig_sigaction
= act
->sa_sigaction
;
1391 bound_sig_data
[signum
].sig_handler
= act
->sa_handler
;
/* NOTE(review): visible fragment of the interposed fork() wrapper --
   it calls the dlsym-resolved real fork through fork_redir. */
1404 retval
= (*fork_redir
)();
/* Interposed malloc()/__bound_malloc(): allocate size+1 bytes via the
   real malloc and register [ptr, ptr+size) in the splay tree.
   NOTE(review): declarations, WAIT_SEM/POST_SEM and returns are
   missing from this line-shredded extraction. */
1414 void *malloc(size_t size
)
1416 void *__bound_malloc(size_t size
, const void *caller
)
1422 /* This will catch the first dlsym call from __bound_init */
1423 if (malloc_redir
== NULL
) {
1424 __bound_init (0, -1);
1425 if (malloc_redir
== NULL
) {
/* bootstrap path: carve 16-byte-aligned chunks from a static pool
   until the real malloc has been resolved */
1426 ptr
= &initial_pool
[pool_index
];
1427 pool_index
= (pool_index
+ size
+ 15) & ~15;
1428 if (pool_index
>= sizeof (initial_pool
))
1429 bound_alloc_error ("initial memory pool too small");
1430 dprintf (stderr
, "%s, %s(): initial %p, 0x%lx\n",
1431 __FILE__
, __FUNCTION__
, ptr
, (unsigned long)size
);
1436 /* we allocate one more byte to ensure the regions will be
1437 separated by at least one byte. With the glibc malloc, it may
1438 be in fact not necessary */
1439 ptr
= BOUND_MALLOC (size
+ 1);
1440 dprintf(stderr
, "%s, %s(): %p, 0x%lx\n",
1441 __FILE__
, __FUNCTION__
, ptr
, (unsigned long)size
);
1443 if (NO_CHECKING_GET() == 0) {
1445 INCR_COUNT(bound_malloc_count
);
/* a zero-size region is registered with size 1 so it stays findable */
1448 tree
= splay_insert ((size_t) ptr
, size
? size
: size
+ 1, tree
);
1449 if (tree
&& tree
->start
== (size_t) ptr
)
1450 tree
->type
= TCC_TYPE_MALLOC
;
/* Interposed memalign()/__bound_memalign(): aligned allocation
   registered like malloc. When no real memalign exists
   (HAVE_MEMALIGN == 0) it degrades to plain malloc.
   NOTE(review): #if/#else lines, locals and returns are missing from
   this extraction. */
1458 void *memalign(size_t size
, size_t align
)
1460 void *__bound_memalign(size_t size
, size_t align
, const void *caller
)
1466 /* we allocate one more byte to ensure the regions will be
1467 separated by at least one byte. With the glibc malloc, it may
1468 be in fact not necessary */
1469 ptr
= BOUND_MEMALIGN(size
+ 1, align
);
1472 /* XXX: handle it ? */
1475 /* we suppose that malloc aligns to at least four bytes */
1476 ptr
= BOUND_MALLOC(size
+ 1);
1479 dprintf(stderr
, "%s, %s(): %p, 0x%lx\n",
1480 __FILE__
, __FUNCTION__
, ptr
, (unsigned long)size
);
1482 if (NO_CHECKING_GET() == 0) {
1484 INCR_COUNT(bound_memalign_count
);
1487 tree
= splay_insert((size_t) ptr
, size
? size
: size
+ 1, tree
);
1488 if (tree
&& tree
->start
== (size_t) ptr
)
1489 tree
->type
= TCC_TYPE_MEMALIGN
;
/* Interposed free()/__bound_free(): mark the region invalid, poison
   it with 0x5a, and defer the real free through a small reuse ring so
   recently freed blocks are not immediately recycled (helps catch
   use-after-free). NULL and initial-pool pointers are ignored.
   NOTE(review): braces, WAIT_SEM/POST_SEM, the actual BOUND_FREE call
   and returns are missing from this extraction. */
1497 void free(void *ptr
)
1499 void __bound_free(void *ptr
, const void *caller
)
1502 size_t addr
= (size_t) ptr
;
1505 if (ptr
== NULL
|| tree
== NULL
1507 || ((unsigned char *) ptr
>= &initial_pool
[0] &&
1508 (unsigned char *) ptr
< &initial_pool
[sizeof(initial_pool
)])
1513 dprintf(stderr
, "%s, %s(): %p\n", __FILE__
, __FUNCTION__
, ptr
);
1515 if (NO_CHECKING_GET() == 0) {
1517 INCR_COUNT(bound_free_count
);
1518 tree
= splay (addr
, tree
);
1519 if (tree
->start
== addr
) {
/* double free / free of already-invalidated region */
1520 if (tree
->is_invalid
) {
1522 bound_error("freeing invalid region");
1525 tree
->is_invalid
= 1;
/* poison freed memory so stale reads are visible */
1526 memset (ptr
, 0x5a, tree
->size
);
/* rotate ptr into the reuse ring; actually release the oldest entry */
1527 p
= free_reuse_list
[free_reuse_index
];
1528 free_reuse_list
[free_reuse_index
] = ptr
;
1529 free_reuse_index
= (free_reuse_index
+ 1) % FREE_REUSE_SIZE
;
1531 tree
= splay_delete((size_t)p
, tree
);
/* Interposed realloc()/__bound_realloc(): size == 0 degenerates to
   free; otherwise reallocate with one guard byte and re-register the
   (possibly moved) region in the splay tree.
   NOTE(review): locals, the size==0 test, WAIT_SEM/POST_SEM and
   returns are missing from this extraction. */
1540 void *realloc(void *ptr
, size_t size
)
1542 void *__bound_realloc(void *ptr
, size_t size
, const void *caller
)
1551 __bound_free(ptr
, caller
);
1556 new_ptr
= BOUND_REALLOC (ptr
, size
+ 1);
1557 dprintf(stderr
, "%s, %s(): %p, 0x%lx\n",
1558 __FILE__
, __FUNCTION__
, new_ptr
, (unsigned long)size
);
1560 if (NO_CHECKING_GET() == 0) {
1562 INCR_COUNT(bound_realloc_count
);
/* drop the old region, then register the new one */
1565 tree
= splay_delete ((size_t) ptr
, tree
);
1567 tree
= splay_insert ((size_t) new_ptr
, size
? size
: size
+ 1, tree
);
1568 if (tree
&& tree
->start
== (size_t) new_ptr
)
1569 tree
->type
= TCC_TYPE_REALLOC
;
/* Interposed calloc()/__bound_calloc(): zeroed allocation registered
   like malloc, with the same static-pool bootstrap path as
   __bound_malloc for allocations made before dlsym resolves the real
   malloc. NOTE(review): the nmemb*size multiplication, locals,
   WAIT_SEM/POST_SEM and returns are missing from this extraction;
   'size' below presumably already holds nmemb*size -- TODO confirm. */
1577 void *calloc(size_t nmemb
, size_t size
)
1579 void *__bound_calloc(size_t nmemb
, size_t size
)
1586 /* This will catch the first dlsym call from __bound_init */
1587 if (malloc_redir
== NULL
) {
1588 __bound_init (0, -1);
1589 if (malloc_redir
== NULL
) {
1590 ptr
= &initial_pool
[pool_index
];
1591 pool_index
= (pool_index
+ size
+ 15) & ~15;
1592 if (pool_index
>= sizeof (initial_pool
))
1593 bound_alloc_error ("initial memory pool too small");
1594 dprintf (stderr
, "%s, %s(): initial %p, 0x%lx\n",
1595 __FILE__
, __FUNCTION__
, ptr
, (unsigned long)size
);
1596 memset (ptr
, 0, size
);
1601 ptr
= BOUND_MALLOC(size
+ 1);
1602 dprintf (stderr
, "%s, %s(): %p, 0x%lx\n",
1603 __FILE__
, __FUNCTION__
, ptr
, (unsigned long)size
);
/* calloc contract: returned memory is zeroed */
1606 memset (ptr
, 0, size
);
1607 if (NO_CHECKING_GET() == 0) {
1609 INCR_COUNT(bound_calloc_count
);
1610 tree
= splay_insert ((size_t) ptr
, size
? size
: size
+ 1, tree
);
1611 if (tree
&& tree
->start
== (size_t) ptr
)
1612 tree
->type
= TCC_TYPE_CALLOC
;
1619 #if !defined(_WIN32)
/* Checked mmap wrapper: perform the real mmap and, on success,
   register the mapped range so pointer checks accept it.
   NOTE(review): the 'result' declaration, WAIT_SEM/POST_SEM and the
   final return are missing from this extraction. */
1620 void *__bound_mmap (void *start
, size_t size
, int prot
,
1621 int flags
, int fd
, off_t offset
)
1625 dprintf(stderr
, "%s, %s(): %p, 0x%lx\n",
1626 __FILE__
, __FUNCTION__
, start
, (unsigned long)size
);
1627 result
= mmap (start
, size
, prot
, flags
, fd
, offset
);
1628 if (result
&& NO_CHECKING_GET() == 0) {
1630 INCR_COUNT(bound_mmap_count
);
1631 tree
= splay_insert((size_t)result
, size
, tree
);
/* Checked munmap wrapper: unregister the range from the splay tree
   first, then perform the real munmap.
   NOTE(review): the 'result' declaration, WAIT_SEM/POST_SEM and the
   final return are missing from this extraction. */
1637 int __bound_munmap (void *start
, size_t size
)
1641 dprintf(stderr
, "%s, %s(): %p, 0x%lx\n",
1642 __FILE__
, __FUNCTION__
, start
, (unsigned long)size
);
1643 if (start
&& NO_CHECKING_GET() == 0) {
1645 INCR_COUNT(bound_munmap_count
);
1646 tree
= splay_delete ((size_t) start
, tree
);
1649 result
= munmap (start
, size
);
1654 /* some useful checked functions */
1656 /* check that (p ... p + size - 1) lies inside 'p' region, if any */
1657 static void __bound_check(const void *p
, size_t size
, const char *function
)
1659 if (size
!= 0 && __bound_ptr_add((void *)p
, size
) == INVALID_POINTER
) {
1660 bound_error("invalid pointer %p, size 0x%lx in %s",
1661 p
, (unsigned long)size
, function
);
1665 static int check_overlap (const void *p1
, size_t n1
,
1666 const void *p2
, size_t n2
,
1667 const char *function
)
1669 const void *p1e
= (const void *) ((const char *) p1
+ n1
);
1670 const void *p2e
= (const void *) ((const char *) p2
+ n2
);
1672 if (NO_CHECKING_GET() == 0 && n1
!= 0 && n2
!=0 &&
1673 ((p1
<= p2
&& p1e
> p2
) || /* p1----p2====p1e----p2e */
1674 (p2
<= p1
&& p2e
> p1
))) { /* p2----p1====p2e----p1e */
1675 bound_error("overlapping regions %p(0x%lx), %p(0x%lx) in %s",
1676 p1
, (unsigned long)n1
, p2
, (unsigned long)n2
, function
);
1677 return never_fatal
< 0;
1682 void *__bound_memcpy(void *dest
, const void *src
, size_t n
)
1684 dprintf(stderr
, "%s, %s(): %p, %p, 0x%lx\n",
1685 __FILE__
, __FUNCTION__
, dest
, src
, (unsigned long)n
);
1686 INCR_COUNT(bound_mempcy_count
);
1687 __bound_check(dest
, n
, "memcpy dest");
1688 __bound_check(src
, n
, "memcpy src");
1689 if (check_overlap(dest
, n
, src
, n
, "memcpy"))
1691 return memcpy(dest
, src
, n
);
1694 int __bound_memcmp(const void *s1
, const void *s2
, size_t n
)
1696 const unsigned char *u1
= (const unsigned char *) s1
;
1697 const unsigned char *u2
= (const unsigned char *) s2
;
1700 dprintf(stderr
, "%s, %s(): %p, %p, 0x%lx\n",
1701 __FILE__
, __FUNCTION__
, s1
, s2
, (unsigned long)n
);
1702 INCR_COUNT(bound_memcmp_count
);
1704 if ((ssize_t
) --n
== -1)
1706 else if (*u1
!= *u2
) {
1707 retval
= *u1
++ - *u2
++;
1713 __bound_check(s1
, (const void *)u1
- s1
, "memcmp s1");
1714 __bound_check(s2
, (const void *)u2
- s2
, "memcmp s2");
1718 void *__bound_memmove(void *dest
, const void *src
, size_t n
)
1720 dprintf(stderr
, "%s, %s(): %p, %p, 0x%lx\n",
1721 __FILE__
, __FUNCTION__
, dest
, src
, (unsigned long)n
);
1722 INCR_COUNT(bound_memmove_count
);
1723 __bound_check(dest
, n
, "memmove dest");
1724 __bound_check(src
, n
, "memmove src");
1725 return memmove(dest
, src
, n
);
1728 void *__bound_memset(void *s
, int c
, size_t n
)
1730 dprintf(stderr
, "%s, %s(): %p, %d, 0x%lx\n",
1731 __FILE__
, __FUNCTION__
, s
, c
, (unsigned long)n
);
1732 INCR_COUNT(bound_memset_count
);
1733 __bound_check(s
, n
, "memset");
1734 return memset(s
, c
, n
);
1737 #if defined(__arm__)
1738 void *__bound___aeabi_memcpy(void *dest
, const void *src
, size_t n
)
1740 dprintf(stderr
, "%s, %s(): %p, %p, 0x%lx\n",
1741 __FILE__
, __FUNCTION__
, dest
, src
, (unsigned long)n
);
1742 INCR_COUNT(bound_mempcy_count
);
1743 __bound_check(dest
, n
, "memcpy dest");
1744 __bound_check(src
, n
, "memcpy src");
1745 if (check_overlap(dest
, n
, src
, n
, "memcpy"))
1747 return __aeabi_memcpy(dest
, src
, n
);
1750 void *__bound___aeabi_memmove(void *dest
, const void *src
, size_t n
)
1752 dprintf(stderr
, "%s, %s(): %p, %p, 0x%lx\n",
1753 __FILE__
, __FUNCTION__
, dest
, src
, (unsigned long)n
);
1754 INCR_COUNT(bound_memmove_count
);
1755 __bound_check(dest
, n
, "memmove dest");
1756 __bound_check(src
, n
, "memmove src");
1757 return __aeabi_memmove(dest
, src
, n
);
1760 void *__bound___aeabi_memmove4(void *dest
, const void *src
, size_t n
)
1762 dprintf(stderr
, "%s, %s(): %p, %p, 0x%lx\n",
1763 __FILE__
, __FUNCTION__
, dest
, src
, (unsigned long)n
);
1764 INCR_COUNT(bound_memmove_count
);
1765 __bound_check(dest
, n
, "memmove dest");
1766 __bound_check(src
, n
, "memmove src");
1767 return __aeabi_memmove4(dest
, src
, n
);
1770 void *__bound___aeabi_memmove8(void *dest
, const void *src
, size_t n
)
1772 dprintf(stderr
, "%s, %s(): %p, %p, 0x%lx\n",
1773 __FILE__
, __FUNCTION__
, dest
, src
, (unsigned long)n
);
1774 INCR_COUNT(bound_memmove_count
);
1775 __bound_check(dest
, n
, "memmove dest");
1776 __bound_check(src
, n
, "memmove src");
1777 return __aeabi_memmove8(dest
, src
, n
);
1780 void *__bound___aeabi_memset(void *s
, int c
, size_t n
)
1782 dprintf(stderr
, "%s, %s(): %p, %d, 0x%lx\n",
1783 __FILE__
, __FUNCTION__
, s
, c
, (unsigned long)n
);
1784 INCR_COUNT(bound_memset_count
);
1785 __bound_check(s
, n
, "memset");
1786 return __aeabi_memset(s
, c
, n
);
1790 int __bound_strlen(const char *s
)
1794 dprintf(stderr
, "%s, %s(): %p\n",
1795 __FILE__
, __FUNCTION__
, s
);
1796 INCR_COUNT(bound_strlen_count
);
1798 __bound_check(s
, p
- s
, "strlen");
1802 char *__bound_strcpy(char *dest
, const char *src
)
1805 const char *p
= src
;
1807 dprintf(stderr
, "%s, %s(): %p, %p\n",
1808 __FILE__
, __FUNCTION__
, dest
, src
);
1809 INCR_COUNT(bound_strcpy_count
);
1812 __bound_check(dest
, len
, "strcpy dest");
1813 __bound_check(src
, len
, "strcpy src");
1814 if (check_overlap(dest
, len
, src
, len
, "strcpy"))
1816 return strcpy (dest
, src
);
1819 char *__bound_strncpy(char *dest
, const char *src
, size_t n
)
1822 const char *p
= src
;
1824 dprintf(stderr
, "%s, %s(): %p, %p, 0x%lx\n",
1825 __FILE__
, __FUNCTION__
, dest
, src
, (unsigned long)n
);
1826 INCR_COUNT(bound_strncpy_count
);
1827 while (len
-- && *p
++);
1829 __bound_check(dest
, len
, "strncpy dest");
1830 __bound_check(src
, len
, "strncpy src");
1831 if (check_overlap(dest
, len
, src
, len
, "strncpy"))
1833 return strncpy(dest
, src
, n
);
1836 int __bound_strcmp(const char *s1
, const char *s2
)
1838 const unsigned char *u1
= (const unsigned char *) s1
;
1839 const unsigned char *u2
= (const unsigned char *) s2
;
1841 dprintf(stderr
, "%s, %s(): %p, %p\n",
1842 __FILE__
, __FUNCTION__
, s1
, s2
);
1843 INCR_COUNT(bound_strcmp_count
);
1844 while (*u1
&& *u1
== *u2
) {
1848 __bound_check(s1
, ((const char *)u1
- s1
) + 1, "strcmp s1");
1849 __bound_check(s2
, ((const char *)u2
- s2
) + 1, "strcmp s2");
1853 int __bound_strncmp(const char *s1
, const char *s2
, size_t n
)
1855 const unsigned char *u1
= (const unsigned char *) s1
;
1856 const unsigned char *u2
= (const unsigned char *) s2
;
1859 dprintf(stderr
, "%s, %s(): %p, %p, 0x%lx\n",
1860 __FILE__
, __FUNCTION__
, s1
, s2
, (unsigned long)n
);
1861 INCR_COUNT(bound_strncmp_count
);
1863 if ((ssize_t
) --n
== -1)
1865 else if (*u1
!= *u2
) {
1866 retval
= *u1
++ - *u2
++;
1871 __bound_check(s1
, (const char *)u1
- s1
, "strncmp s1");
1872 __bound_check(s2
, (const char *)u2
- s2
, "strncmp s2");
1876 char *__bound_strcat(char *dest
, const char *src
)
1879 const char *s
= src
;
1881 dprintf(stderr
, "%s, %s(): %p, %p\n",
1882 __FILE__
, __FUNCTION__
, dest
, src
);
1883 INCR_COUNT(bound_strcat_count
);
1886 __bound_check(r
, (dest
- r
) + (src
- s
) - 1, "strcat dest");
1887 __bound_check(s
, src
- s
, "strcat src");
1888 if (check_overlap(r
, (dest
- r
) + (src
- s
) - 1, s
, src
- s
, "strcat"))
1890 return strcat(r
, s
);
1893 char *__bound_strchr(const char *s
, int c
)
1895 const unsigned char *str
= (const unsigned char *) s
;
1896 unsigned char ch
= c
;
1898 dprintf(stderr
, "%s, %s(): %p, %d\n",
1899 __FILE__
, __FUNCTION__
, s
, ch
);
1900 INCR_COUNT(bound_strchr_count
);
1906 __bound_check(s
, ((const char *)str
- s
) + 1, "strchr");
1907 return *str
== ch
? (char *) str
: NULL
;
/* Checked strdup: measure the source, allocate a copy with one guard
   byte, register the new region (unless no_strdup suppresses it) and
   copy the bytes.
   NOTE(review): locals ('p', 'new'), the scan loop, WAIT_SEM/POST_SEM,
   null-check braces and the final return are missing from this
   extraction. */
1910 char *__bound_strdup(const char *s
)
1915 INCR_COUNT(bound_strdup_count
);
/* p presumably points one past the source NUL here -- TODO confirm */
1917 __bound_check(s
, p
- s
, "strdup");
1918 new = BOUND_MALLOC ((p
- s
) + 1);
1919 dprintf(stderr
, "%s, %s(): %p, 0x%lx\n",
1920 __FILE__
, __FUNCTION__
, new, (unsigned long)(p
-s
));
1922 if (NO_CHECKING_GET() == 0 && no_strdup
== 0) {
1924 tree
= splay_insert((size_t)new, p
- s
, tree
);
1925 if (tree
&& tree
->start
== (size_t) new)
1926 tree
->type
= TCC_TYPE_STRDUP
;
1929 memcpy (new, s
, p
- s
);
1935 An implementation of top-down splaying with sizes
1936 D. Sleator <sleator@cs.cmu.edu>, January 1994.
1938 This extends top-down-splay.c to maintain a size field in each node.
1939 This is the number of nodes in the subtree rooted there. This makes
1940 it possible to efficiently compute the rank of a key. (The rank is
1941 the number of nodes to the left of the given key.) It is also
1942 possible to quickly find the node of a given rank. Both of these
1943 operations are illustrated in the code below. The remainder of this
1944 introduction is taken from top-down-splay.c.
1946 "Splay trees", or "self-adjusting search trees" are a simple and
1947 efficient data structure for storing an ordered set. The data
1948 structure consists of a binary tree, with no additional fields. It
1949 allows searching, insertion, deletion, deletemin, deletemax,
1950 splitting, joining, and many other operations, all with amortized
1951 logarithmic performance. Since the trees adapt to the sequence of
1952 requests, their performance on real access patterns is typically even
1953 better. Splay trees are described in a number of texts and papers
1956 The code here is adapted from simple top-down splay, at the bottom of
1957 page 669 of [2]. It can be obtained via anonymous ftp from
1958 spade.pc.cs.cmu.edu in directory /usr/sleator/public.
1960 The chief modification here is that the splay operation works even if the
1961 item being splayed is not in the tree, and even if the tree root of the
1962 tree is NULL. So the line:
1966 causes it to search for item with key i in the tree rooted at t. If it's
1967 there, it is splayed to the root. If it isn't there, then the node put
1968 at the root is the last one before NULL that would have been reached in a
1969 normal binary search for i. (It's a neighbor of i in the tree.) This
1970 allows many other operations to be easily implemented, as shown below.
1972 [1] "Data Structures and Their Algorithms", Lewis and Denenberg,
1973 Harper Collins, 1991, pp 243-251.
1974 [2] "Self-adjusting Binary Search Trees" Sleator and Tarjan,
1975 JACM Volume 32, No 3, July 1985, pp 652-686.
1976 [3] "Data Structure and Algorithm Analysis", Mark Weiss,
1977 Benjamin Cummins, 1992, pp 119-130.
1978 [4] "Data Structures, Algorithms, and Performance", Derick Wood,
1979 Addison-Wesley, 1993, pp 367-375
1982 /* Code adapted for tcc */
1984 #define compare(start,tstart,tsize) (start < tstart ? -1 : \
1985 start >= tstart+tsize ? 1 : 0)
1987 static Tree
* splay (size_t addr
, Tree
*t
)
1988 /* Splay using the key start (which may or may not be in the tree.) */
1989 /* The starting root is t, and the tree used is defined by rat */
1994 INCR_COUNT_SPLAY(bound_splay
);
1995 if (t
== NULL
) return t
;
1996 N
.left
= N
.right
= NULL
;
2000 comp
= compare(addr
, t
->start
, t
->size
);
2003 if (y
== NULL
) break;
2004 if (compare(addr
, y
->start
, y
->size
) < 0) {
2005 t
->left
= y
->right
; /* rotate right */
2008 if (t
->left
== NULL
) break;
2010 r
->left
= t
; /* link right */
2013 } else if (comp
> 0) {
2015 if (y
== NULL
) break;
2016 if (compare(addr
, y
->start
, y
->size
) > 0) {
2017 t
->right
= y
->left
; /* rotate left */
2020 if (t
->right
== NULL
) break;
2022 l
->right
= t
; /* link left */
2029 l
->right
= t
->left
; /* assemble */
2037 #define compare_end(start,tend) (start < tend ? -1 : \
2038 start > tend ? 1 : 0)
2040 static Tree
* splay_end (size_t addr
, Tree
*t
)
2041 /* Splay using the key start (which may or may not be in the tree.) */
2042 /* The starting root is t, and the tree used is defined by rat */
2047 INCR_COUNT_SPLAY(bound_splay_end
);
2048 if (t
== NULL
) return t
;
2049 N
.left
= N
.right
= NULL
;
2053 comp
= compare_end(addr
, t
->start
+ t
->size
);
2056 if (y
== NULL
) break;
2057 if (compare_end(addr
, y
->start
+ y
->size
) < 0) {
2058 t
->left
= y
->right
; /* rotate right */
2061 if (t
->left
== NULL
) break;
2063 r
->left
= t
; /* link right */
2066 } else if (comp
> 0) {
2068 if (y
== NULL
) break;
2069 if (compare_end(addr
, y
->start
+ y
->size
) > 0) {
2070 t
->right
= y
->left
; /* rotate left */
2073 if (t
->right
== NULL
) break;
2075 l
->right
= t
; /* link left */
2082 l
->right
= t
->left
; /* assemble */
2090 static Tree
* splay_insert(size_t addr
, size_t size
, Tree
* t
)
2091 /* Insert key start into the tree t, if it is not already there. */
2092 /* Return a pointer to the resulting tree. */
2096 INCR_COUNT_SPLAY(bound_splay_insert
);
2099 if (compare(addr
, t
->start
, t
->size
)==0) {
2100 return t
; /* it's already there */
2104 if (tree_free_list
) {
2105 new = tree_free_list
;
2106 tree_free_list
= new->left
;
2111 new = (Tree
*) BOUND_MALLOC (sizeof (Tree
));
2114 bound_alloc_error("not enough memory for bound checking code");
2118 new->left
= new->right
= NULL
;
2119 } else if (compare(addr
, t
->start
, t
->size
) < 0) {
2120 new->left
= t
->left
;
2124 new->right
= t
->right
;
2130 new->type
= TCC_TYPE_NONE
;
2131 new->is_invalid
= 0;
2136 #define compare_destroy(start,tstart) (start < tstart ? -1 : \
2137 start > tstart ? 1 : 0)
2139 static Tree
* splay_delete(size_t addr
, Tree
*t
)
2140 /* Deletes addr from the tree if it's there. */
2141 /* Return a pointer to the resulting tree. */
2145 INCR_COUNT_SPLAY(bound_splay_delete
);
2146 if (t
==NULL
) return NULL
;
2148 if (compare_destroy(addr
, t
->start
) == 0) { /* found it */
2149 if (t
->left
== NULL
) {
2152 x
= splay(addr
, t
->left
);
2153 x
->right
= t
->right
;
2156 t
->left
= tree_free_list
;
2163 return t
; /* It wasn't there */
2167 void splay_printtree(Tree
* t
, int d
)
2170 if (t
== NULL
) return;
2171 splay_printtree(t
->right
, d
+1);
2172 for (i
=0; i
<d
; i
++) fprintf(stderr
," ");
2173 fprintf(stderr
,"%p(0x%lx:%u:%u)\n",
2174 (void *) t
->start
, (unsigned long) t
->size
,
2175 (unsigned)t
->type
, (unsigned)t
->is_invalid
);
2176 splay_printtree(t
->left
, d
+1);