/*
 * Source: linux (fpc-iii.git), arch/x86/entry/thunk_64.S
 * Commit: "mm: fix exec activate_mm vs TLB shootdown and lazy tlb switching race"
 * Blob:   be36bf4e0957ec0df1437407fa98800cb7869737
 */
1 /*
2  * Save registers before calling assembly functions. This avoids
3  * disturbance of register allocation in some inline assembly constructs.
4  * Copyright 2001,2002 by Andi Kleen, SuSE Labs.
5  * Added trace_hardirqs callers - Copyright 2007 Steven Rostedt, Red Hat, Inc.
6  * Subject to the GNU public license, v.2. No warranty of any kind.
7  */
8 #include <linux/linkage.h>
9 #include "calling.h"
10 #include <asm/asm.h>
11 #include <asm/export.h>
	/* rdi: arg1 ... normal C conventions. rax is saved/restored. */

	/*
	 * THUNK name, func, put_ret_addr_in_rdi=0
	 *
	 * Emits a small trampoline \name that saves every SysV AMD64
	 * caller-saved GPR, optionally passes the caller's return address
	 * as the first argument, calls the C function \func, and then
	 * jumps to the shared .L_restore tail to undo the saves.
	 *
	 * Purpose (see file header): callers can invoke \name from inline
	 * asm / hot paths without the compiler having to treat the call as
	 * clobbering the usual caller-saved registers.
	 */
	.macro THUNK name, func, put_ret_addr_in_rdi=0
	.globl \name
	.type \name, @function
\name:
	/* Standard frame so the return address sits at 8(%rbp). */
	pushq %rbp
	movq %rsp, %rbp

	/*
	 * Save all caller-saved (volatile) integer registers, so \func
	 * may clobber them freely.  NOTE(review): callee-saved regs and
	 * the XMM registers are NOT saved here — \func must honor the C
	 * ABI for those.  The pops in .L_restore are the exact reverse
	 * of this sequence; keep the two in sync.
	 */
	pushq %rdi
	pushq %rsi
	pushq %rdx
	pushq %rcx
	pushq %rax
	pushq %r8
	pushq %r9
	pushq %r10
	pushq %r11

	.if \put_ret_addr_in_rdi
	/* 8(%rbp) is return addr on stack */
	movq 8(%rbp), %rdi
	.endif

	call \func
	/* Shared epilogue: restore registers and return to the caller. */
	jmp  .L_restore
	/* Keep kprobes off the thunk — it runs in contexts (e.g. irq
	 * tracing) where a probe trap would recurse or corrupt state. */
	_ASM_NOKPROBE(\name)
	.endm
#ifdef CONFIG_TRACE_IRQFLAGS
	/*
	 * IRQ-flags tracing thunks.  put_ret_addr_in_rdi=1: the tracer
	 * receives the thunk caller's return address as its argument.
	 */
	THUNK trace_hardirqs_on_thunk,trace_hardirqs_on_caller,1
	THUNK trace_hardirqs_off_thunk,trace_hardirqs_off_caller,1
#endif
#ifdef CONFIG_DEBUG_LOCK_ALLOC
	/* Lockdep hook on syscall exit; no return address needed. */
	THUNK lockdep_sys_exit_thunk,lockdep_sys_exit
#endif
#ifdef CONFIG_PREEMPT
	/*
	 * Preemption thunks, called from compiler-generated preempt-enable
	 * sites; exported so modules built with CONFIG_PREEMPT can link
	 * against them.
	 */
	THUNK ___preempt_schedule, preempt_schedule
	THUNK ___preempt_schedule_notrace, preempt_schedule_notrace
	EXPORT_SYMBOL(___preempt_schedule)
	EXPORT_SYMBOL(___preempt_schedule_notrace)
#endif
/*
 * Shared tail for every THUNK instance: pop the caller-saved registers
 * in the exact reverse order of the macro's pushes, tear down the
 * frame, and return to the thunk's caller.  Only assembled when at
 * least one thunk above was emitted, otherwise the label would be
 * unreferenced.
 */
#if defined(CONFIG_TRACE_IRQFLAGS) \
 || defined(CONFIG_DEBUG_LOCK_ALLOC) \
 || defined(CONFIG_PREEMPT)
.L_restore:
	popq %r11
	popq %r10
	popq %r9
	popq %r8
	popq %rax
	popq %rcx
	popq %rdx
	popq %rsi
	popq %rdi
	popq %rbp
	ret
	/* Reached via jmp from nokprobe thunks — must be nokprobe too. */
	_ASM_NOKPROBE(.L_restore)
#endif