/*
 * Provenance (gitweb scrape residue, preserved as a comment):
 *   page header: "Staging: hv: mousevsc: Change the allocation flags to reflect interrupt context"
 *   [zen-stable.git] / arch / sparc / include / asm / winmacro.h
 *   blob a9be04b0d049c28c7aae4893ea548e7071721326
 */
/*
 * winmacro.h: Window loading-unloading macros.
 *
 * Copyright (C) 1995 David S. Miller (davem@caip.rutgers.edu)
 */
7 #ifndef _SPARC_WINMACRO_H
8 #define _SPARC_WINMACRO_H
10 #include <asm/ptrace.h>
/* Store the register window onto the 8-byte aligned area starting
 * at %reg. It might be %sp, it might not, we don't care.
 *
 * Spills the eight locals (%l0-%l7) and eight ins (%i0-%i7) with four
 * doubleword stores each; std requires an even/odd register pair and
 * an 8-byte aligned address, hence the alignment requirement above.
 */
#define STORE_WINDOW(reg) \
	std %l0, [%reg + RW_L0]; \
	std %l2, [%reg + RW_L2]; \
	std %l4, [%reg + RW_L4]; \
	std %l6, [%reg + RW_L6]; \
	std %i0, [%reg + RW_I0]; \
	std %i2, [%reg + RW_I2]; \
	std %i4, [%reg + RW_I4]; \
	std %i6, [%reg + RW_I6];
/* Load a register window from the area beginning at %reg.
 * Inverse of STORE_WINDOW: restores %l0-%l7 and %i0-%i7 with
 * doubleword loads, so %reg must again be 8-byte aligned.
 */
#define LOAD_WINDOW(reg) \
	ldd [%reg + RW_L0], %l0; \
	ldd [%reg + RW_L2], %l2; \
	ldd [%reg + RW_L4], %l4; \
	ldd [%reg + RW_L6], %l6; \
	ldd [%reg + RW_I0], %i0; \
	ldd [%reg + RW_I2], %i2; \
	ldd [%reg + RW_I4], %i4; \
	ldd [%reg + RW_I6], %i6;
/* Loading and storing struct pt_regs trap frames.  Every accessor
 * below addresses the frame at %base_reg + STACKFRAME_SZ, i.e. the
 * pt_regs area sits just above a stack frame at %base_reg.
 */

/* Restore the trap-time in registers %i0-%i7 from the pt_regs frame
 * (doubleword loads, so the PT_I* slots must be 8-byte aligned). */
#define LOAD_PT_INS(base_reg) \
	ldd [%base_reg + STACKFRAME_SZ + PT_I0], %i0; \
	ldd [%base_reg + STACKFRAME_SZ + PT_I2], %i2; \
	ldd [%base_reg + STACKFRAME_SZ + PT_I4], %i4; \
	ldd [%base_reg + STACKFRAME_SZ + PT_I6], %i6;
/* Restore the trap-time globals %g1-%g7 from the pt_regs frame.
 * %g1 is loaded with a single-word ld (no even-numbered partner at
 * PT_G1); %g2/%g4/%g6 pair with %g3/%g5/%g7 for doubleword loads.
 * %g0 is hardwired to zero on SPARC and is never saved or restored.
 */
#define LOAD_PT_GLOBALS(base_reg) \
	ld [%base_reg + STACKFRAME_SZ + PT_G1], %g1; \
	ldd [%base_reg + STACKFRAME_SZ + PT_G2], %g2; \
	ldd [%base_reg + STACKFRAME_SZ + PT_G4], %g4; \
	ldd [%base_reg + STACKFRAME_SZ + PT_G6], %g6;
/* Restore the %y register (multiply/divide extension) from the frame.
 * %y is only writable via wr, so a scratch register is needed;
 * scratch is clobbered. */
#define LOAD_PT_YREG(base_reg, scratch) \
	ld [%base_reg + STACKFRAME_SZ + PT_Y], %scratch; \
	wr %scratch, 0x0, %y;
/* Load the privileged trap state - the saved %psr, %pc and %npc
 * images - from the frame into three caller-chosen registers
 * (they are not written to the hardware registers here). */
#define LOAD_PT_PRIV(base_reg, pt_psr, pt_pc, pt_npc) \
	ld [%base_reg + STACKFRAME_SZ + PT_PSR], %pt_psr; \
	ld [%base_reg + STACKFRAME_SZ + PT_PC], %pt_pc; \
	ld [%base_reg + STACKFRAME_SZ + PT_NPC], %pt_npc;
/* Full pt_regs reload: %y first, then the ins and globals, and
 * finally the psr/pc/npc images into the given registers.
 * scratch is clobbered (used by LOAD_PT_YREG). */
#define LOAD_PT_ALL(base_reg, pt_psr, pt_pc, pt_npc, scratch) \
	LOAD_PT_YREG(base_reg, scratch) \
	LOAD_PT_INS(base_reg) \
	LOAD_PT_GLOBALS(base_reg) \
	LOAD_PT_PRIV(base_reg, pt_psr, pt_pc, pt_npc)
/* Save the trap-time in registers %i0-%i7 into the pt_regs frame
 * at %base_reg + STACKFRAME_SZ (mirror of LOAD_PT_INS). */
#define STORE_PT_INS(base_reg) \
	std %i0, [%base_reg + STACKFRAME_SZ + PT_I0]; \
	std %i2, [%base_reg + STACKFRAME_SZ + PT_I2]; \
	std %i4, [%base_reg + STACKFRAME_SZ + PT_I4]; \
	std %i6, [%base_reg + STACKFRAME_SZ + PT_I6];
/* Save the globals %g1-%g7 into the pt_regs frame (mirror of
 * LOAD_PT_GLOBALS: single-word st for %g1, doubleword stores for
 * the %g2/%g4/%g6 pairs). */
#define STORE_PT_GLOBALS(base_reg) \
	st %g1, [%base_reg + STACKFRAME_SZ + PT_G1]; \
	std %g2, [%base_reg + STACKFRAME_SZ + PT_G2]; \
	std %g4, [%base_reg + STACKFRAME_SZ + PT_G4]; \
	std %g6, [%base_reg + STACKFRAME_SZ + PT_G6];
/* Save the %y register into the frame; %y is only readable via rd,
 * so it is staged through scratch (clobbered). */
#define STORE_PT_YREG(base_reg, scratch) \
	rd %y, %scratch; \
	st %scratch, [%base_reg + STACKFRAME_SZ + PT_Y];
/* Save the privileged trap state images (%psr, %pc, %npc values held
 * in the three given registers) into the pt_regs frame. */
#define STORE_PT_PRIV(base_reg, pt_psr, pt_pc, pt_npc) \
	st %pt_psr, [%base_reg + STACKFRAME_SZ + PT_PSR]; \
	st %pt_pc, [%base_reg + STACKFRAME_SZ + PT_PC]; \
	st %pt_npc, [%base_reg + STACKFRAME_SZ + PT_NPC];
/* Full pt_regs save.  Note the order differs from LOAD_PT_ALL:
 * psr/pc/npc go first here, then globals, %y and the ins.
 * g_scratch is clobbered (used by STORE_PT_YREG). */
#define STORE_PT_ALL(base_reg, reg_psr, reg_pc, reg_npc, g_scratch) \
	STORE_PT_PRIV(base_reg, reg_psr, reg_pc, reg_npc) \
	STORE_PT_GLOBALS(base_reg) \
	STORE_PT_YREG(base_reg, g_scratch) \
	STORE_PT_INS(base_reg)
/* Spill the current register window into the thread_info save area
 * when the user stack cannot be used ("bolixed").
 *
 * cur_reg = current thread_info pointer; scratch is clobbered.
 *
 * With w_saved = the count at [cur_reg + TI_W_SAVED]:
 *   - %sp is stored at TI_RWIN_SPTRS + w_saved * 4
 *     (sll 2 builds the 4-byte-per-entry pointer-array offset);
 *   - the window registers are stored at TI_REG_WINDOW + w_saved * 64
 *     (the extra sll 4 turns w_saved*4 into w_saved*64: one window is
 *      16 registers * 4 bytes);
 *   - srl 6 recovers w_saved from the *64 offset, which is then
 *     incremented and written back to TI_W_SAVED.
 */
#define SAVE_BOLIXED_USER_STACK(cur_reg, scratch) \
	ld [%cur_reg + TI_W_SAVED], %scratch; \
	sll %scratch, 2, %scratch; \
	add %scratch, %cur_reg, %scratch; \
	st %sp, [%scratch + TI_RWIN_SPTRS]; \
	sub %scratch, %cur_reg, %scratch; \
	sll %scratch, 4, %scratch; \
	add %scratch, %cur_reg, %scratch; \
	STORE_WINDOW(scratch + TI_REG_WINDOW); \
	sub %scratch, %cur_reg, %scratch; \
	srl %scratch, 6, %scratch; \
	add %scratch, 1, %scratch; \
	st %scratch, [%cur_reg + TI_W_SAVED];
#ifdef CONFIG_SMP
/* Results of LOAD_CURRENT() after BTFIXUP for SUN4M, SUN4D & LEON (comments) */

/* sun4m: the CPU number is derived from %tbr; srl 10 + and 0xc yield
 * a byte offset selecting one of four 4-byte slots in current_set[].
 * NOTE(review): exact %tbr bit layout assumed from this masking --
 * confirm against the sun4m trap-base-register documentation. */
#define LOAD_CURRENT4M(dest_reg, idreg) \
	rd %tbr, %idreg; \
	sethi %hi(current_set), %dest_reg; \
	srl %idreg, 10, %idreg; \
	or %dest_reg, %lo(current_set), %dest_reg; \
	and %idreg, 0xc, %idreg; \
	ld [%idreg + %dest_reg], %dest_reg;

/* sun4d: the CPU id is read from the alternate-space register
 * ASI_M_VIKING_TMP1; sll 2 scales it to a 4-byte current_set[] slot. */
#define LOAD_CURRENT4D(dest_reg, idreg) \
	lda [%g0] ASI_M_VIKING_TMP1, %idreg; \
	sethi %hi(C_LABEL(current_set)), %dest_reg; \
	sll %idreg, 2, %idreg; \
	or %dest_reg, %lo(C_LABEL(current_set)), %dest_reg; \
	ld [%idreg + %dest_reg], %dest_reg;

/* leon: the CPU index sits in the top nibble of %asr17 (srl 0x1c
 * extracts bits 31:28); sll 2 scales it to a 4-byte slot. */
#define LOAD_CURRENT_LEON(dest_reg, idreg) \
	rd %asr17, %idreg; \
	sethi %hi(current_set), %dest_reg; \
	srl %idreg, 0x1c, %idreg; \
	or %dest_reg, %lo(current_set), %dest_reg; \
	sll %idreg, 0x2, %idreg; \
	ld [%idreg + %dest_reg], %dest_reg;

/* Blackbox - take care with this... - check smp4m and smp4d before changing this. */
/* This sequence is rewritten at boot into one of the variants above.
 * As emitted it indexes current_set[] by the byte at boot_cpu_id4
 * (presumably boot_cpu_id * 4 -- confirm against the boot code).
 * NOTE(review): the first sethi into %idreg is immediately
 * overwritten below; it appears to serve only as a patch anchor for
 * ___b_load_current -- verify against smp4m/smp4d before touching. */
#define LOAD_CURRENT(dest_reg, idreg) \
	sethi %hi(___b_load_current), %idreg; \
	sethi %hi(current_set), %dest_reg; \
	sethi %hi(boot_cpu_id4), %idreg; \
	or %dest_reg, %lo(current_set), %dest_reg; \
	ldub [%idreg + %lo(boot_cpu_id4)], %idreg; \
	ld [%idreg + %dest_reg], %dest_reg;
#else
/* UP: "current" is simply the first (only) entry of current_set[].
 * idreg is clobbered; dest_reg receives the thread pointer. */
#define LOAD_CURRENT(dest_reg, idreg) \
	sethi %hi(current_set), %idreg; \
	ld [%idreg + %lo(current_set)], %dest_reg;
#endif
144 #endif /* !(_SPARC_WINMACRO_H) */