/*
 * CDDL HEADER START
 *
 * The contents of this file are subject to the terms of the
 * Common Development and Distribution License (the "License").
 * You may not use this file except in compliance with the License.
 *
 * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
 * or http://www.opensolaris.org/os/licensing.
 * See the License for the specific language governing permissions
 * and limitations under the License.
 *
 * When distributing Covered Code, include this CDDL HEADER in each
 * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
 * If applicable, add the following below this CDDL HEADER, with the
 * fields enclosed by brackets "[]" replaced with your own identifying
 * information: Portions Copyright [yyyy] [name of copyright owner]
 *
 * CDDL HEADER END
 */

/*
 * Copyright 2007 Sun Microsystems, Inc.  All rights reserved.
 * Use is subject to license terms.
 */

#pragma ident	"%Z%%M%	%I%	%E% SMI"

#include <sys/segments.h>
#include <sys/controlregs.h>

/*
 * Do a call into BIOS. This goes down to 16 bit real mode and back again.
 */

/*
 * instruction prefix to change operand size in instruction
 */
#define	DATASZ	.byte 0x66;
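
/*
 * In a 16 bit code segment the 0x66 prefix flips the default operand size,
 * so "DATASZ movl ..." below still operates on 32 bit operands even while
 * the CPU is executing 16 bit code.
 */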

#if defined(__amd64)
#define	MOVCR(x, y)	movq	x, %rax; movq	%rax, y
#define	LOAD_XAX(sym)	leaq	sym, %rax
#elif defined(__i386)
#define	MOVCR(x, y)	movl	x, %eax; movl	%eax, y
#define	LOAD_XAX(sym)	leal	sym, %eax
#endif
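
/*
 * Control registers can only be moved to and from a general purpose
 * register, so MOVCR() bounces the value through %rax/%eax.  LOAD_XAX()
 * computes the address of a symbol so it can be patched into the hand
 * assembled 16 bit instructions below.
 */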

	.globl	_start
_start:

#if defined(__i386)

	/*
	 * Save caller registers
	 */
	movl	%ebp, save_ebp
	movl	%esp, save_esp
	movl	%ebx, save_ebx
	movl	%esi, save_esi
	movl	%edi, save_edi

	/* get registers argument into esi */
	movl	8(%esp), %esi

	/* put interrupt number in %bl */
	movl	4(%esp), %ebx

	/* Switch to a low memory stack */
	movl	$_start, %esp
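
	/*
	 * The stack now grows down from _start, i.e. into the low memory
	 * just below this code, which stays addressable once the CPU is
	 * back in real mode.
	 */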

	/* allocate space for args on stack */
	subl	$18, %esp
	movl	%esp, %edi

#elif defined(__amd64)

	/*
	 * Save caller registers
	 */
	movq	%rbp, save_rbp
	movq	%rsp, save_rsp
	movq	%rbx, save_rbx
	movq	%rsi, save_rsi
	movq	%r12, save_r12
	movq	%r13, save_r13
	movq	%r14, save_r14
	movq	%r15, save_r15

	/* Switch to a low memory stack */
	movq	$_start, %rsp

	/* put interrupt number in %bl */
	movq	%rdi, %rbx

	/* allocate space for args on stack */
	subq	$18, %rsp
	movq	%rsp, %rdi

#endif

	/*
	 * copy args from high memory to stack in low memory
	 */
	movl	$18, %ecx
	rep
	movsb
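
	/*
	 * The 18 byte argument block is nine 16 bit values; the pops below
	 * load them in the order ax, bx, cx, dx, si, di, bp, es, ds.
	 */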

	/*
	 * Save system registers
	 */
	sidt	save_idt
	sgdt	save_gdt
	str	save_tr
	movw	%cs, save_cs
	movw	%ds, save_ds
	movw	%ss, save_ss
	movw	%es, save_es
	movw	%fs, save_fs
	movw	%gs, save_gs
	MOVCR(	%cr4, save_cr4)
	MOVCR(	%cr3, save_cr3)
	MOVCR(	%cr0, save_cr0)

#if defined(__amd64)
	/*
	 * save/clear the extension parts of the fs/gs base registers and cr8
	 */
	movl	$MSR_AMD_FSBASE, %ecx
	rdmsr
	movl	%eax, save_fsbase
	movl	%edx, save_fsbase + 4
	xorl	%eax, %eax
	xorl	%edx, %edx
	wrmsr

	movl	$MSR_AMD_GSBASE, %ecx
	rdmsr
	movl	%eax, save_gsbase
	movl	%edx, save_gsbase + 4
	xorl	%eax, %eax
	xorl	%edx, %edx
	wrmsr

	movl	$MSR_AMD_KGSBASE, %ecx
	rdmsr
	movl	%eax, save_kgsbase
	movl	%edx, save_kgsbase + 4
	xorl	%eax, %eax
	xorl	%edx, %edx
	wrmsr

	movq	%cr8, %rax
	movq	%rax, save_cr8
#endif
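
	/*
	 * The base MSRs saved above are written back below, after the far
	 * return to 64 bit mode; they are zeroed here so no stale 64 bit
	 * base values are left behind while the BIOS runs.
	 */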

	/*
	 * set offsets in 16 bit ljmp instructions below
	 */
	LOAD_XAX(enter_real)
	movw	%ax, enter_real_ljmp

	LOAD_XAX(enter_protected)
	movw	%ax, enter_protected_ljmp

	LOAD_XAX(gdt_info)
	movw	%ax, gdt_info_load
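
	/*
	 * The ljmp and lgdt instructions below are hand assembled, so their
	 * 16 bit operands have to be patched in at run time with the low 16
	 * bits of the addresses computed above.
	 */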

	/*
	 * insert BIOS interrupt number into later instruction
	 */
	movb	%bl, int_instr+1
	jmp	1f
1:

	/*
	 * zero out all the registers to make sure they're 16 bit clean
	 */
#if defined(__amd64)
	xorq	%r8, %r8
	xorq	%r9, %r9
	xorq	%r10, %r10
	xorq	%r11, %r11
	xorq	%r12, %r12
	xorq	%r13, %r13
	xorq	%r14, %r14
	xorq	%r15, %r15
#endif
	xorl	%eax, %eax
	xorl	%ebx, %ebx
	xorl	%ecx, %ecx
	xorl	%edx, %edx
	xorl	%ebp, %ebp
	xorl	%esi, %esi
	xorl	%edi, %edi

	/*
	 * Load our own GDT/IDT
	 */
	lgdt	gdt_info
	lidt	idt_info

#if defined(__amd64)
	/*
	 * Shut down 64 bit mode. First get into compatibility mode.
	 */
	movq	%rsp, %rax
	pushq	$B32DATA_SEL
	pushq	%rax
	pushf
	pushq	$B32CODE_SEL
	pushq	$1f
	iretq
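
	/*
	 * The iretq above pops %rip, %cs, %rflags, %rsp and %ss, so execution
	 * resumes at the 1: label below with %cs holding a 32 bit
	 * compatibility mode code selector.
	 */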
1:
	.code32

	/*
	 * disable long mode by:
	 * - shutting down paging (bit 31 of cr0)
	 * - flushing the TLB
	 * - disabling LME (long mode enable) in EFER (extended feature reg)
	 */
	movl	%cr0, %eax
	btcl	$31, %eax		/* disable paging */
	movl	%eax, %cr0
	ljmp	$B32CODE_SEL, $1f
1:
	xorl	%eax, %eax
	movl	%eax, %cr3		/* flushes TLB */

	movl	$MSR_AMD_EFER, %ecx	/* Extended Feature Enable */
	rdmsr
	btcl	$8, %eax		/* bit 8 Long Mode Enable bit */
	wrmsr
#endif
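
	/*
	 * With paging (CR0.PG) off, clearing EFER.LME above fully disables
	 * long mode; it is set again on the way back, just before paging is
	 * restored.
	 */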

	/*
	 * ok.. now enter 16 bit mode, so we can shut down protected mode
	 *
	 * We'll have to act like we're still in a 32 bit section.
	 * So the code from this point has DATASZ in front of it to get 32 bit
	 * operands. If DATASZ is missing the operands will be 16 bit.
	 *
	 * Now shut down paging and protected (ie. segmentation) modes.
	 */
	ljmp	$B16CODE_SEL, $enter_16_bit
enter_16_bit:

	/*
	 * Make sure hidden parts of segment registers are 16 bit clean
	 */
	DATASZ	movl	$B16DATA_SEL, %eax
	movw	%ax, %ss
	movw	%ax, %ds
	movw	%ax, %es
	movw	%ax, %fs
	movw	%ax, %gs

	DATASZ	movl	$0x0, %eax	/* put us in real mode */
	DATASZ	movl	%eax, %cr0
	.byte	0xea			/* ljmp */
enter_real_ljmp:
	.value	0			/* addr (16 bit) */
	.value	0x0			/* value for %cs */
enter_real:
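
	/*
	 * Execution continues here in real mode, with %cs loaded with 0 by
	 * the patched far jump above.
	 */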

	/*
	 * zero out the remaining segment registers
	 */
	DATASZ	xorl	%eax, %eax
	movw	%ax, %ss
	movw	%ax, %ds
	movw	%ax, %es
	movw	%ax, %fs
	movw	%ax, %gs

	/*
	 * load the arguments to the BIOS call from the stack
	 */
	popl	%eax		/* really executes a 16 bit pop */
	popl	%ebx
	popl	%ecx
	popl	%edx
	popl	%esi
	popl	%edi
	popl	%ebp
	pop	%es
	pop	%ds

	/*
	 * do the actual BIOS call
	 */
int_instr:
	int	$0x10		/* this int number is overwritten */
	cli			/* ensure interrupts remain disabled */

	/*
	 * save results of the BIOS call
	 */
	pushf
	push	%ds
	push	%es
	pushl	%ebp		/* still executes as 16 bit */
	pushl	%edi
	pushl	%esi
	pushl	%edx
	pushl	%ecx
	pushl	%ebx
	pushl	%eax

	/*
	 * Restore protected mode and 32 bit execution
	 */
	push	$0			/* make sure %ds is zero before lgdt */
	pop	%ds
	.byte	0x0f, 0x01, 0x16	/* lgdt */
gdt_info_load:
	.value	0		/* temp GDT in currently addressable mem */

	DATASZ	movl	$0x1, %eax
	DATASZ	movl	%eax, %cr0

	.byte	0xea			/* ljmp */
enter_protected_ljmp:
	.value	0			/* addr (still in 16 bit) */
	.value	B32CODE_SEL		/* %cs value */
enter_protected:

	/*
	 * We are now back in a 32 bit code section, fix data/stack segments
	 */
	.code32
	movw	$B32DATA_SEL, %ax
	movw	%ax, %ds
	movw	%ax, %ss

	/*
	 * Re-enable paging. Note we only use 32 bit mov's to restore these
	 * control registers. That's OK as the upper 32 bits are always zero.
	 */
	movl	save_cr4, %eax
	movl	%eax, %cr4
	movl	save_cr3, %eax
	movl	%eax, %cr3

#if defined(__amd64)
	/*
	 * re-enable long mode
	 */
	movl	$MSR_AMD_EFER, %ecx
	rdmsr
	btsl	$8, %eax
	wrmsr
#endif

	movl	save_cr0, %eax
	movl	%eax, %cr0
	jmp	enter_paging
enter_paging:
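
	/*
	 * The jump right after the mov to %cr0 flushes the prefetch queue,
	 * the conventional step after switching paging back on.
	 */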

#if defined(__amd64)
	/*
	 * transition back to 64 bit mode
	 */
	pushl	$B64CODE_SEL
	pushl	$longmode
	lret
longmode:
	.code64
#endif
	/*
	 * restore caller frame pointer and segment registers
	 */
	lgdt	save_gdt
	lidt	save_idt

	/*
	 * Before loading the task register we need to reset the busy bit
	 * in its corresponding GDT descriptor. The busy bit is the 2nd bit in
	 * the 5th byte of the descriptor.
	 */
#if defined(__i386)
	movzwl	save_tr, %eax
	addl	save_gdt+2, %eax
	btcl	$1, 5(%eax)
#elif defined(__amd64)
	movzwq	save_tr, %rax
	addq	save_gdt+2, %rax
	btcl	$1, 5(%rax)
#endif
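
	/*
	 * ltr faults if the referenced TSS descriptor is already marked
	 * busy, which is why the busy bit was cleared above.
	 */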
	ltr	save_tr
	movw	save_ds, %ds
	movw	save_ss, %ss
	movw	save_es, %es
	movw	save_fs, %fs
	movw	save_gs, %gs

#if defined(__i386)
	pushl	save_cs
	pushl	$.newcs
	lret
#elif defined(__amd64)
	pushq	save_cs
	pushq	$.newcs
	lretq
#endif
.newcs:

#if defined(__amd64)
	/*
	 * restore the hidden kernel segment base register values
	 */
	movl	save_fsbase, %eax
	movl	save_fsbase + 4, %edx
	movl	$MSR_AMD_FSBASE, %ecx
	wrmsr

	movl	save_gsbase, %eax
	movl	save_gsbase + 4, %edx
	movl	$MSR_AMD_GSBASE, %ecx
	wrmsr

	movl	save_kgsbase, %eax
	movl	save_kgsbase + 4, %edx
	movl	$MSR_AMD_KGSBASE, %ecx
	wrmsr

	movq	save_cr8, %rax
	cmpq	$0, %rax
	je	1f
	movq	%rax, %cr8
1:
#endif

	/*
	 * copy results to caller's location, then restore remaining registers
	 */
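
	/*
	 * The 18 bytes at the top of the stack (ax, bx, cx, dx, si, di, bp,
	 * es, ds) are copied back to the caller's register block; the FLAGS
	 * word pushed after the BIOS call sits just above them at offset 18
	 * and becomes the low 16 bits of the return value.
	 */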
#if defined(__i386)
	movl	save_esp, %edi
	movl	8(%edi), %edi
	movl	%esp, %esi
	movl	$18, %ecx
	rep
	movsb
	movw	18(%esp), %ax
	andl	$0xffff, %eax
	movl	save_ebx, %ebx
	movl	save_esi, %esi
	movl	save_edi, %edi
	movl	save_esp, %esp
	movl	save_ebp, %ebp
	movl	save_esp, %esp
	ret

#elif defined(__amd64)
	movq	save_rsi, %rdi
	movq	%rsp, %rsi
	movq	$18, %rcx
	rep
	movsb
	movw	18(%rsp), %ax
	andq	$0xffff, %rax
	movq	save_r12, %r12
	movq	save_r13, %r13
	movq	save_r14, %r14
	movq	save_r15, %r15
	movq	save_rbx, %rbx
	movq	save_rbp, %rbp
	movq	save_rsp, %rsp
	ret

#endif

/*
 * Caller's registers to restore
 */
	.align 4
save_esi:
	.long	0
save_edi:
	.long	0
save_ebx:
	.long	0
save_ebp:
	.long	0
save_esp:
	.long	0

	.align 8
#if defined(__amd64)
save_rsi:
	.quad	0
save_rbx:
	.quad	0
save_rbp:
	.quad	0
save_rsp:
	.quad	0
save_r12:
	.quad	0
save_r13:
	.quad	0
save_r14:
	.quad	0
save_r15:
	.quad	0
save_kgsbase:
	.quad	0
save_gsbase:
	.quad	0
save_fsbase:
	.quad	0
save_cr8:
	.quad	0
#endif	/* __amd64 */

save_idt:
	.quad	0
	.quad	0

save_gdt:
	.quad	0
	.quad	0

save_cr0:
	.quad	0
save_cr3:
	.quad	0
save_cr4:
	.quad	0
save_cs:
	.quad	0
save_ss:
	.value	0
save_ds:
	.value	0
save_es:
	.value	0
save_fs:
	.value	0
save_gs:
	.value	0
save_tr:
	.value	0
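
/*
 * idt_info below describes the real mode interrupt vector table: limit
 * 0x3ff, base 0, i.e. 256 four byte vectors starting at physical address 0.
 */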
idt_info:
	.value	0x3ff
	.quad	0

/*
 * We need to trampoline thru a gdt we have in low memory.
 */
#include "../boot/boot_gdt.s"