/* tangerine.git: arch/x86_64-pc/kernel/smpbootstrap.c */
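
/*
 * SMP bootstrap trampoline for the application processors (APs).
 *
 * Each AP starts here in 16-bit real mode after the INIT/SIPI sequence, is
 * switched into 32-bit protected mode and then into 64-bit long mode, and
 * finally jumps into the kernel entry point prepared by the boot CPU,
 * passing along the arguments stored in the data area below.
 */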

#include "../bootstrap/cpu.h"
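
/*
 * C-side views of the data area and descriptor tables laid out inside the
 * 16-bit trampoline below. The smp_* variables (page tables, stack, entry
 * point, arguments) are presumably filled in by the CPU bringup code before
 * the startup IPI is sent; smp_gdt64_sel and smp_kernel_target are used by
 * this file's own C code further down.
 */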
extern const struct {
    unsigned short size __attribute__((packed));
    unsigned long addr __attribute__((packed));
} smp_gdt64_sel;

extern const struct {
    void *target_ip __attribute__((packed));
    unsigned short target_sel __attribute__((packed));
} smp_kernel_target;

extern unsigned long smp_mmu, smp_arg1, smp_arg2, smp_arg3, smp_arg4, smp_baseaddr, smp_sp, smp_ip;

asm (".code16 \n"
     "    .type smpbootstrap_0,@function \n"
     "smpbootstrap_0: \n"
     "    cli \n"
     "    /* The config variables will be passed here... Do not execute them */ \n"
     "    jmp 1f \n"
     " \n"
     "    .align 4 \n"
     "smp_arg1:     .long 0 \n"          /* 0x0004 */
     "smp_arg2:     .long 0 \n"          /* 0x0008 */
     "smp_arg3:     .long 0 \n"          /* 0x000c */
     "smp_arg4:     .long 0 \n"          /* 0x0010 */
     "smp_mmu:      .long 0 \n"          /* 0x0014 */
     "smp_sp:       .long 0; .long 0 \n" /* 0x0018 */
     "smp_ip:       .long 0; .long 0 \n" /* 0x0020 */
     "smp_baseaddr: .long 0 \n"
31 "smp_gdt: .short 0; .short 0; .short 0; .short 0 \n"
32 " .short 0x1000; .short 0x0000; .short 0x9a00; .short 0x0040 \n"
33 " .short 0x1000; .short 0x0000; .short 0x9200; .short 0x0040 \n"
34 "smp_gdt64: .short 0; .short 0; .short 0; .short 0 \n"
35 " .short 0xffff; .short 0x0000; .short 0x9a00; .short 0x00af \n"
36 " .short 0xffff; .short 0x0000; .short 0x9200; .short 0x00af \n"
37 "smp_gdt_sel: .short 23 \n"
38 " .long 0 \n"
39 "smp_gdt64_sel: .short 23 \n"
40 " .long 0 \n"
41 "smp_kernel_target: .long 0; .short 8\n"
42 " \n"
43 "1: \n"
44 " mov %cs,%ax \n" /* Find out where the code resides */
45 " shl $4, %eax \n"
46 " movl %eax,%cs:smp_baseaddr \n" /* Store base address of the trampoline */
47 " leal smp_gdt(%eax), %ebx \n" /* Load physical address of 32-bit gdt */
48 " movl %ebx, %cs:smp_gdt_sel+2\n" /* Set up 32-bit gdt address */
49 " leal smp_gdt64(%eax), %ebx \n" /* Load physical address of 64-bit gdt */
50 " movl %ebx, %cs:smp_gdt64_sel+2\n" /* Set up 64-bit gdt address */
51 " leal boot64(%eax), %ebx \n" /* Load physical address of 64-bit code */
52 " movl %ebx, %cs:smp_kernel_target \n" /* Set up 64-bit entry address */
53 " movw %ax, %cs:smp_gdt+10 \n" /* Set base address of 32-bit code segment (bits 0:15) */
54 " movw %ax, %cs:smp_gdt+18 \n" /* Set base address of 32-bit data segment (bits 0:15) */
55 " shr $16, %eax \n"
56 " movb %al, %cs:smp_gdt+12 \n" /* Set base address of 32-bit code segment (bits 16:23) */
57 " movb %al, %cs:smp_gdt+20 \n" /* Set base address of 32-bit data segment (bits 16:23) */
58 " ADDR32 DATA32 lgdt %cs:smp_gdt_sel \n" /* Load gdt */
59 " \n"
60 " movw $0x3f8,%dx \n"
61 " movb $'x',%al \n"
62 " outb %al,%dx \n"
63 " \n"
64 " movl %cr0,%eax \n" /* Enter protected mode */
65 " orb $1,%al \n"
66 " movl %eax,%cr0 \n"
67 " \n"
68 " ljmp $0x8,$2f /* Please note that the %cs segment selector has to have it's base address properly adjusted */ \n"
69 " \n"
70 " .code32 \n"
71 "2: movw $0x10,%ax \n" /* Setup the 32-bit data selectors */
72 " movw %ax,%ds \n"
73 " movw %ax,%es \n"
74 " movw %ax,%ss \n"
75 " movl $4096-4,%esp \n" /* Stack at the top of this 4K page */
76 " \n"
77 " jmp smp_c_trampoline \n");

/*
 * This tiny procedure sets up the complete 64-bit environment - it loads the
 * descriptors, enables 64-bit mode, loads the MMU tables and, through paging,
 * activates 64-bit long mode.
 *
 * After that it is perfectly safe to jump into the pure 64-bit kernel.
 */
static void leave_32bit_mode()
{
    unsigned int v1, v2, v3, v4;

    asm volatile ("lgdt %0"::"m"(smp_gdt64_sel));

    asm volatile ("outb %b0,%w1"::"a"('b'),"Nd"(0x3f8));

    /* Enable PAE */
    wrcr(cr4, _CR4_PAE | _CR4_PGE);

    asm volatile ("outb %b0,%w1"::"a"('c'),"Nd"(0x3f8));

    /* Load the MMU translation tables */
    wrcr(cr3, smp_mmu);

    asm volatile ("outb %b0,%w1"::"a"('d'),"Nd"(0x3f8));

    /* Enable long mode (set EFER.LME) */
    rdmsr(EFER, &v1, &v2);
    v1 |= _EFER_LME;
    wrmsr(EFER, v1, v2);

    asm volatile ("outb %b0,%w1"::"a"('e'),"Nd"(0x3f8));

    /* Enable paging, which activates long mode */
    wrcr(cr0, _CR0_PG | _CR0_PE);
}
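
/*
 * When leave_32bit_mode() returns, long mode is active but %cs still refers
 * to a 32-bit descriptor, i.e. the CPU runs in compatibility mode. The far
 * jump through smp_kernel_target below loads the 64-bit code selector and
 * lands in boot64. The extra register operands on that ljmp put the
 * trampoline base into %ebx and the four arguments into %edi/%esi/%edx/%ecx,
 * which line up with the low halves of %rdi/%rsi/%rdx/%rcx - the System V
 * AMD64 argument registers - by the time the kernel entry point stored in
 * smp_ip is reached.
 */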
static void __attribute__((used, noreturn)) smp_c_trampoline()
{
    asm volatile ("outb %b0,%w1"::"a"('a'),"Nd"(0x3f8));

    leave_32bit_mode();

    asm volatile ("outb %b0,%w1"::"a"('f'),"Nd"(0x3f8));

    asm volatile ("ljmp *%0"::"m"(smp_kernel_target),"b"(smp_baseaddr),"D"(smp_arg1),"S"(smp_arg2),"d"(smp_arg3),"c"(smp_arg4));

    while(1);
}
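
/*
 * boot64: the first instructions executed with the 64-bit code selector in
 * place. Reload the data selectors, pick up the new stack pointer (smp_sp,
 * 0x18 off the trampoline base held in %rbx) and the kernel entry point
 * (smp_ip, 0x20), and jump there. The hlt loop is only reached if that entry
 * point ever returns.
 */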
123 asm (".code64\n"
124 "boot64: \n"
125 " mov $0x10,%ax \n"
126 " mov %ax,%ds \n"
127 " mov %ax,%es \n"
128 " mov %ax,%ss \n"
129 " movq 0x18(%rbx), %rsp \n"
130 " movq 0x20(%rbx), %rax \n"
131 " jmp *%rax \n"
132 "1: hlt; jmp 1b \n"
133 " .code32\n"