de-linting of .s files

*** 22,39 ****
  /*
   * Copyright 2007 Sun Microsystems, Inc.  All rights reserved.
   * Use is subject to license terms.
   */
! #pragma ident	"%Z%%M%	%I%	%E% SMI"
  
- #if defined(__lint)
- 
- int silence_lint = 0;
- 
- #else
- 
  #include <sys/segments.h>
  #include <sys/controlregs.h>
  
  /*
   * Do a call into BIOS. This goes down to 16 bit real mode and back again.
--- 22,35 ----
  /*
   * Copyright 2007 Sun Microsystems, Inc.  All rights reserved.
   * Use is subject to license terms.
   */
! /*
!  * Copyright 2019 Joyent, Inc.
!  */
  
  #include <sys/segments.h>
  #include <sys/controlregs.h>
  
  /*
   * Do a call into BIOS. This goes down to 16 bit real mode and back again.
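
For reference, the construct deleted in this first hunk is the old lint workaround: the whole assembly file was wrapped so that lint compiled only a C stub and never saw the assembler source. A minimal sketch of that pattern, reconstructed from the lines removed above and the matching #endif removed at the end of the file:

	#if defined(__lint)

	int silence_lint = 0;		/* give lint a C object to look at */

	#else
		/* ... the real assembly source ... */
	#endif /* __lint */

With the guard gone, the SCCS ident pragma is dropped as well and a Joyent copyright block takes its place.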
*** 42,91 ****
  /*
   * instruction prefix to change operand size in instruction
   */
  #define	DATASZ	.byte 0x66;
  
- #if defined(__amd64)
- #define	MOVCR(x, y)	movq  x,%rax; movq  %rax, y
- #define	LOAD_XAX(sym)	leaq	sym, %rax
- #elif defined(__i386)
- #define	MOVCR(x, y)	movl  x,%eax; movl  %eax, y
- #define	LOAD_XAX(sym)	leal	sym, %eax
- #endif
- 
  	.globl _start
  _start:
  
- #if defined(__i386)
- 
  	/*
  	 * Save caller registers
  	 */
- 	movl	%ebp, save_ebp
- 	movl	%esp, save_esp
- 	movl	%ebx, save_ebx
- 	movl	%esi, save_esi
- 	movl	%edi, save_edi
- 
- 	/* get registers argument into esi */
- 	movl	8(%esp), %esi
- 
- 	/* put interrupt number in %bl */
- 	movl	4(%esp), %ebx
- 
- 	/* Switch to a low memory stack */
- 	movl	$_start, %esp
- 
- 	/* allocate space for args on stack */
- 	subl	$18, %esp
- 	movl	%esp, %edi
- 
- #elif defined(__amd64)
- 
- 	/*
- 	 * Save caller registers
- 	 */
  	movq	%rbp, save_rbp
  	movq	%rsp, save_rsp
  	movq	%rbx, save_rbx
  	movq	%rsi, save_rsi
  	movq	%r12, save_r12
--- 38,53 ----
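
The MOVCR and LOAD_XAX macros existed only to pick 32-bit or 64-bit encodings of the same operation; with the i386 path removed, their remaining uses are replaced by the plain amd64 instructions, as the later hunks show. For example, from the macro definitions deleted above:

	/* MOVCR( %cr4, save_cr4) expanded to: */
	movq	%cr4, %rax
	movq	%rax, save_cr4

	/* LOAD_XAX(enter_real) expanded to: */
	leaq	enter_real, %rax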
*** 101,112 ****
  	/* allocate space for args on stack */
  	subq	$18, %rsp
  	movq	%rsp, %rdi
  
- #endif
- 
  	/* copy args from high memory to stack in low memory */
  	cld
  	movl	$18, %ecx
  	rep
  	movsb
--- 63,72 ----
*** 121,135 ****
  	movw	%ds, save_ds
  	movw	%ss, save_ss
  	movw	%es, save_es
  	movw	%fs, save_fs
  	movw	%gs, save_gs
! 	MOVCR(	%cr4, save_cr4)
! 	MOVCR(	%cr3, save_cr3)
! 	MOVCR(	%cr0, save_cr0)
  
- #if defined(__amd64)
  	/*
  	 * save/clear the extension parts of the fs/gs base registers and cr8
  	 */
  	movl	$MSR_AMD_FSBASE, %ecx
  	rdmsr
--- 81,97 ----
  	movw	%ds, save_ds
  	movw	%ss, save_ss
  	movw	%es, save_es
  	movw	%fs, save_fs
  	movw	%gs, save_gs
! 	movq	%cr4, %rax
! 	movq	%rax, save_cr4
! 	movq	%cr3, %rax
! 	movq	%rax, save_cr3
! 	movq	%cr0, %rax
! 	movq	%rax, save_cr0
  
  	/*
  	 * save/clear the extension parts of the fs/gs base registers and cr8
  	 */
  	movl	$MSR_AMD_FSBASE, %ecx
  	rdmsr
*** 155,176 ****
  	xorl	%edx, %edx
  	wrmsr
  
  	movq	%cr8, %rax
  	movq	%rax, save_cr8
- #endif
  
  	/*
  	 * set offsets in 16 bit ljmp instructions below
  	 */
! 	LOAD_XAX(enter_real)
  	movw	%ax, enter_real_ljmp
! 	LOAD_XAX(enter_protected)
  	movw	%ax, enter_protected_ljmp
! 	LOAD_XAX(gdt_info)
  	movw	%ax, gdt_info_load
  
  	/*
  	 * insert BIOS interrupt number into later instruction
  	 */
--- 117,137 ----
  	xorl	%edx, %edx
  	wrmsr
  
  	movq	%cr8, %rax
  	movq	%rax, save_cr8
  
  	/*
  	 * set offsets in 16 bit ljmp instructions below
  	 */
! 	leaq	enter_real, %rax
  	movw	%ax, enter_real_ljmp
! 	leaq	enter_protected, %rax
  	movw	%ax, enter_protected_ljmp
! 	leaq	gdt_info, %rax
  	movw	%ax, gdt_info_load
  
  	/*
  	 * insert BIOS interrupt number into later instruction
  	 */
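
The leaq/movw pairs store 16-bit offsets into far-jump instructions further down in the file, since the targets of those jumps must be immediates once the code is running in 16-bit mode. The patched instructions themselves are outside this hunk; a purely illustrative sketch of the shape being assumed, using the real enter_real_ljmp label but a hypothetical byte layout:

	.byte	0xea			/* direct far jmp (ptr16:16) */
enter_real_ljmp:
	.value	0			/* offset word, patched by movw %ax, enter_real_ljmp */
	.value	0x0			/* target code segment */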
*** 179,198 ****
  1:
  
  	/*
  	 * zero out all the registers to make sure they're 16 bit clean
  	 */
- #if defined(__amd64)
  	xorq	%r8, %r8
  	xorq	%r9, %r9
  	xorq	%r10, %r10
  	xorq	%r11, %r11
  	xorq	%r12, %r12
  	xorq	%r13, %r13
  	xorq	%r14, %r14
  	xorq	%r15, %r15
- #endif
  	xorl	%eax, %eax
  	xorl	%ebx, %ebx
  	xorl	%ecx, %ecx
  	xorl	%edx, %edx
  	xorl	%ebp, %ebp
--- 140,157 ----
*** 203,215 ****
  	 * Load our own GDT/IDT
  	 */
  	lgdt	gdt_info
  	lidt	idt_info
  
- #if defined(__amd64)
  	/*
! 	 * Shut down 64 bit mode. First get into compatiblity mode.
  	 */
  	movq	%rsp, %rax
  	pushq	$B32DATA_SEL
  	pushq	%rax
  	pushf
--- 162,173 ----
  	 * Load our own GDT/IDT
  	 */
  	lgdt	gdt_info
  	lidt	idt_info
  
  	/*
! 	 * Shut down 64 bit mode. First get into compatibility mode.
  	 */
  	movq	%rsp, %rax
  	pushq	$B32DATA_SEL
  	pushq	%rax
  	pushf
--- 162,173 ----
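
The pushes at the end of this hunk begin building an iretq frame that drops the CPU from 64-bit mode into a 32-bit compatibility-mode segment. A minimal sketch of that technique, assuming B32CODE_SEL names the 32-bit code selector from sys/segments.h (only the first few pushes are visible in the hunk itself):

	movq	%rsp, %rax
	pushq	$B32DATA_SEL		/* new SS */
	pushq	%rax			/* new RSP */
	pushf				/* RFLAGS */
	pushq	$B32CODE_SEL		/* 32-bit (compatibility mode) CS */
	pushq	$1f			/* RIP */
	iretq
1:
	.code32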
*** 236,246 ****
  	movl	$MSR_AMD_EFER, %ecx	/* Extended Feature Enable */
  	rdmsr
  	btcl	$8, %eax		/* bit 8 Long Mode Enable bit */
  	wrmsr
- #endif
  
  	/*
  	 * ok.. now enter 16 bit mode, so we can shut down protected mode
  	 *
  	 * We'll have to act like we're still in a 32 bit section.
--- 194,203 ----
*** 349,384 ****
  	movl	save_cr4, %eax
  	movl	%eax, %cr4
  	movl	save_cr3, %eax
  	movl	%eax, %cr3
  
- #if defined(__amd64)
  	/*
  	 * re-enable long mode
  	 */
  	movl	$MSR_AMD_EFER, %ecx
  	rdmsr
  	btsl	$8, %eax
  	wrmsr
- #endif
  
  	movl	save_cr0, %eax
  	movl	%eax, %cr0
  	jmp	enter_paging
  
  enter_paging:
- #if defined(__amd64)
  	/*
  	 * transition back to 64 bit mode
  	 */
  	pushl	$B64CODE_SEL
  	pushl	$longmode
  	lret
  longmode:
  	.code64
- #endif
  
  	/*
  	 * restore caller frame pointer and segment registers
  	 */
  	lgdt	save_gdt
  	lidt	save_idt
--- 306,337 ----
*** 386,423 ****
  	/*
  	 * Before loading the task register we need to reset the busy bit
  	 * in its corresponding GDT selector. The busy bit is the 2nd bit in
  	 * the 5th byte of the selector.
  	 */
- #if defined(__i386)
- 	movzwl	save_tr, %eax
- 	addl	save_gdt+2, %eax
- 	btcl	$1, 5(%eax)
- #elif defined(__amd64)
  	movzwq	save_tr, %rax
  	addq	save_gdt+2, %rax
  	btcl	$1, 5(%rax)
- #endif
  	ltr	save_tr
  	movw	save_ds, %ds
  	movw	save_ss, %ss
  	movw	save_es, %es
  	movw	save_fs, %fs
  	movw	save_gs, %gs
  
- #if defined(__i386)
- 	pushl	save_cs
- 	pushl	$.newcs
- 	lret
- #elif defined(__amd64)
  	pushq	save_cs
  	pushq	$.newcs
  	lretq
- #endif
  .newcs:
  
- #if defined(__amd64)
  	/*
  	 * restore the hidden kernel segment base register values
  	 */
  	movl	save_fsbase, %eax
  	movl	save_fsbase + 4, %edx
--- 339,363 ----
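
As the comment in this hunk says, byte 5 of the TSS descriptor is its access byte; the type field there is 0x9 for an available TSS and 0xB for a busy one, and ltr faults on a busy descriptor. A commented restatement of the surviving amd64 sequence, assuming save_gdt holds the value stored by sgdt (a 16-bit limit followed by the base address):

	movzwq	save_tr, %rax		/* TR selector = byte offset into the GDT */
	addq	save_gdt+2, %rax	/* + GDT base (skip the 16-bit limit) */
	btcl	$1, 5(%rax)		/* access byte: busy (0xB) -> available (0x9) */
	ltr	save_tr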
*** 437,469 ****
  	movq	save_cr8, %rax
  	cmpq	$0, %rax
  	je	1f
  	movq	%rax, %cr8
  1:
- #endif
  
  	/*
  	 * copy results to caller's location, then restore remaining registers
  	 */
- #if defined(__i386)
- 	movl	save_esp, %edi
- 	movl	8(%edi), %edi
- 	movl	%esp, %esi
- 	movl	$18, %ecx
- 	rep
- 	movsb
- 	movw	18(%esp), %ax
- 	andl	$0xffff, %eax
- 	movl	save_ebx, %ebx
- 	movl	save_esi, %esi
- 	movl	save_edi, %edi
- 	movl	save_esp, %esp
- 	movl	save_ebp, %ebp
- 	movl	save_esp, %esp
- 	ret
- 
- #elif defined(__amd64)
  	movq	save_rsi, %rdi
  	movq	%rsp, %rsi
  	movq	$18, %rcx
  	rep
  	movsb
--- 377,390 ----
*** 476,488 ****
  	movq	save_rbx, %rbx
  	movq	save_rbp, %rbp
  	movq	save_rsp, %rsp
  	ret
  
- #endif
- 
  /*
   * Caller's registers to restore
   */
  	.align 4
  save_esi:
--- 397,407 ----
*** 495,505 ****
  	.long	0
  save_esp:
  	.long	0
  
  	.align 8
- #if defined(__amd64)
  save_rsi:
  	.quad	0
  save_rbx:
  	.quad	0
  save_rbp:
--- 414,423 ----
*** 520,530 ****
  	.quad	0
  save_fsbase:
  	.quad	0
  save_cr8:
  	.quad	0
- #endif	/* __amd64 */
  
  save_idt:
  	.quad	0
  	.quad	0
--- 438,447 ----
*** 560,565 ****
  /*
   * We need to trampoline thru a gdt we have in low memory.
   */
  #include "../boot/boot_gdt.s"
  
- #endif /* __lint */
--- 477,481 ----