de-linting of .s files
@@ -22,18 +22,14 @@
/*
* Copyright 2007 Sun Microsystems, Inc. All rights reserved.
* Use is subject to license terms.
*/
-#pragma ident "%Z%%M% %I% %E% SMI"
+/*
+ * Copyright 2019 Joyent, Inc.
+ */
-#if defined(__lint)
-
-int silence_lint = 0;
-
-#else
-
#include <sys/segments.h>
#include <sys/controlregs.h>
/*
* Do a call into BIOS. This goes down to 16 bit real mode and back again.
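*
* The caller hands this code a BIOS interrupt number and a pointer to
* a block of register values. A minimal sketch of the C side, with
* assumed names for the entry point and the register struct:
*
*	struct bios_regs rp = { 0 };
*	rp.eax = 0x4f00;
*	bios_call(0x10, &rp);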
@@ -42,50 +38,16 @@
/*
* instruction prefix to change operand size in instruction
*/
#define DATASZ .byte 0x66;
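/*
 * DATASZ is the operand size override prefix. While the CPU executes
 * in 16 bit mode it defaults to 16 bit operands, so a hypothetical
 *
 *	DATASZ	movl	%eax, %ebx
 *
 * runs as a full 32 bit register move even inside a 16 bit segment.
 */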
-#if defined(__amd64)
-#define MOVCR(x, y) movq x,%rax; movq %rax, y
-#define LOAD_XAX(sym) leaq sym, %rax
-#elif defined(__i386)
-#define MOVCR(x, y) movl x,%eax; movl %eax, y
-#define LOAD_XAX(sym) leal sym, %eax
-#endif
-
.globl _start
_start:
-#if defined(__i386)
-
/*
* Save caller registers
*/
- movl %ebp, save_ebp
- movl %esp, save_esp
- movl %ebx, save_ebx
- movl %esi, save_esi
- movl %edi, save_edi
-
- /* get registers argument into esi */
- movl 8(%esp), %esi
-
- /* put interrupt number in %bl */
- movl 4(%esp), %ebx
-
- /* Switch to a low memory stack */
- movl $_start, %esp
-
- /* allocate space for args on stack */
- subl $18, %esp
- movl %esp, %edi
-
-#elif defined(__amd64)
-
- /*
- * Save caller registers
- */
movq %rbp, save_rbp
movq %rsp, save_rsp
movq %rbx, save_rbx
movq %rsi, save_rsi
movq %r12, save_r12
@@ -101,12 +63,10 @@
/* allocate space for args on stack */
subq $18, %rsp
movq %rsp, %rdi
-#endif
-
/* copy args from high memory to stack in low memory */
cld
movl $18, %ecx
rep
movsb
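/*
 * (the register block handed in by the caller is 18 bytes; it is
 * staged on the low memory stack so the 16 bit code can reach it)
 */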
@@ -121,15 +81,17 @@
movw %ds, save_ds
movw %ss, save_ss
movw %es, save_es
movw %fs, save_fs
movw %gs, save_gs
- MOVCR( %cr4, save_cr4)
- MOVCR( %cr3, save_cr3)
- MOVCR( %cr0, save_cr0)
+ movq %cr4, %rax
+ movq %rax, save_cr4
+ movq %cr3, %rax
+ movq %rax, save_cr3
+ movq %cr0, %rax
+ movq %rax, save_cr0
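/*
 * (control registers can only be moved to and from general purpose
 * registers, hence the bounce through %rax on the way to memory)
 */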
-#if defined(__amd64)
/*
* save/clear the extension parts of the fs/gs base registers and cr8
*/
movl $MSR_AMD_FSBASE, %ecx
rdmsr
@@ -155,22 +117,21 @@
xorl %edx, %edx
wrmsr
movq %cr8, %rax
movq %rax, save_cr8
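/*
 * (rdmsr and wrmsr transfer the MSR selected by %ecx through
 * %edx:%eax, 32 bits at a time)
 */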
-#endif
/*
* set offsets in 16 bit ljmp instructions below
*/
- LOAD_XAX(enter_real)
+ leaq enter_real, %rax
movw %ax, enter_real_ljmp
- LOAD_XAX(enter_protected)
+ leaq enter_protected, %rax
movw %ax, enter_protected_ljmp
- LOAD_XAX(gdt_info)
+ leaq gdt_info, %rax
movw %ax, gdt_info_load
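/*
 * (each 16 bit ljmp below encodes its target offset as an immediate
 * in the instruction stream, so the low 16 bits of the label
 * addresses are patched in at run time, at the spots marked by
 * enter_real_ljmp, enter_protected_ljmp and gdt_info_load)
 */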
/*
* insert BIOS interrupt number into later instruction
*/
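/*
 * (the int instruction encodes its vector as an immediate byte, so
 * the number in %bl is stored over a placeholder byte in the text)
 */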
@@ -179,20 +140,18 @@
1:
/*
* zero out all the registers to make sure they're 16 bit clean
*/
-#if defined(__amd64)
xorq %r8, %r8
xorq %r9, %r9
xorq %r10, %r10
xorq %r11, %r11
xorq %r12, %r12
xorq %r13, %r13
xorq %r14, %r14
xorq %r15, %r15
-#endif
xorl %eax, %eax
xorl %ebx, %ebx
xorl %ecx, %ecx
xorl %edx, %edx
xorl %ebp, %ebp
@@ -203,13 +162,12 @@
* Load our own GDT/IDT
*/
lgdt gdt_info
lidt idt_info
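/*
 * (gdt_info and idt_info are the usual pseudo-descriptors: a 16 bit
 * limit followed by the table's base address)
 */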
-#if defined(__amd64)
/*
- * Shut down 64 bit mode. First get into compatiblity mode.
+ * Shut down 64 bit mode. First get into compatibility mode.
*/
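/*
 * (the frame being built here is consumed by an iretq-style return
 * into a 32 bit code segment; returning to a 32 bit %cs drops the
 * CPU out of 64 bit mode into compatibility mode)
 */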
movq %rsp, %rax
pushq $B32DATA_SEL
pushq %rax
pushf
@@ -236,11 +194,10 @@
movl $MSR_AMD_EFER, %ecx /* Extended Feature Enable */
rdmsr
btcl $8, %eax /* bit 8 Long Mode Enable bit */
wrmsr
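/*
 * (the architecture only allows EFER.LME to change while paging is
 * disabled, which is why paging is shut off before this point)
 */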
-#endif
/*
* ok.. now enter 16 bit mode, so we can shut down protected mode
*
* We'll have to act like we're still in a 32 bit section.
@@ -349,36 +306,32 @@
movl save_cr4, %eax
movl %eax, %cr4
movl save_cr3, %eax
movl %eax, %cr3
-#if defined(__amd64)
/*
* re-enable long mode
*/
movl $MSR_AMD_EFER, %ecx
rdmsr
btsl $8, %eax
wrmsr
-#endif
movl save_cr0, %eax
movl %eax, %cr0
jmp enter_paging
enter_paging:
-#if defined(__amd64)
/*
* transition back to 64 bit mode
*/
pushl $B64CODE_SEL
pushl $longmode
lret
longmode:
.code64
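/*
 * (%cs is a 64 bit selector again after the lret, so the assembler
 * must emit 64 bit code from here on)
 */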
-#endif
/*
* restore caller frame pointer and segment registers
*/
lgdt save_gdt
lidt save_idt
@@ -386,38 +339,25 @@
/*
* Before loading the task register we need to reset the busy bit
* in its corresponding GDT descriptor. The busy bit is the 2nd bit in
* the 5th byte of the descriptor.
*/
-#if defined(__i386)
- movzwl save_tr, %eax
- addl save_gdt+2, %eax
- btcl $1, 5(%eax)
-#elif defined(__amd64)
movzwq save_tr, %rax
addq save_gdt+2, %rax
btcl $1, 5(%rax)
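/*
 * (save_gdt+2 holds the saved GDT base; adding the selector, which
 * is a byte offset into the GDT, addresses the TSS descriptor, and
 * btcl flips bit 1 of its type byte back to available, since ltr
 * faults on a descriptor that is still marked busy)
 */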
-#endif
ltr save_tr
movw save_ds, %ds
movw save_ss, %ss
movw save_es, %es
movw save_fs, %fs
movw save_gs, %gs
-#if defined(__i386)
- pushl save_cs
- pushl $.newcs
- lret
-#elif defined(__amd64)
pushq save_cs
pushq $.newcs
lretq
-#endif
.newcs:
-#if defined(__amd64)
/*
* restore the hidden kernel segment base register values
*/
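/*
 * (the segment register loads above wiped the 64 bit fs/gs base
 * addresses; the saved values are written back through the
 * FSBASE/GSBASE MSRs, again 32 bits at a time in %edx:%eax)
 */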
movl save_fsbase, %eax
movl save_fsbase + 4, %edx
@@ -437,33 +377,14 @@
movq save_cr8, %rax
cmpq $0, %rax
je 1f
movq %rax, %cr8
1:
-#endif
/*
* copy results to caller's location, then restore remaining registers
*/
-#if defined(__i386)
- movl save_esp, %edi
- movl 8(%edi), %edi
- movl %esp, %esi
- movl $18, %ecx
- rep
- movsb
- movw 18(%esp), %ax
- andl $0xffff, %eax
- movl save_ebx, %ebx
- movl save_esi, %esi
- movl save_edi, %edi
- movl save_esp, %esp
- movl save_ebp, %ebp
- movl save_esp, %esp
- ret
-
-#elif defined(__amd64)
movq save_rsi, %rdi
movq %rsp, %rsi
movq $18, %rcx
rep
movsb
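/*
 * (the same 18 byte register block, now holding the BIOS results, is
 * copied back out to the caller's buffer)
 */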
@@ -476,13 +397,11 @@
movq save_rbx, %rbx
movq save_rbp, %rbp
movq save_rsp, %rsp
ret
-#endif
-
/*
* Caller's registers to restore
*/
.align 4
save_esi:
@@ -495,11 +414,10 @@
.long 0
save_esp:
.long 0
.align 8
-#if defined(__amd64)
save_rsi:
.quad 0
save_rbx:
.quad 0
save_rbp:
@@ -520,11 +438,10 @@
.quad 0
save_fsbase:
.quad 0
save_cr8:
.quad 0
-#endif /* __amd64 */
save_idt:
.quad 0
.quad 0
@@ -560,6 +477,5 @@
/*
* We need to trampoline thru a gdt we have in low memory.
*/
#include "../boot/boot_gdt.s"
-#endif /* __lint */