8956 Implement KPTI
Reviewed by: Jerry Jelinek <jerry.jelinek@joyent.com>
Reviewed by: Robert Mustacchi <rm@joyent.com>
--- old/usr/src/uts/intel/asm/mmu.h
+++ new/usr/src/uts/intel/asm/mmu.h
1 1 /*
2 2 * CDDL HEADER START
3 3 *
4 4 * The contents of this file are subject to the terms of the
5 5 * Common Development and Distribution License (the "License").
6 6 * You may not use this file except in compliance with the License.
7 7 *
8 8 * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
9 9 * or http://www.opensolaris.org/os/licensing.
10 10 * See the License for the specific language governing permissions
11 11 * and limitations under the License.
12 12 *
13 13 * When distributing Covered Code, include this CDDL HEADER in each
14 14 * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
15 15 * If applicable, add the following below this CDDL HEADER, with the
16 16 * fields enclosed by brackets "[]" replaced with your own identifying
17 17 * information: Portions Copyright [yyyy] [name of copyright owner]
18 18 *
19 19 * CDDL HEADER END
20 20 */
21 21 /*
22 22 * Copyright 2007 Sun Microsystems, Inc. All rights reserved.
23 23 * Use is subject to license terms.
24 + *
25 + * Copyright 2018 Joyent, Inc.
24 26 */
25 27
26 28 #ifndef _ASM_MMU_H
27 29 #define _ASM_MMU_H
28 30
29 31 #include <sys/ccompile.h>
30 32 #include <sys/types.h>
31 33
32 34 #ifdef __cplusplus
33 35 extern "C" {
34 36 #endif
35 37
36 -#if defined(__GNUC__) && !defined(__xpv)
38 +#if defined(__GNUC__)
37 39
38 -#if defined(__amd64)
40 +#if !defined(__xpv)
39 41
40 42 extern __GNU_INLINE ulong_t
41 43 getcr3(void)
42 44 {
43 45 uint64_t value;
44 46
45 47 __asm__ __volatile__(
46 48 "movq %%cr3, %0"
47 49 : "=r" (value));
48 50 return (value);
49 51 }
50 52
51 53 extern __GNU_INLINE void
52 54 setcr3(ulong_t value)
53 55 {
54 56 __asm__ __volatile__(
55 57 "movq %0, %%cr3"
56 58 : /* no output */
57 59 : "r" (value));
58 60 }
59 61
60 -extern __GNU_INLINE void
61 -reload_cr3(void)
62 -{
63 - setcr3(getcr3());
64 -}
65 -
66 -#elif defined(__i386)
67 -
68 62 extern __GNU_INLINE ulong_t
69 -getcr3(void)
63 +getcr4(void)
70 64 {
71 - uint32_t value;
65 + uint64_t value;
72 66
73 67 __asm__ __volatile__(
74 - "movl %%cr3, %0"
68 + "movq %%cr4, %0"
75 69 : "=r" (value));
76 70 return (value);
77 71 }
78 72
79 73 extern __GNU_INLINE void
80 -setcr3(ulong_t value)
74 +setcr4(ulong_t value)
81 75 {
82 76 __asm__ __volatile__(
83 - "movl %0, %%cr3"
77 + "movq %0, %%cr4"
84 78 : /* no output */
85 79 : "r" (value));
86 80 }
87 81
88 82 extern __GNU_INLINE void
89 83 reload_cr3(void)
90 84 {
91 85 setcr3(getcr3());
92 86 }
93 87
94 -#endif
88 +/*
89 + * We clobber memory: we're not writing anything, but we don't want to
90 + * potentially get re-ordered beyond the TLB flush.
91 + */
92 +extern __GNU_INLINE void
93 +invpcid_insn(uint64_t type, uint64_t pcid, uintptr_t addr)
94 +{
95 + uint64_t pcid_desc[2] = { pcid, addr };
96 + __asm__ __volatile__(
97 + "invpcid %0, %1"
98 + : /* no output */
99 + : "m" (*pcid_desc), "r" (type)
100 + : "memory");
101 +}
95 102
96 -#endif /* __GNUC__ && !__xpv */
103 +#endif /* !__xpv */
97 104
105 +extern __GNU_INLINE void
106 +mmu_invlpg(caddr_t addr)
107 +{
108 + __asm__ __volatile__(
109 + "invlpg %0"
110 + : "=m" (*addr)
111 + : "m" (*addr));
112 +}
113 +
114 +#endif /* __GNUC__ */
115 +
98 116 #ifdef __cplusplus
99 117 }
100 118 #endif
101 119
102 120 #endif /* _ASM_MMU_H */
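
For illustration only, a minimal sketch of how the new invpcid_insn() inline might be driven by a caller. The type encodings (0 through 3) follow the Intel SDM definition of INVPCID; the INVPCID_* macro names and the flush_user_va() helper below are hypothetical, not identifiers introduced by this change.

#include <sys/types.h>
#include <asm/mmu.h>

/*
 * Hypothetical constants for this sketch; the numeric values are the
 * architectural INVPCID types from the Intel SDM.
 */
#define	INVPCID_ADDR		0	/* one address within one PCID */
#define	INVPCID_ID		1	/* all entries for one PCID */
#define	INVPCID_ALL_GLOBAL	2	/* everything, including global pages */
#define	INVPCID_ALL_NONGLOBAL	3	/* everything except global pages */

static void
flush_user_va(uint64_t pcid, caddr_t va)
{
	/*
	 * Drop the single translation for `va' tagged with `pcid'.  The
	 * "memory" clobber in invpcid_insn() keeps surrounding memory
	 * accesses from being reordered past the TLB flush.
	 */
	invpcid_insn(INVPCID_ADDR, pcid, (uintptr_t)va);
}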