/*
 * CDDL HEADER START
 *
 * The contents of this file are subject to the terms of the
 * Common Development and Distribution License (the "License").
 * You may not use this file except in compliance with the License.
 *
 * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
 * or http://www.opensolaris.org/os/licensing.
 * See the License for the specific language governing permissions
 * and limitations under the License.
 *
 * When distributing Covered Code, include this CDDL HEADER in each
 * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
 * If applicable, add the following below this CDDL HEADER, with the
 * fields enclosed by brackets "[]" replaced with your own identifying
 * information: Portions Copyright [yyyy] [name of copyright owner]
 *
 * CDDL HEADER END
 */
/*
 * Copyright 2007 Sun Microsystems, Inc. All rights reserved.
 * Use is subject to license terms.
 */
/*
 * Copyright 2015 Joyent, Inc.
 */

#include <sys/asm_linkage.h>
#include <sys/regset.h>

#if defined(lint)
#include <sys/dtrace_impl.h>
#else
#include "assym.h"
#endif

/*
 * dtrace_getfp(): return the current frame pointer (%rbp/%ebp) as a
 * greg_t.  The lint stub below documents the C-visible prototype; the
 * real implementations are the one-instruction assembly routines.
 */
#if defined(lint) || defined(__lint)

greg_t
dtrace_getfp(void)
{ return (0); }

#else	/* lint */

#if defined(__amd64)

	ENTRY_NP(dtrace_getfp)
	movq	%rbp, %rax		/* return value = frame pointer */
	ret
	SET_SIZE(dtrace_getfp)

#elif defined(__i386)

	ENTRY_NP(dtrace_getfp)
	movl	%ebp, %eax		/ return value = frame pointer
	ret
	SET_SIZE(dtrace_getfp)

#endif	/* __i386 */
#endif	/* lint */


/*
 * dtrace_getvmreg(): read a VMCS field via the VMREAD instruction
 * (amd64 only).  NOTE(review): VMREAD faults unless the CPU is in VMX
 * operation with a current VMCS -- presumably the caller guarantees
 * that; confirm at the call sites.  On i386 this is not supported, so
 * the routine only sets CPU_DTRACE_ILLOP in the caller's flags word.
 */
#if defined(lint) || defined(__lint)

/*ARGSUSED*/
uint64_t
dtrace_getvmreg(uint32_t reg, volatile uint16_t *flags)
{ return (0); }

#else	/* lint */

#if defined(__amd64)

	ENTRY_NP(dtrace_getvmreg)

	movq	%rdi, %rdx		/* field encoding (arg0) into %rdx */
	vmread	%rdx, %rax		/* %rax = VMCS field; also return value */
	ret

	SET_SIZE(dtrace_getvmreg)

#elif defined(__i386)

	ENTRY_NP(dtrace_getvmreg)
	pushl	%ebp			/ Setup stack frame
	movl	%esp, %ebp

	movl	12(%ebp), %eax		/ Load flag pointer
	movw	(%eax), %cx		/ Load flags
	orw	$CPU_DTRACE_ILLOP, %cx	/ Set ILLOP
	movw	%cx, (%eax)		/ Store flags

	leave
	ret
	SET_SIZE(dtrace_getvmreg)

#endif	/* __i386 */
#endif	/* lint */


/*
 * dtrace_cas32()/dtrace_casptr(): atomic compare-and-swap.  If *target
 * equals cmp, store new into *target; in all cases return the value
 * that was originally in *target (the lint stubs spell out this
 * contract, which is exactly what LOCK CMPXCHG implements).
 */
#if defined(lint) || defined(__lint)

uint32_t
dtrace_cas32(uint32_t *target, uint32_t cmp, uint32_t new)
{
	uint32_t old;

	if ((old = *target) == cmp)
		*target = new;
	return (old);
}

void *
dtrace_casptr(void *target, void *cmp, void *new)
{
	void *old;

	if ((old = *(void **)target) == cmp)
		*(void **)target = new;
	return (old);
}

#else	/* lint */

#if defined(__amd64)

	ENTRY(dtrace_cas32)
	movl	%esi, %eax		/* CMPXCHG compares against %eax (cmp) */
	lock
	cmpxchgl %edx, (%rdi)		/* if (*target == cmp) *target = new */
	ret				/* old value returned in %eax */
	SET_SIZE(dtrace_cas32)

	ENTRY(dtrace_casptr)
	movq	%rsi, %rax		/* CMPXCHG compares against %rax (cmp) */
	lock
	cmpxchgq %rdx, (%rdi)		/* if (*target == cmp) *target = new */
	ret				/* old value returned in %rax */
	SET_SIZE(dtrace_casptr)

#elif defined(__i386)

	/*
	 * On i386 a pointer and a uint32_t are the same width, so one
	 * routine serves both entry points (see ALTENTRY below).
	 */
	ENTRY(dtrace_cas32)
	ALTENTRY(dtrace_casptr)
	movl	4(%esp), %edx		/ target
	movl	8(%esp), %eax		/ cmp (CMPXCHG compares against %eax)
	movl	12(%esp), %ecx		/ new
	lock
	cmpxchgl %ecx, (%edx)		/ if (*target == cmp) *target = new
	ret				/ old value returned in %eax
	SET_SIZE(dtrace_casptr)
	SET_SIZE(dtrace_cas32)

#endif	/* __i386 */
#endif	/* lint */

/*
 * dtrace_caller(): always returns -1 here.  NOTE(review): presumably
 * -1 is the "caller not easily determined" sentinel that makes the
 * consumer fall back to a slower stack walk; confirm against the
 * platform-independent DTrace code.
 */
#if defined(lint)

/*ARGSUSED*/
uintptr_t
dtrace_caller(int aframes)
{
	return (0);
}

#else	/* lint */

#if defined(__amd64)
	ENTRY(dtrace_caller)
	movq	$-1, %rax
	ret
	SET_SIZE(dtrace_caller)

#elif defined(__i386)

	ENTRY(dtrace_caller)
	movl	$-1, %eax
	ret
	SET_SIZE(dtrace_caller)

#endif	/* __i386 */
#endif	/* lint */

/*
 * dtrace_copy(): copy size bytes from src to dest with a byte-wise
 * REP MOVSB.  The amd64 version brackets the copy with
 * smap_disable()/smap_enable() -- NOTE(review): presumably these
 * toggle SMAP (CLAC/STAC) to permit user-address access; they are
 * defined elsewhere, so confirm they preserve the registers used here.
 */
#if defined(lint)

/*ARGSUSED*/
void
dtrace_copy(uintptr_t src, uintptr_t dest, size_t size)
{}

#else

#if defined(__amd64)

	ENTRY(dtrace_copy)
	pushq	%rbp
	call	smap_disable
	movq	%rsp, %rbp		/* %rsp still points at saved %rbp */

	xchgq	%rdi, %rsi		/* make %rsi source, %rdi dest */
	movq	%rdx, %rcx		/* load count */
	repz				/* repeat for count ... */
	smovb				/*   move from %ds:rsi to %ed:rdi */
	call	smap_enable
	leave
	ret
	SET_SIZE(dtrace_copy)

#elif defined(__i386)

	ENTRY(dtrace_copy)
	pushl	%ebp			/ Setup stack frame
	movl	%esp, %ebp
	pushl	%esi			/ Save callee-saved registers
	pushl	%edi

	movl	8(%ebp), %esi		/ Load source address
	movl	12(%ebp), %edi		/ Load destination address
	movl	16(%ebp), %ecx		/ Load count
	repz				/ Repeat for count...
	smovb				/   move from %ds:si to %es:di

	popl	%edi			/ Restore registers
	popl	%esi
	movl	%ebp, %esp
	popl	%ebp
	ret
	SET_SIZE(dtrace_copy)

#endif	/* __i386 */
#endif

/*
 * dtrace_copystr(): copy a NUL-terminated string of at most size bytes
 * from uaddr to kaddr.  The copy stops early on NUL, and every time
 * the remaining count crosses a 4K boundary the routine re-checks the
 * caller's DTrace flags for CPU_DTRACE_BADADDR so a fault noticed by
 * the trap handler terminates the copy promptly.
 */
#if defined(lint)

/*ARGSUSED*/
void
dtrace_copystr(uintptr_t uaddr, uintptr_t kaddr, size_t size,
    volatile uint16_t *flags)
{}

#else

#if defined(__amd64)

	ENTRY(dtrace_copystr)
	pushq	%rbp
	movq	%rsp, %rbp
	call	smap_disable		/* allow user accesses (see dtrace_copy) */
0:
	movb	(%rdi), %al		/* load from source */
	movb	%al, (%rsi)		/* store to destination */
	addq	$1, %rdi		/* increment source pointer */
	addq	$1, %rsi		/* increment destination pointer */
	subq	$1, %rdx		/* decrement remaining count */
	cmpb	$0, %al			/* copied the terminating NUL? */
	je	2f
	testq	$0xfff, %rdx		/* test if count is 4k-aligned */
	jnz	1f			/* if not, continue with copying */
	testq	$CPU_DTRACE_BADADDR, (%rcx) /* load and test dtrace flags */
	jnz	2f			/* fault seen: abandon the copy */
1:
	cmpq	$0, %rdx		/* any bytes left? */
	jne	0b
2:
	call	smap_enable
	leave
	ret

	SET_SIZE(dtrace_copystr)

#elif defined(__i386)

	ENTRY(dtrace_copystr)

	pushl	%ebp			/ Setup stack frame
	movl	%esp, %ebp
	pushl	%ebx			/ Save registers

	movl	8(%ebp), %ebx		/ Load source address
	movl	12(%ebp), %edx		/ Load destination address
	movl	16(%ebp), %ecx		/ Load count

0:
	movb	(%ebx), %al		/ Load from source
	movb	%al, (%edx)		/ Store to destination
	incl	%ebx			/ Increment source pointer
	incl	%edx			/ Increment destination pointer
	decl	%ecx			/ Decrement remaining count
	cmpb	$0, %al			/ Copied the terminating NUL?
	je	2f
	testl	$0xfff, %ecx		/ Check if count is 4k-aligned
	jnz	1f
	movl	20(%ebp), %eax		/ load flags pointer
	testl	$CPU_DTRACE_BADADDR, (%eax) / load and test dtrace flags
	jnz	2f			/ fault seen: abandon the copy
1:
	cmpl	$0, %ecx		/ Any bytes left?
	jne	0b

2:
	popl	%ebx			/ Restore registers
	movl	%ebp, %esp
	popl	%ebp
	ret

	SET_SIZE(dtrace_copystr)

#endif	/* __i386 */
#endif

/*
 * dtrace_fulword(): fetch a natural-width word from addr with no
 * validity checking (the amd64 version only brackets the load with
 * smap_disable()/smap_enable() to permit the user access).
 */
#if defined(lint)

/*ARGSUSED*/
uintptr_t
dtrace_fulword(void *addr)
{ return (0); }

#else
#if defined(__amd64)

	ENTRY(dtrace_fulword)
	call	smap_disable		/* allow user accesses */
	movq	(%rdi), %rax		/* return value = *addr */
	call	smap_enable
	ret
	SET_SIZE(dtrace_fulword)

#elif defined(__i386)

	ENTRY(dtrace_fulword)
	movl	4(%esp), %ecx		/ addr
	xorl	%eax, %eax
	movl	(%ecx), %eax		/ return value = *addr
	ret
	SET_SIZE(dtrace_fulword)

#endif	/* __i386 */
#endif

/*
 * dtrace_fuword8_nocheck(): fetch a zero-extended byte from addr with
 * no validity checking.
 */
#if defined(lint)

/*ARGSUSED*/
uint8_t
dtrace_fuword8_nocheck(void *addr)
{ return (0); }

#else
#if defined(__amd64)

	ENTRY(dtrace_fuword8_nocheck)
	call	smap_disable		/* allow user accesses */
	xorq	%rax, %rax		/* zero-extend the byte result */
	movb	(%rdi), %al
	call	smap_enable
	ret
	SET_SIZE(dtrace_fuword8_nocheck)

#elif defined(__i386)

	ENTRY(dtrace_fuword8_nocheck)
	movl	4(%esp), %ecx		/ addr
	xorl	%eax, %eax
	movzbl	(%ecx), %eax		/ zero-extended byte load
	ret
	SET_SIZE(dtrace_fuword8_nocheck)

#endif	/* __i386 */
#endif

/*
 * dtrace_fuword16_nocheck(): fetch a zero-extended 16-bit word from
 * addr with no validity checking.
 */
#if defined(lint)

/*ARGSUSED*/
uint16_t
dtrace_fuword16_nocheck(void *addr)
{ return (0); }

#else
#if defined(__amd64)

	ENTRY(dtrace_fuword16_nocheck)
	call	smap_disable		/* allow user accesses */
	xorq	%rax, %rax		/* zero-extend the 16-bit result */
	movw	(%rdi), %ax
	call	smap_enable
	ret
	SET_SIZE(dtrace_fuword16_nocheck)

#elif defined(__i386)

	ENTRY(dtrace_fuword16_nocheck)
	movl	4(%esp), %ecx		/ addr
	xorl	%eax, %eax
	movzwl	(%ecx), %eax		/ zero-extended 16-bit load
	ret
	SET_SIZE(dtrace_fuword16_nocheck)

#endif	/* __i386 */
#endif

/*
 * dtrace_fuword32_nocheck(): fetch a zero-extended 32-bit word from
 * addr with no validity checking.
 */
#if defined(lint)

/*ARGSUSED*/
uint32_t
dtrace_fuword32_nocheck(void *addr)
{ return (0); }

#else
#if defined(__amd64)

	ENTRY(dtrace_fuword32_nocheck)
	call	smap_disable		/* allow user accesses */
	xorq	%rax, %rax		/* writing %eax below also clears the top half */
	movl	(%rdi), %eax
	call	smap_enable
	ret
	SET_SIZE(dtrace_fuword32_nocheck)

#elif defined(__i386)

	ENTRY(dtrace_fuword32_nocheck)
	movl	4(%esp), %ecx		/ addr
	xorl	%eax, %eax
	movl	(%ecx), %eax		/ return value = *addr
	ret

	SET_SIZE(dtrace_fuword32_nocheck)

#endif	/* __i386 */
#endif

/*
 * dtrace_fuword64_nocheck(): fetch a 64-bit word from addr with no
 * validity checking.  On i386 the result is returned in the %edx:%eax
 * pair per the 32-bit calling convention.
 */
#if defined(lint)

/*ARGSUSED*/
uint64_t
dtrace_fuword64_nocheck(void *addr)
{ return (0); }

#else
#if defined(__amd64)

	ENTRY(dtrace_fuword64_nocheck)
	call	smap_disable		/* allow user accesses */
	movq	(%rdi), %rax		/* return value = *addr */
	call	smap_enable
	ret
	SET_SIZE(dtrace_fuword64_nocheck)

#elif defined(__i386)

	ENTRY(dtrace_fuword64_nocheck)
	movl	4(%esp), %ecx		/ addr
	xorl	%eax, %eax
	xorl	%edx, %edx
	movl	(%ecx), %eax		/ low 32 bits
	movl	4(%ecx), %edx		/ high 32 bits
	ret
	SET_SIZE(dtrace_fuword64_nocheck)

#endif	/* __i386 */
#endif

/*
 * dtrace_probe_error(): fire the DTrace ERROR probe.  This is a
 * tail-adapter for dtrace_probe(): it prepends dtrace_probeid_error as
 * the probe id and passes its own five remaining arguments through.
 */
#if defined(lint) || defined(__lint)

/*ARGSUSED*/
void
dtrace_probe_error(dtrace_state_t *state, dtrace_epid_t epid, int which,
    int fault, int fltoffs, uintptr_t illval)
{}

#else	/* lint */
#if defined(__amd64)

	ENTRY(dtrace_probe_error)
	pushq	%rbp
	movq	%rsp, %rbp
	/*
	 * Prepending the probe id uses up all six integer argument
	 * registers, so the sixth incoming argument (illval, in %r9)
	 * is spilled to the stack as dtrace_probe()'s seventh argument.
	 */
	subq	$0x8, %rsp
	movq	%r9, (%rsp)
	movq	%r8, %r9		/* shift each argument one slot right */
	movq	%rcx, %r8
	movq	%rdx, %rcx
	movq	%rsi, %rdx
	movq	%rdi, %rsi
	movl	dtrace_probeid_error(%rip), %edi /* arg0 = ERROR probe id */
	call	dtrace_probe
	addq	$0x8, %rsp
	leave
	ret
	SET_SIZE(dtrace_probe_error)

#elif defined(__i386)

	ENTRY(dtrace_probe_error)
	pushl	%ebp
	movl	%esp, %ebp
	pushl	0x1c(%ebp)		/ illval
	pushl	0x18(%ebp)		/ fltoffs
	pushl	0x14(%ebp)		/ fault
	pushl	0x10(%ebp)		/ which
	pushl	0xc(%ebp)		/ epid
	pushl	0x8(%ebp)		/ state
	pushl	dtrace_probeid_error	/ arg0 = ERROR probe id
	call	dtrace_probe
	movl	%ebp, %esp
	popl	%ebp
	ret
	SET_SIZE(dtrace_probe_error)

#endif	/* __i386 */
#endif