571 nop
572 .mutex_enter_lockstat_6323525_patch_point: /* new patch point if lfence */
573 nop
574 #else /* OPTERON_WORKAROUND_6323525 */
575 ret
576 #endif /* OPTERON_WORKAROUND_6323525 */
577 movq %rdi, %rsi
578 movl $LS_MUTEX_ENTER_ACQUIRE, %edi
579 /*
580 * expects %rdx=thread, %rsi=lock, %edi=lockstat event
581 */
/*
 * lockstat_wrapper -- common tail that fires a DTrace lockstat probe
 * after a successful lock acquisition, then returns 1 (so callers that
 * reach this point via a tryenter fast path report success).
 *
 * Expects: %rdx = curthread, %rsi = lock, %edi = lockstat event id
 * (per the register-contract comment immediately above this entry in
 * the surrounding file).
 *
 * t_lockstat is incremented around the probe call -- presumably to
 * keep lockstat from recursing on locks taken inside the probe
 * machinery; confirm against the lockstat provider sources.
 *
 * NOTE(review): "call *lockstat_probe" is a plain (unhardened)
 * indirect call; a later revision of this same routine, visible
 * further down in this listing, replaces it with INDIRECT_CALL_REG,
 * apparently the Spectre-v2/retpoline-safe dispatch form. Confirm
 * which variant this tree should carry.
 */
582 ALTENTRY(lockstat_wrapper)
583 incb T_LOCKSTAT(%rdx) /* curthread->t_lockstat++ */
584 leaq lockstat_probemap(%rip), %rax /* rax = &lockstat_probemap[0] */
585 movl (%rax, %rdi, DTRACE_IDSIZE), %eax /* eax = probemap[event id] */
586 testl %eax, %eax /* check for non-zero probe */
587 jz 1f /* probe disabled -- skip the call */
588 pushq %rbp /* align stack properly */
589 movq %rsp, %rbp
590 movl %eax, %edi /* arg0 = probe id (event id no longer needed) */
591 call *lockstat_probe /* (*lockstat_probe)(probe_id, lock, ...) */
592 leave /* unwind stack */
593 1:
594 movq %gs:CPU_THREAD, %rdx /* reload thread ptr */
595 decb T_LOCKSTAT(%rdx) /* curthread->t_lockstat-- */
596 movl $1, %eax /* return success if tryenter */
597 ret
598 SET_SIZE(lockstat_wrapper)
599 SET_SIZE(mutex_enter)
600
601 /*
602 * expects %rcx=thread, %rdx=arg, %rsi=lock, %edi=lockstat event
603 */
/*
 * lockstat_wrapper_arg -- like lockstat_wrapper, but for lockstat
 * events that carry an extra probe argument.
 *
 * Expects: %rcx = curthread, %rdx = arg, %rsi = lock,
 *          %edi = lockstat event id (per the comment above).
 * Note the thread pointer arrives in %rcx here -- not %rdx as in
 * lockstat_wrapper -- because %rdx is occupied by the probe argument.
 *
 * Returns 1 in %eax (success indication for tryenter-style callers).
 *
 * NOTE(review): "call *lockstat_probe" is a plain indirect call; a
 * later revision visible further down in this listing uses
 * INDIRECT_CALL_REG instead (apparently the retpoline-safe form).
 * Confirm which variant this tree should carry.
 */
604 ENTRY(lockstat_wrapper_arg)
605 incb T_LOCKSTAT(%rcx) /* curthread->t_lockstat++ */
606 leaq lockstat_probemap(%rip), %rax /* rax = &lockstat_probemap[0] */
607 movl (%rax, %rdi, DTRACE_IDSIZE), %eax /* eax = probemap[event id] */
608 testl %eax, %eax /* check for non-zero probe */
609 jz 1f /* probe disabled -- skip the call */
610 pushq %rbp /* align stack properly */
611 movq %rsp, %rbp
612 movl %eax, %edi /* arg0 = probe id (event id no longer needed) */
613 call *lockstat_probe /* (*lockstat_probe)(probe_id, lock, arg, ...) */
614 leave /* unwind stack */
615 1:
616 movq %gs:CPU_THREAD, %rdx /* reload thread ptr */
617 decb T_LOCKSTAT(%rdx) /* curthread->t_lockstat-- */
618 movl $1, %eax /* return success if tryenter */
619 ret
620 SET_SIZE(lockstat_wrapper_arg)
621
622
623 ENTRY(mutex_tryenter)
624 movq %gs:CPU_THREAD, %rdx /* rdx = thread ptr */
625 xorl %eax, %eax /* rax = 0 (unheld adaptive) */
626 lock
627 cmpxchgq %rdx, (%rdi)
628 jnz mutex_vector_tryenter
629 not %eax /* return success (nonzero) */
630 #if defined(OPTERON_WORKAROUND_6323525)
631 .mutex_tryenter_lockstat_patch_point:
632 .mutex_tryenter_6323525_patch_point:
633 ret /* nop space for lfence */
|
571 nop
572 .mutex_enter_lockstat_6323525_patch_point: /* new patch point if lfence */
573 nop
574 #else /* OPTERON_WORKAROUND_6323525 */
575 ret
576 #endif /* OPTERON_WORKAROUND_6323525 */
577 movq %rdi, %rsi
578 movl $LS_MUTEX_ENTER_ACQUIRE, %edi
579 /*
580 * expects %rdx=thread, %rsi=lock, %edi=lockstat event
581 */
/*
 * lockstat_wrapper -- common tail that fires a DTrace lockstat probe
 * after a successful lock acquisition, then returns 1 (so callers that
 * reach this point via a tryenter fast path report success).
 *
 * Expects: %rdx = curthread, %rsi = lock, %edi = lockstat event id
 * (per the register-contract comment immediately above this entry in
 * the surrounding file).
 *
 * t_lockstat is incremented around the probe call -- presumably to
 * keep lockstat from recursing on locks taken inside the probe
 * machinery; confirm against the lockstat provider sources.
 *
 * This revision loads the lockstat_probe function pointer into %rax
 * and dispatches through INDIRECT_CALL_REG -- presumably the
 * retpoline/Spectre-v2-safe indirect-call macro; confirm its
 * definition in asm_linkage.h.
 */
582 ALTENTRY(lockstat_wrapper)
583 incb T_LOCKSTAT(%rdx) /* curthread->t_lockstat++ */
584 leaq lockstat_probemap(%rip), %rax /* rax = &lockstat_probemap[0] */
585 movl (%rax, %rdi, DTRACE_IDSIZE), %eax /* eax = probemap[event id] */
586 testl %eax, %eax /* check for non-zero probe */
587 jz 1f /* probe disabled -- skip the call */
588 pushq %rbp /* align stack properly */
589 movq %rsp, %rbp
590 movl %eax, %edi /* arg0 = probe id (event id no longer needed) */
591 movq lockstat_probe, %rax /* rax = lockstat_probe (function pointer) */
592 INDIRECT_CALL_REG(rax) /* (*lockstat_probe)(probe_id, lock, ...) */
593 leave /* unwind stack */
594 1:
595 movq %gs:CPU_THREAD, %rdx /* reload thread ptr */
596 decb T_LOCKSTAT(%rdx) /* curthread->t_lockstat-- */
597 movl $1, %eax /* return success if tryenter */
598 ret
599 SET_SIZE(lockstat_wrapper)
600 SET_SIZE(mutex_enter)
601
602 /*
603 * expects %rcx=thread, %rdx=arg, %rsi=lock, %edi=lockstat event
604 */
/*
 * lockstat_wrapper_arg -- like lockstat_wrapper, but for lockstat
 * events that carry an extra probe argument.
 *
 * Expects: %rcx = curthread, %rdx = arg, %rsi = lock,
 *          %edi = lockstat event id (per the comment above).
 * Note the thread pointer arrives in %rcx here -- not %rdx as in
 * lockstat_wrapper -- because %rdx is occupied by the probe argument.
 *
 * Returns 1 in %eax (success indication for tryenter-style callers).
 *
 * This revision loads the lockstat_probe function pointer into %rax
 * and dispatches through INDIRECT_CALL_REG -- presumably the
 * retpoline/Spectre-v2-safe indirect-call macro; confirm its
 * definition in asm_linkage.h.
 */
605 ENTRY(lockstat_wrapper_arg)
606 incb T_LOCKSTAT(%rcx) /* curthread->t_lockstat++ */
607 leaq lockstat_probemap(%rip), %rax /* rax = &lockstat_probemap[0] */
608 movl (%rax, %rdi, DTRACE_IDSIZE), %eax /* eax = probemap[event id] */
609 testl %eax, %eax /* check for non-zero probe */
610 jz 1f /* probe disabled -- skip the call */
611 pushq %rbp /* align stack properly */
612 movq %rsp, %rbp
613 movl %eax, %edi /* arg0 = probe id (event id no longer needed) */
614 movq lockstat_probe, %rax /* rax = lockstat_probe (function pointer) */
615 INDIRECT_CALL_REG(rax) /* (*lockstat_probe)(probe_id, lock, arg, ...) */
616 leave /* unwind stack */
617 1:
618 movq %gs:CPU_THREAD, %rdx /* reload thread ptr */
619 decb T_LOCKSTAT(%rdx) /* curthread->t_lockstat-- */
620 movl $1, %eax /* return success if tryenter */
621 ret
622 SET_SIZE(lockstat_wrapper_arg)
623
624
625 ENTRY(mutex_tryenter)
626 movq %gs:CPU_THREAD, %rdx /* rdx = thread ptr */
627 xorl %eax, %eax /* rax = 0 (unheld adaptive) */
628 lock
629 cmpxchgq %rdx, (%rdi)
630 jnz mutex_vector_tryenter
631 not %eax /* return success (nonzero) */
632 #if defined(OPTERON_WORKAROUND_6323525)
633 .mutex_tryenter_lockstat_patch_point:
634 .mutex_tryenter_6323525_patch_point:
635 ret /* nop space for lfence */
|