497 static uchar_t tlsinstr_ld_le[] = {
498 /*
499 * .byte 0x66
500 */
501 0x66,
502 /*
503 * .byte 0x66
504 */
505 0x66,
506 /*
507 * .byte 0x66
508 */
509 0x66,
510 /*
511 * movq %fs:0, %rax
512 */
513 0x64, 0x48, 0x8b, 0x04, 0x25,
514 0x00, 0x00, 0x00, 0x00
515 };
516
517
518 static Fixupret
519 tls_fixups(Ofl_desc *ofl, Rel_desc *arsp)
520 {
521 Sym_desc *sdp = arsp->rel_sym;
522 Word rtype = arsp->rel_rtype;
523 uchar_t *offset;
524
525 offset = (uchar_t *)((uintptr_t)arsp->rel_roffset +
526 (uintptr_t)_elf_getxoff(arsp->rel_isdesc->is_indata) +
527 (uintptr_t)RELAUX_GET_OSDESC(arsp)->os_outdata->d_buf);
528
529 if (sdp->sd_ref == REF_DYN_NEED) {
530 /*
531 * IE reference model
532 */
533 switch (rtype) {
534 case R_AMD64_TLSGD:
535 /*
536 * GD -> IE
537 *
538 * Transition:
539 * 0x00 .byte 0x66
540 * 0x01 leaq x@tlsgd(%rip), %rdi
541 * 0x08 .word 0x6666
542 * 0x0a rex64
543 * 0x0b call __tls_get_addr@plt
544 * 0x10
545 * To:
546 * 0x00 movq %fs:0, %rax
547 * 0x09 addq x@gottpoff(%rip), %rax
548 * 0x10
588 * 0x0b call __tls_get_addr@plt
589 * 0x10
590 * To:
591 * 0x00 movq %fs:0, %rax
592 * 0x09 leaq x@tpoff(%rax), %rax
593 * 0x10
594 */
595 DBG_CALL(Dbg_reloc_transition(ofl->ofl_lml, M_MACH,
596 R_AMD64_TPOFF32, arsp, ld_reloc_sym_name));
597 arsp->rel_rtype = R_AMD64_TPOFF32;
598 arsp->rel_roffset += 8;
599 arsp->rel_raddend = 0;
600
601 /*
602 * Adjust 'offset' to beginning of instruction sequence.
603 */
604 offset -= 4;
605 (void) memcpy(offset, tlsinstr_gd_le, sizeof (tlsinstr_gd_le));
606 return (FIX_RELOC);
607
608 case R_AMD64_GOTTPOFF:
609 /*
610 * IE -> LE
611 *
612 * Transition:
613 * 0x00 movq %fs:0, %rax
614 * 0x09 addq x@gottopoff(%rip), %rax
615 * 0x10
616 * To:
617 * 0x00 movq %fs:0, %rax
618 * 0x09 leaq x@tpoff(%rax), %rax
619 * 0x10
620 */
621 DBG_CALL(Dbg_reloc_transition(ofl->ofl_lml, M_MACH,
622 R_AMD64_TPOFF32, arsp, ld_reloc_sym_name));
623 arsp->rel_rtype = R_AMD64_TPOFF32;
624 arsp->rel_raddend = 0;
625
626 /*
627 * Adjust 'offset' to beginning of instruction sequence.
628 */
629 offset -= 12;
630
631 /*
632 * Same code sequence used in the GD -> LE transition.
633 */
634 (void) memcpy(offset, tlsinstr_gd_le, sizeof (tlsinstr_gd_le));
635 return (FIX_RELOC);
636
637 case R_AMD64_TLSLD:
638 /*
639 * LD -> LE
640 *
641 * Transition
642 * 0x00 leaq x1@tlsgd(%rip), %rdi
643 * 0x07 call __tls_get_addr@plt
644 * 0x0c
645 * To:
646 * 0x00 .byte 0x66
647 * 0x01 .byte 0x66
648 * 0x02 .byte 0x66
649 * 0x03 movq %fs:0, %rax
650 */
651 DBG_CALL(Dbg_reloc_transition(ofl->ofl_lml, M_MACH,
652 R_AMD64_NONE, arsp, ld_reloc_sym_name));
653 offset -= 3;
654 (void) memcpy(offset, tlsinstr_ld_le, sizeof (tlsinstr_ld_le));
655 return (FIX_DONE);
656
|
497 static uchar_t tlsinstr_ld_le[] = {
498 /*
499 * .byte 0x66
500 */
501 0x66,
502 /*
503 * .byte 0x66
504 */
505 0x66,
506 /*
507 * .byte 0x66
508 */
509 0x66,
510 /*
511 * movq %fs:0, %rax
512 */
513 0x64, 0x48, 0x8b, 0x04, 0x25,
514 0x00, 0x00, 0x00, 0x00
515 };
516
/*
 * amd64 REX prefix bits and the opcode bytes the TLS transition code
 * below recognizes and emits when rewriting instruction sequences.
 */
#define	REX_B		0x1	/* extends ModRM.rm / SIB.base */
#define	REX_X		0x2	/* extends SIB.index */
#define	REX_R		0x4	/* extends ModRM.reg */
#define	REX_W		0x8	/* 64-bit operand size */
#define	REX_PREFIX	0x40	/* fixed high nibble of every REX byte */

#define	REX_RW		(REX_PREFIX | REX_R | REX_W)
#define	REX_BW		(REX_PREFIX | REX_B | REX_W)
#define	REX_BRW		(REX_PREFIX | REX_B | REX_R | REX_W)

#define	REG_ESP		0x4	/* %rsp/%r12 encoding; needs a SIB byte */

#define	INSN_ADDMR	0x03	/* addq mem,reg */
#define	INSN_ADDIR	0x81	/* addq imm,reg */
#define	INSN_MOVMR	0x8b	/* movq mem,reg */
#define	INSN_MOVIR	0xc7	/* movq imm,reg */
#define	INSN_LEA	0x8d	/* leaq mem,reg */
534
535 static Fixupret
536 tls_fixups(Ofl_desc *ofl, Rel_desc *arsp)
537 {
538 Sym_desc *sdp = arsp->rel_sym;
539 Word rtype = arsp->rel_rtype;
540 uchar_t *offset;
541
542 offset = (uchar_t *)((uintptr_t)arsp->rel_roffset +
543 (uintptr_t)_elf_getxoff(arsp->rel_isdesc->is_indata) +
544 (uintptr_t)RELAUX_GET_OSDESC(arsp)->os_outdata->d_buf);
545
546 /*
547 * Note that in certain of the original insn sequences below, the
548 * instructions are not necessarily adjacent
549 */
550 if (sdp->sd_ref == REF_DYN_NEED) {
551 /*
552 * IE reference model
553 */
554 switch (rtype) {
555 case R_AMD64_TLSGD:
556 /*
557 * GD -> IE
558 *
559 * Transition:
560 * 0x00 .byte 0x66
561 * 0x01 leaq x@tlsgd(%rip), %rdi
562 * 0x08 .word 0x6666
563 * 0x0a rex64
564 * 0x0b call __tls_get_addr@plt
565 * 0x10
566 * To:
567 * 0x00 movq %fs:0, %rax
568 * 0x09 addq x@gottpoff(%rip), %rax
569 * 0x10
609 * 0x0b call __tls_get_addr@plt
610 * 0x10
611 * To:
612 * 0x00 movq %fs:0, %rax
613 * 0x09 leaq x@tpoff(%rax), %rax
614 * 0x10
615 */
616 DBG_CALL(Dbg_reloc_transition(ofl->ofl_lml, M_MACH,
617 R_AMD64_TPOFF32, arsp, ld_reloc_sym_name));
618 arsp->rel_rtype = R_AMD64_TPOFF32;
619 arsp->rel_roffset += 8;
620 arsp->rel_raddend = 0;
621
622 /*
623 * Adjust 'offset' to beginning of instruction sequence.
624 */
625 offset -= 4;
626 (void) memcpy(offset, tlsinstr_gd_le, sizeof (tlsinstr_gd_le));
627 return (FIX_RELOC);
628
629 case R_AMD64_GOTTPOFF: {
630 /*
631 * IE -> LE
632 *
633 * Transition 1:
634 * movq %fs:0, %reg
635 * addq x@gottpoff(%rip), %reg
636 * To:
637 * movq %fs:0, %reg
638 * leaq x@tpoff(%reg), %reg
639 *
640 * Transition (as a special case):
641 * movq %fs:0, %r12/%rsp
642 * addq x@gottpoff(%rip), %r12/%rsp
643 * To:
644 * movq %fs:0, %r12/%rsp
645 * addq x@tpoff(%rax), %r12/%rsp
646 *
647 * Transition 2:
648 * movq x@gottpoff(%rip), %reg
649 * movq %fs:(%reg), %reg
650 * To:
651 * movq x@tpoff(%reg), %reg
652 * movq %fs:(%reg), %reg
653 */
654 Conv_inv_buf_t inv_buf;
655 uint8_t reg; /* Register */
656
657 offset -= 3;
658
659 reg = offset[2] >> 3; /* Encoded dest. reg. operand */
660
661 DBG_CALL(Dbg_reloc_transition(ofl->ofl_lml, M_MACH,
662 R_AMD64_TPOFF32, arsp, ld_reloc_sym_name));
663 arsp->rel_rtype = R_AMD64_TPOFF32;
664 arsp->rel_raddend = 0;
665
666 /*
667 * This is transition 2, and the special case of form 1 where
668 * a normal transition would index %rsp or %r12 and need a SIB
669 * byte in the leaq for which we lack space
670 */
671 if ((offset[1] == INSN_MOVMR) ||
672 ((offset[1] == INSN_ADDMR) && (reg == REG_ESP))) {
673 /*
674 * If we needed an extra bit of MOD.reg to refer to
675 * this register as the dest of the original movq we
676 * need an extra bit of MOD.rm to refer to it in the
677 * dest of the replacement movq or addq.
678 */
679 if (offset[0] == REX_RW)
680 offset[0] = REX_BW;
681
682 offset[1] = (offset[1] == INSN_MOVMR) ?
683 INSN_MOVIR : INSN_ADDIR;
684 offset[2] = 0xc0 | reg;
685
686 return (FIX_RELOC);
687 } else if (offset[1] == INSN_ADDMR) {
688 /*
689 * If we needed an extra bit of MOD.reg to refer to
690 * this register in the dest of the addq we need an
691 * extra bit of both MOD.reg and MOD.rm to refer to it
692 * in the source and dest of the leaq
693 */
694 if (offset[0] == REX_RW)
695 offset[0] = REX_BRW;
696
697 offset[1] = INSN_LEA;
698 offset[2] = 0x80 | (reg << 3) | reg;
699
700 return (FIX_RELOC);
701 }
702
703 ld_eprintf(ofl, ERR_FATAL, MSG_INTL(MSG_REL_BADTLSINS),
704 conv_reloc_amd64_type(arsp->rel_rtype, 0, &inv_buf),
705 arsp->rel_isdesc->is_file->ifl_name,
706 ld_reloc_sym_name(arsp),
707 arsp->rel_isdesc->is_name,
708 EC_OFF(arsp->rel_roffset));
709 return (FIX_ERROR);
710 }
711 case R_AMD64_TLSLD:
712 /*
713 * LD -> LE
714 *
715 * Transition
716 * 0x00 leaq x1@tlsgd(%rip), %rdi
717 * 0x07 call __tls_get_addr@plt
718 * 0x0c
719 * To:
720 * 0x00 .byte 0x66
721 * 0x01 .byte 0x66
722 * 0x02 .byte 0x66
723 * 0x03 movq %fs:0, %rax
724 */
725 DBG_CALL(Dbg_reloc_transition(ofl->ofl_lml, M_MACH,
726 R_AMD64_NONE, arsp, ld_reloc_sym_name));
727 offset -= 3;
728 (void) memcpy(offset, tlsinstr_ld_le, sizeof (tlsinstr_ld_le));
729 return (FIX_DONE);
730
|