 */
for (idx = 0; idx < predef_seg_nelts; idx++, sgp++) {
        Phdr *phdr = &(sgp->sg_phdr);

#if defined(_ELF64)
        /* Ignore amd64 segment templates for non-amd64 targets */
        switch (sgp->sg_id) {
        case SGID_LRODATA:
        case SGID_LDATA:
                if ((ld_targ.t_m.m_mach != EM_AMD64))
                        continue;
        }
#endif
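        /* Loadable segments adopt the default segment alignment */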
        if (phdr->p_type == PT_LOAD)
                phdr->p_align = segalign;

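        /*
         * Save this segment descriptor on the output segment list;
         * aplist_append() returns NULL on allocation failure, which
         * is fatal to the link-edit.
         */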
        if ((aplist_append(&ofl->ofl_segs, sgp,
            AL_CNT_SEGMENTS)) == NULL)
                return (S_ERROR);

#ifndef NDEBUG                  /* assert() is enabled */
        /*
         * Enforce the segment name rule: Any segment that can
         * be referenced by an entrance descriptor must have
         * a name. Any segment that cannot, must have a NULL
         * name pointer.
         */
        switch (phdr->p_type) {
        case PT_LOAD:
        case PT_NOTE:
        case PT_NULL:
                assert(sgp->sg_name != NULL);
                break;
        default:
                assert(sgp->sg_name == NULL);
                break;
        }
#endif

        /*
         * Add named segment descriptors to the AVL tree to