7029 want per-process exploit mitigation features (secflags)
7030 want basic address space layout randomization (aslr)
7031 noexec_user_stack should be a secflag
7032 want a means to forbid mappings around NULL.
        
*** 57,66 ****
--- 57,67 ----
  #include <sys/exechdr.h>
  #include <sys/debug.h>
  #include <sys/vmsystm.h>
  #include <sys/swap.h>
  #include <sys/dumphdr.h>
+ #include <sys/random.h>
  
  #include <vm/hat.h>
  #include <vm/as.h>
  #include <vm/seg.h>
  #include <vm/seg_kp.h>
*** 78,87 ****
--- 79,89 ----
  #include <sys/x86_archext.h>
  #include <sys/elf_386.h>
  #include <sys/cmn_err.h>
  #include <sys/archsystm.h>
  #include <sys/machsystm.h>
+ #include <sys/secflags.h>
  
  #include <sys/vtrace.h>
  #include <sys/ddidmareq.h>
  #include <sys/promif.h>
  #include <sys/memnode.h>
*** 635,644 ****
--- 637,653 ----
  {
          return (0);
  }
  
  /*
+  * The maximum amount a randomized mapping will be slewed.  We should perhaps
+  * arrange things so these tunables can be separate for mmap, mmapobj, and
+  * ld.so
+  */
+ size_t aslr_max_map_skew = 256 * 1024 * 1024; /* 256MB */
+ 
+ /*
   * map_addr_proc() is the routine called when the system is to
   * choose an address for the user.  We will pick an address
   * range which is the highest available below userlimit.
   *
   * Every mapping will have a redzone of a single page on either side of
*** 750,759 ****
--- 759,769 ----
  
          ASSERT(ISP2(align_amount));
          ASSERT(align_amount == 0 || align_amount >= PAGESIZE);
  
          off = off & (align_amount - 1);
+ 
          /*
           * Look for a large enough hole starting below userlimit.
           * After finding it, use the upper part.
           */
          if (as_gap_aligned(as, len, &base, &slen, AH_HI, NULL, align_amount,
*** 777,786 ****
--- 787,810 ----
                  addr += (uintptr_t)off;
                  if (addr > as_addr) {
                          addr -= align_amount;
                  }
  
+                 /*
+                  * If randomization is requested, slew the allocation
+                  * backwards, within the same gap, by a random amount.
+                  */
+                 if (flags & _MAP_RANDOMIZE) {
+                         uint32_t slew;
+ 
+                         (void) random_get_pseudo_bytes((uint8_t *)&slew,
+                             sizeof (slew));
+ 
+                         slew = slew % MIN(aslr_max_map_skew, (addr - base));
+                         addr -= P2ALIGN(slew, align_amount);
+                 }
+ 
                  ASSERT(addr > base);
                  ASSERT(addr + len < base + slen);
                  ASSERT(((uintptr_t)addr & (align_amount - 1)) ==
                      ((uintptr_t)(off)));
                  *addrp = addr;
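
Reviewer note: to make the slew step above concrete, here is a minimal user-level sketch of the same calculation. The base, address, and alignment values are made-up examples, and random() merely stands in for the kernel's random_get_pseudo_bytes(); this is illustrative only, not part of the change.

    #include <stdio.h>
    #include <stdint.h>
    #include <stdlib.h>

    #define P2ALIGN(x, align)   ((x) & -(align))
    #define MIN(a, b)           ((a) < (b) ? (a) : (b))

    int
    main(void)
    {
            /* Example values only; in the kernel these come from as_gap_aligned(). */
            uintptr_t base = 0x10000000UL;          /* bottom of the found gap */
            uintptr_t addr = 0x7f000000UL;          /* unslewed (highest) choice */
            uintptr_t align_amount = 0x10000UL;     /* required alignment */
            size_t aslr_max_map_skew = 256UL * 1024 * 1024;
            uint32_t slew;

            slew = (uint32_t)random();      /* stand-in for random_get_pseudo_bytes() */
            slew = slew % MIN(aslr_max_map_skew, addr - base);
            addr -= P2ALIGN((uintptr_t)slew, align_amount);

            (void) printf("slewed addr: 0x%lx\n", (unsigned long)addr);
            return (0);
    }

Because the slew is rounded down to align_amount with P2ALIGN() before being subtracted, the slewed address keeps the alignment and page offset chosen earlier, which is what the ASSERTs immediately following the new block check.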
*** 902,911 ****
--- 926,942 ----
  {
          return (valid_va_range_aligned(basep, lenp, minlen, dir, 0, 0, 0));
  }
  
  /*
+  * Default to forbidding the first 64k of address space.  This protects most
+  * reasonably sized structures from dereferences through NULL:
+  *     ((foo_t *)0)->bar
+  */
+ uintptr_t forbidden_null_mapping_sz = 0x10000;
+ 
+ /*
   * Determine whether [addr, addr+len] are valid user addresses.
   */
  /*ARGSUSED*/
  int
  valid_usr_range(caddr_t addr, size_t len, uint_t prot, struct as *as,
*** 914,923 ****
--- 945,958 ----
          caddr_t eaddr = addr + len;
  
          if (eaddr <= addr || addr >= userlimit || eaddr > userlimit)
                  return (RANGE_BADADDR);
  
+         if ((addr <= (caddr_t)forbidden_null_mapping_sz) &&
+             secflag_enabled(as->a_proc, PROC_SEC_FORBIDNULLMAP))
+                 return (RANGE_BADADDR);
+ 
  #if defined(__amd64)
          /*
           * Check for the VA hole
           */
          if (eaddr > (caddr_t)hole_start && addr < (caddr_t)hole_end)
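
Reviewer note: a small user-level illustration of what the new check refuses once the forbid-NULL-mappings secflag is set on a process (e.g. with the psecflags utility from the related secflags work). The fixed address 0x1000 is just an example below forbidden_null_mapping_sz, and the exact errno is not asserted here since it depends on how the caller maps RANGE_BADADDR; illustrative only.

    #include <stdio.h>
    #include <string.h>
    #include <errno.h>
    #include <sys/mman.h>

    int
    main(void)
    {
            /*
             * Try to place a fixed anonymous mapping inside the first 64K of
             * the address space.  With the secflag enabled on the process,
             * valid_usr_range() now returns RANGE_BADADDR for this request
             * and the mmap() fails; without it, the mapping may succeed.
             */
            void *p = mmap((void *)0x1000, 4096, PROT_READ | PROT_WRITE,
                MAP_PRIVATE | MAP_ANON | MAP_FIXED, -1, 0);

            if (p == MAP_FAILED)
                    (void) printf("low mapping refused: %s\n", strerror(errno));
            else
                    (void) printf("low mapping allowed at %p\n", p);
            return (0);
    }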
*** 3924,3939 ****
   */
  void
  dcache_flushall()
  {}
  
- size_t
- exec_get_spslew(void)
- {
-         return (0);
- }
- 
  /*
   * Allocate a memory page.  The argument 'seed' can be any pseudo-random
   * number to vary where the pages come from.  This is quite a hacked up
   * method -- it works for now, but really needs to be fixed up a bit.
   *
--- 3959,3968 ----