/*
 * Load an unsigned integer of width `len` bytes (1, 2, 4, or 8) from
 * `addr` and return it widened.  The return type is declared on a
 * preceding line outside this view -- presumably a 64-bit unsigned type,
 * since the 8-byte case must round-trip losslessly; confirm upstream.
 *
 * NOTE(review): the direct casts assume `addr` is suitably aligned for
 * the requested access width -- verify call sites on strict-alignment
 * targets.
 */
87 ldv(int len, const void *addr)
88 {
89 switch (len) {
90 case 1:
91 return (*(uint8_t *)addr);
92 case 2:
93 return (*(uint16_t *)addr);
94 case 4:
95 return (*(uint32_t *)addr);
96 case 8:
97 return (*(uint64_t *)addr);
98 }
/*
 * Unreachable for valid lengths: trip the (project-defined) ASSERT in
 * debug builds, and hand back an easily recognizable poison value so
 * misuse is visible even when assertions are compiled out.
 */
99 ASSERT(!"bad int len");
100 return (0xFEEDFACEDEADBEEFULL);
101 }
102
103 void
104 zap_leaf_byteswap(zap_leaf_phys_t *buf, int size)
105 {
106 int i;
107 zap_leaf_t l;
108 l.l_bs = highbit(size)-1;
109 l.l_phys = buf;
110
111 buf->l_hdr.lh_block_type = BSWAP_64(buf->l_hdr.lh_block_type);
112 buf->l_hdr.lh_prefix = BSWAP_64(buf->l_hdr.lh_prefix);
113 buf->l_hdr.lh_magic = BSWAP_32(buf->l_hdr.lh_magic);
114 buf->l_hdr.lh_nfree = BSWAP_16(buf->l_hdr.lh_nfree);
115 buf->l_hdr.lh_nentries = BSWAP_16(buf->l_hdr.lh_nentries);
116 buf->l_hdr.lh_prefix_len = BSWAP_16(buf->l_hdr.lh_prefix_len);
117 buf->l_hdr.lh_freelist = BSWAP_16(buf->l_hdr.lh_freelist);
118
119 for (i = 0; i < ZAP_LEAF_HASH_NUMENTRIES(&l); i++)
120 buf->l_hash[i] = BSWAP_16(buf->l_hash[i]);
121
122 for (i = 0; i < ZAP_LEAF_NUMCHUNKS(&l); i++) {
123 zap_leaf_chunk_t *lc = &ZAP_LEAF_CHUNK(&l, i);
124 struct zap_leaf_entry *le;
125
126 switch (lc->l_free.lf_type) {
127 case ZAP_CHUNK_ENTRY:
128 le = &lc->l_entry;
129
815 * but this accesses memory more sequentially, and when we're
816 * called, the block is usually pretty full.
817 */
818 for (i = 0; i < ZAP_LEAF_NUMCHUNKS(l); i++) {
819 struct zap_leaf_entry *le = ZAP_LEAF_ENTRY(l, i);
820 if (le->le_type != ZAP_CHUNK_ENTRY)
821 continue;
822
823 if (le->le_hash & (1ULL << bit))
824 zap_leaf_transfer_entry(l, i, nl);
825 else
826 (void) zap_leaf_rehash_entry(l, i);
827 }
828 }
829
830 void
831 zap_leaf_stats(zap_t *zap, zap_leaf_t *l, zap_stats_t *zs)
832 {
833 int i, n;
834
835 n = zap->zap_f.zap_phys->zap_ptrtbl.zt_shift -
836 l->l_phys->l_hdr.lh_prefix_len;
837 n = MIN(n, ZAP_HISTOGRAM_SIZE-1);
838 zs->zs_leafs_with_2n_pointers[n]++;
839
840
841 n = l->l_phys->l_hdr.lh_nentries/5;
842 n = MIN(n, ZAP_HISTOGRAM_SIZE-1);
843 zs->zs_blocks_with_n5_entries[n]++;
844
845 n = ((1<<FZAP_BLOCK_SHIFT(zap)) -
846 l->l_phys->l_hdr.lh_nfree * (ZAP_LEAF_ARRAY_BYTES+1))*10 /
847 (1<<FZAP_BLOCK_SHIFT(zap));
848 n = MIN(n, ZAP_HISTOGRAM_SIZE-1);
849 zs->zs_blocks_n_tenths_full[n]++;
850
851 for (i = 0; i < ZAP_LEAF_HASH_NUMENTRIES(l); i++) {
852 int nentries = 0;
853 int chunk = l->l_phys->l_hash[i];
854
855 while (chunk != CHAIN_END) {
|
/*
 * Load an unsigned integer of width `len` bytes (1, 2, 4, or 8) from
 * `addr` and return it widened.  The return type is declared on a
 * preceding line outside this view -- presumably a 64-bit unsigned type,
 * since the 8-byte case must round-trip losslessly; confirm upstream.
 *
 * NOTE(review): the direct casts assume `addr` is suitably aligned for
 * the requested access width -- verify call sites on strict-alignment
 * targets.
 */
87 ldv(int len, const void *addr)
88 {
89 switch (len) {
90 case 1:
91 return (*(uint8_t *)addr);
92 case 2:
93 return (*(uint16_t *)addr);
94 case 4:
95 return (*(uint32_t *)addr);
96 case 8:
97 return (*(uint64_t *)addr);
98 }
/*
 * Unreachable for valid lengths: trip the (project-defined) ASSERT in
 * debug builds, and hand back an easily recognizable poison value so
 * misuse is visible even when assertions are compiled out.
 */
99 ASSERT(!"bad int len");
100 return (0xFEEDFACEDEADBEEFULL);
101 }
102
103 void
104 zap_leaf_byteswap(zap_leaf_phys_t *buf, int size)
105 {
106 int i;
107 zap_leaf_t l = { 0 };
108
109 l.l_bs = highbit(size)-1;
110
111 buf->l_hdr.lh_block_type = BSWAP_64(buf->l_hdr.lh_block_type);
112 buf->l_hdr.lh_prefix = BSWAP_64(buf->l_hdr.lh_prefix);
113 buf->l_hdr.lh_magic = BSWAP_32(buf->l_hdr.lh_magic);
114 buf->l_hdr.lh_nfree = BSWAP_16(buf->l_hdr.lh_nfree);
115 buf->l_hdr.lh_nentries = BSWAP_16(buf->l_hdr.lh_nentries);
116 buf->l_hdr.lh_prefix_len = BSWAP_16(buf->l_hdr.lh_prefix_len);
117 buf->l_hdr.lh_freelist = BSWAP_16(buf->l_hdr.lh_freelist);
118
119 for (i = 0; i < ZAP_LEAF_HASH_NUMENTRIES(&l); i++)
120 buf->l_hash[i] = BSWAP_16(buf->l_hash[i]);
121
122 for (i = 0; i < ZAP_LEAF_NUMCHUNKS(&l); i++) {
123 zap_leaf_chunk_t *lc = &ZAP_LEAF_CHUNK(&l, i);
124 struct zap_leaf_entry *le;
125
126 switch (lc->l_free.lf_type) {
127 case ZAP_CHUNK_ENTRY:
128 le = &lc->l_entry;
129
815 * but this accesses memory more sequentially, and when we're
816 * called, the block is usually pretty full.
817 */
818 for (i = 0; i < ZAP_LEAF_NUMCHUNKS(l); i++) {
819 struct zap_leaf_entry *le = ZAP_LEAF_ENTRY(l, i);
820 if (le->le_type != ZAP_CHUNK_ENTRY)
821 continue;
822
823 if (le->le_hash & (1ULL << bit))
824 zap_leaf_transfer_entry(l, i, nl);
825 else
826 (void) zap_leaf_rehash_entry(l, i);
827 }
828 }
829
830 void
831 zap_leaf_stats(zap_t *zap, zap_leaf_t *l, zap_stats_t *zs)
832 {
833 int i, n;
834
835 n = zap->zap_f_phys->zap_ptrtbl.zt_shift -
836 l->l_phys->l_hdr.lh_prefix_len;
837 n = MIN(n, ZAP_HISTOGRAM_SIZE-1);
838 zs->zs_leafs_with_2n_pointers[n]++;
839
840
841 n = l->l_phys->l_hdr.lh_nentries/5;
842 n = MIN(n, ZAP_HISTOGRAM_SIZE-1);
843 zs->zs_blocks_with_n5_entries[n]++;
844
845 n = ((1<<FZAP_BLOCK_SHIFT(zap)) -
846 l->l_phys->l_hdr.lh_nfree * (ZAP_LEAF_ARRAY_BYTES+1))*10 /
847 (1<<FZAP_BLOCK_SHIFT(zap));
848 n = MIN(n, ZAP_HISTOGRAM_SIZE-1);
849 zs->zs_blocks_n_tenths_full[n]++;
850
851 for (i = 0; i < ZAP_LEAF_HASH_NUMENTRIES(l); i++) {
852 int nentries = 0;
853 int chunk = l->l_phys->l_hash[i];
854
855 while (chunk != CHAIN_END) {
|