The Design and Implementation of the FreeBSD Operating System, Second Edition
Now available: The Design and Implementation of the FreeBSD Operating System (Second Edition)


[ source navigation ] [ diff markup ] [ identifier search ] [ freetext search ] [ file search ] [ list types ] [ track identifier ]

FreeBSD/Linux Kernel Cross Reference
sys/kern/subr_alist.c

Version: -  FREEBSD  -  FREEBSD-13-STABLE  -  FREEBSD-13-0  -  FREEBSD-12-STABLE  -  FREEBSD-12-0  -  FREEBSD-11-STABLE  -  FREEBSD-11-0  -  FREEBSD-10-STABLE  -  FREEBSD-10-0  -  FREEBSD-9-STABLE  -  FREEBSD-9-0  -  FREEBSD-8-STABLE  -  FREEBSD-8-0  -  FREEBSD-7-STABLE  -  FREEBSD-7-0  -  FREEBSD-6-STABLE  -  FREEBSD-6-0  -  FREEBSD-5-STABLE  -  FREEBSD-5-0  -  FREEBSD-4-STABLE  -  FREEBSD-3-STABLE  -  FREEBSD22  -  l41  -  OPENBSD  -  linux-2.6  -  MK84  -  PLAN9  -  xnu-8792 
SearchContext: -  none  -  3  -  10 

    1 /*
    2  * ALIST.C -    Bitmap allocator/deallocator, using a radix tree with hinting.
    3  *              Unlimited-size allocations, power-of-2 only, power-of-2
    4  *              aligned results only.
    5  * 
    6  * Copyright (c) 2007 The DragonFly Project.  All rights reserved.
    7  * 
    8  * This code is derived from software contributed to The DragonFly Project
    9  * by Matthew Dillon <dillon@backplane.com>
   10  * 
   11  * Redistribution and use in source and binary forms, with or without
   12  * modification, are permitted provided that the following conditions
   13  * are met:
   14  * 
   15  * 1. Redistributions of source code must retain the above copyright
   16  *    notice, this list of conditions and the following disclaimer.
   17  * 2. Redistributions in binary form must reproduce the above copyright
   18  *    notice, this list of conditions and the following disclaimer in
   19  *    the documentation and/or other materials provided with the
   20  *    distribution.
   21  * 3. Neither the name of The DragonFly Project nor the names of its
   22  *    contributors may be used to endorse or promote products derived
   23  *    from this software without specific, prior written permission.
   24  * 
   25  * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
   26  * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
   27  * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
   28  * FOR A PARTICULAR PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE
   29  * COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
   30  * INCIDENTAL, SPECIAL, EXEMPLARY OR CONSEQUENTIAL DAMAGES (INCLUDING,
   31  * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
   32  * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
   33  * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
   34  * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
   35  * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
   36  * SUCH DAMAGE.
   37  */
   38 /*
   39  * This module has been adapted from the BLIST module, which was written
   40  * by Matthew Dillon many years ago.
   41  *
   42  * This module implements a general power-of-2 bitmap allocator/deallocator.
   43  * All allocations must be in powers of 2 and will return similarly aligned
   44  * results.  The module does not try to interpret the meaning of a 'block'
   45  * other than to return ALIST_BLOCK_NONE on an allocation failure.
   46  *
   47  * A maximum of 2 billion blocks is supported so, for example, if one block
   48  * represented 64 bytes a maximally sized ALIST would represent
   49  * 128 gigabytes.
   50  *
   51  * A radix tree is used to maintain the bitmap and laid out in a manner
   52  * similar to the blist code.  Meta nodes use a radix of 16 and 2 bits per
   53  * block while leaf nodes use a radix of 32 and 1 bit per block (stored in
   54  * a 32 bit bitmap field).  Both meta and leaf nodes have a hint field.
   55  * This field gives us a hint as to the largest free contiguous range of
   56  * blocks under the node.  It may contain a value that is too high, but
   57  * will never contain a value that is too low.  When the radix tree is
   58  * searched, allocation failures in subtrees update the hint. 
   59  *
   60  * The radix tree is laid out recursively using a linear array.  Each meta
   61  * node is immediately followed (laid out sequentially in memory) by
   62  * ALIST_META_RADIX lower level nodes.  This is a recursive structure but one
   63  * that can be easily scanned through a very simple 'skip' calculation.  In
   64  * order to support large radixes, portions of the tree may reside outside our
   65  * memory allocation.  We handle this with an early-terminate optimization
   66  * in the meta-node.  The memory allocation is only large enough to cover
   67  * the number of blocks requested at creation time even if it must be
   68  * encompassed in larger root-node radix.
   69  *
   70  * This code can be compiled stand-alone for debugging.
   71  */
   72 
   73 #ifdef _KERNEL
   74 
   75 #include <sys/param.h>
   76 #include <sys/systm.h>
   77 #include <sys/lock.h>
   78 #include <sys/kernel.h>
   79 #include <sys/alist.h>
   80 #include <sys/malloc.h>
   81 #include <vm/vm.h>
   82 #include <vm/vm_object.h>
   83 #include <vm/vm_kern.h>
   84 #include <vm/vm_extern.h>
   85 #include <vm/vm_page.h>
   86 
   87 #else
   88 
   89 #ifndef ALIST_NO_DEBUG
   90 #define ALIST_DEBUG
   91 #endif
   92 
   93 #include <sys/types.h>
   94 #include <stdio.h>
   95 #include <assert.h>
   96 #include <string.h>
   97 #include <stdlib.h>
   98 #include <stdarg.h>
   99 
  100 #define kmalloc(a,b,c)  malloc(a)
  101 #define kfree(a,b)      free(a)
  102 #define kprintf         printf
  103 #define KKASSERT(exp)   assert(exp)
  104 struct malloc_type;
  105 
  106 #include <sys/alist.h>
  107 
  108 void panic(const char *ctl, ...);
  109 
  110 #endif
  111 
  112 /*
  113  * static support functions
  114  */
  115 
  116 static alist_blk_t alst_leaf_alloc(almeta_t *scan, alist_blk_t blk,
  117                                 alist_blk_t start, alist_blk_t count);
  118 static alist_blk_t alst_meta_alloc(almeta_t *scan, alist_blk_t blk,
  119                                 alist_blk_t start, alist_blk_t count,
  120                                 alist_blk_t radix, alist_blk_t skip);
  121 static void alst_leaf_free(almeta_t *scan, alist_blk_t relblk,
  122                                 alist_blk_t count);
  123 static void alst_meta_free(almeta_t *scan, alist_blk_t freeBlk,
  124                                 alist_blk_t count, alist_blk_t radix,
  125                                 alist_blk_t skip, alist_blk_t blk);
  126 static alist_blk_t alst_radix_init(almeta_t *scan, alist_blk_t blk,
  127                                 alist_blk_t radix, alist_blk_t skip,
  128                                 alist_blk_t count);
  129 #ifndef _KERNEL
  130 static void     alst_radix_print(almeta_t *scan, alist_blk_t blk,
  131                                         alist_blk_t radix, alist_blk_t skip,
  132                                         int tab);
  133 #endif
  134 
  135 /*
  136  * Create a alist capable of handling up to the specified number of blocks.
  137  * Blocks must be greater than 0 but do not have to be a power of 2.
  138  *
  139  * The smallest alist consists of a single leaf node capable of
  140  * managing ALIST_BMAP_RADIX blocks.
  141  */
  142 alist_t 
  143 alist_create(alist_blk_t blocks, struct malloc_type *mtype)
  144 {
  145         alist_t bl;
  146         alist_blk_t radix;
  147         alist_blk_t skip = 0;
  148 
  149         /*
  150          * Calculate radix and skip field used for scanning.
  151          */
  152         radix = ALIST_BMAP_RADIX;
  153 
  154         while (radix < blocks) {
  155                 radix *= ALIST_META_RADIX;
  156                 skip = (skip + 1) * ALIST_META_RADIX;
  157         }
  158 
  159         bl = kmalloc(sizeof(struct alist), mtype, M_WAITOK | M_ZERO);
  160 
  161         bl->bl_blocks = blocks;
  162         bl->bl_radix = radix;
  163         bl->bl_skip = skip;
  164         bl->bl_rootblks = 1 + alst_radix_init(NULL, 0, bl->bl_radix,
  165                                               bl->bl_skip, blocks);
  166         bl->bl_root = kmalloc(sizeof(almeta_t) * bl->bl_rootblks,
  167                               mtype, M_WAITOK);
  168 
  169 #if defined(ALIST_DEBUG)
  170         kprintf(
  171                 "ALIST representing %d blocks (%d MB of swap)"
  172                 ", requiring %dK (%d bytes) of ram\n",
  173                 bl->bl_blocks,
  174                 bl->bl_blocks * 4 / 1024,
  175                 (bl->bl_rootblks * sizeof(almeta_t) + 1023) / 1024,
  176                 (bl->bl_rootblks * sizeof(almeta_t))
  177         );
  178         kprintf("ALIST raw radix tree contains %d records\n", bl->bl_rootblks);
  179 #endif
  180         alst_radix_init(bl->bl_root, 0, bl->bl_radix, bl->bl_skip, blocks);
  181 
  182         return(bl);
  183 }
  184 
  185 void
  186 alist_init(alist_t bl, alist_blk_t blocks,
  187            almeta_t *records, alist_blk_t nrecords)
  188 {
  189         alist_blk_t radix;
  190         alist_blk_t skip = 0;
  191 
  192         /*
  193          * Calculate radix and skip field used for scanning.
  194          */
  195         radix = ALIST_BMAP_RADIX;
  196 
  197         while (radix < blocks) {
  198                 radix *= ALIST_META_RADIX;
  199                 skip = (skip + 1) * ALIST_META_RADIX;
  200         }
  201         bzero(bl, sizeof(*bl));
  202 
  203         bl->bl_blocks = blocks;
  204         bl->bl_radix = radix;
  205         bl->bl_skip = skip;
  206         bl->bl_rootblks = 1 + alst_radix_init(NULL, 0, bl->bl_radix,
  207                                               bl->bl_skip, blocks);
  208         KKASSERT(bl->bl_rootblks <= nrecords);
  209         bl->bl_root = records;
  210 
  211 #if defined(ALIST_DEBUG)
  212         kprintf(
  213                 "ALIST representing %d blocks (%d MB of swap)"
  214                 ", requiring %dK (%d bytes) of ram\n",
  215                 bl->bl_blocks,
  216                 bl->bl_blocks * 4 / 1024,
  217                 (bl->bl_rootblks * sizeof(almeta_t) + 1023) / 1024,
  218                 (bl->bl_rootblks * sizeof(almeta_t))
  219         );
  220         kprintf("ALIST raw radix tree contains %d records\n", bl->bl_rootblks);
  221 #endif
  222         alst_radix_init(bl->bl_root, 0, bl->bl_radix, bl->bl_skip, blocks);
  223 }
  224 
  225 void 
  226 alist_destroy(alist_t bl, struct malloc_type *mtype)
  227 {
  228         kfree(bl->bl_root, mtype);
  229         kfree(bl, mtype);
  230 }
  231 
  232 /*
  233  * Reserve space in the block bitmap.  Return the base of a contiguous
  234  * region or ALIST_BLOCK_NONE if space could not be allocated.
  235  *
  236  * This nominally allocates a power-of-2 number of blocks.  However,
  237  * non-powers of 2 are accepted and implemented by first allocating
  238  * the nearest power of 2 and then freeing the remainder.
  239  */
  240 alist_blk_t
  241 alist_alloc(alist_t bl, alist_blk_t start, alist_blk_t count)
  242 {
  243         alist_blk_t blk = ALIST_BLOCK_NONE;
  244 
  245         /*
  246          * Check non power-of-2
  247          */
  248         KKASSERT(count);
  249         if ((count | (count - 1)) != (count << 1) - 1) {
  250                 alist_blk_t ncount = (count < 256) ? 1 : 256;
  251                 while (ncount < count)
  252                         ncount <<= 1;
  253                 blk = alist_alloc(bl, start, ncount);
  254                 if (blk != ALIST_BLOCK_NONE)
  255                         alist_free(bl, blk + count, ncount - count);
  256                 return (blk);
  257         }
  258 
  259         /*
  260          * Power of 2
  261          */
  262         if (bl && count < bl->bl_radix) {
  263                 if (bl->bl_radix == ALIST_BMAP_RADIX) {
  264                         blk = alst_leaf_alloc(bl->bl_root, 0, start, count);
  265                 } else {
  266                         blk = alst_meta_alloc(bl->bl_root, 0, start, count,
  267                                               bl->bl_radix, bl->bl_skip);
  268                 }
  269                 if (blk != ALIST_BLOCK_NONE)
  270                         bl->bl_free -= count;
  271         }
  272         return(blk);
  273 }
  274 
  275 /*
  276  * Free up space in the block bitmap.  The starting block and count do not
  277  * need to be power-of-2 aligned.  The related blocks must be in an allocated
  278  * state.
  279  */
  280 void 
  281 alist_free(alist_t bl, alist_blk_t blkno, alist_blk_t count)
  282 {
  283         if (bl) {
  284                 KKASSERT(blkno + count <= bl->bl_blocks);
  285                 if (bl->bl_radix == ALIST_BMAP_RADIX) {
  286                         alst_leaf_free(bl->bl_root, blkno, count);
  287                 } else {
  288                         alst_meta_free(bl->bl_root, blkno, count,
  289                                        bl->bl_radix, bl->bl_skip, 0);
  290                 }
  291                 bl->bl_free += count;
  292         }
  293 }
  294 
  295 /*
  296  * Returns the current total number of free blocks and the
  297  * approximate trailing largest contiguous free block available.
  298  */
/*
 * Returns the current total number of free blocks (bl->bl_free) and,
 * via *startp/*countp, the approximate trailing (highest-addressed)
 * contiguous free chunk.  If no free space can be located, *countp
 * is left 0.
 *
 * startp - receives the base of the located free chunk (0 if none)
 * countp - receives the size of the located free chunk (0 if none)
 */
alist_blk_t
alist_free_info(alist_t bl, alist_blk_t *startp, alist_blk_t *countp)
{
	alist_blk_t radix = bl->bl_radix;
	alist_blk_t skip = bl->bl_skip;
	alist_blk_t next_skip;
	alist_blk_t i;
	alist_bmap_t mask;
	almeta_t *scan = bl->bl_root;	/* current node, starting at root */

	*startp = 0;
	*countp = 0;

	/*
	 * Walk down the meta levels until we reach a leaf.  At each
	 * level scan this node's 2-bits-per-chunk bitmap from the top
	 * (highest block numbers) down.
	 */
	while (radix != ALIST_BMAP_RADIX) {
		radix /= ALIST_META_RADIX;
		next_skip = skip / ALIST_META_RADIX;

		/*
		 * Scan from the top down for the highest-addressed
		 * chunk with free space.  NOTE: the loop guard relies
		 * on unsigned wrap: decrementing i past 0 yields
		 * (alist_blk_t)-1 == ALIST_BLOCK_NONE.
		 */
		for (i = ALIST_META_RADIX - 1; i != ALIST_BLOCK_NONE; --i) {
			mask = (scan->bm_bitmap >> (i * 2)) & 3;
			if (mask == 0) {
				/*
				 * All allocated (00), continue the loop
				 */
				continue;
			}
			if (mask == 1) {
				/*
				 * Partially allocated (01), push into
				 * this guy
				 */
				break;
			}
			if (mask == 2) {
				/*
				 * Unknown state (10 is reserved), give up
				 */
				return(bl->bl_free);
			}
			/*
			 * All free (11), we can return the whole chunk.
			 */
			*startp += i * radix;
			*countp = radix;
			return(bl->bl_free);
		}

		/*
		 * If we failed to find anything stop here, otherwise push
		 * in to the partially-free child located above.
		 */
		if (i == ALIST_BLOCK_NONE)
			return(bl->bl_free);
		*startp += i * radix;
		scan += 1 + next_skip * i;
		skip = next_skip - 1;
	}

	/*
	 * If we got all the way down to a leaf node locate the last block,
	 * power-of-2 aligned and power-of-2 sized.  Well, the easiest way
	 * to deal with this is to just return 1 block.
	 */
	if (radix == ALIST_BMAP_RADIX) {
		mask = scan->bm_bitmap;
		/* Same unsigned-wrap loop guard as above */
		for (i = ALIST_BMAP_RADIX - 1; i != ALIST_BLOCK_NONE; --i) {
			if ((mask & ((alist_bmap_t)1U << i)))
				break;
		}

		/*
		 * did not find a free entry
		 */
		if (i == ALIST_BLOCK_NONE)
			return(bl->bl_free);

		/*
		 * Return one block.
		 */
		*startp += i;
		*countp = 1;
		return(bl->bl_free);
	}
	return(bl->bl_free);
}
  385 
  386 #ifdef ALIST_DEBUG
  387 
/*
 * alist_print() - dump the radix tree to the console for debugging.
 * Only compiled when ALIST_DEBUG is defined (always the case in the
 * stand-alone userland build unless ALIST_NO_DEBUG is set).
 */

void
alist_print(alist_t bl)
{
	kprintf("ALIST {\n");
	alst_radix_print(bl->bl_root, 0, bl->bl_radix, bl->bl_skip, 4);
	kprintf("}\n");
}
  399 
  400 #endif
  401 
  402 /************************************************************************
  403  *                        ALLOCATION SUPPORT FUNCTIONS                  *
  404  ************************************************************************
  405  *
  406  *      These support functions do all the actual work.  They may seem 
  407  *      rather longish, but that's because I've commented them up.  The
  408  *      actual code is straight forward.
  409  *
  410  */
  411 
  412 /*
  413  * alist_leaf_alloc() - allocate at a leaf in the radix tree (a bitmap).
  414  *
  415  *      This is the core of the allocator and is optimized for the 1 block
  416  *      and the ALIST_BMAP_RADIX block allocation cases.  Other cases are
  417  *      somewhat slower.  The 1 block allocation case is log2 and extremely
  418  *      quick.
  419  *
  420  *      mask bit is 0  allocated, not available
  421  *      mask bit is 1  free, available for allocation
  422  */
  423 
  424 static alist_blk_t
  425 alst_leaf_alloc(almeta_t *scan, alist_blk_t blk, alist_blk_t start,
  426                 alist_blk_t count)
  427 {
  428         alist_bmap_t orig = scan->bm_bitmap;
  429 
  430         /*
  431          * Allocate only beyond the start point.  Mask to 0 the low bits
  432          * below start.  If start == blk no bits get cleared so don't
  433          * bother.
  434          */
  435         if (start >= blk + ALIST_BMAP_RADIX)
  436                 return(ALIST_BLOCK_NONE);
  437 
  438         if (start > blk && start < blk + ALIST_BMAP_RADIX)
  439                 orig &= ~(((alist_bmap_t)1U << (start - blk)) - 1);
  440 
  441         start &= ALIST_BMAP_RADIX - 1;
  442 
  443         /*
  444          * Optimize bitmap all-allocated case.  Also, count = 1
  445          * case assumes at least 1 bit is free in the bitmap, so
  446          * we have to take care of this case here.
  447          */
  448         if (orig == 0) {
  449                 if (start <= blk)
  450                         scan->bm_bighint = 0;
  451                 return(ALIST_BLOCK_NONE);
  452         }
  453 
  454         /*
  455          * Optimized code to allocate one bit out of the bitmap
  456          */
  457         if (count == 1) {
  458                 alist_bmap_t mask;
  459                 alist_blk_t j = ALIST_BMAP_RADIX/2;
  460                 alist_blk_t r = 0;
  461 
  462                 mask = (alist_bmap_t)-1 >> (ALIST_BMAP_RADIX/2);
  463 
  464                 while (j) {
  465                         if ((orig & mask) == 0) {
  466                             r += j;
  467                             orig >>= j;
  468                         }
  469                         j >>= 1;
  470                         mask >>= j;
  471                 }
  472                 scan->bm_bitmap &= ~(1 << r);
  473                 return(blk + r);
  474         }
  475 
  476         /*
  477          * non-optimized code to allocate N bits out of the bitmap.
  478          * The more bits, the faster the code runs.  It will run
  479          * the slowest allocating 2 bits, but since there aren't any
  480          * memory ops in the core loop (or shouldn't be, anyway),
  481          * you probably won't notice the difference.
  482          *
  483          * Similar to the blist case, the alist code also requires
  484          * allocations to be power-of-2 sized and aligned to the
  485          * size of the allocation, which simplifies the algorithm.
  486          */
  487         {
  488                 alist_blk_t j;
  489                 alist_blk_t n = ALIST_BMAP_RADIX - count;
  490                 alist_bmap_t mask;
  491 
  492                 mask = (alist_bmap_t)-1 >> n;
  493 
  494                 for (j = 0; j <= n; j += count) {
  495                         if ((orig & mask) == mask) {
  496                                 scan->bm_bitmap &= ~mask;
  497                                 return(blk + j);
  498                         }
  499                         mask = mask << count;
  500                 }
  501         }
  502 
  503         /*
  504          * We couldn't allocate count in this subtree, update bighint
  505          * if we were able to check the entire node.
  506          */
  507         if (start <= blk)
  508                 scan->bm_bighint = count - 1;
  509         return(ALIST_BLOCK_NONE);
  510 }
  511 
/*
 * Attempt to allocate at a meta node.  If we can't, we update
 * bighint and return a failure.  Updating bighint optimizes future
 * calls that hit this node.  We have to check for our collapse cases
 * and we have a few optimizations strewn in as well.
 *
 * scan  - meta node to allocate from
 * blk   - block number represented by the first entry of this node
 * start - allocations must be at or beyond this block number
 * count - power-of-2 number of blocks to allocate
 * radix - number of blocks spanned by this node
 * skip  - number of array entries spanned by this node's children
 *
 * Returns the starting block number on success or ALIST_BLOCK_NONE
 * on failure.
 */
static alist_blk_t
alst_meta_alloc(almeta_t *scan, alist_blk_t blk, alist_blk_t start,
		alist_blk_t count, alist_blk_t radix, alist_blk_t skip)
{
	alist_blk_t i;
	alist_bmap_t mask;
	alist_bmap_t pmask;
	alist_blk_t next_skip = ((u_int)skip / ALIST_META_RADIX);
	alist_blk_t orig_blk;

	/*
	 * ALL-ALLOCATED special case
	 */
	if (scan->bm_bitmap == 0)  {
		scan->bm_bighint = 0;
		return(ALIST_BLOCK_NONE);
	}

	radix /= ALIST_META_RADIX;

	/*
	 * Radix now represents each bitmap entry for this meta node.  If
	 * the number of blocks being allocated can be fully represented,
	 * we allocate directly out of this meta node.
	 *
	 * Meta node bitmaps use 2 bits per block.
	 *
	 *	00	ALL-ALLOCATED
	 *	01	PARTIALLY-FREE/PARTIALLY-ALLOCATED
	 *	10	(RESERVED)
	 *	11	ALL-FREE
	 */
	if (count >= radix) {
		alist_blk_t n = count / radix * 2;	/* number of bits */
		alist_blk_t j;

		/*
		 * Slide an n-bit window of 11 pairs across the meta
		 * bitmap looking for an aligned all-free span at or
		 * beyond the start point.
		 */
		mask = (alist_bmap_t)-1 >> (ALIST_BMAP_RADIX - n);
		for (j = 0; j < ALIST_META_RADIX; j += n / 2) {
			if ((scan->bm_bitmap & mask) == mask &&
			    blk + j * radix >= start) {
				scan->bm_bitmap &= ~mask;
				return(blk + j * radix);
			}
			mask <<= n;
		}
		/*
		 * Lower the hint only if start did not exclude any
		 * portion of this node from the scan.
		 */
		if (scan->bm_bighint >= count && start <= blk)
			scan->bm_bighint = count >> 1;
		return(ALIST_BLOCK_NONE);
	}

	/*
	 * If not we have to recurse.
	 */
	mask = 0x00000003;
	pmask = 0x00000001;
	orig_blk = blk;
	for (i = 1; i <= skip; i += next_skip) {
		if (scan[i].bm_bighint == (alist_blk_t)-1) {
			/*
			 * Terminator - the tree beyond this point lies
			 * outside the backing memory allocation.
			 */
			break;
		}

		/*
		 * If the element is marked completely free (11), initialize
		 * the recursion (the child's bitmap/hint may hold stale
		 * garbage from before the subtree was collapsed).
		 */
		if ((scan->bm_bitmap & mask) == mask) {
			scan[i].bm_bitmap = (alist_bmap_t)-1;
			scan[i].bm_bighint = radix;
		}

		if ((scan->bm_bitmap & mask) == 0) {
			/*
			 * Object marked completely allocated, recursion
			 * contains garbage.
			 */
			/* Skip it */
		} else if (blk + radix <= start) {
			/*
			 * Object does not contain or is not beyond our
			 * start point.
			 */
			/* Skip it */
		} else if (count <= scan[i].bm_bighint) {
			/*
			 * count fits in object.  If successful and the
			 * deeper level becomes all allocated, mark our
			 * level as all-allocated.
			 */
			alist_blk_t r;
			if (next_skip == 1) {
				r = alst_leaf_alloc(&scan[i], blk, start,
						    count);
			} else {
				r = alst_meta_alloc(&scan[i], blk, start,
						    count,
						    radix, next_skip - 1);
			}
			if (r != ALIST_BLOCK_NONE) {
				if (scan[i].bm_bitmap == 0) {
					/* child now all-allocated: 00 */
					scan->bm_bitmap &= ~mask;
				} else {
					/* child partially free: 01 */
					scan->bm_bitmap &= ~mask;
					scan->bm_bitmap |= pmask;
				}
				return(r);
			}
		}
		blk += radix;
		mask <<= 2;
		pmask <<= 2;
	}

	/*
	 * We couldn't allocate count in this subtree, update bighint
	 * if we were able to check the entire node.
	 */
	if (scan->bm_bighint >= count && start <= orig_blk)
		scan->bm_bighint = count >> 1;
	return(ALIST_BLOCK_NONE);
}
  641 
  642 /*
  643  * Free allocated block from leaf bitmap
  644  */
  645 static void
  646 alst_leaf_free(almeta_t *scan, alist_blk_t blk, alist_blk_t count)
  647 {
  648         /*
  649          * free some data in this bitmap
  650          *
  651          * e.g.
  652          *      0000111111111110000
  653          *          \_________/\__/
  654          *              v        n
  655          */
  656         alist_blk_t n = blk & (ALIST_BMAP_RADIX - 1);
  657         alist_bmap_t mask;
  658 
  659         mask = ((alist_bmap_t)-1 << n) &
  660             ((alist_bmap_t)-1 >> (ALIST_BMAP_RADIX - count - n));
  661 
  662         if (scan->bm_bitmap & mask)
  663                 panic("alst_radix_free: freeing free block");
  664         scan->bm_bitmap |= mask;
  665 
  666         /*
  667          * We could probably do a better job here.  We are required to make
  668          * bighint at least as large as the biggest contiguous block of 
  669          * data.  If we just shoehorn it, a little extra overhead will
  670          * be incured on the next allocation (but only that one typically).
  671          */
  672         scan->bm_bighint = ALIST_BMAP_RADIX;
  673 }
  674 
/*
 * Free allocated blocks from radix tree meta info.   This support routine
 * frees a range of blocks from the bitmap.  The range must be entirely
 * enclosed by this radix node.  If a meta node, we break the range down
 * recursively to free blocks in subnodes (which means that this code
 * can free an arbitrary range whereas the allocation code cannot allocate
 * an arbitrary range).
 *
 * scan    - meta node to free within
 * freeBlk - first block of the range being freed
 * count   - number of blocks to free (need not be a power of 2)
 * radix   - number of blocks spanned by this node
 * skip    - number of array entries spanned by this node's children
 * blk     - block number represented by the first entry of this node
 */
static void
alst_meta_free(almeta_t *scan, alist_blk_t freeBlk, alist_blk_t count,
	       alist_blk_t radix, alist_blk_t skip, alist_blk_t blk)
{
	alist_blk_t next_skip = ((u_int)skip / ALIST_META_RADIX);
	alist_bmap_t mask;
	alist_bmap_t pmask;
	alist_blk_t i;

	/*
	 * Break the free down into its components.  Because it is so easy
	 * to implement, frees are not limited to power-of-2 sizes.
	 *
	 * Each block in a meta-node bitmap takes two bits.
	 */
	radix /= ALIST_META_RADIX;

	/*
	 * Locate the first child entry overlapping the range and position
	 * mask/pmask (the 11 and 01 patterns) over its bitmap pair.
	 */
	i = (freeBlk - blk) / radix;
	blk += i * radix;
	mask = 0x00000003 << (i * 2);
	pmask = 0x00000001 << (i * 2);

	/* Index of that child's node in the linear node array */
	i = i * next_skip + 1;

	while (i <= skip && blk < freeBlk + count) {
		alist_blk_t v;

		/* v = portion of the range falling within this child */
		v = blk + radix - freeBlk;
		if (v > count)
			v = count;

		/* A terminator here means the range is out of bounds */
		if (scan->bm_bighint == (alist_blk_t)-1)
			panic("alst_meta_free: freeing unexpected range");

		/*
		 * WARNING on bighint updates.  When we free an element in
		 * a chunk if the chunk becomes wholly free it is possible
		 * that the whole node is now free, so bighint must be set
		 * to cover the whole node.  Otherwise address-specific
		 * allocations may fail.
		 *
		 * We don't bother figuring out how much of the node is
		 * actually free in this case.
		 */
		if (freeBlk == blk && count >= radix) {
			/*
			 * The area being freed covers the entire block,
			 * assert that we are marked all-allocated and
			 * then mark it all-free.
			 */
			KKASSERT((scan->bm_bitmap & mask) == 0);
			scan->bm_bitmap |= mask;
			scan->bm_bighint = radix * ALIST_META_RADIX;
		} else {
			/*
			 * If we were previously marked all-allocated, fix-up
			 * the next layer so we can recurse down into it
			 * (the child's contents are garbage when collapsed).
			 */
			if ((scan->bm_bitmap & mask) == 0) {
				scan[i].bm_bitmap = (alist_bmap_t)0;
				scan[i].bm_bighint = 0;
			}

			/*
			 * Recursion case, then either mark all-free or
			 * partially free.
			 */
			if (next_skip == 1) {
				alst_leaf_free(&scan[i], freeBlk, v);
			} else {
				alst_meta_free(&scan[i], freeBlk, v,
					       radix, next_skip - 1, blk);
			}
			if (scan[i].bm_bitmap == (alist_bmap_t)-1) {
				/* child became all-free: mark 11 */
				scan->bm_bitmap |= mask;
				scan->bm_bighint = radix * ALIST_META_RADIX;
			} else {
				/* child partially free: mark 01 */
				scan->bm_bitmap &= ~mask;
				scan->bm_bitmap |= pmask;
				if (scan->bm_bighint < scan[i].bm_bighint)
				    scan->bm_bighint = scan[i].bm_bighint;
			}
		}
		/* Advance to the next child entry */
		mask <<= 2;
		pmask <<= 2;
		count -= v;
		freeBlk += v;
		blk += radix;
		i += next_skip;
	}
}
  774 
  775 /*
  776  * Initialize our meta structures and bitmaps and calculate the exact
  777  * amount of space required to manage 'count' blocks - this space may
  778  * be considerably less then the calculated radix due to the large
  779  * RADIX values we use.
  780  */
  781 static alist_blk_t
  782 alst_radix_init(almeta_t *scan, alist_blk_t blk, alist_blk_t radix,
  783                 alist_blk_t skip, alist_blk_t count)
  784 {
  785         alist_blk_t i;
  786         alist_blk_t next_skip;
  787         alist_bmap_t mask;
  788         alist_bmap_t pmask;
  789         alist_blk_t memindex;
  790 
  791         /*
  792          * Leaf node
  793          */
  794         if (radix == ALIST_BMAP_RADIX) {
  795                 if (scan) {
  796                         scan->bm_bighint = 0;
  797                         scan->bm_bitmap = 0;
  798                 }
  799                 return(0);
  800         }
  801 
  802         /*
  803          * Meta node.  If allocating the entire object we can special
  804          * case it.  However, we need to figure out how much memory
  805          * is required to manage 'count' blocks, so we continue on anyway.
  806          */
  807         if (scan) {
  808                 scan->bm_bighint = 0;
  809                 scan->bm_bitmap = 0;
  810         }
  811         memindex = 0;
  812 
  813         radix /= ALIST_META_RADIX;
  814         next_skip = skip / ALIST_META_RADIX;
  815         mask = 0x00000003;
  816         pmask = 0x00000001;
  817 
  818         for (i = 1; i <= skip; i += next_skip) {
  819                 if (count >= blk + radix) {
  820                         /*
  821                          * Allocate the entire object
  822                          */
  823                         memindex += alst_radix_init(((scan) ? &scan[i] : NULL),
  824                                                     blk, radix,
  825                                                     next_skip - 1, count);
  826                         /* already marked as wholely allocated */
  827                 } else if (count > blk) {
  828                         /*
  829                          * Allocate a partial object, well it's really
  830                          * all-allocated, we just aren't allowed to
  831                          * free the whole thing.
  832                          */
  833                         memindex += alst_radix_init(((scan) ? &scan[i] : NULL),
  834                                                     blk, radix,
  835                                                     next_skip - 1, count);
  836                         /* already marked as wholely allocated */
  837                 } else {
  838                         /*
  839                          * Add terminator but continue the loop.  Populate
  840                          * all terminators.
  841                          */
  842                         if (scan) {
  843                                 scan[i].bm_bighint = (alist_blk_t)-1;
  844                                 scan[i].bm_bitmap = 0;
  845                         }
  846                         /* already marked as wholely allocated */
  847                 }
  848                 mask <<= 2;
  849                 pmask <<= 2;
  850                 blk += radix;
  851         }
  852         memindex += ALIST_META_RADIX;
  853         return(memindex);
  854 }
  855 
  856 #ifdef ALIST_DEBUG
  857 
  858 static void     
  859 alst_radix_print(almeta_t *scan, alist_blk_t blk, alist_blk_t radix,
  860                  alist_blk_t skip, int tab)
  861 {
  862         alist_blk_t i;
  863         alist_blk_t next_skip;
  864         alist_bmap_t mask;
  865 
  866         if (radix == ALIST_BMAP_RADIX) {
  867                 kprintf(
  868                     "%*.*s(%04x,%d): bitmap %08x big=%d\n", 
  869                     tab, tab, "",
  870                     blk, radix,
  871                     scan->bm_bitmap,
  872                     scan->bm_bighint
  873                 );
  874                 return;
  875         }
  876 
  877         if (scan->bm_bitmap == 0) {
  878                 kprintf(
  879                     "%*.*s(%04x,%d) ALL ALLOCATED\n",
  880                     tab, tab, "",
  881                     blk,
  882                     radix
  883                 );
  884                 return;
  885         }
  886         if (scan->bm_bitmap == (alist_bmap_t)-1) {
  887                 kprintf(
  888                     "%*.*s(%04x,%d) ALL FREE\n",
  889                     tab, tab, "",
  890                     blk,
  891                     radix
  892                 );
  893                 return;
  894         }
  895 
  896         kprintf(
  897             "%*.*s(%04x,%d): subtree (%d) bitmap=%08x big=%d {\n",
  898             tab, tab, "",
  899             blk, radix,
  900             radix,
  901             scan->bm_bitmap,
  902             scan->bm_bighint
  903         );
  904 
  905         radix /= ALIST_META_RADIX;
  906         next_skip = skip / ALIST_META_RADIX;
  907         tab += 4;
  908         mask = 0x00000003;
  909 
  910         for (i = 1; i <= skip; i += next_skip) {
  911                 if (scan[i].bm_bighint == (alist_blk_t)-1) {
  912                         kprintf(
  913                             "%*.*s(%04x,%d): Terminator\n",
  914                             tab, tab, "",
  915                             blk, radix
  916                         );
  917                         break;
  918                 }
  919                 if ((scan->bm_bitmap & mask) == mask) {
  920                         kprintf(
  921                             "%*.*s(%04x,%d): ALL FREE\n",
  922                             tab, tab, "",
  923                             blk, radix
  924                         );
  925                 } else if ((scan->bm_bitmap & mask) == 0) {
  926                         kprintf(
  927                             "%*.*s(%04x,%d): ALL ALLOCATED\n",
  928                             tab, tab, "",
  929                             blk, radix
  930                         );
  931                 } else {
  932                         alst_radix_print(
  933                             &scan[i],
  934                             blk,
  935                             radix,
  936                             next_skip - 1,
  937                             tab
  938                         );
  939                 }
  940                 blk += radix;
  941                 mask <<= 2;
  942         }
  943         tab -= 4;
  944 
  945         kprintf("%*.*s}\n", tab, tab, "");
  946 }
  947 
  948 #endif
  949 
  950 #ifdef ALIST_DEBUG
  951 
  952 int
  953 main(int ac, char **av)
  954 {
  955         alist_blk_t size = 1024;
  956         alist_blk_t da = 0;
  957         alist_blk_t count = 0;
  958         alist_t bl;
  959         int i;
  960 
  961         for (i = 1; i < ac; ++i) {
  962                 const char *ptr = av[i];
  963                 if (*ptr != '-') {
  964                         size = strtol(ptr, NULL, 0);
  965                         continue;
  966                 }
  967                 ptr += 2;
  968                 fprintf(stderr, "Bad option: %s\n", ptr - 2);
  969                 exit(1);
  970         }
  971         bl = alist_create(size, NULL);
  972         alist_free(bl, 0, size);
  973 
  974         for (;;) {
  975                 char buf[1024];
  976                 alist_blk_t bfree;
  977 
  978 
  979                 kprintf("%d/%d/%d> ", bl->bl_free, size, bl->bl_radix);
  980                 fflush(stdout);
  981                 if (fgets(buf, sizeof(buf), stdin) == NULL)
  982                         break;
  983                 switch(buf[0]) {
  984                 case 'p':
  985                         alist_print(bl);
  986                         break;
  987                 case 'a':
  988                         if (sscanf(buf + 1, "%x %d", &da, &count) == 2) {
  989                                 da = alist_alloc(bl, da, count);
  990                                 kprintf("    R=%04x\n", da);
  991                         } else if (sscanf(buf + 1, "%d", &count) == 1) {
  992                                 da =  alist_alloc(bl, 0, count);
  993                                 kprintf("    R=%04x\n", da);
  994                         } else if (count) {
  995                                 kprintf("alloc 0x%04x/%d\n", da, count);
  996                                 alist_blk_t blk = alist_alloc(bl, da, count);
  997                                 kprintf("    R=%04x\n", blk);
  998                         } else {
  999                                 kprintf("?\n");
 1000                         }
 1001                         break;
 1002                 case 'f':
 1003                         if (sscanf(buf + 1, "%x %d", &da, &count) == 2) {
 1004                                 alist_free(bl, da, count);
 1005                         } else if (count) {
 1006                                 kprintf("free 0x%04x/%d\n", da, count);
 1007                                 alist_free(bl, da, count);
 1008                         } else {
 1009                                 kprintf("?\n");
 1010                         }
 1011                         break;
 1012                 case '?':
 1013                 case 'h':
 1014                         puts("p          -print\n"
 1015                              "a %d       -allocate\n"
 1016                              "f %x %d    -free\n"
 1017                              "h/?        -help");
 1018                         break;
 1019                 case 'i':
 1020                         bfree = alist_free_info(bl, &da, &count);
 1021                         kprintf("info: %d free trailing: 0x%04x/%d\n",
 1022                                 bfree, da, count);
 1023                         break;
 1024                 default:
 1025                         kprintf("?\n");
 1026                         break;
 1027                 }
 1028         }
 1029         return(0);
 1030 }
 1031 
 1032 void
 1033 panic(const char *ctl, ...)
 1034 {
 1035         __va_list va;
 1036 
 1037         __va_start(va, ctl);
 1038         vfprintf(stderr, ctl, va);
 1039         fprintf(stderr, "\n");
 1040         __va_end(va);
 1041         exit(1);
 1042 }
 1043 
 1044 #endif

Cache object: e85c9f07fd1e2de3dcc189e37fd842b2


[ source navigation ] [ diff markup ] [ identifier search ] [ freetext search ] [ file search ] [ list types ] [ track identifier ]


This page is part of the FreeBSD/Linux Linux Kernel Cross-Reference, and was automatically generated using a modified version of the LXR engine.