diff --git a/rt/gc/cdgc/gc.d b/rt/gc/cdgc/gc.d
index bb137f1b7cb984daafc4b7bfa126153b7741317a..f5e088c54cd04351d0de17f8b0c26d9f0bab453f 100644
--- a/rt/gc/cdgc/gc.d
+++ b/rt/gc/cdgc/gc.d
@@ -43,7 +43,7 @@ version = STACKGROWSDOWN;       // growing the stack means subtracting from the
 /***************************************************/
 
 import rt.gc.cdgc.bits: GCBits;
-import rt.gc.cdgc.stats: GCStats;
+import rt.gc.cdgc.stats: GCStats, Stats;
 import rt.gc.cdgc.dynarray: DynArray;
 import alloc = rt.gc.cdgc.alloc;
 import opts = rt.gc.cdgc.opts;
@@ -84,15 +84,16 @@ struct BlkInfo
     uint   attr;
 }
 
+package enum BlkAttr : uint
+{
+    FINALIZE = 0b0000_0001,
+    NO_SCAN  = 0b0000_0010,
+    NO_MOVE  = 0b0000_0100,
+    ALL_BITS = 0b1111_1111
+}
+
 private
 {
-    enum BlkAttr : uint
-    {
-        FINALIZE = 0b0000_0001,
-        NO_SCAN  = 0b0000_0010,
-        NO_MOVE  = 0b0000_0100,
-        ALL_BITS = 0b1111_1111
-    }
 
     extern (C) void* rt_stackBottom();
     extern (C) void* rt_stackTop();
@@ -135,6 +136,8 @@ class GCLock { }                // just a dummy so we can get a global lock
 const uint GCVERSION = 1;       // increment every time we change interface
                                 // to GC.
 
+Stats stats;
+
 class GC
 {
     // For passing to debug code
@@ -156,6 +159,7 @@ class GC
             onOutOfMemoryError();
         gcx.initialize();
         setStackBottom(rt_stackBottom());
+        stats = Stats(this);
     }
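
Note: the new module-level stats object (declared in the previous hunk) is the
profiling hook wired up throughout this diff. Judging purely from the call
sites added below, the interface it needs to expose looks roughly like this;
the signatures are an inference, not the actual rt.gc.cdgc.stats module:

    // Inferred from the call sites in this diff (sketch only):
    struct Stats
    {
        static Stats opCall(GC gc) { Stats s; return s; } // built as Stats(this)
        void malloc_started(size_t size, uint attrs, size_t* pm_bitmask) { }
        void malloc_finished(void* p) { }      // wraps every mallocNoSync() call
        void collection_started() { }
        void collection_finished() { }         // wraps fullcollectshell()
        void world_stopped() { }               // right after thread_suspendAll()
        void world_started() { }               // right after thread_resumeAll()
    }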
 
 
@@ -206,15 +210,15 @@ class GC
         uint go()
         {
             Pool* pool = gcx.findPool(p);
-            uint  oldb = 0;
+            uint  old_attrs = 0;
 
             if (pool)
             {
-                auto biti = cast(size_t)(p - pool.baseAddr) / 16;
+                auto bit_i = cast(size_t)(p - pool.baseAddr) / 16;
 
-                oldb = gcx.getBits(pool, biti);
+                old_attrs = gcx.getAttr(pool, bit_i);
             }
-            return oldb;
+            return old_attrs;
         }
 
         if (!thread_needLock())
@@ -241,16 +245,16 @@ class GC
         uint go()
         {
             Pool* pool = gcx.findPool(p);
-            uint  oldb = 0;
+            uint  old_attrs = 0;
 
             if (pool)
             {
-                auto biti = cast(size_t)(p - pool.baseAddr) / 16;
+                auto bit_i = cast(size_t)(p - pool.baseAddr) / 16;
 
-                oldb = gcx.getBits(pool, biti);
-                gcx.setBits(pool, biti, mask);
+                old_attrs = gcx.getAttr(pool, bit_i);
+                gcx.setAttr(pool, bit_i, mask);
             }
-            return oldb;
+            return old_attrs;
         }
 
         if (!thread_needLock())
@@ -277,16 +281,16 @@ class GC
         uint go()
         {
             Pool* pool = gcx.findPool(p);
-            uint  oldb = 0;
+            uint  old_attrs = 0;
 
             if (pool)
             {
-                auto biti = cast(size_t)(p - pool.baseAddr) / 16;
+                auto bit_i = cast(size_t)(p - pool.baseAddr) / 16;
 
-                oldb = gcx.getBits(pool, biti);
-                gcx.clrBits(pool, biti, mask);
+                old_attrs = gcx.getAttr(pool, bit_i);
+                gcx.clrAttr(pool, bit_i, mask);
             }
-            return oldb;
+            return old_attrs;
         }
 
         if (!thread_needLock())
@@ -303,7 +307,7 @@ class GC
     /**
      *
      */
-    void *malloc(size_t size, uint bits = 0)
+    void *malloc(size_t size, uint attrs, PointerMap ptrmap)
     {
         if (!size)
         {
@@ -312,11 +316,11 @@ class GC
 
         if (!thread_needLock())
         {
-            return mallocNoSync(size, bits);
+            return mallocNoSync(size, attrs, ptrmap.bits.ptr);
         }
         else synchronized (gcLock)
         {
-            return mallocNoSync(size, bits);
+            return mallocNoSync(size, attrs, ptrmap.bits.ptr);
         }
     }
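
Note: malloc() (and calloc()/realloc() below) now take a PointerMap, whose bits
array is forwarded as ptrmap.bits.ptr. The layout of that bitmask is only
implied by how mark() consumes it further down (word 0 holds the type size in
words, the following words hold one scan bit per word), so the helper below is
an illustrative sketch under that assumption, not part of the patch:

    // Sketch: reading a pointer-map bitmask the way mark() does further down.
    // Assumed layout: pm_bitmask[0] = type size in words,
    //                 pm_bitmask[1 .. $] = one scan bit per word.
    bool word_may_have_pointer(size_t* pm_bitmask, size_t word_index)
    {
        const BITS_PER_WORD = size_t.sizeof * 8;
        size_t type_size = pm_bitmask[0];
        size_t* pm_bits = pm_bitmask + 1;
        if (word_index >= type_size)
            return false;
        return (pm_bits[word_index / BITS_PER_WORD]
                & (cast(size_t) 1 << (word_index % BITS_PER_WORD))) != 0;
    }

For example, a two-word type whose second word is a pointer would be described
by a map like [2, 0b10], whereas the conservative map used for stacks and roots
treats every word as a possible pointer.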
 
@@ -324,10 +328,14 @@ class GC
     //
     //
     //
-    private void *mallocNoSync(size_t size, uint bits = 0)
+    private void *mallocNoSync(size_t size, uint attrs, size_t* pm_bitmask)
     {
         assert(size != 0);
 
         void *p = null;
         Bins bin;
 
+        stats.malloc_started(size, attrs, pm_bitmask);
+        scope (exit)
+            stats.malloc_finished(p);
+
@@ -336,6 +344,12 @@ class GC
         if (opts.options.sentinel)
             size += SENTINEL_EXTRA;
 
+        bool has_pm = !(attrs & BlkAttr.NO_SCAN);
+        size_t pm_bitmask_size;
+        if (has_pm)
+            pm_bitmask_size = (size_t*).sizeof;
+        size += pm_bitmask_size;
+
         // Compute size bin
         // Cache previous binsize lookup - Dave Fladebo.
         static size_t lastsize = -1;
@@ -349,6 +363,7 @@ class GC
             lastbin = bin;
         }
 
+        size_t capacity; // to figure out where to store the bitmask
         if (bin < B_PAGE)
         {
             p = gcx.bucket[bin];
@@ -381,11 +396,12 @@ class GC
                 }
                 p = gcx.bucket[bin];
             }
+            capacity = binsize[bin];
 
             // Return next item from free list
             gcx.bucket[bin] = (cast(List*)p).next;
-            if( !(bits & BlkAttr.NO_SCAN) )
-                memset(p + size, 0, binsize[bin] - size);
+            if( !(attrs & BlkAttr.NO_SCAN) )
+                memset(p + size, 0, capacity - size);
             if (opts.options.mem_stomp)
                 memset(p, 0xF0, size);
         }
@@ -394,19 +410,31 @@ class GC
             p = gcx.bigAlloc(size);
             if (!p)
                 onOutOfMemoryError();
+            // Round the size up to the number of pages needed to store it
+            size_t npages = (size + PAGESIZE - 1) / PAGESIZE;
+            capacity = npages * PAGESIZE;
         }
+
+        // Store the bit mask AFTER SENTINEL_POST
+        // TODO: store it BEFORE, so the bitmask is protected too
+        if (has_pm) {
+            auto end_of_blk = cast(size_t**)(p + capacity - pm_bitmask_size);
+            *end_of_blk = pm_bitmask;
+            size -= pm_bitmask_size;
+        }
+
         if (opts.options.sentinel) {
             size -= SENTINEL_EXTRA;
             p = sentinel_add(p);
             sentinel_init(p, size);
         }
 
-        if (bits)
+        if (attrs)
         {
             Pool *pool = gcx.findPool(p);
             assert(pool);
 
-            gcx.setBits(pool, cast(size_t)(p - pool.baseAddr) / 16, bits);
+            gcx.setAttr(pool, cast(size_t)(p - pool.baseAddr) / 16, attrs);
         }
         return p;
     }
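
Note: for blocks that will be scanned (attrs without NO_SCAN), the pointer to
the type's bitmask is tucked into the last (size_t*).sizeof bytes of the
block's capacity, which is why size is grown before the bin lookup and shrunk
again once the slot has been written. A minimal sketch of the hidden slot,
using the same addressing as this hunk (store_pm/load_pm are illustrative
helpers, not part of the patch):

    // Block layout for a scannable allocation (sketch):
    //   [ user data ... | sentinel bytes, if enabled | pointer to pm_bitmask ]
    //                                                  ^ last word of capacity
    void store_pm(void* blk_base, size_t capacity, size_t* pm_bitmask)
    {
        auto end_of_blk = cast(size_t**)(blk_base + capacity - (size_t*).sizeof);
        *end_of_blk = pm_bitmask;   // written at allocation time
    }

    size_t* load_pm(void* blk_base, size_t capacity)
    {
        auto end_of_blk = cast(size_t**)(blk_base + capacity - (size_t*).sizeof);
        return *end_of_blk;         // read back by realloc(), extend() and mark()
    }
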
@@ -415,7 +443,7 @@ class GC
     /**
      *
      */
-    void *calloc(size_t size, uint bits = 0)
+    void *calloc(size_t size, uint attrs, PointerMap ptrmap)
     {
         if (!size)
         {
@@ -424,11 +452,11 @@ class GC
 
         if (!thread_needLock())
         {
-            return callocNoSync(size, bits);
+            return callocNoSync(size, attrs, ptrmap.bits.ptr);
         }
         else synchronized (gcLock)
         {
-            return callocNoSync(size, bits);
+            return callocNoSync(size, attrs, ptrmap.bits.ptr);
         }
     }
 
@@ -436,11 +464,11 @@ class GC
     //
     //
     //
-    private void *callocNoSync(size_t size, uint bits = 0)
+    private void *callocNoSync(size_t size, uint attrs, size_t* pm_bitmask)
     {
         assert(size != 0);
 
-        void *p = mallocNoSync(size, bits);
+        void *p = mallocNoSync(size, attrs, pm_bitmask);
         memset(p, 0, size);
         return p;
     }
@@ -449,15 +477,15 @@ class GC
     /**
      *
      */
-    void *realloc(void *p, size_t size, uint bits = 0)
+    void *realloc(void *p, size_t size, uint attrs, PointerMap ptrmap)
     {
         if (!thread_needLock())
         {
-            return reallocNoSync(p, size, bits);
+            return reallocNoSync(p, size, attrs, ptrmap.bits.ptr);
         }
         else synchronized (gcLock)
         {
-            return reallocNoSync(p, size, bits);
+            return reallocNoSync(p, size, attrs, ptrmap.bits.ptr);
         }
     }
 
@@ -465,7 +493,8 @@ class GC
     //
     //
     //
-    private void *reallocNoSync(void *p, size_t size, uint bits = 0)
+    private void *reallocNoSync(void *p, size_t size, uint attrs,
+            size_t* pm_bitmask)
     {
         if (!size)
         {
@@ -477,56 +506,60 @@ class GC
         }
         else if (!p)
         {
-            p = mallocNoSync(size, bits);
+            p = mallocNoSync(size, attrs, pm_bitmask);
         }
         else
         {
-            void *p2;
-            size_t psize;
+            Pool* pool = gcx.findPool(p);
+            if (pool is null)
+                return null;
+
+            // Set or retrieve attributes as appropriate
+            auto bit_i = cast(size_t)(p - pool.baseAddr) / 16;
+            if (attrs) {
+                gcx.clrAttr(pool, bit_i, BlkAttr.ALL_BITS);
+                gcx.setAttr(pool, bit_i, attrs);
+            }
+            else
+                attrs = gcx.getAttr(pool, bit_i);
+
+            void* blk_base_addr = gcx.findBase(p);
+            size_t blk_size = gcx.findSize(p);
+            bool has_pm = !(attrs & BlkAttr.NO_SCAN);
+            size_t pm_bitmask_size = 0;
+            if (has_pm) {
+                pm_bitmask_size = (size_t*).sizeof;
+                // Retrieve pointer map bit mask if appropriate
+                if (pm_bitmask is null) {
+                    auto end_of_blk = cast(size_t**)(blk_base_addr +
+                            blk_size - pm_bitmask_size);
+                    pm_bitmask = *end_of_blk;
+                }
+            }
 
             if (opts.options.sentinel)
             {
                 sentinel_Invariant(p);
-                psize = *sentinel_size(p);
-                if (psize != size)
+                size_t sentinel_stored_size = *sentinel_size(p);
+                if (sentinel_stored_size != size)
                 {
-                    if (psize)
-                    {
-                        Pool *pool = gcx.findPool(p);
-
-                        if (pool)
-                        {
-                            auto biti = cast(size_t)(p - pool.baseAddr) / 16;
-
-                            if (bits)
-                            {
-                                gcx.clrBits(pool, biti, BlkAttr.ALL_BITS);
-                                gcx.setBits(pool, biti, bits);
-                            }
-                            else
-                            {
-                                bits = gcx.getBits(pool, biti);
-                            }
-                        }
-                    }
-                    p2 = mallocNoSync(size, bits);
-                    if (psize < size)
-                        size = psize;
+                    void* p2 = mallocNoSync(size, attrs, pm_bitmask);
+                    if (sentinel_stored_size < size)
+                        size = sentinel_stored_size;
                     cstring.memcpy(p2, p, size);
                     p = p2;
                 }
             }
             else
             {
-                psize = gcx.findSize(p);        // find allocated size
-                if (psize >= PAGESIZE && size >= PAGESIZE)
+                size += pm_bitmask_size;
+                if (blk_size >= PAGESIZE && size >= PAGESIZE)
                 {
-                    auto psz = psize / PAGESIZE;
+                    auto psz = blk_size / PAGESIZE;
                     auto newsz = (size + PAGESIZE - 1) / PAGESIZE;
                     if (newsz == psz)
                         return p;
 
-                    auto pool = gcx.findPool(p);
                     auto pagenum = (p - pool.baseAddr) / PAGESIZE;
 
                     if (newsz < psz)
@@ -535,9 +568,16 @@ class GC
                         synchronized (gcLock)
                         {
                             if (opts.options.mem_stomp)
-                                memset(p + size, 0xF2, psize - size);
+                                memset(p + size - pm_bitmask_size, 0xF2,
+                                        blk_size - size - pm_bitmask_size);
                             pool.freePages(pagenum + newsz, psz - newsz);
                         }
+                        if (has_pm) {
+                            auto end_of_blk = cast(size_t**)(
+                                    blk_base_addr + (PAGESIZE * newsz) -
+                                    pm_bitmask_size);
+                            *end_of_blk = pm_bitmask;
+                        }
                         return p;
                     }
                     else if (pagenum + newsz <= pool.npages)
@@ -550,9 +590,18 @@ class GC
                                 if (i == pagenum + newsz)
                                 {
                                     if (opts.options.mem_stomp)
-                                        memset(p + psize, 0xF0, size - psize);
+                                        memset(p + blk_size - pm_bitmask_size,
+                                                0xF0, size - blk_size
+                                                - pm_bitmask_size);
                                     memset(pool.pagetable + pagenum +
                                             psz, B_PAGEPLUS, newsz - psz);
+                                    if (has_pm) {
+                                        auto end_of_blk = cast(size_t**)(
+                                                blk_base_addr +
+                                                (PAGESIZE * newsz) -
+                                                pm_bitmask_size);
+                                        *end_of_blk = pm_bitmask;
+                                    }
                                     return p;
                                 }
                                 if (i == pool.npages)
@@ -566,31 +615,14 @@ class GC
                         }
                     }
                 }
-                if (psize < size ||             // if new size is bigger
-                    psize > size * 2)           // or less than half
+                // if new size is bigger or less than half
+                if (blk_size < size || blk_size > size * 2)
                 {
-                    if (psize)
-                    {
-                        Pool *pool = gcx.findPool(p);
-
-                        if (pool)
-                        {
-                            auto biti = cast(size_t)(p - pool.baseAddr) / 16;
-
-                            if (bits)
-                            {
-                                gcx.clrBits(pool, biti, BlkAttr.ALL_BITS);
-                                gcx.setBits(pool, biti, bits);
-                            }
-                            else
-                            {
-                                bits = gcx.getBits(pool, biti);
-                            }
-                        }
-                    }
-                    p2 = mallocNoSync(size, bits);
-                    if (psize < size)
-                        size = psize;
+                    size -= pm_bitmask_size;
+                    blk_size -= pm_bitmask_size;
+                    void* p2 = mallocNoSync(size, attrs, pm_bitmask);
+                    if (blk_size < size)
+                        size = blk_size;
                     cstring.memcpy(p2, p, size);
                     p = p2;
                 }
@@ -633,18 +665,39 @@ class GC
     body
     {
         if (opts.options.sentinel)
-        {
             return 0;
+
+        Pool* pool = gcx.findPool(p);
+        if (pool is null)
+            return 0;
+
+        // Retrieve attributes
+        auto bit_i = cast(size_t)(p - pool.baseAddr) / 16;
+        uint attrs = gcx.getAttr(pool, bit_i);
+
+        void* blk_base_addr = gcx.findBase(p);
+        size_t blk_size = gcx.findSize(p);
+        bool has_pm = !(attrs & BlkAttr.NO_SCAN);
+        size_t* pm_bitmask = null;
+        size_t pm_bitmask_size = 0;
+        if (has_pm) {
+            pm_bitmask_size = (size_t*).sizeof;
+            // Retrieve pointer map bit mask
+            auto end_of_blk = cast(size_t**)(blk_base_addr +
+                    blk_size - pm_bitmask_size);
+            pm_bitmask = *end_of_blk;
         }
-        auto psize = gcx.findSize(p);   // find allocated size
-        if (psize < PAGESIZE)
-            return 0;                   // cannot extend buckets
 
-        auto psz = psize / PAGESIZE;
+        if (blk_size < PAGESIZE)
+            return 0; // cannot extend buckets
+
+        minsize += pm_bitmask_size;
+        maxsize += pm_bitmask_size;
+
+        auto psz = blk_size / PAGESIZE;
         auto minsz = (minsize + PAGESIZE - 1) / PAGESIZE;
         auto maxsz = (maxsize + PAGESIZE - 1) / PAGESIZE;
 
-        auto pool = gcx.findPool(p);
         auto pagenum = (p - pool.baseAddr) / PAGESIZE;
 
         size_t sz;
@@ -662,12 +715,22 @@ class GC
         }
         if (sz < minsz)
             return 0;
+
+        size_t new_size = (psz + sz) * PAGESIZE;
+
         if (opts.options.mem_stomp)
-            memset(p + psize, 0xF0, (psz + sz) * PAGESIZE - psize);
+            memset(p + blk_size - pm_bitmask_size, 0xF0,
+                    new_size - blk_size - pm_bitmask_size);
         memset(pool.pagetable + pagenum + psz, B_PAGEPLUS, sz);
         gcx.p_cache = null;
         gcx.size_cache = 0;
-        return (psz + sz) * PAGESIZE;
+
+        if (has_pm) {
+            new_size -= pm_bitmask_size;
+            auto end_of_blk = cast(size_t**)(blk_base_addr + new_size);
+            *end_of_blk = pm_bitmask;
+        }
+        return new_size;
     }
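
Note: whenever a block's capacity changes in place (the realloc() paths above
and extend() here), the hidden bitmask pointer has to be re-written at the new
end of the block, and the size reported back to the caller excludes that slot.
Condensed from this hunk, with its own names:

    // After the extra pages were appended (sketch):
    new_size -= pm_bitmask_size;        // caller never sees the hidden slot
    auto end_of_blk = cast(size_t**)(blk_base_addr + new_size);
    *end_of_blk = pm_bitmask;           // so later lookups still find the map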
 
 
@@ -735,7 +798,7 @@ class GC
         Pool*  pool;
         size_t pagenum;
         Bins   bin;
-        size_t biti;
+        size_t bit_i;
 
         // Find which page it is in
         pool = gcx.findPool(p);
@@ -746,8 +809,8 @@ class GC
             p = sentinel_sub(p);
         }
         pagenum = cast(size_t)(p - pool.baseAddr) / PAGESIZE;
-        biti = cast(size_t)(p - pool.baseAddr) / 16;
-        gcx.clrBits(pool, biti, BlkAttr.ALL_BITS);
+        bit_i = cast(size_t)(p - pool.baseAddr) / 16;
+        gcx.clrAttr(pool, bit_i, BlkAttr.ALL_BITS);
 
         bin = cast(Bins)pool.pagetable[pagenum];
         if (bin == B_PAGE)              // if large alloc
@@ -841,38 +904,45 @@ class GC
         assert (p);
 
         if (opts.options.sentinel)
-        {
             p = sentinel_sub(p);
-            size_t size = gcx.findSize(p);
 
+        Pool* pool = gcx.findPool(p);
+        if (pool is null)
+            return 0;
+
+        auto biti = cast(size_t)(p - pool.baseAddr) / 16;
+        uint attrs = gcx.getAttr(pool, biti);
+
+        size_t size = gcx.findSize(p);
+        bool has_pm = !(attrs & BlkAttr.NO_SCAN);
+        size_t pm_bitmask_size = 0;
+        if (has_pm)
+            pm_bitmask_size = (size_t*).sizeof;
+
+        if (opts.options.sentinel) {
             // Check for interior pointer
             // This depends on:
             // 1) size is a power of 2 for less than PAGESIZE values
             // 2) base of memory pool is aligned on PAGESIZE boundary
             if (cast(size_t)p & (size - 1) & (PAGESIZE - 1))
-                size = 0;
-            return size ? size - SENTINEL_EXTRA : 0;
+                return 0;
+            return size - SENTINEL_EXTRA - pm_bitmask_size;
         }
-        else
-        {
+        else {
             if (p == gcx.p_cache)
                 return gcx.size_cache;
 
-            size_t size = gcx.findSize(p);
-
             // Check for interior pointer
             // This depends on:
             // 1) size is a power of 2 for less than PAGESIZE values
             // 2) base of memory pool is aligned on PAGESIZE boundary
             if (cast(size_t)p & (size - 1) & (PAGESIZE - 1))
-                size = 0;
-            else
-            {
-                gcx.p_cache = p;
-                gcx.size_cache = size;
-            }
+                return 0;
+
+            gcx.p_cache = p;
+            gcx.size_cache = size - pm_bitmask_size;
 
-            return size;
+            return gcx.size_cache;
         }
     }
 
@@ -1602,10 +1672,12 @@ struct Gcx
             }
 
             ////////////////////////////////////////////////////////////////////
-            // getBits
+            // getAttr
             ////////////////////////////////////////////////////////////////////
 
-            info.attr = getBits(pool, cast(size_t)(offset / 16));
+            info.attr = getAttr(pool, cast(size_t)(offset / 16));
+            if (!(info.attr & BlkAttr.NO_SCAN))
+                info.size -= (size_t*).sizeof;  // bitmask
         }
         return info;
     }
@@ -1878,38 +1950,61 @@ struct Gcx
     }
 
 
+    /**
+     * Marks a range of memory using the conservative bit mask.  Used for
+     * the stack, for the data segment, and additional memory ranges.
+     */
+    void mark_conservative(void* pbot, void* ptop)
+    {
+        mark(pbot, ptop, PointerMap.init.bits.ptr);
+    }
+
+
     /**
      * Search a range of memory values and mark any pointers into the GC pool.
      */
-    void mark(void *pbot, void *ptop)
+    void mark(void *pbot, void *ptop, size_t* pm_bitmask)
     {
+        const BITS_PER_WORD = size_t.sizeof * 8;
+
         void **p1 = cast(void **)pbot;
         void **p2 = cast(void **)ptop;
         size_t pcache = 0;
         uint changes = 0;
 
+        // TODO: add option to be conservative
+        // force conservative scanning
+        //pm_bitmask = PointerMap.init.bits.ptr;
+
+        size_t type_size = pm_bitmask[0];
+        size_t* pm_bits = pm_bitmask + 1;
+
         //printf("marking range: %p -> %p\n", pbot, ptop);
-        for (; p1 < p2; p1++)
-        {
-            Pool *pool;
-            byte *p = cast(byte *)(*p1);
+        for (; p1 + type_size <= p2; p1 += type_size) {
+            for (size_t n = 0; n < type_size; n++) {
+                // scan bit set for this word
+                if (!(pm_bits[n / BITS_PER_WORD] & (1 << (n % BITS_PER_WORD))))
+                    continue;
+
+                void* p = *(p1 + n);
+
+                if (p < minAddr || p >= maxAddr)
+                    continue;
 
-            if (p >= minAddr && p < maxAddr)
-            {
                 if ((cast(size_t)p & ~(PAGESIZE-1)) == pcache)
                     continue;
 
-                pool = findPool(p);
+                Pool* pool = findPool(p);
                 if (pool)
                 {
                     size_t offset = cast(size_t)(p - pool.baseAddr);
-                    size_t biti;
+                    size_t bit_i;
                     size_t pn = offset / PAGESIZE;
                     Bins   bin = cast(Bins)pool.pagetable[pn];
 
                     // Adjust bit to be at start of allocated memory block
                     if (bin <= B_PAGE)
-                        biti = (offset & notbinsize[bin]) >> 4;
+                        bit_i = (offset & notbinsize[bin]) >> 4;
                     else if (bin == B_PAGEPLUS)
                     {
                         do
@@ -1917,7 +2012,7 @@ struct Gcx
                             --pn;
                         }
                         while (cast(Bins)pool.pagetable[pn] == B_PAGEPLUS);
-                        biti = pn * (PAGESIZE / 16);
+                        bit_i = pn * (PAGESIZE / 16);
                     }
                     else
                     {
@@ -1928,12 +2023,12 @@ struct Gcx
                     if (bin >= B_PAGE) // Cache B_PAGE and B_PAGEPLUS lookups
                         pcache = cast(size_t)p & ~(PAGESIZE-1);
 
-                    if (!pool.mark.test(biti))
+                    if (!pool.mark.test(bit_i))
                     {
-                        pool.mark.set(biti);
-                        if (!pool.noscan.test(biti))
+                        pool.mark.set(bit_i);
+                        if (!pool.noscan.test(bit_i))
                         {
-                            pool.scan.set(biti);
+                            pool.scan.set(bit_i);
                             changes = 1;
                         }
                     }
@@ -1943,12 +2038,15 @@ struct Gcx
         anychanges |= changes;
     }
 
-
     /**
      * Return number of full pages free'd.
      */
     size_t fullcollectshell()
     {
+        stats.collection_started();
+        scope (exit)
+            stats.collection_finished();
+
         // The purpose of the 'shell' is to ensure all the registers
         // get put on the stack so they'll be scanned
         void *sp;
@@ -2042,6 +2140,7 @@ struct Gcx
         debug(COLLECT_PRINTF) printf("Gcx.fullcollect()\n");
 
         thread_suspendAll();
+        stats.world_stopped();
 
         p_cache = null;
         size_cache = 0;
@@ -2072,17 +2171,17 @@ struct Gcx
             pool.mark.copy(&pool.freebits);
         }
 
-        rt_scanStaticData( &mark );
+        rt_scanStaticData( &mark_conservative );
 
         if (!noStack)
         {
             // Scan stacks and registers for each paused thread
-            thread_scanAll( &mark, stackTop );
+            thread_scanAll( &mark_conservative, stackTop );
         }
 
         // Scan roots
         debug(COLLECT_PRINTF) printf("scan roots[]\n");
-        mark(roots.ptr, roots.ptr + roots.length);
+        mark_conservative(roots.ptr, roots.ptr + roots.length);
 
         // Scan ranges
         debug(COLLECT_PRINTF) printf("scan ranges[]\n");
@@ -2090,7 +2189,7 @@ struct Gcx
         for (n = 0; n < ranges.length; n++)
         {
             debug(COLLECT_PRINTF) printf("\t%x .. %x\n", ranges[n].pbot, ranges[n].ptop);
-            mark(ranges[n].pbot, ranges[n].ptop);
+            mark_conservative(ranges[n].pbot, ranges[n].ptop);
         }
         //log--;
 
@@ -2137,9 +2236,11 @@ struct Gcx
 
                         pn = cast(size_t)(o - pool.baseAddr) / PAGESIZE;
                         bin = cast(Bins)pool.pagetable[pn];
-                        if (bin < B_PAGE)
-                        {
-                            mark(o, o + binsize[bin]);
+                        if (bin < B_PAGE) {
+                            auto end_of_blk = cast(size_t**)(o + binsize[bin] -
+                                    (size_t*).sizeof);
+                            size_t* pm_bitmask = *end_of_blk;
+                            mark(o, end_of_blk, pm_bitmask);
                         }
                         else if (bin == B_PAGE || bin == B_PAGEPLUS)
                         {
@@ -2149,9 +2250,15 @@ struct Gcx
                                     pn--;
                             }
                             u = 1;
-                            while (pn + u < pool.npages && pool.pagetable[pn + u] == B_PAGEPLUS)
+                            while (pn + u < pool.npages &&
+                                    pool.pagetable[pn + u] == B_PAGEPLUS)
                                 u++;
-                            mark(o, o + u * PAGESIZE);
+
+                            size_t blk_size = u * PAGESIZE;
+                            auto end_of_blk = cast(size_t**)(o + blk_size -
+                                    (size_t*).sizeof);
+                            size_t* pm_bitmask = *end_of_blk;
+                            mark(o, end_of_blk, pm_bitmask);
                         }
                     }
                 }
@@ -2159,6 +2266,7 @@ struct Gcx
         }
 
         thread_resumeAll();
+        stats.world_started();
 
         // Free up everything not marked
         debug(COLLECT_PRINTF) printf("\tfree'ing\n");
@@ -2178,24 +2286,25 @@ struct Gcx
                     auto size = binsize[bin];
                     byte* p = pool.baseAddr + pn * PAGESIZE;
                     byte* ptop = p + PAGESIZE;
-                    size_t biti = pn * (PAGESIZE/16);
-                    size_t bitstride = size / 16;
+                    size_t bit_i = pn * (PAGESIZE/16);
+                    size_t bit_stride = size / 16;
 
     version(none) // BUG: doesn't work because freebits() must also be cleared
     {
                     // If free'd entire page
-                    if (bbase[0] == 0 && bbase[1] == 0 && bbase[2] == 0 && bbase[3] == 0 &&
-                        bbase[4] == 0 && bbase[5] == 0 && bbase[6] == 0 && bbase[7] == 0)
+                    if (bbase[0] == 0 && bbase[1] == 0 && bbase[2] == 0 &&
+                            bbase[3] == 0 && bbase[4] == 0 && bbase[5] == 0 &&
+                            bbase[6] == 0 && bbase[7] == 0)
                     {
-                        for (; p < ptop; p += size, biti += bitstride)
+                        for (; p < ptop; p += size, bit_i += bit_stride)
                         {
-                            if (pool.finals.nbits && pool.finals.testClear(biti)) {
+                            if (pool.finals.nbits && pool.finals.testClear(bit_i)) {
                                 if (opts.options.sentinel)
                                     rt_finalize(cast(List *)sentinel_add(p), false/*noStack > 0*/);
                                 else
                                     rt_finalize(cast(List *)p, false/*noStack > 0*/);
                             }
-                            gcx.clrBits(pool, biti, BlkAttr.ALL_BITS);
+                            gcx.clrAttr(pool, bit_i, BlkAttr.ALL_BITS);
 
                             List *list = cast(List *)p;
 
@@ -2207,21 +2316,21 @@ struct Gcx
                         continue;
                     }
     }
-                    for (; p < ptop; p += size, biti += bitstride)
+                    for (; p < ptop; p += size, bit_i += bit_stride)
                     {
-                        if (!pool.mark.test(biti))
+                        if (!pool.mark.test(bit_i))
                         {
                             if (opts.options.sentinel)
                                 sentinel_Invariant(sentinel_add(p));
 
-                            pool.freebits.set(biti);
-                            if (pool.finals.nbits && pool.finals.testClear(biti)) {
+                            pool.freebits.set(bit_i);
+                            if (pool.finals.nbits && pool.finals.testClear(bit_i)) {
                                 if (opts.options.sentinel)
                                     rt_finalize(cast(List *)sentinel_add(p), false/*noStack > 0*/);
                                 else
                                     rt_finalize(cast(List *)p, false/*noStack > 0*/);
                             }
-                            clrBits(pool, biti, BlkAttr.ALL_BITS);
+                            clrAttr(pool, bit_i, BlkAttr.ALL_BITS);
 
                             List *list = cast(List *)p;
 
@@ -2234,19 +2343,19 @@ struct Gcx
                 }
                 else if (bin == B_PAGE)
                 {
-                    size_t biti = pn * (PAGESIZE / 16);
-                    if (!pool.mark.test(biti))
+                    size_t bit_i = pn * (PAGESIZE / 16);
+                    if (!pool.mark.test(bit_i))
                     {
                         byte *p = pool.baseAddr + pn * PAGESIZE;
                         if (opts.options.sentinel)
                             sentinel_Invariant(sentinel_add(p));
-                        if (pool.finals.nbits && pool.finals.testClear(biti)) {
+                        if (pool.finals.nbits && pool.finals.testClear(bit_i)) {
                             if (opts.options.sentinel)
                                 rt_finalize(sentinel_add(p), false/*noStack > 0*/);
                             else
                                 rt_finalize(p, false/*noStack > 0*/);
                         }
-                        clrBits(pool, biti, BlkAttr.ALL_BITS);
+                        clrAttr(pool, bit_i, BlkAttr.ALL_BITS);
 
                         debug(COLLECT_PRINTF) printf("\tcollecting big %x\n", p);
                         pool.pagetable[pn] = B_FREE;
@@ -2282,21 +2391,21 @@ struct Gcx
             for (size_t pn = 0; pn < pool.npages; pn++)
             {
                 Bins   bin = cast(Bins)pool.pagetable[pn];
-                size_t biti;
+                size_t bit_i;
                 size_t u;
 
                 if (bin < B_PAGE)
                 {
                     size_t size = binsize[bin];
-                    size_t bitstride = size / 16;
-                    size_t bitbase = pn * (PAGESIZE / 16);
-                    size_t bittop = bitbase + (PAGESIZE / 16);
+                    size_t bit_stride = size / 16;
+                    size_t bit_base = pn * (PAGESIZE / 16);
+                    size_t bit_top = bit_base + (PAGESIZE / 16);
                     byte*  p;
 
-                    biti = bitbase;
-                    for (biti = bitbase; biti < bittop; biti += bitstride)
+                    bit_i = bit_base;
+                    for (; bit_i < bit_top; bit_i += bit_stride)
                     {
-                        if (!pool.freebits.test(biti))
+                        if (!pool.freebits.test(bit_i))
                             goto Lnotfree;
                     }
                     pool.pagetable[pn] = B_FREE;
@@ -2307,11 +2416,12 @@ struct Gcx
                     p = pool.baseAddr + pn * PAGESIZE;
                     for (u = 0; u < PAGESIZE; u += size)
                     {
-                        biti = bitbase + u / 16;
-                        if (pool.freebits.test(biti))
+                        bit_i = bit_base + u / 16;
+                        if (pool.freebits.test(bit_i))
                         {
                             List *list = cast(List *)(p + u);
-                            if (list.next != bucket[bin])       // avoid unnecessary writes
+                            // avoid unnecessary writes
+                            if (list.next != bucket[bin])
                                 list.next = bucket[bin];
                             bucket[bin] = list;
                         }
@@ -2330,31 +2440,31 @@ struct Gcx
     /**
      *
      */
-    uint getBits(Pool* pool, size_t biti)
+    uint getAttr(Pool* pool, size_t bit_i)
     in
     {
         assert( pool );
     }
     body
     {
-        uint bits;
+        uint attrs;
 
         if (pool.finals.nbits &&
-            pool.finals.test(biti))
-            bits |= BlkAttr.FINALIZE;
-        if (pool.noscan.test(biti))
-            bits |= BlkAttr.NO_SCAN;
+            pool.finals.test(bit_i))
+            attrs |= BlkAttr.FINALIZE;
+        if (pool.noscan.test(bit_i))
+            attrs |= BlkAttr.NO_SCAN;
 //        if (pool.nomove.nbits &&
-//            pool.nomove.test(biti))
-//            bits |= BlkAttr.NO_MOVE;
-        return bits;
+//            pool.nomove.test(bit_i))
+//            attrs |= BlkAttr.NO_MOVE;
+        return attrs;
     }
 
 
     /**
      *
      */
-    void setBits(Pool* pool, size_t biti, uint mask)
+    void setAttr(Pool* pool, size_t bit_i, uint mask)
     in
     {
         assert( pool );
@@ -2365,17 +2475,17 @@ struct Gcx
         {
             if (!pool.finals.nbits)
                 pool.finals.alloc(pool.mark.nbits);
-            pool.finals.set(biti);
+            pool.finals.set(bit_i);
         }
         if (mask & BlkAttr.NO_SCAN)
         {
-            pool.noscan.set(biti);
+            pool.noscan.set(bit_i);
         }
 //        if (mask & BlkAttr.NO_MOVE)
 //        {
 //            if (!pool.nomove.nbits)
 //                pool.nomove.alloc(pool.mark.nbits);
-//            pool.nomove.set(biti);
+//            pool.nomove.set(bit_i);
 //        }
     }
 
@@ -2383,7 +2493,7 @@ struct Gcx
     /**
      *
      */
-    void clrBits(Pool* pool, size_t biti, uint mask)
+    void clrAttr(Pool* pool, size_t bit_i, uint mask)
     in
     {
         assert( pool );
@@ -2391,11 +2501,11 @@ struct Gcx
     body
     {
         if (mask & BlkAttr.FINALIZE && pool.finals.nbits)
-            pool.finals.clear(biti);
+            pool.finals.clear(bit_i);
         if (mask & BlkAttr.NO_SCAN)
-            pool.noscan.clear(biti);
+            pool.noscan.clear(bit_i);
 //        if (mask & BlkAttr.NO_MOVE && pool.nomove.nbits)
-//            pool.nomove.clear(biti);
+//            pool.nomove.clear(bit_i);
     }
 
 }