Allow census without live word count
author      Ben Gamari <ben@smart-cactus.org>
            Sat, 16 Mar 2019 14:24:24 +0000 (10:24 -0400)
committer   Ben Gamari <ben@smart-cactus.org>
            Tue, 22 Oct 2019 16:17:00 +0000 (12:17 -0400)
Otherwise the census is unsafe to take while mutators are running:
with concurrent mutation we cannot assume that every block in the
nonmoving heap is a valid closure, so live words cannot be counted.

rts/sm/NonMovingCensus.c
rts/sm/NonMovingCensus.h

diff --git a/rts/sm/NonMovingCensus.c b/rts/sm/NonMovingCensus.c
index 349ac77..1f28f4e 100644
 #include "Trace.h"
 #include "NonMovingCensus.h"
 
-struct NonmovingAllocCensus {
-    uint32_t n_active_segs;
-    uint32_t n_filled_segs;
-    uint32_t n_live_blocks;
-    uint32_t n_live_words;
-};
-
 // N.B. This may miss segments in the event of concurrent mutation (e.g. if a
 // mutator retires its current segment to the filled list).
-static struct NonmovingAllocCensus
-nonmovingAllocatorCensus(struct NonmovingAllocator *alloc)
+//
+// collect_live_words should only be true when we can guarantee that all
+// mutators and minor GCs are stopped. In that case it is safe to look at the
+// active and current segments, so we can also collect statistics on live words.
+static inline struct NonmovingAllocCensus
+nonmovingAllocatorCensus_(struct NonmovingAllocator *alloc, bool collect_live_words)
 {
     struct NonmovingAllocCensus census = {0, 0, 0, 0};
 
@@ -32,12 +29,14 @@ nonmovingAllocatorCensus(struct NonmovingAllocator *alloc)
          seg != NULL;
          seg = seg->link)
     {
-        census.n_filled_segs++;
-        census.n_live_blocks += nonmovingSegmentBlockCount(seg);
         unsigned int n = nonmovingSegmentBlockCount(seg);
-        for (unsigned int i=0; i < n; i++) {
-            StgClosure *c = (StgClosure *) nonmovingSegmentGetBlock(seg, i);
-            census.n_live_words += closure_sizeW(c);
+        census.n_filled_segs++;
+        census.n_live_blocks += n;
+        if (collect_live_words) {
+            for (unsigned int i=0; i < n; i++) {
+                StgClosure *c = (StgClosure *) nonmovingSegmentGetBlock(seg, i);
+                census.n_live_words += closure_sizeW(c);
+            }
         }
     }
 
@@ -50,7 +49,8 @@ nonmovingAllocatorCensus(struct NonmovingAllocator *alloc)
         for (unsigned int i=0; i < n; i++) {
             if (nonmovingGetMark(seg, i)) {
                 StgClosure *c = (StgClosure *) nonmovingSegmentGetBlock(seg, i);
-                census.n_live_words += closure_sizeW(c);
+                if (collect_live_words)
+                    census.n_live_words += closure_sizeW(c);
                 census.n_live_blocks++;
             }
         }
@@ -63,7 +63,8 @@ nonmovingAllocatorCensus(struct NonmovingAllocator *alloc)
         for (unsigned int i=0; i < n; i++) {
             if (nonmovingGetMark(seg, i)) {
                 StgClosure *c = (StgClosure *) nonmovingSegmentGetBlock(seg, i);
-                census.n_live_words += closure_sizeW(c);
+                if (collect_live_words)
+                    census.n_live_words += closure_sizeW(c);
                 census.n_live_blocks++;
             }
         }
@@ -71,6 +72,22 @@ nonmovingAllocatorCensus(struct NonmovingAllocator *alloc)
     return census;
 }
 
+/* This must not be used when mutators are active since it assumes that
+ * all blocks in the nonmoving heap are valid closures.
+ */
+struct NonmovingAllocCensus
+nonmovingAllocatorCensusWithWords(struct NonmovingAllocator *alloc)
+{
+    return nonmovingAllocatorCensus_(alloc, true);
+}
+
+struct NonmovingAllocCensus
+nonmovingAllocatorCensus(struct NonmovingAllocator *alloc)
+{
+    return nonmovingAllocatorCensus_(alloc, false);
+}
+
+
 void nonmovingPrintAllocatorCensus()
 {
     for (int i=0; i < NONMOVING_ALLOCA_CNT; i++) {
diff --git a/rts/sm/NonMovingCensus.h b/rts/sm/NonMovingCensus.h
index a4f84c4..1c7c657 100644
@@ -8,4 +8,18 @@
 
 #pragma once
 
+struct NonmovingAllocCensus {
+    uint32_t n_active_segs;
+    uint32_t n_filled_segs;
+    uint32_t n_live_blocks;
+    uint32_t n_live_words;
+};
+
+
+struct NonmovingAllocCensus
+nonmovingAllocatorCensusWithWords(struct NonmovingAllocator *alloc);
+
+struct NonmovingAllocCensus
+nonmovingAllocatorCensus(struct NonmovingAllocator *alloc);
+
 void nonmovingPrintAllocatorCensus(void);
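
For context, a minimal sketch (not part of this patch) of how a caller inside
the RTS might choose between the two entry points. The helper
reportNonmovingCensus and its all_stopped argument are hypothetical; only the
two census functions, struct NonmovingAllocCensus, and NONMOVING_ALLOCA_CNT
come from this patch, and nonmovingHeap.allocators is assumed to match the
RTS's nonmoving heap layout.

#include "Rts.h"
#include "NonMoving.h"
#include "NonMovingCensus.h"

// Hypothetical helper: take and report a census of every nonmoving allocator.
// all_stopped must only be true when all mutators and minor GCs are
// guaranteed to be stopped.
static void reportNonmovingCensus(bool all_stopped)
{
    for (int i = 0; i < NONMOVING_ALLOCA_CNT; i++) {
        // Assumption: nonmovingHeap.allocators[i] is the allocator for
        // block size class i, as declared in rts/sm/NonMoving.h.
        struct NonmovingAllocator *alloc = nonmovingHeap.allocators[i];

        struct NonmovingAllocCensus census;
        if (all_stopped) {
            // Safe to dereference blocks as closures, so live words can
            // also be counted.
            census = nonmovingAllocatorCensusWithWords(alloc);
        } else {
            // Mutators may be allocating concurrently: only count segments
            // and live blocks, never touch block contents.
            census = nonmovingAllocatorCensus(alloc);
        }

        debugBelch("alloca %d: %u filled segs, %u live blocks, %u live words\n",
                   i,
                   (unsigned int) census.n_filled_segs,
                   (unsigned int) census.n_live_blocks,
                   (unsigned int) census.n_live_words);
    }
}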