NonmovingCensus: Emit samples to eventlog
rts/sm/NonMovingCensus.c
/* -----------------------------------------------------------------------------
 *
 * (c) The GHC Team, 1998-2018
 *
 * Non-moving garbage collector and allocator: Accounting census
 *
 * This is a simple space accounting census useful for characterising
 * fragmentation in the nonmoving heap.
 *
 * ---------------------------------------------------------------------------*/

#include "Rts.h"
#include "NonMoving.h"
#include "Trace.h"
#include "NonMovingCensus.h"

// N.B. This may miss segments in the event of concurrent mutation (e.g. if a
// mutator retires its current segment to the filled list).
//
// collect_live_words should only be set when we can guarantee that all
// mutators and minor GCs are stopped. Only then is it safe to examine the
// blocks of active and current segments as closures, allowing us to also
// collect statistics on live words.
static inline struct NonmovingAllocCensus
nonmovingAllocatorCensus_(struct NonmovingAllocator *alloc, bool collect_live_words)
{
    struct NonmovingAllocCensus census = {0, 0, 0, 0};

    // Filled segments are fully allocated, so every block is counted as live.
    for (struct NonmovingSegment *seg = alloc->filled;
         seg != NULL;
         seg = seg->link)
    {
        unsigned int n = nonmovingSegmentBlockCount(seg);
        census.n_filled_segs++;
        census.n_live_blocks += n;
        if (collect_live_words) {
            for (unsigned int i=0; i < n; i++) {
                StgClosure *c = (StgClosure *) nonmovingSegmentGetBlock(seg, i);
                census.n_live_words += closure_sizeW(c);
            }
        }
    }

    // Active segments are only partially allocated; count only marked blocks.
    for (struct NonmovingSegment *seg = alloc->active;
         seg != NULL;
         seg = seg->link)
    {
        census.n_active_segs++;
        unsigned int n = nonmovingSegmentBlockCount(seg);
        for (unsigned int i=0; i < n; i++) {
            if (nonmovingGetMark(seg, i)) {
                StgClosure *c = (StgClosure *) nonmovingSegmentGetBlock(seg, i);
                if (collect_live_words)
                    census.n_live_words += closure_sizeW(c);
                census.n_live_blocks++;
            }
        }
    }

    // Likewise for the segment that each capability is currently allocating into.
    for (unsigned int cap=0; cap < n_capabilities; cap++)
    {
        struct NonmovingSegment *seg = alloc->current[cap];
        unsigned int n = nonmovingSegmentBlockCount(seg);
        for (unsigned int i=0; i < n; i++) {
            if (nonmovingGetMark(seg, i)) {
                StgClosure *c = (StgClosure *) nonmovingSegmentGetBlock(seg, i);
                if (collect_live_words)
                    census.n_live_words += closure_sizeW(c);
                census.n_live_blocks++;
            }
        }
    }
    return census;
}

/* This must not be used when mutators are active since it assumes that
 * all blocks in the nonmoving heap are valid closures.
 */
struct NonmovingAllocCensus
nonmovingAllocatorCensusWithWords(struct NonmovingAllocator *alloc)
{
    return nonmovingAllocatorCensus_(alloc, true);
}

struct NonmovingAllocCensus
nonmovingAllocatorCensus(struct NonmovingAllocator *alloc)
{
    return nonmovingAllocatorCensus_(alloc, false);
}

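/* A hypothetical usage sketch (not part of the RTS proper): aggregate the
 * per-allocator censuses into whole-heap totals using the variant that does
 * not touch live words and is therefore usable while mutators run (subject to
 * the racy caveat above).  The helper and struct names are illustrative only.
 */
struct NonmovingHeapTotalsExample {
    uint32_t total_active_segs;
    uint32_t total_filled_segs;
    uint32_t total_live_blocks;
};

static struct NonmovingHeapTotalsExample
nonmovingCensusTotals_example(void)
{
    struct NonmovingHeapTotalsExample totals = {0, 0, 0};
    for (int i = 0; i < NONMOVING_ALLOCA_CNT; i++) {
        // nonmovingAllocatorCensus never calls closure_sizeW, so it does not
        // require mutators to be stopped.
        struct NonmovingAllocCensus census =
            nonmovingAllocatorCensus(nonmovingHeap.allocators[i]);
        totals.total_active_segs += census.n_active_segs;
        totals.total_filled_segs += census.n_filled_segs;
        totals.total_live_blocks += census.n_live_blocks;
    }
    return totals;
}
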
void nonmovingPrintAllocatorCensus(void)
{
    if (!RtsFlags.GcFlags.useNonmoving)
        return;

    for (int i=0; i < NONMOVING_ALLOCA_CNT; i++) {
        struct NonmovingAllocCensus census =
            nonmovingAllocatorCensus(nonmovingHeap.allocators[i]);

        uint32_t blk_size = 1 << (i + NONMOVING_ALLOCA0);
        // We define occupancy as the fraction of space that is used for useful
        // data (that is, live and not slop).
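        // For example (illustrative numbers only): with 8-byte words, a
        // 64-byte block size, 100 live blocks, and 500 live words, occupancy
        // is 100.0 * 500 * 8 / (100 * 64) = 62.5%.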
        // An empty size class counts as fully occupied; checking first also
        // avoids dividing by zero.
        double occupancy = 100.0;
        if (census.n_live_blocks > 0)
            occupancy = 100.0 * census.n_live_words * sizeof(W_)
                / (census.n_live_blocks * blk_size);
        (void) occupancy; // silence warning if !DEBUG
        debugTrace(DEBUG_nonmoving_gc, "Allocator %d (%d bytes - %d bytes): "
                   "%d active segs, %d filled segs, %d live blocks, %d live words "
                   "(%2.1f%% occupancy)",
                   i, 1 << (i + NONMOVING_ALLOCA0 - 1), 1 << (i + NONMOVING_ALLOCA0),
                   census.n_active_segs, census.n_filled_segs, census.n_live_blocks, census.n_live_words,
                   occupancy);
    }
}

void nonmovingTraceAllocatorCensus(void)
{
#if defined(TRACING)
    // Nothing to do unless the nonmoving collector is in use and tracing of
    // its events has been requested.
    if (!RtsFlags.GcFlags.useNonmoving || !TRACE_nonmoving_gc)
        return;

    for (int i=0; i < NONMOVING_ALLOCA_CNT; i++) {
        const struct NonmovingAllocCensus census =
            nonmovingAllocatorCensus(nonmovingHeap.allocators[i]);
        const uint32_t log_blk_size = i + NONMOVING_ALLOCA0;
        traceNonmovingHeapCensus(log_blk_size, &census);
    }
#endif
}
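
/* Illustrative only (not part of the RTS): a consumer of the traced samples
 * could recover the byte range that a census describes from its log_blk_size,
 * mirroring the bounds printed by nonmovingPrintAllocatorCensus above.  The
 * helper name is hypothetical. */
static void
nonmovingCensusBlockSizeBounds_example(uint32_t log_blk_size,
                                       uint32_t *lower_bytes,
                                       uint32_t *upper_bytes)
{
    // Same bounds as printed above for allocator i, where
    // log_blk_size = i + NONMOVING_ALLOCA0.
    *lower_bytes = 1 << (log_blk_size - 1);
    *upper_bytes = 1 << log_blk_size;
}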