/* Provenance: ghc.git commit da8236e1803f8ec8ffd9a74365aa6cead946c5c6
 * File: rts/sm/NonMovingScav.c */
1 #include "Rts.h"
2 #include "RtsUtils.h"
3 #include "NonMoving.h"
4 #include "NonMovingScav.h"
5 #include "Capability.h"
6 #include "Scav.h"
7 #include "Evac.h"
8 #include "GCThread.h" // for GCUtils.h
9 #include "GCUtils.h"
10 #include "Printer.h"
11 #include "TraceDump.h"
12 #include "MarkWeak.h" // scavengeLiveWeak
13 #include "Trace.h"
14
15 void
16 nonmovingScavengeOne (StgClosure *q)
17 {
18 ASSERT(LOOKS_LIKE_CLOSURE_PTR(q));
19 StgPtr p = (StgPtr)q;
20 const StgInfoTable *info = get_itbl(q);
21 const bool saved_eager_promotion = gct->eager_promotion;
22
23 switch (info->type) {
24
25 case MVAR_CLEAN:
26 case MVAR_DIRTY:
27 {
28 StgMVar *mvar = ((StgMVar *)p);
29 gct->eager_promotion = false;
30 evacuate((StgClosure **)&mvar->head);
31 evacuate((StgClosure **)&mvar->tail);
32 evacuate((StgClosure **)&mvar->value);
33 gct->eager_promotion = saved_eager_promotion;
34 if (gct->failed_to_evac) {
35 mvar->header.info = &stg_MVAR_DIRTY_info;
36 } else {
37 mvar->header.info = &stg_MVAR_CLEAN_info;
38 }
39 break;
40 }
41
42 case TVAR:
43 {
44 StgTVar *tvar = ((StgTVar *)p);
45 gct->eager_promotion = false;
46 evacuate((StgClosure **)&tvar->current_value);
47 evacuate((StgClosure **)&tvar->first_watch_queue_entry);
48 gct->eager_promotion = saved_eager_promotion;
49 if (gct->failed_to_evac) {
50 tvar->header.info = &stg_TVAR_DIRTY_info;
51 } else {
52 tvar->header.info = &stg_TVAR_CLEAN_info;
53 }
54 break;
55 }
56
57 case FUN_2_0:
58 scavenge_fun_srt(info);
59 evacuate(&((StgClosure *)p)->payload[1]);
60 evacuate(&((StgClosure *)p)->payload[0]);
61 break;
62
63 case THUNK_2_0:
64 scavenge_thunk_srt(info);
65 evacuate(&((StgThunk *)p)->payload[1]);
66 evacuate(&((StgThunk *)p)->payload[0]);
67 break;
68
69 case CONSTR_2_0:
70 evacuate(&((StgClosure *)p)->payload[1]);
71 evacuate(&((StgClosure *)p)->payload[0]);
72 break;
73
74 case THUNK_1_0:
75 scavenge_thunk_srt(info);
76 evacuate(&((StgThunk *)p)->payload[0]);
77 break;
78
79 case FUN_1_0:
80 scavenge_fun_srt(info);
81 FALLTHROUGH;
82 case CONSTR_1_0:
83 evacuate(&((StgClosure *)p)->payload[0]);
84 break;
85
86 case THUNK_0_1:
87 scavenge_thunk_srt(info);
88 break;
89
90 case FUN_0_1:
91 scavenge_fun_srt(info);
92 FALLTHROUGH;
93 case CONSTR_0_1:
94 break;
95
96 case THUNK_0_2:
97 scavenge_thunk_srt(info);
98 break;
99
100 case FUN_0_2:
101 scavenge_fun_srt(info);
102 FALLTHROUGH;
103 case CONSTR_0_2:
104 break;
105
106 case THUNK_1_1:
107 scavenge_thunk_srt(info);
108 evacuate(&((StgThunk *)p)->payload[0]);
109 break;
110
111 case FUN_1_1:
112 scavenge_fun_srt(info);
113 FALLTHROUGH;
114 case CONSTR_1_1:
115 evacuate(&q->payload[0]);
116 break;
117
118 case FUN:
119 scavenge_fun_srt(info);
120 goto gen_obj;
121
122 case THUNK:
123 {
124 scavenge_thunk_srt(info);
125 StgPtr end = (P_)((StgThunk *)p)->payload + info->layout.payload.ptrs;
126 for (p = (P_)((StgThunk *)p)->payload; p < end; p++) {
127 evacuate((StgClosure **)p);
128 }
129 break;
130 }
131
132 case WEAK:
133 {
134 // We must evacuate the key since it may refer to an object in the
135 // moving heap which may be long gone by the time we call
136 // nonmovingTidyWeaks.
137 StgWeak *weak = (StgWeak *) p;
138 debugTrace(DEBUG_weak, "nonmovingScav: evac key %p (gen=%d) of weak %p\n",
139 weak->key, Bdescr(weak->key)->gen_no, weak);
140 //gct->eager_promotion = true;
141 //evacuate(&weak->key);
142 //gct->eager_promotion = saved_eager_promotion;
143 goto gen_obj;
144 }
145
146 gen_obj:
147 case CONSTR:
148 case CONSTR_NOCAF:
149 case PRIM:
150 {
151 StgPtr end = (P_)((StgClosure *)p)->payload + info->layout.payload.ptrs;
152 for (p = (P_)((StgClosure *)p)->payload; p < end; p++) {
153 evacuate((StgClosure **)p);
154 }
155 break;
156 }
157
158 case BCO: {
159 StgBCO *bco = (StgBCO *)p;
160 evacuate((StgClosure **)&bco->instrs);
161 evacuate((StgClosure **)&bco->literals);
162 evacuate((StgClosure **)&bco->ptrs);
163 break;
164 }
165
166 case MUT_VAR_CLEAN:
167 case MUT_VAR_DIRTY:
168 gct->eager_promotion = false;
169 evacuate(&((StgMutVar *)p)->var);
170 gct->eager_promotion = saved_eager_promotion;
171 if (gct->failed_to_evac) {
172 ((StgClosure *)q)->header.info = &stg_MUT_VAR_DIRTY_info;
173 } else {
174 ((StgClosure *)q)->header.info = &stg_MUT_VAR_CLEAN_info;
175 }
176 break;
177
178 case BLOCKING_QUEUE:
179 {
180 StgBlockingQueue *bq = (StgBlockingQueue *)p;
181
182 gct->eager_promotion = false;
183 evacuate(&bq->bh);
184 evacuate((StgClosure**)&bq->owner);
185 evacuate((StgClosure**)&bq->queue);
186 evacuate((StgClosure**)&bq->link);
187 gct->eager_promotion = saved_eager_promotion;
188
189 if (gct->failed_to_evac) {
190 bq->header.info = &stg_BLOCKING_QUEUE_DIRTY_info;
191 } else {
192 bq->header.info = &stg_BLOCKING_QUEUE_CLEAN_info;
193 }
194 break;
195 }
196
197 case THUNK_SELECTOR:
198 {
199 StgSelector *s = (StgSelector *)p;
200 evacuate(&s->selectee);
201 break;
202 }
203
204 // A chunk of stack saved in a heap object
205 case AP_STACK:
206 {
207 StgAP_STACK *ap = (StgAP_STACK *)p;
208
209 evacuate(&ap->fun);
210 scavenge_stack((StgPtr)ap->payload, (StgPtr)ap->payload + ap->size);
211 break;
212 }
213
214 case PAP:
215 p = scavenge_PAP((StgPAP *)p);
216 break;
217
218 case AP:
219 scavenge_AP((StgAP *)p);
220 break;
221
222 case ARR_WORDS:
223 // nothing to follow
224 break;
225
226 case MUT_ARR_PTRS_CLEAN:
227 case MUT_ARR_PTRS_DIRTY:
228 {
229 gct->eager_promotion = false;
230 scavenge_mut_arr_ptrs((StgMutArrPtrs*)p);
231 gct->eager_promotion = saved_eager_promotion;
232 if (gct->failed_to_evac) {
233 ((StgClosure *)q)->header.info = &stg_MUT_ARR_PTRS_DIRTY_info;
234 } else {
235 ((StgClosure *)q)->header.info = &stg_MUT_ARR_PTRS_CLEAN_info;
236 }
237 gct->failed_to_evac = true; // always put it on the mutable list.
238 break;
239 }
240
241 case MUT_ARR_PTRS_FROZEN_CLEAN:
242 case MUT_ARR_PTRS_FROZEN_DIRTY:
243 // follow everything
244 {
245 scavenge_mut_arr_ptrs((StgMutArrPtrs*)p);
246
247 if (gct->failed_to_evac) {
248 ((StgClosure *)q)->header.info = &stg_MUT_ARR_PTRS_FROZEN_DIRTY_info;
249 } else {
250 ((StgClosure *)q)->header.info = &stg_MUT_ARR_PTRS_FROZEN_CLEAN_info;
251 }
252 break;
253 }
254
255 case SMALL_MUT_ARR_PTRS_CLEAN:
256 case SMALL_MUT_ARR_PTRS_DIRTY:
257 // follow everything
258 {
259 StgPtr next = p + small_mut_arr_ptrs_sizeW((StgSmallMutArrPtrs*)p);
260 gct->eager_promotion = false;
261 for (p = (P_)((StgSmallMutArrPtrs *)p)->payload; p < next; p++) {
262 evacuate((StgClosure **)p);
263 }
264 gct->eager_promotion = saved_eager_promotion;
265
266 if (gct->failed_to_evac) {
267 ((StgClosure *)q)->header.info = &stg_SMALL_MUT_ARR_PTRS_DIRTY_info;
268 } else {
269 ((StgClosure *)q)->header.info = &stg_SMALL_MUT_ARR_PTRS_CLEAN_info;
270 }
271 gct->failed_to_evac = true; // always put it on the mutable list.
272 break;
273 }
274
275 case SMALL_MUT_ARR_PTRS_FROZEN_CLEAN:
276 case SMALL_MUT_ARR_PTRS_FROZEN_DIRTY:
277 // follow everything
278 {
279 StgPtr next = p + small_mut_arr_ptrs_sizeW((StgSmallMutArrPtrs*)p);
280 for (p = (P_)((StgSmallMutArrPtrs *)p)->payload; p < next; p++) {
281 evacuate((StgClosure **)p);
282 }
283
284 if (gct->failed_to_evac) {
285 ((StgClosure *)q)->header.info = &stg_SMALL_MUT_ARR_PTRS_FROZEN_DIRTY_info;
286 } else {
287 ((StgClosure *)q)->header.info = &stg_SMALL_MUT_ARR_PTRS_FROZEN_CLEAN_info;
288 }
289 break;
290 }
291
292 case TSO:
293 {
294 scavengeTSO((StgTSO *)p);
295 break;
296 }
297
298 case STACK:
299 {
300 StgStack *stack = (StgStack*)p;
301
302 gct->eager_promotion = false;
303 scavenge_stack(stack->sp, stack->stack + stack->stack_size);
304 gct->eager_promotion = saved_eager_promotion;
305 stack->dirty = gct->failed_to_evac;
306 break;
307 }
308
309 case MUT_PRIM:
310 {
311 StgPtr end = (P_)((StgClosure *)p)->payload + info->layout.payload.ptrs;
312 gct->eager_promotion = false;
313 for (p = (P_)((StgClosure *)p)->payload; p < end; p++) {
314 evacuate((StgClosure **)p);
315 }
316 gct->eager_promotion = saved_eager_promotion;
317 gct->failed_to_evac = true; // mutable
318 break;
319 }
320
321 case TREC_CHUNK:
322 {
323 StgWord i;
324 StgTRecChunk *tc = ((StgTRecChunk *) p);
325 TRecEntry *e = &(tc -> entries[0]);
326 gct->eager_promotion = false;
327 evacuate((StgClosure **)&tc->prev_chunk);
328 for (i = 0; i < tc -> next_entry_idx; i ++, e++ ) {
329 evacuate((StgClosure **)&e->tvar);
330 evacuate((StgClosure **)&e->expected_value);
331 evacuate((StgClosure **)&e->new_value);
332 }
333 gct->eager_promotion = saved_eager_promotion;
334 gct->failed_to_evac = true; // mutable
335 break;
336 }
337
338 case IND:
339 case BLACKHOLE:
340 case IND_STATIC:
341 evacuate(&((StgInd *)p)->indirectee);
342 break;
343
344 case COMPACT_NFDATA:
345 scavenge_compact((StgCompactNFData*)p);
346 break;
347
348 default:
349 barf("nonmoving scavenge: unimplemented/strange closure type %d @ %p",
350 info->type, p);
351 }
352
353 if (gct->failed_to_evac) {
354 // Mutable object or points to a younger object, add to the mut_list
355 gct->failed_to_evac = false;
356 if (oldest_gen->no > 0) {
357 recordMutableGen_GC(q, oldest_gen->no);
358 }
359 }
360 }
361
362 /* Scavenge objects evacuated into a nonmoving segment by a minor GC */
363 void
364 scavengeNonmovingSegment (struct NonmovingSegment *seg)
365 {
366 const StgWord blk_size = nonmovingSegmentBlockSize(seg);
367 gct->evac_gen_no = oldest_gen->no;
368 gct->failed_to_evac = false;
369
370 // scavenge objects between scan and free_ptr whose bitmap bits are 0
371 bdescr *seg_block = Bdescr((P_)seg);
372
373 ASSERT(seg_block->u.scan >= (P_)nonmovingSegmentGetBlock(seg, 0));
374 ASSERT(seg_block->u.scan <= (P_)nonmovingSegmentGetBlock(seg, seg->next_free));
375
376 nonmoving_block_idx p_idx = nonmovingGetBlockIdx(seg_block->u.scan);
377 if (p_idx == seg->next_free)
378 return;
379 trace_dump_note("scavenging segment");
380
381 while (p_idx < seg->next_free) {
382 StgClosure *p = (StgClosure*)seg_block->u.scan;
383 trace_dump_set_source_closure(p);
384
385 // bit set = was allocated in a previous GC, no need to scavenge
386 // bit not set = new allocation, so scavenge
387 if (nonmovingGetMark(seg, p_idx) == 0) {
388 nonmovingScavengeOne(p);
389 }
390
391 p_idx++;
392 seg_block->u.scan = (P_)(((uint8_t*)seg_block->u.scan) + blk_size);
393 }
394 }