/* -----------------------------------------------------------------------------
 *
 * (c) The University of Glasgow 2004-2013
 *
 * This file is included at the top of all .cmm source files (and
 * *only* .cmm files). It defines a collection of useful macros for
 * making .cmm code a bit less error-prone to write, and a bit easier
 * on the eye for the reader.
 *
 * For the syntax of .cmm files, see the parser in ghc/compiler/cmm/CmmParse.y.
 *
 * Accessing fields of structures defined in the RTS header files is
 * done via automatically-generated macros in DerivedConstants.h. For
 * example, where previously we used
 *
 *      CurrentTSO->what_next = x
 *
 * in C-- we now use
 *
 *      StgTSO_what_next(CurrentTSO) = x
 *
 * where the StgTSO_what_next() macro is automatically generated by
 * mkDerivedConstants.c. If you need to access a field that doesn't
 * already have a macro, edit that file (it's pretty self-explanatory).
 *
 * -------------------------------------------------------------------------- */

#ifndef CMM_H
#define CMM_H

/*
 * In files that are included into both C and C-- (and perhaps
 * Haskell) sources, we sometimes need to conditionally compile bits
 * depending on the language. CMINUSMINUS==1 in .cmm sources:
 */
#define CMINUSMINUS 1

#include "ghcconfig.h"

/* -----------------------------------------------------------------------------
   Types

   The following synonyms for C-- types are declared here:

     I8, I16, I32, I64    MachRep-style names for convenience

     W_                   is shorthand for the word type (== StgWord)
     F_                   shorthand for float  (F_ == StgFloat == C's float)
     D_                   shorthand for double (D_ == StgDouble == C's double)

     CInt                 has the same size as an int in C on this platform
     CLong                has the same size as a long in C on this platform

   --------------------------------------------------------------------------- */

#define I8  bits8
#define I16 bits16
#define I32 bits32
#define I64 bits64
#define P_  gcptr

#if SIZEOF_VOID_P == 4
#define W_ bits32
/* Maybe it's better to include MachDeps.h */
#define TAG_BITS 2
#elif SIZEOF_VOID_P == 8
#define W_ bits64
/* Maybe it's better to include MachDeps.h */
#define TAG_BITS 3
#else
#error Unknown word size
#endif

/*
 * The RTS must sometimes UNTAG a pointer before dereferencing it.
 * See the wiki page Commentary/Rts/HaskellExecution/PointerTagging
 */
#define TAG_MASK ((1 << TAG_BITS) - 1)
#define UNTAG(p) (p & ~TAG_MASK)
#define GETTAG(p) (p & TAG_MASK)
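/*
 * Illustrative sketch (not from the original header): testing whether a
 * closure pointer "p" carries a tag, and clearing the tag before
 * fetching its info pointer:
 *
 *   if (GETTAG(p) != 0) {
 *       // tagged: p points to a constructor or a small-arity function
 *   }
 *   info = %INFO_PTR(UNTAG(p));
 */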

#if SIZEOF_INT == 4
#define CInt bits32
#elif SIZEOF_INT == 8
#define CInt bits64
#else
#error Unknown int size
#endif

#if SIZEOF_LONG == 4
#define CLong bits32
#elif SIZEOF_LONG == 8
#define CLong bits64
#else
#error Unknown long size
#endif

#define F_ float32
#define D_ float64
#define L_ bits64
#define V16_ bits128
#define V32_ bits256
#define V64_ bits512

#define SIZEOF_StgDouble 8
#define SIZEOF_StgWord64 8
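/*
 * Illustrative sketch (not part of the original header): declaring
 * local variables with these synonyms in a .cmm procedure:
 *
 *   W_   len;   // one word: bits32 or bits64, matching SIZEOF_VOID_P
 *   CInt rc;    // same width as a C "int", e.g. for a ccall result
 *   D_   d;     // a 64-bit float (StgDouble)
 */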

/* -----------------------------------------------------------------------------
   Misc useful stuff
   -------------------------------------------------------------------------- */

#define ccall foreign "C"

#define NULL (0::W_)

#define STRING(name,str) \
  section "rodata" { \
        name : bits8[] str; \
  } \

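/*
 * Example use of STRING (a sketch; the label name is made up):
 *
 *   STRING(my_error_str, "invalid argument")
 *
 * which emits the bytes of the literal into a read-only data section
 * under the label my_error_str.
 */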
#ifdef TABLES_NEXT_TO_CODE
#define RET_LBL(f) f##_info
#else
#define RET_LBL(f) f##_ret
#endif

#ifdef TABLES_NEXT_TO_CODE
#define ENTRY_LBL(f) f##_info
#else
#define ENTRY_LBL(f) f##_entry
#endif

/* -----------------------------------------------------------------------------
   Byte/word macros

   Everything in C-- is in byte offsets (well, most things). We use
   some macros to allow us to express offsets in words and to try to
   avoid byte/word confusion.
   -------------------------------------------------------------------------- */

#define SIZEOF_W  SIZEOF_VOID_P
#define W_MASK    (SIZEOF_W-1)

#if SIZEOF_W == 4
#define W_SHIFT 2
#elif SIZEOF_W == 8
#define W_SHIFT 3
#endif

/* Converting quantities of words to bytes */
#define WDS(n) ((n)*SIZEOF_W)

/*
 * Converting quantities of bytes to words
 * NB. these work on *unsigned* values only
 */
#define BYTES_TO_WDS(n) ((n) / SIZEOF_W)
#define ROUNDUP_BYTES_TO_WDS(n) (((n) + SIZEOF_W - 1) / SIZEOF_W)
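/*
 * Example (sketch): converting a 3-word payload to bytes, and a byte
 * count "n" back to whole words, rounding up:
 *
 *   bytes = WDS(3);                   // 12 or 24 bytes, by word size
 *   words = ROUNDUP_BYTES_TO_WDS(n);  // n must be unsigned
 */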

/* TO_W_(n) converts n to W_ type from a smaller type */
#if SIZEOF_W == 4
#define TO_W_(x) %sx32(x)
#define HALF_W_(x) %lobits16(x)
#elif SIZEOF_W == 8
#define TO_W_(x) %sx64(x)
#define HALF_W_(x) %lobits32(x)
#endif

#if SIZEOF_INT == 4 && SIZEOF_W == 8
#define W_TO_INT(x) %lobits32(x)
#elif SIZEOF_INT == SIZEOF_W
#define W_TO_INT(x) (x)
#endif

#if SIZEOF_LONG == 4 && SIZEOF_W == 8
#define W_TO_LONG(x) %lobits32(x)
#elif SIZEOF_LONG == SIZEOF_W
#define W_TO_LONG(x) (x)
#endif

/* -----------------------------------------------------------------------------
   Heap/stack access, and adjusting the heap/stack pointers.
   -------------------------------------------------------------------------- */

#define Sp(n)  W_[Sp + WDS(n)]
#define Hp(n)  W_[Hp + WDS(n)]

#define Sp_adj(n) Sp = Sp + WDS(n) /* pronounced "spadge" */
#define Hp_adj(n) Hp = Hp + WDS(n)
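/*
 * Example (sketch; the return label is hypothetical): pushing a frame
 * of one info pointer and one payload word by hand:
 *
 *   Sp_adj(-2);
 *   Sp(1) = x;
 *   Sp(0) = stg_my_ret_info;
 */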

/* -----------------------------------------------------------------------------
   Assertions and Debuggery
   -------------------------------------------------------------------------- */

#ifdef DEBUG
#define ASSERT(predicate) \
    if (predicate) { \
        /*null*/; \
    } else { \
        foreign "C" _assertFail(NULL, __LINE__) never returns; \
    }
#else
#define ASSERT(p) /* nothing */
#endif

#ifdef DEBUG
#define DEBUG_ONLY(s) s
#else
#define DEBUG_ONLY(s) /* nothing */
#endif

/*
 * The IF_DEBUG macro is useful for debug messages that depend on one
 * of the RTS debug options. For example:
 *
 *   IF_DEBUG(RtsFlags_DebugFlags_apply,
 *            foreign "C" fprintf(stderr, stg_ap_0_ret_str));
 *
 * Note the syntax is slightly different to the C version of this macro.
 */
#ifdef DEBUG
#define IF_DEBUG(c,s) if (RtsFlags_DebugFlags_##c(RtsFlags) != 0::I32) { s; }
#else
#define IF_DEBUG(c,s) /* nothing */
#endif

/* -----------------------------------------------------------------------------
   Entering

   It isn't safe to "enter" every closure. Functions in particular
   have no entry code as such; their entry point contains the code to
   apply the function.

   ToDo: range should end in N_CLOSURE_TYPES-1, not N_CLOSURE_TYPES,
   but switch doesn't allow us to use exprs there yet.

   If R1 points to a tagged object it points either to
   * A constructor.
   * A function with arity <= TAG_MASK.
   In both cases the right thing to do is to return.
   Note: it is rather lucky that we can use the tag bits to do this
         for both objects. Maybe it points to a brittle design?

   Indirections can contain tagged pointers, so their tag is checked.
   -------------------------------------------------------------------------- */

#ifdef PROFILING

// When profiling, we cannot shortcut ENTER() by checking the tag,
// because LDV profiling relies on entering closures to mark them as
// "used".

#define LOAD_INFO(ret,x) \
    info = %INFO_PTR(UNTAG(x));

#define UNTAG_IF_PROF(x) UNTAG(x)

#else

#define LOAD_INFO(ret,x) \
    if (GETTAG(x) != 0) { \
        ret(x); \
    } \
    info = %INFO_PTR(x);

#define UNTAG_IF_PROF(x) (x) /* already untagged */

#endif

// We need two versions of ENTER():
//  - ENTER(x) takes the closure as an argument and uses return(),
//    for use in civilized code where the stack is handled by GHC
//
//  - ENTER_R1() expects the closure in R1, and returns are
//    explicit jumps, for use when we are doing the stack management
//    ourselves.

#define ENTER(x) ENTER_(return,x)
#define ENTER_R1() ENTER_(RET_R1,R1)

#define RET_R1(x) jump %ENTRY_CODE(Sp(0)) [R1]

#define ENTER_(ret,x) \
 again: \
    W_ info; \
    LOAD_INFO(ret,x) \
    switch [INVALID_OBJECT .. N_CLOSURE_TYPES] \
           (TO_W_( %INFO_TYPE(%STD_INFO(info)) )) { \
    case \
        IND, \
        IND_PERM, \
        IND_STATIC: \
    { \
        x = StgInd_indirectee(x); \
        goto again; \
    } \
    case \
        FUN, \
        FUN_1_0, \
        FUN_0_1, \
        FUN_2_0, \
        FUN_1_1, \
        FUN_0_2, \
        FUN_STATIC, \
        BCO, \
        PAP: \
    { \
        ret(x); \
    } \
    default: \
    { \
        x = UNTAG_IF_PROF(x); \
        jump %ENTRY_CODE(info) (x); \
    } \
    }

// The FUN cases almost never happen: a pointer to a non-static FUN
// should always be tagged. This unfortunately isn't true for the
// interpreter right now, which leaves untagged FUNs on the stack.
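/*
 * Example (sketch): evaluating a closure held in a local variable, in
 * code where GHC manages the stack; ENTER() returns directly for
 * constructors and functions, and jumps to the entry code otherwise:
 *
 *   P_ clos;
 *   ...
 *   ENTER(clos);
 */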

/* -----------------------------------------------------------------------------
   Constants.
   -------------------------------------------------------------------------- */

#include "rts/Constants.h"
#include "DerivedConstants.h"
#include "rts/storage/ClosureTypes.h"
#include "rts/storage/FunTypes.h"
#include "rts/storage/SMPClosureOps.h"
#include "rts/OSThreads.h"

/*
 * Need MachRegs, because some of the RTS code is conditionally
 * compiled based on REG_R1, REG_R2, etc.
 */
#include "stg/RtsMachRegs.h"

#include "rts/prof/LDV.h"

#undef BLOCK_SIZE
#undef MBLOCK_SIZE
#include "rts/storage/Block.h" /* For Bdescr() */


#define MyCapability() (BaseReg - OFFSET_Capability_r)

/* -------------------------------------------------------------------------
   Info tables
   ------------------------------------------------------------------------- */

#if defined(PROFILING)
#define PROF_HDR_FIELDS(w_,hdr1,hdr2) \
  w_ hdr1, \
  w_ hdr2,
#else
#define PROF_HDR_FIELDS(w_,hdr1,hdr2) /* nothing */
#endif

/* -------------------------------------------------------------------------
   Allocation and garbage collection
   ------------------------------------------------------------------------- */

/*
 * ALLOC_PRIM is for allocating memory on the heap for a primitive
 * object. It is used all over PrimOps.cmm.
 *
 * We make the simplifying assumption that the "admin" part of a
 * primitive closure is just the header when calculating sizes for
 * ticky-ticky. It's not clear whether eg. the size field of an array
 * should be counted as "admin", or the various fields of a BCO.
 */
#define ALLOC_PRIM(bytes) \
    HP_CHK_GEN_TICKY(bytes); \
    TICK_ALLOC_PRIM(SIZEOF_StgHeader,bytes-SIZEOF_StgHeader,0); \
    CCCS_ALLOC(bytes);

#define HEAP_CHECK(bytes,failure) \
    TICK_BUMP(HEAP_CHK_ctr); \
    Hp = Hp + (bytes); \
    if (Hp > HpLim) { HpAlloc = (bytes); failure; } \
    TICK_ALLOC_HEAP_NOCTR(bytes);

#define ALLOC_PRIM_WITH_CUSTOM_FAILURE(bytes,failure) \
    HEAP_CHECK(bytes,failure) \
    TICK_ALLOC_PRIM(SIZEOF_StgHeader,bytes-SIZEOF_StgHeader,0); \
    CCCS_ALLOC(bytes);

#define ALLOC_PRIM_(bytes,fun) \
    ALLOC_PRIM_WITH_CUSTOM_FAILURE(bytes,GC_PRIM(fun));

#define ALLOC_PRIM_P(bytes,fun,arg) \
    ALLOC_PRIM_WITH_CUSTOM_FAILURE(bytes,GC_PRIM_P(fun,arg));

#define ALLOC_PRIM_N(bytes,fun,arg) \
    ALLOC_PRIM_WITH_CUSTOM_FAILURE(bytes,GC_PRIM_N(fun,arg));
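/*
 * Example (sketch, with a made-up primop name): allocating a
 * header-plus-one-word object, retrying the whole primop via GC_PRIM
 * if the heap check fails:
 *
 *   ALLOC_PRIM_(SIZEOF_StgHeader + WDS(1), stg_myPrimzh);
 *   p = Hp - (SIZEOF_StgHeader + WDS(1)) + WDS(1);  // first word of the new object
 */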

/* CCS_ALLOC wants the size in words, because ccs->mem_alloc is in words */
#define CCCS_ALLOC(__alloc) CCS_ALLOC(BYTES_TO_WDS(__alloc), CCCS)

#define HP_CHK_GEN_TICKY(bytes) \
    HP_CHK_GEN(bytes); \
    TICK_ALLOC_HEAP_NOCTR(bytes);

#define HP_CHK_P(bytes, fun, arg) \
    HEAP_CHECK(bytes, GC_PRIM_P(fun,arg))

// TODO I'm not seeing where ALLOC_P_TICKY is used; can it be removed?
// -NSF March 2013
#define ALLOC_P_TICKY(bytes, fun, arg) \
    HP_CHK_P(bytes, fun, arg); \
    TICK_ALLOC_HEAP_NOCTR(bytes);

#define CHECK_GC() \
  (bdescr_link(CurrentNursery) == NULL || \
   generation_n_new_large_words(W_[g0]) >= TO_W_(CLong[large_alloc_lim]))

// allocate() allocates from the nursery, so we check to see
// whether the nursery is nearly empty in any function that uses
// allocate() - this includes many of the primops.
//
// HACK alert: the __L__ stuff is here to coax the common-block
// eliminator into commoning up the call stg_gc_noregs() with the same
// code that gets generated by a STK_CHK_GEN() in the same proc. We
// also need an if (0) { goto __L__; } so that the __L__ label isn't
// optimised away by the control-flow optimiser prior to common-block
// elimination (it will be optimised away later).
//
// This saves some code in gmp-wrappers.cmm where we have lots of
// MAYBE_GC() in the same proc as STK_CHK_GEN().
//
#define MAYBE_GC(retry) \
    if (CHECK_GC()) { \
        HpAlloc = 0; \
        goto __L__; \
  __L__: \
        call stg_gc_noregs(); \
        goto retry; \
    } \
    if (0) { goto __L__; }

#define GC_PRIM(fun) \
    R9 = fun; \
    jump stg_gc_prim();

#define GC_PRIM_N(fun,arg) \
    R9 = fun; \
    jump stg_gc_prim_n(arg);

#define GC_PRIM_P(fun,arg) \
    R9 = fun; \
    jump stg_gc_prim_p(arg);

#define GC_PRIM_PP(fun,arg1,arg2) \
    R9 = fun; \
    jump stg_gc_prim_pp(arg1,arg2);

#define MAYBE_GC_(fun) \
    if (CHECK_GC()) { \
        HpAlloc = 0; \
        GC_PRIM(fun) \
    }

#define MAYBE_GC_N(fun,arg) \
    if (CHECK_GC()) { \
        HpAlloc = 0; \
        GC_PRIM_N(fun,arg) \
    }

#define MAYBE_GC_P(fun,arg) \
    if (CHECK_GC()) { \
        HpAlloc = 0; \
        GC_PRIM_P(fun,arg) \
    }

#define MAYBE_GC_PP(fun,arg1,arg2) \
    if (CHECK_GC()) { \
        HpAlloc = 0; \
        GC_PRIM_PP(fun,arg1,arg2) \
    }
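/*
 * Example (sketch, hypothetical primop name): polling for GC before a
 * primop that calls allocate(); if the nursery is nearly full, the GC
 * runs and then the primop is re-entered with its argument:
 *
 *   stg_myArrayPrimzh ( W_ n )
 *   {
 *       MAYBE_GC_N(stg_myArrayPrimzh, n);
 *       ...
 *   }
 */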

#define STK_CHK(n, fun) \
    TICK_BUMP(STK_CHK_ctr); \
    if (Sp - (n) < SpLim) { \
        GC_PRIM(fun) \
    }

#define STK_CHK_P(n, fun, arg) \
    if (Sp - (n) < SpLim) { \
        GC_PRIM_P(fun,arg) \
    }

#define STK_CHK_PP(n, fun, arg1, arg2) \
    if (Sp - (n) < SpLim) { \
        GC_PRIM_PP(fun,arg1,arg2) \
    }

#define STK_CHK_ENTER(n, closure) \
    if (Sp - (n) < SpLim) { \
        jump __stg_gc_enter_1(closure); \
    }
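/*
 * Example (sketch, hypothetical primop name): making sure two words of
 * stack are available before pushing a frame; on overflow the primop
 * is restarted with both pointer arguments after the stack is grown:
 *
 *   STK_CHK_PP(WDS(2), stg_myPrimzh, arg1, arg2);
 *   Sp_adj(-2);
 */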

// A funky heap check used by AutoApply.cmm

#define HP_CHK_NP_ASSIGN_SP0(size,f) \
    HEAP_CHECK(size, Sp(0) = f; jump __stg_gc_enter_1 [R1];)

/* -----------------------------------------------------------------------------
   Closure headers
   -------------------------------------------------------------------------- */

/*
 * This is really ugly, since we don't do the rest of StgHeader this
 * way. The problem is that values from DerivedConstants.h cannot be
 * dependent on the way (SMP, PROF etc.). For SIZEOF_StgHeader we get
 * the value from GHC, but it seems like too much trouble to do that
 * for StgThunkHeader.
 */
#define SIZEOF_StgThunkHeader SIZEOF_StgHeader+SIZEOF_StgSMPThunkHeader

#define StgThunk_payload(__ptr__,__ix__) \
    W_[__ptr__+SIZEOF_StgThunkHeader+ WDS(__ix__)]

/* -----------------------------------------------------------------------------
   Closures
   -------------------------------------------------------------------------- */

/* The offset of the payload of an array */
#define BYTE_ARR_CTS(arr) ((arr) + SIZEOF_StgArrWords)

/* The number of words allocated in an array payload */
#define BYTE_ARR_WDS(arr) ROUNDUP_BYTES_TO_WDS(StgArrWords_bytes(arr))

/* Getting/setting the info pointer of a closure */
#define SET_INFO(p,info) StgHeader_info(p) = info
#define GET_INFO(p) StgHeader_info(p)

/* Determine the size of an ordinary closure from its info table */
#define sizeW_fromITBL(itbl) \
    SIZEOF_StgHeader + WDS(%INFO_PTRS(itbl)) + WDS(%INFO_NPTRS(itbl))

/* NB. duplicated from InfoTables.h! */
#define BITMAP_SIZE(bitmap) ((bitmap) & BITMAP_SIZE_MASK)
#define BITMAP_BITS(bitmap) ((bitmap) >> BITMAP_BITS_SHIFT)

/* Debugging macros */
#define LOOKS_LIKE_INFO_PTR(p) \
    ((p) != NULL && \
     LOOKS_LIKE_INFO_PTR_NOT_NULL(p))

#define LOOKS_LIKE_INFO_PTR_NOT_NULL(p) \
    ( (TO_W_(%INFO_TYPE(%STD_INFO(p))) != INVALID_OBJECT) && \
      (TO_W_(%INFO_TYPE(%STD_INFO(p))) < N_CLOSURE_TYPES))

#define LOOKS_LIKE_CLOSURE_PTR(p) (LOOKS_LIKE_INFO_PTR(GET_INFO(UNTAG(p))))

/*
 * The layout of the StgFunInfoExtra part of an info table changes
 * depending on TABLES_NEXT_TO_CODE. So we define field access
 * macros which use the appropriate version here:
 */
#ifdef TABLES_NEXT_TO_CODE
/*
 * when TABLES_NEXT_TO_CODE, slow_apply is stored as an offset
 * instead of the normal pointer.
 */

#define StgFunInfoExtra_slow_apply(fun_info) \
    (TO_W_(StgFunInfoExtraRev_slow_apply_offset(fun_info)) \
     + (fun_info) + SIZEOF_StgFunInfoExtraRev + SIZEOF_StgInfoTable)

#define StgFunInfoExtra_fun_type(i) StgFunInfoExtraRev_fun_type(i)
#define StgFunInfoExtra_arity(i) StgFunInfoExtraRev_arity(i)
#define StgFunInfoExtra_bitmap(i) StgFunInfoExtraRev_bitmap(i)
#else
#define StgFunInfoExtra_slow_apply(i) StgFunInfoExtraFwd_slow_apply(i)
#define StgFunInfoExtra_fun_type(i) StgFunInfoExtraFwd_fun_type(i)
#define StgFunInfoExtra_arity(i) StgFunInfoExtraFwd_arity(i)
#define StgFunInfoExtra_bitmap(i) StgFunInfoExtraFwd_bitmap(i)
#endif

#define mutArrCardMask ((1 << MUT_ARR_PTRS_CARD_BITS) - 1)
#define mutArrPtrCardDown(i) ((i) >> MUT_ARR_PTRS_CARD_BITS)
#define mutArrPtrCardUp(i) (((i) + mutArrCardMask) >> MUT_ARR_PTRS_CARD_BITS)
#define mutArrPtrsCardWords(n) ROUNDUP_BYTES_TO_WDS(mutArrPtrCardUp(n))

#if defined(PROFILING) || (!defined(THREADED_RTS) && defined(DEBUG))
#define OVERWRITING_CLOSURE(c) foreign "C" overwritingClosure(c "ptr")
#else
#define OVERWRITING_CLOSURE(c) /* nothing */
#endif

#ifdef THREADED_RTS
#define prim_write_barrier prim %write_barrier()
#else
#define prim_write_barrier /* nothing */
#endif

/* -----------------------------------------------------------------------------
   Ticky macros
   -------------------------------------------------------------------------- */

#ifdef TICKY_TICKY
#define TICK_BUMP_BY(ctr,n) CLong[ctr] = CLong[ctr] + n
#else
#define TICK_BUMP_BY(ctr,n) /* nothing */
#endif

#define TICK_BUMP(ctr) TICK_BUMP_BY(ctr,1)

#define TICK_ENT_DYN_IND() TICK_BUMP(ENT_DYN_IND_ctr)
#define TICK_ENT_DYN_THK() TICK_BUMP(ENT_DYN_THK_ctr)
#define TICK_ENT_VIA_NODE() TICK_BUMP(ENT_VIA_NODE_ctr)
#define TICK_ENT_STATIC_IND() TICK_BUMP(ENT_STATIC_IND_ctr)
#define TICK_ENT_PERM_IND() TICK_BUMP(ENT_PERM_IND_ctr)
#define TICK_ENT_PAP() TICK_BUMP(ENT_PAP_ctr)
#define TICK_ENT_AP() TICK_BUMP(ENT_AP_ctr)
#define TICK_ENT_AP_STACK() TICK_BUMP(ENT_AP_STACK_ctr)
#define TICK_ENT_BH() TICK_BUMP(ENT_BH_ctr)
#define TICK_ENT_LNE() TICK_BUMP(ENT_LNE_ctr)
#define TICK_UNKNOWN_CALL() TICK_BUMP(UNKNOWN_CALL_ctr)
#define TICK_UPDF_PUSHED() TICK_BUMP(UPDF_PUSHED_ctr)
#define TICK_CATCHF_PUSHED() TICK_BUMP(CATCHF_PUSHED_ctr)
#define TICK_UPDF_OMITTED() TICK_BUMP(UPDF_OMITTED_ctr)
#define TICK_UPD_NEW_IND() TICK_BUMP(UPD_NEW_IND_ctr)
#define TICK_UPD_NEW_PERM_IND() TICK_BUMP(UPD_NEW_PERM_IND_ctr)
#define TICK_UPD_OLD_IND() TICK_BUMP(UPD_OLD_IND_ctr)
#define TICK_UPD_OLD_PERM_IND() TICK_BUMP(UPD_OLD_PERM_IND_ctr)

#define TICK_SLOW_CALL_FUN_TOO_FEW() TICK_BUMP(SLOW_CALL_FUN_TOO_FEW_ctr)
#define TICK_SLOW_CALL_FUN_CORRECT() TICK_BUMP(SLOW_CALL_FUN_CORRECT_ctr)
#define TICK_SLOW_CALL_FUN_TOO_MANY() TICK_BUMP(SLOW_CALL_FUN_TOO_MANY_ctr)
#define TICK_SLOW_CALL_PAP_TOO_FEW() TICK_BUMP(SLOW_CALL_PAP_TOO_FEW_ctr)
#define TICK_SLOW_CALL_PAP_CORRECT() TICK_BUMP(SLOW_CALL_PAP_CORRECT_ctr)
#define TICK_SLOW_CALL_PAP_TOO_MANY() TICK_BUMP(SLOW_CALL_PAP_TOO_MANY_ctr)

#define TICK_SLOW_CALL_fast_v16() TICK_BUMP(SLOW_CALL_fast_v16_ctr)
#define TICK_SLOW_CALL_fast_v() TICK_BUMP(SLOW_CALL_fast_v_ctr)
#define TICK_SLOW_CALL_fast_p() TICK_BUMP(SLOW_CALL_fast_p_ctr)
#define TICK_SLOW_CALL_fast_pv() TICK_BUMP(SLOW_CALL_fast_pv_ctr)
#define TICK_SLOW_CALL_fast_pp() TICK_BUMP(SLOW_CALL_fast_pp_ctr)
#define TICK_SLOW_CALL_fast_ppv() TICK_BUMP(SLOW_CALL_fast_ppv_ctr)
#define TICK_SLOW_CALL_fast_ppp() TICK_BUMP(SLOW_CALL_fast_ppp_ctr)
#define TICK_SLOW_CALL_fast_pppv() TICK_BUMP(SLOW_CALL_fast_pppv_ctr)
#define TICK_SLOW_CALL_fast_pppp() TICK_BUMP(SLOW_CALL_fast_pppp_ctr)
#define TICK_SLOW_CALL_fast_ppppp() TICK_BUMP(SLOW_CALL_fast_ppppp_ctr)
#define TICK_SLOW_CALL_fast_pppppp() TICK_BUMP(SLOW_CALL_fast_pppppp_ctr)
#define TICK_VERY_SLOW_CALL() TICK_BUMP(VERY_SLOW_CALL_ctr)

/*
 * NOTE: TICK_HISTO_BY and TICK_HISTO currently have no effect. The old
 * code for it didn't typecheck and I just commented it out to get
 * ticky to work. - krc 1/2007
 */

#define TICK_HISTO_BY(histo,n,i) /* nothing */

#define TICK_HISTO(histo,n) TICK_HISTO_BY(histo,n,1)

/* An unboxed tuple with n components. */
#define TICK_RET_UNBOXED_TUP(n) \
    TICK_BUMP(RET_UNBOXED_TUP_ctr); \
    TICK_HISTO(RET_UNBOXED_TUP,n)

/*
 * A slow call with n arguments. In the unevald case, this call has
 * already been counted once, so don't count it again.
 */
#define TICK_SLOW_CALL(n) \
    TICK_BUMP(SLOW_CALL_ctr); \
    TICK_HISTO(SLOW_CALL,n)

/*
 * This slow call was found to be to an unevaluated function; undo the
 * ticks we did in TICK_SLOW_CALL.
 */
#define TICK_SLOW_CALL_UNEVALD(n) \
    TICK_BUMP(SLOW_CALL_UNEVALD_ctr); \
    TICK_BUMP_BY(SLOW_CALL_ctr,-1); \
    TICK_HISTO_BY(SLOW_CALL,n,-1);

/* Updating a closure with a new CON */
#define TICK_UPD_CON_IN_NEW(n) \
    TICK_BUMP(UPD_CON_IN_NEW_ctr); \
    TICK_HISTO(UPD_CON_IN_NEW,n)

#define TICK_ALLOC_HEAP_NOCTR(bytes) \
    TICK_BUMP(ALLOC_RTS_ctr); \
    TICK_BUMP_BY(ALLOC_RTS_tot,bytes)

/* -----------------------------------------------------------------------------
   Saving and restoring STG registers

   STG registers must be saved around a C call, just in case the STG
   register is mapped to a caller-saves machine register. Normally we
   don't need to worry about this: the code generator has already
   loaded any live STG registers into variables for us. But in
   hand-written low-level Cmm code where we don't know which registers
   are live, we might have to save them all.
   -------------------------------------------------------------------------- */

#define SAVE_STGREGS \
    W_ r1, r2, r3, r4, r5, r6, r7, r8; \
    F_ f1, f2, f3, f4, f5, f6; \
    D_ d1, d2, d3, d4, d5, d6; \
    L_ l1; \
    \
    r1 = R1; \
    r2 = R2; \
    r3 = R3; \
    r4 = R4; \
    r5 = R5; \
    r6 = R6; \
    r7 = R7; \
    r8 = R8; \
    \
    f1 = F1; \
    f2 = F2; \
    f3 = F3; \
    f4 = F4; \
    f5 = F5; \
    f6 = F6; \
    \
    d1 = D1; \
    d2 = D2; \
    d3 = D3; \
    d4 = D4; \
    d5 = D5; \
    d6 = D6; \
    \
    l1 = L1;


#define RESTORE_STGREGS \
    R1 = r1; \
    R2 = r2; \
    R3 = r3; \
    R4 = r4; \
    R5 = r5; \
    R6 = r6; \
    R7 = r7; \
    R8 = r8; \
    \
    F1 = f1; \
    F2 = f2; \
    F3 = f3; \
    F4 = f4; \
    F5 = f5; \
    F6 = f6; \
    \
    D1 = d1; \
    D2 = d2; \
    D3 = d3; \
    D4 = d4; \
    D5 = d5; \
    D6 = d6; \
    \
    L1 = l1;
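/*
 * Example (sketch; the C function name is made up): wrapping a C call
 * in hand-written Cmm where we cannot tell which STG registers are
 * live, so all of them are saved and restored:
 *
 *   SAVE_STGREGS
 *   ccall someRtsHelper();
 *   RESTORE_STGREGS
 */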

/* -----------------------------------------------------------------------------
   Misc junk
   -------------------------------------------------------------------------- */

#define NO_TREC stg_NO_TREC_closure
#define END_TSO_QUEUE stg_END_TSO_QUEUE_closure
#define STM_AWOKEN stg_STM_AWOKEN_closure
#define END_INVARIANT_CHECK_QUEUE stg_END_INVARIANT_CHECK_QUEUE_closure

#define recordMutableCap(p, gen) \
    W_ __bd; \
    W_ mut_list; \
    mut_list = Capability_mut_lists(MyCapability()) + WDS(gen); \
    __bd = W_[mut_list]; \
    if (bdescr_free(__bd) >= bdescr_start(__bd) + BLOCK_SIZE) { \
        W_ __new_bd; \
        ("ptr" __new_bd) = foreign "C" allocBlock_lock(); \
        bdescr_link(__new_bd) = __bd; \
        __bd = __new_bd; \
        W_[mut_list] = __bd; \
    } \
    W_ free; \
    free = bdescr_free(__bd); \
    W_[free] = p; \
    bdescr_free(__bd) = free + WDS(1);

#define recordMutable(p) \
    P_ __p; \
    W_ __bd; \
    W_ __gen; \
    __p = p; \
    __bd = Bdescr(__p); \
    __gen = TO_W_(bdescr_gen_no(__bd)); \
    if (__gen > 0) { recordMutableCap(__p, __gen); }
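/*
 * Example (sketch): after mutating a field of an object "p" that may
 * live in an old generation, record it on the capability's mutable
 * list so the next GC will scan it:
 *
 *   recordMutable(p);
 */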

#endif /* CMM_H */