{-
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998

\section[PrimOp]{Primitive operations (machine-level)}
-}

{-# LANGUAGE CPP #-}

module PrimOp (
        PrimOp(..), PrimOpVecCat(..), allThePrimOps,
        primOpType, primOpSig,
        primOpTag, maxPrimOpTag, primOpOcc,

        tagToEnumKey,

        primOpOutOfLine, primOpCodeSize,
        primOpOkForSpeculation, primOpOkForSideEffects,
        primOpIsCheap, primOpFixity,

        getPrimOpResultInfo, PrimOpResultInfo(..),

        PrimCall(..)
    ) where

#include "HsVersions.h"

import TysPrim
import TysWiredIn

import CmmType
import Demand
import OccName          ( OccName, pprOccName, mkVarOccFS )
import TyCon            ( TyCon, isPrimTyCon, tyConPrimRep, PrimRep(..) )
import Type
import BasicTypes       ( Arity, Fixity(..), FixityDirection(..), Boxity(..) )
import ForeignCall      ( CLabelString )
import Unique           ( Unique, mkPrimOpIdUnique )
import Outputable
import FastString
import Module           ( UnitId )

{-
************************************************************************
*                                                                      *
\subsection[PrimOp-datatype]{Datatype for @PrimOp@ (an enumeration)}
*                                                                      *
************************************************************************

These are in \tr{state-interface.verb} order.
-}

-- supplies:
--     data PrimOp = ...
#include "primop-data-decl.hs-incl"

-- supplies
--     primOpTag :: PrimOp -> Int
#include "primop-tag.hs-incl"
primOpTag _ = error "primOpTag: unknown primop"


instance Eq PrimOp where
    op1 == op2 = primOpTag op1 == primOpTag op2

instance Ord PrimOp where
    op1 <  op2 = primOpTag op1 < primOpTag op2
    op1 <= op2 = primOpTag op1 <= primOpTag op2
    op1 >= op2 = primOpTag op1 >= primOpTag op2
    op1 >  op2 = primOpTag op1 > primOpTag op2
    op1 `compare` op2 | op1 < op2  = LT
                      | op1 == op2 = EQ
                      | otherwise  = GT

instance Outputable PrimOp where
    ppr op = pprPrimOp op

data PrimOpVecCat = IntVec
                  | WordVec
                  | FloatVec

-- An @Enum@-derived list would be better; meanwhile... (ToDo)

allThePrimOps :: [PrimOp]
allThePrimOps =
#include "primop-list.hs-incl"

tagToEnumKey :: Unique
tagToEnumKey = mkPrimOpIdUnique (primOpTag TagToEnumOp)

{-
************************************************************************
*                                                                      *
\subsection[PrimOp-info]{The essential info about each @PrimOp@}
*                                                                      *
************************************************************************

The @String@ in the @PrimOpInfos@ is the ``base name'' by which the user may
refer to the primitive operation.  The conventional \tr{#}-for-unboxed ops is
added on later.

The reason for the funny characters in the names is so we do not
interfere with the programmer's Haskell name spaces.

We use @PrimKinds@ for the ``type'' information, because they're
(slightly) more convenient to use than @TyCons@.
-}

data PrimOpInfo
  = Dyadic      OccName         -- string :: T -> T -> T
                Type
  | Monadic     OccName         -- string :: T -> T
                Type
  | Compare     OccName         -- string :: T -> T -> Int#
                Type
  | GenPrimOp   OccName         -- string :: \/a1..an . T1 -> .. -> Tk -> T
                [TyVar]
                [Type]
                Type

mkDyadic, mkMonadic, mkCompare :: FastString -> Type -> PrimOpInfo
mkDyadic str  ty = Dyadic  (mkVarOccFS str) ty
mkMonadic str ty = Monadic (mkVarOccFS str) ty
mkCompare str ty = Compare (mkVarOccFS str) ty

mkGenPrimOp :: FastString -> [TyVar] -> [Type] -> Type -> PrimOpInfo
mkGenPrimOp str tvs tys ty = GenPrimOp (mkVarOccFS str) tvs tys ty
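
-- For illustration only (hedged): the real PrimOpInfo entries are generated
-- into primop-primop-info.hs-incl by the genprimopcode tool, but an entry
-- for a dyadic Int# operation would look roughly like
--
--     primOpInfo IntAddOp = mkDyadic (fsLit "+#") intPrimTy
--
-- i.e. two arguments and a result all of type Int#, with the user-visible
-- name "+#".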

{-
************************************************************************
*                                                                      *
\subsubsection{Strictness}
*                                                                      *
************************************************************************

Not all primops are strict!
-}

primOpStrictness :: PrimOp -> Arity -> StrictSig
        -- See Demand.StrictnessInfo for discussion of what the results mean
        -- The arity should be the arity of the primop; that's why
        -- this function isn't exported.
#include "primop-strictness.hs-incl"

{-
************************************************************************
*                                                                      *
\subsubsection{Fixity}
*                                                                      *
************************************************************************
-}

primOpFixity :: PrimOp -> Maybe Fixity
#include "primop-fixity.hs-incl"

{-
************************************************************************
*                                                                      *
\subsubsection[PrimOp-comparison]{PrimOpInfo basic comparison ops}
*                                                                      *
************************************************************************

@primOpInfo@ gives all essential information (from which everything
else, notably a type, can be constructed) for each @PrimOp@.
-}

primOpInfo :: PrimOp -> PrimOpInfo
#include "primop-primop-info.hs-incl"
primOpInfo _ = error "primOpInfo: unknown primop"

{-
Here are a load of comments from the old primOp info:

A @Word#@ is an unsigned @Int#@.

@decodeFloat#@ is given w/ Integer-stuff (it's similar).

@decodeDouble#@ is given w/ Integer-stuff (it's similar).

Decoding of floating-point numbers is sorta Integer-related.  Encoding
is done with plain ccalls now (see PrelNumExtra.hs).

A @Weak@ Pointer is created by the @mkWeak#@ primitive:

        mkWeak# :: k -> v -> f -> State# RealWorld
                        -> (# State# RealWorld, Weak# v #)

In practice, you'll use the higher-level

        data Weak v = Weak (Weak# v)
        mkWeak :: k -> v -> IO () -> IO (Weak v)

The following operation dereferences a weak pointer.  The weak pointer
may have been finalized, so the operation returns a result code which
must be inspected before looking at the dereferenced value.

        deRefWeak# :: Weak# v -> State# RealWorld ->
                        (# State# RealWorld, v, Int# #)

Only look at v if the Int# returned is /= 0 !!

The higher-level op is

        deRefWeak :: Weak v -> IO (Maybe v)

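As a usage sketch (hedged; it relies only on the documented System.Mem.Weak
API, not on anything specific to this module), the check-before-use pattern
looks like:

        import System.Mem.Weak (mkWeakPtr, deRefWeak)

        weakDemo :: IO ()
        weakDemo = do
          let xs = [1..10] :: [Int]
          w <- mkWeakPtr xs Nothing       -- weak pointer to xs, no finaliser
          m <- deRefWeak w                -- Nothing if xs has been collected
          case m of
            Just ys -> print (sum ys)
            Nothing -> putStrLn "value already garbage collected"
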
Weak pointers can be finalized early by using the finalize# operation:

        finalizeWeak# :: Weak# v -> State# RealWorld ->
                           (# State# RealWorld, Int#, IO () #)

The Int# returned is either

        0 if the weak pointer has already been finalized, or it has no
          finalizer (the third component is then invalid).

        1 if the weak pointer is still alive, with the finalizer returned
          as the third component.

A {\em stable name/pointer} is an index into a table of stable name
entries.  Since the garbage collector is told about stable pointers,
it is safe to pass a stable pointer to external systems such as C
routines.

\begin{verbatim}
makeStablePtr#  :: a -> State# RealWorld -> (# State# RealWorld, StablePtr# a #)
freeStablePtr   :: StablePtr# a -> State# RealWorld -> State# RealWorld
deRefStablePtr# :: StablePtr# a -> State# RealWorld -> (# State# RealWorld, a #)
eqStablePtr#    :: StablePtr# a -> StablePtr# a -> Int#
\end{verbatim}

It may seem a bit surprising that @makeStablePtr#@ is an @IO@
operation since it doesn't (directly) involve IO operations.  The
reason is that if some optimisation pass decided to duplicate calls to
@makeStablePtr#@ and we only pass one of the stable pointers over, a
massive space leak can result.  Putting it into the IO monad
prevents this.  (Another reason for putting them in a monad is to
ensure correct sequencing wrt the side-effecting @freeStablePtr@
operation.)
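
A usage sketch of the boxed wrappers in Foreign.StablePtr that sit on top of
these primops (hedged; only standard FFI library functions are assumed):

        import Foreign.StablePtr (newStablePtr, deRefStablePtr, freeStablePtr)

        stablePtrDemo :: IO ()
        stablePtrDemo = do
          sp <- newStablePtr "kept alive for foreign code"
          -- sp can now be handed to C as a void*; the GC keeps the value
          -- alive until we explicitly release the pointer.
          s  <- deRefStablePtr sp
          putStrLn s
          freeStablePtr sp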

An important property of stable pointers is that if you call
makeStablePtr# twice on the same object you get the same stable
pointer back.

Note that we can implement @freeStablePtr#@ using @_ccall_@ (and,
besides, it's not likely to be used from Haskell) so it's not a
primop.

Question: Why @RealWorld@ - won't any instance of @_ST@ do the job? [ADR]

Stable Names
~~~~~~~~~~~~

A stable name is like a stable pointer, but with three important differences:

        (a) You can't deRef one to get back to the original object.
        (b) You can convert one to an Int.
        (c) You don't need to 'freeStableName'

The existence of a stable name doesn't guarantee to keep the object it
points to alive (unlike a stable pointer), hence (a).

Invariants:

        (a) makeStableName always returns the same value for a given
            object (same as stable pointers).

        (b) if two stable names are equal, it implies that the objects
            from which they were created were the same.

        (c) stableNameToInt always returns the same Int for a given
            stable name.

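A usage sketch of the boxed wrappers in System.Mem.StableName (hedged; it
illustrates invariants (a) and (c) above using only the documented API):

        import System.Mem.StableName (makeStableName, hashStableName)

        stableNameDemo :: IO ()
        stableNameDemo = do
          let x = "some heap object"
          sn1 <- makeStableName x
          sn2 <- makeStableName x
          print (sn1 == sn2)           -- True: same object, same stable name
          print (hashStableName sn1)   -- the Int is stable for this name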

These primops are pretty weird.

        dataToTag# :: a -> Int    (arg must be an evaluated data type)
        tagToEnum# :: Int -> a    (result type must be an enumerated type)

The constraints aren't currently checked by the front end, but the
code generator will fall over if they aren't satisfied.
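
For a flavour of how these get used (a hedged sketch, not code from this
module; it needs MagicHash and the GHC.Exts exports shown):

        {-# LANGUAGE MagicHash #-}
        import GHC.Exts (Int (I#), dataToTag#, tagToEnum#)

        data Colour = Red | Green | Blue

        colourToInt :: Colour -> Int
        colourToInt c = I# (dataToTag# c)    -- c is an evaluated data value

        intToColour :: Int -> Colour
        intToColour (I# i#) = tagToEnum# i#  -- result type fixed to Colour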

************************************************************************
*                                                                      *
             Which PrimOps are out-of-line
*                                                                      *
************************************************************************

Some PrimOps need to be called out-of-line because they either need to
perform a heap check or they block.
-}

primOpOutOfLine :: PrimOp -> Bool
#include "primop-out-of-line.hs-incl"

{-
************************************************************************
*                                                                      *
             Failure and side effects
*                                                                      *
************************************************************************

Note [PrimOp can_fail and has_side_effects]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Both can_fail and has_side_effects mean that the primop has
some effect that is not captured entirely by its result value.

----------  has_side_effects ---------------------
A primop "has_side_effects" if it has some *write* effect, visible
elsewhere
    - writing to the world (I/O)
    - writing to a mutable data structure (writeIORef)
    - throwing a synchronous Haskell exception

Often such primops have a type like
   State -> input -> (State, output)
so the state token guarantees ordering.  In general we rely *only* on
data dependencies of the state token to enforce write-effect ordering.
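
A concrete instance of this shape (types as documented for GHC.Prim; shown
here only as an illustration):

        readMutVar#  :: MutVar# s a -> State# s -> (# State# s, a #)
        writeMutVar# :: MutVar# s a -> a -> State# s -> State# s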

* NB1: if you inline unsafePerformIO, you may end up with
  side-effecting ops whose 'state' output is discarded.
  And programmers may do that by hand; see Trac #9390.
  That is why we (conservatively) do not discard write-effecting
  primops even if both their state and result are discarded.

* NB2: We consider primops, such as raiseIO#, that can raise a
  (Haskell) synchronous exception to "has_side_effects" but not
  "can_fail".  We must be careful about not discarding such things;
  see the paper "A semantics for imprecise exceptions".

* NB3: *Read* effects (like reading an IORef) don't count here,
  because it doesn't matter if we don't do them, or do them more than
  once.  *Sequencing* is maintained by the data dependency of the state
  token.

----------  can_fail ----------------------------
A primop "can_fail" if it can fail with an *unchecked* exception on
some elements of its input domain.  Main examples:
   division (fails on zero denominator)
   array indexing (fails if the index is out of bounds)

An "unchecked exception" is one that is an outright error (not
turned into a Haskell exception), such as a seg-fault or
divide-by-zero error.  Such can_fail primops are ALWAYS surrounded
with a test that checks for the bad cases, but we need to be
very careful about code motion that might move it out of
the scope of the test.
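
A sketch of that guard pattern, roughly in the style of the boxed wrappers
in the libraries (hedged; the names here are invented for illustration):

        {-# LANGUAGE MagicHash #-}
        import GHC.Exts (Int (I#), isTrue#, quotInt#, (==#))

        safeQuot :: Int -> Int -> Int
        safeQuot (I# x) (I# y)
          | isTrue# (y ==# 0#) = error "safeQuot: division by zero"
          | otherwise          = I# (x `quotInt#` y)   -- can_fail primop,
                                                       -- guarded by the test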

Note [Transformations affected by can_fail and has_side_effects]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The can_fail and has_side_effects properties have the following effect
on program transformations.  Summary table is followed by details.

            can_fail     has_side_effects
Discard     YES          NO
Float in    YES          YES
Float out   NO           NO
Duplicate   YES          NO

* Discarding.   case (a `op` b) of _ -> rhs  ===>  rhs
  You should not discard a has_side_effects primop; e.g.
     case (writeIntArray# a i v s) of (# _, _ #) -> True
  Arguably you should be able to discard this, since the
  returned state token is not used, but that relies on NEVER
  inlining unsafePerformIO, and programmers sometimes write
  this kind of stuff by hand (Trac #9390).  So we (conservatively)
  never discard a has_side_effects primop.

  However, it's fine to discard a can_fail primop.  For example
     case (indexIntArray# a i) of _ -> True
  We can discard indexIntArray#; it has can_fail, but not
  has_side_effects; see Trac #5658 which was all about this.
  Notice that indexIntArray# is (in a more general handling of
  effects) a read effect, but we don't care about that here, and
  treat read effects as *not* has_side_effects.

  Similarly (a `/#` b) can be discarded.  It can seg-fault or
  cause a hardware exception, but not a synchronous Haskell
  exception.

  Synchronous Haskell exceptions, e.g. from raiseIO#, are treated
  as has_side_effects and hence are not discarded.

* Float in.  You can float a can_fail or has_side_effects primop
  *inwards*, but not inside a lambda (see Duplication below).

* Float out.  You must not float a can_fail primop *outwards* lest
  you escape the dynamic scope of the test.  Example:
      case d ># 0# of
        True  -> case x /# d of r -> r +# 1
        False -> 0
  Here we must not float the case outwards to give
      case x /# d of r ->
      case d ># 0# of
        True  -> r +# 1
        False -> 0

  Nor can you float out a has_side_effects primop.  For example:
      if blah then case writeMutVar# v True s0 of (# s1 #) -> s1
              else s0
  Notice that s0 is mentioned in both branches of the 'if', but
  only one of these two will actually be consumed.  But if we
  float out to
      case writeMutVar# v True s0 of (# s1 #) ->
      if blah then s1 else s0
  the writeMutVar will be performed in both branches, which is
  utterly wrong.

* Duplication.  You cannot duplicate a has_side_effects primop.  You
  might wonder how this can occur given the state token threading, but
  just look at Control.Monad.ST.Lazy.Imp.strictToLazy!  We get
  something like this
        p = case readMutVar# s v of
              (# s', r #) -> (S# s', r)
        s' = case p of (s', r) -> s'
        r  = case p of (s', r) -> r

  (All these bindings are boxed.)  If we inline p at its two call
  sites, we get a catastrophe: the read is performed once when
  s' is demanded, and once when 'r' is demanded, which may be much
  later.  Utterly wrong.  Trac #3207 is a real example of this happening.

  However, it's fine to duplicate a can_fail primop.  That is really
  the only difference between can_fail and has_side_effects.

Note [Implementation: how can_fail/has_side_effects affect transformations]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
How do we ensure that floating/duplication/discarding are done right
in the simplifier?

Two main predicates on primops test these flags:
  primOpOkForSideEffects <=> not has_side_effects
  primOpOkForSpeculation <=> not (has_side_effects || can_fail)

* The "no-float-out" thing is achieved by ensuring that we never
  let-bind a can_fail or has_side_effects primop.  The RHS of a
  let-binding (which can float in and out freely) satisfies
  exprOkForSpeculation; this is the let/app invariant.  And
  exprOkForSpeculation is false of can_fail and has_side_effects.

* So can_fail and has_side_effects primops will appear only as the
  scrutinees of cases, and that's why the FloatIn pass is capable
  of floating case bindings inwards.

* The no-duplicate thing is done via primOpIsCheap, by making
  has_side_effects things (very very very) not-cheap!
-}

primOpHasSideEffects :: PrimOp -> Bool
#include "primop-has-side-effects.hs-incl"

primOpCanFail :: PrimOp -> Bool
#include "primop-can-fail.hs-incl"

primOpOkForSpeculation :: PrimOp -> Bool
  -- See Note [PrimOp can_fail and has_side_effects]
  -- See comments with CoreUtils.exprOkForSpeculation
  -- primOpOkForSpeculation => primOpOkForSideEffects
primOpOkForSpeculation op
  = primOpOkForSideEffects op
    && not (primOpOutOfLine op || primOpCanFail op)
    -- I think the "out of line" test is because out of line things can
    -- be expensive (eg sine, cosine), and so we may not want to speculate them

primOpOkForSideEffects :: PrimOp -> Bool
primOpOkForSideEffects op
  = not (primOpHasSideEffects op)

{-
Note [primOpIsCheap]
~~~~~~~~~~~~~~~~~~~~
@primOpIsCheap@, as used in \tr{SimplUtils.hs}.  For now (HACK
WARNING), we just borrow some other predicates for a
what-should-be-good-enough test.  "Cheap" means willing to call it more
than once, and/or push it inside a lambda.  The latter could change the
behaviour of 'seq' for primops that can fail, so we don't treat them as cheap.
-}

primOpIsCheap :: PrimOp -> Bool
-- See Note [PrimOp can_fail and has_side_effects]
primOpIsCheap op = primOpOkForSpeculation op
-- In March 2001, we changed this to
--      primOpIsCheap op = False
-- thereby making *no* primops seem cheap.  But this killed eta
-- expansion on case (x ==# y) of True -> \s -> ...
-- which is bad.  In particular a loop like
--      doLoop n = loop 0
--        where
--          loop i | i == n    = return ()
--                 | otherwise = bar i >> loop (i+1)
-- allocated a closure every time round because it doesn't eta expand.
--
-- The problem that originally gave rise to the change was
--      let x = a +# b *# c in x +# x
-- where we don't want to inline x.  But primOpIsCheap doesn't control
-- that (it's exprIsDupable that does) so the problem doesn't occur
-- even if primOpIsCheap sometimes says 'True'.

{-
************************************************************************
*                                                                      *
               PrimOp code size
*                                                                      *
************************************************************************

primOpCodeSize
~~~~~~~~~~~~~~
Gives an indication of the code size of a primop, for the purposes of
calculating unfolding sizes; see CoreUnfold.sizeExpr.
-}

primOpCodeSize :: PrimOp -> Int
#include "primop-code-size.hs-incl"

primOpCodeSizeDefault :: Int
primOpCodeSizeDefault = 1
  -- CoreUnfold.primOpSize already takes into account primOpOutOfLine
  -- and adds some further costs for the args in that case.

primOpCodeSizeForeignCall :: Int
primOpCodeSizeForeignCall = 4

{-
************************************************************************
*                                                                      *
               PrimOp types
*                                                                      *
************************************************************************
-}

primOpType :: PrimOp -> Type  -- you may want to use primOpSig instead
primOpType op
  = case primOpInfo op of
    Dyadic  _occ ty -> dyadic_fun_ty ty
    Monadic _occ ty -> monadic_fun_ty ty
    Compare _occ ty -> compare_fun_ty ty

    GenPrimOp _occ tyvars arg_tys res_ty ->
        mkInvForAllTys tyvars (mkFunTys arg_tys res_ty)

primOpOcc :: PrimOp -> OccName
primOpOcc op = case primOpInfo op of
               Dyadic    occ _     -> occ
               Monadic   occ _     -> occ
               Compare   occ _     -> occ
               GenPrimOp occ _ _ _ -> occ

-- primOpSig is like primOpType but gives the result split apart:
-- (type variables, argument types, result type)
-- It also gives arity, strictness info

primOpSig :: PrimOp -> ([TyVar], [Type], Type, Arity, StrictSig)
primOpSig op
  = (tyvars, arg_tys, res_ty, arity, primOpStrictness op arity)
  where
    arity = length arg_tys
    (tyvars, arg_tys, res_ty)
      = case (primOpInfo op) of
        Monadic   _occ ty                    -> ([],     [ty],    ty       )
        Dyadic    _occ ty                    -> ([],     [ty,ty], ty       )
        Compare   _occ ty                    -> ([],     [ty,ty], intPrimTy)
        GenPrimOp _occ tyvars arg_tys res_ty -> (tyvars, arg_tys, res_ty   )
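
-- For illustration only (hedged; the exact entries come from the generated
-- primop tables): for a simple dyadic primop such as IntAddOp, primOpSig
-- returns roughly
--   ([], [intPrimTy, intPrimTy], intPrimTy, 2, <its strictness signature>)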

data PrimOpResultInfo
  = ReturnsPrim PrimRep
  | ReturnsAlg  TyCon

-- Some PrimOps need not return a manifest primitive or algebraic value
-- (i.e. they might return a polymorphic value).  These PrimOps *must*
-- be out of line, or the code generator won't work.

getPrimOpResultInfo :: PrimOp -> PrimOpResultInfo
getPrimOpResultInfo op
  = case (primOpInfo op) of
      Dyadic  _ ty                        -> ReturnsPrim (typePrimRep ty)
      Monadic _ ty                        -> ReturnsPrim (typePrimRep ty)
      Compare _ _                         -> ReturnsPrim (tyConPrimRep intPrimTyCon)
      GenPrimOp _ _ _ ty | isPrimTyCon tc -> ReturnsPrim (tyConPrimRep tc)
                         | otherwise      -> ReturnsAlg tc
                         where
                           tc = tyConAppTyCon ty
                           -- All primops return a tycon-app result
                           -- The tycon can be an unboxed tuple, though, which
                           -- gives rise to a ReturnsAlg

{-
We do not currently make use of whether primops are commutable.

We used to try to move constants to the right hand side for strength
reduction.
-}

{-
commutableOp :: PrimOp -> Bool
#include "primop-commutable.hs-incl"
-}

-- Utils:

dyadic_fun_ty, monadic_fun_ty, compare_fun_ty :: Type -> Type
dyadic_fun_ty  ty = mkFunTys [ty, ty] ty
monadic_fun_ty ty = mkFunTy  ty ty
compare_fun_ty ty = mkFunTys [ty, ty] intPrimTy

-- Output stuff:

pprPrimOp :: PrimOp -> SDoc
pprPrimOp other_op = pprOccName (primOpOcc other_op)

{-
************************************************************************
*                                                                      *
\subsubsection[PrimCall]{User-imported primitive calls}
*                                                                      *
************************************************************************
-}

data PrimCall = PrimCall CLabelString UnitId

instance Outputable PrimCall where
  ppr (PrimCall lbl pkgId)
    = text "__primcall" <+> ppr pkgId <+> ppr lbl