New stack layout algorithm
[ghc.git] / compiler / cmm / MkGraph.hs
1 {-# LANGUAGE GADTs #-}
2
3 module MkGraph
4 ( CmmAGraph, CgStmt(..)
5 , (<*>), catAGraphs
6 , mkLabel, mkMiddle, mkLast, outOfLine
7 , lgraphOfAGraph, labelAGraph
8
9 , stackStubExpr
10 , mkNop, mkAssign, mkStore, mkUnsafeCall, mkFinalCall, mkCallReturnsTo
11 , mkJump, mkDirectJump, mkForeignJump, mkForeignJumpExtra, mkJumpGC
12 , mkCbranch, mkSwitch
13 , mkReturn, mkReturnSimple, mkComment, mkCallEntry, mkBranch
14 , copyInOflow, copyOutOflow
15 , toCall, Transfer(..)
16 )
17 where
18
19 import BlockId
20 import Cmm
21 import CmmCallConv (assignArgumentsPos, ParamLocation(..))
22
23
24 import Compiler.Hoopl hiding (Unique, (<*>), mkFirst, mkMiddle, mkLast, mkLabel, mkBranch, Shape(..))
25 import FastString
26 import ForeignCall
27 import Outputable
28 import Prelude hiding (succ)
29 import SMRep (ByteOff)
30 import UniqSupply
31 import OrdList
32
33 #include "HsVersions.h"
34
35
36 -----------------------------------------------------------------------------
37 -- Building Graphs
38
39
-- | CmmAGraph is a chunk of code consisting of:
--
--   * ordinary statements (assignments, stores etc.)
--   * jumps
--   * labels
--   * out-of-line labelled blocks
--
-- The semantics is that control falls through labels and out-of-line
-- blocks.  Everything after a jump up to the next label is by
-- definition unreachable code, and will be discarded.
--
-- Two CmmAGraphs can be stuck together with <*>, with the meaning that
-- control flows from the first to the second.
--
-- A 'CmmAGraph' can be turned into a 'CmmGraph' (closed at both ends)
-- by providing a label for the entry point; see 'labelAGraph'.
--
type CmmAGraph = OrdList CgStmt

-- | One element of a 'CmmAGraph'.  The O/C shape indices come from
-- Hoopl: O = open (control flows through), C = closed.
data CgStmt
  = CgLabel BlockId          -- ^ a label; control falls through into it
  | CgStmt (CmmNode O O)     -- ^ an ordinary open/open statement
  | CgLast (CmmNode O C)     -- ^ a control transfer; closes the current block
  | CgFork BlockId CmmAGraph -- ^ an out-of-line labelled block
64
-- | Close an abstract 'CmmAGraph' into a real 'CmmGraph', using the
-- given 'BlockId' as the label of the entry block.
flattenCmmAGraph :: BlockId -> CmmAGraph -> CmmGraph
flattenCmmAGraph id stmts
  = CmmGraph { g_entry = id, g_graph = GMany NothingO body NothingO }
  where
    (first_block, rest) = flatten (fromOL stmts)
    entry_block         = blockJoinHead (CmmEntry id) first_block
    body                = foldr addBlock emptyBody (entry_block : rest)
73
-- | Partition a flat list of 'CgStmt's into the leading open-entry
-- block and the closed blocks introduced by the labels and forks that
-- follow it.  Code between a 'CgLast' and the next label is discarded
-- as unreachable.
flatten :: [CgStmt] -> (Block CmmNode O C, [Block CmmNode C C])
flatten [] = panic "flatten []"

-- A label at the end of a function or fork: this label must not be reachable,
-- but it might be referred to from another BB that also isn't reachable.
-- Eliminating these has to be done with a dead-code analysis.  For now,
-- we just make it into a well-formed block by adding a recursive jump.
flatten [CgLabel id]
  = (goto_id, [blockJoinHead (CmmEntry id) goto_id] )
  where goto_id = blockJoinTail emptyBlock (CmmBranch id)

-- A jump/branch: throw away all the code up to the next label, because
-- it is unreachable.  Be careful to keep forks that we find on the way.
flatten (CgLast stmt : stmts)
  = case dropWhile isOrdinaryStmt stmts of
      [] ->
        -- nothing live follows the jump
        ( sing, [] )
      [CgLabel id] ->
        -- trailing label: close it with a self-branch (see above)
        ( sing, [blockJoin (CmmEntry id) emptyBlock (CmmBranch id)] )
      (CgLabel id : stmts) ->
        ( sing, blockJoinHead (CmmEntry id) block : blocks )
        where (block,blocks) = flatten stmts
      (CgFork fork_id stmts : ss) ->
        -- hoist the fork in front of the jump so it is not discarded
        flatten (CgFork fork_id stmts : CgLast stmt : ss)
      -- unreachable: after dropping ordinary statements the head can
      -- only be a CgLabel or CgFork
      _ -> panic "MkGraph.flatten"
  where
    sing = blockJoinTail emptyBlock stmt

flatten (s:ss) =
  case s of
    CgStmt stmt -> (blockCons stmt block, blocks)
    -- a label in mid-stream: end the current block with a fall-through
    -- branch to the label, and start a new closed block at the label
    CgLabel id  -> (blockJoinTail emptyBlock (CmmBranch id),
                    blockJoinHead (CmmEntry id) block : blocks)
    CgFork fork_id stmts ->
      (block, blockJoinHead (CmmEntry fork_id) fork_block : fork_blocks ++ blocks)
      where (fork_block, fork_blocks) = flatten (fromOL stmts)
    -- unreachable: CgLast is handled by the equation above
    _ -> panic "MkGraph.flatten"
  where (block,blocks) = flatten ss
112
-- | True for plain statements and last nodes (the ones that may be
-- dropped as unreachable after a jump); False for labels and forks.
isOrdinaryStmt :: CgStmt -> Bool
isOrdinaryStmt stmt = case stmt of
  CgStmt _ -> True
  CgLast _ -> True
  _        -> False
117
118
119
120 ---------- AGraph manipulation
121
-- | Sequential composition: control flows from the first graph into
-- the second.
(<*>) :: CmmAGraph -> CmmAGraph -> CmmAGraph
g1 <*> g2 = g1 `appOL` g2
124
-- | Concatenate a list of graphs in sequence.
catAGraphs :: [CmmAGraph] -> CmmAGraph
catAGraphs gs = concatOL gs
127
-- | Creates the sequence "goto id; id:" as an AGraph (the branch is
-- inserted by 'flatten'; control falls through the label).
mkLabel :: BlockId -> CmmAGraph
mkLabel = unitOL . CgLabel
131
-- | Creates an open AGraph from a given open/open node.
mkMiddle :: CmmNode O O -> CmmAGraph
mkMiddle = unitOL . CgStmt
135
-- | Creates a closed AGraph from a given control-transfer node.
mkLast :: CmmNode O C -> CmmAGraph
mkLast = unitOL . CgLast
139
-- | A labelled out-of-line code block; should end in a last node.
outOfLine :: BlockId -> CmmAGraph -> CmmAGraph
outOfLine lbl body = unitOL $ CgFork lbl body
143
-- | Allocate a fresh label for the entry point and close the graph.
lgraphOfAGraph :: CmmAGraph -> UniqSM CmmGraph
lgraphOfAGraph g =
  getUniqueM >>= \u -> return (flattenCmmAGraph (mkBlockId u) g)
148
-- | Use the given BlockId as the label of the entry point.
labelAGraph :: BlockId -> CmmAGraph -> UniqSM CmmGraph
labelAGraph lbl ag = return $ flattenCmmAGraph lbl ag
152
---------- No-ops

-- | The empty code sequence.
mkNop :: CmmAGraph
mkNop = nilOL
156
-- | A comment pseudo-instruction; compiled to nothing in non-DEBUG
-- builds.
mkComment :: FastString -> CmmAGraph
#ifdef DEBUG
-- SDM: generating all those comments takes time, this saved about 4% for me
mkComment fs = mkMiddle $ CmmComment fs
#else
mkComment _ = nilOL
#endif
164
---------- Assignment and store

-- | Assign the value of an expression to a register.
mkAssign :: CmmReg -> CmmExpr -> CmmAGraph
mkAssign reg rhs = mkMiddle (CmmAssign reg rhs)
168
-- | Store the value of an expression at the given address.
mkStore :: CmmExpr -> CmmExpr -> CmmAGraph
mkStore addr rhs = mkMiddle (CmmStore addr rhs)
171
---------- Control transfer

-- | Tail-call via the native node-call convention.
mkJump :: CmmExpr -> [CmmActual] -> UpdFrameOffset -> CmmAGraph
mkJump e actuals updfr_off =
  lastWithArgs Jump Old NativeNodeCall actuals updfr_off
    (toCall e Nothing updfr_off 0)
177
-- | Tail-call via the native direct-call convention.
mkDirectJump :: CmmExpr -> [CmmActual] -> UpdFrameOffset -> CmmAGraph
mkDirectJump e actuals updfr_off =
  lastWithArgs Jump Old NativeDirectCall actuals updfr_off
    (toCall e Nothing updfr_off 0)
182
-- | Tail-call using the GC calling convention.
mkJumpGC :: CmmExpr -> [CmmActual] -> UpdFrameOffset -> CmmAGraph
mkJumpGC e actuals updfr_off =
  lastWithArgs Jump Old GC actuals updfr_off
    (toCall e Nothing updfr_off 0)
187
-- | Tail-call using the supplied calling convention.
mkForeignJump :: Convention -> CmmExpr -> [CmmActual] -> UpdFrameOffset
              -> CmmAGraph
mkForeignJump conv e actuals updfr_off =
  lastWithArgs Jump Old conv actuals updfr_off
    (toCall e Nothing updfr_off 0)
193
-- | Like 'mkForeignJump', but also places extra arguments on the stack
-- (the @(ByteOff, [(CmmExpr, ByteOff)])@ pair of total size and
-- expression/offset assignments).
mkForeignJumpExtra :: Convention -> CmmExpr -> [CmmActual]
                   -> UpdFrameOffset -> (ByteOff, [(CmmExpr, ByteOff)])
                   -> CmmAGraph
mkForeignJumpExtra conv e actuals updfr_off extra_stack =
  lastWithArgsAndExtraStack Jump Old conv actuals updfr_off extra_stack
    (toCall e Nothing updfr_off 0)
200
-- | Conditional branch: if the predicate holds go to the first block,
-- otherwise the second.
mkCbranch :: CmmExpr -> BlockId -> BlockId -> CmmAGraph
mkCbranch pred ifso ifnot = mkLast $ CmmCondBranch pred ifso ifnot
203
-- | Multi-way branch on the scrutinee; Nothing entries are holes in
-- the jump table.
mkSwitch :: CmmExpr -> [Maybe BlockId] -> CmmAGraph
mkSwitch scrut entries = mkLast (CmmSwitch scrut entries)
206
-- | Return through the given return address, via NativeReturn.
mkReturn :: CmmExpr -> [CmmActual] -> UpdFrameOffset -> CmmAGraph
mkReturn e actuals updfr_off =
  lastWithArgs Ret Old NativeReturn actuals updfr_off
    (toCall e Nothing updfr_off 0)
  -- where e = CmmLoad (CmmStackSlot Old updfr_off) gcWord
212
-- | Like 'mkReturn', but the return address is loaded from the
-- update-frame slot rather than supplied by the caller.
-- (Previously duplicated mkReturn's body; now defined in terms of it.)
mkReturnSimple :: [CmmActual] -> UpdFrameOffset -> CmmAGraph
mkReturnSimple actuals updfr_off = mkReturn e actuals updfr_off
  where e = CmmLoad (CmmStackSlot Old updfr_off) gcWord
218
-- | Unconditional branch to the given block.
mkBranch :: BlockId -> CmmAGraph
mkBranch bid = mkLast $ CmmBranch bid
221
-- | A final call with no continuation.  NB: the 'CCallConv' argument
-- is currently ignored; the call always uses NativeDirectCall.
mkFinalCall :: CmmExpr -> CCallConv -> [CmmActual] -> UpdFrameOffset
            -> CmmAGraph
mkFinalCall f _ actuals updfr_off =
  lastWithArgs Call Old NativeDirectCall actuals updfr_off
    (toCall f Nothing updfr_off 0)
227
-- | A call that returns to the given continuation block.  Drops the
-- redundant @do@ of the original (the body is a pure expression).
mkCallReturnsTo :: CmmExpr -> Convention -> [CmmActual]
                -> BlockId     -- ^ label of continuation
                -> ByteOff     -- ^ bytes of results expected on return
                -> UpdFrameOffset
                -> (ByteOff, [(CmmExpr,ByteOff)])  -- ^ extra stack stuff
                -> CmmAGraph
mkCallReturnsTo f callConv actuals ret_lbl ret_off updfr_off extra_stack =
  lastWithArgsAndExtraStack Call (Young ret_lbl) callConv actuals
     updfr_off extra_stack $
     toCall f (Just ret_lbl) updfr_off ret_off
238
-- | An unsafe (fast, non-suspending) foreign call.
mkUnsafeCall :: ForeignTarget -> [CmmFormal] -> [CmmActual] -> CmmAGraph
mkUnsafeCall tgt results args = mkMiddle (CmmUnsafeForeignCall tgt results args)
241
242
243 --------------------------------------------------------------------------
244
245
246
247
248 -- Why are we inserting extra blocks that simply branch to the successors?
249 -- Because in addition to the branch instruction, @mkBranch@ will insert
250 -- a necessary adjustment to the stack pointer.
251
252
253 -- For debugging purposes, we can stub out dead stack slots:
-- | A zero literal of the given width, used to stub out dead stack
-- slots for debugging.
stackStubExpr :: Width -> CmmExpr
stackStubExpr w = CmmLit $ CmmInt 0 w
256
257 -- When we copy in parameters, we usually want to put overflow
258 -- parameters on the stack, but sometimes we want to pass
259 -- the variables in their spill slots.
260 -- Therefore, for copying arguments and results, we provide different
261 -- functions to pass the arguments in an overflow area and to pass them in spill slots.
-- | Copy formal parameters into place on entry, putting overflow
-- parameters in the given 'Area'.  Returns the number of bytes used
-- for overflow arguments along with the copy code.
copyInOflow :: Convention -> Area -> [CmmFormal] -> (Int, CmmAGraph)
copyInOflow conv area formals
  = (arg_space, catAGraphs (map mkMiddle copy_nodes))
  where (arg_space, copy_nodes) = copyIn oneCopyOflowI conv area formals
266
-- | Copies one stack-passed argument, threading a (high-water byte
-- offset, accumulated copy nodes) pair.
type SlotCopier = Area -> (LocalReg, ByteOff) -> (ByteOff, [CmmNode O O]) ->
                  (ByteOff, [CmmNode O O])

-- | Shape of the copy-in worker: parameterized over how stack-passed
-- arguments are fetched.
type CopyIn = SlotCopier -> Convention -> Area -> [CmmFormal] -> (ByteOff, [CmmNode O O])
270
-- Return the number of bytes used for copying arguments, as well as the
-- instructions to copy the arguments.
copyIn :: CopyIn
copyIn oflow conv area formals =
  foldr ci (init_offset, []) args'
  where -- register-passed argument: a plain register-to-register assignment;
        -- does not consume stack space, so the offset accumulator is untouched
        ci (reg, RegisterParam r) (n, ms) =
          (n, CmmAssign (CmmLocal reg) (CmmReg $ CmmGlobal r) : ms)
        -- stack-passed argument: delegate to the supplied slot copier
        ci (r, StackParam off) (n, ms) = oflow area (r, off) (n, ms)
        init_offset = widthInBytes wordWidth -- infotable
        args = assignArgumentsPos conv localRegType formals
        -- shift stack offsets past the info-table word; NB the foldl
        -- also reverses the argument list relative to 'args'
        args' = foldl adjust [] args
          where adjust rst (v, StackParam off) = (v, StackParam (off + init_offset)) : rst
                adjust rst x@(_, RegisterParam _) = x : rst
284
-- | Copy in one stack-passed argument from its overflow slot; the
-- first component tracks the high-water offset seen so far.
oneCopyOflowI :: SlotCopier
oneCopyOflowI area (reg, off) (n, ms) =
  (max n off, load_node : ms)
  where
    load_node = CmmAssign (CmmLocal reg) (CmmLoad (CmmStackSlot area off) ty)
    ty        = localRegType reg
290
-- Factoring out the common parts of the copyout functions yielded something
-- more complicated:

-- | The kind of control transfer a copy-out feeds into.
data Transfer = Call | Jump | Ret deriving Eq
295
copyOutOflow :: Convention -> Transfer -> Area -> [CmmActual]
             -> UpdFrameOffset
             -> (ByteOff, [(CmmExpr,ByteOff)]) -- extra stack stuff
             -> (Int, CmmAGraph)

-- Generate code to move the actual parameters into the locations
-- required by the calling convention.  This includes a store for the
-- return address.
--
-- The argument layout function ignores the pointer to the info table,
-- so we slot that in here.  When copying-out to a young area, we set
-- the info table for return and adjust the offsets of the other
-- parameters.  If this is a call instruction, we adjust the offsets
-- of the other parameters.
copyOutOflow conv transfer area actuals updfr_off
             (extra_stack_off, extra_stack_stuff)
  = foldr co (init_offset, mkNop) (args' ++ stack_params)
  where
    -- co: accumulate (high-water stack offset, copy code)
    co (v, RegisterParam r) (n, ms) = (n, mkAssign (CmmGlobal r) v <*> ms)
    co (v, StackParam off)  (n, ms) = (max n off, mkStore (CmmStackSlot area off) v <*> ms)

    -- extra stack arguments, shifted past the return-address word
    stack_params = [ (e, StackParam (off + init_offset))
                   | (e,off) <- extra_stack_stuff ]

    -- setRA: store for the return address (only for a Call into a
    -- young area); init_offset: bytes already claimed at the base
    (setRA, init_offset) =
      case area of
            Young id -> id `seq` -- Generate a store instruction for
                                 -- the return address if making a call
                        if transfer == Call then
                          ([(CmmLit (CmmBlock id), StackParam init_offset)],
                          widthInBytes wordWidth)
                        else ([], 0)
            Old -> ([], updfr_off)

    arg_offset = init_offset + extra_stack_off

    args :: [(CmmExpr, ParamLocation)]   -- The argument and where to put it
    args = assignArgumentsPos conv cmmExprType actuals

    -- shift stack offsets past the return address and extra stack;
    -- NB the foldl reverses 'args' and prepends onto setRA
    args' = foldl adjust setRA args
      where adjust rst   (v, StackParam off)  = (v, StackParam (off + arg_offset)) : rst
            adjust rst x@(_, RegisterParam _) = x : rst
338
339
340
-- | Copy in the formals at a call entry point; overflow parameters
-- live in the Old area.
mkCallEntry :: Convention -> [CmmFormal] -> (Int, CmmAGraph)
mkCallEntry conv = copyInOflow conv Old
343
-- | Copy out the actuals for a transfer, then emit the last node,
-- which receives the number of bytes of outgoing arguments.
lastWithArgs :: Transfer -> Area -> Convention -> [CmmActual]
             -> UpdFrameOffset
             -> (ByteOff -> CmmAGraph)
             -> CmmAGraph
lastWithArgs transfer area conv actuals updfr_off last
  = copies <*> last outArgs
  where
    (outArgs, copies) =
      copyOutOflow conv transfer area actuals updfr_off noExtraStack
352
-- | Like 'lastWithArgs', but also places extra arguments on the stack.
lastWithArgsAndExtraStack :: Transfer -> Area -> Convention -> [CmmActual]
                          -> UpdFrameOffset -> (ByteOff, [(CmmExpr,ByteOff)])
                          -> (ByteOff -> CmmAGraph)
                          -> CmmAGraph
lastWithArgsAndExtraStack transfer area conv actuals updfr_off
                          extra_stack last
  = copies <*> last outArgs
  where
    (outArgs, copies) =
      copyOutOflow conv transfer area actuals updfr_off extra_stack
362
-- | No extra stack arguments: zero bytes, no assignments.
noExtraStack :: (ByteOff, [(CmmExpr,ByteOff)])
noExtraStack = (0,[])
365
-- | Build a CmmCall last node.  NB: the signature takes res_space
-- before arg_space, but CmmCall takes them in the opposite order.
toCall :: CmmExpr -> Maybe BlockId -> UpdFrameOffset -> ByteOff -> ByteOff
       -> CmmAGraph
toCall e cont updfr_off res_space arg_space
  = mkLast (CmmCall e cont arg_space res_space updfr_off)