Better handling of live range joins via spill slots in spill cleaner
[ghc.git] / compiler / nativeGen / RegSpillClean.hs
1 -- | Clean out unneeded spill/reload instrs
2 --
3 -- * Handling of join points
4 --
-- B1:                            B2:
--  ...                            ...
--  RELOAD SLOT(0), %r1            RELOAD SLOT(0), %r1
--  ... A ...                      ... B ...
--  jump B3                        jump B3
--
--            B3: ... C ...
--                RELOAD SLOT(0), %r1
--                ...
14 --
15 -- the plan:
16 -- So long as %r1 hasn't been written to in A, B or C then we don't need the
17 -- reload in B3.
18 --
19 -- What we really care about here is that on the entry to B3, %r1 will always
20 -- have the same value that is in SLOT(0) (ie, %r1 is _valid_)
21 --
22 -- This also works if the reloads in B1/B2 were spills instead, because
23 -- spilling %r1 to a slot makes that slot have the same value as %r1.
24 --
25
26 module RegSpillClean (
27 cleanSpills
28 )
29 where
30
31 import RegLiveness
32 import RegAllocInfo
33 import MachRegs
34 import MachInstrs
35 import Cmm
36
37 import UniqSet
38 import UniqFM
39 import Unique
40 import State
41 import Outputable
42
43 import Data.Maybe
44 import Data.List
45
-- | Clean out unneeded spill/reloads from this top level thing.
--   Runs the iterative cleaner starting from pass 0 with a fresh state.
cleanSpills :: LiveCmmTop -> LiveCmmTop
cleanSpills = flip evalState initCleanS . cleanSpin 0
50
-- | Do one pass of cleaning.
--   Each pass cleans reloads (walking forwards) and then spills (walking
--   backwards) in every block, then collates the per-jump valid-reg info
--   for use by the next pass. We keep spinning until two consecutive
--   passes clean nothing.
cleanSpin :: Int -> LiveCmmTop -> CleanM LiveCmmTop

{-
cleanSpin spinCount code
 = do   jumpValid       <- gets sJumpValid
        pprTrace "cleanSpin"
                (  int spinCount
                $$ text "--- code"
                $$ ppr code
                $$ text "--- joins"
                $$ ppr jumpValid)
        $ cleanSpin' spinCount code
-}

cleanSpin spinCount code
 = do
        -- init count of cleaned spills/reloads for this pass
        modify $ \s -> s
                { sCleanedSpillsAcc     = 0
                , sCleanedReloadsAcc    = 0 }

        code'   <- mapBlockTopM cleanBlock code

        -- During the cleaning of each block we collected information about what regs
        --      were valid across each jump. Based on this, work out whether it will be
        --      safe to erase reloads after join points for the next pass.
        collateJoinPoints

        -- remember how many spills/reloads we cleaned in this pass
        spills  <- gets sCleanedSpillsAcc
        reloads <- gets sCleanedReloadsAcc
        modify $ \s -> s
                { sCleanedCount = (spills, reloads) : sCleanedCount s }

        -- if nothing was cleaned in this pass or the last one
        --      then we're done and it's time to bail out.
        --      NOTE(review): we return 'code' rather than 'code'' here. This looks
        --      lossy, but a pass that counts (0, 0) can only differ from its input
        --      by erasing nop reg-reg moves (which are not counted), and those were
        --      already erased by the preceding zero pass — so code == code'.
        cleanedCount    <- gets sCleanedCount
        if take 2 cleanedCount == [(0, 0), (0, 0)]
           then return code

        -- otherwise go around again
           else cleanSpin (spinCount + 1) code'
94
95
-- | Clean one basic block.
--   Looks up the regs known valid on entry (collected from inward jump
--   edges on the previous pass), then cleans reloads forwards and spills
--   backwards over the block's instructions.
cleanBlock :: LiveBasicBlock -> CleanM LiveBasicBlock
cleanBlock (BasicBlock id instrs)
 = do   jumpValid       <- gets sJumpValid

        -- no join info for this block means we know nothing on entry
        let entryAssoc  = fromMaybe emptyAssoc (lookupUFM jumpValid id)

        cleanedReloads  <- cleanReload entryAssoc [] instrs
        cleanedSpills   <- cleanSpill  emptyUniqSet [] cleanedReloads
        return (BasicBlock id cleanedSpills)
107
108
-- | Clean out unneeded reload instructions.
--      Walking forwards across the code:
--      on a reload, if we know a reg already has the same value as a slot
--      then we don't need to do the reload.
--
cleanReload
        :: Assoc Store  -- ^ two store locations are associated if they have the same value
        -> [LiveInstr]  -- ^ acc; cleaned instrs are prepended here, so it builds up in reverse order
        -> [LiveInstr]  -- ^ instrs to clean (in program order — NOTE(review): the old comment
                        --   said "backwards", but cleanBlock passes the block's instrs directly)
        -> CleanM [LiveInstr] -- ^ cleaned instrs (in reverse order; cleanSpill walks these backwards)

cleanReload _ acc []
        = return acc

-- Rewrite live range joins via spill slots to just a spill and a reg-reg move.
--      A SPILL immediately followed by a RELOAD from the same slot keeps the spill
--      (other readers of the slot may still need it) and replaces the reload with a
--      cheaper reg-reg move. Hopefully the spill will also be cleaned in the next pass.
--      If reg1 == reg2 the move is a nop and is erased by the isRegRegMove case below.
--
cleanReload assoc acc (Instr i1 live1 : Instr i2 _ : instrs)

        | SPILL reg1 slot1      <- i1
        , RELOAD slot2 reg2     <- i2
        , slot1 == slot2
        = do
                -- the replaced reload counts towards this pass's cleaned total,
                -- which also guarantees the pass is not treated as a no-op
                modify $ \s -> s { sCleanedReloadsAcc = sCleanedReloadsAcc s + 1 }
                cleanReload assoc acc
                        (Instr i1 live1 : Instr (mkRegRegMoveInstr reg1 reg2) Nothing : instrs)


cleanReload assoc acc (li@(Instr i1 _) : instrs)
        | Just (r1, r2) <- isRegRegMove i1
        = if r1 == r2
                -- erase any left over nop reg-reg moves while we're here
                --      this will also catch any nop moves that the "live range join"
                --      case above happens to add
                then cleanReload assoc acc instrs

                -- if r1 has the same value as some slots and we copy r1 to r2,
                --      then r2 is now associated with those slots instead
                else do let assoc'      = addAssoc (SReg r1) (SReg r2)
                                        $ delAssoc (SReg r2)
                                        $ assoc

                        cleanReload assoc' (li : acc) instrs


cleanReload assoc acc (li@(Instr instr _) : instrs)

        | SPILL reg slot        <- instr
        = let assoc'    = addAssoc (SReg reg)  (SSlot slot)     -- doing the spill makes reg and slot the same value
                        $ delAssoc (SSlot slot)                 -- slot value changes on spill
                        $ assoc
          in  cleanReload assoc' (li : acc) instrs

        | RELOAD slot reg       <- instr
        = if elemAssoc (SSlot slot) (SReg reg) assoc

                -- if the reg and slot had the same value before reload
                --      then we don't need the reload.
                then do
                        modify $ \s -> s { sCleanedReloadsAcc = sCleanedReloadsAcc s + 1 }
                        cleanReload assoc acc instrs

                -- reg and slot had different values before reload
                else
                 let assoc'     = addAssoc (SReg reg)  (SSlot slot)     -- doing the reload makes reg and slot the same value
                                $ delAssoc (SReg reg)                   -- reg value changes on reload
                                $ assoc
                 in  cleanReload assoc' (li : acc) instrs

        -- remember the association over a jump, so the next pass can intersect
        --      the assocs arriving at each join point (see collateJoinPoints)
        | targets       <- jumpDests instr []
        , not $ null targets
        = do    mapM_ (accJumpValid assoc) targets
                cleanReload assoc (li : acc) instrs

        -- writing to a reg changes its value, so drop all its associations.
        --      this guard always matches, so the cascade is exhaustive.
        | RU _ written  <- regUsage instr
        = let assoc'    = foldr delAssoc assoc (map SReg $ nub written)
          in  cleanReload assoc' (li : acc) instrs
188
189
-- | Clean out unneeded spill instructions.
--      We walk the instrs in reverse program order (as delivered by cleanReload):
--      if a slot is spilled to again before anything reloads from it, the earlier
--      spill can never be read, so it may be erased.
cleanSpill
        :: UniqSet Int          -- ^ slots that have been spilled, but not reloaded from
        -> [LiveInstr]          -- ^ acc; builds back up in program order
        -> [LiveInstr]          -- ^ instrs to clean (in reverse program order)
        -> CleanM [LiveInstr]   -- ^ cleaned instrs (in program order)

cleanSpill _ acc []
        = return acc

cleanSpill unused acc (li@(Instr instr _) : instrs)

        -- the slot is overwritten (by a later spill) before anything reads it,
        --      so this spill is dead and can be erased
        | SPILL _ slot          <- instr
        , elementOfUniqSet slot unused
        = do    modify $ \s -> s { sCleanedSpillsAcc = sCleanedSpillsAcc s + 1 }
                cleanSpill unused acc instrs

        -- keep this spill, and mark its slot unused until we see a reload from it
        | SPILL _ slot          <- instr
        = cleanSpill (addOneToUniqSet unused slot) (li : acc) instrs

        -- a reload means the slot's current value is live
        | RELOAD slot _         <- instr
        = cleanSpill (delOneFromUniqSet unused slot) (li : acc) instrs

        -- some other instruction; slots are unaffected
        | otherwise
        = cleanSpill unused (li : acc) instrs
226
227
-- | Combine the associations from all the inward control flow edges.
--      Each block's accumulated per-edge associations are intersected, so a
--      store pairing survives only if it holds on every path into the block.
--      The accumulator is reset ready for the next pass.
collateJoinPoints :: CleanM ()
collateJoinPoints
        = modify collate
        where collate s
                = s { sJumpValid        = mapUFM intersects (sJumpValidAcc s)
                    , sJumpValidAcc     = emptyUFM }
237
-- | Intersect a list of associations; an empty list yields the empty assoc.
intersects :: [Assoc Store] -> Assoc Store
intersects assocs
        = case assocs of
                []      -> emptyAssoc
                _       -> foldl1' intersectAssoc assocs
241
242
243
---------------
-- State monad threaded through the cleaner; carries per-pass counters and
-- the join-point information collected across passes.
type CleanM = State CleanS
data CleanS
        = CleanS
        { -- regs which are valid at the start of each block.
          --    written by collateJoinPoints at the end of a pass,
          --    read by cleanBlock at the start of the next.
          sJumpValid            :: UniqFM (Assoc Store)

          -- collecting up what regs were valid across each jump.
          --    one entry per inward edge; in the next pass we collate
          --    (intersect) these and write the results to sJumpValid.
        , sJumpValidAcc         :: UniqFM [Assoc Store]

          -- spills/reloads cleaned each pass (latest at front).
          --    cleanSpin inspects the first two entries to decide
          --    when to stop iterating.
        , sCleanedCount         :: [(Int, Int)]

          -- spills/reloads that have been cleaned in this pass so far.
        , sCleanedSpillsAcc     :: Int
        , sCleanedReloadsAcc    :: Int }

-- | Initial cleaner state: no join info, nothing cleaned yet.
initCleanS :: CleanS
initCleanS
        = CleanS
        { sJumpValid            = emptyUFM
        , sJumpValidAcc         = emptyUFM

        , sCleanedCount         = []

        , sCleanedSpillsAcc     = 0
        , sCleanedReloadsAcc    = 0 }
273
274
-- | Remember the association that held just before a jump to this block.
--      Entries accumulate in a per-block list, one per inward edge;
--      collateJoinPoints intersects them at the end of the pass.
accJumpValid :: Assoc Store -> BlockId -> CleanM ()
accJumpValid assocs target
        = modify record
        where record s
                = s { sJumpValidAcc
                        = addToUFM_C (++) (sJumpValidAcc s) target [assocs] }
283
--------------
-- A store location can be a stack slot or a register
--
data Store
        = SSlot Int
        | SReg Reg

-- spill cleaning is only done once all virtuals have been allocated to realRegs,
--      so finding a virtual reg here is a compiler bug, not a user error.
--      Uniques are tagged 'R' / 'S' so a reg and a slot with the same number
--      never collide in a UniqFM.
instance Uniquable Store where
    getUnique (SReg r)
        | RealReg i <- r
        = mkUnique 'R' i

        | otherwise
        = error "RegSpillClean.getUnique: found virtual reg during spill clean, only real regs expected."

    getUnique (SSlot i) = mkUnique 'S' i

instance Outputable Store where
        ppr (SSlot i)   = text "slot" <> int i
        ppr (SReg r)    = ppr r
306
307
--------------
-- Association graphs.
--      In the spill cleaner, two store locations are associated if they are known
--      to hold the same value. The graph is kept as an adjacency map: each node
--      maps to the set of its direct neighbors, with edges stored in both
--      directions (see addAssoc), so the relation is symmetric by construction.
--
type Assoc a    = UniqFM (UniqSet a)

-- | an empty association graph: nothing is known equal to anything else
emptyAssoc :: Assoc a
emptyAssoc      = emptyUFM
318
319
-- | Add an association between these two things.
--      The edge is recorded in both directions so the graph stays symmetric.
addAssoc :: Uniquable a
        => a -> a -> Assoc a -> Assoc a

addAssoc a b m
        = addEdge b a (addEdge a b m)
        where addEdge from to g
                = addToUFM_C unionUniqSets g from (unitUniqSet to)
328
329
-- | Delete all associations to a node.
--      Removes the node's own adjacency entry, then strips the node out of
--      each former neighbor's set to keep the graph symmetric.
delAssoc :: (Outputable a, Uniquable a)
        => a -> Assoc a -> Assoc a

delAssoc a m
        = case lookupUFM m a of
                Nothing         -> m
                Just neighbors  -> foldUniqSet dropBack (delFromUFM m a) neighbors
        where dropBack b g = delAssoc1 b a g
340
341
-- | Delete a single association edge (a -> b).
--      If a has no adjacency entry the graph is returned unchanged.
delAssoc1 :: Uniquable a
        => a -> a -> Assoc a -> Assoc a

delAssoc1 a b m
        = case lookupUFM m a of
                Nothing         -> m
                Just aSet       -> addToUFM m a (delOneFromUniqSet aSet b)
351
352
-- | Check whether two things are (transitively) associated,
--      i.e. whether b is reachable from a in the association graph.
elemAssoc :: (Outputable a, Uniquable a)
        => a -> a -> Assoc a -> Bool

elemAssoc a b m
        = let reachable = closeAssoc a m
          in  elementOfUniqSet b reachable
359
-- | find the refl. trans. closure of the association from this point
--      Standard worklist reachability: 'visited' is the closure built so far,
--      'toVisit' is the frontier. A visited node stays in toVisit until the
--      next iteration removes it; termination holds because each step either
--      shrinks toVisit or grows visited, and visited is bounded by the graph.
closeAssoc :: (Outputable a, Uniquable a)
        => a -> Assoc a -> UniqSet a

closeAssoc a assoc
 =      closeAssoc' assoc emptyUniqSet (unitUniqSet a)
 where  closeAssoc' assoc visited toVisit
         = case uniqSetToList toVisit of

                -- nothing else to visit, we're done
                []      -> visited

                (x:_)

                 -- we've already seen this node; just drop it from the frontier
                 | elementOfUniqSet x visited
                 -> closeAssoc' assoc visited (delOneFromUniqSet toVisit x)

                 -- haven't seen this node before,
                 --     remember to visit all its neighbors
                 --     (a node with no entry in the map has no neighbors)
                 | otherwise
                 -> let neighbors
                         = case lookupUFM assoc x of
                                Nothing         -> emptyUniqSet
                                Just set        -> set

                    in closeAssoc' assoc
                                (addOneToUniqSet visited x)
                                (unionUniqSets toVisit neighbors)
390
-- | Intersect two association graphs:
--      keep only the edges that are present in both.
intersectAssoc
        :: Uniquable a
        => Assoc a -> Assoc a -> Assoc a

intersectAssoc
        = intersectUFM_C intersectUniqSets
398